diff --git a/.github/ISSUE_TEMPLATE/bug_report.yml b/.github/ISSUE_TEMPLATE/bug_report.yml
new file mode 100644
index 000000000..bafd57195
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/bug_report.yml
@@ -0,0 +1,103 @@
+name: Bug report
+title: "[Bug] "
+description: Problems and issues with code of Exchangis
+labels: [bug, triage]
+body:
+  - type: markdown
+    attributes:
+      value: |
+        Thank you for reporting the problem!
+        Please make sure what you are reporting is a bug with reproducible steps. To ask questions
+        or share ideas, please post on our [Discussion page](https://github.com/WeBankFinTech/Exchangis/discussions) instead.
+
+  - type: checkboxes
+    attributes:
+      label: Search before asking
+      description: >
+        Please make sure to search in the [issues](https://github.com/WeBankFinTech/Exchangis/issues) first to see
+        whether the same issue was reported already.
+      options:
+        - label: >
+            I searched the [issues](https://github.com/WeBankFinTech/Exchangis/issues) and found no similar
+            issues.
+          required: true
+
+  - type: dropdown
+    attributes:
+      label: Exchangis Component
+      description: |
+        What component are you using? Exchangis has many modules, please choose the module
+        where you found the bug.
+      multiple: true
+      options:
+        - "exchangis-datasource"
+        - "exchangis-job-launcher"
+        - "exchangis-job-server"
+        - "exchangis-job-builder"
+        - "exchangis-job-metrics"
+        - "exchangis-project"
+        - "exchangis-plugins"
+        - "exchangis-dao"
+        - "exchangis-web"
+    validations:
+      required: true
+
+  - type: textarea
+    attributes:
+      label: What happened + What you expected to happen
+      description: Describe 1. the bug 2. expected behavior 3. useful information (e.g., logs)
+      placeholder: >
+        Please provide the context in which the problem occurred, the steps to reproduce it
+        (e.g. 1. Go to '...' 2. Click on '...' 3. Scroll down to '...' 4. See error), what you
+        expected to happen instead, and why you think the behaviour is erroneous. It is extremely
+        helpful if you can copy and paste the fragment of logs showing the exact error messages
+        or wrong behaviour here.
+
+        **NOTE**: If applicable, add screenshots to help explain your problem.
+    validations:
+      required: true
+
+  - type: textarea
+    attributes:
+      label: Relevant platform
+      description: The platform on which this issue occurred
+      placeholder: >
+        Please specify Desktop or Smartphone, Version / Dependencies / OS / Browser
+    validations:
+      required: true
+
+  - type: textarea
+    attributes:
+      label: Reproduction script
+      description: >
+        Please provide a reproducible script. Providing a narrow reproduction (minimal / no external dependencies) will
+        help us triage and address issues in a timely manner!
+      placeholder: >
+        Please provide a short code snippet (less than 50 lines if possible) that can be copy-pasted to
+        reproduce the issue. The snippet should have **no external library dependencies**
+        (i.e., use fake or mock data / environments).
+
+        **NOTE**: If the code snippet cannot be run by itself, the issue will be marked as "needs-repro-script"
+        until the repro instruction is updated.
+    validations:
+      required: true
+
+  - type: textarea
+    attributes:
+      label: Anything else
+      description: Anything else we need to know?
+      placeholder: >
+        How often does this problem occur? (Once? Every time? Only when certain conditions are met?)
+        Any relevant logs to include? Are there other relevant issues?
+
+  - type: checkboxes
+    attributes:
+      label: Are you willing to submit a PR?
+      description: >
+        This is absolutely not required, but we are happy to guide you in the contribution process
+        especially if you already have a good understanding of how to implement the fix.
+      options:
+        - label: Yes I am willing to submit a PR!
+
+  - type: markdown
+    attributes:
+      value: "Thanks for completing our form!"
diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml
new file mode 100644
index 000000000..7c34114e9
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/config.yml
@@ -0,0 +1,5 @@
+blank_issues_enabled: false
+contact_links:
+  - name: Ask a question or get support
+    url: https://github.com/WeBankFinTech/Exchangis/discussions
+    about: Ask a question or request support for using Exchangis
\ No newline at end of file
diff --git a/.github/ISSUE_TEMPLATE/feature_request.yml b/.github/ISSUE_TEMPLATE/feature_request.yml
new file mode 100644
index 000000000..357f173ff
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/feature_request.yml
@@ -0,0 +1,63 @@
+name: Exchangis feature request
+description: Suggest an idea for the Exchangis project
+title: "[Feature] "
+labels: [enhancement]
+body:
+  - type: markdown
+    attributes:
+      value: |
+        Thank you for finding the time to propose a new feature!
+        We really appreciate the community efforts to improve Exchangis.
+  - type: checkboxes
+    attributes:
+      label: Search before asking
+      description: >
+        Please make sure to search in the [issues](https://github.com/WeBankFinTech/Exchangis/issues) first to see
+        whether the same feature was requested already.
+      options:
+        - label: >
+            I searched the [issues](https://github.com/WeBankFinTech/Exchangis/issues) and found no similar
+            feature requirement.
+          required: true
+  - type: textarea
+    attributes:
+      label: Problem Description
+      description: Is your feature request related to a problem? Please describe.
+
+  - type: textarea
+    attributes:
+      label: Description
+      description: A short description of your feature
+
+  - type: textarea
+    attributes:
+      label: Use case
+      description: >
+        Describe the use case of your feature request.
+      placeholder: >
+        A clear and concise description of what you want to happen.
+
+  - type: textarea
+    attributes:
+      label: Solutions
+      description: A clear and concise description of any alternative solutions or features you've considered.
+
+  - type: textarea
+    attributes:
+      label: Anything else
+      description: Anything else we need to know?
+      placeholder: >
+        Add any other context or screenshots about the feature request here.
+
+  - type: checkboxes
+    attributes:
+      label: Are you willing to submit a PR?
+      description: >
+        This is absolutely not required, but we are happy to guide you in the contribution process
+        especially if you already have a good understanding of how to implement the feature.
+      options:
+        - label: Yes I am willing to submit a PR!
+
+  - type: markdown
+    attributes:
+      value: "Thanks for completing our form!"
diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md
new file mode 100644
index 000000000..57e883bcd
--- /dev/null
+++ b/.github/PULL_REQUEST_TEMPLATE.md
@@ -0,0 +1,28 @@
+### What is the purpose of the change
+(For example: Exchangis-Job defines the core ability of Exchangis; it provides the abilities of job management, job transform, and job launch.
+Related issues: #50.
+)
+
+### Brief change log
+(for example:)
+- defines the job server module of Exchangis;
+- defines the job launcher module of Exchangis;
+- defines the job metrics module of Exchangis.
+
+### Verifying this change
+(Please pick one of the following options)
+This change is a trivial rework / code cleanup without any test coverage.
+(or)
+This change is already covered by existing tests, such as (please describe tests).
+(or)
+This change added tests and can be verified as follows:
+(example:)
+- Added tests for creating and executing Exchangis jobs, verifying the availability of the different Exchangis job types, such as the sqoop job and the datax job.
+
+### Does this pull request potentially affect one of the following parts:
+- Dependencies (does it add or upgrade a dependency): (yes / no)
+- Anything that affects deployment: (yes / no / don't know)
+- The Core framework, i.e., JobManager, Server: (yes / no)
+
+### Documentation
+- Does this pull request introduce a new feature? (yes / no)
+- If yes, how is the feature documented? (not applicable / docs / JavaDocs / not documented)
\ No newline at end of file
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
new file mode 100644
index 000000000..d0c49fde7
--- /dev/null
+++ b/.github/workflows/build.yml
@@ -0,0 +1,53 @@
+#
+# Copyright 2019 WeBank.
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+name: Exchangis CI Actions
+
+on:
+  push:
+  pull_request:
+
+jobs:
+  build:
+    runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        node-version: [16.13.1]
+        # See supported Node.js release schedule at https://nodejs.org/en/about/releases/
+
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v2
+      - name: Set up JDK 8
+        uses: actions/setup-java@v2
+        with:
+          distribution: 'adopt'
+          java-version: 8
+      - name: Use Node.js ${{ matrix.node-version }}
+        uses: actions/setup-node@v2
+        with:
+          node-version: ${{ matrix.node-version }}
+      - name: Build backend by maven
+        run: |
+          mvn -N install
+          mvn clean package
+      - name: Build frontend by node.js
+        run: |
+          cd web
+          npm install
+          npm run build
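The CI workflow above mirrors what a contributor can run locally before opening a pull request. A minimal sketch of the same steps, assuming a checkout of the repository root with JDK 8, Maven and Node.js 16 installed:

```bash
#!/bin/bash
# Build the backend the same way the CI workflow does:
# install the root POM first, then package all modules.
mvn -N install
mvn clean package

# Build the frontend from the web/ directory.
cd web
npm install
npm run build
```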
diff --git a/.github/workflows/check_license.yml b/.github/workflows/check_license.yml
new file mode 100644
index 000000000..10e3f9fde
--- /dev/null
+++ b/.github/workflows/check_license.yml
@@ -0,0 +1,48 @@
+#
+# Copyright 2019 WeBank.
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+name: Exchangis License check
+
+on: [push, pull_request]
+
+jobs:
+  build:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout source
+        uses: actions/checkout@v2
+      - name: Set up JDK 8
+        uses: actions/setup-java@v2
+        with:
+          java-version: '8'
+          distribution: 'adopt'
+      - name: mvn -N install
+        run:
+          mvn -N install
+      - name: License check with Maven
+        run: |
+          rat_file=`mvn apache-rat:check | { grep -oe "\\S\\+/rat.txt" || true; }`
+          echo "rat_file=$rat_file"
+          if [[ -n "$rat_file" ]];then echo "check error!" && cat $rat_file && exit 123;else echo "check success!" ;fi
+      - name: Upload the report
+        uses: actions/upload-artifact@v2
+        with:
+          name: license-check-report
+          path: "**/target/rat.txt"
diff --git a/.github/workflows/dead-link-checker.yml b/.github/workflows/dead-link-checker.yml
new file mode 100644
index 000000000..8de24aac8
--- /dev/null
+++ b/.github/workflows/dead-link-checker.yml
@@ -0,0 +1,17 @@
+name: Dead Link Check
+
+on: [push]
+
+jobs:
+  dead-links-check:
+    runs-on: ubuntu-latest
+    timeout-minutes: 30
+    if: (github.repository == 'WeBankFinTech/Exchangis')
+    steps:
+      - uses: actions/checkout@v3
+      - uses: gaurav-nelson/github-action-markdown-link-check@v1
+        with:
+          use-quiet-mode: 'no'
+          use-verbose-mode: 'yes'
+          folder-path: '../'
+          config-file: '.github/workflows/dlc.json'
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 000000000..a2857cb35
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,40 @@
+/target/
+target
+
+### STS ###
+.apt_generated
+.classpath
+.factorypath
+.project
+.settings
+.springBeans
+.sts4-cache
+
+### IntelliJ IDEA ###
+.idea
+*.log
+*.iws
+*.iml
+*.ipr
+
+### NetBeans ###
+/nbproject/private/
+/build/
+/nbbuild/
+/dist/
+/nbdist/
+/.nb-gradle/
+.mvn/wrapper/maven-wrapper.jar
+.mvn/wrapper/maven-wrapper.properties
+/packages/
+exchangis-server/exchangis-extds
+/logs/
+/web/package-lock.json
+package-lock.json
+.DS_Store
+
+web/dist
+
+workspace/
+
+.flattened-pom.xml
\ No newline at end of file
diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 000000000..50fd364a9
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,9 @@
+FROM harbor.local.hching.com/library/jdk:8u301
+
+# ADD auto-extracts the tarball; the ".tar.gz" suffix here is just part of the target directory name
+ADD assembly-package/target/wedatasphere-exchangis-1.1.2.tar.gz /opt/wedatasphere-exchangis.tar.gz
+
+RUN cd /opt/wedatasphere-exchangis.tar.gz/packages/ && tar -zxf exchangis-server_1.1.2.tar.gz
+
+WORKDIR /opt/wedatasphere-exchangis.tar.gz/sbin
+
+ENTRYPOINT ["/bin/bash", "start.sh"]
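A quick way to sanity-check the image. A minimal sketch, assuming the assembly tarball has already been built under assembly-package/target and that the image tag `exchangis:1.1.2` is a name of your own choosing (the port 9321 comes from application-exchangis.yml later in this patch):

```bash
#!/bin/bash
# Build the image from the repository root
# (requires assembly-package/target/wedatasphere-exchangis-1.1.2.tar.gz to exist).
docker build -t exchangis:1.1.2 .

# Run it and expose the Exchangis server port configured in application-exchangis.yml.
docker run -d --name exchangis -p 9321:9321 exchangis:1.1.2
```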
diff --git a/README-ZH.md b/README-ZH.md
new file mode 100644
index 000000000..ffdc9911f
--- /dev/null
+++ b/README-ZH.md
@@ -0,0 +1,70 @@
+# Exchangis
+
+[![License](https://img.shields.io/badge/license-Apache%202-4EB1BA.svg)](https://www.apache.org/licenses/LICENSE-2.0.html)
+
+[English](README.md) | 中文
+
+## 介绍
+
+Exchangis 是微众银行大数据平台 WeDataSphere 与社区用户共同研发的新版数据交换工具,支持异构数据源之间的结构化和非结构化数据传输同步。
+
+Exchangis 抽象了一套统一的数据源和同步作业定义插件,允许用户快速接入新的数据源,并只需在数据库中简单配置即可在页面中使用。
+
+基于插件化的框架设计,及计算中间件 [Linkis](https://github.com/apache/incubator-linkis),Exchangis 可快速集成对接 Linkis 已集成的数据同步引擎,将 Exchangis 的同步作业转换成 Linkis 数据同步引擎的数据同步作业。
+
+借助于 [Linkis](https://github.com/apache/incubator-linkis) 计算中间件的连接、复用与简化能力,Exchangis 天生便具备了高并发、高可用、多租户隔离和资源管控的金融级数据同步能力。
+
+### 界面预览
+
+![image](images/zh_CN/ch1/frontend_view.png)
+
+## 核心特点
+
+### 1. 轻量化的数据源管理
+
+- 基于 Linkis DataSource,抽象了底层数据源作为 Exchangis 同步作业的 Source 和 Sink 所必须的所有能力,只需简单配置即可完成一个数据源的创建。
+
+- 特有的数据源版本发布管理功能,支持历史版本数据源回滚,一键发布无需再次配置历史数据源。
+
+
+### 2. 高稳定、快响应的数据同步任务执行
+
+- **近实时任务管控**
+快速抓取传输任务日志以及传输速率等信息,对多任务的 CPU 使用、内存使用、数据同步记录等各项指标进行监控展示,支持实时关闭任务;
+
+- **任务高并发传输**
+多任务并发执行,并且支持复制子任务,实时展示每个任务的状态;多租户执行功能有效避免任务在执行过程中彼此影响;
+
+- **任务状态自检**
+监控长时间运行的任务和状态异常任务,及时中止任务并释放占用的资源。
+
+
+### 3. 与 DSS 工作流打通,一站式大数据开发的门户
+
+- 实现 DSS AppConn 包括一级 SSO 规范、二级组织结构规范、三级开发流程规范在内的三级规范;
+
+- 作为 DSS 工作流的数据交换节点,是整个工作流链路中的门户流程,为后续的工作流节点运行提供稳固的数据基础;
+
+### 4. 支持多种导数引擎
+
+- 支持 Sqoop 和 DataX 引擎进行多种异构数据源之间的导数。
+
+## 整体设计
+
+### 架构设计
+
+![架构设计](images/zh_CN/ch1/home_page_zh.png)
+
+
+## 相关文档
+[安装部署文档](docs/zh_CN/ch1/exchangis_deploy_cn.md)
+[用户手册](docs/zh_CN/ch1/exchangis_user_manual_cn.md)
+
+## 交流贡献
+
+如果您想得到最快的响应,请给我们提 issue,或者扫码进群:
+
+![communication](images/zh_CN/ch1/code.png)
+
+## License
+
+Exchangis is under the Apache 2.0 License. See the [License](./LICENSE) file for details.
diff --git a/README.md b/README.md
new file mode 100644
index 000000000..5e211e77a
--- /dev/null
+++ b/README.md
@@ -0,0 +1,67 @@
+[![License](https://img.shields.io/badge/license-Apache%202-4EB1BA.svg)](https://www.apache.org/licenses/LICENSE-2.0.html)
+
+English | [中文](README-ZH.md)
+
+## Introduction
+
+Exchangis is a new version of the data exchange tool jointly developed by WeDataSphere, the big data platform of WeBank, and community users. It supports synchronized transmission of structured and unstructured data between heterogeneous data sources.
+
+Exchangis abstracts a unified set of data source and synchronization job definition plugins, allowing users to quickly access new data sources and use them on pages with simple configuration in the database.
+
+Based on the plugin framework design and the computing middleware [Linkis](https://github.com/apache/incubator-linkis), Exchangis can quickly connect to the data synchronization engines already integrated in Linkis, converting an Exchangis synchronization job into a Linkis data synchronization job.
+
+With the help of the connection, reuse and simplification capabilities of the [Linkis](https://github.com/apache/incubator-linkis) computing middleware, Exchangis is inherently equipped with financial-grade data synchronization capabilities of high concurrency, high availability, multi-tenant isolation and resource control.
+
+### Interface preview
+
+![image](images/zh_CN/ch1/frontend_view.png)
+
+## Core characteristics
+
+### 1. Lightweight datasource management
+
+- Based on Linkis DataSource, Exchangis abstracts all the capabilities an underlying data source needs to act as the Source and Sink of a synchronization job. A data source can be created with simple configuration.
+
+- A dedicated datasource version publishing management function supports rolling back to historical datasource versions, and one-click publishing without configuring historical datasources again.
+
+
+### 2. High-stability and fast-response data synchronization task execution
+
+- **Near-real-time task management**
+  Quickly captures information such as transmission task logs and transmission rate, monitors and displays multi-task indicators including CPU usage, memory usage and data synchronization records, and supports closing tasks in real time.
+
+- **High-concurrency task transmission**
+  Multiple tasks are executed concurrently, sub-tasks can be copied, and the status of each task is shown in real time. The multi-tenant execution function effectively prevents tasks from affecting each other during execution.
+
+- **Self-check of task status**
+  Monitors long-running tasks and tasks in an abnormal state, stopping them and releasing the occupied resources in time.
+
+
+### 3. Integrate with DSS workflow, one-stop big data development portal
+
+- Realizes DSS AppConn's three-level specification, including the first-level SSO specification, the second-level organizational structure specification and the third-level development process specification.
+
+- As the data exchange node of a DSS workflow, it is the fundamental process in the whole workflow link, providing a solid data foundation for the subsequent operation of workflow nodes.
+
+## Overall Design
+
+### Architecture Design
+
+![Architecture Design](images/zh_CN/ch1/home_page_en.png)
+
+
+## Documents
+
+[Quick Deploy](docs/en_US/ch1/exchangis_deploy_en.md)
+[User Manual](docs/en_US/ch1/exchangis_user_manual_en.md)
+
+## Communication and contribution
+
+If you want to get the fastest response, please open an issue, or scan the QR code to join our group:
+
+![communication](images/en_US/ch1/code.png)
+
+## License
+
+Exchangis is under the Apache 2.0 License. See the [License](./LICENSE) file for details.
+
diff --git a/assembly-package/config/application-exchangis.yml b/assembly-package/config/application-exchangis.yml
new file mode 100644
index 000000000..946ee2cb8
--- /dev/null
+++ b/assembly-package/config/application-exchangis.yml
@@ -0,0 +1,20 @@
+server:
+  port: 9321
+spring:
+  application:
+    name: dss-exchangis-main-server-dev
+eureka:
+  client:
+    serviceUrl:
+      defaultZone: http://{IP}:{PORT}/eureka/
+  instance:
+    metadata-map:
+      test: wedatasphere
+
+management:
+  endpoints:
+    web:
+      exposure:
+        include: refresh,info
+logging:
+  config: classpath:log4j2.xml
diff --git a/assembly-package/config/config.sh b/assembly-package/config/config.sh
new file mode 100644
index 000000000..9a4a0e502
--- /dev/null
+++ b/assembly-package/config/config.sh
@@ -0,0 +1,11 @@
+# IP of the LINKIS_GATEWAY service, used to locate the linkis-mg-gateway service
+LINKIS_GATEWAY_HOST={IP}
+
+# Port of the LINKIS_GATEWAY service, used to locate the linkis-mg-gateway service
+LINKIS_GATEWAY_PORT={PORT}
+
+# Exchangis server port
+EXCHANGIS_PORT={PORT}
+
+# Eureka service URL
+EUREKA_URL=http://{IP:PORT}/eureka/
\ No newline at end of file
diff --git a/assembly-package/config/db.sh b/assembly-package/config/db.sh
new file mode 100644
index 000000000..cf33388e3
--- /dev/null
+++ b/assembly-package/config/db.sh
@@ -0,0 +1,9 @@
+# Database connection settings:
+# host IP, port, username, password and database name
+MYSQL_HOST={IP}
+MYSQL_PORT={PORT}
+MYSQL_USERNAME={username}
+MYSQL_PASSWORD={password}
+DATABASE={dbName}
+
+
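These two files are consumed by `sbin/install.sh`, which sources them and substitutes the values into the server configuration. A minimal sketch of the same flow, with the `{...}` placeholders assumed to be filled in with values of your own:

```bash
#!/bin/bash
# Source the deployment settings (the values inside are illustrative placeholders).
source assembly-package/config/config.sh
source assembly-package/config/db.sh

# Initialize the Exchangis database the same way install.sh's init_database does.
mysql -h "${MYSQL_HOST}" -P "${MYSQL_PORT}" -u "${MYSQL_USERNAME}" -p"${MYSQL_PASSWORD}" \
  --default-character-set=utf8 \
  -e "CREATE DATABASE IF NOT EXISTS ${DATABASE}; USE ${DATABASE}; source db/exchangis_ddl.sql;"
```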
diff --git a/assembly-package/config/dss-exchangis-server.properties b/assembly-package/config/dss-exchangis-server.properties
new file mode 100644
index 000000000..70ebaca62
--- /dev/null
+++ b/assembly-package/config/dss-exchangis-server.properties
@@ -0,0 +1,69 @@
+#
+# Copyright 2019 WeBank
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+wds.linkis.test.mode=false
+wds.linkis.server.mybatis.datasource.url=jdbc:mysql://{IP}:{PORT}/{database}?useSSL=false&characterEncoding=UTF-8&allowMultiQueries=true&useAffectedRows=true
+wds.linkis.server.mybatis.datasource.username={username}
+wds.linkis.server.mybatis.datasource.password={password}
+
+wds.linkis.gateway.ip={LINKIS_IP}
+wds.linkis.gateway.port={LINKIS_PORT}
+wds.linkis.gateway.url=http://{LINKIS_IP}:{LINKIS_PORT}/
+wds.linkis.log.clear=true
+wds.linkis.server.version=v1
+
+# server rpc
+wds.linkis.ms.service.scan.package=com.webank.wedatasphere.exchangis
+
+# datasource client
+wds.exchangis.datasource.client.server-url=http://{LINKIS_IP}:{LINKIS_PORT}/
+wds.exchangis.datasource.client.token.value=EXCHANGIS-AUTH
+wds.exchangis.datasource.client.dws.version=v1
+
+# launcher client
+wds.exchangis.client.linkis.server-url=http://{LINKIS_IP}:{LINKIS_PORT}/
+wds.exchangis.client.linkis.token.value=EXCHANGIS-AUTH
+wds.exchangis.datasource.extension.dir=exchangis-extds/
+
+##restful
+wds.linkis.server.restful.scan.packages=com.webank.wedatasphere.exchangis.datasource.server.restful.api,\
+  com.webank.wedatasphere.exchangis.project.server.restful,\
+  com.webank.wedatasphere.exchangis.job.server.restful
+
+wds.linkis.server.mybatis.mapperLocations=classpath*:com/webank/wedatasphere/exchangis/job/server/mapper/impl/*.xml,\
+classpath*:com/webank/wedatasphere/exchangis/project/server/mapper/impl/*.xml,\
+classpath*:com/webank/wedatasphere/exchangis/project/provider/mapper/impl/*.xml,\
+classpath*:com/webank/wedatasphere/exchangis/engine/server/mapper/*.xml
+
+wds.linkis.server.mybatis.BasePackage=com.webank.wedatasphere.exchangis.dao,\
+  com.webank.wedatasphere.exchangis.project.server.mapper,\
+  com.webank.wedatasphere.exchangis.project.provider.mapper,\
+  com.webank.wedatasphere.linkis.configuration.dao,\
+  com.webank.wedatasphere.linkis.metadata.dao,\
+  com.webank.wedatasphere.exchangis.job.server.mapper,\
+  com.webank.wedatasphere.exchangis.job.server.dao,\
+  com.webank.wedatasphere.exchangis.engine.dao
+
+wds.exchangis.job.task.scheduler.load-balancer.flexible.segments.min-occupy=0.25
+wds.exchangis.job.task.scheduler.load-balancer.flexible.segments.max-occupy=0.5
+#wds.exchangis.job.scheduler.group.max.running-jobs=4
+
+wds.linkis-session.ticket.key=bdp-user-ticket-id
+wds.exchangis.limit.interface.value=false
+
+wds.exchangis.publicKeyStr=
+wds.exchangis.privateKeyStr=
diff --git a/assembly-package/config/log4j2.xml b/assembly-package/config/log4j2.xml
new file mode 100644
index 000000000..121b48d1d
--- /dev/null
+++ b/assembly-package/config/log4j2.xml
@@ -0,0 +1,53 @@
+<!-- log4j2 logging configuration -->
diff --git a/assembly-package/config/transform-processor-templates/datax-processor.java b/assembly-package/config/transform-processor-templates/datax-processor.java
new file mode 100644
index 000000000..e69de29bb
diff --git a/assembly-package/pom.xml b/assembly-package/pom.xml
new file mode 100644
index 000000000..dc473f537
--- /dev/null
+++ b/assembly-package/pom.xml
@@ -0,0 +1,78 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <parent>
+        <artifactId>exchangis</artifactId>
+        <groupId>com.webank.wedatasphere.exchangis</groupId>
+        <version>${revision}</version>
+        <relativePath>../pom.xml</relativePath>
+    </parent>
+    <modelVersion>4.0.0</modelVersion>
+    <artifactId>assembly-package</artifactId>
+    <packaging>pom</packaging>
+
+    <build>
+        <plugins>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-install-plugin</artifactId>
+                <version>2.4</version>
+                <configuration>
+                    <skip>true</skip>
+                </configuration>
+            </plugin>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-antrun-plugin</artifactId>
+                <version>1.3</version>
+                <executions>
+                    <execution>
+                        <phase>package</phase>
+                        <goals>
+                            <goal>run</goal>
+                        </goals>
+                    </execution>
+                </executions>
+            </plugin>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-assembly-plugin</artifactId>
+                <version>3.1.0</version>
+                <executions>
+                    <execution>
+                        <id>dist</id>
+                        <phase>package</phase>
+                        <goals>
+                            <goal>single</goal>
+                        </goals>
+                        <configuration>
+                            <attach>false</attach>
+                            <finalName>wedatasphere-exchangis-${revision}</finalName>
+                            <appendAssemblyId>false</appendAssemblyId>
+                            <skipAssembly>false</skipAssembly>
+                            <descriptors>
+                                <descriptor>src/main/assembly/assembly.xml</descriptor>
+                            </descriptors>
+                        </configuration>
+                    </execution>
+                </executions>
+            </plugin>
+        </plugins>
+    </build>
+</project>
diff --git a/assembly-package/sbin/common.sh b/assembly-package/sbin/common.sh
new file mode 100644
index 000000000..8ee615b64
--- /dev/null
+++ b/assembly-package/sbin/common.sh
@@ -0,0 +1,19 @@
+#!/bin/bash
+#
+# Copyright 2020 WeBank
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+declare -A MODULE_MAIN_CLASS
+MODULE_MAIN_CLASS["dss-exchangis-main-server-dev"]="com.webank.wedatasphere.exchangis.server.boot.ExchangisServerApplication"
diff --git a/assembly-package/sbin/configure.sh b/assembly-package/sbin/configure.sh
new file mode 100644
index 000000000..e61c428da
--- /dev/null
+++ b/assembly-package/sbin/configure.sh
@@ -0,0 +1,25 @@
+#!/bin/bash
+#
+# Copyright 2020 WeBank
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# configure modules
+
+configure_main(){
+    : # no-op placeholder, kept so the function body is syntactically valid
+}
+
+configure_server(){
+    : # no-op placeholder
+}
\ No newline at end of file
diff --git a/assembly-package/sbin/daemon.sh b/assembly-package/sbin/daemon.sh
new file mode 100644
index 000000000..c7ee8f1e1
--- /dev/null
+++ b/assembly-package/sbin/daemon.sh
@@ -0,0 +1,69 @@
+#!/bin/bash
+#
+# Copyright 2020 WeBank
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+load_env_definitions ${ENV_FILE}
+if [[ "x"${EXCHANGIS_HOME} != "x" ]]; then
+    source ${EXCHANGIS_HOME}/sbin/launcher.sh
+    source ${EXCHANGIS_HOME}/sbin/common.sh
+else
+    source ./launcher.sh
+    source ./common.sh
+fi
+
+usage(){
+    echo "Usage is [start|stop|restart {server}]"
+}
+
+start(){
+    # call launcher
+    launcher_start $1 $2
+}
+
+stop(){
+    # call launcher
+    launcher_stop $1 $2
+}
+
+restart(){
+    launcher_stop $1 $2
+    if [[ $? -eq 0 ]]; then
+        sleep 3
+        launcher_start $1 $2
+    fi
+}
+
+COMMAND=$1
+case $COMMAND in
+    start|stop|restart)
+        if [[ ! -z $2 ]]; then
+            SERVICE_NAME=${MODULE_DEFAULT_PREFIX}$2${MODULE_DEFAULT_SUFFIX}
+            MAIN_CLASS=${MODULE_MAIN_CLASS[${SERVICE_NAME}]}
+            if [[ "x"${MAIN_CLASS} != "x" ]]; then
+                $COMMAND ${SERVICE_NAME} ${MAIN_CLASS}
+            else
+                LOG ERROR "Cannot find the main class for [ ${SERVICE_NAME} ]"
+            fi
+        else
+            usage
+            exit 1
+        fi
+        ;;
+    *)
+        usage
+        exit 1
+        ;;
esac
\ No newline at end of file
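A usage sketch for the daemon script above: the second argument is expanded with the MODULE_DEFAULT_PREFIX / MODULE_DEFAULT_SUFFIX values from sbin/env.properties (shown next), so `server` resolves to the dss-exchangis-main-server-dev module defined in common.sh:

```bash
#!/bin/bash
# Start, restart and stop the Exchangis server via the daemon script.
# EXCHANGIS_HOME is assumed to be wherever the package was installed.
cd ${EXCHANGIS_HOME}/sbin

sh daemon.sh start server    # launches dss-exchangis-main-server-dev
sh daemon.sh restart server  # stop, wait 3 seconds, start again
sh daemon.sh stop server
```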
diff --git a/assembly-package/sbin/env.properties b/assembly-package/sbin/env.properties
new file mode 100644
index 000000000..c6e528ab4
--- /dev/null
+++ b/assembly-package/sbin/env.properties
@@ -0,0 +1,6 @@
+EXCHANGIS_CONF_PATH=/appcom/config/exchangis-config/background
+EXCHANGIS_LOG_PATH=/appcom/logs/exchangis/background
+MODULE_DEFAULT_PREFIX="dss-exchangis-main-"
+MODULE_DEFAULT_SUFFIX="-dev"
+DEBUG_MODE=false
+DEBUG_PORT=8321
\ No newline at end of file
diff --git a/assembly-package/sbin/install.sh b/assembly-package/sbin/install.sh
new file mode 100644
index 000000000..2ce1569f7
--- /dev/null
+++ b/assembly-package/sbin/install.sh
@@ -0,0 +1,226 @@
+#!/bin/bash
+#
+# Copyright 2020 WeBank
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+source ~/.bashrc
+shellDir=`dirname $0`
+workDir=`cd ${shellDir}/..;pwd`
+
+SOURCE_ROOT=${workDir}
+#load config
+source ${SOURCE_ROOT}/config/config.sh
+source ${SOURCE_ROOT}/config/db.sh
+DIR=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
+SHELL_LOG="${DIR}/console.out" # console output is also appended to this file
+export SQL_SOURCE_PATH="${DIR}/../db/exchangis_ddl.sql"
+PACKAGE_DIR="${DIR}/../packages"
+# Home Path
+EXCHANGIS_HOME_PATH="${DIR}/../"
+
+CONF_FILE_PATH="sbin/configure.sh"
+FORCE_INSTALL=false
+SKIP_PACKAGE=false
+USER=`whoami`
+SUDO_USER=false
+
+CONF_PATH=${DIR}/../config
+
+usage(){
+    printf "\033[1m Install project, run directly\n\033[0m"
+}
+
+function LOG(){
+    currentTime=`date "+%Y-%m-%d %H:%M:%S.%3N"`
+    echo -e "$currentTime [${1}] ($$) $2" | tee -a ${SHELL_LOG} # tee -a appends the output to the log file
+}
+
+abs_path(){
+    SOURCE="${BASH_SOURCE[0]}"
+    while [ -h "${SOURCE}" ]; do
+        DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
+        SOURCE="$(readlink "${SOURCE}")"
+        [[ ${SOURCE} != /* ]] && SOURCE="${DIR}/${SOURCE}"
+    done
+    echo "$( cd -P "$( dirname "${SOURCE}" )" && pwd )"
+}
+
+BIN=`abs_path`
+
+
+is_sudo_user(){
+    sudo -v >/dev/null 2>&1 # sudo asks for a password on first use, or when it has not been used for N minutes (default 5)
+    # ">/dev/null 2>&1" redirects stderr to stdout and discards everything,
+    # so the check runs silently; only the exit status is used.
+}
+
+uncompress_packages(){
+    LOG INFO "\033[1m package dir is: [${PACKAGE_DIR}]\033[0m"
+    local list=`ls ${PACKAGE_DIR}`
+    LOG INFO "\033[1m package list is: [${list}]\033[0m"
+    for pack in ${list}
+    do
+        local uncompress=true
+        if [ ${#PACKAGE_NAMES[@]} -gt 0 ]; then
+            uncompress=false
+            for server in ${PACKAGE_NAMES[@]}
+            do
+                if [ ${server} == ${pack%%.tar.gz*} ] || [ ${server} == ${pack%%.zip*} ]; then
+                    uncompress=true
+                    break
+                fi
+            done
+        fi
+        if [ ${uncompress} == true ]; then
+            if [[ ${pack} =~ tar\.gz$ ]]; then
+                local do_uncompress=0
+                #if [ ${FORCE_INSTALL} == false ]; then
+                #    interact_echo "Do you want to decompress this package: [${pack}]?"
+                #    do_uncompress=$?
+                #fi
+                if [ ${do_uncompress} == 0 ]; then
+                    LOG INFO "\033[1m Uncompress package: [${pack}] to modules directory\033[0m"
+                    tar --skip-old-files -zxf ${PACKAGE_DIR}/${pack} -C ../
+                fi
+            elif [[ ${pack} =~ zip$ ]]; then
+                local do_uncompress=0
+                #if [ ${FORCE_INSTALL} == false ]; then
+                #    interact_echo "Do you want to decompress this package: [${pack}]?"
+                #    do_uncompress=$?
+                #fi
+                if [ ${do_uncompress} == 0 ]; then
+                    LOG INFO "\033[1m Uncompress package: [${pack}] to modules directory\033[0m"
+                    unzip -nq ${PACKAGE_DIR}/${pack} -d ../ # -n: never overwrite existing files
+                fi
+            fi
+            # skip other packages
+        fi
+    done
+}
+
+interact_echo(){
+    while [ 1 ]; do
+        read -p "$1 (Y/N)" yn
+        if [ "${yn}x" == "Yx" ] || [ "${yn}x" == "yx" ]; then
+            return 0
+        elif [ "${yn}x" == "Nx" ] || [ "${yn}x" == "nx" ]; then
+            return 1
+        else
+            echo "Unknown choice: [$yn], please choose again."
+        fi
+    done
+}
+
+# Initialize database
+init_database(){
+    BOOTSTRAP_PROP_FILE="${CONF_PATH}/dss-exchangis-main-server-dev.properties"
+    if [ "x${SQL_SOURCE_PATH}" != "x" ] && [ -f "${SQL_SOURCE_PATH}" ]; then
+        `mysql --version >/dev/null 2>&1`
+        DATASOURCE_URL="jdbc:mysql:\/\/${MYSQL_HOST}:${MYSQL_PORT}\/${DATABASE}\?useSSL=false\&characterEncoding=UTF-8\&allowMultiQueries=true"
+        sed -ri "s![#]?(wds.linkis.server.mybatis.datasource.username=)\S*!\1${MYSQL_USERNAME}!g" ${BOOTSTRAP_PROP_FILE}
+        sed -ri "s![#]?(wds.linkis.server.mybatis.datasource.password=)\S*!\1${MYSQL_PASSWORD}!g" ${BOOTSTRAP_PROP_FILE}
+        sed -ri "s![#]?(wds.linkis.server.mybatis.datasource.url=)\S*!\1${DATASOURCE_URL}!g" ${BOOTSTRAP_PROP_FILE}
+        interact_echo "Do you want to initialize database with sql: [${SQL_SOURCE_PATH}]?"
+        if [ $? == 0 ]; then
+            LOG INFO "\033[1m Found mysql command, so begin to initialize the database\033[0m"
+            mysql -h ${MYSQL_HOST} -P ${MYSQL_PORT} -u ${MYSQL_USERNAME} -p${MYSQL_PASSWORD} --default-character-set=utf8 -e \
+            "CREATE DATABASE IF NOT EXISTS ${DATABASE}; USE ${DATABASE}; source ${SQL_SOURCE_PATH};"
+        fi
+    fi
+}
+
+init_properties(){
+    BOOTSTRAP_PROP_FILE="${CONF_PATH}/dss-exchangis-server.properties"
+    APPLICATION_YML="${CONF_PATH}/application-exchangis.yml"
+    LINKIS_GATEWAY_URL="http:\/\/${LINKIS_GATEWAY_HOST}:${LINKIS_GATEWAY_PORT}\/"
+    if [ "x${LINKIS_SERVER_URL}" == "x" ]; then
+        LINKIS_SERVER_URL="http://127.0.0.1:9001"
+    fi
+
+    sed -ri "s![#]?(wds.exchangis.datasource.client.server-url=)\S*!\1${LINKIS_GATEWAY_URL}!g" ${BOOTSTRAP_PROP_FILE}
+    sed -ri "s![#]?(wds.exchangis.client.linkis.server-url=)\S*!\1${LINKIS_GATEWAY_URL}!g" ${BOOTSTRAP_PROP_FILE}
+    sed -ri "s![#]?(port: )\S*!\1${EXCHANGIS_PORT}!g" ${APPLICATION_YML}
+    sed -ri "s![#]?(defaultZone: )\S*!\1${EUREKA_URL}!g" ${APPLICATION_YML}
+}
+
+install_modules(){
+    LOG INFO "\033[1m ####### Start To Install project ######\033[0m"
+    echo ""
+    if [ ${FORCE_INSTALL} == false ]; then
+        LOG INFO "\033[1m Install project ......\033[0m"
+        init_database
+        init_properties
+    else
+        LOG INFO "\033[1m Install project ......\033[0m"
+        init_database
+    fi
+    LOG INFO "\033[1m ####### Finish To Install Project ######\033[0m"
+}
+
+
+while [ 1 ]; do
+    case ${!OPTIND} in
+        -h|--help)
+            usage
+            exit 0
+            ;;
+        "")
+            break
+            ;;
+        *)
+            echo "Argument error! " 1>&2
+            exit 1
+            ;;
+    esac
+done
+
+is_sudo_user
+if [ $? == 0 ]; then
+    SUDO_USER=true
+fi
+
+MODULE_LIST_RESOLVED=()
+c=0
+RESOLVED_DIR=${PACKAGE_DIR}
+
+server="exchangis-server"
+LOG INFO "\033[1m ####### server is [${server}] ######\033[0m"
+server_list=`ls ${RESOLVED_DIR} | grep -E "^(${server}|${server}_[0-9]+\\.[0-9]+\\.[0-9]+)" | grep -E "(\\.tar\\.gz|\\.zip|)$"`
+LOG INFO "\033[1m ####### server_list is [${server_list}] ######\033[0m"
+for _server in ${server_list}
+do
+    # Is there a better way to cut the string?
+    _server=${_server%%.tar.gz*}
+    _server=${_server%%zip*}
+    MODULE_LIST_RESOLVED[$c]=${_server}
+    c=$(($c + 1))
+done
+if [ ${SKIP_PACKAGE} == true ]; then
+    MODULE_LIST=("${MODULE_LIST_RESOLVED[@]}")
+else
+    PACKAGE_NAMES=("${MODULE_LIST_RESOLVED[@]}")
+fi
+
+
+LOG INFO "\033[1m ####### Start To Uncompress Packages ######\033[0m"
+LOG INFO "Uncompressing...."
+uncompress_packages
+LOG INFO "\033[1m ####### Finish To Uncompress Packages ######\033[0m"
+
+install_modules
+
+
+exit 0
+
diff --git a/assembly-package/sbin/launcher.sh b/assembly-package/sbin/launcher.sh
new file mode 100644
index 000000000..4c9530eae
--- /dev/null
+++ b/assembly-package/sbin/launcher.sh
@@ -0,0 +1,252 @@
+#!/bin/bash
+#
+# Copyright 2020 WeBank
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Launcher for modules, provides start/stop functions
+
+DIR=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
+ENV_FILE="${DIR}/env.properties"
+SHELL_LOG="${DIR}/command.log"
+USER_DIR="${DIR}/../"
+EXCHANGIS_LIB_PATH="${DIR}/../lib"
+EXCHANGIS_PID_PATH="${DIR}/../runtime"
+# Default
+MAIN_CLASS=""
+DEBUG_MODE=False
+DEBUG_PORT="7006"
+SPRING_PROFILE="exchangis"
+SLEEP_TIMEREVAL_S=2
+
+function LOG(){
+    currentTime=`date "+%Y-%m-%d %H:%M:%S.%3N"`
+    echo -e "$currentTime [${1}] ($$) $2" | tee -a ${SHELL_LOG}
+}
+
+abs_path(){
+    SOURCE="${BASH_SOURCE[0]}"
+    while [ -h "${SOURCE}" ]; do
+        DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
+        SOURCE="$(readlink "${SOURCE}")"
+        [[ ${SOURCE} != /* ]] && SOURCE="${DIR}/${SOURCE}"
+    done
+    echo "$( cd -P "$( dirname "${SOURCE}" )" && pwd )"
+}
+
+verify_java_env(){
+    if [[ "x${JAVA_HOME}" != "x" ]]; then
+        ${JAVA_HOME}/bin/java -version >/dev/null 2>&1
+    else
+        java -version >/dev/null 2>&1
+    fi
+    if [[ $? -ne 0 ]]; then
+        cat 1>&2 <<EOF
+Java environment is not available, please check your JAVA_HOME and PATH settings.
+EOF
+        return 1
+    fi
+    return 0
+}
+
+JPS="jps" # NOTE: assumption; the original definition of this variable was lost from the patch
+
+# Input: $1:module_name, $2:main class
+status_class(){
+    local p=""
+    local pid_file_path=${EXCHANGIS_PID_PATH}/$1.pid
+    if [ "x"${pid_file_path} != "x" ]; then
+        if [ -f ${pid_file_path} ]; then
+            local pid_in_file=`cat ${pid_file_path} 2>/dev/null`
+            if [ "x"${pid_in_file} != "x" ]; then
+                p=`${JPS} -q | grep ${pid_in_file} | awk '{print $1}'`
+            fi
+        fi
+    else
+        p=`${JPS} -l | grep "$2" | awk '{print $1}'`
+    fi
+    if [ -n "$p" ]; then
+        # echo "$1 ($2) is still running with pid $p"
+        return 0
+    else
+        # echo "$1 ($2) does not appear in the java process table"
+        return 1
+    fi
+}
+
+wait_for_startup(){
+    local now_s=`date '+%s'`
+    local stop_s=$((${now_s} + $1))
+    while [ ${now_s} -le ${stop_s} ];do
+        status_class $2 $3
+        if [ $? -eq 0 ]; then
+            return 0
+        fi
+        sleep ${SLEEP_TIMEREVAL_S}
+        now_s=`date '+%s'`
+    done
+    return 1
+}
+
+wait_for_stop(){
+    local now_s=`date '+%s'`
+    local stop_s=$((${now_s} + $1))
+    while [ ${now_s} -le ${stop_s} ];do
+        status_class $2 $3
+        if [ $? -eq 1 ]; then
+            return 0
+        fi
+        sleep ${SLEEP_TIMEREVAL_S}
+        now_s=`date '+%s'`
+    done
+    return 1
+}
+
+# Input: $1:module_name, $2:main class
+# NOTE: the original body of this function was lost from the patch; the
+# following is a reconstructed sketch that assembles a plain java command line.
+construct_java_command(){
+    local java_bin="java"
+    if [[ "x${JAVA_HOME}" != "x" ]]; then
+        java_bin="${JAVA_HOME}/bin/java"
+    fi
+    EXEC_JAVA="${java_bin} -cp ${EXCHANGIS_CONF_PATH}:${EXCHANGIS_LIB_PATH}/* -Dspring.profiles.active=${SPRING_PROFILE} $2"
+}
+
+# Input: $1:module_name, $2:main class
+launcher_start(){
+    LOG INFO "Launcher: launch to start server [ $1 ]"
+    status_class $1 $2
+    if [[ $? -eq 0 ]]; then
+        LOG INFO "Launcher: [ $1 ] has been started in process"
+        return 0
+    fi
+    construct_java_command $1 $2
+    # Execute
+    echo ${EXEC_JAVA}
+    LOG INFO "${EXEC_JAVA}"
+    nohup ${EXEC_JAVA} >/dev/null 2>&1 &
+    LOG INFO "Launcher: waiting [ $1 ] to start complete ..."
+    wait_for_startup 20 $1 $2
+    if [[ $? -eq 0 ]]; then
+        LOG INFO "Launcher: [ $1 ] start success"
+        LOG INFO ${EXCHANGIS_CONF_PATH}
+        APPLICATION_YML="${EXCHANGIS_CONF_PATH}/application-exchangis.yml"
+        EUREKA_URL=`cat ${APPLICATION_YML} | grep Zone | sed -n '1p'`
+        echo "${EUREKA_URL}"
+        LOG INFO "Please check exchangis server in EUREKA_ADDRESS: ${EUREKA_URL#*:} "
+    else
+        LOG ERROR "Launcher: [ $1 ] start fail over 20 seconds, please retry it"
+    fi
+}
+
+# Input: $1:module_name, $2:main class
+launcher_stop(){
+    LOG INFO "Launcher: stop the server [ $1 ]"
+    local p=""
+    local pid_file_path=${EXCHANGIS_PID_PATH}/$1.pid
+    if [ "x"${pid_file_path} != "x" ]; then
+        if [ -f ${pid_file_path} ]; then
+            local pid_in_file=`cat ${pid_file_path} 2>/dev/null`
+            if [ "x"${pid_in_file} != "x" ]; then
+                p=`${JPS} -q | grep ${pid_in_file} | awk '{print $1}'`
+            fi
+        fi
+    elif [[ "x"$2 != "x" ]]; then
+        p=`${JPS} -l | grep "$2" | awk '{print $1}'`
+    fi
+    if [[ -z ${p} ]]; then
+        LOG INFO "Launcher: [ $1 ] didn't start successfully, not found in the java process table"
+        return 0
+    fi
+    case "`uname`" in
+        CYGWIN*) taskkill /PID "${p}" ;;
+        *) kill -SIGTERM "${p}" ;;
+    esac
+    LOG INFO "Launcher: waiting [ $1 ] to stop complete ..."
+    wait_for_stop 20 $1 $2
+    if [[ $? -eq 0 ]]; then
+        LOG INFO "Launcher: [ $1 ] stop success"
+    else
+        LOG ERROR "Launcher: [ $1 ] stop exceeded 20s, please check it manually" >&2
+        return 1
+    fi
+}
diff --git a/assembly-package/sbin/start-server.sh b/assembly-package/sbin/start-server.sh
new file mode 100644
index 000000000..5889993c8
--- /dev/null
+++ b/assembly-package/sbin/start-server.sh
@@ -0,0 +1,54 @@
+#!/bin/bash
+#
+# Copyright 2020 WeBank
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Start exchangis-server module
+MODULE_NAME="exchangis-server"
+
+function LOG(){
+    currentTime=`date "+%Y-%m-%d %H:%M:%S.%3N"`
+    echo -e "$currentTime [${1}] ($$) $2" | tee -a ${SHELL_LOG}
+}
+
+abs_path(){
+    SOURCE="${BASH_SOURCE[0]}"
+    while [ -h "${SOURCE}" ]; do
+        DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
+        SOURCE="$(readlink "${SOURCE}")"
+        [[ ${SOURCE} != /* ]] && SOURCE="${DIR}/${SOURCE}"
+    done
+    echo "$( cd -P "$( dirname "${SOURCE}" )" && pwd )"
+}
+
+BIN=`abs_path`
+SHELL_LOG="${BIN}/console.out"
+
+interact_echo(){
+    while [ 1 ]; do
+        read -p "$1 (Y/N)" yn
+        if [ "${yn}x" == "Yx" ] || [ "${yn}x" == "yx" ]; then
+            return 0
+        elif [ "${yn}x" == "Nx" ] || [ "${yn}x" == "nx" ]; then
+            return 1
+        else
+            echo "Unknown choice: [$yn], please choose again."
+        fi
+    done
+}
+
+start_main(){
+    : # no-op placeholder, kept so the function body is syntactically valid
+}
+
+start_main
+exit $?
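Once the server is up, launcher.sh prints the Eureka address taken from application-exchangis.yml, and the registration can be verified from the shell. A minimal sketch, assuming the `{IP}:{PORT}` Eureka placeholders from the config have been filled in with an example value of your own:

```bash
#!/bin/bash
# Query the standard Eureka REST endpoint and look for the Exchangis instance.
EUREKA_ADDRESS="http://127.0.0.1:20303"   # assumed example value for {IP}:{PORT}

curl -s "${EUREKA_ADDRESS}/eureka/apps" | grep -i "dss-exchangis-main-server-dev"
```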
diff --git a/assembly-package/src/main/assembly/assembly.xml b/assembly-package/src/main/assembly/assembly.xml
new file mode 100644
index 000000000..e873afe23
--- /dev/null
+++ b/assembly-package/src/main/assembly/assembly.xml
@@ -0,0 +1,77 @@
+<assembly>
+    <id>exchangis</id>
+    <formats>
+        <format>tar.gz</format>
+    </formats>
+    <includeBaseDirectory>false</includeBaseDirectory>
+    <fileSets>
+        <fileSet>
+            <directory>${basedir}/sbin</directory>
+            <includes>
+                <include>*</include>
+            </includes>
+            <fileMode>0777</fileMode>
+            <outputDirectory>sbin</outputDirectory>
+            <lineEnding>unix</lineEnding>
+        </fileSet>
+        <fileSet>
+            <directory>${basedir}/bin</directory>
+            <includes>
+                <include>*</include>
+            </includes>
+            <fileMode>0777</fileMode>
+            <outputDirectory>bin</outputDirectory>
+            <lineEnding>unix</lineEnding>
+        </fileSet>
+        <fileSet>
+            <directory>${basedir}/config</directory>
+            <includes>
+                <include>*</include>
+            </includes>
+            <fileMode>0777</fileMode>
+            <outputDirectory>config</outputDirectory>
+            <lineEnding>unix</lineEnding>
+        </fileSet>
+        <fileSet>
+            <directory>${basedir}/../db</directory>
+            <includes>
+                <include>*</include>
+            </includes>
+            <fileMode>0777</fileMode>
+            <outputDirectory>db</outputDirectory>
+            <lineEnding>unix</lineEnding>
+        </fileSet>
+        <fileSet>
+            <directory>${basedir}/../exchangis-server/target/packages</directory>
+            <includes>
+                <include>*.tar.gz</include>
+                <include>*.zip</include>
+            </includes>
+            <fileMode>0755</fileMode>
+            <outputDirectory>packages</outputDirectory>
+        </fileSet>
+    </fileSets>
+</assembly>
\ No newline at end of file
diff --git a/assembly/package.xml b/assembly/package.xml
deleted file mode 100644
index cef49a33c..000000000
--- a/assembly/package.xml
+++ /dev/null
@@ -1,41 +0,0 @@
-<assembly>
-    <id>main</id>
-    <formats>
-        <format>tar.gz</format>
-    </formats>
-    <includeBaseDirectory>true</includeBaseDirectory>
-    <fileSets>
-        <fileSet>
-            <directory>../packages</directory>
-            <includes>
-                <include>exchangis*</include>
-            </includes>
-            <outputDirectory>packages</outputDirectory>
-        </fileSet>
-        <fileSet>
-            <lineEnding>unix</lineEnding>
-            <directory>../bin</directory>
-            <outputDirectory>bin</outputDirectory>
-            <fileMode>0755</fileMode>
-        </fileSet>
-        <fileSet>
-            <directory>../docs</directory>
-            <outputDirectory>docs</outputDirectory>
-        </fileSet>
-        <fileSet>
-            <directory>../images</directory>
-            <outputDirectory>images</outputDirectory>
-        </fileSet>
-        <fileSet>
-            <directory>../</directory>
-            <lineEnding>unix</lineEnding>
-            <includes>
-                <include>README.md</include>
-                <include>LICENSE</include>
-            </includes>
-            <outputDirectory>/</outputDirectory>
-        </fileSet>
-    </fileSets>
-</assembly>
\ No newline at end of file
diff --git a/db/1.1.1/exchangis_ddl.sql b/db/1.1.1/exchangis_ddl.sql
new file mode 100644
index 000000000..1002aa86b
--- /dev/null
+++ b/db/1.1.1/exchangis_ddl.sql
@@ -0,0 +1,88 @@
+-- exchangis_job_func definition
+DROP TABLE IF EXISTS `exchangis_job_func`;
+CREATE TABLE `exchangis_job_func` (
+  `id` int(11) NOT NULL AUTO_INCREMENT,
+  `func_type` varchar(50) NOT NULL,
+  `func_name` varchar(100) NOT NULL,
+  `tab_name` varchar(50) NOT NULL COMMENT 'Tab',
+  `name_dispaly` varchar(100) DEFAULT NULL,
+  `param_num` int(11) DEFAULT '0',
+  `ref_name` varchar(100) DEFAULT NULL,
+  `description` varchar(200) DEFAULT NULL,
+  `modify_time` datetime DEFAULT NULL,
+  `create_time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
+  PRIMARY KEY (`id`),
+  UNIQUE KEY `job_func_tab_name_idx` (`tab_name`,`func_name`)
+) ENGINE=InnoDB AUTO_INCREMENT=12 DEFAULT CHARSET=utf8;
+
+-- exchangis_job_func_params definition
+DROP TABLE IF EXISTS `exchangis_job_func_params`;
+CREATE TABLE IF NOT EXISTS `exchangis_job_func_params`(
+  `func_id` INT(11) NOT NULL,
+  `param_name` VARCHAR(100) NOT NULL,
+  `order` INT(11) DEFAULT 0,
+  `name_display` VARCHAR(100),
+  `create_time` TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
+  PRIMARY KEY(`func_id`, `param_name`)
+)Engine=InnoDB DEFAULT CHARSET=utf8;
+
+-- exchangis_job_param_config definition
+DROP TABLE IF EXISTS `exchangis_job_param_config`;
+CREATE TABLE `exchangis_job_param_config` (
+  `id` bigint(20) NOT NULL AUTO_INCREMENT,
+  `config_key` varchar(64) NOT NULL,
+  `config_name` varchar(64) NOT NULL,
+  `config_direction` varchar(16) DEFAULT NULL,
+  `type` varchar(32) NOT NULL,
+  `ui_type` varchar(32) DEFAULT NULL,
+  `ui_field` varchar(64) DEFAULT NULL,
+  `ui_label` varchar(32) DEFAULT NULL,
+  `unit` varchar(32) DEFAULT NULL,
+  `required` bit(1) DEFAULT b'0',
+  `value_type` varchar(32) DEFAULT NULL,
+  `value_range` varchar(255) DEFAULT NULL,
+  `default_value` varchar(255) DEFAULT NULL,
+  `validate_type` varchar(64) DEFAULT NULL,
+  `validate_range` varchar(64) DEFAULT NULL,
+  `validate_msg` varchar(255) DEFAULT NULL,
+  `is_hidden` bit(1) DEFAULT NULL,
+  `is_advanced` bit(1) DEFAULT NULL,
+  `source` varchar(255) DEFAULT NULL,
+  `level` tinyint(4) DEFAULT NULL,
+  `treename` varchar(32) DEFAULT NULL,
+  `sort` int(11) DEFAULT NULL,
+  `description` varchar(255) DEFAULT NULL,
+  `status` tinyint(4) DEFAULT NULL,
+  `ref_id` bigint(20) DEFAULT NULL,
+  PRIMARY KEY (`id`)
+) ENGINE=InnoDB AUTO_INCREMENT=32 DEFAULT CHARSET=utf8;
+
+-- exchangis_engine_settings definition
+DROP TABLE IF EXISTS `exchangis_engine_settings`;
+CREATE TABLE `exchangis_engine_settings` (
+  `id` bigint(20) NOT NULL AUTO_INCREMENT,
+  `engine_name` varchar(50) NOT NULL,
+  `engine_desc` varchar(500) NOT NULL,
+  `engine_settings_value` text,
+  `engine_direction` varchar(255) NOT NULL,
+  `res_loader_class` varchar(255),
+  `res_uploader_class` varchar(255),
+  `modify_time` datetime DEFAULT NULL,
+  `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP,
+  PRIMARY KEY (`id`),
+  UNIQUE KEY `engine_setting_idx` (`engine_name`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8;
+
+-- exchangis_job_transform_rule
+DROP TABLE IF EXISTS `exchangis_job_transform_rule`;
+CREATE TABLE `exchangis_job_transform_rule` (
+  `id` bigint(20) NOT NULL AUTO_INCREMENT,
+  `rule_name` varchar(100) NOT NULL DEFAULT 'transform_rule',
+  `rule_type` varchar(64) NOT NULL DEFAULT 'DEF',
+  `rule_source` varchar(600) DEFAULT '{}',
+  `data_source_type` varchar(64) NOT NULL,
+  `engine_type` varchar(32),
+  `direction` varchar(32) NOT NULL DEFAULT 'NONE',
+  `create_time` datetime DEFAULT CURRENT_TIMESTAMP,
+  PRIMARY KEY (`id`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8;
\ No newline at end of file
diff --git a/db/1.1.1/exchangis_dml.sql b/db/1.1.1/exchangis_dml.sql
new file mode 100644
index 000000000..3e546d667
--- /dev/null
+++ b/db/1.1.1/exchangis_dml.sql
@@ -0,0 +1,79 @@
+-- job_func records
+INSERT INTO `exchangis_job_func`(func_type,func_name,tab_name,name_dispaly,param_num,ref_name,description,modify_time) VALUES
+('TRANSFORM','dx_substr','DATAX',NULL,2,NULL,NULL,NULL)
+,('TRANSFORM','dx_pad','DATAX',NULL,3,NULL,NULL,NULL)
+,('TRANSFORM','dx_replace','DATAX',NULL,3,NULL,NULL,NULL)
+,('VERIFY','like','DATAX',NULL,1,'dx_filter',NULL,NULL)
+,('VERIFY','not like','DATAX',NULL,1,'dx_filter',NULL,NULL)
+,('VERIFY','>','DATAX',NULL,1,'dx_filter',NULL,NULL)
+,('VERIFY','<','DATAX',NULL,1,'dx_filter',NULL,NULL)
+,('VERIFY','=','DATAX',NULL,1,'dx_filter',NULL,NULL)
+,('VERIFY','!=','DATAX',NULL,1,'dx_filter',NULL,NULL)
+,('VERIFY','>=','DATAX',NULL,1,'dx_filter',NULL,NULL)
+,('TRANSFORM','dx_precision','DATAX',NULL,1,NULL,NULL,NULL)
+;
+
+-- job_func_params records
+INSERT INTO `exchangis_job_func_params`(`func_id`, `param_name`, `name_display`, `order`) VALUES(1, 'startIndex', 'startIndex', 0) ON DUPLICATE KEY UPDATE `name_display` = 'startIndex';
+INSERT INTO `exchangis_job_func_params`(`func_id`, `param_name`, `name_display`, `order`) VALUES(1, 'length', 'length', 1) ON DUPLICATE KEY UPDATE `name_display` = 'length';
+INSERT INTO `exchangis_job_func_params`(`func_id`, `param_name`, `name_display`, `order`) VALUES(2, 'padType', 'padType(r or l)', 0) ON DUPLICATE KEY UPDATE `name_display` = 'padType(r or l)';
+INSERT INTO `exchangis_job_func_params`(`func_id`, `param_name`, `name_display`, `order`) VALUES(2, 'length', 'length', 1) ON DUPLICATE KEY UPDATE `name_display` = 'length';
+INSERT INTO `exchangis_job_func_params`(`func_id`, `param_name`, `name_display`, `order`) VALUES(2, 'padString', 'padString', 2) ON DUPLICATE KEY UPDATE `name_display` = 'padString';
+INSERT INTO `exchangis_job_func_params`(`func_id`, `param_name`, `name_display`, `order`) VALUES(3, 'startIndex', 'startIndex', 0) ON DUPLICATE KEY UPDATE `name_display` = 'startIndex';
+INSERT INTO `exchangis_job_func_params`(`func_id`, `param_name`, `name_display`, `order`) VALUES(3, 'length', 'length', 1) ON DUPLICATE KEY UPDATE `name_display` = 'length';
+INSERT INTO `exchangis_job_func_params`(`func_id`, `param_name`, `name_display`, `order`) VALUES(3, 'replaceString', 'replaceString', 2) ON DUPLICATE KEY UPDATE `name_display` = 'replaceString';
+INSERT INTO `exchangis_job_func_params`(`func_id`, `param_name`, `name_display`) VALUES(4, 'value', 'value') ON DUPLICATE KEY UPDATE `name_display` = 'value';
+INSERT INTO `exchangis_job_func_params`(`func_id`, `param_name`, `name_display`) VALUES(5, 'value', 'value') ON DUPLICATE KEY UPDATE `name_display` = 'value';
+INSERT INTO `exchangis_job_func_params`(`func_id`, `param_name`, `name_display`) VALUES(6, 'value', 'value') ON DUPLICATE KEY UPDATE `name_display` = 'value';
+INSERT INTO `exchangis_job_func_params`(`func_id`, `param_name`, `name_display`) VALUES(7, 'value', 'value') ON DUPLICATE KEY UPDATE `name_display` = 'value';
+INSERT INTO `exchangis_job_func_params`(`func_id`, `param_name`, `name_display`) VALUES(8, 'value', 'value') ON DUPLICATE KEY UPDATE `name_display` = 'value';
+INSERT INTO `exchangis_job_func_params`(`func_id`, `param_name`, `name_display`) VALUES(9, 'value', 'value') ON DUPLICATE KEY UPDATE `name_display` = 'value';
+INSERT INTO `exchangis_job_func_params`(`func_id`, `param_name`, `name_display`) VALUES(10, 'value', 'value') ON DUPLICATE KEY UPDATE `name_display` = 'value';
+
+-- job_param_config records
+INSERT INTO `exchangis_job_param_config` (config_key,config_name,config_direction,`type`,ui_type,ui_field,ui_label,unit,required,value_type,value_range,default_value,validate_type,validate_range,validate_msg,is_hidden,is_advanced,source,`level`,treename,sort,description,status,ref_id) VALUES
+('setting.speed.byte','作业速率限制','','DATAX','INPUT','setting.speed.bytes','作业速率限制','Mb/s',1,'NUMBER','','5','REGEX','^[1-9]\\d*$','作业速率限制输入错误',0,0,'',1,'',1,'',1,NULL)
+,('setting.speed.record','作业记录数限制','','DATAX','INPUT','setting.speed.records','作业记录数限制','条/s',1,'NUMBER','','100','REGEX','^[1-9]\\d*$','作业记录数限制输入错误',0,0,'',1,'',2,'',1,NULL)
+,('setting.speed.channel','作业最大并行度','','DATAX','INPUT','setting.max.parallelism','作业最大并行度','个',1,'NUMBER','','1','REGEX','^[1-9]\\d*$','作业最大并行度输入错误',0,0,'',1,'',3,'',1,NULL)
+,('setting.max.memory','作业最大使用内存','','DATAX','INPUT','setting.max.memory','作业最大使用内存','Mb',1,'NUMBER','','1024','REGEX','^[1-9]\\d*$','作业最大使用内存输入错误',0,0,'',1,'',4,'',1,NULL)
+,('setting.errorLimit.record','最多错误记录数','','DATAX','INPUT','setting.errorlimit.record','最多错误记录数','条',0,'NUMBER','','','REGEX','^[0-9]\\d*$','最多错误记录数输入错误',0,0,'',1,'',5,'',1,NULL)
+,('setting.max.parallelism','作业最大并行数','','SQOOP','INPUT','setting.max.parallelism','作业最大并行数','个',1,'NUMBER','','1','REGEX','^[1-9]\\d*$','作业最大并行数输入错误',0,0,'',1,'',1,'',1,NULL)
+,('setting.max.memory','作业最大内存','','SQOOP','INPUT','setting.max.memory','作业最大内存','Mb',1,'NUMBER','','1024','REGEX','^[1-9]\\d*$','作业最大内存输入错误',0,0,'',1,'',2,'',1,NULL)
+,('where','WHERE条件','SOURCE','MYSQL','INPUT','where','WHERE条件','',0,'VARCHAR','','','REGEX','^[\\s\\S]{0,500}$','WHERE条件输入过长',0,0,'',1,'',2,'',1,NULL)
+,('writeMode','写入方式','SQOOP-SINK','HIVE','OPTION','writeMode','写入方式(OVERWRITE只对TEXT类型表生效)','',1,'OPTION','["OVERWRITE","APPEND"]','OVERWRITE','','','写入方式输入错误',0,0,'',1,'',1,'',1,NULL)
+,('partition','分区信息','SINK','HIVE','MAP','partition','分区信息(文本)','',0,'VARCHAR','','','REGEX','^[\\s\\S]{0,50}$','分区信息过长',0,0,'/api/rest_j/v1/dss/exchangis/main/datasources/render/partition/element/map',1,'',2,'',1,NULL)
+;
+INSERT INTO `exchangis_job_param_config` (config_key,config_name,config_direction,`type`,ui_type,ui_field,ui_label,unit,required,value_type,value_range,default_value,validate_type,validate_range,validate_msg,is_hidden,is_advanced,source,`level`,treename,sort,description,status,ref_id)
+VALUES
+('partition','分区信息','SOURCE','HIVE','MAP','partition','分区信息(文本)','',0,'VARCHAR','','','REGEX','^[\\s\\S]{0,50}$','分区信息过长',0,0,'/api/rest_j/v1/dss/exchangis/main/datasources/render/partition/element/map',1,'',2,'',1,NULL)
+,('writeMode','写入方式','SQOOP-SINK','MYSQL','OPTION','writeMode','写入方式','',1,'OPTION','["INSERT","UPDATE"]','INSERT','','','写入方式输入错误',0,0,'',1,'',1,'',1,NULL)
+,('batchSize','批量大小','DATAX-SINK','ELASTICSEARCH','INPUT','batchSize','批量大小','',0,'NUMBER','','','REGEX','^[1-9]\\d*$','批量大小输入错误',0,0,'',1,'',1,'',1,NULL)
+,('query','query条件','DATAX-SOURCE','MONGODB','INPUT','query','query条件','',0,'VARCHAR','','','REGEX','^[\\s\\S]{0,500}$','query条件输入过长',0,0,'',1,'',2,'',1,NULL)
+,('writeMode','写入方式','DATAX-SINK','MONGODB','OPTION','writeMode','写入方式','',1,'OPTION','["INSERT","REPLACE"]','INSERT','','','写入方式输入错误',0,0,'',1,'',1,'',1,NULL)
+,('batchSize','批量大小','DATAX-SINK','MONGODB','INPUT','batchSize','批量大小','',0,'NUMBER','','','REGEX','^[1-9]\\d*$','批量大小输入错误',0,0,'',1,'',2,'',1,NULL)
+,('transferMode','传输方式','DATAX-SOURCE','HIVE','OPTION','transferMode','传输方式','',1,'OPTION','["二进制","记录"]','二进制','','','该传输方式不可用',0,0,'',1,'',1,'',1,NULL)
+,('nullFormat','空值字符','DATAX-SOURCE','HIVE','INPUT','nullFormat','空值字符','',0,'VARCHAR','','','REGEX','^[\\s\\S]{0,50}$','空值字符输入错误',0,0,'',1,'',2,'',1,49)
+,('writeMode','写入方式','DATAX-SINK','MYSQL','OPTION','writeMode','写入方式','',1,'OPTION','["INSERT","UPDATE"]','INSERT','','','写入方式输入错误',0,0,'',1,'',1,'',1,NULL)
+,('writeMode','写入方式','DATAX-SINK','HIVE','OPTION','writeMode','写入方式(OVERWRITE只对TEXT类型表生效)','',1,'OPTION','["append","truncate"]','append','','','写入方式输入错误',0,0,'',1,'',1,'',1,NULL)
+;
+INSERT INTO `exchangis_job_param_config` (config_key,config_name,config_direction,`type`,ui_type,ui_field,ui_label,unit,required,value_type,value_range,default_value,validate_type,validate_range,validate_msg,is_hidden,is_advanced,source,`level`,treename,sort,description,status,ref_id) VALUES
+('nullFormat','空值字符','DATAX-SINK','HIVE','INPUT','nullFormat','空值字符','',0,'VARCHAR','','','REGEX','^[\\s\\S]{0,50}$','空值字符输入错误',0,0,'',1,'',2,'',1,49)
+,('nullFormat','空值字符','DATAX-SINK','ELASTICSEARCH','INPUT','nullFormat','空值字符','',0,'VARCHAR','','','REGEX','^[\\s\\S]{0,50}$','空值字符输入错误',0,0,'',1,'',2,'',1,49)
+;
+INSERT INTO `exchangis_job_param_config` (config_key,config_name,config_direction,`type`,ui_type,ui_field,ui_label,unit,required,value_type,value_range,default_value,validate_type,validate_range,validate_msg,is_hidden,is_advanced,source,`level`,treename,sort,description,status,ref_id) VALUES
+('where','WHERE条件','SOURCE','ORACLE','INPUT','where','WHERE条件',NULL,0,'VARCHAR',NULL,NULL,'REGEX','^[\\s\\S]{0,500}$','WHERE条件输入过长',0,0,NULL,1,'',2,NULL,1,NULL)
+,('writeMode','写入方式','DATAX-SINK','ORACLE','OPTION','writeMode','写入方式',NULL,1,'OPTION','["INSERT","UPDATE"]','INSERT',NULL,NULL,'写入方式输入错误',0,0,NULL,1,NULL,1,NULL,1,NULL)
+;
+
+-- engine_settings records
+INSERT INTO `exchangis_engine_settings` (id, engine_name, engine_desc, engine_settings_value, engine_direction, res_loader_class, res_uploader_class, modify_time, create_time) VALUES
+(1, 'datax', 'datax sync engine', '{}', 'mysql->hive,hive->mysql,mysql->oracle,oracle->mysql,oracle->hive,hive->oracle,mongodb->hive,hive->mongodb,mysql->elasticsearch,oracle->elasticsearch,mongodb->elasticsearch,mysql->mongodb,mongodb->mysql,oracle->mongodb,mongodb->oracle', 'com.webank.wedatasphere.exchangis.engine.resource.loader.datax.DataxEngineResourceLoader', NULL, NULL, '2022-08-09 18:20:51.0'),
+(2, 'sqoop', 'hadoop tool', '{}', 'mysql->hive,hive->mysql', '', NULL, NULL, '2022-08-09 18:20:51.0');
+
+-- exchangis_job_transform_rule records
+INSERT INTO `exchangis_job_transform_rule` (rule_name,rule_type,rule_source,data_source_type,engine_type,direction) VALUES
+('es_with_post_processor','DEF','{"types": ["MAPPING", "PROCESSOR"]}','ELASTICSEARCH',NULL,'SINK')
+,('es_fields_not_editable','MAPPING','{"fieldEditEnable": false, "fieldDeleteEnable": false}','ELASTICSEARCH',NULL,'SINK')
+,('hive_sink_not_access','MAPPING','{"fieldEditEnable": false, "fieldDeleteEnable": false, "fieldAddEnable": false}','HIVE',NULL,'SINK')
+,('mongo_field_match','MAPPING','{"fieldMatchStrategyName": "CAMEL_CASE_MATCH"}','MONGODB',NULL,'SINK')
+,('mysql_field_source_match','MAPPING','{"fieldMatchStrategyName": "CAMEL_CASE_MATCH","fieldEditEnable": true, "fieldDeleteEnable": true, "fieldAddEnable": false}','MYSQL',NULL,'SOURCE')
+;
\ No newline at end of file
diff --git a/db/1.1.2/exchangis_ddl.sql b/db/1.1.2/exchangis_ddl.sql
new file mode 100644
index 000000000..3609cadfd
--- /dev/null
+++ b/db/1.1.2/exchangis_ddl.sql
@@ -0,0 +1 @@
+ALTER TABLE exchangis_job_entity MODIFY COLUMN name varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NOT NULL;
\ No newline at end of file
diff --git a/db/1.1.3/exchangis_ddl.sql b/db/1.1.3/exchangis_ddl.sql
new file mode 100644
index 000000000..a503dba4e
--- /dev/null
+++ b/db/1.1.3/exchangis_ddl.sql
@@ -0,0 +1 @@
+ALTER TABLE exchangis_launchable_task CHANGE linkis_job_content linkis_job_content mediumtext NULL;
\ No newline at end of file
diff --git a/db/1.1.3/exchangis_dml.sql b/db/1.1.3/exchangis_dml.sql
new file mode 100644
index 000000000..8d8530575
--- /dev/null
+++ b/db/1.1.3/exchangis_dml.sql
@@ -0,0 +1,7 @@
+INSERT INTO `exchangis_job_param_config` (config_key,config_name,config_direction,`type`,ui_type,ui_field,ui_label,unit,required,value_type,value_range,default_value,validate_type,validate_range,validate_msg,is_hidden,is_advanced,source,`level`,treename,sort,description,status,ref_id) VALUES
+('writeMode','写入方式','DATAX-SINK','STARROCKS','OPTION','writeMode','写入方式','',1,'OPTION','["insert"]','insert','','','写入方式输入错误',0,0,'',1,'',1,'',1,NULL)
+,('batchSize','批量字节数大小','DATAX-SINK','STARROCKS','INPUT','maxBatchSize','批量字节数大小','',0,'NUMBER','','','REGEX','^[1-9]\\d*$','批量大小输入错误',0,0,'',1,'',2,'',1,NULL);
+
+UPDATE exchangis_engine_settings
+SET engine_direction='mysql->hive,hive->mysql,mysql->oracle,oracle->mysql,oracle->hive,hive->oracle,mongodb->hive,hive->mongodb,mysql->elasticsearch,oracle->elasticsearch,mongodb->elasticsearch,mysql->mongodb,mongodb->mysql,oracle->mongodb,mongodb->oracle,hive->starrocks'
+WHERE engine_name='datax';
\ No newline at end of file
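The versioned scripts above are meant to be applied in order when upgrading an existing deployment. A minimal sketch, assuming the db.sh variables have been filled in and the target database already contains the 1.1.1 schema:

```bash
#!/bin/bash
# Apply the incremental upgrade scripts in version order.
source assembly-package/config/db.sh

for v in 1.1.2 1.1.3; do
  for f in db/${v}/exchangis_ddl.sql db/${v}/exchangis_dml.sql; do
    [ -f "${f}" ] || continue   # 1.1.2 ships only a DDL script
    mysql -h "${MYSQL_HOST}" -P "${MYSQL_PORT}" -u "${MYSQL_USERNAME}" -p"${MYSQL_PASSWORD}" \
      --default-character-set=utf8 "${DATABASE}" < "${f}"
  done
done
```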
DEFAULT NULL, + `engine_type` varchar(45) DEFAULT '', + `job_labels` varchar(255) DEFAULT NULL, + `create_user` varchar(100) DEFAULT NULL, + `job_content` mediumtext, + `execute_user` varchar(100) DEFAULT '', + `job_params` text, + `job_desc` varchar(255) DEFAULT NULL, + `job_type` varchar(50) DEFAULT NULL, + `project_id` bigint(13) DEFAULT NULL, + `source` text, + `modify_user` varchar(50) DEFAULT NULL COMMENT '修改用户', + PRIMARY KEY (`id`) +) ENGINE=InnoDB AUTO_INCREMENT=5793 DEFAULT CHARSET=utf8; + + +-- exchangis_job_param_config definition +DROP TABLE IF EXISTS `exchangis_job_param_config`; +CREATE TABLE `exchangis_job_param_config` ( + `id` bigint(20) NOT NULL AUTO_INCREMENT, + `config_key` varchar(64) NOT NULL, + `config_name` varchar(64) NOT NULL, + `config_direction` varchar(16) DEFAULT NULL, + `type` varchar(32) NOT NULL, + `ui_type` varchar(32) DEFAULT NULL, + `ui_field` varchar(64) DEFAULT NULL, + `ui_label` varchar(32) DEFAULT NULL, + `unit` varchar(32) DEFAULT NULL, + `required` bit(1) DEFAULT b'0', + `value_type` varchar(32) DEFAULT NULL, + `value_range` varchar(255) DEFAULT NULL, + `default_value` varchar(255) DEFAULT NULL, + `validate_type` varchar(64) DEFAULT NULL, + `validate_range` varchar(64) DEFAULT NULL, + `validate_msg` varchar(255) DEFAULT NULL, + `is_hidden` bit(1) DEFAULT NULL, + `is_advanced` bit(1) DEFAULT NULL, + `source` varchar(255) DEFAULT NULL, + `level` tinyint(4) DEFAULT NULL, + `treename` varchar(32) DEFAULT NULL, + `sort` int(11) DEFAULT NULL, + `description` varchar(255) DEFAULT NULL, + `status` tinyint(4) DEFAULT NULL, + `ref_id` bigint(20) DEFAULT NULL, + PRIMARY KEY (`id`) +) ENGINE=InnoDB AUTO_INCREMENT=32 DEFAULT CHARSET=utf8; + +-- exchangis_project_info definition +DROP TABLE IF EXISTS `exchangis_project_info`; +-- udes_gzpc_pub_sit_01.exchangis_project_info definition +CREATE TABLE `exchangis_project_info` ( + `id` bigint(20) NOT NULL AUTO_INCREMENT, + `name` varchar(64) NOT NULL, + `description` varchar(255) DEFAULT NULL, + `create_time` datetime DEFAULT CURRENT_TIMESTAMP, + `last_update_time` datetime DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, + `create_user` varchar(64) DEFAULT NULL, + `last_update_user` varchar(64) DEFAULT NULL, + `project_labels` varchar(255) DEFAULT NULL, + `domain` varchar(32) DEFAULT NULL, + `exec_users` varchar(255) DEFAULT '', + `view_users` varchar(255) DEFAULT '', + `edit_users` varchar(255) DEFAULT '', + `source` text, + PRIMARY KEY (`id`) +) ENGINE=InnoDB AUTO_INCREMENT=1497870871035974171 DEFAULT CHARSET=utf8; + +-- exchangis_project_user definition +DROP TABLE IF EXISTS `exchangis_project_user`; +CREATE TABLE `exchangis_project_user` ( + `id` bigint(20) NOT NULL AUTO_INCREMENT, + `project_id` bigint(20) NOT NULL, + `priv_user` varchar(32) COLLATE utf8_bin DEFAULT NULL, + `priv` int(20) DEFAULT NULL, + `last_update_time` datetime DEFAULT CURRENT_TIMESTAMP, + PRIMARY KEY (`id`), + UNIQUE KEY `exchangis_project_user_un` (`project_id`,`priv_user`,`priv`) +) ENGINE=InnoDB AUTO_INCREMENT=844 DEFAULT CHARSET=utf8 COLLATE=utf8_bin ROW_FORMAT=COMPACT; + +-- exchangis_launchable_task definition +DROP TABLE IF EXISTS `exchangis_launchable_task`; +CREATE TABLE `exchangis_launchable_task` ( + `id` bigint(13) NOT NULL, + `name` varchar(100) NOT NULL, + `job_execution_id` varchar(64) DEFAULT NULL, + `create_time` datetime DEFAULT NULL, + `last_update_time` datetime(3) DEFAULT NULL, + `engine_type` varchar(45) DEFAULT '', + `execute_user` varchar(50) DEFAULT '', + `linkis_job_name` varchar(100) NOT NULL, + 
`linkis_job_content` mediumtext NOT NULL, + `linkis_params` text DEFAULT NULL, + `linkis_source` varchar(64) DEFAULT NULL, + `labels` varchar(64) DEFAULT NULL, + PRIMARY KEY (`id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8; + +-- exchangis_launched_job_entity definition +DROP TABLE IF EXISTS `exchangis_launched_job_entity`; +CREATE TABLE `exchangis_launched_job_entity` ( + `id` bigint(20) NOT NULL AUTO_INCREMENT, + `name` varchar(100) NOT NULL, + `create_time` datetime DEFAULT NULL, + `last_update_time` datetime(3) DEFAULT NULL, + `job_id` bigint(20) DEFAULT NULL, + `launchable_task_num` int(20) DEFAULT '0', + `engine_type` varchar(100) DEFAULT NULL, + `execute_user` varchar(100) DEFAULT NULL, + `job_name` varchar(100) DEFAULT NULL, + `status` varchar(100) DEFAULT NULL, + `progress` varchar(100) DEFAULT NULL, + `error_code` varchar(64) DEFAULT NULL, + `error_msg` varchar(255) DEFAULT NULL, + `retry_num` bigint(10) DEFAULT NULL, + `job_execution_id` varchar(255) DEFAULT NULL, + `log_path` varchar(255) DEFAULT NULL, + `create_user` varchar(100) DEFAULT NULL, + PRIMARY KEY (`id`), + UNIQUE KEY `job_execution_id_UNIQUE` (`job_execution_id`) +) ENGINE=InnoDB AUTO_INCREMENT=8380 DEFAULT CHARSET=utf8; + +-- exchangis_launched_task_entity definition +DROP TABLE IF EXISTS `exchangis_launched_task_entity`; +CREATE TABLE `exchangis_launched_task_entity` ( + `id` bigint(20) NOT NULL, + `name` varchar(100) NOT NULL, + `create_time` datetime DEFAULT NULL, + `last_update_time` datetime(3) DEFAULT NULL, + `job_id` bigint(20) DEFAULT NULL, + `engine_type` varchar(100) DEFAULT NULL, + `execute_user` varchar(100) DEFAULT NULL, + `job_name` varchar(100) DEFAULT NULL, + `progress` varchar(64) DEFAULT NULL, + `error_code` varchar(64) DEFAULT NULL, + `error_msg` varchar(255) DEFAULT NULL, + `retry_num` bigint(10) DEFAULT NULL, + `task_id` varchar(64) DEFAULT NULL, + `linkis_job_id` varchar(200) DEFAULT NULL, + `linkis_job_info` varchar(1000) DEFAULT NULL, + `job_execution_id` varchar(100) DEFAULT NULL, + `launch_time` datetime DEFAULT NULL, + `running_time` datetime DEFAULT NULL, + `metrics` text, + `status` varchar(64) DEFAULT NULL, + PRIMARY KEY (`id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8; + +-- exchangis_job_func definition +DROP TABLE IF EXISTS `exchangis_job_func`; +CREATE TABLE `exchangis_job_func` ( + `id` int(11) NOT NULL AUTO_INCREMENT, + `func_type` varchar(50) NOT NULL, + `func_name` varchar(100) NOT NULL, + `tab_name` varchar(50) NOT NULL COMMENT 'Tab', + `name_dispaly` varchar(100) DEFAULT NULL, + `param_num` int(11) DEFAULT '0', + `ref_name` varchar(100) DEFAULT NULL, + `description` varchar(200) DEFAULT NULL, + `modify_time` datetime DEFAULT NULL, + `create_time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, + PRIMARY KEY (`id`), + UNIQUE KEY `job_func_tab_name_idx` (`tab_name`,`func_name`) +) ENGINE=InnoDB AUTO_INCREMENT=12 DEFAULT CHARSET=utf8; + +-- exchangis_job_func_params definition +DROP TABLE IF EXISTS `exchangis_job_func_params`; +CREATE TABLE IF NOT EXISTS `exchangis_job_func_params`( + `func_id` INT(11) NOT NULL, + `param_name` VARCHAR(100) NOT NULL, + `order` INT(11) DEFAULT 0, + `name_display` VARCHAR(100), + `create_time` TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + PRIMARY KEY(`func_id`, `param_name`) +)Engine=InnoDB DEFAULT CHARSET=utf8; + +-- exchangis_engine_resources definition +DROP TABLE IF EXISTS `exchangis_engine_resources`; +CREATE TABLE `exchangis_engine_resources` ( + `id` bigint(20) NOT NULL AUTO_INCREMENT, + `engine_type` varchar(50) NOT NULL, + `resource_name` 
varchar(100) NOT NULL, + `resource_type` varchar(50) NOT NULL COMMENT 'resource type' DEFAULT 'file', + `resource_path` varchar(255) NOT NULL, + `store_uri` varchar(500) NOT NULL, + `create_user` varchar(50) NOT NULL, + `modify_time` datetime DEFAULT NULL, + `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP, + PRIMARY KEY (`id`), + UNIQUE KEY `engine_res_idx` (`engine_type`,`resource_path`) + ) ENGINE=InnoDB DEFAULT CHARSET=utf8; + +-- exchangis_engine_settings definition +DROP TABLE IF EXISTS `exchangis_engine_settings`; +CREATE TABLE `exchangis_engine_settings` ( + `id` bigint(20) NOT NULL AUTO_INCREMENT, + `engine_name` varchar(50) NOT NULL, + `engine_desc` varchar(500) NOT NULL, + `engine_settings_value` text, + `engine_direction` varchar(255) NOT NULL, + `res_loader_class` varchar(255), + `res_uploader_class` varchar(255), + `modify_time` datetime DEFAULT NULL, + `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP, + PRIMARY KEY (`id`), + UNIQUE KEY `engine_setting_idx` (`engine_name`) + ) ENGINE=InnoDB DEFAULT CHARSET=utf8; + +-- exchangis_job_transform_rule +DROP TABLE IF EXISTS `exchangis_job_transform_rule`; +CREATE TABLE `exchangis_job_transform_rule` ( + `id` bigint(20) NOT NULL AUTO_INCREMENT, + `rule_name` varchar(100) NOT NULL DEFAULT 'transform_rule', + `rule_type` varchar(64) NOT NULL DEFAULT 'DEF', + `rule_source` varchar(600) DEFAULT '{}', + `data_source_type` varchar(64) NOT NULL, + `engine_type` varchar(32), + `direction` varchar(32) NOT NULL DEFAULT 'NONE', + `create_time` datetime DEFAULT CURRENT_TIMESTAMP, + PRIMARY KEY (`id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8; + +-- exchangis_job_transform_processor +DROP TABLE IF EXISTS `exchangis_job_transform_processor`; +CREATE TABLE `exchangis_job_transform_processor` ( + `id` bigint(20) NOT NULL AUTO_INCREMENT, + `job_id` bigint(20) NOT NULL, + `code_content` text DEFAULT NULL, + `code_language` varchar(32) NOT NULL DEFAULT 'java', + `code_bml_resourceId` varchar(255) COMMENT 'BML resource id', + `code_bml_version` varchar(255) COMMENT 'BML version', + `creator` varchar(50) NOT NULL COMMENT 'Owner of processor', + `create_time` datetime DEFAULT CURRENT_TIMESTAMP, + `update_time` datetime DEFAULT CURRENT_TIMESTAMP, + PRIMARY KEY (`id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8; \ No newline at end of file diff --git a/db/exchangis_dml.sql b/db/exchangis_dml.sql new file mode 100644 index 000000000..967381fa0 --- /dev/null +++ b/db/exchangis_dml.sql @@ -0,0 +1,93 @@ +-- job_func records +INSERT INTO `exchangis_job_func`(func_type,func_name,tab_name,name_dispaly,param_num,ref_name,description,modify_time) VALUES +('TRANSFORM','dx_substr','DATAX',NULL,2,NULL,NULL,NULL) +,('TRANSFORM','dx_pad','DATAX',NULL,3,NULL,NULL,NULL) +,('TRANSFORM','dx_replace','DATAX',NULL,3,NULL,NULL,NULL) +,('VERIFY','like','DATAX',NULL,1,'dx_filter',NULL,NULL) +,('VERIFY','not like','DATAX',NULL,1,'dx_filter',NULL,NULL) +,('VERIFY','>','DATAX',NULL,1,'dx_filter',NULL,NULL) +,('VERIFY','<','DATAX',NULL,1,'dx_filter',NULL,NULL) +,('VERIFY','=','DATAX',NULL,1,'dx_filter',NULL,NULL) +,('VERIFY','!=','DATAX',NULL,1,'dx_filter',NULL,NULL) +,('VERIFY','>=','DATAX',NULL,1,'dx_filter',NULL,NULL) +,('TRANSFORM','dx_precision','DATAX',NULL,1,NULL,NULL,NULL) +; + +-- job_func_params records +INSERT INTO `exchangis_job_func_params`(`func_id`, `param_name`, `name_display`, `order`) VALUES(1, 'startIndex', 'startIndex', 0) ON DUPLICATE KEY UPDATE `name_display` = 'startIndex'; +INSERT INTO `exchangis_job_func_params`(`func_id`, `param_name`, 
`name_display`, `order`) VALUES(1, 'length', 'length', 1) ON DUPLICATE KEY UPDATE `name_display` = 'length'; +INSERT INTO `exchangis_job_func_params`(`func_id`, `param_name`, `name_display`, `order`) VALUES(2, 'padType', 'padType(r or l)', 0) ON DUPLICATE KEY UPDATE `name_display` = 'padType(r or l)'; +INSERT INTO `exchangis_job_func_params`(`func_id`, `param_name`, `name_display`, `order`) VALUES(2, 'length', 'length', 1) ON DUPLICATE KEY UPDATE `name_display` = 'length'; +INSERT INTO `exchangis_job_func_params`(`func_id`, `param_name`, `name_display`, `order`) VALUES(2, 'padString', 'padString', 2) ON DUPLICATE KEY UPDATE `name_display` = 'padString'; +INSERT INTO `exchangis_job_func_params`(`func_id`, `param_name`, `name_display`, `order`) VALUES(3, 'startIndex', 'startIndex', 0) ON DUPLICATE KEY UPDATE `name_display` = 'startIndex'; +INSERT INTO `exchangis_job_func_params`(`func_id`, `param_name`, `name_display`, `order`) VALUES(3, 'length', 'length', 1) ON DUPLICATE KEY UPDATE `name_display` = 'length'; +INSERT INTO `exchangis_job_func_params`(`func_id`, `param_name`, `name_display`, `order`) VALUES(3, 'replaceString', 'replaceString', 2) ON DUPLICATE KEY UPDATE `name_display` = 'replaceString'; +INSERT INTO `exchangis_job_func_params`(`func_id`, `param_name`, `name_display`) VALUES(4, 'value', 'value') ON DUPLICATE KEY UPDATE `name_display` = 'value'; +INSERT INTO `exchangis_job_func_params`(`func_id`, `param_name`, `name_display`) VALUES(5, 'value', 'value') ON DUPLICATE KEY UPDATE `name_display` = 'value'; +INSERT INTO `exchangis_job_func_params`(`func_id`, `param_name`, `name_display`) VALUES(6, 'value', 'value') ON DUPLICATE KEY UPDATE `name_display` = 'value'; +INSERT INTO `exchangis_job_func_params`(`func_id`, `param_name`, `name_display`) VALUES(7, 'value', 'value') ON DUPLICATE KEY UPDATE `name_display` = 'value'; +INSERT INTO `exchangis_job_func_params`(`func_id`, `param_name`, `name_display`) VALUES(8, 'value', 'value') ON DUPLICATE KEY UPDATE `name_display` = 'value'; +INSERT INTO `exchangis_job_func_params`(`func_id`, `param_name`, `name_display`) VALUES(9, 'value', 'value') ON DUPLICATE KEY UPDATE `name_display` = 'value'; +INSERT INTO `exchangis_job_func_params`(`func_id`, `param_name`, `name_display`) VALUES(10, 'value', 'value') ON DUPLICATE KEY UPDATE `name_display` = 'value'; + +-- job_param_config records +INSERT INTO `exchangis_job_param_config` (config_key,config_name,config_direction,`type`,ui_type,ui_field,ui_label,unit,required,value_type,value_range,default_value,validate_type,validate_range,validate_msg,is_hidden,is_advanced,source,`level`,treename,sort,description,status,ref_id) VALUES +('setting.speed.byte','作业速率限制','','DATAX','INPUT','setting.speed.bytes','作业速率限制','Mb/s',1,'NUMBER','','5','REGEX','^[1-9]\\d*$','作业速率限制输入错误',0,0,'',1,'',1,'',1,NULL) +,('setting.speed.record','作业记录数限制','','DATAX','INPUT','setting.speed.records','作业记录数限制','条/s',1,'NUMBER','','100','REGEX','^[1-9]\\d*$','作业记录数限制输入错误',0,0,'',1,'',2,'',1,NULL) +,('setting.speed.channel','作业最大并行度','','DATAX','INPUT','setting.max.parallelism','作业最大并行度','个',1,'NUMBER','','1','REGEX','^[1-9]\\d*$','作业最大并行度输入错误',0,0,'',1,'',3,'',1,NULL) +,('setting.max.memory','作业最大使用内存','','DATAX','INPUT','setting.max.memory','作业最大使用内存','Mb',1,'NUMBER','','1024','REGEX','^[1-9]\\d*$','作业最大使用内存输入错误',0,0,'',1,'',4,'',1,NULL) +,('setting.errorLimit.record','最多错误记录数','','DATAX','INPUT','setting.errorlimit.record','最多错误记录数','条',0,'NUMBER','','','REGEX','^[0-9]\\d*$','最多错误记录数输入错误',0,0,'',1,'',5,'',1,NULL) 
+,('setting.max.parallelism','作业最大并行数','','SQOOP','INPUT','setting.max.parallelism','作业最大并行数','个',1,'NUMBER','','1','REGEX','^[1-9]\\d*$','作业最大并行数输入错误',0,0,'',1,'',1,'',1,NULL)
+,('setting.max.memory','作业最大内存','','SQOOP','INPUT','setting.max.memory','作业最大内存','Mb',1,'NUMBER','','1024','REGEX','^[1-9]\\d*$','作业最大内存输入错误',0,0,'',1,'',2,'',1,NULL);
+
+INSERT INTO `exchangis_job_param_config` (config_key,config_name,config_direction,`type`,ui_type,ui_field,ui_label,unit,required,value_type,value_range,default_value,validate_type,validate_range,validate_msg,is_hidden,is_advanced,source,`level`,treename,sort,description,status,ref_id) VALUES
+('where','WHERE条件','SOURCE','MYSQL','INPUT','where','WHERE条件','',0,'VARCHAR','','','REGEX','^[\\s\\S]{0,500}$','WHERE条件输入过长',0,0,'',1,'',2,'',1,NULL)
+,('writeMode','写入方式','SQOOP-SINK','MYSQL','OPTION','writeMode','写入方式','',1,'OPTION','["INSERT","UPDATE"]','INSERT','','','写入方式输入错误',0,0,'',1,'',1,'',1,NULL)
+,('writeMode','写入方式','DATAX-SINK','MYSQL','OPTION','writeMode','写入方式','',1,'OPTION','["INSERT","UPDATE"]','INSERT','','','写入方式输入错误',0,0,'',1,'',1,'',1,NULL);
+
+INSERT INTO `exchangis_job_param_config` (config_key,config_name,config_direction,`type`,ui_type,ui_field,ui_label,unit,required,value_type,value_range,default_value,validate_type,validate_range,validate_msg,is_hidden,is_advanced,source,`level`,treename,sort,description,status,ref_id) VALUES
+('where','WHERE条件','SOURCE','TDSQL','INPUT','where','WHERE条件','',0,'VARCHAR','','','REGEX','^[\\s\\S]{0,500}$','WHERE条件输入过长',0,0,'',1,'',2,'',1,NULL)
+,('writeMode','写入方式','SQOOP-SINK','TDSQL','OPTION','writeMode','写入方式','',1,'OPTION','["INSERT","UPDATE"]','INSERT','','','写入方式输入错误',0,0,'',1,'',1,'',1,NULL)
+,('writeMode','写入方式','DATAX-SINK','TDSQL','OPTION','writeMode','写入方式','',1,'OPTION','["INSERT","UPDATE"]','INSERT','','','写入方式输入错误',0,0,'',1,'',1,'',1,NULL);
+
+INSERT INTO `exchangis_job_param_config` (config_key,config_name,config_direction,`type`,ui_type,ui_field,ui_label,unit,required,value_type,value_range,default_value,validate_type,validate_range,validate_msg,is_hidden,is_advanced,source,`level`,treename,sort,description,status,ref_id) VALUES
+('writeMode','写入方式','SQOOP-SINK','HIVE','OPTION','writeMode','写入方式(OVERWRITE只对TEXT类型表生效)','',1,'OPTION','["OVERWRITE","APPEND"]','OVERWRITE','','','写入方式输入错误',0,0,'',1,'',1,'',1,NULL)
+,('partition','分区信息','SINK','HIVE','MAP','partition','分区信息(文本)','',0,'VARCHAR','','','REGEX','^[\\s\\S]{0,50}$','分区信息过长',0,0,'/api/rest_j/v1/dss/exchangis/main/datasources/render/partition/element/map',1,'',2,'',1,NULL)
+,('partition','分区信息','SOURCE','HIVE','MAP','partition','分区信息(文本)','',0,'VARCHAR','','','REGEX','^[\\s\\S]{0,50}$','分区信息过长',0,0,'/api/rest_j/v1/dss/exchangis/main/datasources/render/partition/element/map',1,'',2,'',1,NULL)
+,('transferMode','传输方式','DATAX-SOURCE','HIVE','OPTION','transferMode','传输方式','',1,'OPTION','["记录"]','二进制','','','该传输方式不可用',0,0,'',1,'',1,'',1,NULL)
+,('nullFormat','空值字符','DATAX-SOURCE','HIVE','INPUT','nullFormat','空值字符','',0,'VARCHAR','','','REGEX','^[\\s\\S]{0,50}$','空值字符输入错误',0,0,'',1,'',2,'',1,48)
+,('writeMode','写入方式','DATAX-SINK','HIVE','OPTION','writeMode','写入方式(OVERWRITE只对TEXT类型表生效)','',1,'OPTION','["append","truncate"]','append','','','写入方式输入错误',0,0,'',1,'',1,'',1,NULL)
+,('nullFormat','空值字符','DATAX-SINK','HIVE','INPUT','nullFormat','空值字符','',0,'VARCHAR','','','REGEX','^[\\s\\S]{0,50}$','空值字符输入错误',0,0,'',1,'',2,'',1,49);
+
+INSERT INTO `exchangis_job_param_config`
(config_key,config_name,config_direction,`type`,ui_type,ui_field,ui_label,unit,required,value_type,value_range,default_value,validate_type,validate_range,validate_msg,is_hidden,is_advanced,source,`level`,treename,sort,description,status,ref_id) VALUES +('batchSize','批量大小','DATAX-SINK','ELASTICSEARCH','INPUT','batchSize','批量大小','',0,'NUMBER','','','REGEX','^[1-9]\\d*$','批量大小输入错误',0,0,'',1,'',1,'',1,NULL) +,('nullFormat','空值字符','DATAX-SINK','ELASTICSEARCH','INPUT','nullFormat','空值字符','',0,'VARCHAR','','','REGEX','^[\\s\\S]{0,50}$','空值字符输入错误',0,0,'',1,'',2,'',1,49); + +INSERT INTO `exchangis_job_param_config` (config_key,config_name,config_direction,`type`,ui_type,ui_field,ui_label,unit,required,value_type,value_range,default_value,validate_type,validate_range,validate_msg,is_hidden,is_advanced,source,`level`,treename,sort,description,status,ref_id) VALUES +('where','WHERE条件','SOURCE','ORACLE','INPUT','where','WHERE条件',NULL,0,'VARCHAR',NULL,NULL,'REGEX','^[\\s\\S]{0,500}$','WHERE条件输入过长',0,0,NULL,1,'',2,NULL,1,NULL) +,('writeMode','写入方式','DATAX-SINK','ORACLE','OPTION','writeMode','写入方式',NULL,1,'OPTION','["INSERT","UPDATE"]','INSERT',NULL,NULL,'写入方式输入错误',0,0,NULL,1,NULL,1,NULL,1,NULL); + +INSERT INTO `exchangis_job_param_config` (config_key,config_name,config_direction,`type`,ui_type,ui_field,ui_label,unit,required,value_type,value_range,default_value,validate_type,validate_range,validate_msg,is_hidden,is_advanced,source,`level`,treename,sort,description,status,ref_id) VALUES +('query','query条件','DATAX-SOURCE','MONGODB','INPUT','query','query条件','',0,'VARCHAR','','','REGEX','^[\\s\\S]{0,500}$','query条件输入过长',0,0,'',1,'',2,'',1,NULL) +,('writeMode','写入方式','DATAX-SINK','MONGODB','OPTION','writeMode','写入方式','',1,'OPTION','["insert","replace"]','insert','','','写入方式输入错误',0,0,'',1,'',1,'',1,NULL) +,('batchSize','批量大小','DATAX-SINK','MONGODB','INPUT','batchSize','批量大小','',0,'NUMBER','','','REGEX','^[1-9]\\d*$','批量大小输入错误',0,0,'',1,'',2,'',1,NULL); + +INSERT INTO `exchangis_job_param_config` (config_key,config_name,config_direction,`type`,ui_type,ui_field,ui_label,unit,required,value_type,value_range,default_value,validate_type,validate_range,validate_msg,is_hidden,is_advanced,source,`level`,treename,sort,description,status,ref_id) VALUES +('writeMode','写入方式','DATAX-SINK','STARROCKS','OPTION','writeMode','写入方式','',1,'OPTION','["upsert"]','upsert','','','写入方式输入错误',0,0,'',1,'',1,'',1,NULL) +,('batchSize','批量字节数大小','DATAX-SINK','STARROCKS','INPUT','maxBatchSize','批量字节数大小','',0,'NUMBER','','','REGEX','^[1-9]\\d*$','批量大小输入错误',0,0,'',1,'',2,'',1,NULL); + +-- engine_settings records +INSERT INTO `exchangis_engine_settings` (id, engine_name, engine_desc, engine_settings_value, engine_direction, res_loader_class, res_uploader_class, modify_time) VALUES +(1, 'datax', 'datax sync engine', '{}', 'mysql->hive,hive->mysql,mysql->oracle,oracle->mysql,oracle->hive,hive->oracle,mongodb->hive,hive->mongodb,mysql->elasticsearch,oracle->elasticsearch,mongodb->elasticsearch,mysql->mongodb,mongodb->mysql,oracle->mongodb,mongodb->oracle,hive->starrocks', 'com.webank.wedatasphere.exchangis.engine.resource.loader.datax.DataxEngineResourceLoader', NULL, NULL), +(2, 'sqoop', 'hadoop tool', '{}', 'mysql->hive,hive->mysql', '', NULL, NULL); + +-- exchangis_job_transform_rule records +INSERT INTO `exchangis_job_transform_rule` (rule_name,rule_type,rule_source,data_source_type,engine_type,direction) VALUES +('es_with_post_processor','DEF','{"types": ["MAPPING", "PROCESSOR"]}','ELASTICSEARCH',NULL,'SINK') 
+,('es_fields_not_editable','MAPPING','{"fieldEditEnable": true, "fieldDeleteEnable": true}','ELASTICSEARCH',NULL,'SINK') +,('hive_sink_not_access','MAPPING','{"fieldEditEnable": true, "fieldDeleteEnable": true, "fieldAddEnable": true}','HIVE',NULL,'SINK') +,('mongo_field_match','MAPPING','{"fieldMatchStrategyName": "CAMEL_CASE_MATCH"}','MONGODB',NULL,'SINK') +,('mysql_field_source_match','MAPPING','{"fieldMatchStrategyName": "CAMEL_CASE_MATCH","fieldEditEnable": true, "fieldDeleteEnable": true, "fieldAddEnable": true}','MYSQL',NULL,'SOURCE') +,('starrocks_field_source_match','MAPPING','{"fieldMatchStrategyName": "CAMEL_CASE_MATCH","fieldEditEnable": true, "fieldDeleteEnable": true, "fieldAddEnable": true}','STARROCKS',NULL,'SINK') +; + diff --git a/db/job_content_example.json b/db/job_content_example.json new file mode 100644 index 000000000..5046a8ed4 --- /dev/null +++ b/db/job_content_example.json @@ -0,0 +1,76 @@ +{ + "dataSources": { + "source_id": "HIVE.10001.test_db.test_table", + "sink_id": "MYSQL.10002.mask_db.mask_table" + }, + "params": { + "sources": [ + { + "config_key": "exchangis.job.ds.params.hive.transform_type", + "config_name": "传输方式", + "config_value": "二进制", + "sort": 1 + }, + { + "config_key": "exchangis.job.ds.params.hive.partitioned_by", + "config_name": "分区信息", + "config_value": "2021-07-30", + "sort": 2 + }, + { + "config_key": "exchangis.job.ds.params.hive.empty_string", + "config_name": "空值字符", + "config_value": "", + "sort": 3 + } + ], + "sinks": [ + { + "config_key": "exchangis.job.ds.params.mysql.write_type", + "config_name": "写入方式", + "config_value": "insert", + "sort": 1 + }, + { + "config_key": "exchangis.job.ds.params.mysql.batch_size", + "config_name": "批量大小", + "config_value": 1000, + "sort": 2 + } + ] + }, + "transforms": [ + { + "source_field_name": "name", + "source_field_type": "VARCHAR", + "sink_field_name": "c_name", + "sink_field_type": "VARCHAR" + }, + { + "source_field_name": "year", + "source_field_type": "VARCHAR", + "sink_field_name": "d_year", + "sink_field_type": "VARCHAR" + } + ], + "settings": [ + { + "config_key": "rate_limit", + "config_name": "作业速率限制", + "config_value": 102400, + "sort": 1 + }, + { + "config_key": "record_limit", + "config_name": "作业记录数限制", + "config_value": 10000, + "sort": 2 + }, + { + "config_key": "max_errors", + "config_name": "最多错误记录数", + "config_value": 100, + "sort": 3 + } + ] +} \ No newline at end of file diff --git a/db/job_content_example_batch.json b/db/job_content_example_batch.json new file mode 100644 index 000000000..864bf89f0 --- /dev/null +++ b/db/job_content_example_batch.json @@ -0,0 +1,153 @@ +[{ + "subJobName": "job0001", + "dataSources": { + "source_id": "HIVE.10001.test_db.test_table", + "sink_id": "MYSQL.10002.mask_db.mask_table" + }, + "params": { + "sources": [ + { + "config_key": "exchangis.job.ds.params.hive.transform_type", + "config_name": "传输方式", + "config_value": "二进制", + "sort": 1 + }, + { + "config_key": "exchangis.job.ds.params.hive.partitioned_by", + "config_name": "分区信息", + "config_value": "2021-07-30", + "sort": 2 + }, + { + "config_key": "exchangis.job.ds.params.hive.empty_string", + "config_name": "空值字符", + "config_value": "", + "sort": 3 + } + ], + "sinks": [ + { + "config_key": "exchangis.job.ds.params.mysql.write_type", + "config_name": "写入方式", + "config_value": "insert", + "sort": 1 + }, + { + "config_key": "exchangis.job.ds.params.mysql.batch_size", + "config_name": "批量大小", + "config_value": 1000, + "sort": 2 + } + ] + }, + "transforms": { + "type": "MAPPING", + 
"mapping": [ + { + "source_field_name": "name", + "source_field_type": "VARCHAR", + "sink_field_name": "c_name", + "sink_field_type": "VARCHAR" + }, + { + "source_field_name": "year", + "source_field_type": "VARCHAR", + "sink_field_name": "d_year", + "sink_field_type": "VARCHAR" + } + ] + }, + "settings": [ + { + "config_key": "exchangis.datax.setting.speed.byte", + "config_name": "传输速率", + "config_value": 102400, + "sort": 1 + }, + { + "config_key": "exchangis.datax.setting.errorlimit.record", + "config_name": "脏数据最大记录数", + "config_value": 10000, + "sort": 2 + }, + { + "config_key": "exchangis.datax.setting.errorlimit.percentage", + "config_name": "脏数据占比阈值", + "config_value": 100, + "sort": 3 + } + ] +}, { + "subJobName": "job0002", + "dataSources": { + "source_id": "HIVE.10001.superman2_db.funny2_table", + "sink_id": "MYSQL.10002.ducky2_db.chicken2_table" + }, + "params": { + "sources": [ + { + "config_key": "exchangis.job.ds.params.hive.transform_type", + "config_name": "传输方式", + "config_value": "二进制", + "sort": 1 + }, + { + "config_key": "exchangis.job.ds.params.hive.partitioned_by", + "config_name": "分区信息", + "config_value": "2021-07-30", + "sort": 2 + }, + { + "config_key": "exchangis.job.ds.params.hive.empty_string", + "config_name": "空值字符", + "config_value": "", + "sort": 3 + } + ], + "sinks": [ + { + "config_key": "exchangis.job.ds.params.mysql.write_type", + "config_name": "写入方式", + "config_value": "insert", + "sort": 1 + }, + { + "config_key": "exchangis.job.ds.params.mysql.batch_size", + "config_name": "批量大小", + "config_value": 1000, + "sort": 2 + } + ] + }, + "transforms": { + "type": "MAPPING", + "mapping": [ + { + "source_field_name": "mid", + "source_field_type": "VARCHAR", + "sink_field_name": "c_mid", + "sink_field_type": "VARCHAR" + }, + { + "source_field_name": "maxcount", + "source_field_type": "INT", + "sink_field_name": "c_maxcount", + "sink_field_type": "INT" + } + ] + }, + "settings": [ + { + "config_key": "exchangis.datax.setting.speed.byte", + "config_name": "传输速率", + "config_value": 102400, + "sort": 1 + }, + { + "config_key": "exchangis.datax.setting.errorlimit.record", + "config_name": "脏数据最大记录数", + "config_value": 100, + "sort": 2 + } + ] +}] \ No newline at end of file diff --git a/db/job_content_example_stream.json b/db/job_content_example_stream.json new file mode 100644 index 000000000..264147849 --- /dev/null +++ b/db/job_content_example_stream.json @@ -0,0 +1,57 @@ +[{ + "subJobName": "streamjob0001", + "dataSources": { + "source_id": "HIVE.10001.test_db.test_table", + "sink_id": "MYSQL.10002.mask_db.mask_table" + }, + "params": {}, + "transforms": { + "type": "SQL", + "sql": "select * from aaa" + }, + "settings": [ + { + "config_key": "exchangis.datax.setting.speed.byte", + "config_name": "传输速率", + "config_value": 102400, + "sort": 1 + }, + { + "config_key": "exchangis.datax.setting.errorlimit.record", + "config_name": "脏数据最大记录数", + "config_value": 100, + "sort": 2 + } + ] +}, { + "subJobName": "streamjob0002", + "dataSources": { + "source_id": "HIVE.10001.test_db.test_table", + "sink_id": "MYSQL.10002.mask_db.mask_table" + }, + "params": {}, + "transforms": { + "type": "SQL", + "sql": "insert into xxx" + }, + "settings": [ + { + "config_key": "exchangis.datax.setting.speed.byte", + "config_name": "传输速率", + "config_value": 102400, + "sort": 1 + }, + { + "config_key": "exchangis.datax.setting.errorlimit.record", + "config_name": "脏数据最大记录数", + "config_value": 10000, + "sort": 2 + }, + { + "config_key": "exchangis.datax.setting.errorlimit.percentage", + 
"config_name": "脏数据占比阈值", + "config_value": 100, + "sort": 3 + } + ] +}] \ No newline at end of file diff --git a/docs/en_US/ch1/component_upgrade_en.md b/docs/en_US/ch1/component_upgrade_en.md new file mode 100644 index 000000000..3bc6e3b47 --- /dev/null +++ b/docs/en_US/ch1/component_upgrade_en.md @@ -0,0 +1,75 @@ +# Exchangis Component Upgrade Documentation +This article mainly introduces the upgrade steps for adapting DSS1.1.2 and Linkis1.4.0 on the basis of the original installation of the Exchangis service. The biggest difference between the Exchangis1.1.2 and the Exchangis1.0.0 version is the installation of the ExchangisAppconn, which needs to be replaced by the entire Exchangisappconn. and load### 1.升级Exchangis前的工作 +Before you upgrade Exchangis, please follow the[DSS1.1.2Install and deploy documentation](https://github.com/WeBankFinTech/DataSphereStudio-Doc/tree/main/zh_CN/%E5%AE%89%E8%A3%85%E9%83%A8%E7%BD%B2) +and [Linkis1.4.0Install and deploy documentation](https://linkis.staged.apache.org/zh-CN/docs/1.4.0/deployment/deploy-quick)Complete the installation and upgrade of DSS and Linkis + +### 2.Exchangis upgrade steps + +#### 1)Delete old version ExchangisAppconn package + +Go to the following directory and find exchangis appconn folder and delete: +``` +{DSS_Install_HOME}/dss/dss-appconns +``` + +#### 2)Download binary package +We provide the upgrade material package of ExchangisAppconn, which you can download and use directly.[Click to jump Release interface](https://osp-1257653870.cos.ap-guangzhou.myqcloud.com/WeDatasphere/Exchangis/exchangis1.1.2/Exchangis1.1.2_install_package.zip) + +#### 3) Compile and package + +If you want to compile ExchangisAppConn by yourself, the specific compilation steps are as follows: + +1.clone Exchangis code +2.Under the exchangis-plugins module, find exchangis-appconn and compile exchangis-appconn separately +``` +cd {EXCHANGIS_CODE_HOME}/exchangis-plugins/exchangis-appconn +mvn clean install +``` +The exchangis-appconn.zip installation package will be found in this path +``` +{EXCHANGIS_CODE_HOME}\exchangis-plugins\exchangis-appconn\target\exchangis-appconn.zip +``` + +### 3.ExchangisAppConn general steps to deploy and configure plugins +1.Get the packaged exchangis-appconn.zip material package + +2.Put it in the following directory and unzip it + +``` +cd {DSS_Install_HOME}/dss/dss-appconns +unzip exchangis-appconn.zip +``` +The decompressed directory structure is: +``` +conf +db +lib +appconn.properties +``` + +3.Execute a script to automate the installation + +```shell +cd {DSS_INSTALL_HOME}/dss/bin +./install-appconn.sh +# The script is an interactive installation scheme, you need to enter the string exchangis and the ip and port of the exchangis service to complete the installation +# The exchangis port here refers to the front-end port, which is configured in nginx. instead of the backend service port +``` + +### 4.After completing the installation of exchangis-appconn, call the script to refresh the appconn service + +#### 4.1)Make the deployed APPCONN take effect +Use DSS refresh to make APPCONN take effect, enter the directory where the script is located {DSS_INSTALL_HOME}/bin, and execute the script with the following command. 
Note that there is no need to restart the DSS service:
+```
+sh ./appconn-refresh.sh
+```
+
+#### 4.2) Verify that exchangis-appconn is in effect
+After the installation and deployment of exchangis-appconn is completed, you can preliminarily verify whether exchangis-appconn is successfully installed by performing the following steps.
+1. Create a new project in the DSS workspace
+![image](https://user-images.githubusercontent.com/27387830/169782142-b2fc2633-e605-4553-9433-67756135a6f1.png)
+
+2. Check whether the project is created synchronously on the Exchangis side. If the creation is successful, the appconn installation is successful.
+![image](https://user-images.githubusercontent.com/27387830/169782337-678f2df0-080a-495a-b59f-a98c5a427cf8.png)
+
+For more usage, please refer to the [Exchangis User Manual](docs/zh_CN/ch1/exchangis_user_manual_cn.md)
diff --git a/docs/en_US/ch1/exchangis_appconn_deploy_en.md b/docs/en_US/ch1/exchangis_appconn_deploy_en.md
new file mode 100644
index 000000000..1b195c0c0
--- /dev/null
+++ b/docs/en_US/ch1/exchangis_appconn_deploy_en.md
@@ -0,0 +1,90 @@
+# ExchangisAppConn installation documentation
+
+This paper mainly introduces the deployment, configuration and installation of ExchangisAppConn in DSS (DataSphere Studio) 1.0.1.
+
+### 1. Preparations for the deployment of ExchangisAppConn
+Before you deploy ExchangisAppConn, please follow the [Exchangis installation and deployment document](docs/en_US/ch1/exchangis_deploy_en.md) to complete the installation of Exchangis and other related components, and ensure that the basic functions of the project are available.
+
+### 2. Download and compilation of the ExchangisAppConn plugin
+#### 1) Download the binary package
+We provide ExchangisAppConn's material package, which you can download and use directly. [Click to jump to the Release interface](https://github.com/WeBankFinTech/Exchangis/releases)
+#### 2) Compile and package
+
+If you want to develop and compile ExchangisAppConn yourself, the specific compilation steps are as follows:
+1. Clone the Exchangis source code
+2. In the exchangis-plugins module, find exchangis-appconn and compile it separately
+
+```
+cd {EXCHANGIS_CODE_HOME}/exchangis-plugins/exchangis-appconn
+mvn clean install
+```
+The exchangis-appconn.zip installation package will be found in this path.
+```
+{EXCHANGIS_CODE_HOME}/exchangis-plugins/exchangis-appconn/target/exchangis-appconn.zip
+```
+
+### 3. Overall steps for deployment and configuration of ExchangisAppConn
+1. Get the packed exchangis-appconn.zip material package.
+
+2. Place it in the following directory and unzip it
+
+```
+cd {DSS_Install_HOME}/dss/dss-appconns
+unzip exchangis-appconn.zip
+```
+ The extracted directory structure is:
+```
+conf
+db
+lib
+appconn.properties
+```
+
+3. Execute the script for automated installation
+
+```shell
+cd {DSS_INSTALL_HOME}/dss/bin
+./install-appconn.sh
+# The script is an interactive installation scheme. You need to enter the string exchangis and the ip and port of the exchangis service to complete the installation.
+# The exchangis port here refers to the front-end port configured in nginx, not the back-end service port.
+```
+
+### 4. After the installation of Exchangis-AppConn is completed, the DSS service needs to be restarted to finally complete the plugin update.
+
+#### 4.1) Make the deployed AppConn effective
+Make the AppConn effective by using the DSS start-stop scripts, which are located in {DSS_INSTALL_HOME}/sbin. From the installation directory, execute the following commands in turn:
+```
+sh ./sbin/dss-stop-all.sh
+sh ./sbin/dss-start-all.sh
+```
+If startup fails or hangs partway, you can quit and re-run the scripts.
+
+#### 4.2) Verify that exchangis-appconn is effective
+After exchangis-appconn is installed and deployed, the following steps can be taken to preliminarily verify whether it is successfully installed.
+1. Create a new project in the DSS workspace
+![image](https://user-images.githubusercontent.com/27387830/169782142-b2fc2633-e605-4553-9433-67756135a6f1.png)
+
+2. Check whether the project is created synchronously on the Exchangis side. Successful creation means successful installation of the appconn.
+![image](https://user-images.githubusercontent.com/27387830/169782337-678f2df0-080a-495a-b59f-a98c5a427cf8.png)
+
+For more operations, please refer to the [Exchangis User Manual](docs/zh_CN/ch1/exchangis_user_manual_cn.md)
+
+### 5. Exchangis AppConn installation principle
+
+The related configuration information of Exchangis is inserted into the following tables. By configuring these tables, you can complete the usage configuration of Exchangis. When installing the Exchangis AppConn, the script executes the init.sql under the AppConn and inserts the configuration data into these tables. (Note: if you only need to install the AppConn quickly, you don't need to pay too much attention to the following fields; most of the provided init.sql is configured by default. Focus on the operations above.)
+
+| Table name | Table function | Remark |
+| :----: | :----: |-------|
+| dss_application | The application table, mainly used to insert the basic information of the Exchangis application | Required |
+| dss_menu | Menu table, which stores the displayed contents, such as icons, names, etc. | Required |
+| dss_onestop_menu_application | Associates menu and application, used for joint lookup | Required |
+| dss_appconn | Basic information of the AppConn, used to load the AppConn | Required |
+| dss_appconn_instance | Information of an AppConn instance, including its own url information | Required |
+| dss_workflow_node | Information that Exchangis needs to insert as a workflow node | Required |
+
+Exchangis, as a scheduling framework, implements the first-level and second-level specifications. The following DSS microservices interact with the Exchangis AppConn:
+
+| Microservice name | Function | Remark |
+| :----: | :----: |-------|
+| dss-framework-project-server | Uses exchangis-appconn to manage projects and unify organization | Required |
+| dss-workflow-server | Uses the scheduling AppConn to publish workflows and obtain their status | Required |
diff --git a/docs/en_US/ch1/exchangis_datasource_en.md b/docs/en_US/ch1/exchangis_datasource_en.md
new file mode 100644
index 000000000..ffbffdf98
--- /dev/null
+++ b/docs/en_US/ch1/exchangis_datasource_en.md
@@ -0,0 +1,304 @@
+# DataSource 1.0
+
+## 1. Background
+
+Earlier versions of **Exchangis 0.x** and **Linkis 0.x** each integrated a data source module. This redesign takes **linkis-datasource** as the blueprint (please refer to the related documents) to reconstruct the data source module.
+
+## 2. Overall architecture design
+
+In order to build a common data source module, the data source module is divided into two parts: **datasource-client** and **datasource-server**. The server part is placed in the **linkis-datasource** module of **Linkis 1.0** and contains the core service logic; the client part is placed under the **exchangis-datasource** module of **Exchangis 1.0** and contains the client calling logic. The overall architecture is shown below:
+
+![linkis_datasource_structure](../../../images/zh_CN/ch1/datasource_structure.png)
+
+Figure 2-1 Overall Architecture Design +
+
+## 3. Detailed explanation of modules
+
+### 3.1 datasource-server
+
+**datasource-server**: as the name implies, the module that hosts the core services. It follows the original architecture of **linkis-datasource** (split into **datasourcemanager** and **metadatamanager**).
+
+### 3.2 linkis-datasource
+
+Schematic diagram of the current architecture:
+
+![linkis_datasource_structure](../../../images/zh_CN/ch1/linkis_datasource_structure.png)
+
+Figure 3-1 Schematic diagram of current architecture +
+
+As the figure above shows, **linkis-datasource** decouples the datasource-related functions: the basic information part is managed by **datasourcemanager**, and the metadata part is managed by **metadatamanager**. The two sub-modules call each other through RPC requests, and each provides RESTful entrances to the outside; external service requests are uniformly forwarded by **linkis-gateway** before they reach the corresponding service. Furthermore, **metadatamanager** connects to per-datasource **service** sub-modules in order to plug in third-party metadata management platforms. Each sub-module has its own implementation of the metadata acquisition interface, such as **service/hive**, **service/elasticsearch** and **service/mysql**.
+
+#### 3.2.1 New requirements
+
+##### Front-end interface requirements
+
+The original **linkis-datasource** did not include a front-end interface, so the original datasource interface design of **Exchangis 1.0** is now merged in. See the **UI document** and the **front-end interaction document** for details. The requirements involved are described below:
+
+- Datasource type - list retrieval [datasource management]
+
+Description:
+
+Get all accessed datasource types and display them
+
+- Datasource environment - list retrieval [datasource management]
+
+Description:
+
+Get the preset datasource environment parameters from the backend and display them as a list
+
+- Add/Modify datasource - label settings [datasource management]
+
+Description:
+
+Set the label information of the datasource
+
+- Connectivity detection [datasource management]
+
+Description:
+
+Check the connectivity of accessed datasources via the connectivity-detection button in the datasource list
+
+- Add/Modify datasource - configure and load [datasource management]
+
+Description:
+
+To facilitate the introduction of new datasources and the attribute expansion of existing ones, the form configuration of new/modified datasources adopts a backend-storage + front-end-loading approach. The backend saves the type, default value, loading address and simple cascading relationships of each attribute field; the front end generates abstract data structures from these and then converts them into DOM operations.
+
+Process design:
+
+1. The user selects the datasource type, and the front end requests the attribute configuration list for that datasource type from the backend;
+
+2. When the front end gets the configuration list, it first judges the type, selects the corresponding control, then sets the default value and refreshes the interface DOM;
+
+3. After the basic configuration information is loaded and rendered, the values are preloaded and the cascading relationships are established;
+
+4. The configuration is complete, waiting for the user to fill it in.
+
+Associated UI:
+
+![datasource_ui](../../../images/zh_CN/ch1/datasource_ui.png)
+
+Figure 3-2 Datasource UI +
+ +![datasource_ui_mysql](../../../images/zh_CN/ch1/datasource_ui_mysql.png) + +
+Figure 3-3 Creating MySQL Datasource +
+
+- Batch processing - batch import/export [datasource management]
+
+Description:
+
+Batch import and export of datasource configurations.
+
+##### Backend requirements
+
+The **linkis-datasource** backend has already integrated the CRUD operation logic for datasources; label- and version-related content is now added:
+
+- Datasource permission setting [datasource management]
+
+Description:
+
+The backend needs to integrate with the labeling function of Linkis 1.4.0 and give the datasource a labeling relationship.
+
+Process design:
+
+1. Users are allowed to set labels on datasources when they create and modify them;
+
+2. When saving, the label information is sent to the backend as a list of strings; the backend converts the label strings into label entities, and inserts and updates the labels;
+
+3. Save the datasource and establish the relation between the datasource and the labels.
+
+- Datasource version function [datasource management]
+
+Description:
+
+Adds the concept of a version to a datasource, which supports publishing and updating. When updating, a new version is added by default. When publishing, the datasource information of the version to be published overwrites the latest version and is marked as published.
+
+#### 3.2.2 Detailed design
+
+Some modifications and extensions are made to the entity objects contained in **linkis-datasource**, as follows:
+
+| Class Name | Role |
+| -------------------------------- | ------------------------------------------------------------ |
+| DataSourceType | Indicates the type of a datasource |
+| DataSourceParamKeyDefinition | Declares the datasource attribute configuration definitions |
+| DataSourceScope[Add] | Marks the scope of datasource attributes; there are usually three scopes: datasource, datasource environment and default (all) |
+| DataSource | The datasource entity class, including labels and attribute configuration definitions |
+| DataSourceEnv | The datasource environment entity class, which also contains attribute configuration definitions |
+| DataSourcePermissonLabel[Delete] | |
+| DataSourceLabelRelation[Add] | Represents the relationship between datasources and permission labels |
+| VersionInfo[Add] | Version information, including datasource version number information |
+
+2.1 Among them, **DataSourceParamKeyDefinition** keeps the original structure and adds some attributes to support interface rendering.
The detailed structure is as follows:
+
+| **Field name** | **Field type** | **Remark** |
+| -------------- | -------------- | ------------------------------------------------------------ |
+| id | string | persistent ID |
+| key | string | attribute name keyword |
+| description | string | description |
+| name | string | attribute display name |
+| defaultValue | string | attribute default value |
+| valueType | string | attribute value type |
+| require | boolean | whether it is a required attribute |
+| refId | string | the ID of another attribute in the cascade |
+| dataSrcTypId | string | the associated datasource type ID |
+| refMap[Add] | string | cascading relation table; the format should be: value1=refValue1, value2=refValue2 |
+| loadUrl[Add] | string | load URL, which is empty by default |
+
+2.2 The **DataSource** structure is similar, but it contains label information:
+
+| **Field name** | **Field type** | **Remark** |
+| ---------------- | -------------- | ------------------------------------------------------------ |
+| serId | string | persistent ID |
+| id | string | system ID |
+| versions[Add] | list-obj | the associated VersionInfo list |
+| srcVersion[Add] | string | version, indicating that the datasource was created from version information |
+| dataSourceName | string | datasource name |
+| dataSourceDesc | string | description of the datasource |
+| dataSourceTypeId | integer | datasource type ID |
+| connectParams | map | connection parameter dictionary |
+| parameter | string | connection attribute parameters |
+| createSystem | string | the creating system, usually empty or (exchangis) |
+| dataSourceEnvId | integer | the associated datasource environment ID |
+| keyDefinitions | list-object | list of associated attribute configuration definitions |
+| labels[Add] | map | label strings |
+| readOnly[Add] | boolean | whether it is a read-only datasource |
+| expire[Add] | boolean | whether it is expired |
+| isPub[Add] | boolean | whether it is published |
+
+2.3 **VersionInfo** holds version information. Different versions of a datasource mainly differ in connection parameters. The structure is as follows:
+
+| **Field name** | **Field type** | **Remark** |
+| -------------- | -------------- | ----------------------------- |
+| version | string | version number |
+| source | long | the associated datasource ID |
+| connectParams | map | version parameter dictionary |
+| parameter | string | version parameter string |
+
+2.4 **DataSourceType** and **DataSourceEnv** are also roughly the same as the original classes, except that **DataSourceType** needs a new **classifier** field to classify the different datasource types; the others will not be described here.
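+
+As a rough, purely illustrative sketch of how a client might consume these definitions (step 1 of the form-loading process described earlier): the gateway address, the token headers and the REST paths below are assumptions that may differ across Linkis versions.
+
+```shell
+# Illustrative sketch only: fetch the datasource types, then the attribute
+# configuration list (DataSourceParamKeyDefinition records) for one type.
+# GATEWAY, the token headers and the REST paths are assumptions here.
+GATEWAY=http://127.0.0.1:9001
+
+# List all accessed datasource types
+curl -s -H "Token-Code: EXCHANGIS-AUTH" -H "Token-User: hadoop" \
+  "$GATEWAY/api/rest_j/v1/data-source-manager/type/all"
+
+# Load the attribute definitions for one type (e.g. typeId=4 for hive)
+curl -s -H "Token-Code: EXCHANGIS-AUTH" -H "Token-User: hadoop" \
+  "$GATEWAY/api/rest_j/v1/data-source-manager/key-define/type/4"
+```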
+
+The main service processing classes of **datasource-server** are as follows:
+
+| **Interface name** | **Interface role** | **Single implementation** |
+| ------------------------------- | ------------------------------------------------------------ | ---------------------- |
+| DataSourceRelateService | Declares operations on datasource association information, including enumerating all datasource types and the attribute definition information under each type | Yes |
+| DataSourceInfoService | Declares the basic operations on datasources/datasource environments | Yes |
+| MetadataOperateService | Declares operations on the datasource metadata source, generally used for connection tests | Yes |
+| BmlAppService | Declares remote calls to the BML module to upload/download the key file of a datasource | Yes |
+| DataSourceVersionSupportService | Declares the operations supported by multiple versions of a datasource | Yes |
+| MetadataAppService[Old] | Declares operations on metadata information | Yes |
+| DataSourceBatchOpService[Add] | Declares batch operations on datasources | Yes |
+| MetadataDatabaseService[Add] | Declares operations on metadata information of database classes | Yes |
+| MetadataPropertiesService[Add] | Declares operations on metadata information of property classes | Yes |
+
+### 3.3 datasource-client
+
+**datasource-client**: contains the client-side calling logic, which can operate datasources and obtain related metadata in a client-side way.
+
+#### 3.3.1 Related requirements
+
+##### Backend requirements
+
+As the requesting client, **datasource-client** has no front-end interface requirements, and its backend requirements are relatively simple: build a stable, retryable and traceable client that directly interfaces with all interfaces supported by the server and supports as many access modes as possible.
+
+#### 3.3.2 Detailed design
+
+Its organizational structure is generally designed as follows:
+
+![datasource_client_scructure](../../../images/zh_CN/ch1/datasource_client_scructure.png)
+
+Figure 3-4 Detailed Design of datasource-client +
+
+The class/interface information involved is as follows:
+
+| Class/interface name | Class/interface role | Single implementation |
+| ----------------------------- | ------------------------------------------------------------ | ------------------ |
+| RemoteClient | The top-level interface of the client; declares the common interface methods for initialization, release and basic permission verification | No |
+| RemoteClientBuilder | The construction class of the client, built according to the different implementation classes of RemoteClient | Yes |
+| AbstractRemoteClient | The abstract implementation of RemoteClient, involving logic such as retry, statistics and caching | Yes |
+| DataSourceRemoteClient | Declares all operation entries of the datasource client | No |
+| MetaDataRemoteClient | Declares all operation entries of the metadata client | No |
+| LinkisDataSourceRemoteClient | The datasource client implementation for linkis-datasource | Yes |
+| LinkisMetaDataRemoteClient | The metadata client implementation for linkis-datasource | Yes |
+| MetadataRemoteAccessService | Declares the underlying interface for accessing remote third-party metadata services | Yes |
+| DataSourceRemoteAccessService | Declares the underlying interface for accessing remote third-party datasource services | Yes |
+
+The class relationship diagram is as follows:
+
+![datasource_client_class_relation](../../../images/zh_CN/ch1/datasource_client_class_relation.png)
+
+Figure 3-5 datasource-client Class Relationship Group Diagram +
+
+##### Process sequence diagrams
+
+Next, combining all modules, the calling relationships between interfaces/classes in the business processes are described in detail:
+
+- Create datasource
+
+Key points:
+
+1. Before creating a datasource, you need to pull the datasource type list and the attribute configuration definition list of the datasource corresponding to the type. In some cases, you also need to pull the datasource environment list;
+
+2. There are two scenarios for creating datasources: one is created through the interface of **linkis-datasource**, and the other is created through the datasource-client of **exchangis**;
+
+3. Datasource types, attribute configuration definitions and datasource environments can be added directly in the backend database. Currently, there is no dynamic interface-based configuration method (to be provided).
+
+Now look at the sequence diagram of creating a datasource:
+
+![datasource_client_create](../../../images/zh_CN/ch1/datasource_client_create.png)
+
+Figure 3-6 datasource-client create-datasource sequence diagram
+
+
+Next, look at the sequence of creating a datasource through the **datasource-client** call:
+
+![datasource_client_create2](../../../images/zh_CN/ch1/datasource_client_create2.png)
+
+Figure 3-7 datasource-client create-datasource call sequence diagram
+
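+
+As a rough REST-level companion to the two create flows above (illustrative only: the path, the JSON body fields and the token headers are assumptions and may not match every Linkis version):
+
+```shell
+# Illustrative sketch only: create a datasource record through linkis-gateway.
+# The path, the body fields (dataSourceTypeId=1 is a hypothetical mysql type
+# id) and the headers are assumptions.
+GATEWAY=http://127.0.0.1:9001
+curl -s -X POST -H "Token-Code: EXCHANGIS-AUTH" -H "Token-User: hadoop" \
+  -H "Content-Type: application/json" \
+  -d '{"dataSourceName":"demo_mysql","dataSourceTypeId":1,"dataSourceDesc":"demo","createSystem":"exchangis"}' \
+  "$GATEWAY/api/rest_j/v1/data-source-manager/info/json"
+```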
+
+Some additional steps, such as client connection authentication, request recording and life-cycle monitoring, are omitted from the figures above to keep the overall calling process simple.
+
+- Update datasource
+
+Key points:
+
+1. There are two ways to update: version update and ordinary update. A version update generates a new version of the datasource (which can be deleted or published), while an ordinary update overwrites the current datasource and does not generate a new version;
+
+2. Only the creator and administrator users of a datasource can update a published datasource.
+
+![datasource_client_update](../../../images/zh_CN/ch1/datasource_client_update.png)
+
+Figure 3-8 datasource-client update-datasource sequence diagram
+
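+
+To make the version-update path concrete, a client could publish a chosen version after saving it. This is only a sketch with an assumed REST path and hypothetical identifiers:
+
+```shell
+# Illustrative sketch only: publish version 2 of datasource 1 so that it
+# overwrites the latest version, as described above. The path is an
+# assumption and may differ across Linkis versions.
+GATEWAY=http://127.0.0.1:9001
+curl -s -X POST -H "Token-Code: EXCHANGIS-AUTH" -H "Token-User: hadoop" \
+  "$GATEWAY/api/rest_j/v1/data-source-manager/publish/1/2"
+```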
+
+- Query datasource
+
+Key points:
+
+1. When you get the datasource list through datasource-client, you need to attach the operating user information for permission filtering of the datasources.
+
+Sequence diagram:
+
+![datasource_client_query](../../../images/zh_CN/ch1/datasource_client_query.png)
+
+Figure 3-9 datasource-client Query Datasource Sequence Diagram +
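+
+A rough illustration of such a permission-filtered list query through linkis-gateway; the Token-User header carries the operating user used for filtering, while the path and parameters are assumptions that may differ across versions:
+
+```shell
+# Illustrative sketch only: page through the datasource list as user hadoop;
+# the server filters the results by that user's permissions.
+GATEWAY=http://127.0.0.1:9001
+curl -s -H "Token-Code: EXCHANGIS-AUTH" -H "Token-User: hadoop" \
+  "$GATEWAY/api/rest_j/v1/data-source-manager/info?currentPage=1&pageSize=10"
+```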
+
+Interface design: (refer to the existing interfaces of linkis-datasource and supplement them)
\ No newline at end of file
diff --git a/docs/en_US/ch1/exchangis_datax_deploy_en.md b/docs/en_US/ch1/exchangis_datax_deploy_en.md
new file mode 100644
index 000000000..7458e8212
--- /dev/null
+++ b/docs/en_US/ch1/exchangis_datax_deploy_en.md
@@ -0,0 +1,84 @@
+# DataX engine usage documentation
+
+### Prepare the environment
+
+The DataX engine is an indispensable component for executing Exchangis data synchronization tasks; such tasks can only be performed after the DataX engine is installed and deployed. Also, ensure that DataX itself is installed on the deployment machine.
+
+Before you install and deploy the DataX engine, please complete the installation of Exchangis and related components according to the [Exchangis installation and deployment document](docs/en_US/ch1/exchangis_deploy_en.md), and ensure that the basic functions of the project are available.
+
+It is strongly recommended that you use native DataX to run a test task on this node before executing a DataX task, so as to check whether the node's environment is normal.
+
+| Environment variable name | Environment variable content | Remark |
+| :-----------------------: | :--------------------------: | ------------ |
+| JAVA_HOME | JDK installation path | Required |
+| DATAX_HOME | DataX installation path | Not required |
+| DATAX_CONF_DIR | DataX config path | Not required |
+
+### Prepare the installation package
+
+#### 1) Download the binary package
+
+Exchangis 1.1.2 and Linkis 1.4.0 support the mainstream DataX versions 1.4.6 and 1.4.7; later versions may require modifying some code and recompiling.
+
+[Click to jump to the Release interface](https://github.com/WeBankFinTech/Exchangis/releases/tag/release-1.1.2)
+
+#### 2) Compile and package
+
+If you want to develop and compile the datax engine yourself, the specific compilation steps are as follows:
+
+1. Clone the Exchangis source code
+
+2. Under the exchangis-plugins module, find the datax engine and compile it separately, as follows:
+
+```
+cd {EXCHANGIS_CODE_HOME}/exchangis-plugins/engine/datax
+mvn clean install
+```
+
+Then the datax engine installation package will be found in this path:
+
+```
+{EXCHANGIS_CODE_HOME}/exchangis-plugins/engine/datax/target/out/datax
+```
+
+
+### Start deployment
+
+#### 1) DataX engine installation
+
+1. Get the packed datax.zip material package; the directory structure is
+
+```shell
+datax
+-- dist
+-- plugin
+```
+
+2. Place it in the following directory in the linkis installation path
+
+```shell
+cd {LINKIS_HOME}/linkis/lib/linkis-engineconn-plugins
+```
+
+(Note which users the datax engine directory should be accessible to; generally it should belong to the hadoop user and the hadoop user group.)
+
+
+#### 2) Restart the linkis-engineplugin service to make the datax engine take effect
+
+New engines added to linkis will not take effect until the engineplugin service of linkis is restarted; the restart script is ./linkis-daemon.sh in the Linkis installation directory. The specific steps are as follows:
+
+```
+cd {LINKIS_INSTALL_HOME}/linkis/sbin/
+./linkis-daemon.sh restart cg-engineplugin
+```
+
+After the service is successfully started, check whether the datax engine is installed in the linkis database:
+
+```shell
+select * from linkis_cg_engine_conn_plugin_bml_resources where engine_conn_type='datax';
+```
+
+At this point, the datax installation and deployment is complete.
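+
+As an optional sanity check (the paths below are assumptions based on the directories used above and a typical Linkis log layout; adjust them to your installation):
+
+```shell
+# Optional sanity check: confirm the engine files are in place and watch the
+# engineplugin log for datax entries after the restart. Paths are assumptions.
+ls {LINKIS_HOME}/linkis/lib/linkis-engineconn-plugins/datax
+tail -n 50 {LINKIS_HOME}/logs/linkis-cg-engineplugin.log | grep -i datax
+```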
+
+For a more detailed introduction to engineplugin, please refer to the following article:
+https://linkis.apache.org/zh-CN/docs/latest/deployment/install-engineconn
\ No newline at end of file
diff --git a/docs/en_US/ch1/exchangis_deploy_en.md b/docs/en_US/ch1/exchangis_deploy_en.md
new file mode 100644
index 000000000..bcef1232d
--- /dev/null
+++ b/docs/en_US/ch1/exchangis_deploy_en.md
@@ -0,0 +1,307 @@
+## Foreword
+
+Exchangis installation is mainly divided into the following four steps :
+
+1. Preparation of the environment Exchangis depends on
+2. Exchangis installation and deployment
+3. DSS ExchangisAppConn installation and deployment
+4. Linkis Sqoop engine installation and deployment
+
+## 1. Preparation of the environment Exchangis depends on
+
+#### 1.1 Basic software installation
+
+| Dependent components | Must be installed | Installation guide |
+|------------------------------------------------------------------------------| ------ | --------------- |
+| JDK (1.8.0_141) | yes | [How to install JDK](https://www.oracle.com/java/technologies/downloads/) |
+| MySQL (5.5+) | yes | [How to install MySQL](https://mysql.net.cn/) |
+| Hadoop (3.3.4; other versions of Hadoop require compiling Linkis yourself) | yes | [Hadoop deployment](https://www.apache.org/dyn/closer.cgi/hadoop/common/hadoop-3.3.4/hadoop-3.3.4.tar.gz) |
+| Hive (2.3.3; other versions of Hive require compiling Linkis yourself) | yes | [Hive quick installation](https://www.apache.org/dyn/closer.cgi/hive/) |
+| Sqoop (1.4.6) | yes | [How to install Sqoop](https://sqoop.apache.org/docs/1.4.6/SqoopUserGuide.html) |
+| DSS1.1.2 | yes | [How to install DSS](https://github.com/WeBankFinTech/DataSphereStudio-Doc/tree/main/zh_CN/%E5%AE%89%E8%A3%85%E9%83%A8%E7%BD%B2) |
+| Linkis1.4.0 | yes | [How to install Linkis](https://linkis.apache.org/zh-CN/docs/1.4.0/deployment/deploy-quick) |
+| Nginx | yes | [How to install Nginx](http://nginx.org/) |
+
+Underlying component checking:
+
+$\color{#FF0000}{Note: be sure to reinstall DSS1.1.2 and Linkis1.4.0. Please recompile Linkis and use the package released on June 15th}$
+
+[linkis1.4.0 code address ](https://github.com/apache/incubator-linkis/tree/release-1.4.0)
+
+[DSS1.1.2 code address ](https://github.com/WeBankFinTech/DataSphereStudio)
+
+Enable the datasource services
+
+By default, the two datasource-related services (ps-data-source-manager, ps-metadatamanager) are not started by the Linkis startup script. If you want to use datasource services, you can enable them by setting `export ENABLE_METADATA_MANAGER=true` in `$LINKIS_CONF_DIR/linkis-env.sh`. When the services are started and stopped through linkis-start-all.sh/linkis-stop-all.sh, the datasource services will be started and stopped accordingly. For more details about data sources, please refer to [Data Source Function Usage](https://linkis.apache.org/zh-CN/docs/1.1.0/deployment/start-metadatasource)
+
+#### 1.2 Create Linux users
+
+Please keep the deployment user of Exchangis consistent with that of Linkis; for example, use the hadoop account for both.
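+
+A minimal sketch of enabling the datasource services described above, assuming a default Linkis layout where $LINKIS_HOME points at the Linkis installation directory:
+
+```shell
+# Enable the datasource-related services, then restart Linkis.
+vim $LINKIS_HOME/conf/linkis-env.sh
+# set: export ENABLE_METADATA_MANAGER=true
+sh $LINKIS_HOME/sbin/linkis-stop-all.sh
+sh $LINKIS_HOME/sbin/linkis-start-all.sh
+```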
+
+#### 1.3 Add a dedicated token for Exchangis in Linkis
+
+###### 1)Add a dedicated token for Exchangis in Linkis:
+
+```sql
+INSERT INTO `linkis_mg_gateway_auth_token`(`token_name`,`legal_users`,`legal_hosts`,`business_owner`,`create_time`,`update_time`,`elapse_day`,`update_by`) VALUES ('EXCHANGIS-AUTH','*','*','BDP',curdate(),curdate(),-1,'LINKIS');
+```
+
+###### 2)Authenticate the hive data source for Exchangis
+
+Insert the hive data source environment configuration by executing the following SQL statements in the Linkis database. Note that ${HIVE_METADATA_IP} and ${HIVE_METADATA_PORT} in the statements need to be modified before execution, for example: ${HIVE_METADATA_IP}=127.0.0.1, ${HIVE_METADATA_PORT}=3306:
+
+```sql
+INSERT INTO `linkis_ps_dm_datasource_env` (`env_name`, `env_desc`, `datasource_type_id`, `parameter`, `create_time`, `create_user`, `modify_time`, `modify_user`) VALUES ('开发环境SIT', '开发环境SIT', 4, '{"uris":"thrift://${HIVE_METADATA_IP}:${HIVE_METADATA_PORT}", "hadoopConf":{"hive.metastore.execute.setugi":"true"}}', now(), NULL, now(), NULL);
+INSERT INTO `linkis_ps_dm_datasource_env` (`env_name`, `env_desc`, `datasource_type_id`, `parameter`, `create_time`, `create_user`, `modify_time`, `modify_user`) VALUES ('开发环境UAT', '开发环境UAT', 4, '{"uris":"thrift://${HIVE_METADATA_IP}:${HIVE_METADATA_PORT}", "hadoopConf":{"hive.metastore.execute.setugi":"true"}}', now(), NULL, now(), NULL);
+```
+
+If the hive data source needs kerberos authentication, you need to specify a keyTab parameter in the parameter field of the linkis_ps_dm_datasource_env table; how to obtain its value is described in [Setting and authenticating the hive data source in Linkis](https://linkis.apache.org/zh-CN/docs/latest/auth/token).
+
+#### 1.4 Underlying component checking
+
+Please ensure that DSS1.1.2 and Linkis1.4.0 are basically available: HiveQL scripts can be executed in the DSS front-end interface, and DSS workflows can be created and executed normally.
+
+## 2. Exchangis installation and deployment
+
+### 2.1 Prepare installation package
+
+#### 2.1.1 Download binary package
+
+Download the latest installation package from the Releases page of Exchangis: [click to jump to the release interface](https://github.com/WeBankFinTech/Exchangis/releases).
+
+#### 2.1.2 Compile and package
+
+  Execute the following command in the root directory of the project:
+
+```shell script
+  mvn clean install
+```
+
+  After successful compilation, the installation package will be generated in the `assembly-package/target` directory of the project.
+
+### 2.2 Unzip the installation package
+
+  Execute the following command to decompress:
+
+```shell script
+  tar -zxvf wedatasphere-exchangis-{VERSION}.tar.gz
+```
+
+  The directory structure after decompression is as follows:
+
+```
+|-- config: One-click installation deployment parameter configuration directory
+|-- db: Database initialization SQL directory
+|-- exchangis-extds
+|-- packages: Exchangis installation package directory
+  |-- exchangis-extds: exchangis datasource library
+  |-- lib: library
+|-- sbin: Script storage directory
+```
+
+### 2.3 Modify configuration parameters
+
+```shell script
+  vim config/config.sh
+```
+
+```shell script
+#IP of the LINKIS_GATEWAY service address, which is used to find the linkis-mg-gateway service.
+LINKIS_GATEWAY_HOST=
+
+#Port of the LINKIS_GATEWAY service address, which is used to find the linkis-mg-gateway service.
+LINKIS_GATEWAY_PORT=
+
+#The URL of the LINKIS_GATEWAY service address, composed of the above two parts.
+LINKIS_SERVER_URL=
+
+#Token used for request verification of the Linkis service, which can be obtained in ${LINKIS_INSTALL_HOME}/conf/token.properties of the Linkis installation directory.
+LINKIS_TOKEN=
+
+#Eureka service port
+EUREKA_PORT=
+
+#Eureka service URL
+DEFAULT_ZONE=
+```
+
+### 2.4 Modify database configuration
+
+```shell script
+  vim config/db.sh
+```
+
+```shell script
+# Set the connection information of the database,
+# including IP address, port, user name, password and database name.
+MYSQL_HOST=
+MYSQL_PORT=
+MYSQL_USERNAME=
+MYSQL_PASSWORD=
+DATABASE=
+```
+
+### 2.5 Installation and startup
+
+#### 2.5.1 Execute the one-click installation script
+
+  Execute the install.sh script to complete the one-click installation and deployment:
+
+```shell script
+  sh sbin/install.sh
+```
+
+#### 2.5.2 Installation steps
+
+  This script is an interactive installation. After executing the install.sh script, the installation is divided into the following steps:
+
+1. Initialize database tables.
+
+   When the reminder appears: Do you want to configure and install the project?
+
+   Enter `y` to start installing the Exchangis service, or `n` to skip it.
+
+#### 2.5.3 Change the path of the configuration file and log file
+
+In the `env.properties` file in the sbin directory, set the configuration file path and log file path:
+
+```properties
+EXCHANGIS_CONF_PATH="/appcom/config/exchangis-config/background"
+EXCHANGIS_LOG_PATH="/appcom/logs/exchangis/background"
+MODULE_DEFAULT_PREFIX="dss-exchangis-main-"
+MODULE_DEFAULT_SUFFIX="-dev"
+```
+
+EXCHANGIS_CONF_PATH indicates the configuration file path, and EXCHANGIS_LOG_PATH indicates the log file path. If the preceding configurations are used, perform the following operations:
+
+```shell script
+cd {EXCHANGIS_DEPLOY_PATH}
+cp -r config /appcom/config/exchangis-config/background
+mkdir -p /appcom/logs/exchangis/background
+```
+
+When the service is started, the configuration files in the corresponding path are used and logs are written to the corresponding path.
+
+#### 2.5.4 Start the service
+
+Execute the following command to start Exchangis Server:
+
+```shell script
+  sh sbin/daemon.sh start server
+```
+
+  You can also use the following command to restart Exchangis Server:
+
+```shell script
+./sbin/daemon.sh restart server
+```
+
+After executing the startup script, the following prompt will appear, and the Eureka address will also be printed in the console when starting the service:
+
+![register_eureka](../../../images/zh_CN/ch1/register_eureka.png)
+
+### 2.6 Check whether the service started successfully
+
+You can check whether the service started successfully on the Eureka page:
+
+Open http://${EUREKA_INSTALL_IP}:${EUREKA_PORT} (Chrome is recommended) and check whether the service is registered successfully.
+
+As shown in the figure below:
+
+![eureka_exchangis](../../../images/zh_CN/ch1/eureka_exchangis.png)
+
+### 2.7 Front-end installation and deployment
+
+#### 2.7.1 Get the front-end installation package
+
+Exchangis provides a compiled front-end installation package by default, which can be downloaded and used directly: [Click to jump to the Release interface](https://github.com/WeBankFinTech/Exchangis/releases)
+
+You can also compile the Exchangis front-end yourself by executing the following commands in the Exchangis root directory:
+
+```shell script
+  cd web
+  npm i
+  npm run build
+```
+
+Get the compiled dist.zip front-end package from the `web/` path.
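+
+Incidentally, the service registration from section 2.6 can also be verified from the command line rather than the browser. A minimal sketch, assuming Eureka's standard REST endpoint:
+
+```shell script
+# Query Eureka's application list and look for the Exchangis service.
+curl -s http://${EUREKA_INSTALL_IP}:${EUREKA_PORT}/eureka/apps | grep -i exchangis
+```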
+
+The acquired front-end package can be placed anywhere on the server; it is recommended to keep it in the same directory as the back-end installation address, place it there and unzip it.
+
+#### 2.7.2 Front-end installation and deployment
+
+1. Decompress the front-end installation package
+
+   If you plan to deploy the Exchangis front-end package to the directory `/appcom/Install/exchangis/web`, please copy `dist.zip` to the directory and extract it:
+
+```shell script
+  # Please copy the Exchangis front-end package to the `/appcom/Install/exchangis/web` directory first.
+  cd /appcom/Install/exchangis/web
+  unzip dist.zip
+```
+
+  Execute the following command:
+
+```shell script
+  vim /etc/nginx/conf.d/exchangis.conf
+```
+
+```
+        server {
+            listen       8098; # Access port. If this port is occupied, it needs to be modified.
+            server_name  localhost;
+            #charset koi8-r;
+            #access_log  /var/log/nginx/host.access.log  main;
+            location /dist {
+            root   /appcom/Install/exchangis/web; # Exchangis front-end deployment directory
+            autoindex on;
+            }
+
+            location /api {
+            proxy_pass http://127.0.0.1:9020; # The address of the back-end Linkis needs to be modified.
+            proxy_set_header Host $host;
+            proxy_set_header X-Real-IP $remote_addr;
+            proxy_set_header x_real_ip $remote_addr;
+            proxy_set_header remote_addr $remote_addr;
+            proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
+            proxy_http_version 1.1;
+            proxy_connect_timeout 4s;
+            proxy_read_timeout 600s;
+            proxy_send_timeout 12s;
+            proxy_set_header Upgrade $http_upgrade;
+            proxy_set_header Connection upgrade;
+            }
+
+            #error_page  404              /404.html;
+            # redirect server error pages to the static page /50x.html
+            #
+            error_page   500 502 503 504  /50x.html;
+            location = /50x.html {
+            root   /usr/share/nginx/html;
+            }
+        }
+```
+
+#### 2.7.3 Start nginx and visit the front page
+
+  After the configuration is complete, use the following command to reload the nginx configuration:
+
+```shell script
+  nginx -s reload
+```
+
+Please visit the Exchangis front-end page at http://${EXCHANGIS_INSTALL_IP}:8098/#/projectManage. If the following interface appears, Exchangis is successfully installed on the front end. To actually try Exchangis, you need to install DSS and Linkis, and log in password-free through DSS. As shown in the following figure :
+
+![image](https://user-images.githubusercontent.com/27387830/170417473-af0b4cbe-758e-4800-a58f-0972f83d87e6.png)
+
+## 3. DSS ExchangisAppConn installation and deployment
+
+If you want to use the Exchangis front-end, you also need to install the DSS ExchangisAppConn plugin. Please refer to: [ExchangisAppConn plugin installation documentation](docs/en_US/ch1/exchangis_appconn_deploy_en.md)
+
+## 4. Linkis Sqoop engine installation and deployment
+
+If you want to execute Exchangis's Sqoop jobs normally, you also need to install the Linkis Sqoop engine. Please refer to: [Linkis Sqoop engine installation documentation](https://linkis.apache.org/zh-CN/docs/1.1.2/engine-usage/sqoop/)
+
+## 5. How to log in and use Exchangis
+
+For more instructions on how to log in and use Exchangis, please refer to the [Exchangis user manual](docs/en_US/ch1/exchangis_user_manual_en.md)
diff --git a/docs/en_US/ch1/exchangis_interface_en.md b/docs/en_US/ch1/exchangis_interface_en.md
new file mode 100644
index 000000000..019f4f2aa
--- /dev/null
+++ b/docs/en_US/ch1/exchangis_interface_en.md
@@ -0,0 +1,839 @@
+# Exchangis interface document
+
+## Exchangis datasource module
+
+### 1、Get datasource type
+
+Interface description: Get the datasource types according to the request information
+
+Request URL: /dss/exchangis/main/datasources/type
+
+Request mode: GET
+
+Request parameters:
+
+| Name | Type | If required | Default value | Remark |
+| ------- | ------------------ | ----------- | ------------- | ------- |
+| request | HttpServletRequest | yes | / | request |
+
+Return parameter:
+
+| Name | Type | If required | Default value | Remark |
+| ------- | ------ | ----------- | ------------- | ---------------------------- |
+| method | String | yes | | Called method (request path) |
+| status | int | yes | | Response status code |
+| message | String | no | | Information of the response |
+| data | List | yes | | The returned data |
+
+### 2、Query datasource
+
+Interface description: Query the required datasources according to the vo
+
+Request URL: /dss/exchangis/main/datasources/query
+
+Request mode: GET, POST
+
+Request parameters:
+
+| Name | Type | If required | Default value | Remark |
+| ------- | ------------------ | ----------- | ------------- | ------- |
+| request | HttpServletRequest | yes | / | request |
+| vo | DataSourceQueryVO | yes | / | |
+
+Return parameter:
+
+| Name | Type | If required | Default value | Remark |
+| ------- | ------ | ----------- | ------------- | ---------------------------- |
+| method | String | yes | / | Called method (request path) |
+| status | int | yes | / | Response status code |
+| message | String | no | / | Information of the response |
+| data | List | yes | / | The returned data |
+
+### 3、Query datasource
+
+Interface description: Query datasources according to the request information
+
+Request URL: /dss/exchangis/main/datasources
+
+Request mode: GET
+
+Request parameters:
+
+| Name | Type | If required | Default value | Remark |
+| -------- | ------------------ | ----------- | ------------- | ------------------- |
+| request | HttpServletRequest | yes | / | request |
+| typeId | Long | yes | / | datasource typeId |
+| typeName | String | yes | / | datasource typeName |
+| page | Integer | yes | / | page num |
+| size | Integer | yes | / | size per page |
+
+Return parameter:
+
+| Name | Type | If required | Default value | Remark |
+| ------- | ------ | ----------- | ------------- | ---------------------------- |
+| method | String | yes | / | Called method (request path) |
+| status | int | yes | / | Response status code |
+| message | String | no | / | Information of the response |
+| data | List | yes | / | The returned data |
+
+### 4、Query datasource keydefines
+
+Interface description: Query the key definitions of a datasource according to the datasource type ID
+
+Request URL: /dss/exchangis/main/datasources/types/{dataSourceTypeId}/keydefines
+
+Request mode: GET
+
+Request parameters:
+
+| Name | Type | If required | Default value | Remark |
+| ---------------- | ------------------ | ----------- | ------------- | ----------------- |
+| request | HttpServletRequest | yes | / | |
+| dataSourceTypeId | Long | yes | / | dataSource typeId |
+
+Return parameter:
+
+| Name | Type | If required | Default value | Remark |
+| ------- | ---------------------- | ----------- | ------------- | ---------------------------- |
+| method | String | yes | / | Called method (request path) |
+| status | int | yes | / | Response status code |
+| message | String | no | / | Information of the response |
+| data | List[Map[String, Any]] | yes | / | The returned data |
+
+### 5、Get datasource version
+
+Interface description: Get the datasource versions according to the datasource ID
+
+Request URL: /dss/exchangis/main/datasources/{id}/versions
+
+Request mode: GET
+
+Request parameters:
+
+| Name | Type | If required | Default value | Remark |
+| ------- | ------------------ | ----------- | ------------- | ------------- |
+| request | HttpServletRequest | yes | / | request |
+| id | Long | yes | / | datasource id |
+
+Return parameter:
+
+| Name | Type | If required | Default value | Remark |
+| ------- | ---------------------- | ----------- | ------------- | ---------------------------- |
+| method | String | yes | / | Called method (request path) |
+| status | int | yes | / | Response status code |
+| message | String | no | / | Information of the response |
+| data | List[Map[String, Any]] | yes | / | The returned data |
+
+### 6、Create datasource
+
+Interface description: Create a datasource according to the DataSourceCreateVO
+
+Request URL: /dss/exchangis/main/datasources
+
+Request mode: POST
+
+Request parameters:
+
+| Name | Type | If required | Default value | Remark |
+| ------------------ | ------------------ | ----------- | ------------- | ------------------ |
+| request | HttpServletRequest | yes | / | request |
+| dataSourceCreateVO | DataSourceCreateVO | yes | / | dataSourceCreateVO |
+| bindingResult | BindingResult | yes | / | bindingResult |
+
+Return parameter:
+
+| Name | Type | If required | Default value | Remark |
+| ------- | ------ | ----------- | ------------- | ---------------------------- |
+| method | String | yes | / | Called method (request path) |
+| status | int | yes | / | Response status code |
+| message | String | no | / | Information of the response |
+| data | Long | yes | / | The returned data |
+
+### 7、Get datasource information
+
+Interface description: Get datasource information according to the request information, datasource ID and version ID
+
+Request URL: /dss/exchangis/main/datasources/{id}
+
+Request mode: GET
+
+Request parameters:
+
+| Name | Type | If required | Default value | Remark |
+| --------- | ------------------ | ----------- | ------------- | ------------- |
+| request | HttpServletRequest | yes | / | request |
+| id | Long | yes | / | datasource id |
+| versionId | String | yes | / | version id |
+
+Return parameter:
+
+| Name | Type | If required | Default value | Remark |
+| ------- | ----------------- | ----------- | ------------- | ---------------------------- |
+| method | String | yes | / | Called method (request path) |
+| status | int | yes | / | Response status code |
+| message | String | no | / | Information of the response |
+| data | DataSourceItemDTO | yes | / | The returned data |
+
+### 8、Get datasource connect params
+
+Interface description: Get the datasource connection parameters according to the request information and the datasource ID
+
+Request URL: /dss/exchangis/main/datasources/{id}/connect_params
+
+Request mode: GET
+
+Request parameters:
+
+| Name | Type | If required | Default value | Remark |
+| ------- | ------------------ | ----------- | ------------- | ------------- |
+| request | HttpServletRequest | yes | / | request |
+| id | Long | yes | / | datasource id |
+
+Return parameter:
+
+| Name | Type | If required | Default value | Remark |
+| ------- | -------------------------- | ----------- | ------------- | ---------------------------- |
+| method | String | yes | / | Called method (request path) |
+| status | int | yes | / | Response status code |
+| message | String | no | / | Information of the response |
+| data | java.util.Map[String, Any] | yes | / | The returned data |
+
+### 9、Update datasource
+
+Interface description: Update the datasource according to the request information, datasource ID and DataSourceCreateVO
+
+Request URL: /dss/exchangis/main/datasources/{id}
+
+Request mode: PUT
+
+Request parameters:
+
+| Name | Type | If required | Default value | Remark |
+| ------------------ | ------------------ | ----------- | ------------- | ------------------ |
+| request | HttpServletRequest | yes | / | request |
+| id | Long | yes | / | datasource id |
+| dataSourceCreateVO | DataSourceCreateVO | yes | / | dataSourceCreateVO |
+| bindingResult | BindingResult | yes | / | bindingResult |
+
+Return parameter:
+
+| Name | Type | If required | Default value | Remark |
+| ------- | ------ | ----------- | ------------- | ---------------------------- |
+| method | String | yes | / | Called method (request path) |
+| status | int | yes | / | Response status code |
+| message | String | no | / | Information of the response |
+| data | Map | yes | / | The returned data |
+
+### 10、Publish datasource
+
+Interface description: Publish the datasource according to the request information, datasource ID and version ID
+
+Request URL: /dss/exchangis/main/datasources/{id}/{version}/publish
+
+Request mode: PUT
+
+Request parameters:
+
+| Name | Type | If required | Default value | Remark |
+| ------- | ------------------ | ----------- | ------------- | --------------------- |
+| request | HttpServletRequest | yes | / | request |
+| id | Long | yes | / | datasource id |
+| version | Long | yes | / | datasource version id |
+
+Return parameter:
+
+| Name | Type | If required | Default value | Remark |
+| ------- | ------ | ----------- | ------------- | ---------------------------- |
+| method | String | yes | / | Called method (request path) |
+| status | int | yes | / | Response status code |
+| message | String | no | / | Information of the response |
+| data | Map | yes | / | The returned data |
+
+### 11、Expire datasource
+
+Interface description: Expire the datasource according to the request information and the datasource ID
+
+Request URL: /dss/exchangis/main/datasources/{id}/expire
+
+Request mode: PUT
+
+Request parameters:
+
+| Name | Type | If required | Default value | Remark |
+| ------- | ------------------ | ----------- | ------------- | ------------- |
+| request | HttpServletRequest | yes | / | request |
+| id | Long | yes | / | datasource id |
+
+Return parameter:
+
+| Name | Type | If required | Default value | Remark |
+| ------- | ------ | ----------- | ------------- | ---------------------------- |
+| method | String | yes | / | Called method (request path) |
+| status | int | yes | / | Response status code |
+| message | String | no | / | Information of the response |
+| data | Map | yes | / | The returned data |
+
+### 12、Connect datasource
+
+Interface description: Connect the datasource according to the request information, datasource ID and version ID
+
+Request URL: /dss/exchangis/main/datasources/{id}/{version}/connect
+
+Request mode: PUT
+
+Request parameters:
+
+| Name | Type | If required | Default value | Remark |
+| ------- | ------------------ | ----------- | ------------- | --------------------- |
+| request | HttpServletRequest | yes | / | request |
+| id | Long | yes | / | datasource id |
+| version | Long | yes | / | datasource version id |
+
+Return parameter:
+
+| Name | Type | If required | Default value | Remark |
+| ------- | ------ | ----------- | ------------- | ---------------------------- |
+| method | String | yes | / | Called method (request path) |
+| status | int | yes | / | Response status code |
+| message | String | no | / | Information of the response |
+| data | Map | yes | / | The returned data |
+
+### 13、Connect datasource
+
+Interface description: Connect the datasource according to the request information and the datasource ID
+
+Request URL: /dss/exchangis/main/datasources/op/connect
+
+Request mode: POST
+
+Request parameters:
+
+| Name | Type | If required | Default value | Remark |
+| ------- | ------------------ | ----------- | ------------- | ------------- |
+| request | HttpServletRequest | yes | / | request |
+| id | Long | yes | / | datasource id |
+
+Return parameter:
+
+| Name | Type | If required | Default value | Remark |
+| ------- | ------ | ----------- | ------------- | ---------------------------- |
+| method | String | yes | / | Called method (request path) |
+| status | int | yes | / | Response status code |
+| message | String | no | / | Information of the response |
+| data | Map | yes | / | The returned data |
+
+### 14、Delete datasource
+
+Interface description: Delete the datasource according to the request information and the datasource ID
+
+Request URL: /dss/exchangis/main/datasources/{id}
+
+Request mode: DELETE
+
+Request parameters:
+
+| Name | Type | If required | Default value | Remark |
+| ---------------- | ------------------ | ----------- | ------------- | ----------------- |
+| request | HttpServletRequest | yes | / | request |
+| dataSourceTypeId | Long | yes | / | dataSource typeId |
+
+Return parameter:
+
+| Name | Type | If required | Default value | Remark |
+| ------- | ------ | ----------- | ------------- | ---------------------------- |
+| method | String | yes | / | Called method (request path) |
+| status | int | yes | / | Response status code |
+| message | String | no | / | Information of the response |
+| data | Long | yes | / | The returned data |
+
+### 15、Query datasource by datasourceId
+
+Interface description: Query the databases of the datasource according to the request information, datasource type and datasource ID
+
+Request URL: /dss/exchangis/main/datasources/{type}/{id}/dbs
+
+Request mode: GET
+
+Request parameters:
+
+| Name | Type | If required | Default value | Remark |
+| ------- | ------------------ | ----------- | ------------- | --------------- |
+| request | HttpServletRequest | yes | / | request |
+| type | String | yes | / | datasource type |
+| id | Long | yes | / | datasource id |
+
+Return parameter:
+
+| Name | Type | If required | Default value | Remark |
+| ------- | -------------- | ----------- | ------------- | ---------------------------- |
+| method | String | yes | / | Called method (request path) |
+| status | int | yes | / | Response status code |
+| message | String | no | / | Information of the response |
+| data | List | yes | / | The returned data |
+
+### 16、Get table by datasourceId and datasourceName
+
+Interface description: Get the data table list according to the request information, datasource type, datasource ID and database name.
+
+Request URL: /dss/exchangis/main/datasources/{type}/{id}/dbs/{dbName}/tables
+
+Request mode: GET
+
+Request parameters:
+
+| Name | Type | If required | Default value | Remark |
+| ------- | ------------------ | ----------- | ------------- | --------------- |
+| request | HttpServletRequest | yes | / | request |
+| type | String | yes | / | datasource type |
+| id | Long | yes | / | datasource id |
+| dbName | String | yes | / | database name |
+
+Return parameter:
+
+| Name | Type | If required | Default value | Remark |
+| ------- | -------------- | ----------- | ------------- | ---------------------------- |
+| method | String | yes | / | Called method (request path) |
+| status | int | yes | / | Response status code |
+| message | String | no | / | Information of the response |
+| data | List | yes | / | The returned data |
+
+### 17、Get table fields by datasourceId, datasourceName and table
+
+Interface description: Get the table fields according to the request information, datasource type, datasource ID, database name and data table.
+
+Request URL: /dss/exchangis/main/datasources/{type}/{id}/dbs/{dbName}/tables/{tableName}/fields
+
+Request mode: GET
+
+Request parameters:
+
+| Name | Type | If required | Default value | Remark |
+| --------- | ------------------ | ----------- | ------------- | --------------- |
+| request | HttpServletRequest | yes | / | request |
+| type | String | yes | / | datasource type |
+| id | Long | yes | / | datasource id |
+| dbName | String | yes | / | database name |
+| tableName | String | yes | / | table name |
+
+Return parameter:
+
+| Name | Type | If required | Default value | Remark |
+| ------- | ---------------------------------- | ----------- | ------------- | ---------------------------- |
+| method | String | yes | / | Called method (request path) |
+| status | int | yes | / | Response status code |
+| message | String | no | / | Information of the response |
+| data | List | yes | / | The returned data |
+
+### 18、Get fieldsmapping
+
+Interface description: Get the table field mapping information according to the request information and the field mapping VO
+
+Request URL: /dss/exchangis/main/datasources/fieldsmapping
+
+Request mode: POST
+
+Request parameters:
+
+| Name | Type | If required | Default value | Remark |
+| ------- | ------------------ | ----------- | ------------- | -------------- |
+| request | HttpServletRequest | yes | / | request |
+| vo | FieldMappingVO | yes | / | FieldMappingVO |
+
+Return parameter:
+
+| Name | Type | If required | Default value | Remark |
+| ------- | ------------------------- | ----------- | ------------- | ---------------------------- |
+| method | String | yes | / | Called method (request path) |
+| status | int | yes | / | Response status code |
+| message | String | no | / | Information of the response |
+| data | List | yes | / | The returned data |
+
+### 19、Get params by datasourceType
+
+Interface description: Get parameters according to the request information, engine, datasource type and file system path
+
+Request URL: /dss/exchangis/main/datasources/{engine}/{type}/params/ui
+
+Request mode: POST
+
+Request parameters:
+
+| Name | Type | If required | Default value | Remark |
+| ------- | ------------------ | ----------- | ------------- | --------------- |
+| request | HttpServletRequest | yes | / | request |
+| engine | String | yes | / | engine |
+| type | String | yes | / | datasource type |
+| dir | String | yes | / | dir |
+
+Return parameter:
+
+| Name | Type | If required | Default value | Remark |
+| ------- | ------ | ----------- | ------------- | ---------------------------- |
+| method | String | yes | / | Called method (request path) |
+| status | int | yes | / | Response status code |
+| message | String | no | / | Information of the response |
+| data | List | yes | / | The returned data |
+
+
+
+## Exchangis job execution module
+
+### 1、Submit the configured job for execution
+
+Interface description: Submit the ExchangisJob for execution in the background and return the jobExecutionId.
+
+Request URL: /api/rest_j/v1/exchangis/job/{id}/execute
+
+Request mode: POST
+
+Request parameters:
+
+| Name | Type | Remark | If required | Default value |
+| --------------------- | ------- | ------------------------------------------------------------ | ----------- | ------------- |
+| id | Long | ID of the ExchangisJob | yes | / |
+| permitPartialFailures | boolean | Whether partial failure is allowed. If true, even if some subtasks fail, the whole Job will continue to execute; after the execution is completed, the Job status is Partial_Success. This parameter is a requestBody parameter. | no | false |
+
+Return parameter:
+
+| Name | Type | Remark | If required | Default value |
+| -------------- | ------ | ------------------------------------ | ----------- | ------------- |
+| method | String | Called method (request path) | yes | / |
+| status | int | Response status code | yes | / |
+| message | String | Information of the response | no | / |
+| data | Map | The returned data | yes | / |
+| jobExecutionId | String | Execution ID of the Job | yes | / |
+
+Response example:
+
+```json
+{
+    "method": "/api/rest_j/v1/exchangis/job/{id}/execute",
+    "status": 0,
+    "message": "Submitted succeed(Submit successfully)!",
+    "data": {
+        "jobExecutionId": "555node1node2node3execId1"
+    }
+}
+```
+
+### 2、Get the execution status of Job
+
+Interface description: Get the status of the Job according to the jobExecutionId
+
+Request URL: /api/rest_j/v1/exchangis/job/execution/{jobExecutionId}/status
+
+Request mode: GET
+
+Request parameters:
+
+| Name | Type | Remark | If required | Default value |
+| -------------- | ------ | ---------------------------- | ----------- | ------------- |
+| jobExecutionId | String | Execution ID of ExchangisJob | yes | / |
+
+Return parameter:
+
+| Name | Type | Remark | If required | Default value |
+| -------------- | ------ | ------------------------------------------------------------ | ----------- | ------------- |
+| method | String | Called method (request path) | yes | / |
+| status | int | Response status code | yes | / |
+| message | String | Information of the response | no | / |
+| data | Map | The returned data | yes | / |
+| status | String | The status of the executed Job, including: Inited, Scheduled, Running, WaitForRetry, Cancelled, Failed, Partial_Success, Success, Undefined, Timeout. Running indicates the Job is running; all states from Cancelled onward are terminal states | yes | / |
+
+Response example:
+
+```json
+{
+    "method": "/api/rest_j/v1/exchangis/job/execution/{id}/status",
+    "status": 0,
+    "message": "Submitted succeed(Submit successfully)!",
+    "data": {
+        "status": "Running",
+        "progress": 0.1
+    }
+}
+```
+
+### 3、Get the task list executed by this Job
+
+Interface description: Get the task list through the jobExecutionId
+
+Prerequisite: the task list can only be obtained after the Job's execution status is Running; otherwise the returned task list is empty.
+
+Request URL: /api/rest_j/v1/exchangis/job/execution/{jobExecutionId}/tasklist
+
+Request mode: GET
+
+Request parameters:
+
+| Name | Type | Remark | If required | Default value |
+| -------------- | ------ | ---------------------------- | ----------- | ------------- |
+| jobExecutionId | String | Execution ID of ExchangisJob | yes | / |
+
+Return parameter:
+
+| Name | Type | Remark | If required | Default value |
+| -------------- | ------ | ------------------------------------------------------------ | ----------- | ------------- |
+| method | String | Called method (request path) | yes | / |
+| status | int | Response status code | yes | / |
+| message | String | Information of the response | no | / |
+| data | Map | The returned data | yes | / |
+| tasks | List | Task list. The execution status of the Job must be Running before you can get the task list; otherwise the returned task list is empty. Please note: a task has no Partial_Success status | yes | / |
+
+Response example:
+
+```json
+{
+    "method": "/api/rest_j/v1/exchangis/job/execution/{jobExecutionId}/tasklist",
+    "status": 0,
+    "message": "Submitted succeed(Submit successfully)!",
+    "data": {
+        "tasks": [
+            {
+                "taskId": 5,
+                "name": "test-1",
+                "status": "Inited",  // There is no task Partial_Success status.
+                "createTime": "2022-01-03 09:00:00",
+                "launchTime": null,
+                "lastUpdateTime": "2022-01-03 09:00:00",
+                "engineType": "sqoop",
+                "linkisJobId": null,
+                "linkisJobInfo": null,
+                "executeUser": "enjoyyin"
+            }
+        ]
+    }
+}
+```
+
+### 4、Get the execution progress of Job & task
+
+Interface description: Get the execution progress through the jobExecutionId
+
+Prerequisites: the execution status of the Job must be Running before you can get the progress of the task list.
+
+Request URL: /api/rest_j/v1/exchangis/job/execution/{jobExecutionId}/progress
+
+Request mode: GET
+
+Request parameters:
+
+| Name | Type | Remark | If required | Default value |
+| -------------- | ------ | ---------------------------- | ----------- | ------------- |
+| jobExecutionId | String | Execution ID of ExchangisJob | yes | / |
+
+Return parameter:
+
+| Name | Type | Remark | If required | Default value |
+| -------------- | ------ | ------------------------------------------------------------ | ----------- | ------------- |
+| method | String | Called method (request path) | yes | / |
+| status | int | Response status code | yes | / |
+| message | String | Information of the response | no | / |
+| data | Map | The returned data | yes | / |
+| job | Map | Execution progress of the Job and its tasks. The execution status of the Job must be Running before you can get it; otherwise the returned content is empty | yes | / |
+
+Response example:
+
+```json
+{
+    "method": "/api/rest_j/v1/exchangis/job/execution/{jobExecutionId}/progress",
+    "status": 0,
+    "message": "Submitted succeed(Submit successfully)!",
+    "data": {
+        "job": {
+            "status": "Running",
+            "progress": 0.1,
+            "tasks": {
+                "running": [
+                    {
+                        "taskId": 5,
+                        "name": "test-1",
+                        "status": "Running",
+                        "progress": 0.1
+                    }
+                ],
+                "Inited": [
+                    {
+                        "taskId": 5,
+                        "name": "test-1",
+                        "status": "Inited",
+                        "progress": 0.1
+                    }
+                ],
+                "Scheduled": [],
+                "Success": [],
+                "Failed": [],  // If there is a Failed task, the Job will fail directly.
+                "WaitForRetry": [],
+                "Cancelled": [],  // If there is a Cancelled task, the Job will fail directly
+                "Undefined": [],  // If there is an Undefined task, the Job will fail directly
+                "Timeout": []
+            }
+        }
+    }
+}
+```
+
+### 5、Get the indicator information of task runtime
+
+Interface description: Get the runtime indicator information of a task through the jobExecutionId and taskId.
+
+Prerequisites: the execution status of the task must be Running before you can get its indicator information; otherwise, the returned information is empty.
+
+Request URL: /api/rest_j/v1/exchangis/task/execution/{taskId}/metrics
+
+Request mode: POST
+
+Request parameters:
+
+| Name | Type | Remark | If required | Default value |
+| -------------- | ------ | ------------------------------------------------------ | ----------- | ------------- |
+| jobExecutionId | String | Execution ID of ExchangisJob, put it in the requestBody | yes | / |
+| taskId | String | Execution ID of the task, put it in the URI | yes | / |
+
+Return parameter:
+
+| Name | Type | Remark | If required | Default value |
+| -------------- | ------ | ------------------------------------------------------------ | ----------- | ------------- |
+| method | String | Called method (request path) | yes | / |
+| status | int | Response status code | yes | / |
+| message | String | Information of the response | no | / |
+| data | Map | The returned data | yes | / |
+| task | Map | Indicator information of the task. The execution status of the task must be Running before you can get its indicator information | yes | / |
+
+Response example:
+
+```json
+{
+    "method": "/api/rest_j/v1/exchangis/task/execution/{taskId}/metrics",
+    "status": 0,
+    "message": "Submitted succeed(Submit successfully)!",
+    "data": {
+        "task": {
+            "taskId": 5,
+            "name": "test-1",
+            "status": "running",
+            "metrics": {
+                "resourceUsed": {
+                    "cpu": 10,    // Unit: vcores
+                    "memory": 20  // Unit: GB
+                },
+                "traffic": {
+                    "source": "mysql",
+                    "sink": "hive",
+                    "flow": 100  // Unit: Records/S
+                },
+                "indicator": {
+                    "exchangedRecords": 109345,  // Unit: Records
+                    "errorRecords": 5,
+                    "ignoredRecords": 5
+                }
+            }
+        }
+    }
+}
+```
+
+### 6、Get the real-time log of Job
+
+Interface description: Get the real-time log of the Job through the jobExecutionId.
+
+Request URL: /api/rest_j/v1/exchangis/job/execution/{jobExecutionId}/log?fromLine=&pageSize=&ignoreKeywords=&onlyKeywords=&lastRows=
+
+Request mode: GET
+
+Request parameters:
+
+| Name | Type | Remark | If required | Default value |
+| -------------- | ------ | ------------------------------------------------------------ | ----------- | ------------- |
+| jobExecutionId | String | Execution ID of ExchangisJob | yes | / |
+| fromLine | int | The starting line to read from | no | 0 |
+| pageSize | int | The number of log lines to read this time | no | 100 |
+| ignoreKeywords | String | Ignore the lines that contain these keywords; multiple keywords are separated by English commas | no | / |
+| onlyKeywords | String | Select only the lines that contain these keywords; multiple keywords are separated by English commas | no | / |
+| lastRows | int | Read only the last few lines of the log, which is equivalent to `tail -f log`. When this parameter is greater than 0, all the above parameters will be invalid | no | / |
+
+Return parameter:
+
+| Name | Type | Remark | If required | Default value |
+| ------- | ------- | ------------------------------------------------------------ | ----------- | ------------- |
+| method | String | Called method (request path) | yes | / |
+| status | int | Response status code | yes | / |
+| message | String | Information of the response | no | / |
+| data | Map | The returned data | yes | / |
+| endLine | int | The end line number of this read; next time you can continue reading the log from endLine+1 | yes | / |
+| isEnd | boolean | Whether all the logs have been read | no | / |
+| logs | Map | The execution logs of the Job | yes | / |
+
+Response example:
+
+```json
+{
+    "method": "/api/rest_j/v1/exchangis/job/execution/{jobExecutionId}/log",
+    "status": 0,
+    "message": "Submitted succeed(Submit successfully)!",
+    "data": {
+        "endLine": 99,   // The end line number of this read; next time you can continue reading the log from endLine+1
+        "isEnd": false,  // Whether all the logs have been read
+        "logs": {
+            "all": "",
+            "error": "",
+            "warn": "",
+            "info": ""
+        }
+    }
+}
+```
+
+### 7、Get the real-time log of task
+
+Interface description: Get the real-time log of a task through the jobExecutionId and taskId.
+
+Request URL: /api/rest_j/v1/exchangis/task/execution/{taskId}/log?jobExecutionId=&fromLine=&pageSize=&ignoreKeywords=&onlyKeywords=&lastRows=
+
+Request mode: GET
+
+Request parameters:
+
+| Name | Type | Remark | If required | Default value |
+| -------------- | ------ | ------------------------------------------------------------ | ----------- | ------------- |
+| taskId | String | Execution ID of the task | yes | / |
+| jobExecutionId | String | Execution ID of ExchangisJob | yes | / |
+| fromLine | int | The starting line to read from | no | 0 |
+| pageSize | int | The number of log lines to read this time | no | 100 |
+| ignoreKeywords | String | Ignore the lines that contain these keywords; multiple keywords are separated by English commas | no | / |
+| onlyKeywords | String | Select only the lines that contain these keywords; multiple keywords are separated by English commas | no | / |
+| lastRows | int | Read only the last few lines of the log, which is equivalent to `tail -f log`. When this parameter is greater than 0, all the above parameters will be invalid | no | / |
+
+Return parameter:
+
+| Name | Type | Remark | If required | Default value |
+| ------- | ------- | ------------------------------------------------------------ | ----------- | ------------- |
+| method | String | Called method (request path) | yes | / |
+| status | int | Response status code | yes | / |
+| message | String | Information of the response | no | / |
+| data | Map | The returned data | yes | / |
+| endLine | int | The end line number of this read; next time you can continue reading the log from endLine+1 | yes | / |
+| isEnd | boolean | Whether all the logs have been read | yes | / |
+| logs | Map | The execution logs of the task | yes | / |
+
+Response example:
+
+```json
+{
+    "method": "/api/rest_j/v1/exchangis/task/execution/{taskId}/log",
+    "status": 0,
+    "message": "Submitted succeed(Submit successfully)!",
+    "data": {
+        "endLine": 99,   // The end line number of this read; next time you can continue reading the log from endLine+1
+        "isEnd": false,  // Whether all the logs have been read
+        "logs": {
+            "all": "",
+            "error": "",
+            "warn": "",
+            "info": ""
+        }
+    }
+}
+```
+
diff --git a/docs/en_US/ch1/exchangis_job_execute_en.md b/docs/en_US/ch1/exchangis_job_execute_en.md
new file mode 100644
index 000000000..b6e62bec8
--- /dev/null
+++ b/docs/en_US/ch1/exchangis_job_execute_en.md
@@ -0,0 +1,201 @@
+# Exchangis synchronous job execution module detailed design document
+
+## 1. Overall flow chart
+
+ ![img](../../../images/zh_CN/ch1/job_overall.png)
+
+
+Figure 1-1 General flow chart +
+
+ Please note that:
+
+1. If the user directly submits the JSON of a synchronization task to be executed through the REST client, the JSON can be submitted directly to the TaskGenerator without step 2.
+
+2. Every time the front end or REST client submits, a jobExecutionId is generated and returned to the front end, and the front end or REST client subsequently obtains the execution status of the synchronous job through this jobExecutionId.
+
+3. The jobExecutionId is best generated and returned at submission time, which means the TaskGenerator should run asynchronously. The TaskGenerator may take several seconds to several minutes (depending on the number of subJobs), so if you wait for the TaskGenerator to finish before returning the jobExecutionId, the front-end request will probably time out.
+
+4. A new jobExecutionId is generated for each submission precisely to support repeated submission of the same ExchangisJob. Therefore, in principle, the JobServer does not enforce that only one instance of an ExchangisJob runs at a time; instead, the web front end should ensure that, in the same browser, only one instance of the same ExchangisJob is executed at a time.
+
+## 2. Interaction between front end and back end
+
+![img](../../../images/zh_CN/ch1/job_frontend_backend.png)
+
+
+Figure 2-1 Interaction between front end and back end
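+
+The interaction above can be sketched from the command line. A minimal sketch, assuming a locally reachable gateway and the endpoints described in the Exchangis interface document:
+
+```shell
+# Submit job 123 for execution; the response carries a jobExecutionId.
+curl -X POST "http://${GATEWAY_HOST}:${GATEWAY_PORT}/api/rest_j/v1/exchangis/job/123/execute"
+
+# Poll the execution status with the returned jobExecutionId.
+curl "http://${GATEWAY_HOST}:${GATEWAY_PORT}/api/rest_j/v1/exchangis/job/execution/${jobExecutionId}/status"
+```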
+
+### 1. The necessity of jobExecutionId
+
+Consider the scenario where a REST client directly submits the JSON of a synchronization task it wants executed; to support repeated submission of the same ExchangisJob, a jobExecutionId must be generated for every submission.
+
+The jobExecutionId is the execution voucher of an ExchangisJob and is stored in the database. All subsequent requests about this execution of the ExchangisJob need to carry the jobExecutionId.
+
+### 2. The necessity of TaskGenerator asynchrony
+
+Consider the scenario where, after the client submits the job, the client hangs up before Exchangis can return the jobExecutionId. In this case, because the jobExecutionId is not printed in the client's log, the submitting user thinks the job was not submitted successfully, which may lead to data confusion. Furthermore, it may take a long time for the TaskGenerator to process an ExchangisJob (depending on the number of subJobs), so if you wait for the TaskGenerator to finish before returning the jobExecutionId, the front-end request will probably time out.
+
+Therefore, once the JobServer receives a job execution request, it should immediately generate a jobExecutionId, create an execution record for this ExchangisJob in the database, and set the execution status to Inited. As long as the record is persisted successfully, the JobServer asynchronously launches the TaskGenerator task and immediately returns the jobExecutionId.
+
+### 3. Statelessness of JobServer
+
+This section discusses whether the JobServer is stateless, that is, whether, after the front end gets a jobExecutionId, it can normally get the desired execution data no matter which JobServer instance it requests.
+
+Because no special information is stored in the JobServer's memory, and the ExchangisJob execution status, progress and Metrics information are all stored in the database, a front-end request only needs to fetch the relevant data from the database. Therefore, the JobServer is stateless.
+
+### 4. Multi-tenant function
+
+Considering the multi-tenant capability, we can split JobGenerator and JobExecution: JobGenerator receives job execution requests submitted by front-end/REST clients in a distributed manner, generates task sets and stores them in the database; this microservice can be shared by all tenants. JobExecution, on the other hand, can be split by tenant to avoid mutual interference during execution.
+
+### 5. High availability
+
+The TaskChooseRuler of JobExecution scans all ExchangisTasks in the database. If the status of an ExchangisTask has not been updated for more than a certain period of time, it will be taken over by a new JobServer.
+
+How to take over?
+
+A simple takeover means that all other surviving JobServers load this ExchangisTask into their TaskScheduler at the same time. Since they only update progress, status and Metrics information, concurrent updates by multiple servers have no impact on the task itself.
+
+A complex takeover requires adding a field to the ExchangisTask database table to identify the JobServer that is executing the ExchangisTask; multiple JobServers would then be triggered to compete for ownership of the ExchangisTask. Because this scheme is complex, it will not be considered for the time being.
+
+## 3. Detailed explanation of front-end interaction
+
+### 1. Submit
+
+Before execution, the page is shown in the following figure:
+
+Since the execution interface (which is linked to the save interface) must be given a jobId, the job needs to be saved before it is actually submitted for execution. A basic check is also made before submission: if the job has no subtasks, or the job fails to save, it cannot be submitted for execution.
+
+![img](../../../images/zh_CN/ch1/job_frontend_1.png)
+
+
+Figure 3-1 Task submission +
+
+Click execute, as shown in the figure below:
+
+Note that the job information console pops up at this moment, showing the running status by default, that is, the overall progress and the progress of all subtasks.
+
+The front end uses two interfaces here. First, the [Execution Interface] is called to submit the ExchangisJob for execution, and the background returns the jobExecutionId; second, the [Get Job Progress] interface is called with the jobExecutionId to get the progress information of the Job and all its tasks, which is used to render the progress on the following pages.
+
+![img](../../../images/zh_CN/ch1/job_frontend_2.png)
+
+
+Figure 3-2 Task Execution +
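+
+A minimal sketch of the progress polling described above, assuming the endpoint from the Exchangis interface document:
+
+```shell
+# Poll the overall progress of the Job and all of its tasks.
+curl "http://${GATEWAY_HOST}:${GATEWAY_PORT}/api/rest_j/v1/exchangis/job/execution/${jobExecutionId}/progress"
+```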
+
+### 2. Operation status of subtasks
+
+When the user clicks on a running/completed sub-job, the front end triggers the background [Get Task Metrics Information] interface, obtains the task Metrics information through jobExecutionId & taskId, and shows the content of the following page:
+
+![1655260735321](../../../images/zh_CN/ch1/job_frontend_3.png)
+
+
+Figure 3-3 Operation of subtasks +
+
+It shows the main resource usage, traffic rate and core indicators.
+
+![1655260937221](../../../images/zh_CN/ch1/job_frontend_4.png)
+
+
+Figure 3-4 Resource usage of subtasks +
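+
+A minimal sketch of the metrics request, assuming the task metrics endpoint from the Exchangis interface document (note that the jobExecutionId goes in the request body while the taskId goes in the URI):
+
+```shell
+# Fetch the runtime metrics of one task.
+curl -X POST "http://${GATEWAY_HOST}:${GATEWAY_PORT}/api/rest_j/v1/exchangis/task/execution/${taskId}/metrics" \
+  -H "Content-Type: application/json" \
+  -d "{\"jobExecutionId\": \"${jobExecutionId}\"}"
+```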
+
+### 3. Real-time log
+
+When the user clicks the "Log" button in the lower right corner as shown in the figure below, the "Real-time Log" Tab appears in the information console, displaying the real-time log of the Job by default. When the "Log" button of the running status is clicked, the running log of the whole Job is displayed first by default. At this time, the front end calls the [Get Job Real-time Log] interface by default, gets the Job log through the jobExecutionId and displays it, as shown in the following figure:
+
+![img](../../../images/zh_CN/ch1/job_frontend_5.png)
+
+
+Figure 3-5 Task Real-time Log +
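+
+A minimal command-line sketch of this log polling, assuming the log endpoints from the Exchangis interface document:
+
+```shell
+# Poll the Job log in pages; continue from endLine+1 of the previous response.
+curl "http://${GATEWAY_HOST}:${GATEWAY_PORT}/api/rest_j/v1/exchangis/job/execution/${jobExecutionId}/log?fromLine=0&pageSize=100"
+
+# Poll the log of a single task instead.
+curl "http://${GATEWAY_HOST}:${GATEWAY_PORT}/api/rest_j/v1/exchangis/task/execution/${taskId}/log?jobExecutionId=${jobExecutionId}&fromLine=0&pageSize=100"
+```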
+
+As long as the user doesn't switch to other tabs of the information console, the front end constantly polls the background for real-time logs.
+
+The user can also choose to view the log of a certain task through the select box, which triggers the [Get Task Real-time Log] interface; the task log is fetched through jobExecutionId & taskId, and the latest log is polled continuously.
+
+If the user switches the select box, the previous log will no longer be refreshed.
+
+It should be noted here that the background also provides a [Get Task List of this Job Execution] interface, which helps the front end get the full task list and populate the select box. If the Job itself is still in the Inited or Scheduled state and has not yet turned into the Running state, the task list cannot be pulled yet; so when the user opens the select box, the user should be prompted that "the Job is still being scheduled. Please check the real-time log of subtasks after the Job has turned into the Running state."
+
+After the operation is completed, if the status is successful, the Tab is switched back to the running-status Tab page; if the status is failed, based on the information returned by the [Get Job Progress] interface, the log of the failed sub-job's task is displayed by default, and when multiple tasks fail, the log of the first failed task is displayed automatically.
+
+## 4. Detailed explanation of background design
+
+### 1. Table structure design
+
+![img](../../../images/zh_CN/ch1/job_backend_datasource_design.png)
+
+
+Figure 4-1 Database Table Structure Design +
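+
+The figure above is the authoritative definition. As a purely illustrative sketch of the central idea (one persisted execution record per submission), with hypothetical table and column names rather than the actual schema:
+
+```sql
+-- Hypothetical sketch: one row per job execution, keyed by job_execution_id.
+CREATE TABLE exchangis_job_execution_sketch (
+    id               BIGINT PRIMARY KEY AUTO_INCREMENT,
+    job_execution_id VARCHAR(64) NOT NULL UNIQUE,  -- returned to the front end on submit
+    job_id           BIGINT NOT NULL,              -- the ExchangisJob being executed
+    status           VARCHAR(32) NOT NULL DEFAULT 'Inited',  -- Inited/Scheduled/Running/...
+    progress         DOUBLE NOT NULL DEFAULT 0,
+    create_time      DATETIME,
+    update_time      DATETIME
+);
+```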
+ +### 2. Interface document + +Please refer to the interface document of Exchangis job execution module for details. + +### 3. Core module & Core class design + +#### 3.1 The UML class diagram of the Bean is as follows: + +![img](../../../images/zh_CN/ch1/job_backend_uml_1.png) + + + +
+Figure 4-2 UML class diagram of entity Bean +
+
+Please note that all non-interface classes ending in Entity need to be persisted in the database, each existing as a table.
+
+#### 3.2 The UML class diagram structure of TaskGenerator is as follows:
+
+![img](../../../images/zh_CN/ch1/job_backend_uml_2.png)
+
+
+Figure 4-3 UML class diagram of TaskGenerator +
+
+TaskGenerator is only responsible for converting the JSON of a Job into a task set that can be submitted to Linkis for execution (that is, all subJobs under the Job are translated into a set of ExchangisTasks), and writing the translation result into the DB.
+
+It should be noted here that the TaskGenerator is executed asynchronously: a JobGenerationSchedulerTask is encapsulated in the Service layer and submitted asynchronously to TaskExecution for execution.
+
+#### 3.3 The UML class diagram structure of the TaskExecution system is as follows:
+
+![img](../../../images/zh_CN/ch1/job_backend_uml_3.png)
+
+
+Figure 4-4 UML class diagram of Task Execution system +
+
+1. TaskExecution is mainly composed of TaskConsumer, TaskManager, TaskScheduler and TaskSchedulerLoadBalancer.
+
+2. TaskManager is mainly used to manage all ExchangisTasks in the Running state under this JobServer.
+
+3. TaskConsumer consists of several thread groups with different functions, such as NewTaskConsumer and ReceiveTaskConsumer. NewTaskConsumer fetches all executable ExchangisTasks in the Inited state from the database (the ExchangisTask lists may correspond to multiple subJobs of multiple Jobs) and submits them to the TaskScheduler in batches according to the TaskScheduler's actual load; before submitting, the status of each task in the database is updated to Scheduled. ReceiveTaskConsumer is used to take over an ExchangisTask that is already running but whose status and Metrics information have not been updated for a certain period of time, putting it into the TaskManager so that its status can be updated by StatusUpdateSchedulerTask and MetricsUpdateSchedulerTask. TaskChooseRuler is a rule component that helps TaskConsumer filter and select the required ExchangisTasks, with rules such as judging whether an ExchangisTask can be taken over, priority strategies, and so on.
+
+4. TaskScheduler is a thread pool for scheduling various types of SchedulerTasks. SubmitSchedulerTask is used to asynchronously submit tasks to Linkis for execution and to write the key information returned by Linkis, such as the Id and ECM information, into the DB. StatusUpdateSchedulerTask and MetricsUpdateSchedulerTask are permanent polling tasks that never stop; they constantly take the SchedulerTasks already in the Running state from the TaskManager, regularly request status and Metrics information from Linkis, and update the database.
+
+5. TaskSchedulerLoadBalancer is a load balancer that monitors, in real time, the Running tasks in the TaskManager and the load of the TaskScheduler and the server, and determines how many StatusUpdateSchedulerTasks and MetricsUpdateSchedulerTasks the TaskScheduler finally instantiates to poll the status and Metrics information of all running tasks.
+
+#### 3.4 The UML class diagram structure of the TaskScheduler system is as follows:
+
+![img](../../../images/zh_CN/ch1/job_backend_uml_4.png)
+
+
+Figure 4-5 UML class diagram of Task Scheduler system +
+

TaskScheduler is implemented on top of the linkis-scheduler module.

#### 3.5 The UML class diagram of the Listener system is as follows:

![img](../../../images/zh_CN/ch1/job_backend_uml_5.png)

+Figure 4-6 UML class diagram of listener system +
+

The Listener system is the core mechanism that guarantees all information is eventually updated to the database; the implementation classes of these listeners are expected to all be service classes.
\ No newline at end of file
diff --git a/docs/en_US/ch1/exchangis_sqoop_deploy_en.md b/docs/en_US/ch1/exchangis_sqoop_deploy_en.md
new file mode 100644
index 000000000..062f797d0
--- /dev/null
+++ b/docs/en_US/ch1/exchangis_sqoop_deploy_en.md
@@ -0,0 +1,77 @@
+# Sqoop Engine Usage Documentation
### Prepare the environment
The Sqoop engine is an indispensable component for executing Exchangis data synchronization tasks; only after the Sqoop engine has been installed and deployed can data synchronization tasks run successfully. Also make sure Sqoop itself is installed on the machine being deployed.

Before you install and deploy the Sqoop engine, please complete the installation of Exchangis and related components according to the [Exchangis installation and deployment document](docs/en_US/ch1/exchangis_deploy_en.md), and ensure that the basic functions of the project are available.

The Sqoop engine mainly depends on the Hadoop basic environment. If this node needs to deploy the Sqoop engine, the Hadoop client environment must be deployed on it.

It is strongly recommended that you run a test task with native Sqoop on this node before executing Sqoop tasks, to check whether the environment of this node is normal.

| Environment variable name | Environment variable content | Remark |
| :----: | :----: |-------|
| JAVA_HOME | JDK installation path | Required |
| HADOOP_HOME | Hadoop installation path | Required |
| HADOOP_CONF_DIR | Hadoop config path | Required |
| SQOOP_HOME | Sqoop installation path | Not Required |
| SQOOP_CONF_DIR | Sqoop config path | Not Required |
| HCAT_HOME | HCAT config path | Not Required |
| HBASE_HOME | HBASE config path | Not Required |


| Linkis system params | Params | Remark |
| --------------------------- | -------------------------------------------------------- | ------------------------------------------------------------ |
| wds.linkis.hadoop.site.xml | Sets the location of the Hadoop configuration files that sqoop loads | Required, please refer to the example: "/etc/hadoop/conf/core-site.xml;/etc/hadoop/conf/hdfs-site.xml;/etc/hadoop/conf/yarn-site.xml;/etc/hadoop/conf/mapred-site.xml" |
| sqoop.fetch.status.interval | Sets the interval for fetching the sqoop execution status | Not Required, the default value is 5s. |
### Prepare installation package
#### 1) Download the binary package

Exchangis 1.1.2 and Linkis 1.4.0 support the mainstream Sqoop versions 1.4.6 and 1.4.7; later versions may require modifying some code and recompiling.

[Click to jump to the Release interface](https://github.com/WeBankFinTech/Exchangis/releases)

#### 2) Compile and package
If you want to develop and compile the sqoop engine yourself, the specific compilation steps are as follows:

1. Clone the Exchangis source code.

2. Under the exchangis-plugins module, find the sqoop engine and compile it separately, as follows:

```
cd {EXCHANGIS_CODE_HOME}/exchangis-plugins/engine/sqoop
mvn clean install
```
The sqoop engine installation package will then be found in this path.
+```
{EXCHANGIS_CODE_HOME}/exchangis-plugins/engine/sqoop/target/out/sqoop
```


### Start deployment
#### 1) Sqoop engine installation
1. Get the packed sqoop.zip material package.

2. Place it in the following directory and unzip it:

```
cd {LINKIS_HOME}/linkis/lib/linkis-engineconn-plugins
unzip sqoop.zip
```
The extracted directory structure is:
```
dist
plugin
```
(Note: check which users have permissions on the current sqoop engine directory; it is not necessarily root.)


#### 2) Restart the linkis-engineplugin service to make the sqoop engine take effect
An engine newly added to Linkis does not take effect until the engineplugin service of Linkis is restarted. The restart script is ./sbin/linkis-daemon.sh under the Linkis installation directory. The specific steps are as follows:
```
cd {LINKIS_INSTALL_HOME}/linkis/sbin/
./linkis-daemon.sh restart cg-engineplugin
```
After the service starts successfully, the installation and deployment of sqoop is complete.

For a more detailed introduction of engineplugin, please refer to the following article.
https://linkis.apache.org/zh-CN/docs/latest/deployment/install-engineconn
\ No newline at end of file
diff --git a/docs/en_US/ch1/exchangis_user_manual_en.md b/docs/en_US/ch1/exchangis_user_manual_en.md
new file mode 100644
index 000000000..bdd0856f9
--- /dev/null
+++ b/docs/en_US/ch1/exchangis_user_manual_en.md
@@ -0,0 +1,304 @@
+# Exchangis1.0 User Manual

## 1. Product introduction

   This article is a quick-start document for Exchangis 1.0 and covers its basic usage flow. Exchangis is a lightweight data exchange service platform that supports data synchronization between different types of data sources. The platform splits up the data exchange process and abstracts the concepts of data source, data exchange task and task scheduling, so that the data synchronization process can be managed visually. During actual data transmission, the characteristics of multiple transmission components can be integrated to extend the functionality horizontally.

## 2. Login Exchangis1.0

   Exchangis1.0 is currently part of the DSS **data exchange component**, and it can be accessed from the component list after logging in to DSS. Therefore, before using Exchangis 1.0, please complete the basic deployment of DSS, Exchangis 1.0, Linkis and other related components to ensure that the components' functions are available. This article will not go into details; see [exchangis_deploy_en](docs/en_US/ch1/exchangis_deploy_en.md) and [exchangis-appconn_deploy_en](docs/en_US/ch1/exchangis_appconn_deploy_en.md) for details.

### 1. Login DSS

   By default, the system logs in to DSS with the Linux deployment user of Linkis. If the hadoop user deployed Linkis and DSS, you can log in directly with the account and password hadoop/hadoop. First open the web page at the front-end deployment address of DSS, then enter the account and password hadoop/hadoop to enter DSS.

### 2. Enter Exchangis

   Exchangis is accessed through DSS. Click: **Home -> DSS Application Components -> Data Exchange -> Enter Exchangis**.

![exchangis1.0_entrance](../../../images/zh_CN/ch1/exchangis1.0_entrance.png)
+Pic2-1 Exchangis1.0 entrance +
+

## 3. Datasource management

  This module configures and manages data sources. As the initial step of data synchronization, Exchangis1.0 currently supports direct data transfer between MySQL and Hive.
The main functions of the data source module are as follows:

1. Create, edit and delete data sources;
2. Search data sources by type and name, with quick positioning of data sources;
3. Test the data source connection;
4. Release and record historical data source versions.

![datasource_list](../../../images/zh_CN/ch1/datasource_list.png)

+Pic3-1 Datasource management list +
+


### 1. Create datasource

   Click **Create Data Source** and select the data source you want to create. Currently, MySQL and Hive data sources can be created.

![datasource_type](../../../images/zh_CN/ch1/datasource_type.png)

+Pic3-2 Datasource type +
+


  Select the MySQL data source and fill in the configuration parameters; fields marked with an asterisk are required. Make sure that the host, port number, user name and password of the MySQL database are correct. **Connection parameter** is a JSON object used to set additional MySQL configuration options. After filling everything in, you can **test the connection**.

![MySQL_datasource_config](../../../images/zh_CN/ch1/MySQL_datasource_config.png)

+Pic3-3 MySQL datasource config +
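
For reference, the **Connection parameter** field takes a plain JSON object of MySQL connection options. The keys below are illustrative JDBC-style options rather than a fixed list; which options make sense depends on your MySQL deployment, and an empty object is typically sufficient when no extra options are needed:

```json
{
  "connectTimeout": "6000",
  "characterEncoding": "UTF-8",
  "useSSL": "false"
}
```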
+


  The configuration of a Hive data source differs from MySQL: for the time being, the interface does not let users configure cluster parameters themselves. Cluster environments are maintained by unified back-end configuration; users only need to select the required cluster environment and click OK to save.

![Hive_datasource_config](../../../images/zh_CN/ch1/Hive_datasource_config.png)

+Pic3-4 Hive datasource config +
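
Each entry in the cluster-environment dropdown corresponds to a record pre-configured in the backend (the Exchangis deployment document inserts these records into the `linkis_ps_dm_datasource_env` table of Linkis). The environment's parameter is a JSON blob of the following shape, with the Hive metastore address of your cluster filled in:

```json
{
  "uris": "thrift://${HIVE_METADATA_IP}:${HIVE_METADATA_PORT}",
  "hadoopConf": {
    "hive.metastore.execute.setugi": "true"
  }
}
```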
+


### 2. Datasource function

   The data source management module provides the function of **publishing** a version of a configured data source. Only a published data source can be used when configuring data synchronization tasks; otherwise it will be prompted as unavailable. Every time the data source is edited again, it is recorded as a new version, with the latest version in the first row. You can **view** the configuration of all historical data source versions in the version list, which is useful whenever you need to roll back.

![datasource_func](../../../images/zh_CN/ch1/datasource_func.png)

+Pic3-5 Datasource release +
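
The **Publish** action maps onto the REST interface described in the Exchangis interface document (PUT /dss/exchangis/main/datasources/{id}/{version}/publish). A rough command-line equivalent is sketched below; the gateway host and port, the cookie-based authentication, and the /api/rest_j/v1 prefix (how the Linkis gateway normally exposes REST paths) are all deployment-specific assumptions:

```shell
# Hypothetical example: publish version 2 of data source 10.
curl -X PUT \
  -b "${LINKIS_SESSION_COOKIE}" \
  "http://{GATEWAY_HOST}:{GATEWAY_PORT}/api/rest_j/v1/dss/exchangis/main/datasources/10/2/publish"
```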
+

  The **expire** function of data source management is used to indicate that a data source is being phased out. Please update any task configuration that uses this data source in time; expiring the data source, rather than directly deleting it, avoids breaking the execution tasks that were already configured against it.
![datasource_timelimit](../../../images/zh_CN/ch1/datasource_timelimit.png)

+Pic3-6 Datasource expiration +
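
Like publishing, expiring a data source has a REST counterpart in the interface document (PUT /dss/exchangis/main/datasources/{id}/expire). A hypothetical invocation, with the same caveats about gateway address and authentication as above:

```shell
# Hypothetical example: mark data source 10 as expired.
curl -X PUT \
  -b "${LINKIS_SESSION_COOKIE}" \
  "http://{GATEWAY_HOST}:{GATEWAY_PORT}/api/rest_j/v1/dss/exchangis/main/datasources/10/expire"
```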
+

## 4. Project management

### 1. Project list

   This module manages projects. A project can contain multiple data synchronization tasks, and different projects do not affect each other. Ordinary users can only operate the projects they created themselves. On the homepage of project management you can manage projects, including **create**, **modify**, **delete** and **query/search**. Modification and deletion can only be performed on projects created in Exchangis.

![item_list](../../../images/zh_CN/ch1/item_list.png)
+Pic4-1 Project list +
+

### 2. Task list

  Enter a project and you can see the task list under that project.

#### 1) Task management

  In the task list you can manage the created Job data synchronization tasks; similar to projects, this includes **Create**, **Modify**, **Delete** and **Search**.

![job_task_list](../../../images/zh_CN/ch1/job_task_list.png)
+Pic4-2 Task list +
+ +   Click **Create Task** to select the task type and execution engine. **Currently, only offline tasks and SQOOP execution engine** are supported, and streaming tasks and DataX engines will be supported in the future. + +![task_type_and_engine](../../../images/zh_CN/ch1/task_type_and_engine.png) +
+Pic4-3 Task type and Engine config +
+

#### 2) Subtask management

  Click **Add subtask** in a task to add multiple subtasks. You can **modify, copy and delete subtasks**.

   **Tasks also support copying**: a copied subtask contains all the configuration information of the original task.

![1656571126825](../../../images/zh_CN/ch1/sub_task_manage.png)

+Pic4-4 Subtask manage +
+

#### 3) Data synchronization task configuration and execution

  Before this step, you need to add data sources in the **data source management module** and **publish** them so that they can be selected. The current Exchangis version only supports **MySQL data sources and Hive data sources**.

  Data synchronization task configuration and execution is the core function of Exchangis1.0.0. The basic flow for configuring data synchronization is: **Add subtask -> Select Source data source and Sink data source -> Field mapping configuration -> Process control -> Task configuration -> Save -> Execute**.

The main functions of task execution include:
1. Add, copy and delete subtask cards;
2. Import and export data between two different types of data sources;
3. Select the database and table for the source and destination data sources;
4. Datasource field mapping;
5. Maximum job concurrency and maximum job memory configuration;
6. View of the data synchronization task execution status;
7. Log view for each main task and each subtask;
8. View of the task execution history;
9. Kill operation on an executing task.

##### Selecting and configuring data sources

  For a newly created data synchronization subtask, you should select the data source's database and table; the data sources available for selection are those published in the **data source module**. Data source selection supports search: first search for the database, then search for the table.

  When MySQL is the destination data source, it supports two write modes, **insert** and **update**; when it is the source data source, it supports a **WHERE conditional statement query**.

  When Hive is the destination data source, partition information configuration is supported, and the write modes are **append data** and **overwrite**; when it is the source data source, partition information configuration is also supported.

![add_subtask](../../../images/zh_CN/ch1/data_source_select.png)
+Pic4-5 Select datasource +
+ +![1656574588669](../../../images/zh_CN/ch1/data_source_insert_way.png) + +
+Pic4-6 Write mode configuration +
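
The **WHERE conditional statement query** mentioned above takes an ordinary SQL predicate that is pushed into the query against the source MySQL table. A hypothetical example of what you might enter in that field (the column name is made up for illustration):

```sql
-- Export only one day's incremental rows from the source table;
-- this predicate is the content of the WHERE field, without the WHERE keyword.
update_time >= '2022-01-01' AND update_time < '2022-01-02'
```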
+

##### Datasource field mapping

  Once the Source database and table information is configured, Exchangis1.0.0 automatically maps the fields of the Source data source to those of the Sink data source. You can choose the fields you want to map yourself, or check whether the mapped fields match. When Hive is the Sink data source, its mapping fields cannot be modified.

![1656574253553](../../../images/zh_CN/ch1/data_source_field_mapping.png)

+Pic4-7 Field mapping +
+

##### Process control

  Task execution provides **configuration of the maximum number of parallel jobs** (default is 1) and **configuration of the maximum job memory** (default is 1024MB), which can be changed according to actual needs.

![1656574125954](../../../images/zh_CN/ch1/task_proccess_control.png)

+Pic4-8 Process control +
+

#### 4) Job execution

  Exchangis1.0 supports the simultaneous execution of multiple subtasks. After the task configuration is completed, click Execute to start the data synchronization task, and the workbench pops up at the bottom of the interface. The workbench mainly contains three functions: **running status, real-time log and execution history**.
   **Running status**: you can view the overall progress of the current data synchronization task, including the number of successful and failed tasks; click the name of a task to display its various running indicators.

   **Real-time log**: the displayed content falls into two categories. One is the log of the whole synchronization Job, which outputs the status log of each task, such as whether the task has been scheduled and whether it is running; the other is the log of each individual task, which outputs the corresponding synchronization log. In the real-time log, logs can be filtered by keywords and ignored words, and a separate function is provided to fetch the last N lines of the log; you can also filter the display by Error, Warning and Info levels, just by clicking the corresponding button.

   **Execution history**: it displays the historical execution information of this synchronization task and provides a preliminary overview of the historical execution process. If you want to view the detailed history, click the task name to jump to the synchronous history interface.

   Data synchronization task execution needs a specified execution user, which is the login user by default. The details need to be adjusted according to the configuration of the actual data source.


## 5. Synchronous history

   This module shows all data synchronization tasks executed historically. Each user can only view the tasks created by himself; different users are isolated from each other.

 The main functions are as follows:
1. Find the required historical task information according to query conditions;
2. For tasks not yet in a terminal state, a terminate function is provided that can kill them;
3. Check the running status and real-time log of each task;
4. View more detailed configuration information and the update time of each synchronization task.

![sync_history](../../../images/zh_CN/ch1/sync_history.png)

+Pic5-1 Synchronous history +
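
Under the hood, the workbench and this history page are driven by the job-execution REST interfaces described in the Exchangis interface document. The URLs and response shapes below follow that document, while the gateway address and the cookie-based authentication are deployment-specific assumptions:

```shell
# Submit job 55 for execution; the response carries a jobExecutionId.
curl -X POST -b "${LINKIS_SESSION_COOKIE}" \
  "http://{GATEWAY_HOST}:{GATEWAY_PORT}/api/rest_j/v1/exchangis/job/55/execute"
# => {"status": 0, "data": {"jobExecutionId": "555node1node2node3execId1"}}

# Poll the execution status with the returned jobExecutionId; possible states
# include Inited, Scheduled, Running, Success, Failed, Cancelled, etc.
curl -X GET -b "${LINKIS_SESSION_COOKIE}" \
  "http://{GATEWAY_HOST}:{GATEWAY_PORT}/api/rest_j/v1/exchangis/job/execution/555node1node2node3execId1/status"
# => {"status": 0, "data": {"status": "Running", "progress": 0.1}}
```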
+

## 6. Exchangis AppConn usage

   At present, Exchangis1.0 supports docking with DSS in the form of an AppConn. **On the DSS side**, a data exchange sqoop workflow node can be created in workflow-orchestration mode through **Application Development -> Project List** of DSS, where data synchronization tasks can be configured and executed. Exchangis projects and data exchange tasks created in DSS are created in Exchangis at the same time.

Exchangis AppConn mainly supports the following functions:

1. **Project operation**: creating, deleting and modifying DSS projects synchronously affects the projects on the Exchangis side;

2. **Basic operation of workflow nodes**: creating, deleting and modifying sqoop workflow node tasks in the DSS orchestrator is synchronized to Exchangis;

3. **Workflow execution operation**: sqoop workflow nodes can be configured to perform data synchronization tasks;

4. **Workflow publishing operation**: sqoop workflow nodes can be published to WTSS for task scheduling.

### 1. Project operation

   This module can create, modify and delete DSS projects, and the operations on the DSS side are synchronized to the Exchangis side. Taking project creation in DSS as an example, the operation process is: **Click Create Project -> Fill in project information -> Click Confirm -> Enter the Exchangis side -> Click Project Management**, where you can view the synchronously created project, as shown in the following figure:

![appconn_pro_create](../../../images/zh_CN/ch1/appconn_pro_create.png)

+Pic6-1 Project operation +
+

After the creation, you will see the synchronized project on the Exchangis side.

![appconn_pro_sync](../../../images/zh_CN/ch1/appconn_pro_sync.jpg)

+Pic6-2 Synchronize the project into Exchangis +
+

### 2. Basic operation of workflow nodes

  Workflow nodes can be created, modified, deleted and connected to one another as dependencies, and operations on the DSS side are synchronized to the Exchangis side. Taking the creation of a sqoop workflow node as an example, the operation process in the Exchangis AppConn is: **Create a workflow -> Drag a sqoop node from the left plug-in bar to the right canvas -> Click OK to create the sqoop node task -> Enter Exchangis to view the synchronously created task**, as shown in the following figure; deleting and modifying sqoop node tasks works the same way.

![appconn_pro_sqoop](../../../images/zh_CN/ch1/appconn_pro_sqoop.png)

+Pic6-3 Sqoop node function +
+

 You can see that the data synchronization task is also synchronized to Exchangis. ![](../../../images/zh_CN/ch1/appconn_pro_sqoop_sync.jpg)

+Pic6-4 Synchronize the sqoop node into Exchangis +
+

### 3. Workflow execution operation

   Double-click the sqoop node to operate the workflow node; configuring the sqoop workflow node and running data synchronization tasks are both supported. Running data synchronization tasks as workflow nodes is the core function of the Exchangis AppConn. **Each sqoop node represents one data synchronization task**, and the specific operation process is: **Double-click the sqoop node -> the task configuration interface pops up -> Configure the task information -> Execute the task**, as shown in the following figures:

![sqoop_config](../../../images/zh_CN/ch1/sqoop_config.png)

+Pic6-5 Double-click the sqoop workflow node to enter the configuration interface. +
+ +![sqoop_user_config](../../../images/zh_CN/ch1/sqoop_user_config.png) + +
+Pic6-6 Configure workflow node information +
+

  There are two ways to execute: one is to click the execute button in the pop-up task configuration interface; the other is to click the **Execute** button or the **Selected Execute** button of the DSS orchestrator. **Execute** runs all the nodes in the workflow, while **Selected Execute** runs only the selected workflow nodes, not all of them.

![sqoop_execute](../../../images/zh_CN/ch1/sqoop_execute.png)

+Pic6-7 Execute job +
+

Note: for data synchronization tasks executed in a sqoop node of DSS, the relevant information can also be viewed in Exchangis.

### 4. Workflow publishing operation

  The **publish** function of workflow tasks supports publishing sqoop workflow nodes to WTSS for task scheduling. The data exchange task information created and configured in the **Development Center** can be published to WTSS, and the task can then be scheduled in WTSS.

### 5. Production center

  Click the drop-down box in the namespace and switch to the **Production Center**, where you can see the workflow logs of all projects and check the scheduling status of each workflow.

![production_center](../../../images/zh_CN/ch1/production_center.png)

+Pic6-8 Production center +
+
diff --git a/docs/zh_CN/ch1/component_upgrade_cn.md b/docs/zh_CN/ch1/component_upgrade_cn.md
new file mode 100644
index 000000000..94ca8e7f2
--- /dev/null
+++ b/docs/zh_CN/ch1/component_upgrade_cn.md
@@ -0,0 +1,97 @@
+# Exchangis 升级文档
本文主要介绍在原有安装Exchangis服务的基础上适配DSS1.1.2和Linkis1.4.0的升级步骤。Exchangis1.1.2相对于Exchangis1.0.0版本最大的区别在于ExchangisAppConn的安装,需要对整个ExchangisAppConn进行重新替换和加载。
### 1.升级Exchangis前的工作
您在升级Exchangis之前,请按照[DSS1.1.2安装部署文档](https://github.com/WeBankFinTech/DataSphereStudio-Doc/tree/main/zh_CN/%E5%AE%89%E8%A3%85%E9%83%A8%E7%BD%B2)
和[Linkis1.4.0安装部署文档](https://linkis.staged.apache.org/zh-CN/docs/1.4.0/deployment/deploy-quick)完成DSS和Linkis的安装升级

### 2.Exchangis升级步骤

#### 1)删除旧版本ExchangisAppConn包

进入下列目录,找到exchangis的appconn文件夹并删除(如果存在的话):
```
{DSS_Install_HOME}/dss/dss-appconns
```

#### 2)下载二进制包
我们提供ExchangisAppConn的升级物料包,您可直接下载使用。[点击跳转 Release 界面](https://osp-1257653870.cos.ap-guangzhou.myqcloud.com/WeDatasphere/Exchangis/exchangis1.1.2/Exchangis1.1.2_install_package.zip)

#### 3) 编译打包

如果您想自行编译ExchangisAppConn,具体编译步骤如下:

1. clone Exchangis的代码

2. 在exchangis-plugins模块下,找到exchangis-appconn,单独编译exchangis-appconn
```
cd {EXCHANGIS_CODE_HOME}/exchangis-plugins/exchangis-appconn
mvn clean install
```
会在该路径下找到exchangis-appconn.zip安装包
```
{EXCHANGIS_CODE_HOME}/exchangis-plugins/exchangis-appconn/target/exchangis-appconn.zip
```

### 3.ExchangisAppConn插件的部署和配置总体步骤
1.拿到打包出来的exchangis-appconn.zip物料包

2.放置到如下目录并进行解压

```
cd {DSS_Install_HOME}/dss/dss-appconns
unzip exchangis-appconn.zip
```
解压出来的目录结构为:
```
conf
db
lib
appconn.properties
```

3.执行脚本进行自动化安装

```shell
cd {DSS_INSTALL_HOME}/dss/bin
./install-appconn.sh
# 脚本是交互式的安装方案,您需要输入字符串exchangis以及exchangis服务的ip和端口,即可以完成安装
# 这里的exchangis端口是指前端端口,在nginx进行配置。而不是后端的服务端口
```

### 4.完成exchangis-appconn的安装后,调用脚本刷新appconn服务

#### 4.1)使部署好的APPCONN生效
使用DSS刷新使APPCONN生效,进入到脚本所在目录{DSS_INSTALL_HOME}/bin中,使用如下命令执行脚本,注意,无需重启dss服务:
```
sh ./appconn-refresh.sh
```

#### 4.2)更新Exchangis安装目录下的lib包

将从下载链接中得到的exchangis-project-server-1.1.2.jar和exchangis-server-1.1.2.jar两个包放入以下Exchangis安装目录的文件路径下(先删除原有旧的这两个包):

```
lib/exchangis-server
```

再通过以下命令完成 Exchangis Server 的更新重启:

```shell script
./sbin/daemon.sh restart server
```
#### 4.3)更新dss-exchangis-server.properties文件

将dss-exchangis-server.properties文件中的最后一行替换为以下内容:

```
wds.linkis-session.ticket.key=bdp-user-ticket-id
```

#### 4.4)验证exchangis-appconn是否生效
在安装部署完成exchangis-appconn之后,可通过以下步骤初步验证exchangis-appconn是否安装成功。
1. 在DSS工作空间创建一个新的项目
![image](https://user-images.githubusercontent.com/27387830/169782142-b2fc2633-e605-4553-9433-67756135a6f1.png)

2. 
在exchangis端查看是否同步创建项目,创建成功说明appconn安装成功 +![image](https://user-images.githubusercontent.com/27387830/169782337-678f2df0-080a-495a-b59f-a98c5a427cf8.png) + + + 更多使用操作可参照[Exchangis用户手册](docs/zh_CN/ch1/exchangis_user_manual_cn.md) diff --git a/docs/zh_CN/ch1/exchangis_appconn_deploy_cn.md b/docs/zh_CN/ch1/exchangis_appconn_deploy_cn.md new file mode 100644 index 000000000..5225fc180 --- /dev/null +++ b/docs/zh_CN/ch1/exchangis_appconn_deploy_cn.md @@ -0,0 +1,87 @@ +ExchangisAppConn安装文档 +本文主要介绍在DSS(DataSphere Studio)1.0.1中ExchangisAppConn的部署、配置以及安装 +### 1.部署ExchangisAppConn的准备工作 +您在部署ExchangisAppConn之前,请按照[Exchangis安装部署文档](docs/zh_CN/ch1/exchangis_deploy_cn.md)安装完成Exchangis及其他相关组件的安装,并确保工程基本功能可用。 + +### 2.ExchangisAppConn插件的下载和编译 +#### 1)下载二进制包 +我们提供ExchangisAppconn的物料包,您可直接下载使用。[点击跳转 Release 界面](https://github.com/WeBankFinTech/Exchangis/releases) +#### 2) 编译打包 + +如果您想自己开发和编译ExchangisAppConn,具体编译步骤如下: +1.clone Exchangis的代码 +2.在exchangis-plugins模块下,找到exchangis-appconn,单独编译exchangis-appconn +```shell +cd {EXCHANGIS_CODE_HOME}/exchangis-plugins/exchangis-appconn +mvn clean install +``` +会在该路径下找到exchangis-appconn.zip安装包 +```shell +{EXCHANGIS_CODE_HOME}/exchangis-plugins/exchangis-appconn/target/exchangis-appconn.zip +``` + +### 3.ExchangisAppConn插件的部署和配置总体步骤 +1.拿到打包出来的exchangis-appconn.zip物料包 + +2.放置到如下目录并进行解压 + +注意:第一次解压exchangis appconn后,确保当前文件夹下没有index_v0000XX.index文件,该文件在后面才会生成 + +```shell +cd {DSS_Install_HOME}/dss/dss-appconns +unzip exchangis-appconn.zip +``` +解压出来的目录结构为: +```shell +conf +db +lib +appconn.properties +``` + +3.执行脚本进行自动化安装 + +```shell +cd {DSS_INSTALL_HOME}/dss/bin +./install-appconn.sh +# 脚本是交互式的安装方案,您需要输入字符串exchangis以及exchangis服务的ip和端口,即可以完成安装 +# 这里的exchangis端口是指前端端口,在nginx进行配置。而不是后端的服务端口 +``` + +### 4.完成exchangis-appconn的安装后,需要重启dss服务,才能最终完成插件的更新 + +#### 4.1)使部署好的APPCONN生效 +使用DSS启停脚本使APPCONN生效,进入到脚本所在目录{DSS_INSTALL_HOME}/sbin中,依次使用如下命令执行脚本: +```shell +sh ./dss-stop-all.sh +sh ./dss-start-all.sh +``` +#### 4.2)验证exchangis-appconn是否生效 +在安装部署完成exchangis-appconn之后,可通过以下步骤初步验证exchangis-appconn是否安装成功。 +1. 在DSS工作空间创建一个新的项目 +![image](https://user-images.githubusercontent.com/27387830/169782142-b2fc2633-e605-4553-9433-67756135a6f1.png) + +2. 
在exchangis端查看是否同步创建项目,创建成功说明appconn安装成功
![image](https://user-images.githubusercontent.com/27387830/169782337-678f2df0-080a-495a-b59f-a98c5a427cf8.png)

更多使用操作可参照[Exchangis用户手册](docs/zh_CN/ch1/exchangis_user_manual_cn.md)

### 5.Exchangis AppConn安装原理

Exchangis 的相关配置信息会插入到以下表中,通过配置下表,可以完成 Exchangis 的使用配置。安装 Exchangis AppConn 时,脚本会替换每个 AppConn 下的 init.sql,并插入到表中。(注:如果仅仅需要快速安装APPCONN,无需过分关注以下字段,提供的init.sql中大多已进行默认配置。重点关注以上操作即可)

| 表名 | 表作用 | 备注 |
| :----: | :----: |-------|
| dss_application | 应用表,主要是插入 exchangis 应用的基本信息 | 必须 |
| dss_menu | 菜单表,存储对外展示的内容,如图标、名称等 | 必须 |
| dss_onestop_menu_application| menu 和 application 的关联表,用于联合查找 | 必须 |
| dss_appconn |appconn 的基本信息,用于加载 appconn | 必须 |
| dss_appconn_instance| AppConn 的实例的信息,包括自身的url信息 | 必须 |
| dss_workflow_node | exchangis 作为工作流节点需要插入的信息 | 必须 |

Exchangis AppConn 实现了一级规范和二级规范,DSS 中需要用到 Exchangis AppConn 的微服务如下表。

| 微服务名 | 作用 | 备注 |
| :----: | :----: |-------|
| dss-framework-project-server | 使用 exchangis-appconn 完成工程以及组织的统一| 必须 |
| dss-workflow-server | 借用调度 AppConn 实现工作流发布,状态等获取 | 必须 |
diff --git a/docs/zh_CN/ch1/exchangis_datasource_cn.md b/docs/zh_CN/ch1/exchangis_datasource_cn.md
new file mode 100644
index 000000000..b7466d4bf
--- /dev/null
+++ b/docs/zh_CN/ch1/exchangis_datasource_cn.md
@@ -0,0 +1,306 @@
+# DataSource1.0

## 1、背景

早期版本中的**Exchangis0.x**和**Linkis0.x**都有整合数据源模块,现在以**linkis-datasource**为蓝本(可以参阅相关的文档)重构数据源模块。

## 2、整体架构设计

为了构建公共的数据源模块,数据源模块主要拆分成两大部分:**datasource-client**部分和**datasource-server**部分。其中server部分统一放在**Linkis-1.0**的**linkis-datasource**模块中,包含服务核心主逻辑;client部分放在**Exchangis-1.0**的**exchangis-datasource**模块下,包含客户端的调用逻辑。整体架构如下。

![linkis_datasource_structure](../../../images/zh_CN/ch1/datasource_structure.png)

+图2-1 整体架构设计 +
+ +## 3、模块详解 + +### 3.1 datasource-server + +**datasource-server**: 顾名思义是保存核心服务的模块,沿用**linkis-datasource**原先的架构(拆分成**datasourcemanager** 和 **metadatamanager**)。 + +### 3.2 linkis-datasource + +目前的架构示意图: + +![linkis_datasource_structure](../../../images/zh_CN/ch1/linkis_datasource_structure.png) + +
+图3-1 目前架构示意图 +
+ +上图中可见**linkis-datasource**将数据源相关功能进行了解耦,基础信息部分由**datasourcemanager**进行管理,元数据部分由**metadatamanager**进行管理。两个子模块之间通过RPC请求互相访问,同时分别对外提供Restful入口,外部服务请求统一经过**liniks-gateway**进行转发才落到对应服务上。再者,**metadatamanage**在没有对接第三方元数据管理平台的情况,为了能插件化地支持不同数据源的元数据接入,下接不同数据源的**service**子模块,子模块里都有各自对元数据获取接口的实现,这些模块例如:**service/hive, service/elasticsearch, service/mysql** + +#### 3.2.1 新增需求 + +##### 前端界面需求 + +原先**linkis-datasource**并不包含前端界面部分,现将原先对**exchangis 1.0**的数据源界面设计合并进去,详见**UI文档**和**前端交互文档**。对其中涉及的需求做下详细描述: + +- 数据源类型-列表获取【数据源管理】 + +需求描述: + +获取接入的所有数据源类型并展示 + +- 数据源环境-列表获取【数据源管理】 + +需求描述: + +获取在后台预设的数据源环境参数并展示做列表展示 + +- 新增/修改数据源-标签设置【数据源管理】 + +需求描述: + +设置数据源的标签信息。 + +- 连接性检测【数据源管理】 + +需求描述: + +检查已经接入的数据源的连通性,通过点击数据源列表的连通性监测按钮。 + +- 新增/修改数据源-配置加载【数据源管理】 + +需求描述: + +为了方便引入新的数据源或对原有的数据源进行属性扩充,所以对于新建/修改数据源的表单配置,计划采用后台存储+前端加载的方式,后台会保存每条属性字段的类型、默认值、加载地址和简单的级联关系,前端根据这些生成抽象的数据结构,再将其转化成DOM操作 + +流程设计: + +1. 用户选择数据源类型,前端以数据源类型为参数向后台请求该数据源的属性配置列表; + +2. 前端拿到配置列表,首先判断类型,选用对应的控件,再设置默认的值,刷新界面DOM; + +3. 配置基本信息都加载渲染完成后,开始进行值的预加载和级联关系的建立; + +4. 配置加载完成,等待用户填充。 + +关联UI: + +![datasource_ui](../../../images/zh_CN/ch1/datasource_ui.png) + +
+图3-2 数据源UI +
+ +![datasource_ui_mysql](../../../images/zh_CN/ch1/datasource_ui_mysql.png) + +
+图3-3 创建MySQL数据源 +
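
结合上面的表单加载流程,后台为每个属性字段保存的一条配置定义,大致可以用如下 JSON 来示意(字段名取自下文 3.2.2 节 DataSourceParamKeyDefinition 的结构,具体取值仅为假设示例):

```json
{
  "id": "1",
  "key": "host",
  "name": "主机名",
  "description": "MySQL主机地址",
  "defaultValue": "",
  "valueType": "TEXT",
  "require": true,
  "refId": "",
  "refMap": "",
  "loadUrl": "",
  "dataSrcTypId": "1"
}
```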
+ +- 批处理-批量导入/导出 【数据源管理】 + +需求描述: + +对数据源配置的批量导入和导出。 + +##### 后台需求 + +**linkis-datasurce**后台目前已集成了关于数据源CRUD的相关操作逻辑,现增加和标签、版本相关的内容: + +- 数据源权限设置 【数据源管理】 + +需求描述: + +后台需要将其和Linkis1.4.0的标签功能相整合,为数据源赋予标签关系。 + +流程设计: + +1. 用户在创建和修改数据源的时候,允许对数据源进行设置标签; + +2. 保存修改时,把标签信息作为字符列表传给后端,后端将标签字符转换为标签实体,插入更新该标签; + +3. 保存数据源,建立数据源和标签的联系。 + +- 数据源版本功能 【数据源管理】 + +需求描述: + +为数据源添加版本的概念,版本的作用在于发布和更新,更新的时候默认添加新版本,发布的时候将要发布的版本的数据源信息覆盖最新版本,并标识为已发布。 + +#### 3.2.2 详细设计 + +对**linkis-datasource**包含的实体对象做一些修改和扩充,介绍如下: + +| **类名** | **作用** | +| -------------------------------- | ------------------------------------------------------------ | +| DataSourceType | 表示数据源的类型 | +| DataSourceParamKeyDefinition | 声明数据源属性配置定义 | +| DataSourceScope【新增】 | 标注数据源属性的作用域,通常有三种域,数据源、数据源环境和默认(全部) | +| DataSource | 数据源对象实体类,包含标签和属性配置定义 | +| DataSourceEnv | 数据源环境对象实体类,也包含属性配置定义 | +| DataSourcePermissonLabel【删除】 | | +| DataSourceLabelRelation【新增】 | 表示数据源和权限标签之间的关系 | +| VersionInfo【新增】 | 版本信息,包含数据源版本号信息 | + +2.1 其中**DataSourceParamKeyDefinition**保持原先一致的结构,新增一些支撑界面渲染的属性,结构详解如下: + +| **字段名称** | **字段类型** | **备注** | +| --------------- | ------------ | --------------------------------------------------------- | +| id | string | 持久化ID | +| key | string | 属性名称关键字 | +| description | string | 描述 | +| name | string | 属性显示名称 | +| defaultValue | string | 属性默认值 | +| valueType | string | 属性值类型 | +| require | boolean | 是否是必须属性 | +| refId | string | 级联的另一个属性ID | +| dataSrcTypId | string | 关联的数据源类型ID | +| 【新增】refMap | string | 级联关系表,格式应如: value1=refValue1, value2=refValue2 | +| 【新增】loadUrl | string | 加载URL, 默认为空 | + +2.2 **DataSource**结构也类似,但包含标签信息: + +| **字段名称** | **字段类型** | **备注** | +| ------------------ | ------------ | ------------------------------------------ | +| serId | string | 持久化ID | +| id | string | 系统内ID | +| 【新增】versions | list-obj | 关联的版本VersionInfo列表 | +| 【新增】srcVersion | string | 来源版本,标注该数据源是通过版本信息创建的 | +| datSourceName | string | 数据源名称 | +| dataSourceDesc | string | 数据源描述 | +| dataSourceTypeId | integer | 数据源类型ID | +| connectParams | map | 连接属性参数字典 | +| parameter | string | 连接属性参数串 | +| createSystem | string | 创建的系统,一般情况下都为空或(exchangis) | +| dataSourceEnvId | integer | 关联的数据源环境ID | +| keyDefinitions | list-object | 关联的属性配置定义列表 | +| 【新增】labels | map | 标签字符串 | +| 【新增】readOnly | boolean | 是否是只读数据源 | +| 【新增】expire | boolean | 是否过期 | +| 【去除】 isPub | boolean | 是否发布 | + +2.3 **VersionInfo**版本信息,不同的数据源版本主要是连接参数不同,结构如下: + +| **字段名称** | **字段类型** | **备注** | +| ------------- | ------------ | ---------------- | +| version | string | 版本号 | +| source | long | 关联的数据源ID | +| connectParams | map | 版本属性参数字典 | +| parameter | string | 版本属性参数串 | + +2.4 **DataSourceType**和**DataSourceEnv**也与原先的类大致相同,其中**DataSourceType**需要新增**classifier**字段来对不同数据源类型进行分类,其他就不展开描述。 + +**datasource-server**具有的主要服务处理类如下: + +| **接口名** | **接口作用** | **单一实现** | +| ---------------------------------- | ------------------------------------------------------------ | ------------ | +| DataSourceRelateService | 声明对数据源关联信息的操作,包括列举所有的数据源类型,列举不同类型下的属性定义信息 | 是 | +| DataSourceInfoService | 声明对数据源/数据源环境的基本操作 | 是 | +| MetadataOperateService | 声明对数据源元数据源的操作,一般用于连接测试 | 是 | +| BmlAppService | 声明对BML模块的远端调用,用于上传/下载数据源的密钥文件 | 是 | +| DataSourceVersionSupportService | 声明对数据源多版本支持的操作 | 是 | +| MetadataAppService【旧,需要拆分】 | 声明对元数据信息的操作 | 是 | +| DataSourceBatchOpService【新增】 | 声明对数据源的批处理操作 | 是 | +| MetadataDatabaseService【新增】 | 声明对数据库类元数据信息的操作 | 是 | +| MetadataPropertiesService【新增】 | 声明对属性类元数据信息的操作 | 是 | + +### 3.3 datasource-client + +**datasource-client**: 包含客户端调用逻辑,用客户端的方式去操作数据源以及获取相关的元数据。 + +#### 3.3.1 相关需求 + 
+
##### 后台需求

作为请求客户端,**datasource-client**没有前端界面需求,后台需求比较单一:构建稳定、可重试、容易溯源的客户端,直接对接server端支持的所有接口,并尽可能支持各种接入方式。

#### 3.3.2 详细设计

其包的组织架构大体设计如下:

![datasource_client_scructure](../../../images/zh_CN/ch1/datasource_client_scructure.png)

+图3-4 datasource-client详细设计 +
+ +涉及到的类/接口信息如下: + +| 类/接口名 | 类/接口作用 | 单一实现 | +| ----------------------------- | ------------------------------------------------------------ | -------- | +| RemoteClient | Client对象的顶级接口,声明了初始化、释放以及基本权限验证的公共接口方法 | 否 | +| RemoteClientBuilder | Client的构造类,根据RemoteClient的不同实现类来构造 | 是 | +| AbstractRemoteClient | RemoteClient的抽象实现,涉及到重试、统计、缓存等逻辑 | 是 | +| DataSourceRemoteClient | 声明数据源客户端的所有操作入口 | 否 | +| MetaDataRemoteClient | 声明元数据客户端的所有操作入口 | 否 | +| LinkisDataSourceRemoteClient | linkis-datasource的数据源客户端实现 | 是 | +| LinkisMetaDataRemoteClient | linkis-datasource的元数据客户端实现 | 是 | +| MetadataRemoteAccessService | 声明底层访问远端第三方元数据服务的接口 | 是 | +| DataSourceRemoteAccessService | 声明底层访问远端第三方数据源服务的接口 | 是 | + +其类关系组图如下: + +![datasource_client_class_relation](../../../images/zh_CN/ch1/datasource_client_class_relation.png) + +
+图3-5 datasource-client类关系组图 +
+ +##### 流程时序图: + +下面再结合所有模块,详细描述下业务流程中接口/类之间的调用关系: + +- 创建数据源 + +关注点: + +1. 创建数据源前需要拉取数据源类型列表和类型对应的数据源的属性配置定义列表,有些情况下还需要拉取数据源环境列表; + +2. 创建数据源分两种情景,一种通过**linkis-datasource**的界面创建,一种通过**exchangis**的datasource-client创建; + +3. 数据源类型、属性配置定义、数据源环境,可以自行在后台库中添加,目前未提供界面动态配置的方式(待提供)。 + +现在看下创建数据源的时序图: + +![datasource_client_create](../../../images/zh_CN/ch1/datasource_client_create.png) + +
+图3-6 datasource-client创建数据源时序图 +
+ +继续看下通过**datasource-client**来调用创建数据源接口: + +![datasource_client_create2](../../../images/zh_CN/ch1/datasource_client_create2.png) + +
+图3-7 datasource-client调用创建数据源时序图 +
+ +上图中省略了一些客户端连接认证、请求记录以及生命周期监听等附加的方法,而是精简了整体调用流程。 + +- 更新数据源 + +关注点: + +1. 更新有两种方式:版本更新和普通更新,版本更新会产生一个新的数据源的版本(可删除,可发布),而普通更新则会覆盖当前的数据源,不会产生新版本; + +2. 只有数据源的创建用户以及管理员用户可以更新发布数据源。 + +![datasource_client_update](../../../images/zh_CN/ch1/datasource_client_update.png) + +
+图3-8 datasource-client更新数据源时序图 +
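
对应到 REST 层面,普通更新走的是接口文档中"更新数据源"的接口(PUT /dss/exchangis/main/datasources/{id})。下面是一个假设性的调用示意(网关地址与认证方式取决于具体部署;请求体为 DataSourceCreateVO 的 JSON 序列化,这里的字段仅作示意,完整字段见接口文档):

```shell
# 假设示例:更新 ID 为 10 的数据源
curl -X PUT \
  -b "${LINKIS_SESSION_COOKIE}" \
  -H "Content-Type: application/json" \
  -d '{"dataSourceName": "my_mysql", "dataSourceDesc": "updated"}' \
  "http://{GATEWAY_HOST}:{GATEWAY_PORT}/api/rest_j/v1/dss/exchangis/main/datasources/10"
```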
+

- 查询数据源

关注点:

1. 通过datasource-client获取数据源列表的时候,需要附带上操作用户信息,用于对数据源做权限过滤。

查询数据源的时序图如下:

![datasource_client_query](../../../images/zh_CN/ch1/datasource_client_query.png)

+图3-9 datasource-client查询数据源时序图 +
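
与之配套,按条件分页查询数据源可以参考接口文档中"查询数据源"的接口(GET /dss/exchangis/main/datasources,参数包括 typeName、typeId、page、size)。一个假设性的命令行示意(网关地址与认证方式同样取决于具体部署):

```shell
# 假设示例:按类型分页查询数据源列表
curl -X GET \
  -b "${LINKIS_SESSION_COOKIE}" \
  "http://{GATEWAY_HOST}:{GATEWAY_PORT}/api/rest_j/v1/dss/exchangis/main/datasources?typeName=MYSQL&page=1&size=10"
```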
+

##### 接口设计:

(参照linkis-datasource现有的接口做补充)
\ No newline at end of file
diff --git a/docs/zh_CN/ch1/exchangis_datax_deploy_cn.md b/docs/zh_CN/ch1/exchangis_datax_deploy_cn.md
new file mode 100644
index 000000000..00a09f989
--- /dev/null
+++ b/docs/zh_CN/ch1/exchangis_datax_deploy_cn.md
@@ -0,0 +1,72 @@
+# DataX 引擎使用文档
### 环境准备
DataX引擎是执行Exchangis数据同步任务不可或缺的组件,只有安装部署完成DataX引擎才能够成功执行数据同步任务。同时,确保所部署的机器上已安装DataX。

您在安装部署DataX引擎之前,请按照[Exchangis安装部署文档](docs/zh_CN/ch1/exchangis_deploy_cn.md)安装完成Exchangis及相关组件的安装,并确保工程基本功能可用。

强烈建议您在执行DataX任务之前,先在该节点使用原生的DataX执行测试任务,以检测该节点环境是否正常。

| 环境变量名 | 环境变量内容 | 备注 |
| :----: | :----: |-------|
| JAVA_HOME | JDK安装路径 | 必须 |
| DataX_HOME | DataX安装路径 | 非必须 |
| DataX_CONF_DIR | DataX配置路径 | 非必须 |

### 安装包准备
#### 1)下载二进制包

Exchangis1.1.2和Linkis 1.4.0支持主流的DataX版本,更高版本可能需要修改部分代码重新编译。

[点击跳转 Release 界面](https://github.com/WeBankFinTech/Exchangis/releases/tag/release-1.1.2)

#### 2) 编译打包
如果您想自己开发和编译datax引擎,具体编译步骤如下:

1、克隆Exchangis的代码

2、在exchangis-plugins模块下,找到datax引擎,单独编译datax,操作如下

```
cd {EXCHANGIS_CODE_HOME}/exchangis-plugins/engine/datax
mvn clean install
```
然后会在该路径下找到datax引擎安装包
```
{EXCHANGIS_CODE_HOME}/exchangis-plugins/datax/target/out
```


### 开始部署
#### 1)DataX引擎安装
1、拿到打包出来的datax物料包,目录结构为:

```shell
datax
-- dist
-- plugin
```

2、放置到linkis安装路径的如下目录

```shell
cd {LINKIS_HOME}/linkis/lib/linkis-engineconn-plugins
```
(注意,看当前datax引擎对哪些用户有权限,一般都为hadoop用户组和hadoop用户)


#### 2)重启linkis-engineplugin服务使datax引擎生效
新加入linkis的引擎都要重启linkis的engineplugin服务才会生效,重启脚本为linkis安装目录下的./sbin/linkis-daemon.sh,具体步骤如下
```shell
cd {LINKIS_INSTALL_HOME}/linkis/sbin/
./linkis-daemon.sh restart cg-engineplugin
```
待服务启动成功,在linkis数据库中校验datax引擎是否安装完毕

```shell
select * from linkis_cg_engine_conn_plugin_bml_resources where engine_conn_type='datax';
```

至此,datax安装部署就完成了。

engineplugin更详细的介绍可以参看下面的文章。
https://linkis.apache.org/zh-CN/docs/latest/deployment/install-engineconn
\ No newline at end of file
diff --git a/docs/zh_CN/ch1/exchangis_deploy_cn.md b/docs/zh_CN/ch1/exchangis_deploy_cn.md
new file mode 100644
index 000000000..79c3fc139
--- /dev/null
+++ b/docs/zh_CN/ch1/exchangis_deploy_cn.md
@@ -0,0 +1,308 @@
+## 前言

Exchangis 的安装,主要分为以下五步:

1. Exchangis 依赖环境准备
2. Exchangis 安装部署
3. DSS ExchangisAppConn 安装部署
4. Linkis Sqoop 引擎安装部署
5. Linkis DataX 引擎安装部署

## 1. 
Exchangis 依赖环境准备 + +#### 1.1 基础软件安装 + +| 依赖的组件 | 是否必装 | 安装直通车 | +|---------------------------------------| ------ | --------------- | +| JDK (1.8.0_141) | 必装 | [如何安装JDK](https://www.oracle.com/java/technologies/downloads/) | +| MySQL (5.5+) | 必装 | [如何安装mysql](https://www.runoob.com/mysql/mysql-install.html) | +| Hadoop(3.3.4,Hadoop 其他版本需自行编译 Linkis) | 必装 | [Hadoop部署](https://www.apache.org/dyn/closer.cgi/hadoop/common/hadoop-3.3.4/hadoop-3.3.4.tar.gz) | +| Hive(2.3.3,Hive 其他版本需自行编译 Linkis) | 必装 | [Hive快速安装](https://www.apache.org/dyn/closer.cgi/hive/) | +| SQOOP (1.4.6) | 必装 | [如何安装Sqoop](https://sqoop.apache.org/docs/1.4.6/SqoopUserGuide.html) | +| DSS1.1.2 | 必装 | [如何安装DSS](https://github.com/WeBankFinTech/DataSphereStudio-Doc/tree/main/zh_CN/%E5%AE%89%E8%A3%85%E9%83%A8%E7%BD%B2) | +| Linkis1.4.0 | 必装 | [如何安装Linkis](https://linkis.apache.org/zh-CN/docs/1.4.0/deployment/deploy-quick) | +| Nginx | 必装 | [如何安装 Nginx](http://nginx.org/) | + +底层依赖组件检查 + +注意:一定要使用最新版的dss1.1.2,及linkis1.4.0。 + +[linkis1.4.0代码地址](https://github.com/apache/incubator-linkis/tree/release-1.4.0) + +[DSS1.1.2代码地址 ](https://github.com/WeBankFinTech/DataSphereStudio) + +datasource启用 + +linkis的启动脚本中默认不会启动数据源相关的服务两个服务(ps-data-source-manager,ps-metadatamanager), 如果想使用数据源服务,可以通过如下方式进行开启: 修改$LINKIS_CONF_DIR/linkis-env.sh中的 export ENABLE_METADATA_MANAGER=true值为true。 通过linkis-start-all.sh/linkis-stop-all.sh 进行服务启停时,会进行数据源服务的启动与停止。关于数据源更多详情可参考[数据源功能使用](https://linkis.apache.org/zh-CN/docs/1.4.0/user-guide/datasource-manual) + +#### 1.2 创建 Linux 用户 + +请保持 Exchangis 的部署用户与 Linkis 的部署用户一致,例如:部署用户是hadoop账号。 + +#### 1.3 在linkis中配置授权认证 + +###### 1)为exchangis加数据源认证的token + +通过在linkis数据库中执行以下语句,为Exchangis分配专属token: + +``` +INSERT INTO `linkis_mg_gateway_auth_token`(`token_name`,`legal_users`,`legal_hosts`,`business_owner`,`create_time`,`update_time`,`elapse_day`,`update_by`) VALUES ('EXCHANGIS-AUTH','*','*','BDP',curdate(),curdate(),-1,'LINKIS'); +``` + +###### 2)为exchangis加hive数据源的认证 + +通过在linkis数据库中执行以下sql语句,插入hive数据源环境配置,注意,执行前需要修改语句中的\$\{HIVE_METADATA_IP\}和\$\{HIVE_METADATA_PORT\},例如\$\{HIVE_METADATA_IP\}=127.0.0.1,\$\{HIVE_METADATA_PORT\}=3306: + +``` +INSERT INTO `linkis_ps_dm_datasource_env` (`env_name`, `env_desc`, `datasource_type_id`, `parameter`, `create_time`, `create_user`, `modify_time`, `modify_user`) VALUES ('开发环境SIT', '开发环境SIT', 4, '{"uris":"thrift://${HIVE_METADATA_IP}:${HIVE_METADATA_PORT}", "hadoopConf":{"hive.metastore.execute.setugi":"true"}}', now(), NULL, now(), NULL); +INSERT INTO `linkis_ps_dm_datasource_env` (`env_name`, `env_desc`, `datasource_type_id`, `parameter`, `create_time`, `create_user`, `modify_time`, `modify_user`) VALUES ('开发环境UAT', '开发环境UAT', 4, '{"uris":"thrift://${HIVE_METADATA_IP}:${HIVE_METADATA_PORT}", "hadoopConf":{"hive.metastore.execute.setugi":"true"}}', now(), NULL, now(), NULL); +``` + +如果hive数据源在部署时设置了需要进行kerberos方式认证,则需要在linkis_ps_dm_datasource_env表的parameter字段指定一个参数keyTab,其值的获取方式可见:[在Linkis中设置并认证hive数据源](https://linkis.apache.org/zh-CN/docs/latest/auth/token) + +#### 1.4 底层依赖组件检查 + +**请确保 DSS1.1.2 与 Linkis1.4.0 基本可用,可在 DSS 前端界面执行 HiveQL 脚本,可正常创建并执行 DSS 工作流。** + + +## 2. 
Exchangis 安装部署

### 2.1 安装包准备

#### 2.1.1 下载二进制包

从 Exchangis 已发布的 release 中 [点击下载exchangis安装包](https://github.com/WeBankFinTech/Exchangis/releases),下载最新的安装包。

#### 2.1.2 编译打包

在项目的根目录下执行如下命令:

```shell script
 mvn clean install
```

编译成功后将会在项目的 `assembly-package/target` 目录下生成安装包。

### 2.2 解压安装包

执行以下命令进行解压:

```shell script
 tar -zxvf wedatasphere-exchangis-{VERSION}.tar.gz
```

解压出来后的目录结构如下:

```html
|-- config:一键安装部署参数配置目录
|-- db:数据库表初始化 SQL 目录
|-- packages:Exchangis 安装包目录
 |-- exchangis-extds:数据源扩展库
 |-- lib:库
|-- sbin:脚本存放目录
```

### 2.3 修改配置参数

```shell script
 vim config/config.sh
```

```shell script
#LINKIS_GATEWAY服务地址IP,用于查找linkis-mg-gateway服务
LINKIS_GATEWAY_HOST={IP}

#LINKIS_GATEWAY服务地址端口,用于查找linkis-mg-gateway服务
LINKIS_GATEWAY_PORT={PORT}

#Exchangis服务端口
EXCHANGIS_PORT={PORT}

#Eureka服务URL
EUREKA_URL=http://{IP:PORT}/eureka/
```

### 2.4 修改数据库配置

```shell script
 vim config/db.sh
```

```shell script
# 设置数据库的连接信息
# 包括IP地址、端口、用户名、密码和数据库名称
MYSQL_HOST={IP}
MYSQL_PORT={PORT}
MYSQL_USERNAME={username}
MYSQL_PASSWORD={password}
DATABASE={dbName}
```

### 2.5 安装和启动

#### 2.5.1 执行一键安装脚本

在sbin目录下执行 `install.sh` 脚本,完成一键安装部署:

```shell script
./install.sh
```

#### 2.5.2 安装步骤

该脚本为交互式安装,开始执行install.sh脚本后,安装步骤依次分为以下几步:

1. 初始化数据库表

当出现该提醒时:Do you want to initalize database with sql: [${SQL_SOURCE_PATH}]?

输入 `y` 初始化数据库表,输入 `n` 跳过数据库表初始化步骤。

#### 2.5.3 修改配置文件路径和日志文件路径

在sbin目录下的`env.properties`文件中,设置配置文件路径和日志文件路径:

```yaml
EXCHANGIS_CONF_PATH="/appcom/config/exchangis-config/background"
EXCHANGIS_LOG_PATH="/appcom/logs/exchangis/background"
MODULE_DEFAULT_PREFIX="dss-exchangis-main-"
MODULE_DEFAULT_SUFFIX="-dev"
```

EXCHANGIS_CONF_PATH为配置文件路径,EXCHANGIS_LOG_PATH为日志文件路径。若为以上配置,则作如下操作:

```shell
cd {EXCHANGIS_DEPLOY_PATH}
cp -r config /appcom/config/exchangis-config/background
mkdir -p /appcom/logs/exchangis/background
```

这样在服务启动时,将会使用对应路径下的配置文件,并将日志写到对应的路径下。

#### 2.5.4 启动服务

第一次启动时,可以在sbin目录下执行以下命令,启动 Exchangis Server:

```shell script
./daemon.sh start server
```

您也可以使用以下命令在sbin目录下完成 Exchangis Server 的重启:

```shell script
./daemon.sh restart server
```

执行完成启动脚本后,会出现以下提示,eureka地址也会在启动服务时在控制台打出:

![企业微信截图_16532930262583](../../../images/zh_CN/ch1/register_eureka.png)

### 2.6 查看服务是否启动成功

可以在Eureka界面查看服务启动成功情况,查看方法:

使用 http://${EUREKA_INSTALL_IP}:${EUREKA_INSTALL_PORT}, 建议在 Chrome 浏览器中打开,查看服务是否注册成功。

如下图所示:

![补充Eureka截图](../../../images/zh_CN/ch1/eureka_exchangis.png)

### 2.7 前端安装部署

#### 2.7.1 获取前端安装包

Exchangis 已默认提供了编译好的前端安装包,可直接下载使用:[点击下载前端安装包](https://github.com/WeBankFinTech/Exchangis/releases)

您也可以自行编译 Exchangis 前端,在 Exchangis 根目录下执行如下命令:

```shell script
 cd web
 npm i
 npm run build
```

从 `web/` 路径获取编译好的 exchangis-ui.zip 前端包。

获取到的前端包,您可以放在服务器上的任意位置,这里建议您与后端安装地址目录保持一致,在同一目录下放置并解压。

#### 2.7.2 前端安装部署

1. 
解压前端安装包 + +如您打算将 Exchangis 前端包部署到 `/appcom/Install/ExchangisInstall/exchangis-ui` 目录,请先将 `exchangis-ui.zip` 拷贝到该目录并执行解压,注意,**请在安装dss的机器上安装exchangis前端**: + +```shell script + # 请先将 Exchangis 前端包拷贝到 `/appcom/Install/ExchangisInstall` 目录 + cd /appcom/Install/ExchangisInstall + unzip exchangis-ui.zip +``` + +执行如下命令: + +```shell script + vim /etc/nginx/conf.d/exchangis.conf +``` + +``` + server { + listen {PORT}; # 访问端口 如果该端口被占用,则需要修改 + server_name localhost; + #charset koi8-r; + #access_log /var/log/nginx/host.access.log main; + location / { + root /appcom/Install/ExchangisInstall/exchangis-ui; # Exchangis 前端部署目录 + autoindex on; + } + + location /api { + proxy_pass http://{IP}:{PORT}; # 后端Linkis的地址,需要修改 + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header x_real_ipP $remote_addr; + proxy_set_header remote_addr $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_http_version 1.1; + proxy_connect_timeout 4s; + proxy_read_timeout 600s; + proxy_send_timeout 12s; + proxy_set_header Upgrade $http_upgrade; + proxy_set_header Connection upgrade; + } + + #error_page 404 /404.html; + # redirect server error pages to the static page /50x.html + # + error_page 500 502 503 504 /50x.html; + location = /50x.html { + root /usr/share/nginx/html; + } + } +``` + +#### 2.7.3 启动 nginx 及访问前端页面 + +配置完成之后,使用以下命令重新刷新 nginx 配置: + +```shell script + nginx -s reload +``` + +请通过 http://${EXCHANGIS_INSTALL_IP}:{EXCHANGIS_INSTALL_PORT}/#/projectManage 访问 Exchangis 前端页面,出现以下界面,说明exchangis安装前端成功,如果要真正试用exchangis,需要安装dss和linkis,通过dss进行免密登录,如下图所示: + +![image](https://user-images.githubusercontent.com/27387830/170417473-af0b4cbe-758e-4800-a58f-0972f83d87e6.png) + +## 3. DSS ExchangisAppConn 安装部署 + +如您想正常使用 Exchangis 前端,还需安装 DSS ExchangisAppConn 插件,请参考: [ExchangisAppConn 插件安装文档](docs/zh_CN/ch1/exchangis_appconn_deploy_cn.md) + +## 4. Linkis Sqoop 引擎安装部署 + +如您想正常执行 Exchangis 的Sqoop作业,还需安装 Linkis Sqoop 引擎,请参考: [Linkis Sqoop 引擎插件安装文档](docs/zh_CN/ch1/exchangis_sqoop_deploy_cn.md) + +## 5. Linkis DataX 引擎安装部署 + +如您想正常执行 Exchangis 的DataX作业,还需安装 Linkis DataX 引擎,请参考: [Linkis DataX 引擎插件安装文档](docs/zh_CN/ch1/exchangis_datax_deploy_cn.md) + +## 6. 
如何登录使用 Exchangis + +Exchangis 更多使用说明,请参考用户使用手册[Exchangis 用户手册](docs/zh_CN/ch1/exchangis_user_manual_cn.md) diff --git a/docs/zh_CN/ch1/exchangis_interface_cn.md b/docs/zh_CN/ch1/exchangis_interface_cn.md new file mode 100644 index 000000000..bf50d1f5f --- /dev/null +++ b/docs/zh_CN/ch1/exchangis_interface_cn.md @@ -0,0 +1,839 @@ +# Exchangis接口文档 + +## Exchangis数据源模块 + +### 1、获取数据源类型 + +接口描述:根据request的信息来获取数据源类型 + +请求URL:/dss/exchangis/main/datasources/type + +请求方式:GET + +请求参数: + +| 名称 | 类型 | 备注 | 是否必须 | 默认值 | 备注 | +| ------- | ------------------ | ---- | -------- | ------ | ----------- | +| request | HttpServletRequest | | 是 | / | request请求 | + +返回参数: + +| 名称 | 类型 | 是否必须 | 默认值 | 备注 | +| ------- | ------ | -------- | ------ | -------------------- | +| method | String | 是 | | 调用的方法(请求路径) | +| status | int | 是 | | 响应状态码 | +| message | String | 否 | | 响应的信息 | +| data | List | 是 | | 返回的数据 | + +### 2、查询数据源 + +接口描述:根据vo查询所需的数据源 + +请求URL:/dss/exchangis/main/datasources/query + +请求方式:GET、POST + +请求参数: + +| 名称 | 类型 | 是否必须 | 默认值 | 备注 | +| ------- | ------------------ | -------- | ------ | ----------- | +| request | HttpServletRequest | 是 | / | request请求 | +| vo | DataSourceQueryVO | 是 | / | | + +返回参数: + +| 名称 | 类型 | 是否必须 | 默认值 | 备注 | +| ------- | ------ | -------- | ------ | -------------------- | +| method | String | 是 | / | 调用的方法(请求路径) | +| status | int | 是 | / | 响应状态码 | +| message | String | 否 | / | 响应的信息 | +| data | List | 是 | / | 返回的数据 | + +### 3、查询数据源 + +接口描述:根据request信息查询数据源 + +请求URL:/dss/exchangis/main/datasources + +请求方式:GET + +请求参数: + +| 名称 | 类型 | 是否必须 | 默认值 | 备注 | +| -------- | ------------------ | -------- | ------ | ------------ | +| request | HttpServletRequest | 是 | / | request请求 | +| typeId | Long | 是 | / | 数据源类型ID | +| typeName | String | 是 | / | 数据源类型 | +| page | Integer | 是 | / | 页数 | +| size | Integer | 是 | / | 每页大小 | + +返回参数: + +| 名称 | 类型 | 是否必须 | 默认值 | 备注 | +| ------- | --------------------- | -------- | ------ | -------------------- | +| method | String | 是 | / | 调用的方法(请求路径) | +| status | int | 是 | / | 响应状态码 | +| message | String | 否 | / | 响应的信息 | +| data | List\ | 是 | / | 返回的数据 | + +### 4、查询数据源关键定义 + +接口描述:根据数据源类型ID查询数据源关键定义 + +请求URL:/dss/exchangis/main/datasources/types/{dataSourceTypeId}/keydefines + +请求方式:GET + +请求参数: + +| 名称 | 类型 | 是否必须 | 默认值 | 备注 | +| ---------------- | ------------------ | -------- | ------ | ------------ | +| request | HttpServletRequest | 是 | / | request请求 | +| dataSourceTypeId | Long | 是 | / | 数据源类型ID | + +返回参数: + +| 名称 | 类型 | 是否必须 | 默认值 | 备注 | +| ------- | ---------------------- | -------- | ------ | -------------------- | +| method | String | 是 | / | 调用的方法(请求路径) | +| status | int | 是 | / | 响应状态码 | +| message | String | 否 | / | 响应的信息 | +| data | List[Map[String, Any]] | 是 | / | 返回的数据 | + +### 5、获取数据源版本 + +接口描述:根据数据源ID获取数据源版本 + +请求URL:/dss/exchangis/main/datasources/{id}/versions + +请求方式:GET + +请求参数: + +| 名称 | 类型 | 是否必须 | 默认值 | 备注 | +| ------- | ------------------ | -------- | ------ | ----------- | +| request | HttpServletRequest | 是 | / | request请求 | +| id | Long | 是 | / | 数据源ID | + +返回参数: + +| 名称 | 类型 | 是否必须 | 默认值 | 备注 | +| ------- | ------------------------- | -------- | ------ | -------------------- | +| method | String | 是 | / | 调用的方法(请求路径) | +| status | int | 是 | / | 响应状态码 | +| message | String | 否 | / | 响应的信息 | +| data | List> | 是 | / | 返回的数据 | + +### 6、创建数据源 + +接口描述:根据数据源创建VO创建数据源 + +请求URL:/dss/exchangis/main/datasources + +请求方式:POST + +请求参数: + +| 名称 | 类型 | 是否必须 | 默认值 | 备注 | +| ------------------ | ------------------ | -------- | ------ | 
------------ | +| request | HttpServletRequest | 是 | / | request请求 | +| dataSourceCreateVO | DataSourceCreateVO | 是 | / | 数据源创建VO | +| bindingResult | BindingResult | 是 | / | 绑定结果 | + +返回参数: + +| 名称 | 类型 | 是否必须 | 默认值 | 备注 | +| ------- | ------ | -------- | ------ | -------------------- | +| method | String | 是 | / | 调用的方法(请求路径) | +| status | int | 是 | / | 响应状态码 | +| message | String | 否 | / | 响应的信息 | +| data | Long | 是 | / | 返回的数据 | + +### 7、获取数据源信息 + +接口描述:根据request信息、数据源ID和版本ID获取数据源信息 + +请求URL:/dss/exchangis/main/datasources/{id} + +请求方式:GET + +请求参数: + +| 名称 | 类型 | 是否必须 | 默认值 | 备注 | +| --------- | ------------------ | -------- | ------ | ----------- | +| request | HttpServletRequest | 是 | / | request请求 | +| id | Long | 是 | / | 数据源ID | +| versionId | String | 是 | / | 版本ID | + +返回参数: + +| 名称 | 类型 | 是否必须 | 默认值 | 备注 | +| ------- | ----------------- | -------- | ------ | -------------------- | +| method | String | 是 | / | 调用的方法(请求路径) | +| status | int | 是 | / | 响应状态码 | +| message | String | 否 | / | 响应的信息 | +| data | DataSourceItemDTO | 是 | / | 返回的数据 | + +### 8、获取数据源连接参数 + +接口描述:根据request信息和数据源ID获取数据源连接参数 + +请求URL:/dss/exchangis/main/datasources/{id}/connect_params + +请求方式:GET + +请求参数: + +| 名称 | 类型 | 是否必须 | 默认值 | 备注 | +| ------- | ------------------ | -------- | ------ | ----------- | +| request | HttpServletRequest | 是 | / | request请求 | +| Long | id | 是 | / | 数据源ID | + +返回参数: + +| 名称 | 类型 | 是否必须 | 默认值 | 备注 | +| ------- | -------------------------- | -------- | ------ | -------------------- | +| method | String | 是 | / | 调用的方法(请求路径) | +| status | int | 是 | / | 响应状态码 | +| message | String | 否 | / | 响应的信息 | +| data | java.util.Map[String, Any] | 是 | / | 返回的数据 | + +### 9、更新数据源 + +接口描述:根据request信息、数据源类型ID和数据源创建VO更新数据源 + +请求URL:/dss/exchangis/main/datasources/{id} + +请求方式:PUT + +请求参数: + +| 名称 | 类型 | 是否必须 | 默认值 | 备注 | +| ------------------ | ------------------ | -------- | ------ | ------------ | +| request | HttpServletRequest | 是 | / | request请求 | +| id | Long | 是 | / | 数据源类型ID | +| dataSourceCreateVO | DataSourceCreateVO | 是 | / | 数据源创建VO | +| bindingResult | BindingResult | 是 | / | 绑定结果 | + +返回参数: + +| 名称 | 类型 | 是否必须 | 默认值 | 备注 | +| ------- | ------ | -------- | ------ | -------------------- | +| method | String | 是 | / | 调用的方法(请求路径) | +| status | int | 是 | / | 响应状态码 | +| message | String | 否 | / | 响应的信息 | +| data | Map | 是 | / | 返回的数据 | + +### 10、发布数据源 + +接口描述:根据request请求、数据源ID和版本ID发布数据源 + +请求URL:/dss/exchangis/main/datasources/{id}/{version}/publish + +请求方式:PUT + +请求参数: + +| 名称 | 类型 | 是否必须 | 默认值 | 备注 | +| ------- | ------------------ | -------- | ------ | ----------- | +| request | HttpServletRequest | 是 | / | request请求 | +| id | Long | 是 | / | 数据源ID | +| version | Long | 是 | / | 版本ID | + +返回参数: + +| 名称 | 类型 | 是否必须 | 默认值 | 备注 | +| ------- | ------ | -------- | ------ | -------------------- | +| method | String | 是 | / | 调用的方法(请求路径) | +| status | int | 是 | / | 响应状态码 | +| message | String | 否 | / | 响应的信息 | +| data | Map | 是 | / | 返回的数据 | + +### 11、过期数据源 + +接口描述:根据request信息和数据源ID过期数据源 + +请求URL:/dss/exchangis/main/datasources/{id}/expire + +请求方式:PUT + +请求参数: + +| 名称 | 类型 | 是否必须 | 默认值 | 备注 | +| ------- | ------------------ | -------- | ------ | ----------- | +| request | HttpServletRequest | 是 | / | request请求 | +| id | Long | 是 | / | 数据源ID | + +返回参数: + +| 名称 | 类型 | 是否必须 | 默认值 | 备注 | +| ------- | ------ | -------- | ------ | -------------------- | +| method | String | 是 | / | 调用的方法(请求路径) | +| status | int | 是 | / | 响应状态码 | +| message | String | 否 | / | 响应的信息 | +| data | Map | 是 | / | 返回的数据 | + +### 
12、连接数据源 + +接口描述:根据request信息、数据源ID和版本ID连接数据源 + +请求URL:/dss/exchangis/main/datasources/{id}/{version}/connect + +请求方式:PUT + +请求参数: + +| 名称 | 类型 | 是否必须 | 默认值 | 备注 | +| ------- | ------------------ | -------- | ------ | ----------- | +| request | HttpServletRequest | 是 | / | request请求 | +| id | Long | 是 | / | 数据源ID | +| version | Long | 是 | / | 版本ID | + +返回参数: + +| 名称 | 类型 | 是否必须 | 默认值 | 备注 | +| ------- | ------ | -------- | ------ | -------------------- | +| method | String | 是 | / | 调用的方法(请求路径) | +| status | int | 是 | / | 响应状态码 | +| message | String | 否 | / | 响应的信息 | +| data | Map | 是 | / | 返回的数据 | + +### 13、连接数据源 + +接口描述:根据request信息和数据源ID连接数据源 + +请求URL:/dss/exchangis/main/datasources/op/connect + +请求方式:POST + +请求参数: + +| 名称 | 类型 | 是否必须 | 默认值 | 备注 | +| ------- | ------------------ | -------- | ------ | ----------- | +| request | HttpServletRequest | 是 | / | request请求 | +| id | Long | 是 | / | 数据源ID | + +返回参数: + +| 名称 | 类型 | 是否必须 | 默认值 | 备注 | +| ------- | ------ | -------- | ------ | -------------------- | +| method | String | 是 | / | 调用的方法(请求路径) | +| status | int | 是 | / | 响应状态码 | +| message | String | 否 | / | 响应的信息 | +| data | Map | 是 | / | 返回的数据 | + +### 14、删除数据源 + +接口描述:根据request信息和数据源类型ID连接数据源 + +请求URL:/dss/exchangis/main/datasources/{id} + +请求方式:DELETE + +请求参数: + +| 名称 | 类型 | 是否必须 | 默认值 | 备注 | +| ---------------- | ------------------ | -------- | ------ | ------------ | +| request | HttpServletRequest | 是 | / | request请求 | +| dataSourceTypeId | Long | 是 | / | 数据源类型ID | + +返回参数: + +| 名称 | 类型 | 是否必须 | 默认值 | 备注 | +| ------- | ------ | -------- | ------ | -------------------- | +| method | String | 是 | / | 调用的方法(请求路径) | +| status | int | 是 | / | 响应状态码 | +| message | String | 否 | / | 响应的信息 | +| data | Long | 是 | / | 返回的数据 | + +### 15、根据数据源ID查询数据源 + +接口描述:根据request信息、数据源类型和数据源ID查询数据源 + +请求URL:/dss/exchangis/main/datasources/{type}/{id}/dbs + +请求方式:GET + +请求参数: + +| 名称 | 类型 | 是否必须 | 默认值 | 备注 | +| ------- | ------------------ | -------- | ------ | ----------- | +| request | HttpServletRequest | 是 | / | request请求 | +| type | String | 是 | / | 数据源类型 | +| id | Long | 是 | / | 数据源ID | + +返回参数: + +| 名称 | 类型 | 是否必须 | 默认值 | 备注 | +| ------- | -------------- | -------- | ------ | -------------------- | +| method | String | 是 | / | 调用的方法(请求路径) | +| status | int | 是 | / | 响应状态码 | +| message | String | 否 | / | 响应的信息 | +| data | List\ | 是 | / | 返回的数据 | + +### 16、根据数据源ID和数据库获取数据表 + +接口描述:根据request信息、数据源类型、数据源ID和数据库获取数据表 + +请求URL:/dss/exchangis/main/datasources/{type}/{id}/dbs/{dbName}/tables + +请求方式:GET + +请求参数: + +| 名称 | 类型 | 是否必须 | 默认值 | 备注 | +| ------- | ------------------ | -------- | ------ | ----------- | +| request | HttpServletRequest | 是 | / | request请求 | +| type | String | 是 | / | 数据源类型 | +| id | Long | 是 | / | 数据源ID | +| dbName | String | 是 | / | 数据库名 | + +返回参数: + +| 名称 | 类型 | 是否必须 | 默认值 | 备注 | +| ------- | -------------- | -------- | ------ | -------------------- | +| method | String | 是 | / | 调用的方法(请求路径) | +| status | int | 是 | / | 响应状态码 | +| message | String | 否 | / | 响应的信息 | +| data | List\ | 是 | / | 返回的数据 | + +### 17、根据数据源ID、数据库和数据表获取表字段 + +接口描述:根据request信息、数据源类型、数据源ID和数据库名和数据表获取表字段 + +请求URL:/dss/exchangis/main/datasources/{type}/{id}/dbs/{dbName}/tables/{tableName}/fields + +请求方式:GET + +请求参数: + +| 名称 | 类型 | 是否必须 | 默认值 | 备注 | +| --------- | ------------------ | -------- | ------ | ----------- | +| request | HttpServletRequest | 是 | / | request请求 | +| type | String | 是 | / | 数据源类型 | +| id | Long | 是 | / | 数据源ID | +| dbName | String | 是 | / | 数据库名 | +| tableName | String | 是 | / | 数据表 | + +返回参数: 
+
+| Name | Type | Required | Default | Remarks |
+| ---- | ---- | ---- | ---- | ---- |
+| method | String | Yes | / | Invoked method (request path) |
+| status | int | Yes | / | Response status code |
+| message | String | No | / | Response message |
+| data | List | Yes | / | Returned data |
+
+### 18. Get the table field list information
+
+Interface description: gets the table field list information based on the request information and the field mapping VO
+
+Request URL: /dss/exchangis/main/datasources/fieldsmapping
+
+Request method: POST
+
+Request parameters:
+
+| Name | Type | Required | Default | Remarks |
+| ---- | ---- | ---- | ---- | ---- |
+| request | HttpServletRequest | Yes | / | HTTP request |
+| vo | FieldMappingVO | Yes | / | Field mapping VO |
+
+Response parameters:
+
+| Name | Type | Required | Default | Remarks |
+| ---- | ---- | ---- | ---- | ---- |
+| method | String | Yes | / | Invoked method (request path) |
+| status | int | Yes | / | Response status code |
+| message | String | No | / | Response message |
+| data | List | Yes | / | Returned data |
+
+### 19. Get parameters by data source type
+
+Interface description: gets the parameters based on the request information, the engine, the data source type and the file system path
+
+Request URL: /dss/exchangis/main/datasources/{engine}/{type}/params/ui
+
+Request method: POST
+
+Request parameters:
+
+| Name | Type | Required | Default | Remarks |
+| ---- | ---- | ---- | ---- | ---- |
+| request | HttpServletRequest | Yes | / | HTTP request |
+| engine | String | Yes | / | Engine |
+| type | String | Yes | / | Data source type |
+| dir | String | Yes | / | File system path |
+
+Response parameters:
+
+| Name | Type | Required | Default | Remarks |
+| ---- | ---- | ---- | ---- | ---- |
+| method | String | Yes | / | Invoked method (request path) |
+| status | int | Yes | / | Response status code |
+| message | String | No | / | Response message |
+| data | List | Yes | / | Returned data |
+
+
+
+## Exchangis job execution module
+
+### 1. Submit a configured job for execution
+
+Interface description: submits the ExchangisJob for execution; the backend returns a jobExecutionId
+
+Request URL: /api/rest_j/v1/exchangis/job/{id}/execute
+
+Request method: POST
+
+Request parameters:
+
+| Name | Type | Remarks | Required | Default |
+| ---- | ---- | ---- | ---- | ---- |
+| id | Long | ID of the ExchangisJob | Yes | None |
+| permitPartialFailures | boolean | Whether partial failures are allowed. If true, the whole job keeps running even if some subtasks fail, and the job status becomes Partial_Success when it finishes. This parameter is placed in the requestBody. | No | false |
+
+Response parameters:
+
+| Name | Type | Required | Default | Remarks |
+| ---- | ---- | ---- | ---- | ---- |
+| method | String | Yes | | Invoked method (request path) |
+| status | int | Yes | | Response status code |
+| message | String | No | | Response message |
+| data | Map | Yes | | Returned data |
+| jobExecutionId | String | Yes | | Execution ID of the job |
+
+Response example:
+
+```json
+{
+  "method": "/api/rest_j/v1/exchangis/job/{id}/execute",
+  "status": 0,
+  "message": "Submitted succeed(提交成功)!",
+  "data": {
+    "jobExecutionId": "555node1node2node3execId1"
+  }
+}
+```
+
+### 2. Get the execution status of a job
+
+Interface description: gets the status of a job by jobExecutionId
+
+Request URL: /api/rest_j/v1/exchangis/job/execution/{jobExecutionId}/status
+
+Request method: GET
+
+Request parameters:
+
+| Name | Type | Remarks | Required | Default |
+| ---- | ---- | ---- | ---- | ---- |
+| jobExecutionId | String | Execution ID of the ExchangisJob | Yes | None |
+
+Response parameters:
+
+| Name | Type | Required | Default | Remarks |
+| ---- | ---- | ---- | ---- | ---- |
+| method | String | Yes | | Invoked method (request path) |
+| status | int | Yes | | Response status code |
+| message | String | No | | Response message |
+| data | Map | Yes | | Returned data |
+| status | String | Yes | | Status of the executed job, one of: Inited, Scheduled, Running, WaitForRetry, Cancelled, Failed, Partial_Success, Success, Undefined, Timeout. Running means the job is in progress; Cancelled and every status after it are terminal states. |
+
+Response example:
+
+```json
+{
+  "method": "/api/rest_j/v1/exchangis/job/execution/{jobExecutionId}/status",
+  "status": 0,
+  "message": "Submitted succeed(提交成功)!",
+  "data": {
+    "status": "Running",
+    "progress": 0.1
+  }
+}
+```
+
+### 3. Get the task list of this job execution
+
+Interface description: gets the task list by jobExecutionId
+
+Precondition: the job must be in Running status before the task list can be obtained; otherwise the returned task list is empty
+
+Request URL: /api/rest_j/v1/exchangis/job/execution/{jobExecutionId}/tasklist
+
+Request method: GET
+
+Request parameters:
+
+| Name | Type | Remarks | Required | Default |
+| ---- | ---- | ---- | ---- | ---- |
+| jobExecutionId | String | Execution ID of the ExchangisJob | Yes | None |
+
+Response parameters:
+
+| Name | Type | Required | Default | Remarks |
+| ---- | ---- | ---- | ---- | ---- |
+| method | String | Yes | | Invoked method (request path) |
+| status | int | Yes | | Response status code |
+| message | String | No | | Response message |
+| data | Map | Yes | | Returned data |
+| tasks | List | Yes | | Task list. The job must be in Running status before the task list can be obtained; otherwise the returned task list is empty. Note: tasks have no Partial_Success status. |
+
+Response example:
+
+```json
+{
+  "method": "/api/rest_j/v1/exchangis/job/execution/{jobExecutionId}/tasklist",
+  "status": 0,
+  "message": "Submitted succeed(提交成功)!",
+  "data": {
+    "tasks": [
+      {
+        "taskId": 5,
+        "name": "test-1",
+        "status": "Inited",  // tasks have no Partial_Success status
+        "createTime": "2022-01-03 09:00:00",
+        "launchTime": null,
+        "lastUpdateTime": "2022-01-03 09:00:00",
+        "engineType": "sqoop",
+        "linkisJobId": null,
+        "linkisJobInfo": null,
+        "executeUser": "enjoyyin"
+      }
+    ]
+  }
+}
+```
+
+### 4. Get the execution progress of the job & tasks
+
+Interface description: gets the execution progress by jobExecutionId
+
+Precondition: the job must be in Running status before the progress of the task list can be obtained
+
+Request URL: /api/rest_j/v1/exchangis/job/execution/{jobExecutionId}/progress
+
+Request method: GET
+
+Request parameters:
+
+| Name | Type | Remarks | Required | Default |
+| ---- | ---- | ---- | ---- | ---- |
+| jobExecutionId | String | Execution ID of the ExchangisJob | Yes | None |
+
+Response parameters:
+
+| Name | Type | Required | Default | Remarks |
+| ---- | ---- | ---- | ---- | ---- |
+| method | String | Yes | | Invoked method (request path) |
+| status | int | Yes | | Response status code |
+| message | String | No | | Response message |
+| data | Map | Yes | | Returned data |
+| job | Map | Yes | | Progress of the job and its task list. The job must be in Running status before the task list can be obtained; otherwise the returned task list is empty. |
+
+Response example:
+
+```json
+{
+  "method": "/api/rest_j/v1/exchangis/job/execution/{jobExecutionId}/progress",
+  "status": 0,
+  "message": "Submitted succeed(提交成功)!",
+  "data": {
+    "job": {
+      "status": "Running",
+      "progress": 0.1,
+      "tasks": {
+        "running": [
+          {
+            "taskId": 5,
+            "name": "test-1",
+            "status": "Running",
+            "progress": 0.1
+          }
+        ],
+        "Inited": [
+          {
+            "taskId": 5,
+            "name": "test-1",
+            "status": "Inited",
+            "progress": 0.1
+          }
+        ],
+        "Scheduled": [],
+        "Success": [],
+        "Failed": [],  // if any task is Failed, the job fails directly
+        "WaitForRetry": [],
+        "Cancelled": [],  // if any task is Cancelled, the job fails directly
+        "Undefined": [],  // if any task is Undefined, the job fails directly
+        "Timeout": []
+      }
+    }
+  }
+}
+```
+
+### 5. Get the runtime metrics of a task
+
+Interface description: gets the runtime metrics of a task by jobExecutionId and taskId
+
+Precondition: the task must be in Running status before its metrics can be obtained; otherwise the returned content is empty
+
+Request URL: /api/rest_j/v1/exchangis/task/execution/{taskId}/metrics
+
+Request method: POST
+
+Request parameters:
+
+| Name | Type | Remarks | Required | Default |
+| ---- | ---- | ---- | ---- | ---- |
+| jobExecutionId | String | Execution ID of the ExchangisJob, placed in the requestBody | Yes | None |
+| taskId | String | Execution ID of the task, placed in the URI | Yes | None |
+
+Response parameters:
+
+| Name | Type | Required | Default | Remarks |
+| ---- | ---- | ---- | ---- | ---- |
+| method | String | Yes | | Invoked method (request path) |
+| status | int | Yes | | Response status code |
+| message | String | No | | Response message |
+| data | Map | Yes | | Returned data |
+| task | Map | Yes | | Task metrics. The task must be in Running status before its metrics can be obtained. |
+
+Response example:
+
+```json
+{
+  "method": "/api/rest_j/v1/exchangis/task/execution/{taskId}/metrics",
+  "status": 0,
+  "message": "Submitted succeed(提交成功)!",
+  "data": {
+    "task": {
+      "taskId": 5,
+      "name": "test-1",
+      "status": "running",
+      "metrics": {
+        "resourceUsed": {
+          "cpu": 10,    // unit: vcores
+          "memory": 20  // unit: GB
+        },
+        "traffic": {
+          "source": "mysql",
+          "sink": "hive",
+          "flow": 100   // unit: Records/S
+        },
+        "indicator": {
+          "exchangedRecords": 109345,  // unit: Records
+          "errorRecords": 5,
+          "ignoredRecords": 5
+        }
+      }
+    }
+  }
+}
+```
+
+### 6. Get the real-time logs of a job
+
+Interface description: gets the real-time logs of a job by jobExecutionId
+
+Request URL: /api/rest_j/v1/exchangis/job/execution/{jobExecutionId}/log?fromLine=&pageSize=&ignoreKeywords=&onlyKeywords=&lastRows=
+
+Request method: GET
+
+Request parameters:
+
+| Name | Type | Remarks | Required | Default |
+| ---- | ---- | ---- | ---- | ---- |
+| jobExecutionId | String | Execution ID of the ExchangisJob | Yes | None |
+| fromLine | int | Starting line of the read | No | 0 |
+| pageSize | int | Number of log lines to read this time | No | 100 |
+| ignoreKeywords | String | Ignore the lines containing these keywords; separate multiple keywords with English commas | No | None |
+| onlyKeywords | String | Keep only the lines containing these keywords; separate multiple keywords with English commas | No | None |
+| lastRows | int | Read only the last N lines of the log, similar to tail -f log. When this parameter is greater than 0, all the parameters above are ignored. | No | None |
+
+Response parameters:
+
+| Name | Type | Required | Default | Remarks |
+| ---- | ---- | ---- | ---- | ---- |
+| method | String | Yes | | Invoked method (request path) |
+| status | int | Yes | | Response status code |
+| message | String | No | | Response message |
+| data | Map | Yes | | Returned data |
+| endLine | int | Yes | | Ending line number of this read; the next read can continue from endLine + 1 |
+| isEnd | boolean | | | Whether the whole log has been read |
+| logs | List | Yes | | Returned job execution logs |
+
+Response example:
+
+```json
+{
+  "method": "/api/rest_j/v1/exchangis/job/execution/{jobExecutionId}/log",
+  "status": 0,
+  "message": "Submitted succeed(提交成功)!",
+  "data": {
+    "endLine": 99,   // ending line number of this read; the next read can continue from endLine + 1
+    "isEnd": false,  // whether the whole log has been read
+    "logs": {
+      "all": "",
+      "error": "",
+      "warn": "",
+      "info": ""
+    }
+  }
+}
+```
+
+### 7. Get the real-time logs of a task
+
+Interface description: gets the real-time logs of a task by jobExecutionId and taskId
+
+Request URL: /api/rest_j/v1/exchangis/task/execution/{taskId}/log?jobExecutionId=&fromLine=&pageSize=&ignoreKeywords=&onlyKeywords=&lastRows=
+
+Request method: GET
+
+Request parameters:
+
+| Name | Type | Remarks | Required | Default |
+| ---- | ---- | ---- | ---- | ---- |
+| taskId | String | Execution ID of the task | Yes | None |
+| jobExecutionId | String | Execution ID of the ExchangisJob | Yes | None |
+| fromLine | int | Starting line of the read | No | 0 |
+| pageSize | int | Number of log lines to read this time | No | 100 |
+| ignoreKeywords | String | Ignore the lines containing these keywords; separate multiple keywords with English commas | No | None |
+| onlyKeywords | String | Keep only the lines containing these keywords; separate multiple keywords with English commas | No | None |
+| lastRows | int | Read only the last N lines of the log, similar to tail -f log. When this parameter is greater than 0, all the parameters above are ignored. | No | None |
+
+Response parameters:
+
+| Name | Type | Required | Default | Remarks |
+| ---- | ---- | ---- | ---- | ---- |
+| method | String | Yes | | Invoked method (request path) |
+| status | int | Yes | | Response status code |
+| message | String | No | | Response message |
+| data | Map | Yes | | Returned data |
+| endLine | int | Yes | | Ending line number of this read; the next read can continue from endLine + 1 |
+| isEnd | boolean | | | Whether the whole log has been read |
+| logs | List | Yes | | Returned task execution logs |
+
+Response example:
+
+```json
+{
+  "method": "/api/rest_j/v1/exchangis/task/execution/{taskId}/log",
+  "status": 0,
+  "message": "Submitted succeed(提交成功)!",
+  "data": {
+    "endLine": 99,   // ending line number of this read; the next read can continue from endLine + 1
+    "isEnd": false,  // whether the whole log has been read
+    "logs": {
+      "all": "",
+      "error": "",
+      "warn": "",
+      "info": ""
+    }
+  }
+}
+```
+
diff --git a/docs/zh_CN/ch1/exchangis_job_execute_cn.md b/docs/zh_CN/ch1/exchangis_job_execute_cn.md
new file mode 100644
index 000000000..7fd147778
--- /dev/null
+++ b/docs/zh_CN/ch1/exchangis_job_execute_cn.md
@@ -0,0 +1,204 @@
+# Detailed design document of the Exchangis synchronization job execution module
+
+## 1. Overall flow chart
+
+ ![img](../../../images/zh_CN/ch1/job_overall.png)
+
+Figure 1-1 Overall flow chart
+
+
+Note:
+
+1. If the user directly submits, through a REST client, a piece of synchronization task JSON to be executed, step two is not needed; the JSON can be handed to the TaskGenerator directly.
+
+2. Each submission from the frontend or the REST client produces a jobExecutionId that is returned to the caller; afterwards, the frontend or REST client uses the jobExecutionId to query the execution status of the synchronization job.
+
+3. The jobExecutionId should preferably be generated and returned as soon as the user submits, i.e. the TaskGenerator should run asynchronously. In general the TaskGenerator may take from a few seconds to several minutes (depending on the number of subJobs), so if the jobExecutionId were only returned after the TaskGenerator finished, the frontend request would very likely time out.
+
+4. The reason each submission from the frontend or REST client produces a new jobExecutionId is to support repeated submission of the same ExchangisJob. Hence the JobServer, in principle, does not check that only one instance of an ExchangisJob is executing at a time, but the web frontend should guarantee that, in the same browser, only one instance of the same ExchangisJob is executing at any given moment.
+
+## 2. Frontend-backend interaction
+
+![img](../../../images/zh_CN/ch1/job_frontend_backend.png)
+
+Figure 2-1 Frontend-backend interaction
+
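+
+To make the asynchronous generation described in the notes above concrete, here is a minimal, hypothetical Java sketch of the submission path: the class names, DAO calls and thread pool are illustrative assumptions, not the actual Exchangis implementation.
+
+```java
+import java.util.UUID;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+
+// Hypothetical sketch: return the jobExecutionId immediately and generate tasks asynchronously.
+public class JobExecuteService {
+
+    private final ExecutorService generatorPool = Executors.newFixedThreadPool(4);
+
+    /** Persists an "Inited" execution record, then schedules task generation asynchronously. */
+    public String executeJob(long jobId, String jobContentJson) {
+        String jobExecutionId = UUID.randomUUID().toString();
+        // 1. Persist the execution record with status "Inited" (illustrative DAO call)
+        saveExecution(jobExecutionId, jobId, "Inited");
+        // 2. Hand the potentially slow TaskGenerator work to a background thread,
+        //    so the caller does not wait for the subJob translation
+        generatorPool.submit(() -> generateTasks(jobExecutionId, jobContentJson));
+        // 3. Return the execution credential right away
+        return jobExecutionId;
+    }
+
+    private void saveExecution(String jobExecutionId, long jobId, String status) {
+        // illustrative stub: insert a row keyed by jobExecutionId
+    }
+
+    private void generateTasks(String jobExecutionId, String jobContentJson) {
+        // illustrative stub: translate every subJob into an ExchangisTask and write them to the DB
+    }
+}
+```
+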
+
+### 1. Why jobExecutionId is necessary
+
+Considering the scenario where a REST client directly submits a piece of synchronization task JSON to be executed, and to support repeated submission of the same ExchangisJob, it is necessary to generate a jobExecutionId for every submission.
+
+The jobExecutionId is the execution credential of an ExchangisJob and is stored in the database; all subsequent requests about this execution of the ExchangisJob must carry the jobExecutionId.
+
+### 2. Why the TaskGenerator must be asynchronous
+
+Consider the scenario where the client submits a job but crashes before Exchangis has had time to return the jobExecutionId. In this case, since the jobExecutionId was never printed in the client's log, the submitting user may believe the job was not submitted successfully, which may lead to data inconsistency. Moreover, the TaskGenerator may take a long time to process an ExchangisJob (depending on the number of subJobs), so if the jobExecutionId were only returned after the TaskGenerator finished, the frontend request would very likely time out.
+
+Therefore, as soon as the JobServer receives a job execution request, it should generate a jobExecutionId and create an execution record for this ExchangisJob in the database with the status set to Inited; once the record has been persisted successfully, it should create the TaskGenerator task asynchronously and return the jobExecutionId immediately.
+
+### 3. Statelessness of the JobServer
+
+This section discusses whether the JobServer is stateless, i.e. whether, once the frontend holds a jobExecutionId, it can obtain the execution data it wants from any JobServer instance.
+
+Since the JobServer keeps no special information in memory, and the execution status, progress and metrics of an ExchangisJob are all stored in the database, when a frontend request arrives it only needs to pull the relevant data from the database.
+
+So the JobServer is stateless.
+
+### 4. Multi-tenancy
+
+For multi-tenancy, we can split JobGenerator and JobExecution: the JobGenerator receives, in a distributed way, the job execution requests submitted by the frontend/REST clients, generates the task sets and stores them in the database; this microservice can be shared by all tenants. The JobExecution, on the other hand, can be partitioned by tenant, so that tenants do not affect each other during execution.
+
+### 5. High availability
+
+The TaskChooseRuler of the JobExecution scans all ExchangisTasks in the database; if an ExchangisTask has had no status update for a period of time, it will be taken over by a new JobServer.
+
+How is it taken over?
+
+The simple way is for all the other surviving JobServers to load this ExchangisTask into their TaskScheduler at the same time; since they only update progress, status and metrics, concurrent updates do not affect the task.
+
+The complex way requires adding a column to the ExchangisTask table that identifies the JobServer executing this ExchangisJob, and then letting multiple JobServers compete for ownership of the ExchangisTask; as this scheme is rather complex, it is not considered for now.
+
+## 3. Frontend interaction details
+
+### 1. Submission
+
+Before execution, the page looks as follows:
+
+Since the execution interface (link to the submit interface to be attached) must carry a jobId, the job needs to be saved before it is actually submitted for execution, and basic checks are performed before submission: if there is not a single subtask, or the job failed to save, it cannot be submitted for execution.
+
+![img](../../../images/zh_CN/ch1/job_frontend_1.png)
+
+Figure 3-1 Job submission
+
+
+After clicking Execute, the page looks as follows:
+
+Note that a job information console pops up at this point, showing the running status by default, i.e. the overall progress and the progress of all subtasks.
+
+The frontend uses two interfaces here: first, the [execute interface] is used to submit the ExchangisJob, and the backend returns a jobExecutionId; second, the [get job progress] interface is called with the jobExecutionId to obtain the progress information of the job & all its tasks, which is rendered as the progress view on the page below.
+
+![img](../../../images/zh_CN/ch1/job_frontend_2.png)
+
+Figure 3-2 Job execution
+
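+
+As a rough illustration of the two interfaces mentioned above, the hypothetical Java 11 snippet below submits a job and then polls its progress. The gateway address, authentication and JSON handling are placeholder assumptions; a real client would use a proper JSON library and whatever authentication the Linkis gateway requires.
+
+```java
+import java.net.URI;
+import java.net.http.HttpClient;
+import java.net.http.HttpRequest;
+import java.net.http.HttpResponse;
+
+// Hypothetical client: submit an ExchangisJob, then poll its progress until a terminal status.
+public class JobProgressPoller {
+
+    private static final String GATEWAY = "http://linkis-gateway:9001"; // placeholder address
+
+    public static void main(String[] args) throws Exception {
+        HttpClient client = HttpClient.newHttpClient();
+        long jobId = 1L; // placeholder job id
+
+        // 1. Execute interface: POST /api/rest_j/v1/exchangis/job/{id}/execute
+        HttpRequest execute = HttpRequest.newBuilder()
+                .uri(URI.create(GATEWAY + "/api/rest_j/v1/exchangis/job/" + jobId + "/execute"))
+                .header("Content-Type", "application/json")
+                .POST(HttpRequest.BodyPublishers.ofString("{\"permitPartialFailures\": false}"))
+                .build();
+        String submitBody = client.send(execute, HttpResponse.BodyHandlers.ofString()).body();
+        String jobExecutionId = extractString(submitBody, "jobExecutionId");
+
+        // 2. Progress interface: GET /api/rest_j/v1/exchangis/job/execution/{jobExecutionId}/progress
+        while (true) {
+            HttpRequest progress = HttpRequest.newBuilder()
+                    .uri(URI.create(GATEWAY + "/api/rest_j/v1/exchangis/job/execution/"
+                            + jobExecutionId + "/progress"))
+                    .GET().build();
+            String body = client.send(progress, HttpResponse.BodyHandlers.ofString()).body();
+            System.out.println(body);
+            // Naive terminal check on the job status string; parse data.job.status properly in practice
+            if (body.contains("\"status\": \"Success\"") || body.contains("\"status\": \"Failed\"")
+                    || body.contains("\"status\": \"Cancelled\"")) {
+                break;
+            }
+            Thread.sleep(3000); // poll every 3 seconds
+        }
+    }
+
+    /** Naive field extraction; use a real JSON library in practice. */
+    private static String extractString(String json, String field) {
+        int i = json.indexOf("\"" + field + "\"");
+        int start = json.indexOf('"', json.indexOf(':', i)) + 1;
+        return json.substring(start, json.indexOf('"', start));
+    }
+}
+```
+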
+
+### 2. Running status of a subtask
+
+When the user clicks a running or finished subjob, the frontend calls the backend [get task metrics] interface, using jobExecutionId & taskId to obtain the task metrics, and renders the following page:
+
+![1655260735321](../../../images/zh_CN/ch1/job_frontend_3.png)
+
+Figure 3-3 Subtask running status
+
+
+It mainly shows the resource usage, the traffic and the core indicators.
+
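+
+A small hypothetical sketch of that metrics call follows; note that, per the interface documentation above, the taskId goes into the URI while the jobExecutionId goes into the request body. The client wiring is an assumption, not the actual frontend code.
+
+```java
+import java.net.URI;
+import java.net.http.HttpClient;
+import java.net.http.HttpRequest;
+import java.net.http.HttpResponse;
+
+// Hypothetical call: POST /api/rest_j/v1/exchangis/task/execution/{taskId}/metrics
+public class TaskMetricsClient {
+
+    public static String fetchMetrics(HttpClient client, String gateway,
+                                      String taskId, String jobExecutionId) throws Exception {
+        // taskId is placed in the URI, jobExecutionId in the request body
+        HttpRequest request = HttpRequest.newBuilder()
+                .uri(URI.create(gateway + "/api/rest_j/v1/exchangis/task/execution/" + taskId + "/metrics"))
+                .header("Content-Type", "application/json")
+                .POST(HttpRequest.BodyPublishers.ofString(
+                        "{\"jobExecutionId\": \"" + jobExecutionId + "\"}"))
+                .build();
+        return client.send(request, HttpResponse.BodyHandlers.ofString()).body();
+    }
+}
+```
+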
+Figure 3-4 Subtask resource usage
+
+
+![1655260937221](../../../images/zh_CN/ch1/job_frontend_4.png)
+
+### 3. Real-time logs
+
+When the user clicks the "Logs" button at the bottom right as shown below, a "Real-time logs" tab appears in the console and shows the job's real-time log by default. When the "Logs" button of the running-status view is clicked, the run log of the whole job is shown first by default; the frontend then calls the [get job real-time logs] interface, fetching the job log by jobExecutionId and displaying it, as shown below:
+
+![img](../../../images/zh_CN/ch1/job_frontend_5.png)
+
+Figure 3-5 Real-time job logs
+
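+
+The endLine/isEnd contract of the log interfaces lends itself to tail-style incremental polling; the minimal Java sketch below assumes the endpoint described in the interface documentation above, with the JSON parsing left as illustrative stubs.
+
+```java
+import java.net.URI;
+import java.net.http.HttpClient;
+import java.net.http.HttpRequest;
+import java.net.http.HttpResponse;
+
+// Hypothetical incremental log reader for
+// GET /api/rest_j/v1/exchangis/job/execution/{jobExecutionId}/log?fromLine=&pageSize=
+public class LogTailer {
+
+    public static void tail(HttpClient client, String gateway, String jobExecutionId)
+            throws Exception {
+        int fromLine = 0;  // start from the beginning of the log
+        boolean end = false;
+        while (!end) {
+            HttpRequest request = HttpRequest.newBuilder()
+                    .uri(URI.create(gateway + "/api/rest_j/v1/exchangis/job/execution/"
+                            + jobExecutionId + "/log?fromLine=" + fromLine + "&pageSize=100"))
+                    .GET().build();
+            String body = client.send(request, HttpResponse.BodyHandlers.ofString()).body();
+            // A real client parses data.endLine, data.isEnd and data.logs from the JSON;
+            // the helpers below only stand in for that parsing.
+            fromLine = parseEndLine(body) + 1;  // continue from endLine + 1 next round
+            end = parseIsEnd(body);
+            printLogs(body);
+            Thread.sleep(2000);                 // poll every 2 seconds while the job runs
+        }
+    }
+
+    private static int parseEndLine(String json) { /* illustrative stub: parse data.endLine */ return 0; }
+    private static boolean parseIsEnd(String json) { /* illustrative stub: parse data.isEnd */ return true; }
+    private static void printLogs(String json) { /* illustrative stub: render data.logs */ }
+}
+```
+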
+
+As long as the user does not switch to another tab of the console, the frontend keeps polling the backend for the real-time log;
+
+the user can also pick a specific task in the select box to view that task's log, which triggers the [get task real-time logs] interface: the task log is fetched by jobExecutionId & taskId, and the latest log is polled continuously.
+
+If the user switches the select box, the previous log is no longer refreshed.
+
+Note that the backend also provides a [get the task list of this job execution] interface, which helps the frontend obtain all tasks for the contents of the select box. If the job is still in Inited or Scheduled status and has not yet turned Running, the task list cannot be fetched; so when the user opens the select box, a hint should be shown: "The job is still being scheduled; please view the real-time logs of the subtasks after the job turns Running."
+
+When the run finishes, if the status is successful, the tab switches back to the running-status tab; if the status is failed, based on the information returned by the [get job progress] interface, the page jumps by default to the log of the task containing the failed subJob, and when several tasks failed it automatically shows the log of the first failed task.
+
+## 4. Backend design details
+
+### 1. Database table design
+
+![img](../../../images/zh_CN/ch1/job_backend_datasource_design.png)
+
+Figure 4-1 Database table design
+
+
+### 2. Interface documentation
+
+See: the interface documentation of the Exchangis job execution module
+
+### 3. Core modules & core class design
+
+#### 3.1 The UML class diagram of the entity beans is as follows:
+
+![img](../../../images/zh_CN/ch1/job_backend_uml_1.png)
+
+
+
+Figure 4-2 UML class diagram of the entity beans
+
+
+Note: every non-interface class whose name ends with Entity is persisted in the database and exists as a table.
+
+#### 3.2 The UML class diagram of the TaskGenerator is as follows:
+
+![img](../../../images/zh_CN/ch1/job_backend_uml_2.png)
+
+Figure 4-3 UML class diagram of the TaskGenerator
+
+
+The TaskGenerator is only responsible for translating a job's JSON into a task set that can be submitted to Linkis for execution (i.e. translating all subJobs under the job into a set of ExchangisTasks); after the translation it writes the result into the DB.
+
+Note that the TaskGenerator runs asynchronously: the service layer wraps a JobGenerationSchedulerTask and submits it asynchronously to the TaskExecution for execution.
+
+#### 3.3 The UML class diagram of the TaskExecution system is as follows:
+
+![img](../../../images/zh_CN/ch1/job_backend_uml_3.png)
+
+Figure 4-4 UML class diagram of the TaskExecution system
+
+
+1. The TaskExecution consists mainly of TaskConsumer, TaskManager, TaskScheduler and TaskSchedulerLoadBalancer.
+
+2. The TaskManager mainly manages all ExchangisTasks in Running status on this JobServer;
+
+3. The TaskConsumer consists of several thread groups with different responsibilities, such as NewTaskConsumer and ReceiveTaskConsumer. The NewTaskConsumer fetches from the database the list of executable ExchangisTasks in Inited status (which may contain the ExchangisTasks of multiple subJobs of multiple jobs) and submits them to the TaskScheduler in batches according to the TaskScheduler's actual load; before submitting, it updates the task's status in the database to Scheduled (a sketch of this loop follows Figure 4-5). The ReceiveTaskConsumer takes over an ExchangisTask that is already running but whose status and metrics have not been updated for some time, putting it into the TaskManager to wait for the StatusUpdateSchedulerTask and MetricsUpdateSchedulerTask to update its status. The TaskChooseRuler is a rule component that helps the TaskConsumer filter and choose the ExchangisTasks it needs, with rules such as whether an ExchangisTask can be taken over, priority strategies, and so on.
+
+4. The TaskScheduler is a thread pool that schedules all kinds of SchedulerTasks. The SubmitSchedulerTask submits tasks to Linkis asynchronously and writes the key information returned by Linkis, such as the ID and ECM information, into the DB. The StatusUpdateSchedulerTask and MetricsUpdateSchedulerTask are resident polling tasks that never stop: they keep taking the SchedulerTasks in Running status from the TaskManager, periodically request the status and metrics from Linkis, and update the database.
+
+5. The TaskSchedulerLoadBalancer is a load balancer that monitors, in real time, the polling of the runningTasks inside the TaskManager and the load of the TaskScheduler and the server, and decides how many StatusUpdateSchedulerTask and MetricsUpdateSchedulerTask instances the TaskScheduler eventually creates to poll the status and metrics of all tasks in Running status.
+
+#### 3.4 The UML class diagram of the TaskScheduler system is as follows:
+
+![img](../../../images/zh_CN/ch1/job_backend_uml_4.png)
+
+Figure 4-5 UML class diagram of the TaskScheduler system
+
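+
+As referenced in item 3 above, here is a minimal, hypothetical sketch of the NewTaskConsumer fetch-and-submit loop; every class and method name is illustrative and merely stands in for the corresponding pieces of the design, it is not the actual Exchangis code.
+
+```java
+import java.util.List;
+
+// Hypothetical consumer loop: fetch Inited tasks, mark them Scheduled, submit to the scheduler.
+public class NewTaskConsumer implements Runnable {
+
+    private final TaskDao taskDao;           // illustrative DAO
+    private final TaskScheduler scheduler;   // illustrative scheduler facade
+
+    public NewTaskConsumer(TaskDao taskDao, TaskScheduler scheduler) {
+        this.taskDao = taskDao;
+        this.scheduler = scheduler;
+    }
+
+    @Override
+    public void run() {
+        while (!Thread.currentThread().isInterrupted()) {
+            // 1. Fetch executable tasks in Inited status, bounded by the current scheduler load
+            int batchSize = scheduler.availableSlots();
+            List<ExchangisTask> tasks = taskDao.fetchInitedTasks(batchSize);
+            for (ExchangisTask task : tasks) {
+                // 2. Mark Scheduled in the DB *before* handing the task to the scheduler
+                taskDao.updateStatus(task.getId(), "Scheduled");
+                // 3. Submit for asynchronous execution (the SubmitSchedulerTask in the design)
+                scheduler.submit(task);
+            }
+            sleepQuietly(2000); // small pause between fetch rounds
+        }
+    }
+
+    private static void sleepQuietly(long millis) {
+        try { Thread.sleep(millis); } catch (InterruptedException e) {
+            Thread.currentThread().interrupt();
+        }
+    }
+
+    // Illustrative collaborator interfaces
+    interface TaskDao {
+        List<ExchangisTask> fetchInitedTasks(int limit);
+        void updateStatus(long id, String status);
+    }
+    interface TaskScheduler {
+        int availableSlots();
+        void submit(ExchangisTask task);
+    }
+    static class ExchangisTask { long getId() { return 0L; } }
+}
+```
+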
+
+The TaskScheduler is implemented on top of the linkis-scheduler module.
+
+#### 3.5 The UML class diagram of the Listener system is as follows:
+
+![img](../../../images/zh_CN/ch1/job_backend_uml_5.png)
+
+Figure 4-6 UML class diagram of the Listener system
+
+
+The Listener system is the core mechanism that ensures every piece of information gets updated into the database; the classes implementing these listeners should all be the various service classes.
+
diff --git a/docs/zh_CN/ch1/exchangis_sqoop_deploy_cn.md b/docs/zh_CN/ch1/exchangis_sqoop_deploy_cn.md
new file mode 100644
index 000000000..f4de28ac2
--- /dev/null
+++ b/docs/zh_CN/ch1/exchangis_sqoop_deploy_cn.md
@@ -0,0 +1,82 @@
+# Sqoop engine usage document
+### Environment preparation
+The Sqoop engine is an indispensable component for executing Exchangis data synchronization tasks; data synchronization tasks can only run successfully after the Sqoop engine has been installed and deployed. Also make sure that sqoop is installed on the machine where it is deployed.
+
+Before installing and deploying the Sqoop engine, please install Exchangis and its related components according to the [Exchangis installation and deployment document](docs/zh_CN/ch1/exchangis_deploy_cn.md) and make sure the basic functions of the project are available.
+
+The Sqoop engine mainly depends on the Hadoop basic environment; if the node needs to deploy the Sqoop engine, the Hadoop client environment must be deployed on it.
+
+It is strongly recommended that, before executing Sqoop tasks, you first run a test task with native Sqoop on the node to check whether the node environment is healthy.
+
+| Environment variable | Content | Remarks |
+| :----: | :----: |-------|
+| JAVA_HOME | JDK installation path | Required |
+| HADOOP_HOME | Hadoop installation path | Required |
+| HADOOP_CONF_DIR | Hadoop configuration path | Required |
+| SQOOP_HOME | Sqoop installation path | Optional |
+| SQOOP_CONF_DIR | Sqoop configuration path | Optional |
+| HCAT_HOME | HCAT configuration path | Optional |
+| HBASE_HOME | HBASE configuration path | Optional |
+
+
+| Linkis system parameter | Description | Remarks |
+| --------------------------- | ------------------------------- | ------------------------------------------------------------ |
+| wds.linkis.hadoop.site.xml | Sets the location of the hadoop parameter files loaded by sqoop | Required; reference example: "/etc/hadoop/conf/core-site.xml;/etc/hadoop/conf/hdfs-site.xml;/etc/hadoop/conf/yarn-site.xml;/etc/hadoop/conf/mapred-site.xml" |
+| sqoop.fetch.status.interval | Sets the interval for fetching the sqoop execution status | Optional; the default value is 5s |
+### Installation package preparation
+#### 1) Download the binary package
+
+Exchangis 1.1.2 and Linkis 1.4.0 support the mainstream Sqoop versions 1.4.6 and 1.4.7; higher versions may require modifying parts of the code and recompiling.
+
+[Click to go to the Release page](https://github.com/WeBankFinTech/Exchangis/releases)
+
+#### 2) Compile and package
+If you want to develop and compile the sqoop engine yourself, the concrete compilation steps are as follows:
+
+1. Clone the Exchangis code
+
+2. Under the exchangis-plugins module, find the sqoop engine and compile sqoop separately, as follows
+```
+cd {EXCHANGIS_CODE_HOME}/exchangis-engines/engine-plugins/sqoop
+mvn clean install
+```
+You will then find the sqoop engine installation package under this path
+```
+{EXCHANGIS_CODE_HOME}/exchangis-engines/engine-plugins/sqoop/target/out
+```
+
+
+### Start deployment
+#### 1) Install the sqoop engine
+1. Take the packaged sqoop material package, whose directory structure is:
+
+```shell
+sqoop
+-- dist
+-- plugin
+```
+
+2. Place it into the following directory under the linkis installation path
+
+```shell
+cd {LINKIS_HOME}/linkis-engineconn-plugins
+```
+(Note: check which users have permission on the current sqoop engine; it is not necessarily root)
+
+
+#### 2) Restart the linkis-engineplugin service to make the sqoop engine take effect
+Every engine newly added to linkis takes effect only after the linkis engineplugin service is restarted; the restart script is ./sbin/linkis-daemon.sh under the linkis installation directory, and the concrete steps are
+```shell
+cd {LINKIS_INSTALL_HOME}/linkis/sbin/
+./linkis-daemon.sh restart cg-engineplugin
+```
+Once the service has started successfully, verify in the linkis database whether the sqoop engine has been installed
+
+```sql
+select * from linkis_cg_engine_conn_plugin_bml_resources where engine_conn_type='sqoop';
+```
+
+At this point, the sqoop installation and deployment is complete.
+
+For a more detailed introduction to engineplugin, see the article below.
+https://linkis.apache.org/zh-CN/docs/latest/deployment/install-engineconn
\ No newline at end of file
diff --git a/docs/zh_CN/ch1/exchangis_user_manual_cn.md b/docs/zh_CN/ch1/exchangis_user_manual_cn.md
new file mode 100644
index 000000000..005c9e189
--- /dev/null
+++ b/docs/zh_CN/ch1/exchangis_user_manual_cn.md
@@ -0,0 +1,302 @@
+# Exchangis 1.0 user manual
+
+## 1. Product introduction
+
+  This document is a quick-start guide for Exchangis 1.0 and covers its basic usage flow. Exchangis is a lightweight data exchange service platform that supports data synchronization between different types of data sources. The platform decomposes the data exchange flow and abstracts concepts such as data source, data exchange task and task scheduling, achieving visual management of the data synchronization process. During actual data transmission, the characteristics of multiple transmission components can be integrated, allowing horizontal extension of functionality.
+
+## 2. Log in to Exchangis 1.0
+
+  Exchangis 1.0 is currently part of the DSS **data exchange component**: after logging in to DSS, it is entered password-free from the component list. Therefore, before using Exchangis 1.0, please complete the basic deployment of DSS, Exchangis 1.0, Linkis and the other related components and make sure their functions are available; this document does not repeat the details, see the [exchangis deployment document](docs/zh_CN/ch1/exchangis_deploy_cn.md) and the [exchangis-appconn deployment document](docs/zh_CN/ch1/exchangis_appconn_deploy_cn.md)
+
+### 1. Log in to DSS
+
+  By default the system logs in to DSS with the Linux deployment user of Linkis; if the hadoop user was used to deploy Linkis and DSS, you can log in directly with account and password hadoop/hadoop. First open the web page at the frontend deployment address of DSS, then enter the account and password hadoop/hadoop to enter DSS
+
+### 2. Enter Exchangis
+
+  
Exchangis is entered through DSS; on the DSS page, click in order: **Home -> DSS application components -> Data exchange -> Enter Exchangis**
+
+![exchangis1.0_entrance](../../../images/zh_CN/ch1/exchangis1.0_entrance.png)
+Figure 2-1 Exchangis 1.0 entry
+
+
+## 3. Data source management
+
+  This module configures and manages data sources and is the starting step of a data synchronization job. Currently Exchangis 1.0 supports importing data between mysql and hive in both directions.
+The main functions of data sources are as follows:
+
+1. Create, edit and delete data sources;
+2. Search data sources by type and name, with support for quickly locating a data source;
+3. Data source connectivity test operation;
+4. Publishing and recording of historical data source versions.
+
+![datasource_list](../../../images/zh_CN/ch1/datasource_list.png)
+
+Figure 3-1 Data source management list
+
+
+### 1. Create a data source
+
+  Click **Create data source** and choose the data source you want to create; currently creating MySQL and Hive data sources is supported.
+
+![datasource_type](../../../images/zh_CN/ch1/datasource_type.png)
+
+Figure 3-2 Data source types
+
+
+  Choose to create a MySQL data source and fill in the configuration parameters; the fields marked with an asterisk are required. Make sure the host, port, username and password for connecting to the MySQL database are correct. The **connection parameters** are in JSON format and are used to set MySQL configuration options; once everything is filled in, you can run **Test connection**.
+
+![MySQL_datasource_config](../../../images/zh_CN/ch1/MySQL_datasource_config.png)
+
+Figure 3-3 MySQL data source configuration
+
+
+  Configuring a Hive data source is somewhat different from MySQL: for now, users cannot configure cluster parameters themselves in the UI. Cluster environments are configured centrally by the backend; users only need to choose the desired cluster environment and click Confirm to save.
+
+![Hive_datasource_config](../../../images/zh_CN/ch1/Hive_datasource_config.png)
+
+Figure 3-4 Hive data source configuration
+
+
+### 2. Data source functions
+
+  The data source management module provides a **Publish** function for configured data source versions; only published data sources can be used when configuring a data transfer task, otherwise a "not available" prompt is shown. Whenever a data source is edited again, it is treated as a new version, with the newest version in the first row. In the version list you can **view** the configuration of all historical data source versions, which you can consult whenever a rollback is needed.
+
+![datasource_func](../../../images/zh_CN/ch1/datasource_func.png)
+
+Figure 3-5 Data source publish function
+
+
+  The **Expire** function of data source management is used to indicate that this data source is gradually being replaced; please update the task configurations that use this data source in time, to avoid breaking the configured execution tasks by deleting the data source directly.
+![datasource_timelimit](../../../images/zh_CN/ch1/datasource_timelimit.png)
+
+Figure 3-6 Data source expire function
+
+
+## 4. Project management
+
+### 1. Project list
+
+  This module creates projects. In an actual data transfer scenario, one project can contain multiple transfer tasks, and different projects do not affect each other; ordinary users can only operate on the projects they created themselves.
+On the project management home page, projects can be managed, including **create**, **modify**, **delete** and **query/search**; modify and delete are only possible for projects created in Exchangis.
+
+![item_list](../../../images/zh_CN/ch1/item_list.png)
+
+Figure 4-1 Project list
+
+
+### 2. Task list
+
+Enter a project to see the task list under it.
+
+#### 1) Task management
+
+  In the task list, the created job data synchronization tasks can be managed; similar to projects, this includes **create**, **modify**, **delete** and **search**.
+
+![job_task_list](../../../images/zh_CN/ch1/job_task_list.png)
+
+Figure 4-2 Task list
+
+
+  Click **Create task** to choose the task type and execution engine; **currently only offline tasks and the SQOOP execution engine are supported**, with streaming tasks and the DataX engine to be supported in the future.
+
+![task_type_and_engine](../../../images/zh_CN/ch1/task_type_and_engine.png)
+
+Figure 4-3 Task type and engine configuration
+
+
+#### 2) Subtask management
+
+  Click **Add subtask** inside a task to add multiple subtasks, which support **modify, copy and delete**
+
+  **Copying tasks** is also supported; a copied subtask contains all the configuration information of its original subtask.
+
+![1656571126825](../../../images/zh_CN/ch1/sub_task_manage.png)
+
+Figure 4-4 Subtask management
+
+
+#### 3) Data synchronization task configuration and execution
+
+  Before this step, data sources must have been added in the **data source management module** so they can be selected, and they must have been **published**; the current Exchangis version only supports **MySQL data sources and Hive data sources**.
+
+  Data synchronization task configuration and execution is the core function of Exchangis 1.0.0; the basic flow for configuring a data synchronization is: **add subtask -> choose the Source and Sink data sources -> configure the field mapping -> process control -> task configuration -> save -> execute**.
+
+The main functions of task execution include:
+1. Adding, copying and deleting subtask cards;
+2. Importing and exporting data between two different types of data sources;
+3. Selecting the database and table of the source and destination data sources;
+4. Data source field mapping;
+5. Configuring the maximum job parallelism and the maximum job memory;
+6. Viewing the execution status of data synchronization tasks;
+7. Viewing the logs of the main task and each subtask;
+8. Viewing the historical execution status of tasks;
+9. Killing a running task.
+
+##### Data source selection and configuration
+
+  For a newly created data synchronization subtask, the database and table of the data sources must be selected; the selectable data sources are the ones already published in the **data source module**. Data source selection supports search: first search the database, then search the table.
+
+  When MySQL is the destination data source, two write modes, **insert** and **update**, are supported; when it is the source data source, **WHERE condition queries** are supported.
+
+  When Hive is the destination data source, partition information can be configured and the write modes are **append** and **overwrite**; when it is the source data source, **partition configuration** is supported.
+
+![add_subtask](../../../images/zh_CN/ch1/data_source_select.png)
+
+Figure 4-5 Data source selection
+
+ + ![1656574588669](../../../images/zh_CN/ch1/data_source_insert_way.png) + +
+Figure 4-6 Write mode configuration
+
+
+##### Data source field mapping
+
+  Once the database/table information of the data sources is configured, Exchangis 1.0.0 automatically maps the fields of the Source and Sink data sources; you can choose the fields you want to map yourself, and you can also use this to check whether the fields match. When Hive is the Sink data source, its mapped fields cannot be modified.
+
+![1656574253553](../../../images/zh_CN/ch1/data_source_field_mapping.png)
+
+Figure 4-7 Field mapping
+
+
+##### Process control
+
+  Task execution provides configuration of the **maximum job parallelism** (default 1) and the **maximum job memory** (default 1024 MB), which can be changed according to actual needs.
+
+![1656574125954](../../../images/zh_CN/ch1/task_proccess_control.png)
+
+Figure 4-8 Process control
+
+
+#### 4) Job execution
+
+  Exchangis 1.0 supports executing multiple subtasks at the same time. Once the task is configured, click Execute to start the data synchronization task; a workbench pops up at the bottom of the page, consisting of three parts: **running status, real-time logs and execution history**.
+
+  **Running status** shows the overall progress of the current data synchronization task, including the numbers of succeeded and failed tasks; clicking a task's name shows the various runtime metrics of that task.
+
+  **Real-time logs** show two main kinds of content: one is the log of the whole transfer job, which outputs status logs of each task, for example whether a task has been scheduled or is running; the other is the log of each task, which outputs the corresponding transfer log. In the real-time logs, the log can be filtered by keywords and ignore-words, and a function to fetch the last n lines of the log is provided; you can also filter and display the Error, Warning and Info log levels by simply clicking the corresponding button.
+
+  **Execution history** shows the historical execution information of this transfer task and provides a preliminary overview of past executions; to inspect more detailed historical information, click the task name to jump to the synchronization history page.
+
+  Executing a data synchronization task requires specifying an execution user, which defaults to the login user; adjust this according to the actual configuration of the data sources.
+
+
+## 5. Synchronization history
+
+  This module shows all data synchronization tasks executed in the past; each user can only see the tasks they created themselves, and different users are isolated from each other.
+
+The main functions are as follows:
+1. Find the desired historical task information by query conditions;
+2. For tasks not in a terminal state, a terminate function is provided that can kill them;
+3. View the running status and real-time logs of each task;
+4. View the more detailed configuration information and update time of each synchronization task.
+
+![sync_history](../../../images/zh_CN/ch1/sync_history.png)
+
+Figure 5-1 Synchronization history page
+
+
+## 6. Using Exchangis AppConn
+
+  Currently, Exchangis 1.0 integrates with DSS in Appconn form. **On the DSS side**, through DSS's **Application development -> Project list**, a data exchange **sqoop workflow node** can be created in workflow orchestration mode; there, data synchronization tasks can be configured and executed. Exchangis projects and data exchange tasks created in DSS are created synchronously in Exchangis.
+
+Exchangis Appconn mainly supports the following functions:
+
+1. **Project operations**: creating, deleting and modifying a DSS project synchronously affects the project on the Exchangis side;
+2. **Basic workflow node operations**: creating, deleting and modifying a sqoop workflow node in the DSS orchestrator is synchronized to the task on the Exchangis side;
+3. **Workflow data transfer operations**: sqoop workflow nodes support configuring and executing data synchronization tasks;
+4. **Workflow publish operations**: sqoop workflow nodes can be published to WTSS for task scheduling.
+
+### 1. Project operations
+
+  This module can create, modify and delete DSS projects, and operations on the DSS side are synchronized to the Exchangis side. Taking a project created in DSS as an example, the flow in exchangis appconn is: **click Create project -> fill in the project information -> click Confirm -> enter the Exchangis side -> click Project management**, where the synchronously created project can be seen, as shown below:
+
+![appconn_pro_create](../../../images/zh_CN/ch1/appconn_pro_create.png)
+
+Figure 6-1 Project operations
+
+
+After creation, the synchronized project can be seen on the Exchangis side
+
+![appconn_pro_sync](../../../images/zh_CN/ch1/appconn_pro_sync.jpg)
+
+Figure 6-2 Project synchronized to Exchangis
+
+
+### 2. Basic workflow node operations
+
+ Workflow nodes can be created, modified, deleted and given dependency selections, and nodes can be linked to one another; operations on the DSS side are synchronized to the Exchangis side. Taking the creation of a sqoop workflow node as an example, the flow for operating on a project in exchangis appconn is: **create a workflow -> drag the sqoop node from the left plugin bar onto the canvas on the right -> click Confirm to create the sqoop node task -> go to the exchangis side to view the synchronously created task**, as shown below; deleting and modifying sqoop node tasks works the same way.
+
+![appconn_pro_sqoop](../../../images/zh_CN/ch1/appconn_pro_sqoop.png)
+
+Figure 6-3 Sqoop node functions
+
+
+You can see that the transfer task has also been synchronized into Exchangis
+
+![](../../../images/zh_CN/ch1/appconn_pro_sqoop_sync.jpg)
+
+Figure 6-4 Sqoop node synchronized to Exchangis
+
+
+### 3. Workflow data transfer operations
+
+  Double-click the sqoop node to operate on the workflow node; sqoop workflow nodes support configuring and executing data synchronization tasks. Running transfer tasks in workflow-node form is the core function of Exchangis Appconn, and **each sqoop node represents one data synchronization task**. The concrete flow is: **double-click the sqoop node -> the task configuration page pops up -> configure the task information -> execute the task**, as shown below:
+
+![sqoop_config](../../../images/zh_CN/ch1/sqoop_config.png)
+
+Figure 6-5 Double-click the sqoop workflow node to open the configuration page
+
+ +![sqoop_user_config](../../../images/zh_CN/ch1/sqoop_user_config.png) + +
+Figure 6-6 Configure the workflow node information
+
+
+  There are two ways to execute: one is to click the Execute button on the task configuration page that pops up; the other is to click the DSS orchestrator's **Execute** button or its **Execute selected** button. **Execute** runs all nodes in the workflow, while **Execute selected** runs only the selected workflow nodes, not all of them.
+
+![sqoop_execute](../../../images/zh_CN/ch1/sqoop_execute.png)
+
+Figure 6-7 Execute the task
+
+
+Note: for data synchronization tasks executed in DSS sqoop nodes, the related information can all be viewed on the Exchangis side.
+
+### 4. Workflow publish operations
+
+  The **Publish** function of workflow tasks supports publishing sqoop workflow nodes to WTSS for task scheduling. Data exchange task information created and configured in the **development center** can, through publishing, be published to WTSS, where the tasks can be scheduled.
+
+### 5. Production center
+
+  Click the namespace drop-down box and switch to the **production center**; there you can see the workflow logs of all projects and check the scheduling status of each workflow
+
+![production_center](../../../images/zh_CN/ch1/production_center.png)
+
+Figure 6-8 Production center
+
+
diff --git a/exchangis-dao/pom.xml b/exchangis-dao/pom.xml
new file mode 100644
index 000000000..ca406e7fa
--- /dev/null
+++ b/exchangis-dao/pom.xml
@@ -0,0 +1,58 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <parent>
+        <artifactId>exchangis</artifactId>
+        <groupId>com.webank.wedatasphere.exchangis</groupId>
+        <version>${revision}</version>
+        <relativePath>../pom.xml</relativePath>
+    </parent>
+    <modelVersion>4.0.0</modelVersion>
+
+    <artifactId>exchangis-dao</artifactId>
+
+    <properties>
+        <maven.compiler.source>8</maven.compiler.source>
+        <maven.compiler.target>8</maven.compiler.target>
+        <hibernate.validator>5.1.1.Final</hibernate.validator>
+    </properties>
+
+    <dependencies>
+        <dependency>
+            <groupId>org.apache.linkis</groupId>
+            <artifactId>linkis-mybatis</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.linkis</groupId>
+            <artifactId>linkis-module</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.linkis</groupId>
+            <artifactId>linkis-gateway-httpclient-support</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.hibernate</groupId>
+            <artifactId>hibernate-validator</artifactId>
+            <version>${hibernate.validator}</version>
+        </dependency>
+    </dependencies>
+
+    <build>
+        <plugins>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-deploy-plugin</artifactId>
+            </plugin>
+            <plugin>
+                <groupId>net.alchim31.maven</groupId>
+                <artifactId>scala-maven-plugin</artifactId>
+            </plugin>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-jar-plugin</artifactId>
+            </plugin>
+        </plugins>
+    </build>
+</project>
\ No newline at end of file
diff --git a/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/ValidatorConfiguration.java b/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/ValidatorConfiguration.java
new file mode 100644
index 000000000..882e71f50
--- /dev/null
+++ b/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/ValidatorConfiguration.java
@@ -0,0 +1,21 @@
+package com.webank.wedatasphere.exchangis;
+
+import org.hibernate.validator.HibernateValidator;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+
+import javax.validation.Validation;
+import javax.validation.Validator;
+
+/**
+ * Bean validator
+ */
+@Configuration
+public class ValidatorConfiguration {
+    @Bean
+    public Validator validator(){
+        return Validation.byProvider(HibernateValidator.class)
+                .configure().failFast(true)
+                .buildValidatorFactory().getValidator();
+    }
+}
diff --git a/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/common/AuditLogUtils.java b/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/common/AuditLogUtils.java
new file mode 100644
index 000000000..e79ee6bb5
--- /dev/null
+++ b/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/common/AuditLogUtils.java
@@ -0,0 +1,38 @@
+package com.webank.wedatasphere.exchangis.common;
+
+
+import com.webank.wedatasphere.exchangis.common.enums.OperateTypeEnum;
+import com.webank.wedatasphere.exchangis.common.enums.TargetTypeEnum;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.Date;
+
+/**
+ * @author tikazhang
+ * @Date 2022/9/19 20:07
+ */
+public class AuditLogUtils {
+
+
+    private static final Logger LOGGER = LoggerFactory.getLogger(AuditLogUtils.class);
+
+    /**
+     * Print an audit log; all id-like attributes are Strings
+     * @param user name of the user performing the operation
+     * @param proxyUser proxy user on whose behalf the operation is performed
+     * @param targetType type of the object the operation targets
+     * @param targetId id of the object the operation targets
+     * @param targetName name of the object the operation targets
+     * @param operateType type of the operation
+     * @param params parameters related to the operation
+     */
+    public static void printLog(String user, String proxyUser, TargetTypeEnum targetType,
+                                String targetId, String targetName, OperateTypeEnum operateType, Object params) {
+        //String detailInfo=new Gson().toJson(params);
+        String detailInfo=params.toString();
+        LOGGER.info("[{}],[{}],[{}],[{}],[{}],[{}],[{}],[{}],[{}]",
+                new Date(),user, "proxyUser is: " + proxyUser, "Exchangis-1.1.3", targetType.getName(),
+                targetId,targetName,operateType.getName(), detailInfo);
+    }
+
+}
diff --git a/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/common/EnvironmentUtils.java b/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/common/EnvironmentUtils.java
new file mode 100644
index 000000000..ac6e77198
--- /dev/null
+++ b/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/common/EnvironmentUtils.java
@@ -0,0 +1,84 @@
+package com.webank.wedatasphere.exchangis.common;
+
+import org.apache.commons.lang3.StringUtils;
+import org.apache.linkis.DataWorkCloudApplication;
+import org.apache.linkis.common.conf.CommonVars;
+import org.apache.linkis.common.conf.Configuration;
+import org.apache.linkis.common.utils.Utils;
+import org.apache.linkis.server.utils.LinkisMainHelper;
+import org.springframework.context.ApplicationContext;
+
+
+/**
+ * Environment utils
+ */
+public class EnvironmentUtils {
+
+    private static final CommonVars<String> JVM_USER = CommonVars.apply("wds.exchangis.env.jvm.user", System.getProperty("user.name", "hadoop"));
+
+    private static final CommonVars<String> SERVER_NAME = CommonVars.apply(LinkisMainHelper.SERVER_NAME_KEY(), "exchangis");
+
+    /**
+     * Jvm user
+     * @return user name
+     */
+    public static String getJvmUser(){
+        return JVM_USER.getValue();
+    }
+
+    /**
+     * Server name
+     * @return name
+     */
+    public static String getServerName(){
+        return SERVER_NAME.getValue();
+    }
+
+    /**
+     * Get server address
+     * @return address
+     */
+    public static String getServerAddress(){
+        ApplicationContext context = DataWorkCloudApplication.getApplicationContext();
+        String hostname;
+        if (Configuration.PREFER_IP_ADDRESS()) {
+            hostname = context
+                    .getEnvironment().getProperty("spring.cloud.client.ip-address");
+        } else {
+            hostname = context.getEnvironment().getProperty("eureka.instance.hostname", "");
+            if (StringUtils.isBlank(hostname)) {
+                hostname = Utils.getComputerName();
+            }
+        }
+        String serverPort = context.getEnvironment().getProperty("server.port");
+        return hostname + (StringUtils.isNotBlank(serverPort) ? ":" + serverPort : "");
+    }
+    /**
+     * Get server host name
+     * @return hostname
+     */
+    public static String getServerHost(){
+        ApplicationContext context = DataWorkCloudApplication.getApplicationContext();
+        if (Configuration.PREFER_IP_ADDRESS()) {
+            return context
+                    .getEnvironment().getProperty("spring.cloud.client.ip-address");
+        } else {
+            String hostname = context.getEnvironment().getProperty("eureka.instance.hostname", "");
+            if (StringUtils.isBlank(hostname)) {
+                return Utils.getComputerName();
+            }
+            return hostname;
+        }
+    }
+
+    /**
+     * Get server port
+     * @return port number
+     */
+    public static Integer getServerPort(){
+        String serverPort = DataWorkCloudApplication.getApplicationContext()
+                .getEnvironment().getProperty("server.port");
+        return StringUtils.isNotBlank(serverPort) ? Integer.parseInt(serverPort) : null;
+    }
+
+}
diff --git a/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/common/UserUtils.java b/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/common/UserUtils.java
new file mode 100644
index 000000000..216086ce1
--- /dev/null
+++ b/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/common/UserUtils.java
@@ -0,0 +1,26 @@
+package com.webank.wedatasphere.exchangis.common;
+
+import org.apache.linkis.server.security.ProxyUserSSOUtils;
+import org.apache.linkis.server.security.SecurityFilter;
+import scala.Option;
+
+import javax.servlet.http.HttpServletRequest;
+
+/**
+ * @author tikazhang
+ * @Date 2022/9/22 16:54
+ */
+public class UserUtils {
+    public static String getLoginUser(HttpServletRequest request) {
+        Option<String> proxyUserUsername =
+                ProxyUserSSOUtils.getProxyUserUsername(request);
+        String loginUser = null;
+        if (proxyUserUsername.isDefined()) {
+            loginUser = proxyUserUsername.get();
+        } else {
+            loginUser = SecurityFilter.getLoginUsername(request);
+        }
+        return loginUser;
+    }
+
+}
diff --git a/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/common/enums/OperateTypeEnum.java b/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/common/enums/OperateTypeEnum.java
new file mode 100644
index 000000000..99de7e712
--- /dev/null
+++ b/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/common/enums/OperateTypeEnum.java
@@ -0,0 +1,30 @@
+package com.webank.wedatasphere.exchangis.common.enums;
+
+/**
+ * @author tikazhang
+ * @Date 2022/9/19 20:16
+ */
+public enum OperateTypeEnum {
+    CREATE("create"),
+    UPDATE("update"),
+    DELETE("delete"),
+    COPY("copy"),
+    EXPORT("export"),
+    IMPORT("import"),
+    PUBLISH("publish"),
+    EXPIRE("expire"),
+    EXECUTE("execute"),
+    KILL("kill"),
+    ;
+    private String name;
+    OperateTypeEnum(String name) {
+        this.name = name;
+    }
+
+    public String getName() {
+        return name;
+    }
+    public void setName(String name) {
+        this.name = name;
+    }
+}
diff --git a/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/common/enums/TargetTypeEnum.java b/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/common/enums/TargetTypeEnum.java
new file mode 100644
index 000000000..539767c1d
--- /dev/null
+++ b/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/common/enums/TargetTypeEnum.java
@@ -0,0 +1,38 @@
+package com.webank.wedatasphere.exchangis.common.enums;
+
+/**
+ * @author tikazhang
+ * @Date 2022/9/19 20:22
+ */
+public enum TargetTypeEnum {
+    /**
+     * Project
+     */
+    PROJECT("project"),
+    /**
+     * Job
+     */
+    JOB("job"),
+    /**
+     * Data source
+     */
+    DATASOURCE("datasource"),
+    /**
+     * Task
+     */
+    TASK("task"),
+    ;
+    private String name;
+
+    TargetTypeEnum(String name) {
+        this.name = name;
+    }
+
+    public String getName() {
+        return name;
+    }
+
+    public void setName(String name) {
+        this.name = name;
+    }
+}
diff --git a/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/common/http/HttpClientConfiguration.java b/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/common/http/HttpClientConfiguration.java
new file mode 100644
index 000000000..548b99650
--- /dev/null
+++ b/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/common/http/HttpClientConfiguration.java
@@ -0,0 +1,25 @@
+package com.webank.wedatasphere.exchangis.common.http;
+
+import org.apache.linkis.common.conf.CommonVars;
+
+/**
+ * Define the http configuration
+ */
+public class HttpClientConfiguration {
+
+    /**
+     * Connect timeout
+     */
+    public static final CommonVars<Long> CONNECTION_TIMEOUT = CommonVars.apply("wds.exchangis.http.client.connection.timeout", 30000L);
+
+    /**
+     * Max connection size
+     */
+    public static final CommonVars<Integer> MAX_CONNECTION_SIZE = CommonVars.apply("wds.exchangis.http.client.connection.max-size", 100);
+
+    /**
+     * Read timeout
+     */
+    public static final CommonVars<Long> READ_TIMEOUT = CommonVars.apply("wds.exchangis.http.client.read-timeout", 90000L);
+
+}
diff --git a/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/common/linkis/bml/BmlResource.java b/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/common/linkis/bml/BmlResource.java
new file mode 100644
index 000000000..0ea6f12af
--- /dev/null
+++ b/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/common/linkis/bml/BmlResource.java
@@ -0,0 +1,40 @@
+package com.webank.wedatasphere.exchangis.common.linkis.bml;
+
+/**
+ * Bml resource definition
+ */
+public class BmlResource {
+    /**
+     * Resource id
+     */
+    private String resourceId;
+
+    /**
+     * Version
+     */
+    private String version;
+
+    public BmlResource(){
+
+    }
+
+    public BmlResource(String resourceId, String version){
+        this.resourceId = resourceId;
+        this.version = version;
+    }
+    public String getResourceId() {
+        return resourceId;
+    }
+
+    public void setResourceId(String resourceId) {
+        this.resourceId = resourceId;
+    }
+
+    public String getVersion() {
+        return version;
+    }
+
+    public void setVersion(String version) {
+        this.version = version;
+    }
+}
diff --git a/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/common/linkis/client/ClientConfiguration.java b/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/common/linkis/client/ClientConfiguration.java
new file mode 100644
index 000000000..bb65867a4
--- /dev/null
+++ b/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/common/linkis/client/ClientConfiguration.java
@@ -0,0 +1,54 @@
+package com.webank.wedatasphere.exchangis.common.linkis.client;
+
+import com.webank.wedatasphere.exchangis.common.http.HttpClientConfiguration;
+import org.apache.linkis.common.conf.CommonVars;
+import org.apache.linkis.common.conf.Configuration;
+
+/**
+ * Configuration for linkis client
+ */
+public class ClientConfiguration {
+
+    /**
+     * Linkis server url
+     */
+    public static final CommonVars<String> LINKIS_SERVER_URL = CommonVars.apply("wds.exchangis.client.linkis.server-url", Configuration.getGateWayURL());
+
+    /**
+     * Linkis token value
+     */
+    public static final CommonVars<String> LINKIS_TOKEN_VALUE = CommonVars.apply("wds.exchangis.client.linkis.token.value", "EXCHANGIS-TOKEN");
+
+    /**
+     * Linkis client max connections
+     */
+    public static final CommonVars<Integer> LINKIS_DEFAULT_MAX_CONNECTIONS = CommonVars.apply("wds.exchangis.client.linkis.max-connections.default",
+            HttpClientConfiguration.MAX_CONNECTION_SIZE.getValue());
+
+
+    /**
+     * Linkis discovery enable
+     */
+    public static final CommonVars<Boolean> LINKIS_DISCOVERY_ENABLED = CommonVars.apply("wds.exchangis.client.linkis.discovery.enabled", true);
+
+    /**
+     * Linkis discovery frequency
+     */
+    public static final CommonVars<Long> LINKIS_DISCOVERY_FREQUENCY_PERIOD = CommonVars.apply("wds.exchangis.client.linkis.discovery.frequency-period", 1L);
+
+    /**
+     * Linkis client load balance
+     */
+    public static final CommonVars<Boolean> LINKIS_LOAD_BALANCER_ENABLED = CommonVars.apply("wds.exchangis.client.linkis.load-balancer.enabled", true);
+
+
+    /**
+     * Linkis client retry
+     */
+    public static final CommonVars<Boolean> LINKIS_RETRY_ENABLED = CommonVars.apply("wds.exchangis.client.linkis.retry.enabled", false);
+
+    /**
+     * DWS version
+     */
+    public static final CommonVars<String> LINKIS_DWS_VERSION = CommonVars.apply("wds.exchangis.client.linkis.dws.version", Configuration.LINKIS_WEB_VERSION().getValue());
+}
diff --git a/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/common/pager/PageQuery.java b/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/common/pager/PageQuery.java
new file mode 100644
index 000000000..ba95c3d80
--- /dev/null
+++ b/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/common/pager/PageQuery.java
@@ -0,0 +1,49 @@
+package com.webank.wedatasphere.exchangis.common.pager;
+
+import java.util.Objects;
+
+/**
+ * Query Vo
+ */
+public class PageQuery {
+
+    protected Integer current = 1;
+
+    protected Integer size = 10;
+
+    protected Integer page;
+
+    protected Integer pageSize;
+
+    public Integer getCurrent() {
+        return current;
+    }
+
+    public void setCurrent(Integer current) {
+        this.current = current;
+    }
+
+    public Integer getSize() {
+        return size;
+    }
+
+    public void setSize(Integer size) {
+        this.size = size;
+    }
+
+    public Integer getPage() {
+        return Objects.nonNull(page) ? page : current;
+    }
+
+    public void setPage(Integer page) {
+        this.page = page;
+    }
+
+    public Integer getPageSize() {
+        return Objects.nonNull(pageSize) ? pageSize : size;
+    }
+
+    public void setPageSize(Integer pageSize) {
+        this.pageSize = pageSize;
+    }
+}
diff --git a/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/common/pager/PageResult.java b/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/common/pager/PageResult.java
new file mode 100644
index 000000000..8774b54b4
--- /dev/null
+++ b/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/common/pager/PageResult.java
@@ -0,0 +1,61 @@
+package com.webank.wedatasphere.exchangis.common.pager;
+
+import com.github.pagehelper.PageInfo;
+import org.apache.linkis.server.Message;
+
+import java.util.List;
+
+/**
+ * Page result
+ * @param <T>
+ */
+public class PageResult<T> {
+    /**
+     * Total
+     */
+    private Long total;
+
+    /**
+     * List
+     */
+    private List<T> list;
+
+    public PageResult(){
+
+    }
+
+    public PageResult(PageInfo<T> pageInfo){
+        this.total = pageInfo.getTotal();
+        this.list = pageInfo.getList();
+    }
+    /**
+     * To Message(in linkis-common)
+     * @return message
+     */
+    public Message toMessage(String info){
+        Message message = Message.ok(info);
+        message.data("total", total);
+        message.data("list", list);
+        return message;
+    }
+
+    public Message toMessage(){
+        return toMessage("");
+    }
+
+    public Long getTotal() {
+        return total;
+    }
+
+    public void setTotal(Long total) {
+        this.total = total;
+    }
+
+    public List<T> getList() {
+        return list;
+    }
+
+    public void setList(List<T> list) {
+        this.list = list;
+    }
+}
diff --git a/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/common/validator/groups/InsertGroup.java b/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/common/validator/groups/InsertGroup.java
new file mode 100644
index 000000000..1c05bc04d
--- /dev/null
+++ b/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/common/validator/groups/InsertGroup.java
@@ -0,0 +1,10 @@
+package com.webank.wedatasphere.exchangis.common.validator.groups;
+
+import javax.validation.groups.Default;
+
+/**
+ * Insert group for validator
+ */
+public interface InsertGroup extends Default {
+
+}
diff --git a/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/common/validator/groups/UpdateGroup.java
b/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/common/validator/groups/UpdateGroup.java new file mode 100644 index 000000000..ce05e3f7e --- /dev/null +++ b/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/common/validator/groups/UpdateGroup.java @@ -0,0 +1,9 @@ +package com.webank.wedatasphere.exchangis.common.validator.groups; + +import javax.validation.groups.Default; + +/** + * Update group for validator + */ +public interface UpdateGroup extends Default { +} diff --git a/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/dao/domain/ExchangisJobDsBind.java b/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/dao/domain/ExchangisJobDsBind.java new file mode 100644 index 000000000..8b99351ca --- /dev/null +++ b/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/dao/domain/ExchangisJobDsBind.java @@ -0,0 +1,88 @@ +package com.webank.wedatasphere.exchangis.dao.domain; + +import com.baomidou.mybatisplus.annotation.IdType; +import com.baomidou.mybatisplus.annotation.TableField; +import com.baomidou.mybatisplus.annotation.TableId; +import com.baomidou.mybatisplus.annotation.TableName; + +@TableName("exchangis_job_ds_bind") +public class ExchangisJobDsBind { + + @TableId(type = IdType.AUTO) + private Long id; + + @TableField("job_id") + private Long jobId; + + private Integer taskIndex; + + private Long sourceDsId; + + /** + * Source data source name + */ + private String sourceDsName; + + private Long sinkDsId; + + /** + * Sink data source name + */ + private String sinkDsName; + + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } + + public Long getJobId() { + return jobId; + } + + public void setJobId(Long jobId) { + this.jobId = jobId; + } + + public Integer getTaskIndex() { + return taskIndex; + } + + public void setTaskIndex(Integer taskIndex) { + this.taskIndex = taskIndex; + } + + public Long getSourceDsId() { + return sourceDsId; + } + + public void setSourceDsId(Long sourceDsId) { + this.sourceDsId = sourceDsId; + } + + public Long getSinkDsId() { + return sinkDsId; + } + + public void setSinkDsId(Long sinkDsId) { + this.sinkDsId = sinkDsId; + } + + public String getSourceDsName() { + return sourceDsName; + } + + public void setSourceDsName(String sourceDsName) { + this.sourceDsName = sourceDsName; + } + + public String getSinkDsName() { + return sinkDsName; + } + + public void setSinkDsName(String sinkDsName) { + this.sinkDsName = sinkDsName; + } +} diff --git a/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/dao/domain/ExchangisJobParamConfig.java b/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/dao/domain/ExchangisJobParamConfig.java new file mode 100644 index 000000000..b50609c03 --- /dev/null +++ b/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/dao/domain/ExchangisJobParamConfig.java @@ -0,0 +1,279 @@ +package com.webank.wedatasphere.exchangis.dao.domain; + +import com.baomidou.mybatisplus.annotation.IdType; +import com.baomidou.mybatisplus.annotation.TableField; +import com.baomidou.mybatisplus.annotation.TableId; +import com.baomidou.mybatisplus.annotation.TableName; + +@TableName("exchangis_job_param_config") +public class ExchangisJobParamConfig { + public static final String DIRECTION_SOURCE = "SOURCE"; + public static final String DIRECTION_SINK = "SINK"; + + @TableId(value = "id", type = IdType.AUTO) + private Long id; + + @TableField(value = "config_key") + private String configKey; + + @TableField(value = "config_name") + 
private String configName; + + @TableField(value = "config_direction") + private String configDirection; + + private String type; + + @TableField(value = "ui_type") + private String uiType; + + @TableField(value = "ui_field") + private String uiField; + + @TableField(value = "ui_label") + private String uiLabel; + + @TableField(value = "unit") + private String unit; + + @TableField(value = "required") + private Boolean required; + + @TableField(value = "value_type") + private String valueType; + + @TableField(value = "value_range") + private String valueRange; + + @TableField(value = "default_value") + private String defaultValue; + + @TableField(value = "validate_type") + private String validateType; + + @TableField(value = "validate_range") + private String validateRange; + + @TableField(value = "validate_msg") + private String validateMsg; + + @TableField(value = "is_hidden") + private Boolean hidden; + + @TableField(value = "is_advanced") + private Boolean advanced; + + @TableField(value = "ref_id") + private Long refId; + + /** + * store url exa. http://127.0.0.1/api/v1/dss/exchangis/main/xxx + */ + private String source; + + private Integer level; + + private String treename; + + private Integer sort; + + private String description; + + private Integer status; + + public String getSource() { + return source; + } + + public void setSource(String source) { + this.source = source; + } + + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } + + public String getConfigKey() { + return configKey; + } + + public void setConfigKey(String configKey) { + this.configKey = configKey; + } + + public String getConfigName() { + return configName; + } + + public void setConfigName(String configName) { + this.configName = configName; + } + + public String getType() { + return type; + } + + public void setType(String type) { + this.type = type; + } + + public String getUiType() { + return uiType; + } + + public void setUiType(String uiType) { + this.uiType = uiType; + } + + public String getUiField() { + return uiField; + } + + public void setUiField(String uiField) { + this.uiField = uiField; + } + + public String getUiLabel() { + return uiLabel; + } + + public void setUiLabel(String uiLabel) { + this.uiLabel = uiLabel; + } + + public String getValueType() { + return valueType; + } + + public void setValueType(String valueType) { + this.valueType = valueType; + } + + public String getValueRange() { + return valueRange; + } + + public void setValueRange(String valueRange) { + this.valueRange = valueRange; + } + + public String getDefaultValue() { + return defaultValue; + } + + public void setDefaultValue(String defaultValue) { + this.defaultValue = defaultValue; + } + + public String getValidateType() { + return validateType; + } + + public void setValidateType(String validateType) { + this.validateType = validateType; + } + + public String getValidateRange() { + return validateRange; + } + + public void setValidateRange(String validateRange) { + this.validateRange = validateRange; + } + + public String getValidateMsg() { return validateMsg; } + + public void setValidateMsg(String validateMsg) { this.validateMsg = validateMsg; } + + public Boolean getHidden() { + return hidden; + } + + public void setHidden(Boolean hidden) { + this.hidden = hidden; + } + + public Boolean getAdvanced() { + return advanced; + } + + public void setAdvanced(Boolean advanced) { + this.advanced = advanced; + } + + public Integer getLevel() { + return level; + } + + public void 
setLevel(Integer level) { + this.level = level; + } + + public String getTreename() { + return treename; + } + + public void setTreename(String treename) { + this.treename = treename; + } + + public Integer getSort() { + return sort; + } + + public void setSort(Integer sort) { + this.sort = sort; + } + + public String getDescription() { + return description; + } + + public void setDescription(String description) { + this.description = description; + } + + public Integer getStatus() { + return status; + } + + public void setStatus(Integer status) { + this.status = status; + } + + public String getConfigDirection() { + return configDirection; + } + + public void setConfigDirection(String configDirection) { + this.configDirection = configDirection; + } + + public String getUnit() { + return unit; + } + + public void setUnit(String unit) { + this.unit = unit; + } + + public Boolean getRequired() { + return required; + } + + public void setRequired(Boolean required) { + this.required = required; + } + + public Long getRefId() { + return refId; + } + + public void setRefId(Long refId) { + this.refId = refId; + } +} \ No newline at end of file diff --git a/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/dao/hook/MapperHook.java b/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/dao/hook/MapperHook.java new file mode 100644 index 000000000..ba0de2947 --- /dev/null +++ b/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/dao/hook/MapperHook.java @@ -0,0 +1,24 @@ +package com.webank.wedatasphere.exchangis.dao.hook; + +import com.webank.wedatasphere.exchangis.dao.mapper.ExchangisJobParamConfigMapper; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; + +@Component +public class MapperHook { + + private ExchangisJobParamConfigMapper exchangisJobParamConfigMapper; + + @Autowired + public MapperHook(ExchangisJobParamConfigMapper exchangisJobParamConfigMapper) { + this.exchangisJobParamConfigMapper = exchangisJobParamConfigMapper; + } + + public ExchangisJobParamConfigMapper getExchangisJobParamConfigMapper() { + return exchangisJobParamConfigMapper; + } + + public void setExchangisJobParamConfigMapper(ExchangisJobParamConfigMapper exchangisJobParamConfigMapper) { + this.exchangisJobParamConfigMapper = exchangisJobParamConfigMapper; + } +} diff --git a/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/dao/mapper/ExchangisJobDsBindMapper.java b/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/dao/mapper/ExchangisJobDsBindMapper.java new file mode 100644 index 000000000..43522db61 --- /dev/null +++ b/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/dao/mapper/ExchangisJobDsBindMapper.java @@ -0,0 +1,10 @@ +package com.webank.wedatasphere.exchangis.dao.mapper; + +import com.baomidou.mybatisplus.core.mapper.BaseMapper; +import com.webank.wedatasphere.exchangis.dao.domain.ExchangisJobDsBind; +import org.apache.ibatis.annotations.Mapper; + +@Mapper +public interface ExchangisJobDsBindMapper extends BaseMapper { + +} diff --git a/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/dao/mapper/ExchangisJobParamConfigMapper.java b/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/dao/mapper/ExchangisJobParamConfigMapper.java new file mode 100644 index 000000000..03e6de5a8 --- /dev/null +++ b/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/dao/mapper/ExchangisJobParamConfigMapper.java @@ -0,0 +1,10 @@ +package 
com.webank.wedatasphere.exchangis.dao.mapper; + +import com.baomidou.mybatisplus.core.mapper.BaseMapper; +import com.webank.wedatasphere.exchangis.dao.domain.ExchangisJobParamConfig; +import org.apache.ibatis.annotations.Mapper; + +@Mapper +public interface ExchangisJobParamConfigMapper extends BaseMapper { + +} \ No newline at end of file diff --git a/exchangis-dao/src/main/scala/com/webank/wedatasphere/exchangis/common/linkis/client/ExchangisHttpClient.scala b/exchangis-dao/src/main/scala/com/webank/wedatasphere/exchangis/common/linkis/client/ExchangisHttpClient.scala new file mode 100644 index 000000000..45efe146d --- /dev/null +++ b/exchangis-dao/src/main/scala/com/webank/wedatasphere/exchangis/common/linkis/client/ExchangisHttpClient.scala @@ -0,0 +1,39 @@ +package com.webank.wedatasphere.exchangis.common.linkis.client + +import com.webank.wedatasphere.exchangis.common.linkis.client.config.{ExchangisClientConfig} +import org.apache.http.client.config.RequestConfig +import org.apache.http.impl.client.{CloseableHttpClient, HttpClients} +import org.apache.linkis.httpclient.dws.DWSHttpClient + +import java.util.concurrent.TimeUnit + +/** + * Enhanced http client config + */ +class ExchangisHttpClient(clientConfig: ExchangisClientConfig, clientName: String) + extends DWSHttpClient(clientConfig, clientName){ + /** + * Build http client + */ + override protected val httpClient: CloseableHttpClient = { + val defaultRequestConfig = RequestConfig.custom() + .setConnectTimeout(clientConfig.getConnectTimeout.toInt) + .setConnectionRequestTimeout(clientConfig.getConnReqTimeout.toInt) + .setSocketTimeout(clientConfig.getReadTimeout.toInt) + .build() + val clientBuilder = HttpClients.custom() + clientBuilder.setDefaultRequestConfig(defaultRequestConfig).useSystemProperties() + .setMaxConnPerRoute(clientConfig.getMaxConnection / 2).setMaxConnTotal(clientConfig.getMaxConnection) + val maxIdleTime = clientConfig.getMaxIdleTime + if (maxIdleTime > 0){ + // Evict idle connections + clientBuilder.evictExpiredConnections(); + clientBuilder.evictIdleConnections(maxIdleTime, TimeUnit.MILLISECONDS) + } + clientBuilder.build() + } + + def getHttpClient: CloseableHttpClient = { + httpClient + } +} diff --git a/exchangis-dao/src/main/scala/com/webank/wedatasphere/exchangis/common/linkis/client/config/ExchangisClientConfig.scala b/exchangis-dao/src/main/scala/com/webank/wedatasphere/exchangis/common/linkis/client/config/ExchangisClientConfig.scala new file mode 100644 index 000000000..ab9767ab9 --- /dev/null +++ b/exchangis-dao/src/main/scala/com/webank/wedatasphere/exchangis/common/linkis/client/config/ExchangisClientConfig.scala @@ -0,0 +1,36 @@ +package com.webank.wedatasphere.exchangis.common.linkis.client.config + +import org.apache.linkis.httpclient.config.ClientConfig +import org.apache.linkis.httpclient.dws.config.DWSClientConfig + +/** + * Enhanced dws client config + */ +class ExchangisClientConfig private[config]( + clientConfig: ClientConfig, + maxIdleTime: Long, + connReqTimeout: Long + ) extends DWSClientConfig(clientConfig) { + + /** + * Max idle time + * @return + */ + def getMaxIdleTime: Long = { + maxIdleTime + } + + /** + * Connection request timeout + * @return + */ + def getConnReqTimeout: Long = { + connReqTimeout + } +} + +object ExchangisClientConfig{ + def newBuilder: ExchangisClientConfigBuilder = { + new ExchangisClientConfigBuilder() + } +} \ No newline at end of file diff --git 
diff --git a/exchangis-dao/src/main/scala/com/webank/wedatasphere/exchangis/common/linkis/client/config/ExchangisClientConfigBuilder.scala b/exchangis-dao/src/main/scala/com/webank/wedatasphere/exchangis/common/linkis/client/config/ExchangisClientConfigBuilder.scala new file mode 100644 index 000000000..47df001f4 --- /dev/null +++ b/exchangis-dao/src/main/scala/com/webank/wedatasphere/exchangis/common/linkis/client/config/ExchangisClientConfigBuilder.scala @@ -0,0 +1,59 @@ +package com.webank.wedatasphere.exchangis.common.linkis.client.config + +import com.webank.wedatasphere.exchangis.common.http.HttpClientConfiguration +import com.webank.wedatasphere.exchangis.common.linkis.client.ClientConfiguration +import org.apache.linkis.httpclient.config.{ClientConfig, ClientConfigBuilder} +import org.apache.linkis.httpclient.dws.authentication.TokenAuthenticationStrategy + +import java.util.concurrent.TimeUnit + +/** + * Enhanced dws client config builder + */ +class ExchangisClientConfigBuilder extends ClientConfigBuilder { + + private var maxIdleTime: Long = _ + + private var connReqTimeout: Long = _ + + private var dwsVersion: String = _ + + // Load from vars default + // Http common + maxConnection = HttpClientConfiguration.MAX_CONNECTION_SIZE.getValue + connectTimeout = HttpClientConfiguration.CONNECTION_TIMEOUT.getValue + readTimeout = HttpClientConfiguration.READ_TIMEOUT.getValue + // Linkis client, use token auth default + dwsVersion = ClientConfiguration.LINKIS_DWS_VERSION.getValue + serverUrl = ClientConfiguration.LINKIS_SERVER_URL.getValue + discoveryEnabled = ClientConfiguration.LINKIS_DISCOVERY_ENABLED.getValue + discoveryFrequency(ClientConfiguration.LINKIS_DISCOVERY_FREQUENCY_PERIOD.getValue, TimeUnit.MINUTES) + loadbalancerEnabled = ClientConfiguration.LINKIS_LOAD_BALANCER_ENABLED.getValue + retryEnabled = ClientConfiguration.LINKIS_RETRY_ENABLED.getValue + authenticationStrategy = new TokenAuthenticationStrategy() + authTokenKey = TokenAuthenticationStrategy.TOKEN_KEY + authTokenValue = ClientConfiguration.LINKIS_TOKEN_VALUE.getValue + + def maxIdleTime(maxIdleTime: Long): this.type = { + this.maxIdleTime = maxIdleTime + this + } + + def connReqTimeout(connReqTimeout: Long): this.type = { + this.connReqTimeout = connReqTimeout + this + } + + def setDWSVersion(dwsVersion: String): this.type = { + this.dwsVersion = dwsVersion + this + } + + override def build(): ExchangisClientConfig = { + val clientConfig = new ExchangisClientConfig(super.build(), maxIdleTime, connReqTimeout) + clientConfig.setDWSVersion(dwsVersion) + clientConfig + } +} diff --git a/exchangis-datasource/exchangis-datasource-core/pom.xml b/exchangis-datasource/exchangis-datasource-core/pom.xml new file mode 100644 index 000000000..a3f253324 --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-core/pom.xml @@ -0,0 +1,67 @@ +<?xml version="1.0" encoding="UTF-8"?> +<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> + <parent> + <artifactId>exchangis</artifactId> + <groupId>com.webank.wedatasphere.exchangis</groupId> + <version>${revision}</version> + <relativePath>../../pom.xml</relativePath> + </parent> + <modelVersion>4.0.0</modelVersion> + + <artifactId>exchangis-datasource-core</artifactId> + + <properties> + <maven.compiler.source>8</maven.compiler.source> + <maven.compiler.target>8</maven.compiler.target> + </properties> + + <dependencies> + <dependency> + <groupId>com.webank.wedatasphere.exchangis</groupId> + <artifactId>exchangis-dao</artifactId> + <version>${project.version}</version> + </dependency> + + <dependency> + <groupId>org.apache.linkis</groupId> + <artifactId>linkis-datasource-client</artifactId> + </dependency> + <dependency> + <groupId>org.apache.linkis</groupId> + <artifactId>linkis-metadata</artifactId> + </dependency> + <dependency> + <groupId>org.apache.linkis</groupId> + <artifactId>linkis-publicservice</artifactId> + </dependency> + <dependency> + <groupId>org.apache.linkis</groupId> + <artifactId>linkis-bml</artifactId> + </dependency> + </dependencies> + + <build> + <plugins> + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-deploy-plugin</artifactId> + </plugin> + <plugin> + <groupId>net.alchim31.maven</groupId> + <artifactId>scala-maven-plugin</artifactId> + </plugin> + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-jar-plugin</artifactId> + </plugin> + </plugins> + </build> +</project> \ No newline at end of file
diff --git a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/AbstractExchangisDataSourceDefinition.java b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/AbstractExchangisDataSourceDefinition.java new file mode 100644 index 000000000..c86fa6ef6 --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/AbstractExchangisDataSourceDefinition.java @@ -0,0 +1,85 @@ +package com.webank.wedatasphere.exchangis.datasource.core; + +import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper; +import com.webank.wedatasphere.exchangis.dao.domain.ExchangisJobParamConfig; +import com.webank.wedatasphere.exchangis.dao.hook.MapperHook; +import com.webank.wedatasphere.exchangis.dao.mapper.ExchangisJobParamConfigMapper; +import com.webank.wedatasphere.exchangis.datasource.core.domain.ExchangisDataSourceType; +import org.apache.commons.lang3.StringUtils; +import org.apache.linkis.datasourcemanager.common.domain.DataSourceType; + +import java.util.List; + +public abstract class AbstractExchangisDataSourceDefinition implements ExchangisDataSourceDefinition { + + /** + * Mapper hook from the common DAO module + */ + protected MapperHook mapperHook; + /** + * Type id + */ + protected String id; + @Override + public String name() { + return type().name; + } + + @Override + public String classifier() { + return type().classifier; + } + + @Override + public void setMapperHook(MapperHook mapperHook) { + this.mapperHook = mapperHook; + } + + @Override + public List<DataSourceType> getDataSourceTypes(String user) { + return ExchangisDataSourceDefinition.super.getDataSourceTypes(user); + } + + @Override + public String id() { + if (StringUtils.isBlank(id)) { + // Lazily resolve the type id from the remote type list (queried as the hdfs user) + List<DataSourceType> types = getDataSourceTypes("hdfs"); + for (DataSourceType type : types) { + if (type.getName().equalsIgnoreCase(name())) { + this.id = type.getId(); + } + } + } + return this.id; + } + + @Override + public List<ExchangisJobParamConfig> getDataSourceParamConfigs() { + return getDataSourceParamConfigs(type().name); + } + + protected List<ExchangisJobParamConfig> getDataSourceParamConfigs(String type) { + return getDataSourceParamConfigs(type, null); + } + + protected List<ExchangisJobParamConfig> getDataSourceParamConfigs(String type, String dir) { + ExchangisJobParamConfigMapper exchangisJobParamConfigMapper = this.mapperHook.getExchangisJobParamConfigMapper(); + QueryWrapper<ExchangisJobParamConfig> queryWrapper = new QueryWrapper<>(); + if (StringUtils.isNotBlank(dir)) { + queryWrapper.eq("config_direction", dir); + } + queryWrapper.eq("type", type); + queryWrapper.eq("is_hidden", 0); + queryWrapper.eq("status", 1); + return exchangisJobParamConfigMapper.selectList(queryWrapper); + } + + /** + * Data source type + * @return type + */ + protected abstract ExchangisDataSourceType type(); +} diff --git a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/ExchangisDataSource.java b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/ExchangisDataSource.java new file mode 100644 index 000000000..2eea07dd7 --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/ExchangisDataSource.java @@ -0,0 +1,19 @@ +package com.webank.wedatasphere.exchangis.datasource.core; + +/** + * Data source basic info + */ +public interface ExchangisDataSource { + + /** + * Id + * @return id + */ + Long getId(); + + void setId(Long id); +} diff --git a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/ExchangisDataSourceConfiguration.java b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/ExchangisDataSourceConfiguration.java new file mode 100644 index 000000000..782c26977 --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/ExchangisDataSourceConfiguration.java @@ -0,0 +1,28 @@ +package com.webank.wedatasphere.exchangis.datasource.core; + + +import com.webank.wedatasphere.exchangis.common.linkis.client.ClientConfiguration; +import org.apache.linkis.common.conf.CommonVars; + +/** + * Exchangis data source config + */ +public class ExchangisDataSourceConfiguration { + /** + * Server url + */ + public static final CommonVars<String> SERVER_URL = CommonVars.apply("wds.exchangis.datasource.client.server-url", + ClientConfiguration.LINKIS_SERVER_URL.getValue()); + + /** + * Token value + */ + public static final CommonVars<String> AUTH_TOKEN_VALUE = CommonVars.apply("wds.exchangis.datasource.client.token.value", + ClientConfiguration.LINKIS_TOKEN_VALUE.getValue()); + + /** + * Dws version + */ + public static final CommonVars<String> DWS_VERSION = CommonVars.apply("wds.exchangis.datasource.client.dws.version", + ClientConfiguration.LINKIS_DWS_VERSION.getValue()); +}
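These `CommonVars` fall back to the shared `ClientConfiguration` values, so a deployment only sets the `wds.exchangis.datasource.client.*` keys when the data source client must differ from the global Linkis client. Reading the effective values is a one-liner; a small sketch (the wrapper class is hypothetical):

```java
import com.webank.wedatasphere.exchangis.datasource.core.ExchangisDataSourceConfiguration;

public class DataSourceClientSettings {

    /** Effective endpoint after property overrides. */
    public static String serverUrl() {
        return ExchangisDataSourceConfiguration.SERVER_URL.getValue();
    }

    /** Effective dws version after property overrides. */
    public static String dwsVersion() {
        return ExchangisDataSourceConfiguration.DWS_VERSION.getValue();
    }
}
```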
diff --git a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/ExchangisDataSourceDefinition.java b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/ExchangisDataSourceDefinition.java new file mode 100644 index 000000000..8c6e440c2 --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/ExchangisDataSourceDefinition.java @@ -0,0 +1,75 @@ +package com.webank.wedatasphere.exchangis.datasource.core; + +import com.webank.wedatasphere.exchangis.dao.domain.ExchangisJobParamConfig; +import com.webank.wedatasphere.exchangis.dao.hook.MapperHook; +import org.apache.linkis.datasource.client.impl.LinkisDataSourceRemoteClient; +import org.apache.linkis.datasource.client.impl.LinkisMetaDataRemoteClient; +import org.apache.linkis.datasource.client.request.GetAllDataSourceTypesAction; +import org.apache.linkis.datasource.client.response.GetAllDataSourceTypesResult; +import org.apache.linkis.datasourcemanager.common.domain.DataSourceType; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Objects; + +/** + * Ds type definition + */ +public interface ExchangisDataSourceDefinition { + + /** + * Type id + * @return id + */ + String id(); + + /** + * Type name + * @return name + */ + String name(); + + /** + * Description + * @return desc + */ + String description(); + + String option(); + + String classifier(); + + String structClassifier(); + + String icon(); + + /** + * Parameter configs of the definition + * @return configs + */ + default List<ExchangisJobParamConfig> getDataSourceParamConfigs(){ + return new ArrayList<>(); + } + + default LinkisDataSourceRemoteClient getDataSourceRemoteClient(){ + throw new IllegalArgumentException("unsupported to get data source remote client"); + } + + default LinkisMetaDataRemoteClient getMetaDataRemoteClient(){ + throw new IllegalArgumentException("unsupported to get metadata remote client"); + } + + void setMapperHook(MapperHook mapperHook); + + default List<DataSourceType> getDataSourceTypes(String user) { + GetAllDataSourceTypesResult result = getDataSourceRemoteClient().getAllDataSourceTypes(GetAllDataSourceTypesAction.builder() + .setUser(user) + .build() + ); + + List<DataSourceType> allDataSourceType = result.getAllDataSourceType(); + if (Objects.isNull(allDataSourceType)) allDataSourceType = Collections.emptyList(); + return allDataSourceType; + } +} diff --git a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/context/DefaultExchangisDsContext.java b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/context/DefaultExchangisDsContext.java new file mode 100644 index 000000000..c59ffe36f --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/context/DefaultExchangisDsContext.java @@ -0,0 +1,72 @@ +package com.webank.wedatasphere.exchangis.datasource.core.context; + + +import com.google.common.base.Strings; +import com.webank.wedatasphere.exchangis.datasource.core.ExchangisDataSourceDefinition; +import com.webank.wedatasphere.exchangis.datasource.core.loader.ExchangisDataSourceDefLoader; + +import java.util.Collection; +import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; + +public class DefaultExchangisDsContext implements ExchangisDataSourceContext { + + private final Map<String, ExchangisDataSourceDefinition> dataSources = new ConcurrentHashMap<>(24); + + @Override + public boolean registerLoader(ExchangisDataSourceDefLoader loader) { + return false; + } + + @Override + public void addExchangisDsDefinition(ExchangisDataSourceDefinition dataSource) { + Objects.requireNonNull(dataSource, "dataSource required"); + String name = dataSource.name(); + dataSources.put(name, dataSource); + } + + @Override + public ExchangisDataSourceDefinition removeExchangisDsDefinition(String type) { + return null; + } + + @Override + public ExchangisDataSourceDefinition updateExchangisDsDefinition(ExchangisDataSourceDefinition dataSource) { + return null; + } + + @Override + public ExchangisDataSourceDefinition getExchangisDsDefinition(String type) { + if (Strings.isNullOrEmpty(type)) { + return null; + } + return this.dataSources.get(type.trim().toUpperCase()); + } + + @Override + public ExchangisDataSourceDefinition getExchangisDsDefinition(Long dataSourceTypeId) { + if (Objects.isNull(dataSourceTypeId)) { + return null; + } + Collection<ExchangisDataSourceDefinition> values = this.dataSources.values(); + for (ExchangisDataSourceDefinition ds : values) { + if (ds.id().equalsIgnoreCase(dataSourceTypeId + "")) { + return ds; + } + } + + return null; + } + + @Override + public Collection<ExchangisDataSourceDefinition> all() { + return this.dataSources.values(); + } + + @Override + public Set<String> keys() { + return this.dataSources.keySet(); + }
+}
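Note that lookups upper-case the type name while registration stores `name()` verbatim, so definitions are expected to report upper-case names (which the `ExchangisDataSourceType` enum below does). A short usage sketch; `MyHiveDefinition` is a hypothetical subclass of the abstract definition:

```java
import com.webank.wedatasphere.exchangis.datasource.core.ExchangisDataSourceDefinition;
import com.webank.wedatasphere.exchangis.datasource.core.context.DefaultExchangisDsContext;

public class ContextDemo {

    public static void main(String[] args) {
        DefaultExchangisDsContext context = new DefaultExchangisDsContext();
        // MyHiveDefinition is hypothetical; any definition whose name() is "HIVE" works.
        ExchangisDataSourceDefinition hive = new MyHiveDefinition();
        context.addExchangisDsDefinition(hive);

        // Caller-side lookups are case-insensitive: the key is trimmed and upper-cased.
        ExchangisDataSourceDefinition found = context.getExchangisDsDefinition("hive");
        System.out.println(found == hive);
    }
}
```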
diff --git a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/context/ExchangisDataSourceContext.java b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/context/ExchangisDataSourceContext.java new file mode 100644 index 000000000..c86cf9870 --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/context/ExchangisDataSourceContext.java @@ -0,0 +1,62 @@ +package com.webank.wedatasphere.exchangis.datasource.core.context; + +import com.webank.wedatasphere.exchangis.datasource.core.ExchangisDataSourceDefinition; +import com.webank.wedatasphere.exchangis.datasource.core.loader.ExchangisDataSourceDefLoader; + +import java.util.Collection; +import java.util.Set; + +/** + * Data source context + */ +public interface ExchangisDataSourceContext { + + boolean registerLoader(ExchangisDataSourceDefLoader loader); + + /** + * Add ds definition + * @param dataSource ds + */ + void addExchangisDsDefinition(ExchangisDataSourceDefinition dataSource); + + /** + * Remove definition + * @param type type + * @return definition + */ + ExchangisDataSourceDefinition removeExchangisDsDefinition(String type); + + /** + * Update definition + * @param dataSource ds + * @return definition + */ + ExchangisDataSourceDefinition updateExchangisDsDefinition(ExchangisDataSourceDefinition dataSource); + + /** + * Get ds definition + * @param type type + * @return definition + */ + ExchangisDataSourceDefinition getExchangisDsDefinition(String type); + + /** + * Get ds definition + * @param dataSourceTypeId type id + * @return definition + */ + ExchangisDataSourceDefinition getExchangisDsDefinition(Long dataSourceTypeId); + + /** + * All definitions + * @return definitions + */ + Collection<ExchangisDataSourceDefinition> all(); + + /** + * Type names + * @return set + */ + Set<String> keys(); + +} diff --git a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/domain/Classifier.java b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/domain/Classifier.java new file mode 100644 index 000000000..78d144ce9 --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/domain/Classifier.java @@ -0,0 +1,26 @@ +package com.webank.wedatasphere.exchangis.datasource.core.domain; + +public enum Classifier { + + ELASTICSEARCH("分布式全文索引"), + + HIVE("大数据存储"), + + MONGODB("非关系型数据库"), + + MYSQL("关系型数据库"), + + SFTP("sftp连接"), + + ORACLE("关系型数据库"), + + STARROCKS("关系型数据库"), + + TDSQL("关系型数据库"); + + public String name; + + Classifier(String name) { + this.name = name; + } +} diff --git a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/domain/ExchangisDataSourceType.java b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/domain/ExchangisDataSourceType.java new file mode 100644 index 000000000..22c7e3085 --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/domain/ExchangisDataSourceType.java @@ -0,0 +1,34 @@ +package com.webank.wedatasphere.exchangis.datasource.core.domain; + +public enum ExchangisDataSourceType { + + ELASTICSEARCH("ELASTICSEARCH", "分布式全文索引"), + + HIVE("HIVE", "大数据存储"), + + MONGODB("MONGODB", "非关系型数据库"), + + MYSQL("MYSQL", "关系型数据库"), + + SFTP("SFTP", "sftp连接"), + + ORACLE("ORACLE", "关系型数据库"), + + STARROCKS("STARROCKS", "大数据存储"), + + TDSQL("TDSQL", "大数据存储"); + + /** + * Type name + */ + public String name; + + /** + * Classifier + */ + public String classifier; + ExchangisDataSourceType(String name, String classifier) { + this.name = name; + this.classifier = classifier; + } +} diff --git a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/domain/MetaColumn.java
b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/domain/MetaColumn.java new file mode 100644 index 000000000..297a0333c --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/domain/MetaColumn.java @@ -0,0 +1,69 @@ +package com.webank.wedatasphere.exchangis.datasource.core.domain; + +/** + * Meta column + */ +public class MetaColumn { + + /** + * Column index + */ + private int index = -1; + + /** + * Is primary key + */ + private boolean primaryKey; + + /** + * Name + */ + private String name; + + /** + * Type symbol + */ + private String type; + + public MetaColumn(){ + + } + + public MetaColumn(int index, String name, String type, boolean primaryKey){ + this.index = index; + this.name = name; + this.type = type; + this.primaryKey = primaryKey; + } + public int getIndex() { + return index; + } + + public void setIndex(int index) { + this.index = index; + } + + public boolean isPrimaryKey() { + return primaryKey; + } + + public void setPrimaryKey(boolean primaryKey) { + this.primaryKey = primaryKey; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public String getType() { + return type; + } + + public void setType(String type) { + this.type = type; + } +} diff --git a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/domain/StructClassifier.java b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/domain/StructClassifier.java new file mode 100644 index 000000000..e7aa660ac --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/domain/StructClassifier.java @@ -0,0 +1,16 @@ +package com.webank.wedatasphere.exchangis.datasource.core.domain; + +public enum StructClassifier { + + STRUCTURED("结构化"), + + SEMI_STRUCTURED("半结构化"), + + NON_STRUCTURED("无结构化"); + + public String name; + + StructClassifier(String name) { + this.name = name; + } +} diff --git a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/exception/ExchangisDataSourceException.java b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/exception/ExchangisDataSourceException.java new file mode 100644 index 000000000..2486a3bb6 --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/exception/ExchangisDataSourceException.java @@ -0,0 +1,34 @@ +package com.webank.wedatasphere.exchangis.datasource.core.exception; + + +import org.apache.linkis.common.exception.ErrorException; +import org.apache.linkis.common.exception.ExceptionLevel; +import org.apache.linkis.common.exception.LinkisRuntimeException; + +public class ExchangisDataSourceException extends ErrorException { + + public ExchangisDataSourceException(int errCode, String desc) { + super(errCode, desc); + } + + public ExchangisDataSourceException(int errCode, String desc, Throwable t) { + super(errCode, desc); + super.initCause(t); + } + public ExchangisDataSourceException(int errCode, String desc, String ip, int port, String serviceKind) { + super(errCode, desc, ip, port, serviceKind); + } + + public static class Runtime extends LinkisRuntimeException { + + public Runtime(int errCode, 
String desc, Throwable t) { + super(errCode, desc); + super.initCause(t); + } + + @Override + public ExceptionLevel getLevel() { + return ExceptionLevel.ERROR; + } + } +} \ No newline at end of file diff --git a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/exception/ExchangisDataSourceExceptionCode.java b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/exception/ExchangisDataSourceExceptionCode.java new file mode 100644 index 000000000..cf5e6083d --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/exception/ExchangisDataSourceExceptionCode.java @@ -0,0 +1,50 @@ +package com.webank.wedatasphere.exchangis.datasource.core.exception; + +// 31000 ~ 31999 +public enum ExchangisDataSourceExceptionCode { + + CLIENT_RPC_ERROR(31000), + CLIENT_QUERY_DATASOURCE_ERROR(31001), + CLIENT_GET_DATASOURCE_VERSION_ERROR(31002), + CLIENT_GET_DATASOURCE_ERROR(31003), + CLIENT_DATASOURCE_TEST_CONNECTION_ERROR(31004), + CLIENT_DATASOURCE_PUBLISH_VERSION_ERROR(31005), + CLIENT_DATASOURCE_PARAMS_GET_ERROR(31006), + CLIENT_DATASOURCE_EXPIRE_ERROR(31007), + CLIENT_METADATA_GET_COLUMNS_ERROR(31008), + CLIENT_METADATA_GET_TABLES_ERROR(31009), + CLIENT_METADATA_GET_DATABASES_ERROR(31010), + CLIENT_DATASOURCE_DELETE_ERROR(31011), + CLIENT_DATASOURCE_UPDATE_ERROR(31012), + CLIENT_DATASOURCE_UPDATE_PARAMS_VERSION_ERROR(31013), + CLIENT_DATASOURCE_CREATE_ERROR(31014), + CLIENT_DATASOURCE_GET_TYPES_ERROR(31015), + CLIENT_DATASOURCE_GET_KEY_DEFINES_ERROR(31016), + CLIENT_METADATA_GET_PARTITION_PROPS(31017), + CLIENT_METADATA_GET_PARTITION(31018), + // Other errors + PARSE_JSON_ERROR(39000), // Parse Json Error + CONTEXT_GET_DATASOURCE_NULL(39001), // DataSource Context Error + PARAMETER_INVALID(39002), // Parameter invalid + DELETE_HISTORY_ERROR(39003), + UNSUPPORTED_DS_MAPPING(39004), + DS_MAPPING_MUST_CONTAIN_HIVE(39005), + DS_TYPE_MUST_DIFFERENT(39006), + UNSUPPORTED_ENGINE(39007); + + private int code; + + ExchangisDataSourceExceptionCode(int code) { + this.code = code; + } + + public int getCode() { + return code; + } + + public void setCode(int code) { + this.code = code; + } +} diff --git a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/exception/ExchangisServiceRpcException.java b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/exception/ExchangisServiceRpcException.java new file mode 100644 index 000000000..bb86ad58f --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/exception/ExchangisServiceRpcException.java @@ -0,0 +1,14 @@ +package com.webank.wedatasphere.exchangis.datasource.core.exception; + +import org.apache.linkis.common.exception.ErrorException; + +/** + * Rpc exception in client + */ +public class ExchangisServiceRpcException extends ErrorException { + + public ExchangisServiceRpcException(String desc, Throwable t) { + super(ExchangisDataSourceExceptionCode.CLIENT_RPC_ERROR.getCode(), desc); + super.initCause(t); + } +}
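The 31000-range codes map one-to-one onto client RPC failures, so call sites typically translate a low-level client exception into a coded one. A minimal sketch; the message text and the service class are illustrative:

```java
import com.webank.wedatasphere.exchangis.datasource.core.exception.ExchangisDataSourceException;
import com.webank.wedatasphere.exchangis.datasource.core.exception.ExchangisDataSourceExceptionCode;

public class MetadataCalls {

    /** Sketch: wrap a client failure with the matching error code. */
    public static void getTables(String user) throws ExchangisDataSourceException {
        try {
            // ... invoke the Linkis metadata remote client here ...
        } catch (Exception e) {
            throw new ExchangisDataSourceException(
                    ExchangisDataSourceExceptionCode.CLIENT_METADATA_GET_TABLES_ERROR.getCode(),
                    "Fail to get tables for user " + user, e);
        }
    }
}
```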
diff --git a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/loader/ExchangisDataSourceDefLoader.java b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/loader/ExchangisDataSourceDefLoader.java new file mode 100644 index 000000000..a1fdc8a34 --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/loader/ExchangisDataSourceDefLoader.java @@ -0,0 +1,33 @@ +package com.webank.wedatasphere.exchangis.datasource.core.loader; + + +import com.webank.wedatasphere.exchangis.dao.hook.MapperHook; +import com.webank.wedatasphere.exchangis.datasource.core.ExchangisDataSourceDefinition; +import com.webank.wedatasphere.exchangis.datasource.core.context.ExchangisDataSourceContext; +import org.apache.linkis.common.conf.CommonVars; + +import java.util.Objects; + +public interface ExchangisDataSourceDefLoader { + + String EXCHANGIS_DIR_NAME = Objects.isNull(CommonVars.apply("wds.exchangis.datasource.extension.dir").getValue()) ? "exchangis-extds" : CommonVars.apply("wds.exchangis.datasource.extension.dir").getValue().toString(); + + String PROPERTIES_NAME = "extds.properties"; + + String LIB_NAME = "lib"; + + String JAR_SUF_NAME = ".jar"; + + String FILE_SCHEMA = "file://"; + + void setClassLoader(ClassLoader classLoader); + + void setContext(ExchangisDataSourceContext context); + + void init(MapperHook mapperHook) throws Exception; + + ExchangisDataSourceDefinition load(String dataSourceType); + + ExchangisDataSourceDefinition get(String dataSourceType, boolean reload); + +} diff --git a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/service/DataSourceInfoService.java b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/service/DataSourceInfoService.java new file mode 100644 index 000000000..c9ef6ada9 --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/service/DataSourceInfoService.java @@ -0,0 +1,7 @@ +package com.webank.wedatasphere.exchangis.datasource.core.service; + +public interface DataSourceInfoService { + +} diff --git a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/service/MetadataInfoService.java b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/service/MetadataInfoService.java new file mode 100644 index 000000000..1166eff4d --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/service/MetadataInfoService.java @@ -0,0 +1,67 @@ +package com.webank.wedatasphere.exchangis.datasource.core.service; + +import com.webank.wedatasphere.exchangis.datasource.core.domain.MetaColumn; +import com.webank.wedatasphere.exchangis.datasource.core.exception.ExchangisDataSourceException; +import com.webank.wedatasphere.exchangis.datasource.core.service.rpc.ServiceRpcClient; + +import java.util.List; +import java.util.Map; + +public interface MetadataInfoService extends ServiceRpcInf { + + /** + * Get properties of partition + * @param database database + * @param table table + * @param partition partition + * @return map + */ + Map<String, String> getPartitionProps(String userName, Long dataSourceId, + String database, String table, String partition) throws ExchangisDataSourceException; + + Map<String, String> getPartitionProps(ServiceRpcClient<?> rpcClient, + String userName, Long dataSourceId, + String database, String table, String partition) throws ExchangisDataSourceException; + + /** + * Get properties of table + * @param database database + * @param table table + * @return map + * @throws ExchangisDataSourceException + */ + Map<String, String> getTableProps(String userName, Long dataSourceId, + String database, String table) throws ExchangisDataSourceException; + + Map<String, String> getTableProps(ServiceRpcClient<?> rpcClient, String userName, Long dataSourceId, + String database, String table) throws ExchangisDataSourceException; + + /** + * Get partition keys + * @param userName userName + * @param dataSourceId data source id + * @param database database + * @param table table + * @return keys + * @throws ExchangisDataSourceException + */ + List<String> getPartitionKeys(String userName, Long dataSourceId, String database, String table) throws ExchangisDataSourceException; + + /** + * Get columns + * @param userName userName + * @param dataSourceId data source id + * @param database database + * @param table table + * @return columns + * @throws ExchangisDataSourceException + */ + List<MetaColumn> getColumns(String userName, Long dataSourceId, String database, String table) throws ExchangisDataSourceException; + + /** + * Get the default(local) hdfs information + * @param uri uri + * @return map + */ + Map<String, String> getLocalHdfsInfo(String uri) throws ExchangisDataSourceException; +} diff --git a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/service/ServiceRpcInf.java b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/service/ServiceRpcInf.java new file mode 100644 index 000000000..b7c5ca7d8 --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/service/ServiceRpcInf.java @@ -0,0 +1,9 @@ +package com.webank.wedatasphere.exchangis.datasource.core.service; + +/** + * RPC service + */ +public interface ServiceRpcInf { + + Class<?> getClientClass(); +} diff --git a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/service/rpc/AbstractServiceRpcDispatcher.java b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/service/rpc/AbstractServiceRpcDispatcher.java new file mode 100644 index 000000000..ab8fae6eb --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/service/rpc/AbstractServiceRpcDispatcher.java @@ -0,0 +1,43 @@ +package com.webank.wedatasphere.exchangis.datasource.core.service.rpc; + + +import com.webank.wedatasphere.exchangis.datasource.core.exception.ExchangisServiceRpcException; + +import java.util.Objects; + +/** + * Abstract implements + */ +public abstract class AbstractServiceRpcDispatcher<U, C extends ServiceRpcClient<?>, T extends ServiceOperation> + implements ServiceRpcDispatcher<U, C, T> { + + @Override + public U dispatch(T operation) throws ExchangisServiceRpcException { + return dispatch(getDefaultRemoteClient(), operation); + } + + @Override + public U dispatch(C remoteClient, T operation) throws ExchangisServiceRpcException { + if (Objects.isNull(remoteClient)){ + throw new ExchangisServiceRpcException("Remote client for service: [" + this.getClass().getSimpleName() + "] cannot be empty", null); + } + try { + return execute(remoteClient, operation); + } catch (Exception e){ + if (e instanceof ExchangisServiceRpcException){ + throw e; + } + throw new ExchangisServiceRpcException("Unexpected exception in dispatching operation: [uri: " + operation.uri + ", timestamp: " + + operation.timestamp + "]", e); + } + } + + @Override + public C getDefaultRemoteClient() { + // Default client is empty + return null; + } + + protected abstract U execute(C remoteClient, T operation) throws ExchangisServiceRpcException; +} diff --git a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/service/rpc/ServiceOperation.java b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/service/rpc/ServiceOperation.java new file mode 100644 index 000000000..e2dae2205 --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/service/rpc/ServiceOperation.java @@ -0,0 +1,43 @@ +package com.webank.wedatasphere.exchangis.datasource.core.service.rpc; + + +/** + * Operation + */ +public class ServiceOperation { + /** + * Uri + */ + protected String uri; + /** + * Timestamp + */ + protected long timestamp; + + + public ServiceOperation(){ + + } + + public ServiceOperation(String uri){ + this.uri = uri; + this.timestamp = System.currentTimeMillis(); + } + + public String getUri() { + return uri; + } + + public void setUri(String uri) { + this.uri = uri; + } + + public long getTimestamp() { + return timestamp; + } + + public void setTimestamp(long timestamp) { + this.timestamp = timestamp; + } + +} diff --git a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/service/rpc/ServiceRpcClient.java b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/service/rpc/ServiceRpcClient.java new file mode 100644 index 000000000..b812fc447 --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/service/rpc/ServiceRpcClient.java @@ -0,0 +1,13 @@ +package com.webank.wedatasphere.exchangis.datasource.core.service.rpc; + +/** + * Each remote client should implement this interface + */ +public interface ServiceRpcClient<C> { + + default String getRemoteIp(){ + return "127.0.0.1"; + } + + C getClient(); +} diff --git a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/service/rpc/ServiceRpcDispatcher.java b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/service/rpc/ServiceRpcDispatcher.java new file mode 100644 index 000000000..e59c98def --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/service/rpc/ServiceRpcDispatcher.java @@ -0,0 +1,22 @@ +package com.webank.wedatasphere.exchangis.datasource.core.service.rpc; + +import com.webank.wedatasphere.exchangis.datasource.core.exception.ExchangisServiceRpcException; + +/** + * Dispatch the operation with remote client + */ +public interface ServiceRpcDispatcher<U, C extends ServiceRpcClient<?>, T extends ServiceOperation> { + + /** + * Dispatch entrance + * @param operation operation + */ + U dispatch(T operation) throws ExchangisServiceRpcException; + + U dispatch(C remoteClient, T operation) throws ExchangisServiceRpcException; + /** + * Get the default remote client + * @return client + */ + C getDefaultRemoteClient(); +}
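The type parameters were reconstructed here as `<U, C, T>` (result, client, operation) from how `dispatch` uses them, so treat that shape as an assumption. Under it, a concrete dispatcher only has to supply `execute`; a hypothetical echo dispatcher as a sketch:

```java
import com.webank.wedatasphere.exchangis.datasource.core.exception.ExchangisServiceRpcException;
import com.webank.wedatasphere.exchangis.datasource.core.service.rpc.AbstractServiceRpcDispatcher;
import com.webank.wedatasphere.exchangis.datasource.core.service.rpc.ServiceOperation;
import com.webank.wedatasphere.exchangis.datasource.core.service.rpc.ServiceRpcClient;

/** Hypothetical dispatcher reporting the operation uri through a String-based client. */
public class EchoRpcDispatcher extends
        AbstractServiceRpcDispatcher<String, ServiceRpcClient<String>, ServiceOperation> {

    @Override
    protected String execute(ServiceRpcClient<String> remoteClient, ServiceOperation operation)
            throws ExchangisServiceRpcException {
        return remoteClient.getRemoteIp() + " <- " + operation.getUri();
    }

    @Override
    public ServiceRpcClient<String> getDefaultRemoteClient() {
        // ServiceRpcClient has a single abstract method, so a lambda suffices
        return () -> "demo-client";
    }
}
```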
diff --git a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/ui/ElementUI.java b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/ui/ElementUI.java new file mode 100644 index 000000000..5589218fd --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/ui/ElementUI.java @@ -0,0 +1,51 @@ +package com.webank.wedatasphere.exchangis.datasource.core.ui; + +import java.util.Map; + +public interface ElementUI<T> { + /** + * Type enum + */ + enum Type { + NONE, TEXTAREA, INPUT, OPTION, MAP + } + + /** + * Field name + * @return string + */ + String getField(); + + /** + * Label + * @return label string + */ + String getLabel(); + + /** + * Type name + * @return string + */ + String getType(); + + Integer getSort(); + + /** + * Value store + * @return value + */ + T getValue(); + + /** + * Default value + * @return default value + */ + T getDefaultValue(); + + /** + * Set the value from a params map + * @param params params map + */ + void setValue(Map<String, Object> params); +} diff --git a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/ui/ExchangisDataSourceIdUI.java b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/ui/ExchangisDataSourceIdUI.java new file mode 100644 index 000000000..817d51d45 --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/ui/ExchangisDataSourceIdUI.java @@ -0,0 +1,55 @@ +package com.webank.wedatasphere.exchangis.datasource.core.ui; + +// Job data source id UI object +public class ExchangisDataSourceIdUI { + + private String type; + + private String id; + + private String ds; + + private String db; + + private String table; + + public String getType() { + return type; + } + + public void setType(String type) { + this.type = type; + } + + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public String getDs() { + return ds; + } + + public void setDs(String ds) { + this.ds = ds; + } + + public String getDb() { + return db; + } + + public void setDb(String db) { + this.db = db; + } + + public String getTable() { + return table; + } + + public void setTable(String table) { + this.table = table; + } +} diff --git a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/ui/ExchangisDataSourceIdsUI.java b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/ui/ExchangisDataSourceIdsUI.java new file mode 100644 index 000000000..115005b1b --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/ui/ExchangisDataSourceIdsUI.java @@ -0,0 +1,24 @@ +package com.webank.wedatasphere.exchangis.datasource.core.ui; + +public class ExchangisDataSourceIdsUI { + + private ExchangisDataSourceIdUI source; + + private ExchangisDataSourceIdUI sink; + + public ExchangisDataSourceIdUI getSource() { + return source; + } + + public void setSource(ExchangisDataSourceIdUI source) { + this.source = source; + } + + public ExchangisDataSourceIdUI getSink() { + return sink; + } + + public void setSink(ExchangisDataSourceIdUI sink) { + this.sink = sink; + } +}
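`ExchangisDataSourceIdUI` mirrors the flattened `TYPE.ID.DB.TABLE` id string described later in `ExchangisJobDataSourcesContent`. A sketch of the parse direction, assuming none of the segments themselves contain dots:

```java
import com.webank.wedatasphere.exchangis.datasource.core.ui.ExchangisDataSourceIdUI;

public class DataSourceIdParser {

    /** Sketch: "HIVE.467.default.demo_table" -> typed UI object. */
    public static ExchangisDataSourceIdUI parse(String flatId) {
        String[] parts = flatId.split("\\.");
        if (parts.length < 4) {
            throw new IllegalArgumentException("Expect TYPE.ID.DB.TABLE, got: " + flatId);
        }
        ExchangisDataSourceIdUI ui = new ExchangisDataSourceIdUI();
        ui.setType(parts[0]);
        ui.setId(parts[1]);
        ui.setDb(parts[2]);
        ui.setTable(parts[3]);
        return ui;
    }
}
```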
diff --git a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/ui/ExchangisDataSourceParamsUI.java b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/ui/ExchangisDataSourceParamsUI.java new file mode 100644 index 000000000..71a2ca67d --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/ui/ExchangisDataSourceParamsUI.java @@ -0,0 +1,36 @@ +package com.webank.wedatasphere.exchangis.datasource.core.ui; + +import java.util.ArrayList; +import java.util.List; + +public class ExchangisDataSourceParamsUI { + + // Mutable lists, so addSourceUI/addSinkUI can append in place + private List<ElementUI<?>> sources = new ArrayList<>(); + + private List<ElementUI<?>> sinks = new ArrayList<>(); + + public List<ElementUI<?>> getSources() { + return sources; + } + + public void setSources(List<ElementUI<?>> sources) { + this.sources = sources; + } + + public List<ElementUI<?>> getSinks() { + return sinks; + } + + public void setSinks(List<ElementUI<?>> sinks) { + this.sinks = sinks; + } + + public void addSourceUI(ElementUI<?> ui) { + this.sources.add(ui); + } + + public void addSinkUI(ElementUI<?> ui) { + this.sinks.add(ui); + } + +}
diff --git a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/ui/InputElementUI.java b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/ui/InputElementUI.java new file mode 100644 index 000000000..88a23ab4d --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/ui/InputElementUI.java @@ -0,0 +1,142 @@ +package com.webank.wedatasphere.exchangis.datasource.core.ui; + +import com.webank.wedatasphere.exchangis.datasource.core.utils.Json; + +import java.util.Map; + +public class InputElementUI implements ElementUI<String> { + private Long id; + private String key; + private String field; + private String label; + private Integer sort; + private String value; + private String defaultValue; + private String unit; + private Boolean required; + private String validateType; + private String validateRange; + private String validateMsg; + private String source; + private Long refId; + + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } + + public String getSource() { + return source; + } + + public void setSource(String source) { + this.source = source; + } + + public String getKey() { + return key; + } + + public void setKey(String key) { + this.key = key; + } + + public void setField(String field) { + this.field = field; + } + + public Long getRefId() { + return refId; + } + + public void setRefId(Long refId) { + this.refId = refId; + } + + @Override + public String getField() { + return this.field; + } + + @Override + public String getLabel() { + return label; + } + + public void setLabel(String label) { + this.label = label; + } + + @Override + public String getType() { + return Type.INPUT.name(); + } + + @Override + public Integer getSort() { + return sort; + } + + public void setSort(Integer sort) { + this.sort = sort; + } + + @Override + public String getValue() { + return value; + } + + public void setValue(String value) { + this.value = value; + } + + @Override + public String getDefaultValue() { return defaultValue; } + + @Override + public void setValue(Map<String, Object> params) { + // Convert to json string directly + this.value = Json.toJson(params, null); + } + + public void setDefaultValue(String defaultValue) { this.defaultValue = defaultValue; } + + public String getUnit() { + return unit; + } + + public void setUnit(String unit) { + this.unit = unit; + } + + public Boolean getRequired() { + return required; + } + + public void setRequired(Boolean required) { + this.required = required; + } + + public String getValidateType() { + return validateType; + } + + public void setValidateType(String validateType) { + this.validateType = validateType; + } + + public String getValidateRange() { + return validateRange; + } + + public void setValidateRange(String validateRange) { + this.validateRange = validateRange; + } + + public String getValidateMsg() { return validateMsg; } + + public void setValidateMsg(String validateMsg) { this.validateMsg = validateMsg; } +} diff --git a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/ui/MapElementUI.java b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/ui/MapElementUI.java new file mode 100644 index 000000000..cf86a5dfc --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/ui/MapElementUI.java @@ -0,0 +1,143 @@ +package com.webank.wedatasphere.exchangis.datasource.core.ui; + +import java.util.Map; + +public class MapElementUI implements ElementUI<Map<String, Object>> { + private Long id; + private String key; + private String field; + private String label; + private Integer sort; + private Map<String, Object> value; + private Map<String, Object> defaultValue; + private String unit; + private Boolean required; + private String validateType; + private String validateRange; + private String validateMsg; + private String source; + private Long refId; + + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } + + public Long getRefId() { + return refId; + } + + public void setRefId(Long refId) { + this.refId = refId; + } + + public String getSource() { + return source; + } + + public void setSource(String source) { + this.source = source; + } + + public String getKey() { + return key; + } + + public void setKey(String key) { + this.key = key; + } + + public void setField(String field) { + this.field = field; + } + + @Override + public String getField() { + return this.field; + } + + @Override + public String getLabel() { + return label; + } + + public void setLabel(String label) { + this.label = label; + } + + @Override + public String getType() { + return Type.MAP.name(); + } + + @Override + public Integer getSort() { + return sort; + } + + public void setSort(Integer sort) { + this.sort = sort; + } + + @Override + public Map<String, Object> getValue() { + return value; + } + + @Override + public void setValue(Map<String, Object> value) { + this.value = value; + } + + @Override + public Map<String, Object> getDefaultValue() { return defaultValue; } + + public void setDefaultValue(Map<String, Object> defaultValue) { this.defaultValue = defaultValue; } + + public String getUnit() { + return unit; + } + + public void setUnit(String unit) { + this.unit = unit; + } + + public Boolean getRequired() { + return required; + } + + public void setRequired(Boolean required) { + this.required = required; + } + + public String getValidateType() { + return validateType; + } + + public void setValidateType(String validateType) { + this.validateType = validateType; + } + + public String getValidateRange() { + return validateRange; + } + + public void setValidateRange(String validateRange) { + this.validateRange = validateRange; + } + + public String getValidateMsg() { return validateMsg; } + + public void setValidateMsg(String validateMsg) { this.validateMsg = validateMsg; } +} diff --git a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/ui/OptionElementUI.java b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/ui/OptionElementUI.java new file mode 100644 index 000000000..214de217a --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/ui/OptionElementUI.java @@ -0,0 +1,117 @@ +package com.webank.wedatasphere.exchangis.datasource.core.ui; + +import com.webank.wedatasphere.exchangis.datasource.core.utils.Json; + +import java.util.Collection; +import java.util.Map; + +public class OptionElementUI implements ElementUI<String> { + private Long id; + private String key; + private String field; + private String label; + private Collection<String> values; + private String value; + private String defaultValue; + private Integer sort; + private String unit; + private Boolean required; + private Long refId; + + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } + + public Long getRefId() { + return refId; + } + + public void setRefId(Long refId) { + this.refId = refId; + } + + public String getKey() { + return key; + } + + public void setKey(String key) { + this.key = key; + } + + public String getField() { + return field; + } + + @Override + public String getType() { + return Type.OPTION.name(); + } + + public void setField(String field) { + this.field = field; + } + + @Override + public String getLabel() { + return label; + } + + public void setLabel(String label) { + this.label = label; + } + + public Collection<String> getValues() { + return values; + } + + public void setValues(Collection<String> values) { + this.values = values; + } + + public String getValue() { + return value; + } + + public void setValue(String value) { + this.value = value; + } + + @Override + public String getDefaultValue() { return defaultValue; } + + @Override + public void setValue(Map<String, Object> params) { + this.value = Json.toJson(params.values(), null); + } + + public void setDefaultValue(String defaultValue) { this.defaultValue = defaultValue; } + + @Override + public Integer getSort() { + return sort; + } + + public void setSort(Integer sort) { + this.sort = sort; + } + + public String getUnit() { + return unit; + } + + public void setUnit(String unit) { + this.unit = unit; + } + + public Boolean getRequired() { + return required; + } + + public void setRequired(Boolean required) { + this.required = required; + } +} diff --git a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/ui/builder/DefaultElementUIFactory.java b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/ui/builder/DefaultElementUIFactory.java new file mode 100644 index 000000000..bc87979e2 --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/ui/builder/DefaultElementUIFactory.java @@ -0,0 +1,83 @@ +package com.webank.wedatasphere.exchangis.datasource.core.ui.builder; + +import com.webank.wedatasphere.exchangis.datasource.core.ui.ElementUI; + +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; +import java.util.Optional; +import java.util.concurrent.atomic.AtomicReference; +import java.util.function.Function; + +/** + * Default element factory + */ +public class DefaultElementUIFactory implements ElementUIFactory { + /** + * Element builders holder + */ + private final Map<Identify, Function<Object, ElementUI<?>>> builders = new HashMap<>(); + + + @Override + @SuppressWarnings("unchecked") + public <T, R> void register(String type, Function<T, ? extends ElementUI<R>> builder, Class<?> inputType) { + builders.putIfAbsent(new Identify(type, inputType), (Function<Object, ElementUI<?>>) builder); + } + + @Override + @SuppressWarnings("unchecked") + public <T> ElementUI<T> createElement(String type, Object input, Class<?> inputType) { + Identify identify = new Identify(type, inputType); + AtomicReference<ElementUI<T>> elementUI = new AtomicReference<>(); + Optional.ofNullable(builders.get(identify)).ifPresent(builder -> + elementUI.set((ElementUI<T>) builder.apply(input))); + return elementUI.get(); + } + + /** + * Identify for element builder + */ + private static class Identify { + + /** + * Type + */ + private final String type; + + /** + * Input class + */ + private final Class<?> inputClass; + + public Identify(String type, Class<?> inputClass){ + this.type = type; + this.inputClass = inputClass; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Identify identify = (Identify) o; + return Objects.equals(type, identify.type) && Objects.equals(inputClass, identify.inputClass); + } + + @Override + public int hashCode() { + return Objects.hash(type, inputClass); + } + + @Override + public String toString() { + return "Identify{" + + "type='" + type + '\'' + + ", inputClass=" + inputClass.getCanonicalName() + + '}'; + } + } + +} diff --git a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/ui/builder/ElementUIFactory.java b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/ui/builder/ElementUIFactory.java new file mode 100644 index 000000000..62befc4dd --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/ui/builder/ElementUIFactory.java @@ -0,0 +1,31 @@ +package com.webank.wedatasphere.exchangis.datasource.core.ui.builder; + +import com.webank.wedatasphere.exchangis.datasource.core.ui.ElementUI; + +import java.util.function.Function; + +/** + * Element factory + */ +public interface ElementUIFactory { + + /** + * Register the element builder + * @param type type + * @param builder builder + * @param inputType input type + * @param <T> input type parameter + * @param <R> element value type + */ + <T, R> void register(String type, Function<T, ? extends ElementUI<R>> builder, Class<?> inputType); + + + /** + * Create element + * @param type type + * @param input input object + * @param inputType input class + * @param <T> element value type + * @return element + */ + <T> ElementUI<T> createElement(String type, Object input, Class<?> inputType); +} diff --git a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/ui/builder/SpringElementUIFactory.java b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/ui/builder/SpringElementUIFactory.java new file mode 100644 index 000000000..15872ea3f --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/ui/builder/SpringElementUIFactory.java @@ -0,0 +1,49 @@ +package com.webank.wedatasphere.exchangis.datasource.core.ui.builder; + +import com.webank.wedatasphere.exchangis.datasource.core.ui.ElementUI; +import com.webank.wedatasphere.exchangis.datasource.core.ui.InputElementUI; +import com.webank.wedatasphere.exchangis.datasource.core.ui.MapElementUI; +import com.webank.wedatasphere.exchangis.datasource.core.ui.OptionElementUI; +import org.springframework.stereotype.Component; + +import javax.annotation.PostConstruct; +import java.util.HashMap; +import java.util.Map; +import java.util.function.Function; + +/** + * Default element factory in spring + */ +@Component +public class SpringElementUIFactory extends DefaultElementUIFactory { + + @PostConstruct + public void init(){ + super.register(ElementUI.Type.MAP.name(), (Function<Map<String, Object>, MapElementUI>) params -> + setElementValue(new MapElementUI(), params), Map.class); + super.register(ElementUI.Type.INPUT.name(), (Function<Map<String, Object>, InputElementUI>) params -> + setElementValue(new InputElementUI(), params), Map.class); + super.register(ElementUI.Type.OPTION.name(), (Function<Map<String, Object>, OptionElementUI>) params -> + setElementValue(new OptionElementUI(), params), Map.class); + } + + /** + * Set the params into element and return + * @param element element + * @param params params Map + * @param <R> element type + * @return element + */ + private <R extends ElementUI<?>> R setElementValue(R element, Map<String, Object> params){ + element.setValue(params); + return element; + } + + public static void main(String[] args){ + SpringElementUIFactory elementUIFactory = new SpringElementUIFactory(); + elementUIFactory.init(); + Map<String, Object> map = new HashMap<>(); + map.putIfAbsent("hello", "world"); + System.out.println(elementUIFactory.createElement(ElementUI.Type.MAP.name(), map, Map.class).getValue()); + } +}
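Registration keys are (type name, input class) pairs, so supporting a UI element kind beyond the three defaults is one more `register` call. A sketch for `TEXTAREA` that reuses `InputElementUI` purely as an assumption (and relies on the generic signatures as reconstructed above):

```java
import com.webank.wedatasphere.exchangis.datasource.core.ui.ElementUI;
import com.webank.wedatasphere.exchangis.datasource.core.ui.InputElementUI;
import com.webank.wedatasphere.exchangis.datasource.core.ui.builder.DefaultElementUIFactory;

import java.util.HashMap;
import java.util.Map;
import java.util.function.Function;

public class TextareaRegistration {

    public static void main(String[] args) {
        DefaultElementUIFactory factory = new DefaultElementUIFactory();
        factory.register(ElementUI.Type.TEXTAREA.name(),
                (Function<Map<String, Object>, InputElementUI>) params -> {
                    InputElementUI element = new InputElementUI();
                    element.setValue(params); // serialized to a JSON string internally
                    return element;
                }, Map.class);

        Map<String, Object> params = new HashMap<>();
        params.put("sql", "select 1");
        ElementUI<?> element = factory.createElement(ElementUI.Type.TEXTAREA.name(), params, Map.class);
        System.out.println(element.getValue());
    }
}
```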
diff --git a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/ui/viewer/DefaultDataSourceUIViewer.java b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/ui/viewer/DefaultDataSourceUIViewer.java new file mode 100644 index 000000000..f3394ad3a --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/ui/viewer/DefaultDataSourceUIViewer.java @@ -0,0 +1,52 @@ +package com.webank.wedatasphere.exchangis.datasource.core.ui.viewer; + +import com.webank.wedatasphere.exchangis.datasource.core.ui.*; +import com.webank.wedatasphere.exchangis.datasource.core.vo.ExchangisJobTransformsContent; + +import java.util.List; + +public class DefaultDataSourceUIViewer implements ExchangisDataSourceUIViewer { + private String subJobName; + private final ExchangisDataSourceIdsUI dataSourceIds; + private final ExchangisDataSourceParamsUI params; +// private final ExchangisDataSourceTransformsUI transforms; + private final ExchangisJobTransformsContent transforms; + private final List<ElementUI<?>> settings; + + public DefaultDataSourceUIViewer(String subJobName, ExchangisDataSourceIdsUI dataSourceIds, ExchangisDataSourceParamsUI params, ExchangisJobTransformsContent transforms, List<ElementUI<?>> settings) { + this.subJobName = subJobName; + this.dataSourceIds = dataSourceIds; + this.params = params; + this.transforms = transforms; + this.settings = settings; + } + + @Override + public String getSubJobName() { + return subJobName; + } + + public void setSubJobName(String subJobName) { + this.subJobName = subJobName; + } + + @Override + public ExchangisDataSourceIdsUI getDataSourceIds() { + return this.dataSourceIds; + } + + @Override + public ExchangisDataSourceParamsUI getParams() { + return this.params; + } + + @Override + public ExchangisJobTransformsContent getTransforms() { + return this.transforms; + } + + @Override + public List<ElementUI<?>> getSettings() { + return this.settings; + } +} diff --git a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/ui/viewer/ExchangisDataSourceUIViewer.java b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/ui/viewer/ExchangisDataSourceUIViewer.java new file mode 100644 index 000000000..95dd61ad0 --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/ui/viewer/ExchangisDataSourceUIViewer.java @@ -0,0 +1,18 @@ +package com.webank.wedatasphere.exchangis.datasource.core.ui.viewer; + +import com.webank.wedatasphere.exchangis.datasource.core.ui.*; +import com.webank.wedatasphere.exchangis.datasource.core.vo.ExchangisJobTransformsContent; + +import java.util.List; + +public interface ExchangisDataSourceUIViewer { + String getSubJobName(); + + ExchangisDataSourceIdsUI getDataSourceIds(); + + ExchangisDataSourceParamsUI getParams(); + + ExchangisJobTransformsContent getTransforms(); + + List<ElementUI<?>> getSettings(); +}
diff --git a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/vo/ExchangisJobDataSourcesContent.java b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/vo/ExchangisJobDataSourcesContent.java new file mode 100644 index 000000000..368cce9ea --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/vo/ExchangisJobDataSourcesContent.java @@ -0,0 +1,149 @@ +package com.webank.wedatasphere.exchangis.datasource.core.vo; + + import com.fasterxml.jackson.annotation.JsonInclude; + import com.fasterxml.jackson.annotation.JsonProperty; + import com.webank.wedatasphere.exchangis.datasource.core.domain.ExchangisDataSourceType; + + /** + * string: HIVE.ID.DB.TABLE + * json: { + * "type": "HIVE", + * "id": 467, + * "name": "HIVE-DEMO", + * "database": "default", + * "table": 
"demo-test" + * } + */ +public class ExchangisJobDataSourcesContent { + + @JsonProperty("source_id") + private String sourceId; + + /** + * Source ds + */ +// private ExchangisJobDataSource source = new ExchangisJobDataSource(); + + + @JsonProperty("sink_id") + private String sinkId; + + /** + * Sink ds + */ +// private ExchangisJobDataSource sink = new ExchangisJobDataSource(); + + public String getSourceId() { + return sourceId; + } + + public void setSourceId(String sourceId) { + this.sourceId = sourceId; + } + + public String getSinkId() { + return sinkId; + } + + public void setSinkId(String sinkId) { + this.sinkId = sinkId; + } + +// public void setSource(ExchangisJobDataSource source) { +// this.source = source; +// } + +// public ExchangisJobDataSource getSource() { +// return source; +// } + +// public void setSink(ExchangisJobDataSource sink) { +// this.sink = sink; +// } + +// public ExchangisJobDataSource getSink() { +// return sink; +// } + + @JsonInclude(JsonInclude.Include.NON_EMPTY) + public static class ExchangisJobDataSource { + + /** + * Data source type + */ + private ExchangisDataSourceType type; + + /** + * Data source id + */ + private String id; + + /** + * Data source name + */ + private String name; + + /** + * Database field + */ + private String database; + + /** + * Table field + */ + private String table; + + /** + * Uri field + */ + private String uri; + + public void setType(ExchangisDataSourceType type) { + this.type = type; + } + + public ExchangisDataSourceType getType() { + return type; + } + + public void setId(String id) { + this.id = id; + } + + public String getId() { + return id; + } + + public void setName(String name) { + this.name = name; + } + + public String getName() { + return name; + } + + public void setDatabase(String database) { + this.database = database; + } + + public String getDatabase() { + return database; + } + + public void setTable(String table) { + this.table = table; + } + + public String getTable() { + return table; + } + + public void setUri(String uri) { + this.uri = uri; + } + + public String getUri() { + return uri; + } + } +} diff --git a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/vo/ExchangisJobInfoContent.java b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/vo/ExchangisJobInfoContent.java new file mode 100644 index 000000000..eae71c5a6 --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/vo/ExchangisJobInfoContent.java @@ -0,0 +1,88 @@ +package com.webank.wedatasphere.exchangis.datasource.core.vo; + +import java.util.List; + +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; + +@JsonIgnoreProperties(ignoreUnknown = true) +public class ExchangisJobInfoContent { + + /** + * Engine name + */ + private String engine; + + /** + * Sub job name + */ + private String subJobName; + + /** + * Data source content + */ + private ExchangisJobDataSourcesContent dataSources; + + /** + * Extra params + */ + private ExchangisJobParamsContent params; + + /** + * Transform define + */ +// private List transforms; + private ExchangisJobTransformsContent transforms; + + /** + * Settings + */ + private List settings; + + public String getEngine() { + return engine; + } + + public void setEngine(String engine) { + this.engine = engine; + } + + public ExchangisJobDataSourcesContent getDataSources() { + return dataSources; + 
} + + public void setDataSources(ExchangisJobDataSourcesContent dataSources) { + this.dataSources = dataSources; + } + + public ExchangisJobParamsContent getParams() { + return params; + } + + public void setParams(ExchangisJobParamsContent params) { + this.params = params; + } + + public ExchangisJobTransformsContent getTransforms() { + return transforms; + } + + public void setTransforms(ExchangisJobTransformsContent transforms) { + this.transforms = transforms; + } + + public List getSettings() { + return settings; + } + + public void setSettings(List settings) { + this.settings = settings; + } + + public String getSubJobName() { + return subJobName; + } + + public void setSubJobName(String subJobName) { + this.subJobName = subJobName; + } +} diff --git a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/vo/ExchangisJobParamsContent.java b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/vo/ExchangisJobParamsContent.java new file mode 100644 index 000000000..64d3a6106 --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/vo/ExchangisJobParamsContent.java @@ -0,0 +1,85 @@ +package com.webank.wedatasphere.exchangis.datasource.core.vo; + +import com.fasterxml.jackson.annotation.JsonAlias; +import com.fasterxml.jackson.annotation.JsonProperty; + +import java.util.List; + +public class ExchangisJobParamsContent { + + /** + * Source params + */ + private List sources; + + /** + * Sink params + */ + private List sinks; + + public List getSources() { + return sources; + } + + public void setSources(List sources) { + this.sources = sources; + } + + public List getSinks() { + return sinks; + } + + public void setSinks(List sinks) { + this.sinks = sinks; + } + + public static class ExchangisJobParamsItem { + + @JsonProperty("config_key") + @JsonAlias({"key", "k"}) + private String configKey; + + @JsonProperty("config_name") + @JsonAlias({"name", "n"}) + private String configName; + + @JsonProperty("config_value") + @JsonAlias({"value", "v"}) + private Object configValue; + + private Integer sort; + + public String getConfigKey() { + return configKey; + } + + public void setConfigKey(String configKey) { + this.configKey = configKey; + } + + public String getConfigName() { + return configName; + } + + public void setConfigName(String configName) { + this.configName = configName; + } + + public Object getConfigValue() { + return configValue; + } + + public void setConfigValue(Object configValue) { + this.configValue = configValue; + } + + public Integer getSort() { + return sort; + } + + public void setSort(Integer sort) { + this.sort = sort; + } + } + +} diff --git a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/vo/ExchangisJobTransformer.java b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/vo/ExchangisJobTransformer.java new file mode 100644 index 000000000..ee8961e51 --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/vo/ExchangisJobTransformer.java @@ -0,0 +1,26 @@ +package com.webank.wedatasphere.exchangis.datasource.core.vo; + +import java.util.List; + +public class ExchangisJobTransformer { + + private String name; + + private List params; + + public String getName() { + return name; + } + + 
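+ // Illustrative only: in the job JSON a transformer entry presumably looks like {"name": "...", "params": [...]}; the concrete transformer names are not defined in this PR.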
public void setName(String name) { + this.name = name; + } + + public List getParams() { + return params; + } + + public void setParams(List params) { + this.params = params; + } +} diff --git a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/vo/ExchangisJobTransformsContent.java b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/vo/ExchangisJobTransformsContent.java new file mode 100644 index 000000000..7b7b61e85 --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/vo/ExchangisJobTransformsContent.java @@ -0,0 +1,82 @@ +package com.webank.wedatasphere.exchangis.datasource.core.vo; + +import com.fasterxml.jackson.annotation.JsonProperty; + +import java.util.List; + +public class ExchangisJobTransformsContent { + private boolean addEnable; + private String type; + private String sql; + + @JsonProperty("code_id") + private String codeId; + + /** + * Table (source) not exist + */ + private boolean srcTblNotExist = false; + + /** + * Table (sink) not exist + */ + private boolean sinkTblNotExist = false; + + private List mapping; + + public boolean isAddEnable() { + return addEnable; + } + + public void setAddEnable(boolean addEnable) { + this.addEnable = addEnable; + } + + public String getType() { + return type; + } + + public void setType(String type) { + this.type = type; + } + + public String getSql() { + return sql; + } + + public void setSql(String sql) { + this.sql = sql; + } + + public List getMapping() { + return mapping; + } + + public void setMapping(List mapping) { + this.mapping = mapping; + } + + public String getCodeId() { + return codeId; + } + + public void setCodeId(String codeId) { + this.codeId = codeId; + } + + public boolean isSrcTblNotExist() { + return srcTblNotExist; + } + + public void setSrcTblNotExist(boolean srcTblNotExist) { + this.srcTblNotExist = srcTblNotExist; + } + + public boolean isSinkTblNotExist() { + return sinkTblNotExist; + } + + public void setSinkTblNotExist(boolean sinkTblNotExist) { + this.sinkTblNotExist = sinkTblNotExist; + } +} diff --git a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/vo/ExchangisJobTransformsItem.java b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/vo/ExchangisJobTransformsItem.java new file mode 100644 index 000000000..10ddad5a6 --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/vo/ExchangisJobTransformsItem.java @@ -0,0 +1,154 @@ +package com.webank.wedatasphere.exchangis.datasource.core.vo; + +import java.util.List; + +import com.fasterxml.jackson.annotation.JsonAlias; +import com.fasterxml.jackson.annotation.JsonProperty; + +public class ExchangisJobTransformsItem { + /** + * Source field name + */ + @JsonProperty("source_field_name") + @JsonAlias({"srcFieldName"}) + private String sourceFieldName; + + /** + * Source field type + */ + @JsonProperty("source_field_type") + @JsonAlias({"srcFieldType"}) + private String sourceFieldType; + + /** + * Sink field name + */ + @JsonProperty("sink_field_name") + @JsonAlias({"sinkFieldName"}) + private String sinkFieldName; + + /** + * Sink field type + */ + @JsonProperty("sink_field_type") + @JsonAlias({"sinkFieldType"}) + private String sinkFieldType; + + /** + * Delete enable 
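+ * (presumably: whether this mapped-field row may be removed in the field-mapping UI)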
+ */ + @JsonProperty("deleteEnable") + private boolean deleteEnable; + + /** + * Source field index + */ + @JsonProperty("source_field_index") + @JsonAlias({"srcFieldIdx"}) + private Integer sourceFieldIndex; + + /** + * Sink field index + */ + @JsonProperty("sink_field_index") + @JsonAlias({"sinkFi"}) + private Integer sinkFieldIndex; + + @JsonProperty("source_field_editable") + private boolean sourceFieldEditable; + + @JsonProperty("sink_field_editable") + private boolean sinkFieldEditable; + + private List validator; + + private ExchangisJobTransformer transformer; + + public String getSourceFieldName() { + return sourceFieldName; + } + + public void setSourceFieldName(String sourceFieldName) { + this.sourceFieldName = sourceFieldName; + } + + public String getSourceFieldType() { + return sourceFieldType; + } + + public void setSourceFieldType(String sourceFieldType) { + this.sourceFieldType = sourceFieldType; + } + + public String getSinkFieldName() { + return sinkFieldName; + } + + public void setSinkFieldName(String sinkFieldName) { + this.sinkFieldName = sinkFieldName; + } + + public String getSinkFieldType() { + return sinkFieldType; + } + + public void setSinkFieldType(String sinkFieldType) { + this.sinkFieldType = sinkFieldType; + } + + public List getValidator() { + return validator; + } + + public void setValidator(List validator) { + this.validator = validator; + } + + public ExchangisJobTransformer getTransformer() { + return transformer; + } + + public void setTransformer(ExchangisJobTransformer transformer) { + this.transformer = transformer; + } + + public Integer getSourceFieldIndex() { + return sourceFieldIndex; + } + + public void setSourceFieldIndex(Integer sourceFieldIndex) { + this.sourceFieldIndex = sourceFieldIndex; + } + + public Integer getSinkFieldIndex() { + return sinkFieldIndex; + } + + public void setSinkFieldIndex(Integer sinkFieldIndex) { + this.sinkFieldIndex = sinkFieldIndex; + } + + public boolean isDeleteEnable() { + return deleteEnable; + } + + public void setDeleteEnable(boolean deleteEnable) { + this.deleteEnable = deleteEnable; + } + + public boolean isSourceFieldEditable() { + return sourceFieldEditable; + } + + public void setSourceFieldEditable(boolean sourceFieldEditable) { + this.sourceFieldEditable = sourceFieldEditable; + } + + public boolean isSinkFieldEditable() { + return sinkFieldEditable; + } + + public void setSinkFieldEditable(boolean sinkFieldEditable) { + this.sinkFieldEditable = sinkFieldEditable; + } +} diff --git a/exchangis-datasource/exchangis-datasource-linkis/pom.xml b/exchangis-datasource/exchangis-datasource-linkis/pom.xml new file mode 100644 index 000000000..d2a6553dd --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-linkis/pom.xml @@ -0,0 +1,45 @@ + + + + exchangis + com.webank.wedatasphere.exchangis + ${revision} + ../../pom.xml + + 4.0.0 + + exchangis-datasource-linkis + + + 8 + 8 + + + + + com.webank.wedatasphere.exchangis + exchangis-datasource-core + ${project.version} + + + + + + + org.apache.maven.plugins + maven-deploy-plugin + + + + net.alchim31.maven + scala-maven-plugin + + + org.apache.maven.plugins + maven-jar-plugin + + + + \ No newline at end of file diff --git a/exchangis-datasource/exchangis-datasource-linkis/src/main/java/com/webank/wedatasphere/exchangis/datasource/linkis/ExchangisBatchDataSource.java b/exchangis-datasource/exchangis-datasource-linkis/src/main/java/com/webank/wedatasphere/exchangis/datasource/linkis/ExchangisBatchDataSource.java new file mode 100644 index 
000000000..277eee047 --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-linkis/src/main/java/com/webank/wedatasphere/exchangis/datasource/linkis/ExchangisBatchDataSource.java @@ -0,0 +1,23 @@ +package com.webank.wedatasphere.exchangis.datasource.linkis; + +import com.webank.wedatasphere.exchangis.datasource.core.AbstractExchangisDataSourceDefinition; +import org.apache.linkis.datasource.client.impl.LinkisDataSourceRemoteClient; +import org.apache.linkis.datasource.client.impl.LinkisMetaDataRemoteClient; + +/** + * Batch data source + */ +public abstract class ExchangisBatchDataSource extends AbstractExchangisDataSourceDefinition { + + + @Override + public LinkisDataSourceRemoteClient getDataSourceRemoteClient() { + return ExchangisLinkisRemoteClient.getLinkisDataSourceRemoteClient(); + } + + @Override + public LinkisMetaDataRemoteClient getMetaDataRemoteClient() { + return ExchangisLinkisRemoteClient.getLinkisMetadataRemoteClient(); + } + +} diff --git a/exchangis-datasource/exchangis-datasource-linkis/src/main/java/com/webank/wedatasphere/exchangis/datasource/linkis/service/LinkisDataSourceInfoService.java b/exchangis-datasource/exchangis-datasource-linkis/src/main/java/com/webank/wedatasphere/exchangis/datasource/linkis/service/LinkisDataSourceInfoService.java new file mode 100644 index 000000000..e11bff3b6 --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-linkis/src/main/java/com/webank/wedatasphere/exchangis/datasource/linkis/service/LinkisDataSourceInfoService.java @@ -0,0 +1,32 @@ +/* + Copyright 2022 WeBank + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+ */ + + +package com.webank.wedatasphere.exchangis.datasource.linkis.service; + +import com.webank.wedatasphere.exchangis.datasource.core.service.DataSourceInfoService; +import com.webank.wedatasphere.exchangis.datasource.core.service.rpc.ServiceRpcClient; +import com.webank.wedatasphere.exchangis.datasource.linkis.ExchangisLinkisRemoteClient; +import com.webank.wedatasphere.exchangis.datasource.linkis.service.rpc.LinkisDataSourceServiceRpcDispatcher; +import org.apache.linkis.datasource.client.impl.LinkisDataSourceRemoteClient; + +public class LinkisDataSourceInfoService extends LinkisDataSourceServiceRpcDispatcher implements DataSourceInfoService { + @Override + public ServiceRpcClient getDefaultRemoteClient() { + return ExchangisLinkisRemoteClient::getLinkisDataSourceRemoteClient; + } + +} diff --git a/exchangis-datasource/exchangis-datasource-linkis/src/main/java/com/webank/wedatasphere/exchangis/datasource/linkis/service/LinkisMetadataInfoService.java b/exchangis-datasource/exchangis-datasource-linkis/src/main/java/com/webank/wedatasphere/exchangis/datasource/linkis/service/LinkisMetadataInfoService.java new file mode 100644 index 000000000..5bbe000e2 --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-linkis/src/main/java/com/webank/wedatasphere/exchangis/datasource/linkis/service/LinkisMetadataInfoService.java @@ -0,0 +1,113 @@ +package com.webank.wedatasphere.exchangis.datasource.linkis.service; + +import com.webank.wedatasphere.exchangis.common.EnvironmentUtils; +import com.webank.wedatasphere.exchangis.datasource.core.domain.MetaColumn; +import com.webank.wedatasphere.exchangis.datasource.core.exception.ExchangisDataSourceException; +import com.webank.wedatasphere.exchangis.datasource.core.service.MetadataInfoService; +import com.webank.wedatasphere.exchangis.datasource.core.service.rpc.ServiceRpcClient; +import com.webank.wedatasphere.exchangis.datasource.linkis.ExchangisLinkisRemoteClient; +import com.webank.wedatasphere.exchangis.datasource.linkis.request.MetadataGetConnInfoAction; +import com.webank.wedatasphere.exchangis.datasource.linkis.request.MetadataGetPartitionPropsAction; +import com.webank.wedatasphere.exchangis.datasource.linkis.response.MetadataGetConnInfoResult; +import com.webank.wedatasphere.exchangis.datasource.linkis.response.MetadataGetPartitionPropsResult; +import com.webank.wedatasphere.exchangis.datasource.linkis.service.rpc.LinkisDataSourceServiceOperation; +import com.webank.wedatasphere.exchangis.datasource.linkis.service.rpc.LinkisDataSourceServiceRpcDispatcher; +import org.apache.linkis.datasource.client.impl.LinkisMetaDataRemoteClient; +import org.apache.linkis.datasource.client.request.MetadataGetColumnsAction; +import org.apache.linkis.datasource.client.request.MetadataGetPartitionsAction; +import org.apache.linkis.datasource.client.request.MetadataGetTablePropsAction; +import org.apache.linkis.datasource.client.response.MetadataGetColumnsResult; +import org.apache.linkis.datasource.client.response.MetadataGetPartitionsResult; +import org.apache.linkis.datasource.client.response.MetadataGetTablePropsResult; +import org.apache.linkis.metadata.query.common.domain.MetaColumnInfo; + +import java.util.*; + +import static com.webank.wedatasphere.exchangis.datasource.core.exception.ExchangisDataSourceExceptionCode.*; + +/** + * Linkis to fetch metadata info + */ +public class LinkisMetadataInfoService extends LinkisDataSourceServiceRpcDispatcher + implements MetadataInfoService { + // TODO define in properties file + private static final String 
LOCAL_HDFS_NAME = ".LOCAL_HDFS"; + @Override + public Class<?> getClientClass() { + return LinkisMetaDataRemoteClient.class; + } + + @Override + public ServiceRpcClient getDefaultRemoteClient() { + return ExchangisLinkisRemoteClient::getLinkisMetadataRemoteClient; + } + + @Override + public Map<String, String> getPartitionProps(String userName, Long dataSourceId, + String database, String table, String partition) throws ExchangisDataSourceException { + return getPartitionProps(getDefaultRemoteClient(), userName, dataSourceId, database, table, partition); + } + + @Override + @SuppressWarnings("unchecked") + public Map<String, String> getPartitionProps(ServiceRpcClient rpcClient, + String userName, Long dataSourceId, + String database, String table, String partition) throws ExchangisDataSourceException { + MetadataGetPartitionPropsResult result = dispatch((ServiceRpcClient) rpcClient, new LinkisDataSourceServiceOperation(() -> { + MetadataGetPartitionPropsAction action = new MetadataGetPartitionPropsAction(dataSourceId, + database, table, partition, LINKIS_RPC_CLIENT_SYSTEM.getValue()); + action.setUser(userName); + return action; + }), CLIENT_METADATA_GET_PARTITION_PROPS.getCode(), "getPartitionProps"); + return result.props(); + } + + @Override + public Map<String, String> getTableProps(String userName, Long dataSourceId, String database, String table) throws ExchangisDataSourceException { + return getTableProps(getDefaultRemoteClient(), userName, dataSourceId, database, table); + } + + @Override + @SuppressWarnings("unchecked") + public Map<String, String> getTableProps(ServiceRpcClient rpcClient, String userName, Long dataSourceId, String database, String table) throws ExchangisDataSourceException { + MetadataGetTablePropsResult result = dispatch((ServiceRpcClient) rpcClient, new LinkisDataSourceServiceOperation(() -> MetadataGetTablePropsAction.builder() + .setDataSourceId(dataSourceId).setDatabase(database).setTable(table) + .setUser(userName).setSystem(LINKIS_RPC_CLIENT_SYSTEM.getValue()).build()), CLIENT_METADATA_GET_TABLES_ERROR.getCode(), "getTableProps"); + return result.props(); + } + + @Override + public List<String> getPartitionKeys(String userName, Long dataSourceId, String database, String table) throws ExchangisDataSourceException { + MetadataGetPartitionsResult result = dispatch(getDefaultRemoteClient(), new LinkisDataSourceServiceOperation(() -> MetadataGetPartitionsAction.builder() + .setDataSourceId(dataSourceId).setDatabase(database).setTable(table) + .setUser(userName).setSystem(LINKIS_RPC_CLIENT_SYSTEM.getValue()).build()), CLIENT_METADATA_GET_PARTITION.getCode(), "getPartitionKeys"); + return result.getPartitionInfo().getPartKeys(); + } + + @Override + public List<MetaColumn> getColumns(String userName, Long dataSourceId, String database, String table) throws ExchangisDataSourceException { + MetadataGetColumnsResult result = dispatch(getDefaultRemoteClient(), new LinkisDataSourceServiceOperation(() -> MetadataGetColumnsAction.builder() + .setSystem(LINKIS_RPC_CLIENT_SYSTEM.getValue()) + .setDataSourceId(dataSourceId).setDatabase(database).setTable(table) + .setUser(userName).build()), CLIENT_METADATA_GET_PARTITION.getCode(), "getColumns"); + List<MetaColumnInfo> columnInfoList = result.getAllColumns(); + List<MetaColumn> columns = new ArrayList<>(); + Optional.ofNullable(columnInfoList).ifPresent(infoList -> infoList.forEach(info -> + columns.add(new MetaColumn(info.getIndex(), info.getName(), info.getType(), info.isPrimaryKey())))); + return columns; + } + + @Override + public Map<String, String> getLocalHdfsInfo(String uri) throws ExchangisDataSourceException { + Map<String, Object> query = new HashMap<>(); + query.put("uri", uri); + MetadataGetConnInfoResult result = dispatch(getDefaultRemoteClient(), new LinkisDataSourceServiceOperation(() -> { + MetadataGetConnInfoAction action = new MetadataGetConnInfoAction(LOCAL_HDFS_NAME, LINKIS_RPC_CLIENT_SYSTEM.getValue(), query); + action.setUser(EnvironmentUtils.getJvmUser()); + return action; + }), CLIENT_METADATA_GET_PARTITION.getCode(), "getLocalHdfsInfo"); + return result.getInfo(); + } + + +}
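For orientation, a server-side caller would use the MetadataInfoService interface roughly like this (sketch; the service variable and identifiers are illustrative, exception handling elided): List<MetaColumn> columns = metadataInfoService.getColumns("hadoop", 467L, "default", "demo_test");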
query.put("uri", uri); + MetadataGetConnInfoResult result = dispatch(getDefaultRemoteClient(), new LinkisDataSourceServiceOperation(() -> { + MetadataGetConnInfoAction action = new MetadataGetConnInfoAction(LOCAL_HDFS_NAME, LINKIS_RPC_CLIENT_SYSTEM.getValue(), query); + action.setUser(EnvironmentUtils.getJvmUser()); + return action; + }), CLIENT_METADATA_GET_PARTITION.getCode(), "getLocalHdfsInfo"); + return result.getInfo(); + } + + +} diff --git a/exchangis-datasource/exchangis-datasource-linkis/src/main/java/com/webank/wedatasphere/exchangis/datasource/linkis/service/rpc/LinkisDataSourceServiceOperation.java b/exchangis-datasource/exchangis-datasource-linkis/src/main/java/com/webank/wedatasphere/exchangis/datasource/linkis/service/rpc/LinkisDataSourceServiceOperation.java new file mode 100644 index 000000000..d98928005 --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-linkis/src/main/java/com/webank/wedatasphere/exchangis/datasource/linkis/service/rpc/LinkisDataSourceServiceOperation.java @@ -0,0 +1,33 @@ +package com.webank.wedatasphere.exchangis.datasource.linkis.service.rpc; + +import com.webank.wedatasphere.exchangis.datasource.core.service.rpc.ServiceOperation; +import org.apache.commons.lang.StringUtils; +import org.apache.linkis.httpclient.dws.request.DWSHttpAction; +import org.apache.linkis.httpclient.request.Action; + +import java.util.Objects; +import java.util.function.Supplier; + +/** + * Operation contains request action + */ +public class LinkisDataSourceServiceOperation extends ServiceOperation { + + /** + * Action + */ + private Action requestAction; + + public LinkisDataSourceServiceOperation(Supplier actionBuilder){ + if (Objects.nonNull(actionBuilder)){ + requestAction = actionBuilder.get(); + if (requestAction instanceof DWSHttpAction){ + this.uri = (StringUtils.join(((DWSHttpAction) requestAction).suffixURLs(), "/")); + } + } + } + + public Action getRequestAction() { + return requestAction; + } +} diff --git a/exchangis-datasource/exchangis-datasource-linkis/src/main/java/com/webank/wedatasphere/exchangis/datasource/linkis/service/rpc/LinkisDataSourceServiceRpcDispatcher.java b/exchangis-datasource/exchangis-datasource-linkis/src/main/java/com/webank/wedatasphere/exchangis/datasource/linkis/service/rpc/LinkisDataSourceServiceRpcDispatcher.java new file mode 100644 index 000000000..9ecf2402d --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-linkis/src/main/java/com/webank/wedatasphere/exchangis/datasource/linkis/service/rpc/LinkisDataSourceServiceRpcDispatcher.java @@ -0,0 +1,58 @@ +package com.webank.wedatasphere.exchangis.datasource.linkis.service.rpc; + +import com.webank.wedatasphere.exchangis.datasource.core.exception.ExchangisDataSourceException; +import com.webank.wedatasphere.exchangis.datasource.core.exception.ExchangisServiceRpcException; +import com.webank.wedatasphere.exchangis.datasource.core.service.rpc.AbstractServiceRpcDispatcher; +import com.webank.wedatasphere.exchangis.datasource.core.service.rpc.ServiceRpcClient; +import org.apache.linkis.common.conf.CommonVars; +import org.apache.linkis.datasource.client.RemoteClient; +import org.apache.linkis.datasource.client.impl.LinkisMetaDataRemoteClient; +import org.apache.linkis.httpclient.dws.response.DWSResult; +import org.apache.linkis.httpclient.request.Action; +import org.apache.linkis.httpclient.response.Result; + +import java.util.Objects; + +public class LinkisDataSourceServiceRpcDispatcher extends + AbstractServiceRpcDispatcher, LinkisDataSourceServiceOperation> { + + 
diff --git a/exchangis-datasource/exchangis-datasource-linkis/src/main/java/com/webank/wedatasphere/exchangis/datasource/linkis/service/rpc/LinkisDataSourceServiceRpcDispatcher.java b/exchangis-datasource/exchangis-datasource-linkis/src/main/java/com/webank/wedatasphere/exchangis/datasource/linkis/service/rpc/LinkisDataSourceServiceRpcDispatcher.java new file mode 100644 index 000000000..9ecf2402d --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-linkis/src/main/java/com/webank/wedatasphere/exchangis/datasource/linkis/service/rpc/LinkisDataSourceServiceRpcDispatcher.java @@ -0,0 +1,58 @@ +package com.webank.wedatasphere.exchangis.datasource.linkis.service.rpc; + + import com.webank.wedatasphere.exchangis.datasource.core.exception.ExchangisDataSourceException; + import com.webank.wedatasphere.exchangis.datasource.core.exception.ExchangisServiceRpcException; + import com.webank.wedatasphere.exchangis.datasource.core.service.rpc.AbstractServiceRpcDispatcher; + import com.webank.wedatasphere.exchangis.datasource.core.service.rpc.ServiceRpcClient; + import org.apache.linkis.common.conf.CommonVars; + import org.apache.linkis.datasource.client.RemoteClient; + import org.apache.linkis.datasource.client.impl.LinkisMetaDataRemoteClient; + import org.apache.linkis.httpclient.dws.response.DWSResult; + import org.apache.linkis.httpclient.request.Action; + import org.apache.linkis.httpclient.response.Result; + + import java.util.Objects; + + public class LinkisDataSourceServiceRpcDispatcher<T extends RemoteClient> extends + AbstractServiceRpcDispatcher<ServiceRpcClient<T>, LinkisDataSourceServiceOperation> { + + public static final CommonVars<String> LINKIS_RPC_CLIENT_SYSTEM = CommonVars.apply("wds.exchangis.datasource.linkis.client.system", "exchangis"); + @Override + @SuppressWarnings("unchecked") + protected <U> U execute(ServiceRpcClient<T> remoteClient, LinkisDataSourceServiceOperation operation) throws ExchangisServiceRpcException { + Action action = operation.getRequestAction(); + try { + Result result = remoteClient.getClient().execute(action); + if (Objects.isNull(result)){ + throw new ExchangisServiceRpcException("The return of client is empty, operation: [" + operation.getUri() + "]", null); + } + if (result instanceof DWSResult && ((DWSResult) result).getStatus() != 0){ + throw new ExchangisServiceRpcException("The status of result from client is: [" + ((DWSResult) result).getStatus() + "], operation: [" + operation.getUri() + "]", null); + } + return (U) result; + } catch (ClassCastException e){ + throw new ExchangisServiceRpcException("The return of dispatcher should be a suitable Result type", e); + } + } + + /** + * Dispatch with an error resolver + * @param rpcClient rpc client + * @param operation operation + * @param errorCode error code + * @param method method + * @param <R> return type + * @return result + * @throws ExchangisDataSourceException + */ + protected <R> R dispatch(ServiceRpcClient<T> rpcClient, LinkisDataSourceServiceOperation operation, + int errorCode, String method) + throws ExchangisDataSourceException { + try{ + return dispatch(rpcClient, operation); + } catch (ExchangisServiceRpcException e){ + throw new ExchangisDataSourceException(errorCode, "Fail to invoke operation: [method: " + method + ", uri:" + operation.getUri() + "], reason: " + e.getMessage(), e); + } + } +} diff --git a/exchangis-datasource/exchangis-datasource-linkis/src/main/scala/com/webank/wedatasphere/exchangis/datasource/linkis/ExchangisLinkisRemoteClient.scala b/exchangis-datasource/exchangis-datasource-linkis/src/main/scala/com/webank/wedatasphere/exchangis/datasource/linkis/ExchangisLinkisRemoteClient.scala new file mode 100644 index 000000000..c3d4d242b --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-linkis/src/main/scala/com/webank/wedatasphere/exchangis/datasource/linkis/ExchangisLinkisRemoteClient.scala @@ -0,0 +1,168 @@ +package com.webank.wedatasphere.exchangis.datasource.linkis + + import com.webank.wedatasphere.exchangis.common.linkis.client.{ClientConfiguration, ExchangisHttpClient} + import com.webank.wedatasphere.exchangis.common.linkis.client.config.ExchangisClientConfig + import com.webank.wedatasphere.exchangis.datasource.core.ExchangisDataSourceConfiguration + import com.webank.wedatasphere.exchangis.datasource.core.exception.{ExchangisDataSourceException, ExchangisDataSourceExceptionCode} + import org.apache.commons.lang3.StringUtils + import org.apache.linkis.datasource.client.config.DatasourceClientConfig.DATA_SOURCE_SERVICE_CLIENT_NAME + import org.apache.linkis.datasource.client.impl.{LinkisDataSourceRemoteClient, LinkisMetaDataRemoteClient} + import org.apache.linkis.datasource.client.request._ + import org.apache.linkis.datasource.client.response._ + import org.apache.linkis.datasourcemanager.common.domain.{DataSource, DataSourceType} + import org.apache.linkis.httpclient.dws.DWSHttpClient + + + object ExchangisLinkisRemoteClient { + + // Linkis datasource client config + val clientConfig: ExchangisClientConfig = ExchangisClientConfig.newBuilder + .addServerUrl(ExchangisDataSourceConfiguration.SERVER_URL.getValue) + .setAuthTokenValue(ExchangisDataSourceConfiguration.AUTH_TOKEN_VALUE.getValue)
.setDWSVersion(ExchangisDataSourceConfiguration.DWS_VERSION.getValue) + .build() + + /** + * Data source client + */ + val dataSourceClient = new ExchangisDataSourceClient(clientConfig, null) + + /** + * Meta data client + */ + val metaDataClient = new ExchangisMetadataClient(clientConfig) + + def getLinkisDataSourceRemoteClient: LinkisDataSourceRemoteClient = { + dataSourceClient + } + + def getLinkisMetadataRemoteClient: LinkisMetaDataRemoteClient = { + metaDataClient + } + + def close(): Unit = { + dataSourceClient.close() + metaDataClient.close() + } + + def queryDataSource(linkisDatasourceName: String): QueryDataSourceResult = { + dataSourceClient.queryDataSource(QueryDataSourceAction.builder() + .setSystem("") + .setName(linkisDatasourceName) + .setTypeId(1) + .setIdentifies("") + .setCurrentPage(1) + .setUser("hadoop") + .setPageSize(1).build() + ) + } + + /** + * get datasourceConnect information + * + * @param dataSourceId id + * @param system dssSystem + * @param user username + * @return + */ + def queryConnectParams(dataSourceId: Long, system: String, user: String): GetConnectParamsByDataSourceIdResult = { + dataSourceClient.getConnectParams(GetConnectParamsByDataSourceIdAction.builder() + .setDataSourceId(dataSourceId) + .setSystem(system) + .setUser(user) + .build() + ) + } + + /** + * get all DataSourceTypes + * + * @param user user + * @return + */ + def queryDataSourceTypes(user: String): java.util.List[DataSourceType] = { + dataSourceClient.getAllDataSourceTypes(GetAllDataSourceTypesAction.builder() + .setUser(user) + .build() + ).getAllDataSourceType + } + + + def queryClusterByDataSourceType(system: String, name: String, typeId: Long, user: String): java.util.List[DataSource] = { + dataSourceClient.queryDataSource(QueryDataSourceAction.builder() + .setSystem(system) + .setName(name) + .setTypeId(typeId) + .setIdentifies("") + .setCurrentPage(1) + .setPageSize(10) + .setUser(user) + .build() + ).getAllDataSource + } + + + /** + * get DataBases list + * + * @param system + * @param dataSourceId + * @param user + * @return list + */ + def queryDataBasesByCuster(system: String, dataSourceId: Long, user: String): MetadataGetDatabasesResult = { + metaDataClient.getDatabases(MetadataGetDatabasesAction.builder() + .setSystem(system) + .setDataSourceId(dataSourceId) + .setUser(user) + .build() + ) + } + + def queryTablesByDataBase(system: String, dataSourceId: Long, dataBase: String, user: String): MetadataGetTablesResult = { + metaDataClient.getTables(MetadataGetTablesAction.builder() + .setSystem(system) + .setDataSourceId(dataSourceId) + .setDatabase(dataBase) + .setUser(user) + .build() + ) + } + + def queryColumnsByTable(system: String, dataSourceId: Long, dataBase: String, table: String, user: String): MetadataGetColumnsResult = { + metaDataClient.getColumns(MetadataGetColumnsAction.builder() + .setSystem(system) + .setDataSourceId(dataSourceId) + .setDatabase(dataBase) + .setTable(table) + .setUser(user) + .build() + ) + } + + +} + +/** + * Exchangis data source client + * @param clientConfig client config + * @param clientName client name + */ +class ExchangisDataSourceClient(clientConfig: ExchangisClientConfig, clientName: String) extends LinkisDataSourceRemoteClient(clientConfig, clientName){ + + protected override val dwsHttpClient: DWSHttpClient = { + val client = if (StringUtils.isEmpty(clientName)) DATA_SOURCE_SERVICE_CLIENT_NAME.getValue else clientName + Option(clientConfig) match { + case Some(config) => new ExchangisHttpClient(config, client) + case _ => throw 
new ExchangisDataSourceException(ExchangisDataSourceExceptionCode.PARAMETER_INVALID.getCode, "Linkis client config cannot be null") + } + } +} + +/** + * Exchangis meta data client + * @param clientConfig client config + */ +class ExchangisMetadataClient(clientConfig: ExchangisClientConfig) extends LinkisMetaDataRemoteClient(clientConfig){ + protected override val dwsHttpClient: DWSHttpClient = new ExchangisHttpClient(clientConfig, "MetaData-Client") +} diff --git a/exchangis-datasource/exchangis-datasource-linkis/src/main/scala/com/webank/wedatasphere/exchangis/datasource/linkis/request/MetadataGetConnInfoAction.scala b/exchangis-datasource/exchangis-datasource-linkis/src/main/scala/com/webank/wedatasphere/exchangis/datasource/linkis/request/MetadataGetConnInfoAction.scala new file mode 100644 index 000000000..d8050a648 --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-linkis/src/main/scala/com/webank/wedatasphere/exchangis/datasource/linkis/request/MetadataGetConnInfoAction.scala @@ -0,0 +1,27 @@ +package com.webank.wedatasphere.exchangis.datasource.linkis.request + +import org.apache.linkis.datasource.client.request.DataSourceAction +import org.apache.linkis.httpclient.request.GetAction +import java.util +import scala.collection.JavaConverters._ +/** + * Get connection info action + */ +class MetadataGetConnInfoAction(dataSourceName: String, system: String, query: util.Map[String, Any]) extends GetAction with DataSourceAction{ + + setParameter("dataSourceName", dataSourceName); + setParameter("system", system); + + Option(query) match { + case Some(queryParams) => + queryParams.asScala.foreach(param => setParameter(param._1, param._2)) + case _ => + } + private var user: String = _ + + override def suffixURLs: Array[String] = Array("metadataQuery", "getConnectionInfo") + + override def setUser(user: String): Unit = this.user = user + + override def getUser: String = this.user +} diff --git a/exchangis-datasource/exchangis-datasource-linkis/src/main/scala/com/webank/wedatasphere/exchangis/datasource/linkis/request/MetadataGetPartitionPropsAction.scala b/exchangis-datasource/exchangis-datasource-linkis/src/main/scala/com/webank/wedatasphere/exchangis/datasource/linkis/request/MetadataGetPartitionPropsAction.scala new file mode 100644 index 000000000..c8caa7ade --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-linkis/src/main/scala/com/webank/wedatasphere/exchangis/datasource/linkis/request/MetadataGetPartitionPropsAction.scala @@ -0,0 +1,53 @@ +package com.webank.wedatasphere.exchangis.datasource.linkis.request + +import org.apache.linkis.datasource.client.config.DatasourceClientConfig.METADATA_SERVICE_MODULE +import org.apache.linkis.datasource.client.request.DataSourceAction +import org.apache.linkis.httpclient.request.GetAction + +class MetadataGetPartitionPropsAction extends GetAction with DataSourceAction{ + /** + * Data source id + */ + private var dataSourceId: Long = _ + + /** + * Database + */ + private var database: String = _ + + /** + * Table + */ + private var table: String = _ + + /** + * Partition + */ + private var partition: String = _ + + override def suffixURLs: Array[String] = Array(METADATA_SERVICE_MODULE.getValue, "props", dataSourceId.toString, "db", database, "table", table, "partition", partition) + + private var user: String = _ + + override def setUser(user: String): Unit = this.user = user + + override def getUser: String = this.user + + /** + * Just use the constructor instead of builder + * @param dataSourceId data source id + * @param 
database database + * @param table table + * @param partition partition + * @param system system + */ + def this(dataSourceId: Long, database: String, table: String, partition: String, + system: String){ + this() + this.dataSourceId = dataSourceId + this.database = database + this.table = table + this.partition = partition + setParameter("system", system) + } +} diff --git a/exchangis-datasource/exchangis-datasource-linkis/src/main/scala/com/webank/wedatasphere/exchangis/datasource/linkis/request/ParamsTestConnectAction.scala b/exchangis-datasource/exchangis-datasource-linkis/src/main/scala/com/webank/wedatasphere/exchangis/datasource/linkis/request/ParamsTestConnectAction.scala new file mode 100644 index 000000000..7461f63fb --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-linkis/src/main/scala/com/webank/wedatasphere/exchangis/datasource/linkis/request/ParamsTestConnectAction.scala @@ -0,0 +1,39 @@ +package com.webank.wedatasphere.exchangis.datasource.linkis.request + +import org.apache.linkis.datasource.client.config.DatasourceClientConfig.DATA_SOURCE_SERVICE_MODULE +import org.apache.linkis.datasource.client.request.DataSourceAction +import org.apache.linkis.httpclient.dws.DWSHttpClient +import org.apache.linkis.httpclient.request.POSTAction + +import java.util +import scala.collection.JavaConverters.mapAsScalaMapConverter + +/** + * Connect test for the data source params + */ +class ParamsTestConnectAction extends POSTAction with DataSourceAction{ + + private var user: String = _ + + override def getRequestPayload: String = DWSHttpClient.jacksonJson.writeValueAsString(getRequestPayloads) + + override def suffixURLs: Array[String] = Array(DATA_SOURCE_SERVICE_MODULE.getValue, "op", "connect", "json") + + override def setUser(user: String): Unit = this.user = user + + override def getUser: String = this.user + + /** + * + * @param dataSource data source map + * @param user user + */ + def this(dataSource: util.Map[String, Any], user: String){ + this() + dataSource.asScala.foreach{ + case (key, value) => + this.addRequestPayload(key, value) + } + this.user = user + } +} diff --git a/exchangis-datasource/exchangis-datasource-linkis/src/main/scala/com/webank/wedatasphere/exchangis/datasource/linkis/response/MetadataGetConnInfoResult.scala b/exchangis-datasource/exchangis-datasource-linkis/src/main/scala/com/webank/wedatasphere/exchangis/datasource/linkis/response/MetadataGetConnInfoResult.scala new file mode 100644 index 000000000..8c5402adc --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-linkis/src/main/scala/com/webank/wedatasphere/exchangis/datasource/linkis/response/MetadataGetConnInfoResult.scala @@ -0,0 +1,14 @@ +package com.webank.wedatasphere.exchangis.datasource.linkis.response + +import org.apache.linkis.httpclient.dws.annotation.DWSHttpMessageResult +import org.apache.linkis.httpclient.dws.response.DWSResult + +import scala.beans.BeanProperty +import java.util +/** + * Result of get connection info + */ +@DWSHttpMessageResult("/api/rest_j/v\\d+/metadataQuery/getConnectionInfo") +class MetadataGetConnInfoResult extends DWSResult{ + @BeanProperty var info: util.Map[String, String] = _ +} diff --git a/exchangis-datasource/exchangis-datasource-linkis/src/main/scala/com/webank/wedatasphere/exchangis/datasource/linkis/response/MetadataGetPartitionPropsResult.scala b/exchangis-datasource/exchangis-datasource-linkis/src/main/scala/com/webank/wedatasphere/exchangis/datasource/linkis/response/MetadataGetPartitionPropsResult.scala new file mode 100644 index 
000000000..4f56f5aed --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-linkis/src/main/scala/com/webank/wedatasphere/exchangis/datasource/linkis/response/MetadataGetPartitionPropsResult.scala @@ -0,0 +1,12 @@ +package com.webank.wedatasphere.exchangis.datasource.linkis.response + +import org.apache.linkis.httpclient.dws.annotation.DWSHttpMessageResult +import org.apache.linkis.httpclient.dws.response.DWSResult + +import scala.beans.BeanProperty +import java.util + +@DWSHttpMessageResult("/api/rest_j/v\\d+/metadatamanager/props/(\\S+)/db/(\\S+)/table/(\\S+)/partition/(\\S+)") +class MetadataGetPartitionPropsResult extends DWSResult{ + @BeanProperty var props: util.Map[String, String] = _ +} diff --git a/exchangis-datasource/exchangis-datasource-linkis/src/main/scala/com/webank/wedatasphere/exchangis/datasource/linkis/response/ParamsTestConnectResult.scala b/exchangis-datasource/exchangis-datasource-linkis/src/main/scala/com/webank/wedatasphere/exchangis/datasource/linkis/response/ParamsTestConnectResult.scala new file mode 100644 index 000000000..1dc447082 --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-linkis/src/main/scala/com/webank/wedatasphere/exchangis/datasource/linkis/response/ParamsTestConnectResult.scala @@ -0,0 +1,12 @@ +package com.webank.wedatasphere.exchangis.datasource.linkis.response + +import org.apache.linkis.httpclient.dws.annotation.DWSHttpMessageResult +import org.apache.linkis.httpclient.dws.response.DWSResult + +import scala.beans.BeanProperty + +@DWSHttpMessageResult("/api/rest_j/v\\d+/data-source-manager/op/connect/json") +class ParamsTestConnectResult extends DWSResult{ + @BeanProperty var ok: Boolean = _ + +} diff --git a/exchangis-datasource/exchangis-datasource-loader/pom.xml b/exchangis-datasource/exchangis-datasource-loader/pom.xml new file mode 100644 index 000000000..97af1ee06 --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-loader/pom.xml @@ -0,0 +1,45 @@ + + + + exchangis + com.webank.wedatasphere.exchangis + ${revision} + ../../pom.xml + + 4.0.0 + + exchangis-datasource-loader + + + 8 + 8 + + + + + com.webank.wedatasphere.exchangis + exchangis-datasource-core + ${project.version} + + + + + + + org.apache.maven.plugins + maven-deploy-plugin + + + + net.alchim31.maven + scala-maven-plugin + + + org.apache.maven.plugins + maven-jar-plugin + + + + \ No newline at end of file diff --git a/exchangis-datasource/exchangis-datasource-loader/src/main/java/com/webank/wedatasphere/exchangis/datasource/loader/clazzloader/ExchangisDataSourceClassLoader.java b/exchangis-datasource/exchangis-datasource-loader/src/main/java/com/webank/wedatasphere/exchangis/datasource/loader/clazzloader/ExchangisDataSourceClassLoader.java new file mode 100644 index 000000000..a55ddfd5a --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-loader/src/main/java/com/webank/wedatasphere/exchangis/datasource/loader/clazzloader/ExchangisDataSourceClassLoader.java @@ -0,0 +1,43 @@ +/* + * + * * Copyright 2019 WeBank + * * + * * Licensed under the Apache License, Version 2.0 (the "License"); + * * you may not use this file except in compliance with the License. + * * You may obtain a copy of the License at + * * + * * http://www.apache.org/licenses/LICENSE-2.0 + * * + * * Unless required by applicable law or agreed to in writing, software + * * distributed under the License is distributed on an "AS IS" BASIS, + * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * * See the License for the specific language governing permissions and + * * limitations under the License. + * + */ + + package com.webank.wedatasphere.exchangis.datasource.loader.clazzloader; + + import java.net.URL; + import java.net.URLClassLoader; + + /** + * Load jar files into memory with a URLClassLoader + * */ + public class ExchangisDataSourceClassLoader extends URLClassLoader { + + public ExchangisDataSourceClassLoader(URL[] urls, ClassLoader parent) { + super(urls, parent); + } + + @Override + public Class<?> loadClass(String name) throws ClassNotFoundException { + return super.loadClass(name); + } + + @Override + protected Class<?> loadClass(String name, boolean resolve) throws ClassNotFoundException { + return super.loadClass(name, resolve); + } + +} \ No newline at end of file
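Construction mirrors its use in LocalExchangisDataSourceLoader below (sketch): ClassLoader cl = new ExchangisDataSourceClassLoader(jars.toArray(new URL[0]), Thread.currentThread().getContextClassLoader()); note that both overrides simply delegate to URLClassLoader, so class lookup stays parent-first.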
diff --git a/exchangis-datasource/exchangis-datasource-loader/src/main/java/com/webank/wedatasphere/exchangis/datasource/loader/exception/NoSuchExchangisExtDataSourceException.java b/exchangis-datasource/exchangis-datasource-loader/src/main/java/com/webank/wedatasphere/exchangis/datasource/loader/exception/NoSuchExchangisExtDataSourceException.java new file mode 100644 index 000000000..464fe68d5 --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-loader/src/main/java/com/webank/wedatasphere/exchangis/datasource/loader/exception/NoSuchExchangisExtDataSourceException.java @@ -0,0 +1,14 @@ +package com.webank.wedatasphere.exchangis.datasource.loader.exception; + + + import org.apache.linkis.common.exception.ErrorException; + + public class NoSuchExchangisExtDataSourceException extends ErrorException { + public NoSuchExchangisExtDataSourceException(String errDesc) { + super(70059, errDesc); + } + + public NoSuchExchangisExtDataSourceException(int errCode, String desc) { + super(errCode, desc); + } +} diff --git a/exchangis-datasource/exchangis-datasource-loader/src/main/java/com/webank/wedatasphere/exchangis/datasource/loader/loader/ExchangisDataSourceLoaderFactory.java b/exchangis-datasource/exchangis-datasource-loader/src/main/java/com/webank/wedatasphere/exchangis/datasource/loader/loader/ExchangisDataSourceLoaderFactory.java new file mode 100644 index 000000000..979ca9a86 --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-loader/src/main/java/com/webank/wedatasphere/exchangis/datasource/loader/loader/ExchangisDataSourceLoaderFactory.java @@ -0,0 +1,43 @@ +package com.webank.wedatasphere.exchangis.datasource.loader.loader; + + import com.webank.wedatasphere.exchangis.datasource.core.loader.ExchangisDataSourceDefLoader; + import org.apache.commons.lang.ClassUtils; + import org.apache.commons.lang.StringUtils; + import org.apache.linkis.common.conf.CommonVars; + import org.slf4j.Logger; + import org.slf4j.LoggerFactory; + + public class ExchangisDataSourceLoaderFactory { + + private static final Logger logger = LoggerFactory.getLogger(ExchangisDataSourceLoaderFactory.class); + + private static Class<? extends ExchangisDataSourceDefLoader> clazz = LocalExchangisDataSourceLoader.class; + private static ExchangisDataSourceDefLoader exchangisDataSourceDefLoader = null; + + @SuppressWarnings("unchecked") + public static ExchangisDataSourceDefLoader getLoader(){ + if (exchangisDataSourceDefLoader == null){ + synchronized (ExchangisDataSourceLoaderFactory.class){ + if (exchangisDataSourceDefLoader == null){ + // The loader class can also be chosen via configuration + CommonVars<String> apply = CommonVars.apply("exchangis.extds.loader.classname", ""); + String className = apply.getValue(); + if (StringUtils.isNotBlank(className)){ + try{ + clazz = (Class<? extends ExchangisDataSourceDefLoader>) ClassUtils.getClass(className); + }catch(ClassNotFoundException e){ + logger.warn(String.format("Can not get ExchangisDataSourceLoader class %s, LocalExchangisDataSourceLoader will be used by default.", className), e); + } + } + try { + exchangisDataSourceDefLoader = clazz.newInstance(); + } catch (Exception e) { + logger.error(String.format("Can not initialize ExchangisDataSourceLoader class %s.", clazz.getSimpleName()), e); + } + logger.info("Use {} to load all Exchangis Extension DataSources.", clazz.getSimpleName()); + } + } + } + return exchangisDataSourceDefLoader; + } + +}
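If the override is needed, it is a plain Linkis CommonVars property in the deployment configuration; the value shown here is hypothetical: exchangis.extds.loader.classname=com.example.MyExtDsLoader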
diff --git a/exchangis-datasource/exchangis-datasource-loader/src/main/java/com/webank/wedatasphere/exchangis/datasource/loader/loader/LocalExchangisDataSourceLoader.java b/exchangis-datasource/exchangis-datasource-loader/src/main/java/com/webank/wedatasphere/exchangis/datasource/loader/loader/LocalExchangisDataSourceLoader.java new file mode 100644 index 000000000..18d843f09 --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-loader/src/main/java/com/webank/wedatasphere/exchangis/datasource/loader/loader/LocalExchangisDataSourceLoader.java @@ -0,0 +1,84 @@ +package com.webank.wedatasphere.exchangis.datasource.loader.loader; + + import com.webank.wedatasphere.exchangis.dao.hook.MapperHook; + import com.webank.wedatasphere.exchangis.datasource.core.ExchangisDataSourceDefinition; + import com.webank.wedatasphere.exchangis.datasource.core.context.ExchangisDataSourceContext; + import com.webank.wedatasphere.exchangis.datasource.core.loader.ExchangisDataSourceDefLoader; + import com.webank.wedatasphere.exchangis.datasource.loader.clazzloader.ExchangisDataSourceClassLoader; + import com.webank.wedatasphere.exchangis.datasource.loader.utils.ExceptionHelper; + import com.webank.wedatasphere.exchangis.datasource.loader.utils.ExtDsUtils; + import org.apache.commons.lang3.StringUtils; + import org.apache.linkis.common.exception.ErrorException; + import org.slf4j.Logger; + import org.slf4j.LoggerFactory; + + import java.io.File; + import java.net.URL; + import java.util.List; + import java.util.Objects; + + public class LocalExchangisDataSourceLoader implements ExchangisDataSourceDefLoader { + + private static final Logger LOGGER = LoggerFactory.getLogger(LocalExchangisDataSourceLoader.class); + + private ClassLoader classLoader; + private ExchangisDataSourceContext context; + + @Override + public void setClassLoader(ClassLoader classLoader) { + this.classLoader = classLoader; + } + + @Override + public void setContext(ExchangisDataSourceContext context) { + this.context = context; + } + + @Override + public void init(MapperHook mapperHook) throws Exception { + // Initialize by scanning the extension directory on disk + ClassLoader currentClassLoader = Thread.currentThread().getContextClassLoader(); + String loadClassPath = Objects.requireNonNull(currentClassLoader.getResource(EXCHANGIS_DIR_NAME)).getPath(); + if (!StringUtils.endsWith(loadClassPath, File.separator)) { + // append the trailing separator when it is missing + loadClassPath = loadClassPath + File.separator; + } + String libPathUrl = loadClassPath + ".." + File.separator + EXCHANGIS_DIR_NAME; + LOGGER.info("libPath url is {}", libPathUrl); + List<URL> jars = ExtDsUtils.getJarsUrlsOfPath(libPathUrl); + // List<URL> jars = ExtDsUtils.getJarsUrlsOfPath(EXCHANGIS_DIR_NAME); + ClassLoader classLoader = new ExchangisDataSourceClassLoader(jars.toArray(new URL[0]), currentClassLoader); + + List<String> classNames = ExtDsUtils.getExchangisExtDataSourceClassNames(libPathUrl, classLoader); + for (String clazzName: classNames) { + Class<?> clazz = null; + try { + clazz = classLoader.loadClass(clazzName); + } catch (ClassNotFoundException e) { + Thread.currentThread().setContextClassLoader(currentClassLoader); + ExceptionHelper.dealErrorException(70062, clazzName + " class not found ", e, ErrorException.class); + } + + if (clazz == null) { + Thread.currentThread().setContextClassLoader(currentClassLoader); + } else { + ExchangisDataSourceDefinition dsType = (ExchangisDataSourceDefinition) clazz.newInstance(); + dsType.setMapperHook(mapperHook); + Thread.currentThread().setContextClassLoader(currentClassLoader); + LOGGER.info("ExchangisDataSource is {}", dsType.getClass().toString()); + + context.addExchangisDsDefinition(dsType); + } + } + + } + + @Override + public ExchangisDataSourceDefinition load(String dataSourceType) { + return null; + } + + @Override + public ExchangisDataSourceDefinition get(String dataSourceType, boolean reload) { + return null; + } +}
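Wiring, as the pieces above suggest (sketch; the context and mapperHook instances come from the server's own wiring): ExchangisDataSourceDefLoader loader = ExchangisDataSourceLoaderFactory.getLoader(); loader.setContext(context); loader.init(mapperHook);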
+
+    public static <T extends ErrorException> void dealErrorException(int errorCode, String errorDesc, Throwable throwable,
+                                                                     Class<T> clazz) throws T {
+        T errorException = null;
+        try {
+            Constructor<T> constructor = clazz.getConstructor(int.class, String.class);
+            errorException = constructor.newInstance(errorCode, errorDesc);
+            errorException.setErrCode(errorCode);
+            errorException.setDesc(errorDesc);
+        } catch (Exception e) {
+            throw new RuntimeException(String.format("failed to instance %s", clazz.getName()), e);
+        }
+        errorException.initCause(throwable);
+        throw errorException;
+    }
+}
diff --git a/exchangis-datasource/exchangis-datasource-loader/src/main/java/com/webank/wedatasphere/exchangis/datasource/loader/utils/ExtDsUtils.java b/exchangis-datasource/exchangis-datasource-loader/src/main/java/com/webank/wedatasphere/exchangis/datasource/loader/utils/ExtDsUtils.java
new file mode 100644
index 000000000..59b40ea66
--- /dev/null
+++ b/exchangis-datasource/exchangis-datasource-loader/src/main/java/com/webank/wedatasphere/exchangis/datasource/loader/utils/ExtDsUtils.java
@@ -0,0 +1,177 @@
+/*
+ *
+ * * Copyright 2019 WeBank
+ * *
+ * * Licensed under the Apache License, Version 2.0 (the "License");
+ * * you may not use this file except in compliance with the License.
+ * * You may obtain a copy of the License at
+ * *
+ * * http://www.apache.org/licenses/LICENSE-2.0
+ * *
+ * * Unless required by applicable law or agreed to in writing, software
+ * * distributed under the License is distributed on an "AS IS" BASIS,
+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * * See the License for the specific language governing permissions and
+ * * limitations under the License.
+ *
+ */
+
+package com.webank.wedatasphere.exchangis.datasource.loader.utils;
+
+import com.webank.wedatasphere.exchangis.datasource.core.ExchangisDataSourceDefinition;
+import com.webank.wedatasphere.exchangis.datasource.core.loader.ExchangisDataSourceDefLoader;
+import com.webank.wedatasphere.exchangis.datasource.loader.exception.NoSuchExchangisExtDataSourceException;
+import org.apache.commons.lang.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.File;
+import java.io.IOException;
+import java.lang.reflect.Modifier;
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.util.ArrayList;
+import java.util.Enumeration;
+import java.util.List;
+import java.util.jar.JarEntry;
+import java.util.jar.JarFile;
+
+public class ExtDsUtils {
+
+    private static final Logger logger = LoggerFactory.getLogger(ExtDsUtils.class);
+
+    private static final Class<ExchangisDataSourceDefinition> PARENT_CLASS = ExchangisDataSourceDefinition.class;
+
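+    /**
+     * Scan every jar under libPath and return the first class that extends
+     * ExchangisDataSourceDefinition. Illustrative extension point (hypothetical class and
+     * jar names, following the "exchangis*.jar" naming rule enforced by getJarsOfPath below):
+     *
+     *   // packaged as exchangis-datasource-ext-demo.jar and dropped into the lib directory
+     *   public class DemoDataSourceDefinition extends ExchangisDataSourceDefinition { ... }
+     */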
+    public static String getExchangisExtDataSourceClassName(String libPath,
+                                                            ClassLoader classLoader) throws NoSuchExchangisExtDataSourceException {
+        // 1. Collect all jars under the directory
+        List<String> jars = getJarsOfPath(libPath);
+        // 2. Look into every jar for subclasses of ExchangisDataSourceDefinition
+        for (String jar : jars) {
+            for (String clazzName : getClassNameFrom(jar)) {
+                // 3. Return the first matching subclass found in the jar
+                if (isChildClass(clazzName, PARENT_CLASS, classLoader)) {
+                    return clazzName;
+                }
+            }
+        }
+        throw new NoSuchExchangisExtDataSourceException("does not exist ExchangisDataSource child class");
+    }
+
+    public static List<String> getExchangisExtDataSourceClassNames(String libPath,
+                                                                   ClassLoader classLoader) {
+        List<String> classNames = new ArrayList<>();
+        List<String> jars = getJarsOfPath(libPath);
+        for (String jar : jars) {
+            for (String clazzName : getClassNameFrom(jar)) {
+                if (isChildClass(clazzName, PARENT_CLASS, classLoader)) {
+                    classNames.add(clazzName);
+                }
+            }
+        }
+        return classNames;
+    }
+
+    /**
+     * Get the fully qualified name of the data source definition class to instantiate
+     */
+    public static String getExchangisExtDataSourceClassName(String dataSourceName, String libPath,
+                                                            ClassLoader classLoader) throws NoSuchExchangisExtDataSourceException {
+        // 1. Collect all jars under the directory
+        List<String> jars = getJarsOfPath(libPath);
+        // 2. Look into every jar for subclasses of ExchangisDataSourceDefinition
+        for (String jar : jars) {
+            for (String clazzName : getClassNameFrom(jar)) {
+                // 3. Return the first matching subclass found in the jar
+                if (isChildClass(clazzName, PARENT_CLASS, classLoader)) {
+                    return clazzName;
+                }
+            }
+        }
+        throw new NoSuchExchangisExtDataSourceException(dataSourceName + " does not exist");
+    }
+
+    public static List<String> getJarsOfPath(String path) {
+        File file = new File(path);
+        List<String> jars = new ArrayList<>();
+        if (file.listFiles() != null) {
+            for (File f : file.listFiles()) {
+                // e.g. exchangis-xxxxx.jar
+                if (!f.isDirectory() && f.getName().endsWith(ExchangisDataSourceDefLoader.JAR_SUF_NAME) && f.getName().startsWith("exchangis")) {
+                    jars.add(f.getPath());
+                }
+            }
+        }
+        return jars;
+    }
+
+
+    public static List<URL> getJarsUrlsOfPath(String path) {
+        File file = new File(path);
+        logger.info(file.getPath());
+        logger.info(file.getAbsolutePath());
+        List<URL> jars = new ArrayList<>();
+        if (file.listFiles() != null) {
+            for (File f : file.listFiles()) {
+                if (!f.isDirectory() && f.getName().endsWith(ExchangisDataSourceDefLoader.JAR_SUF_NAME)) {
+                    try {
+                        jars.add(f.toURI().toURL());
+                    } catch (MalformedURLException e) {
+                        logger.warn("url {} cannot be added", ExchangisDataSourceDefLoader.FILE_SCHEMA + f.getPath());
+                    }
+                }
+            }
+        }
+        return jars;
+    }
+
+
+    /**
+     * Read all class file names from a jar
+     */
+    private static List<String> getClassNameFrom(String jarName) {
+        List<String> fileList = new ArrayList<>();
+        try (JarFile jarFile = new JarFile(new File(jarName))) {
+            Enumeration<JarEntry> en = jarFile.entries();
+            while (en.hasMoreElements()) {
+                String name1 = en.nextElement().getName();
+                if (!name1.endsWith(".class")) {
+                    continue;
+                }
+                String name2 = name1.substring(0, name1.lastIndexOf(".class"));
+                String name3 = name2.replaceAll("/", ".");
+                fileList.add(name3);
+            }
+        } catch (IOException e) {
+            logger.error("cannot read class names from jar {}", jarName, e);
+        }
+
+        return fileList;
+    }
+
+
+    private static boolean isChildClass(String className, Class<?> parentClazz, ClassLoader classLoader) {
+        if (StringUtils.isEmpty(className)) {
+            return false;
+        }
+        Class<?> clazz = null;
+        try {
+            clazz = classLoader.loadClass(className);
+            // Ignore abstract classes and interfaces
+            if (Modifier.isAbstract(clazz.getModifiers())) {
+                return false;
+            }
+            if (Modifier.isInterface(clazz.getModifiers())) {
+                return false;
+            }
+        } catch (Throwable t) {
+            logger.error("className {} can not be instanced", className, t);
+            return false;
+        }
+        return parentClazz.isAssignableFrom(clazz);
+    }
+
+}
diff --git a/exchangis-datasource/exchangis-datasource-server/pom.xml b/exchangis-datasource/exchangis-datasource-server/pom.xml
new file mode 100644
index 000000000..e2b04ecad
--- /dev/null
+++
b/exchangis-datasource/exchangis-datasource-server/pom.xml @@ -0,0 +1,81 @@ + + + + exchangis + com.webank.wedatasphere.exchangis + ${revision} + ../../pom.xml + + 4.0.0 + + exchangis-datasource-server + + + 8 + 8 + + + + + com.webank.wedatasphere.exchangis + exchangis-datasource-service + ${project.version} + + + + com.webank.wedatasphere.exchangis + exchangis-datasource-loader + ${project.version} + + + + + + + + org.apache.maven.plugins + maven-deploy-plugin + + + + net.alchim31.maven + scala-maven-plugin + + + org.apache.maven.plugins + maven-jar-plugin + + + org.apache.maven.plugins + maven-assembly-plugin + 3.1.0 + false + + + make-assembly + package + + single + + + + src/main/assembly/distribution.xml + + + + + + false + exchangis-datasource-server + false + false + + src/main/assembly/distribution.xml + + + + + + \ No newline at end of file diff --git a/exchangis-datasource/exchangis-datasource-server/src/main/assembly/distribution.xml b/exchangis-datasource/exchangis-datasource-server/src/main/assembly/distribution.xml new file mode 100644 index 000000000..997641614 --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-server/src/main/assembly/distribution.xml @@ -0,0 +1,100 @@ + + + + exchangis-datasource-server + + dir + + false + + + ${basedir}/src/main/bin + + * + + 0755 + 0755 + bin + unix + + + ${basedir}/../extension-datasources/exchangis-datasource-ext-hive/target + + *.jar + + + *-javadoc.jar + + 0755 + 0755 + exchangis-extds + + + ${basedir}/../extension-datasources/exchangis-datasource-ext-mysql/target + + *.jar + + + *-javadoc.jar + + 0755 + 0755 + exchangis-extds + + + ${basedir}/../extension-datasources/exchangis-datasource-ext-elasticsearch/target + + *.jar + + + *-javadoc.jar + + 0755 + 0755 + exchangis-extds + + + ${basedir}/../extension-datasources/exchangis-datasource-ext-mongodb/target + + *.jar + + + *-javadoc.jar + + 0755 + 0755 + exchangis-extds + + + ${basedir}/../extension-datasources/exchangis-datasource-ext-oracle/target + + *.jar + + + *-javadoc.jar + + 0755 + 0755 + exchangis-extds + + + + diff --git a/exchangis-datasource/exchangis-datasource-server/src/main/java/com/webank/wedatasphere/exchangis/datasource/server/ExchangisDataSourceAutoConfiguration.java b/exchangis-datasource/exchangis-datasource-server/src/main/java/com/webank/wedatasphere/exchangis/datasource/server/ExchangisDataSourceAutoConfiguration.java new file mode 100644 index 000000000..2146c2595 --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-server/src/main/java/com/webank/wedatasphere/exchangis/datasource/server/ExchangisDataSourceAutoConfiguration.java @@ -0,0 +1,20 @@ +package com.webank.wedatasphere.exchangis.datasource.server; + +import com.webank.wedatasphere.exchangis.datasource.core.service.MetadataInfoService; +import com.webank.wedatasphere.exchangis.datasource.linkis.service.LinkisMetadataInfoService; +import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; + +/** + * Auto configure the core beans + */ +@Configuration +public class ExchangisDataSourceAutoConfiguration { + + @Bean + @ConditionalOnMissingBean(MetadataInfoService.class) + public MetadataInfoService metadataInfoService(){ + return new LinkisMetadataInfoService(); + } +} diff --git a/exchangis-datasource/exchangis-datasource-server/src/main/java/com/webank/wedatasphere/exchangis/datasource/server/configuration/ServerConfig.java 
b/exchangis-datasource/exchangis-datasource-server/src/main/java/com/webank/wedatasphere/exchangis/datasource/server/configuration/ServerConfig.java
new file mode 100644
index 000000000..5f9809e85
--- /dev/null
+++ b/exchangis-datasource/exchangis-datasource-server/src/main/java/com/webank/wedatasphere/exchangis/datasource/server/configuration/ServerConfig.java
@@ -0,0 +1,29 @@
+package com.webank.wedatasphere.exchangis.datasource.server.configuration;
+
+
+import com.webank.wedatasphere.exchangis.dao.hook.MapperHook;
+import com.webank.wedatasphere.exchangis.datasource.core.context.DefaultExchangisDsContext;
+import com.webank.wedatasphere.exchangis.datasource.core.context.ExchangisDataSourceContext;
+import com.webank.wedatasphere.exchangis.datasource.core.loader.ExchangisDataSourceDefLoader;
+import com.webank.wedatasphere.exchangis.datasource.loader.loader.ExchangisDataSourceLoaderFactory;
+import org.apache.linkis.common.exception.ErrorException;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+
+@Configuration
+public class ServerConfig {
+
+    @Bean
+    public ExchangisDataSourceContext context(MapperHook mapperHook) throws Exception {
+        DefaultExchangisDsContext context = new DefaultExchangisDsContext();
+        ExchangisDataSourceDefLoader loader = ExchangisDataSourceLoaderFactory.getLoader();
+        loader.setContext(context);
+        try {
+            loader.init(mapperHook);
+        } catch (Exception e) {
+            ErrorException errorException = new ErrorException(70059, e.getMessage());
+            errorException.initCause(e);
+            throw errorException;
+        }
+        return context;
+    }
+
+}
diff --git a/exchangis-datasource/exchangis-datasource-server/src/main/java/com/webank/wedatasphere/exchangis/datasource/server/restful/api/ExchangisDataSourceRenderRestfulApi.java b/exchangis-datasource/exchangis-datasource-server/src/main/java/com/webank/wedatasphere/exchangis/datasource/server/restful/api/ExchangisDataSourceRenderRestfulApi.java
new file mode 100644
index 000000000..d83538d14
--- /dev/null
+++ b/exchangis-datasource/exchangis-datasource-server/src/main/java/com/webank/wedatasphere/exchangis/datasource/server/restful/api/ExchangisDataSourceRenderRestfulApi.java
@@ -0,0 +1,61 @@
+package com.webank.wedatasphere.exchangis.datasource.server.restful.api;
+
+import com.webank.wedatasphere.exchangis.common.UserUtils;
+import com.webank.wedatasphere.exchangis.datasource.core.ui.ElementUI;
+import com.webank.wedatasphere.exchangis.datasource.service.DataSourceRenderService;
+import org.apache.linkis.server.Message;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.web.bind.annotation.*;
+
+import javax.annotation.Resource;
+import javax.servlet.http.HttpServletRequest;
+import java.util.Locale;
+import java.util.Objects;
+import java.util.Optional;
+
+/**
+ * Expose the UI interface for front-end rendering
+ */
+@RestController
+@RequestMapping(value = "dss/exchangis/main/datasources/render", produces = {"application/json;charset=utf-8"})
+public class ExchangisDataSourceRenderRestfulApi {
+
+    private static final Logger LOG = LoggerFactory.getLogger(ExchangisDataSourceRenderRestfulApi.class);
+
+    @Resource
+    private DataSourceRenderService renderService;
+
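+    /*
+     * Illustrative call (hypothetical gateway host and parameter values; the /api/rest_j/v1
+     * prefix matches the method path set on the response below):
+     *
+     *   GET http://<gateway>/api/rest_j/v1/dss/exchangis/main/datasources/render/partition/element/<elementType>
+     *       ?dataSourceId=1&database=test_db&table=test_table
+     *
+     * responds with the rendered partition element under data.render.
+     */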
+    @RequestMapping(value = "/partition/element/{elementType:\\w+}", method = RequestMethod.GET)
+    public Message partition(@PathVariable("elementType") String type,
+                             @RequestParam("dataSourceId") Long dataSourceId,
+                             @RequestParam("database") String database,
+                             @RequestParam(value = "tableNotExist", required = false) Boolean tableNotExist,
+                             @RequestParam("table") String table, HttpServletRequest request) {
+        String userName = UserUtils.getLoginUser(request);
+        ElementUI.Type uiType;
+        try {
+            uiType = ElementUI.Type.valueOf(type.toUpperCase(Locale.ROOT));
+        } catch (Exception e) {
+            return Message.error("Element type: [" + type + "] is not supported (不兼容的元素类型)");
+        }
+        Message result = Message.ok();
+        try {
+            boolean notExist = Optional.ofNullable(tableNotExist).orElse(false);
+            ElementUI elementUI = renderService.getPartitionAndRender(userName, dataSourceId,
+                    database, table, uiType, notExist);
+            result.data("type", uiType.name());
+            result.data("customize", notExist);
+            if (Objects.nonNull(elementUI)) {
+                result.data("render", elementUI.getValue());
+            }
+        } catch (Exception e) {
+            String uiMessage = "Failed to load and render the partition info (加载渲染分区信息失败)";
+            LOG.error(uiMessage + ", reason: " + e.getMessage(), e);
+            result = Message.error(uiMessage);
+        }
+        result.setMethod("/api/rest_j/v1/dss/exchangis/main/datasources/render/partition/element/" + type);
+        return result;
+    }
+
+}
diff --git a/exchangis-datasource/exchangis-datasource-server/src/main/java/com/webank/wedatasphere/exchangis/datasource/server/restful/api/ExchangisDataSourceRestfulApi.java b/exchangis-datasource/exchangis-datasource-server/src/main/java/com/webank/wedatasphere/exchangis/datasource/server/restful/api/ExchangisDataSourceRestfulApi.java
new file mode 100644
index 000000000..dbb0e2526
--- /dev/null
+++ b/exchangis-datasource/exchangis-datasource-server/src/main/java/com/webank/wedatasphere/exchangis/datasource/server/restful/api/ExchangisDataSourceRestfulApi.java
@@ -0,0 +1,471 @@
+package com.webank.wedatasphere.exchangis.datasource.server.restful.api;
+
+import com.webank.wedatasphere.exchangis.common.AuditLogUtils;
+import com.webank.wedatasphere.exchangis.common.UserUtils;
+import com.webank.wedatasphere.exchangis.common.enums.OperateTypeEnum;
+import com.webank.wedatasphere.exchangis.common.enums.TargetTypeEnum;
+import com.webank.wedatasphere.exchangis.datasource.core.exception.ExchangisDataSourceException;
+import com.webank.wedatasphere.exchangis.datasource.core.ui.ElementUI;
+import com.webank.wedatasphere.exchangis.datasource.service.ExchangisDataSourceService;
+import com.webank.wedatasphere.exchangis.datasource.vo.DataSourceCreateVO;
+import com.webank.wedatasphere.exchangis.datasource.vo.DataSourceQueryVO;
+import com.webank.wedatasphere.exchangis.datasource.vo.FieldMappingVO;
+import org.apache.linkis.server.Message;
+import org.apache.linkis.server.security.SecurityFilter;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.validation.BindingResult;
+import org.springframework.validation.FieldError;
+import org.springframework.web.bind.annotation.*;
+
+import javax.servlet.http.HttpServletRequest;
+import javax.validation.Valid;
+import javax.ws.rs.QueryParam;
+import java.util.List;
+import java.util.Map;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+@RestController
+@RequestMapping(value = "dss/exchangis/main/datasources", produces = {"application/json;charset=utf-8"})
+public class ExchangisDataSourceRestfulApi {
+    private static final Logger LOG = LoggerFactory.getLogger(ExchangisDataSourceRestfulApi.class);
+
+    private final ExchangisDataSourceService exchangisDataSourceService;
+
+    private static Pattern p = Pattern.compile("(?<=\\[)[^]]+");
+
+    @Autowired
+    public ExchangisDataSourceRestfulApi(ExchangisDataSourceService
exchangisDataSourceService) { + this.exchangisDataSourceService = exchangisDataSourceService; + } + + // list all datasource types + @RequestMapping( value = "/type", method = RequestMethod.GET) + public Message listDataSourceTypes(HttpServletRequest request, + @RequestParam(value = "engineType", required = false) String engineType, + @RequestParam(value = "direct", required = false) String direct, + @RequestParam(value = "sourceType", required = false) String sourceType) throws Exception { + Message message = null; + LOG.info("engineType:{}, direct:{}, sourceType:{}", engineType, direct, sourceType); + try{ + message = exchangisDataSourceService.listDataSources(request, engineType, direct, sourceType); + } catch (ExchangisDataSourceException e) { + String errorMessage = "Error occur while list datasource type"; + LOG.error(errorMessage, e); + + String errorNote = e.getMessage(); + Matcher matcher = p.matcher(errorNote); + if (matcher.find()) { + message = Message.error(matcher.group()); + } + else{ + message = Message.error("Getting datasource type list fail (获取数据源类型列表失败)"); + } + } + return message; + + } + + // query paged datasource + @RequestMapping( value = "/query", method = {RequestMethod.GET,RequestMethod.POST}) + public Message create(HttpServletRequest request, @RequestBody DataSourceQueryVO vo) throws Exception { + Message message = null; + try{ + message = exchangisDataSourceService.queryDataSources(request, vo); + } catch (ExchangisDataSourceException e) { + String errorMessage = "Error occur while query datasource"; + LOG.error(errorMessage, e); + message = Message.error("查询数据源失败"); + } + return message; + + } + + // list all datasources + @RequestMapping( value = "", method = RequestMethod.GET) + @Deprecated + public Message listAllDataSources( + HttpServletRequest request, + @RequestParam(value = "typeId", required = false) Long typeId, + @RequestParam(value = "typeName", required = false) String typeName, + @RequestParam(value = "page", required = false) Integer page, + @RequestParam(value = "size", required = false) Integer size + ) throws Exception { + Message message = null; + try{ + message = exchangisDataSourceService.listAllDataSources(request, typeName, typeId, page, size); + } catch (ExchangisDataSourceException e) { + String errorMessage = "Error occur while getting datasource list"; + LOG.error(errorMessage, e); + + String errorNote = e.getMessage(); + Matcher matcher = p.matcher(errorNote); + if (matcher.find()) { + message = Message.error(matcher.group()); + } + else{ + message = Message.error("Getting datasource list fail (获取数据源列表失败)"); + } + } + return message; + + } + + // get datasource key define + @RequestMapping( value = "/types/{dataSourceTypeId}/keydefines", method = RequestMethod.GET) + public Message getDataSourceKeyDefine( + HttpServletRequest request, + @PathVariable("dataSourceTypeId") Long dataSourceTypeId + ) throws Exception { + Message message = null; + try{ + message = exchangisDataSourceService.getDataSourceKeyDefine(request, dataSourceTypeId); + } catch (ExchangisDataSourceException e) { + String errorMessage = "Error occur while getting datasource key define"; + LOG.error(errorMessage, e); + message = Message.error("获取数据源主键定义失败"); + } + return message; + + } + + + // get datasource version list + @RequestMapping( value = "/{id}/versions", method = RequestMethod.GET) + public Message getDataSourceVersionsById(HttpServletRequest request, @PathVariable("id") Long id) throws Exception { + Message message = null; + try{ + message = 
exchangisDataSourceService.getDataSourceVersionsById(request, id); + } catch (ExchangisDataSourceException e) { + String errorMessage = "Error occur while getting datasource version"; + LOG.error(errorMessage, e); + + String errorNote = e.getMessage(); + Matcher matcher = p.matcher(errorNote); + if (matcher.find()) { + message = Message.error(matcher.group()); + } + else{ + message = Message.error("Getting datasource version fail (获取数据源版本失败)"); + } + } + return message; + + } + + // create datasource + @RequestMapping( value = "", method = RequestMethod.POST) + public Message create(/*@PathParam("type") String type, */@Valid @RequestBody DataSourceCreateVO dataSourceCreateVO, BindingResult bindingResult, HttpServletRequest request ) throws Exception { + Message message = new Message(); + String loginUser = UserUtils.getLoginUser(request); + String oringinUser = SecurityFilter.getLoginUsername(request); + LOG.info("dataSourceName: " + dataSourceCreateVO.getDataSourceName() + ", dataSourceDesc: " + dataSourceCreateVO.getDataSourceDesc() + ", label: " + dataSourceCreateVO.getLabels()); + if(bindingResult.hasErrors()){ + List fieldErrors = bindingResult.getFieldErrors(); + for(int i=0;i fieldErrors = bindingResult.getFieldErrors(); + for(int i=0;i params, @QueryParam(value = "encryStr") String encryStr) throws Exception { + Message message = null; + try{ + LOG.info("Encrypt params is: {}", params); + message = exchangisDataSourceService.encryptConnectInfo((String) params.get("encryStr")); + //message = Message.ok().data("encryStr", "owwonowoww"); + } catch (Exception e) { + String errorMessage = "Encrypted string failed"; + LOG.error(errorMessage, e); + message = Message.error("加密字符串失败"); + } + return message; + } + + @RequestMapping( value = "/tools/decrypt", method = RequestMethod.POST) + public Message sinkStrDecrypt(HttpServletRequest request, @RequestBody Map params, @QueryParam(value = "sinkStr") String sinkStr) throws Exception { + Message message = null; + try{ + message = exchangisDataSourceService.decryptConnectInfo((String) params.get("sinkStr")); + //message = Message.ok().data("encryStr", "owwonowoww"); + } catch (Exception e) { + String errorMessage = "Encrypted string failed"; + LOG.error(errorMessage, e); + message = Message.error("加密字符串失败"); + } + return message; + } + + @RequestMapping( value = "/{engine}/{type}/params/ui", method = RequestMethod.GET) + public Message getParamsUI( + HttpServletRequest request, + @PathVariable("engine") String engine, + @PathVariable("type") String type, + @RequestParam(value = "dir", required = false) String dir + ) { + List> uis = this.exchangisDataSourceService.getDataSourceParamsUI(type, String.format("%s-%s", engine, dir)); + return Message.ok().data("uis", uis); + } + +} diff --git a/exchangis-datasource/exchangis-datasource-server/src/main/java/com/webank/wedatasphere/exchangis/datasource/server/restful/api/ExchangisJobDataSourceRestfulApi.java b/exchangis-datasource/exchangis-datasource-server/src/main/java/com/webank/wedatasphere/exchangis/datasource/server/restful/api/ExchangisJobDataSourceRestfulApi.java new file mode 100644 index 000000000..b019c1321 --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-server/src/main/java/com/webank/wedatasphere/exchangis/datasource/server/restful/api/ExchangisJobDataSourceRestfulApi.java @@ -0,0 +1,58 @@ +package com.webank.wedatasphere.exchangis.datasource.server.restful.api; + +import com.webank.wedatasphere.exchangis.datasource.core.ui.ElementUI; +import 
com.webank.wedatasphere.exchangis.datasource.core.ui.viewer.ExchangisDataSourceUIViewer;
+import com.webank.wedatasphere.exchangis.datasource.service.ExchangisDataSourceService;
+import org.apache.linkis.server.Message;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.web.bind.annotation.*;
+
+import javax.servlet.http.HttpServletRequest;
+import java.util.List;
+
+// TODO This controller exists for testing only; the JOB APIs live in a separate controller
+@RestController
+@RequestMapping(value = "dss/exchangis/main", produces = {"application/json;charset=utf-8"})
+public class ExchangisJobDataSourceRestfulApi {
+
+    private final ExchangisDataSourceService exchangisDataSourceService;
+
+    @Autowired
+    public ExchangisJobDataSourceRestfulApi(ExchangisDataSourceService exchangisDataSourceService) {
+        this.exchangisDataSourceService = exchangisDataSourceService;
+    }
+
+    // Get the UI data of all data source configuration items of a job by its job ID
+    @RequestMapping(value = "jobs/{jobId}/datasource/ui", method = RequestMethod.GET)
+    public Message getJobDataSourcesUI(HttpServletRequest request, @PathVariable("jobId") Long jobId) {
+//        ExchangisDataSourceUIViewer jobDataSourceUI = this.exchangisDataSourceService.getJobDataSourceUIs(jobId);
+        List ui = this.exchangisDataSourceService.getJobDataSourceUIs(request, jobId);
+        return Message.ok().data("ui", ui);
+    }
+
+    // Get the UI data of an engine's configuration items by job engine type
+    @RequestMapping(value = "jobs/engine/{engineType}/settings/ui", method = RequestMethod.GET)
+    public Message getJobEngineSettingsUI(HttpServletRequest request, @PathVariable("engineType") String engineType, @RequestParam(required = false) String labels) {
+        List<?> jobSettingsUI = this.exchangisDataSourceService.getJobEngineSettingsUI(engineType);
+        return Message.ok().data("ui", jobSettingsUI);
+    }
+
+    // Get the UI data of a job's data source parameters by job ID
+    @RequestMapping(value = "jobs/{jobId}/datasource/params/ui", method = RequestMethod.GET)
+    public Message getJobDataSourceParamsUI(HttpServletRequest request, @PathVariable("jobId") Long jobId) {
+        return this.exchangisDataSourceService.getJobDataSourceParamsUI(jobId);
+    }
+
+    // Get the UI data of a job's data source field mappings by job ID
+    @RequestMapping(value = "jobs/{jobId}/datasource/transforms/ui", method = RequestMethod.GET)
+    public Message getJobTransformsUI(HttpServletRequest request, @PathVariable("jobId") Long jobId) {
+        return this.exchangisDataSourceService.getJobDataSourceTransformsUI(jobId);
+    }
+
+    // Get the UI data of a job's data source engine settings by job ID
+    @RequestMapping(value = "jobs/{jobId}/{jobName}/datasource/settings/ui", method = RequestMethod.GET)
+    public Message getJobSettingsUI(HttpServletRequest request, @PathVariable("jobId") Long jobId,
+                                    @PathVariable("jobName") String jobName) throws Exception {
+        return this.exchangisDataSourceService.getJobDataSourceSettingsUI(jobId, jobName);
+    }
+}
diff --git a/exchangis-datasource/exchangis-datasource-service/pom.xml b/exchangis-datasource/exchangis-datasource-service/pom.xml
new file mode 100644
index 000000000..02f408fe5
--- /dev/null
+++ b/exchangis-datasource/exchangis-datasource-service/pom.xml
@@ -0,0 +1,67 @@
+ + + + exchangis + com.webank.wedatasphere.exchangis + ${revision} + ../../pom.xml + + 4.0.0 + + exchangis-datasource-service + + + 8 + 8 + + + + + + com.webank.wedatasphere.exchangis + exchangis-datasource-linkis + ${project.version} + + + + + com.webank.wedatasphere.exchangis + exchangis-job-common + ${project.version} + + + com.webank.wedatasphere.exchangis + exchangis-engine-core + ${project.version} + compile + + + + + + + org.apache.maven.plugins + maven-deploy-plugin + + + + net.alchim31.maven +
scala-maven-plugin + + + org.apache.maven.plugins + maven-jar-plugin + + + org.apache.maven.plugins + maven-compiler-plugin + + 8 + 8 + + + + + \ No newline at end of file
diff --git a/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/Utils/RSAUtil.java b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/Utils/RSAUtil.java
new file mode 100644
index 000000000..b85130df5
--- /dev/null
+++ b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/Utils/RSAUtil.java
@@ -0,0 +1,89 @@
+package com.webank.wedatasphere.exchangis.datasource.Utils;
+
+import org.apache.linkis.common.conf.CommonVars;
+
+import javax.crypto.Cipher;
+import java.io.IOException;
+import java.security.*;
+import java.security.spec.PKCS8EncodedKeySpec;
+import java.security.spec.X509EncodedKeySpec;
+import java.util.Base64;
+
+/**
+ * @author tikazhang
+ * @Date 2022/8/4 10:35
+ */
+public class RSAUtil {
+
+    public static final CommonVars<String> PUBLIC_KEY_STR = CommonVars.apply("wds.exchangis.publicKeyStr", "publicKeyStr");
+    public static final CommonVars<String> PRIVATE_KEY_STR = CommonVars.apply("wds.exchangis.privateKeyStr", "privateKeyStr");
+
+
+    // Generate a key pair
+    public static KeyPair getKeyPair() throws Exception {
+        KeyPairGenerator keyPairGenerator = KeyPairGenerator.getInstance("RSA");
+        keyPairGenerator.initialize(2048);
+        KeyPair keyPair = keyPairGenerator.generateKeyPair();
+        return keyPair;
+    }
+
+    // Get the public key (Base64 encoded)
+    public static String getPublicKey(KeyPair keyPair) {
+        PublicKey publicKey = keyPair.getPublic();
+        byte[] bytes = publicKey.getEncoded();
+        return byte2Base64(bytes);
+    }
+
+    // Get the private key (Base64 encoded)
+    public static String getPrivateKey(KeyPair keyPair) {
+        PrivateKey privateKey = keyPair.getPrivate();
+        byte[] bytes = privateKey.getEncoded();
+        return byte2Base64(bytes);
+    }
+
+    // Convert a Base64-encoded public key into a PublicKey object
+    public static PublicKey string2PublicKey(String pubStr) throws Exception {
+        byte[] keyBytes = base642Byte(pubStr);
+        X509EncodedKeySpec keySpec = new X509EncodedKeySpec(keyBytes);
+        KeyFactory keyFactory = KeyFactory.getInstance("RSA");
+        PublicKey publicKey = keyFactory.generatePublic(keySpec);
+        return publicKey;
+    }
+
+    // Convert a Base64-encoded private key into a PrivateKey object
+    public static PrivateKey string2PrivateKey(String priStr) throws Exception {
+        byte[] keyBytes = base642Byte(priStr);
+        PKCS8EncodedKeySpec keySpec = new PKCS8EncodedKeySpec(keyBytes);
+        KeyFactory keyFactory = KeyFactory.getInstance("RSA");
+        PrivateKey privateKey = keyFactory.generatePrivate(keySpec);
+        return privateKey;
+    }
+
+    // Encrypt with the public key
+    public static byte[] publicEncrypt(byte[] content, PublicKey publicKey) throws Exception {
+        Cipher cipher = Cipher.getInstance("RSA");
+        cipher.init(Cipher.ENCRYPT_MODE, publicKey);
+        byte[] bytes = cipher.doFinal(content);
+        return bytes;
+    }
+
+    // Decrypt with the private key
+    public static byte[] privateDecrypt(byte[] content, PrivateKey privateKey) throws Exception {
+        Cipher cipher = Cipher.getInstance("RSA");
+        cipher.init(Cipher.DECRYPT_MODE, privateKey);
+        byte[] bytes = cipher.doFinal(content);
+        return bytes;
+    }
+
+    // Encode a byte array to Base64
+    public static String byte2Base64(byte[] bytes) {
+        Base64.Encoder encoder = Base64.getEncoder();
+        return encoder.encodeToString(bytes);
+    }
+
+    // Decode Base64 into a byte array
+    public static byte[] base642Byte(String base64Key) throws IOException {
+        Base64.Decoder decoder = Base64.getDecoder();
+        return decoder.decode(base64Key);
+    }
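+
+    /*
+     * Illustrative round trip using the helpers above (hypothetical plaintext; not part of
+     * the API itself):
+     *
+     *   KeyPair pair = RSAUtil.getKeyPair();
+     *   PublicKey pub = RSAUtil.string2PublicKey(RSAUtil.getPublicKey(pair));
+     *   PrivateKey pri = RSAUtil.string2PrivateKey(RSAUtil.getPrivateKey(pair));
+     *   byte[] cipher = RSAUtil.publicEncrypt("secret".getBytes(), pub);
+     *   byte[] plain = RSAUtil.privateDecrypt(cipher, pri);   // equals "secret".getBytes()
+     */
+}
diff --git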
a/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/domain/ExchangisDsProject.java b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/domain/ExchangisDsProject.java new file mode 100644 index 000000000..322424cb9 --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/domain/ExchangisDsProject.java @@ -0,0 +1,8 @@ +package com.webank.wedatasphere.exchangis.datasource.domain; + +/** + * The relation between data source and project + */ +public class ExchangisDsProject { +// private String +} diff --git a/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/dto/CreateDataSourceSuccessResultDTO.java b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/dto/CreateDataSourceSuccessResultDTO.java new file mode 100644 index 000000000..1398ecd32 --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/dto/CreateDataSourceSuccessResultDTO.java @@ -0,0 +1,29 @@ +package com.webank.wedatasphere.exchangis.datasource.dto; + +import com.fasterxml.jackson.annotation.JsonProperty; + +public class CreateDataSourceSuccessResultDTO extends ResultDTO { + private InsertIdDTO data; + + + public InsertIdDTO getData() { + return data; + } + + public void setData(InsertIdDTO data) { + this.data = data; + } + + public static class InsertIdDTO { + @JsonProperty(value = "insertId") + private Long insertId; + + public Long getId() { + return insertId; + } + + public void setId(Long insertId) { + this.insertId = insertId; + } + } +} diff --git a/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/dto/DataSourceDbTableColumnDTO.java b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/dto/DataSourceDbTableColumnDTO.java new file mode 100644 index 000000000..08b232ac5 --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/dto/DataSourceDbTableColumnDTO.java @@ -0,0 +1,40 @@ +package com.webank.wedatasphere.exchangis.datasource.dto; + +public class DataSourceDbTableColumnDTO { + private String name; + private String type; + private int fieldIndex; + private boolean fieldEditable; + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public String getType() { + return type; + } + + public void setType(String type) { + this.type = type; + } + + public int getFieldIndex() { + return fieldIndex; + } + + public void setFieldIndex(int fieldIndex) { + this.fieldIndex = fieldIndex; + } + + public boolean isFieldEditable() { + return fieldEditable; + } + + public void setFieldEditable(boolean fieldEditable) { + this.fieldEditable = fieldEditable; + } +} diff --git a/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/dto/DataSourceTestConnectResultDTO.java b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/dto/DataSourceTestConnectResultDTO.java new file mode 100644 index 000000000..581199646 --- /dev/null +++ 
b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/dto/DataSourceTestConnectResultDTO.java @@ -0,0 +1,18 @@ +package com.webank.wedatasphere.exchangis.datasource.dto; + +public class DataSourceTestConnectResultDTO extends ResultDTO { + + private TestConnectData data; + + public TestConnectData getData() { + return data; + } + + public void setData(TestConnectData data) { + this.data = data; + } + + public static class TestConnectData { + } + +} diff --git a/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/dto/DeleteDataSourceSuccessResultDTO.java b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/dto/DeleteDataSourceSuccessResultDTO.java new file mode 100644 index 000000000..a4a8d1b22 --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/dto/DeleteDataSourceSuccessResultDTO.java @@ -0,0 +1,28 @@ +package com.webank.wedatasphere.exchangis.datasource.dto; + +import com.fasterxml.jackson.annotation.JsonProperty; + +public class DeleteDataSourceSuccessResultDTO extends ResultDTO { + private DeleteIdDTO data; + + public DeleteIdDTO getData() { + return data; + } + + public void setData(DeleteIdDTO data) { + this.data = data; + } + + public static class DeleteIdDTO { + @JsonProperty(value = "remove_id") + private Long id; + + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } + } +} diff --git a/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/dto/ExchangisDataSourceDTO.java b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/dto/ExchangisDataSourceDTO.java new file mode 100644 index 000000000..4f2c06fe3 --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/dto/ExchangisDataSourceDTO.java @@ -0,0 +1,169 @@ +package com.webank.wedatasphere.exchangis.datasource.dto; + +import java.util.Date; + +public class ExchangisDataSourceDTO { + + private Long id; + private String name; + private String type; + private Long dataSourceTypeId; + private String createIdentify; + private String createSystem; + private String desc; + private String createUser; + private String labels; + private String label; + private Long versionId; + private String modifyUser; + private Date modifyTime; + private boolean expire; + private boolean writeAble; + private boolean readAble; + private String authDbs; + private String authTbls; + + public boolean isExpire() { + return expire; + } + + public void setExpire(boolean expire) { + this.expire = expire; + } + + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public String getType() { + return type; + } + + public void setType(String type) { + this.type = type; + } + + public String getCreateIdentify() { + return createIdentify; + } + + public void setCreateIdentify(String createIdentify) { + this.createIdentify = createIdentify; + } + + public Long getDataSourceTypeId() { + return dataSourceTypeId; + } + + public void setDataSourceTypeId(Long dataSourceTypeId) { + this.dataSourceTypeId = dataSourceTypeId; + } + + public String getDesc() { 
+ return desc; + } + + public void setDesc(String desc) { + this.desc = desc; + } + + public String getCreateUser() { + return createUser; + } + + public void setCreateUser(String createUser) { + this.createUser = createUser; + } + + public String getLabels() { + return labels; + } + + public void setLabels(String labels) { + this.labels = labels; + } + + public String getLabel() { + return label; + } + + public void setLabel(String label) { + this.label = label; + } + + public Long getVersionId() { + return versionId; + } + + public void setVersionId(Long versionId) { + this.versionId = versionId; + } + + public String getModifyUser() { + return modifyUser; + } + + public void setModifyUser(String modifyUser) { + this.modifyUser = modifyUser; + } + + public Date getModifyTime() { + return modifyTime; + } + + public void setModifyTime(Date modifyTime) { + this.modifyTime = modifyTime; + } + + public String getCreateSystem() { + return createSystem; + } + + public void setCreateSystem(String createSystem) { + this.createSystem = createSystem; + } + + public boolean isWriteAble() { + return writeAble; + } + + public void setWriteAble(boolean writeAble) { + this.writeAble = writeAble; + } + + public boolean isReadAble() { + return readAble; + } + + public void setReadAble(boolean readAble) { + this.readAble = readAble; + } + + public String getAuthDbs() { + return authDbs; + } + + public void setAuthDbs(String authDbs) { + this.authDbs = authDbs; + } + + public String getAuthTbls() { + return authTbls; + } + + public void setAuthTbls(String authTbls) { + this.authTbls = authTbls; + } +} diff --git a/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/dto/ExchangisDataSourceDefDTO.java b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/dto/ExchangisDataSourceDefDTO.java new file mode 100644 index 000000000..c71f60fd9 --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/dto/ExchangisDataSourceDefDTO.java @@ -0,0 +1,62 @@ +package com.webank.wedatasphere.exchangis.datasource.dto; + +public class ExchangisDataSourceDefDTO { + private final String id; + private final String classifier; + private final String name; + private String option; + private String description; + private String icon; + private String struct_classifier; + + public ExchangisDataSourceDefDTO(String id, String classifier, String name, String struct_classifier) { + this.id = id; + this.classifier = classifier; + this.name = name; + this.struct_classifier = struct_classifier; + } + + public String getId() { + return id; + } + + public String getClassifier() { + return classifier; + } + + public String getName() { + return name; + } + + public String getOption() { + return option; + } + + public void setOption(String option) { + this.option = option; + } + + public void setDescription(String description) { + this.description = description; + } + + public void setIcon(String icon) { + this.icon = icon; + } + + public String getDescription() { + return description; + } + + public String getIcon() { + return icon; + } + + public String getStruct_classifier() { + return struct_classifier; + } + + public void setStruct_classifier(String struct_classifier) { + this.struct_classifier = struct_classifier; + } +} diff --git 
a/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/dto/ExpireDataSourceSuccessResultDTO.java b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/dto/ExpireDataSourceSuccessResultDTO.java new file mode 100644 index 000000000..6a9688c0f --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/dto/ExpireDataSourceSuccessResultDTO.java @@ -0,0 +1,29 @@ +package com.webank.wedatasphere.exchangis.datasource.dto; + +import com.fasterxml.jackson.annotation.JsonProperty; + +public class ExpireDataSourceSuccessResultDTO extends ResultDTO { + private ExpireIdDTO data; + + + public ExpireIdDTO getData() { + return data; + } + + public void setData(ExpireIdDTO data) { + this.data = data; + } + + public static class ExpireIdDTO { + @JsonProperty(value = "expire_id") + private Long id; + + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } + } +} diff --git a/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/dto/GetDataSourceConnectParamsResultDTO.java b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/dto/GetDataSourceConnectParamsResultDTO.java new file mode 100644 index 000000000..cd800a480 --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/dto/GetDataSourceConnectParamsResultDTO.java @@ -0,0 +1,16 @@ +package com.webank.wedatasphere.exchangis.datasource.dto; + +public class GetDataSourceConnectParamsResultDTO extends ResultDTO { + private ConnectParamsDTO data; + + public ConnectParamsDTO getData() { + return data; + } + + public void setData(ConnectParamsDTO data) { + this.data = data; + } + + public static class ConnectParamsDTO { + } +} diff --git a/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/dto/GetDataSourceInfoResultDTO.java b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/dto/GetDataSourceInfoResultDTO.java new file mode 100644 index 000000000..3bc542801 --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/dto/GetDataSourceInfoResultDTO.java @@ -0,0 +1,206 @@ +package com.webank.wedatasphere.exchangis.datasource.dto; + +import java.util.Map; + +public class GetDataSourceInfoResultDTO extends ResultDTO { + private DataSourceInfoDTO data; + + public DataSourceInfoDTO getData() { + return data; + } + + public void setData(DataSourceInfoDTO data) { + this.data = data; + } + + public static class DataSourceInfoDTO { + private DataSourceItemDTO info; + + public DataSourceItemDTO getInfo() { + return info; + } + + public void setInfo(DataSourceItemDTO info) { + this.info = info; + } + } + + public static class DataSourceItemDTO { + private Long id; + private String dataSourceName; + private String dataSourceDesc; + private Long dataSourceTypeId; + private String createIdentify; + private String createSystem; + private Map connectParams; + private Long createTime; + private String createUser; + private Long modifyTime; + private String modifyUser; + private String labels; + private String label; + private Long versionId; + private Integer publishedVersionId; + private Boolean 
expire; + private DataSourceItemDsTypeDTO dataSourceType; + + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } + + public String getDataSourceName() { + return dataSourceName; + } + + public void setDataSourceName(String dataSourceName) { + this.dataSourceName = dataSourceName; + } + + public String getDataSourceDesc() { + return dataSourceDesc; + } + + public void setDataSourceDesc(String dataSourceDesc) { + this.dataSourceDesc = dataSourceDesc; + } + + public Long getDataSourceTypeId() { + return dataSourceTypeId; + } + + public void setDataSourceTypeId(Long dataSourceTypeId) { + this.dataSourceTypeId = dataSourceTypeId; + } + + public String getCreateIdentify() { + return createIdentify; + } + + public void setCreateIdentify(String createIdentify) { + this.createIdentify = createIdentify; + } + + public String getCreateSystem() { + return createSystem; + } + + public void setCreateSystem(String createSystem) { + this.createSystem = createSystem; + } + + public Map getConnectParams() { + return connectParams; + } + + public void setConnectParams(Map connectParams) { + this.connectParams = connectParams; + } + + public Long getCreateTime() { + return createTime; + } + + public void setCreateTime(Long createTime) { + this.createTime = createTime; + } + + public String getCreateUser() { + return createUser; + } + + public void setCreateUser(String createUser) { + this.createUser = createUser; + } + + public String getLabels() { + return labels; + } + + public void setLabels(String labels) { + this.labels = labels; + this.label = labels; + } + + public String getLabel() { + return label; + } + + public void setLabel(String label) { + this.label = label; + } + + public Integer getPublishedVersionId() { + return publishedVersionId; + } + + public void setPublishedVersionId(Integer publishedVersionId) { + this.publishedVersionId = publishedVersionId; + } + + public Long getVersionId() { + return versionId; + } + + public void setVersionId(Long versionId) { + this.versionId = versionId; + } + + public Boolean getExpire() { + return expire; + } + + public void setExpire(Boolean expire) { + this.expire = expire; + } + + public DataSourceItemDsTypeDTO getDataSourceType() { + return dataSourceType; + } + + public void setDataSourceType(DataSourceItemDsTypeDTO dataSourceType) { + this.dataSourceType = dataSourceType; + } + + public Long getModifyTime() { + return modifyTime; + } + + public void setModifyTime(Long modifyTime) { + this.modifyTime = modifyTime; + } + + public String getModifyUser() { + return modifyUser; + } + + public void setModifyUser(String modifyUser) { + this.modifyUser = modifyUser; + } + } + + public static class DataSourceItemDsTypeDTO { + private String name; + private Integer layers; + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public Integer getLayers() { + return layers; + } + + public void setLayers(Integer layers) { + this.layers = layers; + } + } + +} diff --git a/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/dto/GetDataSourceTypeKeyDefinesSuccessResultDTO.java b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/dto/GetDataSourceTypeKeyDefinesSuccessResultDTO.java new file mode 100644 index 000000000..06977ec1b --- /dev/null +++ 
b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/dto/GetDataSourceTypeKeyDefinesSuccessResultDTO.java @@ -0,0 +1,127 @@ +package com.webank.wedatasphere.exchangis.datasource.dto; + + +import org.apache.linkis.datasourcemanager.common.domain.DataSourceParamKeyDefinition; + +public class GetDataSourceTypeKeyDefinesSuccessResultDTO extends ResultDTO { + private DataSourceKeyDefinition data; + + public DataSourceKeyDefinition getData() { + return data; + } + + public void setData(DataSourceKeyDefinition data) { + this.data = data; + } + + public static class DataSourceKeyDefinition { + private Long id; + private String key; + private String description; + private String name; + private String defaultValue; + private DataSourceParamKeyDefinition.ValueType valueType; + private DataSourceParamKeyDefinition.Scope scope; + private boolean require; + private String valueRegex; + private Long refId; + private String refValue; + private String dataSource; + + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } + + public String getKey() { + return key; + } + + public void setKey(String key) { + this.key = key; + } + + public String getDescription() { + return description; + } + + public void setDescription(String description) { + this.description = description; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public String getDefaultValue() { + return defaultValue; + } + + public void setDefaultValue(String defaultValue) { + this.defaultValue = defaultValue; + } + + public DataSourceParamKeyDefinition.ValueType getValueType() { + return valueType; + } + + public void setValueType(DataSourceParamKeyDefinition.ValueType valueType) { + this.valueType = valueType; + } + + public DataSourceParamKeyDefinition.Scope getScope() { + return scope; + } + + public void setScope(DataSourceParamKeyDefinition.Scope scope) { + this.scope = scope; + } + + public boolean isRequire() { + return require; + } + + public void setRequire(boolean require) { + this.require = require; + } + + public String getValueRegex() { + return valueRegex; + } + + public void setValueRegex(String valueRegex) { + this.valueRegex = valueRegex; + } + + public Long getRefId() { + return refId; + } + + public void setRefId(Long refId) { + this.refId = refId; + } + + public String getRefValue() { + return refValue; + } + + public void setRefValue(String refValue) { + this.refValue = refValue; + } + + public String getDataSource() { + return dataSource; + } + + public void setDataSource(String dataSource) { + this.dataSource = dataSource; + } + } +} diff --git a/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/dto/GetDataSourceVersionsResultDTO.java b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/dto/GetDataSourceVersionsResultDTO.java new file mode 100644 index 000000000..7c500534b --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/dto/GetDataSourceVersionsResultDTO.java @@ -0,0 +1,87 @@ +package com.webank.wedatasphere.exchangis.datasource.dto; + +import java.util.List; +import java.util.Map; + +public class GetDataSourceVersionsResultDTO extends ResultDTO { + + private VersionDataDTO data; + + public VersionDataDTO getData() { + return data; + } + + public void 
setData(VersionDataDTO data) { + this.data = data; + } + + public static class VersionDataDTO { + private List versions; + + public List getVersions() { + return versions; + } + + public void setVersions(List versions) { + this.versions = versions; + } + } + + public static class VersionDTO { + private Long versionId; + private Long datasourceId; + private Map connectParams; + private String comment; + private String createUser; + private boolean published = false; + + public Long getVersionId() { + return versionId; + } + + public void setVersionId(Long versionId) { + this.versionId = versionId; + } + + public Long getDatasourceId() { + return datasourceId; + } + + public void setDatasourceId(Long datasourceId) { + this.datasourceId = datasourceId; + } + + public Map getConnectParams() { + return connectParams; + } + + public void setConnectParams(Map connectParams) { + this.connectParams = connectParams; + } + + public String getComment() { + return comment; + } + + public void setComment(String comment) { + this.comment = comment; + } + + public String getCreateUser() { + return createUser; + } + + public void setCreateUser(String createUser) { + this.createUser = createUser; + } + + public boolean isPublished() { + return published; + } + + public void setPublished(boolean published) { + this.published = published; + } + } + +} diff --git a/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/dto/MetadataGetColumnsResultDTO.java b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/dto/MetadataGetColumnsResultDTO.java new file mode 100644 index 000000000..1837db063 --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/dto/MetadataGetColumnsResultDTO.java @@ -0,0 +1,72 @@ +package com.webank.wedatasphere.exchangis.datasource.dto; + +import java.util.List; + +public class MetadataGetColumnsResultDTO extends ResultDTO { + + private Data data; + + public Data getData() { + return data; + } + + public void setData(Data data) { + this.data = data; + } + + public static class Data { + + private List columns; + + public List getColumns() { + return columns; + } + + public void setColumns(List columns) { + this.columns = columns; + } + + } + + public static class Column { + + private int index; + private boolean primaryKey; + private String name; + private String type; + + public int getIndex() { + return index; + } + + public void setIndex(int index) { + this.index = index; + } + + public boolean isPrimaryKey() { + return primaryKey; + } + + public void setPrimaryKey(boolean primaryKey) { + this.primaryKey = primaryKey; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public String getType() { + return type; + } + + public void setType(String type) { + this.type = type; + } + + } + +} diff --git a/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/dto/MetadataGetTablePropsResultDTO.java b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/dto/MetadataGetTablePropsResultDTO.java new file mode 100644 index 000000000..069e2109a --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/dto/MetadataGetTablePropsResultDTO.java @@ -0,0 +1,30 @@ +package 
com.webank.wedatasphere.exchangis.datasource.dto; + +import java.util.Map; + +public class MetadataGetTablePropsResultDTO extends ResultDTO { + + private Data data; + + public Data getData() { + return data; + } + + public void setData(Data data) { + this.data = data; + } + + public static class Data { + + private Map props; + + public Map getProps() { + return props; + } + + public void setProps(Map props) { + this.props = props; + } + } + +} diff --git a/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/dto/ResultDTO.java b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/dto/ResultDTO.java new file mode 100644 index 000000000..bab2dbfef --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/dto/ResultDTO.java @@ -0,0 +1,35 @@ +package com.webank.wedatasphere.exchangis.datasource.dto; + +public class ResultDTO { + + private String method; + + private Integer status; + + private String message; + + public String getMethod() { + return method; + } + + public void setMethod(String method) { + this.method = method; + } + + public Integer getStatus() { + return status; + } + + public void setStatus(Integer status) { + this.status = status; + } + + public String getMessage() { + return message; + } + + public void setMessage(String message) { + this.message = message; + } + +} diff --git a/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/dto/UpdateDataSourceSuccessResultDTO.java b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/dto/UpdateDataSourceSuccessResultDTO.java new file mode 100644 index 000000000..56d56c985 --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/dto/UpdateDataSourceSuccessResultDTO.java @@ -0,0 +1,29 @@ +package com.webank.wedatasphere.exchangis.datasource.dto; + +import com.fasterxml.jackson.annotation.JsonProperty; + +public class UpdateDataSourceSuccessResultDTO extends ResultDTO { + private UpdateIdDTO data; + + + public UpdateIdDTO getData() { + return data; + } + + public void setData(UpdateIdDTO data) { + this.data = data; + } + + public static class UpdateIdDTO { + @JsonProperty(value = "update_id") + private Long id; + + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } + } +} diff --git a/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/dto/UpdateParamsVersionResultDTO.java b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/dto/UpdateParamsVersionResultDTO.java new file mode 100644 index 000000000..3225448fd --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/dto/UpdateParamsVersionResultDTO.java @@ -0,0 +1,25 @@ +package com.webank.wedatasphere.exchangis.datasource.dto; + +public class UpdateParamsVersionResultDTO extends ResultDTO { + private VersionDTO data; + + public VersionDTO getData() { + return data; + } + + public void setData(VersionDTO data) { + this.data = data; + } + + public static class VersionDTO { + private Long version; + + public Long getVersion() { + return version; + } + + public void setVersion(Long version) { + 
+}
diff --git a/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/dto/UpdateParamsVersionResultDTO.java b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/dto/UpdateParamsVersionResultDTO.java
new file mode 100644
index 000000000..3225448fd
--- /dev/null
+++ b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/dto/UpdateParamsVersionResultDTO.java
@@ -0,0 +1,25 @@
+package com.webank.wedatasphere.exchangis.datasource.dto;
+
+public class UpdateParamsVersionResultDTO extends ResultDTO {
+    private VersionDTO data;
+
+    public VersionDTO getData() {
+        return data;
+    }
+
+    public void setData(VersionDTO data) {
+        this.data = data;
+    }
+
+    public static class VersionDTO {
+        private Long version;
+
+        public Long getVersion() {
+            return version;
+        }
+
+        public void setVersion(Long version) {
+            this.version = version;
+        }
+    }
+}
diff --git a/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/service/AbstractDataSourceService.java b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/service/AbstractDataSourceService.java
new file mode 100644
index 000000000..f4ebaab69
--- /dev/null
+++ b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/service/AbstractDataSourceService.java
@@ -0,0 +1,360 @@
+package com.webank.wedatasphere.exchangis.datasource.service;
+
+import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.core.type.TypeReference;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.google.common.base.Strings;
+import com.webank.wedatasphere.exchangis.common.UserUtils;
+import com.webank.wedatasphere.exchangis.dao.domain.ExchangisJobParamConfig;
+import com.webank.wedatasphere.exchangis.dao.mapper.ExchangisJobParamConfigMapper;
+import com.webank.wedatasphere.exchangis.datasource.core.ExchangisDataSourceDefinition;
+import com.webank.wedatasphere.exchangis.datasource.core.context.ExchangisDataSourceContext;
+import com.webank.wedatasphere.exchangis.datasource.core.ui.*;
+import com.webank.wedatasphere.exchangis.datasource.core.ui.viewer.DefaultDataSourceUIViewer;
+import com.webank.wedatasphere.exchangis.datasource.core.ui.viewer.ExchangisDataSourceUIViewer;
+import com.webank.wedatasphere.exchangis.datasource.core.utils.Json;
+import com.webank.wedatasphere.exchangis.datasource.core.vo.ExchangisJobDataSourcesContent;
+import com.webank.wedatasphere.exchangis.datasource.core.vo.ExchangisJobInfoContent;
+import com.webank.wedatasphere.exchangis.datasource.core.vo.ExchangisJobParamsContent;
+import com.webank.wedatasphere.exchangis.datasource.core.vo.ExchangisJobTransformsContent;
+import com.webank.wedatasphere.exchangis.datasource.dto.GetDataSourceInfoResultDTO;
+import com.webank.wedatasphere.exchangis.job.domain.ExchangisJobEntity;
+import org.apache.commons.lang.StringUtils;
+import org.apache.linkis.datasource.client.impl.LinkisDataSourceRemoteClient;
+import org.apache.linkis.datasource.client.request.GetInfoByDataSourceIdAction;
+import org.apache.linkis.httpclient.response.Result;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.servlet.http.HttpServletRequest;
+import java.util.*;
+import java.util.stream.Collectors;
+
+public class AbstractDataSourceService {
+    protected final ObjectMapper mapper = new ObjectMapper();
+    protected final ExchangisDataSourceContext context;
+    protected final ExchangisJobParamConfigMapper exchangisJobParamConfigMapper;
+
+    private final static Logger LOG = LoggerFactory.getLogger(AbstractDataSourceService.class);
+
+
+    public AbstractDataSourceService(ExchangisDataSourceContext context, ExchangisJobParamConfigMapper exchangisJobParamConfigMapper) {
+        this.context = context;
+        this.exchangisJobParamConfigMapper = exchangisJobParamConfigMapper;
+    }
+
+    protected List<ExchangisJobInfoContent> parseJobContent(String content) {
+        List<ExchangisJobInfoContent> jobInfoContents;
+        if (Strings.isNullOrEmpty(content)) {
+            jobInfoContents = new ArrayList<>();
+        } else {
+            try {
+                jobInfoContents = this.mapper.readValue(content, new TypeReference<List<ExchangisJobInfoContent>>() {
+                });
+            } catch (JsonProcessingException e) {
+                jobInfoContents = new ArrayList<>();
+            }
+        }
+        return jobInfoContents;
+    }
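+    /*
+     * parseJobContent above deserializes the job's content column into sub-job
+     * descriptions. A minimal illustrative value (property names follow the
+     * ExchangisJobInfoContent accessors used in this class; the exact JSON field
+     * naming is an assumption, values invented):
+     *
+     * [ { "subJobName": "sub_job_1",
+     *     "dataSources": { "sourceId": "MYSQL.1.test_db.test_table",
+     *                      "sinkId": "HIVE.2.test_db.test_table" },
+     *     "params": { "sources": [], "sinks": [] } } ]
+     *
+     * The "TYPE.id.database.table" shape matches the split("\\.") parsing in
+     * buildDataSourceIdsUI below.
+     */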
+
+    private ExchangisDataSourceIdsUI buildDataSourceIdsUI(ExchangisJobInfoContent content) {
+        return this.buildDataSourceIdsUI(null, content);
+    }
+
+    private ExchangisDataSourceIdsUI buildDataSourceIdsUI(HttpServletRequest request, ExchangisJobInfoContent content) {
+        String loginUser = Objects.nonNull(request) ? UserUtils.getLoginUser(request) : null;
+        ExchangisJobDataSourcesContent dataSources = content.getDataSources();
+        if (Objects.isNull(dataSources)) {
+            return null;
+        }
+
+        String sourceId = dataSources.getSourceId();
+        String sinkId = dataSources.getSinkId();
+
+        if (Strings.isNullOrEmpty(sourceId) && Strings.isNullOrEmpty(sinkId)) {
+            return null;
+        }
+
+        ExchangisDataSourceIdsUI ids = new ExchangisDataSourceIdsUI();
+        if (!Strings.isNullOrEmpty(sourceId)) {
+            String[] split = sourceId.trim().split("\\.");
+            ExchangisDataSourceIdUI source = new ExchangisDataSourceIdUI();
+            source.setType(split[0]);
+            source.setId(split[1]);
+            Optional.ofNullable(loginUser).ifPresent(u -> {
+                Optional.ofNullable(this.context.getExchangisDsDefinition(split[0])).ifPresent(o -> {
+                    LinkisDataSourceRemoteClient dsClient = o.getDataSourceRemoteClient();
+                    GetInfoByDataSourceIdAction action = GetInfoByDataSourceIdAction.builder()
+                            .setDataSourceId(Long.parseLong(split[1]))
+                            .setUser(u)
+                            .setSystem(split[0])
+                            .build();
+
+                    Result execute = dsClient.execute(action);
+                    String responseBody = execute.getResponseBody();
+                    GetDataSourceInfoResultDTO dsInfo = Json.fromJson(responseBody, GetDataSourceInfoResultDTO.class);
+                    assert dsInfo != null;
+                    source.setDs(dsInfo.getData().getInfo().getDataSourceName());
+                });
+            });
+            source.setDb(split[2]);
+            source.setTable(split[3]);
+
+            ids.setSource(source);
+        }
+
+        if (!Strings.isNullOrEmpty(sinkId)) {
+            String[] split = sinkId.trim().split("\\.");
+            ExchangisDataSourceIdUI sink = new ExchangisDataSourceIdUI();
+            sink.setType(split[0]);
+            sink.setId(split[1]);
+            Optional.ofNullable(loginUser).ifPresent(u -> {
+                Optional.ofNullable(this.context.getExchangisDsDefinition(split[0])).ifPresent(o -> {
+                    LinkisDataSourceRemoteClient dsClient = o.getDataSourceRemoteClient();
+                    GetInfoByDataSourceIdAction action = GetInfoByDataSourceIdAction.builder()
+                            .setDataSourceId(Long.parseLong(split[1]))
+                            .setUser(u)
+                            .setSystem(split[0])
+                            .build();
+                    Result execute = dsClient.execute(action);
+                    String responseBody = execute.getResponseBody();
+                    GetDataSourceInfoResultDTO dsInfo = Json.fromJson(responseBody, GetDataSourceInfoResultDTO.class);
+                    assert dsInfo != null;
+                    sink.setDs(dsInfo.getData().getInfo().getDataSourceName());
+                });
+            });
+
+            sink.setDb(split[2]);
+            sink.setTable(split[3]);
+
+            ids.setSink(sink);
+        }
+        return ids;
+    }
+
+    protected ExchangisDataSourceParamsUI buildDataSourceParamsUI(ExchangisJobInfoContent content) {
+        ExchangisDataSourceIdsUI dataSourceIdsUI = buildDataSourceIdsUI(content);
+
+        ExchangisJobParamsContent params = content.getParams();
+        List<ExchangisJobParamConfig> sourceParamConfigs = Collections.emptyList();
+        List<ExchangisJobParamConfig> sinkParamConfigs = Collections.emptyList();
+        if (null != dataSourceIdsUI) {
+            ExchangisDataSourceIdUI source = dataSourceIdsUI.getSource();
+            if (null != source) {
+                String type = source.getType();
+                ExchangisDataSourceDefinition exchangisSourceDataSource = this.context.getExchangisDsDefinition(type);
+                if (null != exchangisSourceDataSource) {
+                    sourceParamConfigs = exchangisSourceDataSource.getDataSourceParamConfigs().stream().filter(
"SOURCE".equalsIgnoreCase(i.getConfigDirection())).collect(Collectors.toList()); + } + } + + ExchangisDataSourceIdUI sink = dataSourceIdsUI.getSink(); + if (null != sink) { + String type = sink.getType(); + ExchangisDataSourceDefinition exchangisSinkDataSource = this.context.getExchangisDsDefinition(type); + if (null != exchangisSinkDataSource) { + sinkParamConfigs = exchangisSinkDataSource.getDataSourceParamConfigs().stream().filter(i -> + i.getConfigDirection().equals(content.getEngine() + "-SINK") || "SINK".equalsIgnoreCase(i.getConfigDirection())).collect(Collectors.toList()); + } + } + } + + List sourceParamsItems = Collections.emptyList(); + List sinkParamsItems = Collections.emptyList(); + if (null != params && null != params.getSources()) { + sourceParamsItems = params.getSources(); + } + if (null != params && null != params.getSinks()) { + sinkParamsItems = params.getSinks(); + } + + List> jobDataSourceParamsUI1 = buildDataSourceParamsFilledValueUI(sourceParamConfigs, sourceParamsItems); + List> jobDataSourceParamsUI2 = buildDataSourceParamsFilledValueUI(sinkParamConfigs, sinkParamsItems); + ExchangisDataSourceParamsUI paramsUI = new ExchangisDataSourceParamsUI(); + paramsUI.setSources(jobDataSourceParamsUI1); + paramsUI.setSinks(jobDataSourceParamsUI2); + return paramsUI; + } + + protected ExchangisDataSourceUIViewer buildAllUI(HttpServletRequest request, ExchangisJobEntity job, ExchangisJobInfoContent content) { + // ----------- 构建 dataSourceIdsUI + ExchangisDataSourceIdsUI dataSourceIdsUI = buildDataSourceIdsUI(request, content); + + // ----------- 构建 dataSourceParamsUI + ExchangisDataSourceParamsUI paramsUI = buildDataSourceParamsUI(content); + + // ----------- 构建 dataSourceTransformsUI + ExchangisJobTransformsContent transforms = content.getTransforms(); + transforms.setAddEnable(!("HIVE".equals(dataSourceIdsUI.getSource().getType()) || "HIVE".equals(dataSourceIdsUI.getSink().getType()))); + +// ExchangisDataSourceTransformsUI dataSourceTransFormsUI = ExchangisDataSourceUIViewBuilder.getDataSourceTransFormsUI(transforms); + + List> jobDataSourceSettingsUI = this.buildJobSettingsUI(job.getEngineType(), content); + + return new DefaultDataSourceUIViewer(content.getSubJobName(), dataSourceIdsUI, paramsUI, transforms, jobDataSourceSettingsUI); + } + + + protected List> buildJobSettingsUI(String jobEngineType) { + if (Strings.isNullOrEmpty(jobEngineType)) { + return Collections.emptyList(); + } + QueryWrapper queryWrapper = new QueryWrapper<>(); + queryWrapper.eq("type", jobEngineType); + queryWrapper.eq("is_hidden", 0); + queryWrapper.eq("status", 1); + List settingParamConfigs = exchangisJobParamConfigMapper.selectList(queryWrapper); + return buildDataSourceParamsFilledValueUI(settingParamConfigs, null); + } + + protected List> buildJobSettingsUI(String jobEngineType, ExchangisJobInfoContent content) { + if (Strings.isNullOrEmpty(jobEngineType)) { + return Collections.emptyList(); + } + List settings = content.getSettings(); + QueryWrapper queryWrapper = new QueryWrapper<>(); + queryWrapper.eq("type", jobEngineType); + queryWrapper.eq("is_hidden", 0); + queryWrapper.eq("status", 1); + List settingParamConfigs = exchangisJobParamConfigMapper.selectList(queryWrapper); + return buildDataSourceParamsFilledValueUI(settingParamConfigs, settings); + } + + protected List> buildDataSourceParamsUI(List paramConfigs) { + List> uis = new ArrayList<>(); + if (!Objects.isNull(paramConfigs) && !paramConfigs.isEmpty()) { + for (ExchangisJobParamConfig cfg : paramConfigs) { + ElementUI ui = 
fillElementUIValue(cfg, ""); + uis.add(ui); + } + } + return uis; + } + + protected List> buildDataSourceParamsFilledValueUI(List paramConfigs, List paramsList) { + List> uis = new ArrayList<>(); + if (!Objects.isNull(paramConfigs) && !paramConfigs.isEmpty()) { + for (ExchangisJobParamConfig cfg : paramConfigs) { + if (Objects.isNull(paramsList) || paramsList.isEmpty()) { + uis.add(fillElementUIValue(cfg, "")); + continue; + } + ExchangisJobParamsContent.ExchangisJobParamsItem selectedParamItem = getJobParamsItem(cfg.getConfigKey(), paramsList); + if (Objects.isNull(selectedParamItem)) { + ElementUI ui = fillElementUIValue(cfg, ""); + uis.add(ui); + } else { + ElementUI ui = fillElementUIValue(cfg, selectedParamItem.getConfigValue()); + uis.add(ui); + } + } + } + return uis; + } + + private ExchangisJobParamsContent.ExchangisJobParamsItem getJobParamsItem(String configKey, List sources) { + for (ExchangisJobParamsContent.ExchangisJobParamsItem item : sources) { + if (item.getConfigKey().equalsIgnoreCase(configKey)) { + return item; + } + } + return null; + } + + private ElementUI fillElementUIValue(ExchangisJobParamConfig config, Object value) { + String uiType = config.getUiType(); + ElementUI.Type uiTypeEnum; + try { + uiTypeEnum = StringUtils.isNotBlank(uiType)? + ElementUI.Type.valueOf(uiType.toUpperCase(Locale.ROOT)) : ElementUI.Type.NONE; + }catch (Exception e){ + uiTypeEnum = ElementUI.Type.NONE; + } + switch (uiTypeEnum) { + case OPTION: + return fillOptionElementUIValue(config, String.valueOf(value)); + case INPUT: + return fillInputElementUIValue(config, String.valueOf(value)); + case MAP: + Map mapElement = null; + try { + mapElement = Json.fromJson(Json.toJson(value, null), + Map.class, String.class, Object.class); + } catch (Exception e) { + LOG.info("Exception happened while parse json"+ "Config value: " + value + "message: " + e.getMessage(), e); + } + return fillMapElementUIValue(config, mapElement); + default: + return null; + } + } + + + private OptionElementUI fillOptionElementUIValue(ExchangisJobParamConfig config, String value) { + String valueRange = config.getValueRange(); + List values = Collections.emptyList(); + try { + values = mapper.readValue(valueRange, List.class); + } catch (JsonProcessingException e) { + e.printStackTrace(); + } + + OptionElementUI ui = new OptionElementUI(); + ui.setId(config.getId()); + ui.setKey(config.getConfigKey()); + ui.setField(config.getUiField()); + ui.setLabel(config.getUiLabel()); + ui.setValues(values); + ui.setValue(value); + ui.setDefaultValue(config.getDefaultValue()); + ui.setSort(config.getSort()); + ui.setRequired(config.getRequired()); + ui.setUnit(config.getUnit()); + ui.setRefId(config.getRefId()); + return ui; + } + + private InputElementUI fillInputElementUIValue(ExchangisJobParamConfig config, String value) { + InputElementUI ui = new InputElementUI(); + ui.setId(config.getId()); + ui.setKey(config.getConfigKey()); + ui.setField(config.getUiField()); + ui.setLabel(config.getUiLabel()); + ui.setValue(value); + ui.setDefaultValue(config.getDefaultValue()); + ui.setSort(config.getSort()); + ui.setRequired(config.getRequired()); + ui.setUnit(config.getUnit()); + ui.setSource(config.getSource()); + ui.setValidateType(config.getValidateType()); + ui.setValidateRange(config.getValidateRange()); + ui.setValidateMsg(config.getValidateMsg()); + ui.setRefId(config.getRefId()); + return ui; + } + + private MapElementUI fillMapElementUIValue(ExchangisJobParamConfig config, Map value) { + MapElementUI ui = new MapElementUI(); + 
+
+
+    private OptionElementUI fillOptionElementUIValue(ExchangisJobParamConfig config, String value) {
+        String valueRange = config.getValueRange();
+        List<String> values = Collections.emptyList();
+        try {
+            values = mapper.readValue(valueRange, List.class);
+        } catch (JsonProcessingException e) {
+            LOG.warn("Fail to parse the value range: " + valueRange, e);
+        }
+
+        OptionElementUI ui = new OptionElementUI();
+        ui.setId(config.getId());
+        ui.setKey(config.getConfigKey());
+        ui.setField(config.getUiField());
+        ui.setLabel(config.getUiLabel());
+        ui.setValues(values);
+        ui.setValue(value);
+        ui.setDefaultValue(config.getDefaultValue());
+        ui.setSort(config.getSort());
+        ui.setRequired(config.getRequired());
+        ui.setUnit(config.getUnit());
+        ui.setRefId(config.getRefId());
+        return ui;
+    }
+
+    private InputElementUI fillInputElementUIValue(ExchangisJobParamConfig config, String value) {
+        InputElementUI ui = new InputElementUI();
+        ui.setId(config.getId());
+        ui.setKey(config.getConfigKey());
+        ui.setField(config.getUiField());
+        ui.setLabel(config.getUiLabel());
+        ui.setValue(value);
+        ui.setDefaultValue(config.getDefaultValue());
+        ui.setSort(config.getSort());
+        ui.setRequired(config.getRequired());
+        ui.setUnit(config.getUnit());
+        ui.setSource(config.getSource());
+        ui.setValidateType(config.getValidateType());
+        ui.setValidateRange(config.getValidateRange());
+        ui.setValidateMsg(config.getValidateMsg());
+        ui.setRefId(config.getRefId());
+        return ui;
+    }
+
+    private MapElementUI fillMapElementUIValue(ExchangisJobParamConfig config, Map<String, Object> value) {
+        MapElementUI ui = new MapElementUI();
+        ui.setId(config.getId());
+        ui.setKey(config.getConfigKey());
+        ui.setField(config.getUiField());
+        ui.setLabel(config.getUiLabel());
+        ui.setValue(value);
+        //ui.setDefaultValue(config.getDefaultValue());
+        ui.setSort(config.getSort());
+        ui.setRequired(config.getRequired());
+        ui.setUnit(config.getUnit());
+        ui.setSource(config.getSource());
+        ui.setValidateType(config.getValidateType());
+        ui.setValidateRange(config.getValidateRange());
+        ui.setValidateMsg(config.getValidateMsg());
+        ui.setRefId(config.getRefId());
+        return ui;
+    }
+
+}
diff --git a/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/service/DataSourceRenderService.java b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/service/DataSourceRenderService.java
new file mode 100644
index 000000000..5df843cdc
--- /dev/null
+++ b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/service/DataSourceRenderService.java
@@ -0,0 +1,17 @@
+package com.webank.wedatasphere.exchangis.datasource.service;
+
+import com.webank.wedatasphere.exchangis.datasource.core.exception.ExchangisDataSourceException;
+import com.webank.wedatasphere.exchangis.datasource.core.ui.ElementUI;
+
+public interface DataSourceRenderService {
+
+    /**
+     * Get the partition info and render it to an element
+     * @param userName user name
+     * @param dataSourceId data source id
+     * @param database database name
+     * @param table table name
+     * @param uiType ui type of the rendered element
+     * @param tableNotExist whether the table does not exist yet
+     * @return element ui
+     */
+    ElementUI<?> getPartitionAndRender(String userName,
+                                       Long dataSourceId, String database,
+                                       String table, ElementUI.Type uiType, boolean tableNotExist) throws ExchangisDataSourceException;
+
+}
diff --git a/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/service/DataSourceUIGetter.java b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/service/DataSourceUIGetter.java
new file mode 100644
index 000000000..1b0897518
--- /dev/null
+++ b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/service/DataSourceUIGetter.java
@@ -0,0 +1,20 @@
+package com.webank.wedatasphere.exchangis.datasource.service;
+
+import com.webank.wedatasphere.exchangis.datasource.core.ui.ElementUI;
+import com.webank.wedatasphere.exchangis.datasource.core.ui.viewer.ExchangisDataSourceUIViewer;
+
+import javax.servlet.http.HttpServletRequest;
+import java.util.List;
+
+public interface DataSourceUIGetter {
+
+    // Fetch the data source configuration of a job that has already been created
+    List<ExchangisDataSourceUIViewer> getJobDataSourceUIs(HttpServletRequest request, Long jobId);
+
+    // When creating a job, the front end fetches the parameter settings for the selected data source type
+    List<ElementUI<?>> getDataSourceParamsUI(String dsType, String dir);
+
+    // When creating a job, the front end fetches the parameter settings for the selected engine type
+    List<ElementUI<?>> getJobEngineSettingsUI(String engineType);
+
+}
diff --git a/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/service/ExchangisDataSourceService.java b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/service/ExchangisDataSourceService.java
new file mode 100644
index 000000000..69d4f7d65
--- /dev/null
+++ b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/service/ExchangisDataSourceService.java
@@ -0,0 +1,1404 @@
+package com.webank.wedatasphere.exchangis.datasource.service;
+
+import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
+import 
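+/*
+ * A caller's-eye sketch of the DataSourceUIGetter contract defined above
+ * (hypothetical controller snippet; the bean variable and the "MYSQL"/"DATAX"
+ * type names are placeholders):
+ *
+ *   List<ExchangisDataSourceUIViewer> uis = uiGetter.getJobDataSourceUIs(request, jobId);
+ *   List<ElementUI<?>> params = uiGetter.getDataSourceParamsUI("MYSQL", "DATAX-SOURCE");
+ *   List<ElementUI<?>> settings = uiGetter.getJobEngineSettingsUI("DATAX");
+ *
+ * The dir argument follows the "<ENGINE>-<DIRECTION>" convention that
+ * getDataSourceParamsUI in ExchangisDataSourceService splits on "-".
+ */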
com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.DeserializationFeature; +import com.google.common.base.Strings; +import com.webank.wedatasphere.exchangis.common.UserUtils; +import com.webank.wedatasphere.exchangis.dao.domain.ExchangisJobDsBind; +import com.webank.wedatasphere.exchangis.dao.domain.ExchangisJobParamConfig; +import com.webank.wedatasphere.exchangis.dao.mapper.ExchangisJobDsBindMapper; +import com.webank.wedatasphere.exchangis.dao.mapper.ExchangisJobParamConfigMapper; +import com.webank.wedatasphere.exchangis.datasource.GetDataSourceInfoByIdAndVersionIdAction; +import com.webank.wedatasphere.exchangis.datasource.Utils.RSAUtil; +import com.webank.wedatasphere.exchangis.datasource.core.ExchangisDataSourceDefinition; +import com.webank.wedatasphere.exchangis.datasource.core.context.ExchangisDataSourceContext; +import com.webank.wedatasphere.exchangis.datasource.core.exception.ExchangisDataSourceException; +import com.webank.wedatasphere.exchangis.datasource.core.exception.ExchangisDataSourceExceptionCode; +import com.webank.wedatasphere.exchangis.datasource.core.ui.ElementUI; +import com.webank.wedatasphere.exchangis.datasource.core.ui.ExchangisDataSourceParamsUI; +import com.webank.wedatasphere.exchangis.datasource.core.ui.viewer.ExchangisDataSourceUIViewer; +import com.webank.wedatasphere.exchangis.datasource.core.vo.ExchangisJobInfoContent; +import com.webank.wedatasphere.exchangis.datasource.core.vo.ExchangisJobTransformsContent; +import com.webank.wedatasphere.exchangis.datasource.dto.*; +import com.webank.wedatasphere.exchangis.datasource.linkis.ExchangisLinkisRemoteClient; +import com.webank.wedatasphere.exchangis.datasource.linkis.request.ParamsTestConnectAction; +import com.webank.wedatasphere.exchangis.datasource.linkis.response.ParamsTestConnectResult; +import com.webank.wedatasphere.exchangis.datasource.vo.DataSourceCreateVO; +import com.webank.wedatasphere.exchangis.datasource.vo.DataSourceQueryVO; +import com.webank.wedatasphere.exchangis.datasource.vo.FieldMappingVO; +import com.webank.wedatasphere.exchangis.engine.dao.EngineSettingsDao; +import com.webank.wedatasphere.exchangis.engine.domain.EngineSettings; +import com.webank.wedatasphere.exchangis.job.api.ExchangisJobOpenService; +import com.webank.wedatasphere.exchangis.job.domain.ExchangisJobEntity; +import com.webank.wedatasphere.exchangis.job.exception.ExchangisJobException; +import org.apache.commons.lang3.StringUtils; +import org.apache.linkis.common.exception.ErrorException; +import org.apache.linkis.datasource.client.impl.LinkisDataSourceRemoteClient; +import org.apache.linkis.datasource.client.impl.LinkisMetaDataRemoteClient; +import org.apache.linkis.datasource.client.request.*; +import org.apache.linkis.datasource.client.response.*; +import org.apache.linkis.datasourcemanager.common.domain.DataSource; +import org.apache.linkis.datasourcemanager.common.domain.DataSourceType; +import org.apache.linkis.datasourcemanager.common.exception.JsonErrorException; +import org.apache.linkis.datasourcemanager.common.util.json.Json; +import org.apache.linkis.httpclient.response.Result; +import org.apache.linkis.metadata.query.common.domain.MetaColumnInfo; +import org.apache.linkis.server.Message; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; +import org.springframework.transaction.annotation.Transactional; + +import javax.annotation.Resource; +import 
javax.servlet.http.HttpServletRequest;
+import java.security.PrivateKey;
+import java.security.PublicKey;
+import java.util.*;
+
+import static com.webank.wedatasphere.exchangis.datasource.core.exception.ExchangisDataSourceExceptionCode.*;
+
+@Service
+public class ExchangisDataSourceService extends AbstractDataSourceService implements DataSourceUIGetter {
+
+    private final EngineSettingsDao settingsDao;
+
+    private static final Logger LOGGER = LoggerFactory.getLogger(ExchangisDataSourceService.class);
+
+
+    @Autowired
+    public ExchangisDataSourceService(ExchangisDataSourceContext context,
+                                      ExchangisJobParamConfigMapper exchangisJobParamConfigMapper, EngineSettingsDao settingsDao) {
+        super(context, exchangisJobParamConfigMapper);
+        this.settingsDao = settingsDao;
+        mapper.disable(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES);
+    }
+
+    @Resource
+    private ExchangisJobOpenService jobOpenService;
+
+    @Autowired
+    private ExchangisJobDsBindMapper exchangisJobDsBindMapper;
+
+    @Override
+    public List<ExchangisDataSourceUIViewer> getJobDataSourceUIs(HttpServletRequest request, Long jobId) {
+        if (Objects.isNull(jobId)) {
+            return null;
+        }
+
+        ExchangisJobEntity job;
+        try {
+            job = this.jobOpenService.getJobById(jobId, false);
+        } catch (ExchangisJobException e) {
+            throw new ExchangisDataSourceException
+                    .Runtime(CONTEXT_GET_DATASOURCE_NULL.getCode(), "Fail to get job entity (获得任务信息失败)", e);
+        }
+        if (Objects.isNull(job)) {
+            return null;
+        }
+
+        List<ExchangisJobInfoContent> jobInfoContents = this.parseJobContent(job.getJobContent());
+        List<ExchangisDataSourceUIViewer> uis = new ArrayList<>();
+        for (ExchangisJobInfoContent cnt : jobInfoContents) {
+            cnt.setEngine(job.getEngineType());
+            ExchangisDataSourceUIViewer viewer = buildAllUI(request, job, cnt);
+            uis.add(viewer);
+        }
+
+        return uis;
+    }
+
+    // Fetch parameter configurations by data source type
+    @Override
+    public List<ElementUI<?>> getDataSourceParamsUI(String dsType, String engineAndDirection) {
+
+        ExchangisDataSourceDefinition exchangisDataSource = this.context.getExchangisDsDefinition(dsType);
+        List<ExchangisJobParamConfig> paramConfigs = exchangisDataSource.getDataSourceParamConfigs();
+        List<ExchangisJobParamConfig> filteredConfigs = new ArrayList<>();
+        String[] engineDirect = engineAndDirection.split("-");
+        String direction = engineDirect[1];
+        for (ExchangisJobParamConfig paramConfig : paramConfigs) {
+            // skip configs that carry no direction
+            Optional.ofNullable(paramConfig.getConfigDirection()).ifPresent(configDirection -> {
+                if (configDirection.equalsIgnoreCase(engineAndDirection) || configDirection.equalsIgnoreCase(direction)) {
+                    filteredConfigs.add(paramConfig);
+                }
+            });
+        }
+        return this.buildDataSourceParamsUI(filteredConfigs);
+    }
+
+    @Override
+    public List<ElementUI<?>> getJobEngineSettingsUI(String engineType) {
+        return this.buildJobSettingsUI(engineType);
+    }
+
+    /**
+     * Compare the data sources loaded locally by LocalExchangisDataSourceLoader with the
+     * data sources supported by Linkis, and keep only the data source types that can be
+     * displayed to the front end.
+     */
+    public Message listDataSources(HttpServletRequest request, String engineType, String direct, String sourceType) throws Exception {
+        Collection<ExchangisDataSourceDefinition> all = this.context.all();
+        List<ExchangisDataSourceDefDTO> dtos = new ArrayList<>();
+
+        List<EngineSettings> settingsList = this.settingsDao.getSettings();
+        List<EngineSettings> engineSettings = new ArrayList<>();
+
+
+        if (StringUtils.isEmpty(engineType)) {
+            engineSettings = settingsList;
+        } else {
+            EngineSettings engineSetting = new EngineSettings();
+            for (int i = 0; i < settingsList.size(); i++) {
+                if (StringUtils.equals(settingsList.get(i).getName(), engineType.toLowerCase())) {
+                    engineSetting = settingsList.get(i);
+                    break;
+                }
+            }
+            engineSettings.add(engineSetting);
+        }
+
+        Set<String> directType = new HashSet<>();
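+        /*
+         * Each EngineSettings carries direction rules (source type -> sink type pairs).
+         * The loop below collects into directType the type names usable for the requested
+         * direction: both ends when direct is empty, sources only for direct=source, and
+         * otherwise the sinks reachable from the given sourceType. (A summary of the loop
+         * below, not extra behavior.)
+         */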
+        for (EngineSettings engineSetting : engineSettings) {
+            engineSetting.getDirectionRules().stream().forEach(item -> {
+                String source = item.getSource();
+                String sink = item.getSink();
+                if (StringUtils.isEmpty(direct)) {
+                    directType.add(source);
+                    directType.add(sink);
+                } else if (StringUtils.equals(direct, "source")) {
+                    directType.add(source);
+                } else {
+                    if ((StringUtils.isBlank(sourceType) ||
+                            (StringUtils.isNoneBlank(sourceType) && StringUtils.equals(source, sourceType.toLowerCase())))) {
+                        directType.add(sink);
+                    }
+                }
+            });
+        }
+
+        String userName = UserUtils.getLoginUser(request);
+        LOGGER.info("listDataSources userName: {}", userName);
+        // Compare the data source types fetched from datasourcemanager with the data
+        // sources in the context by type and name; the set of data sources registered
+        // in exchangis is authoritative
+        LinkisDataSourceRemoteClient linkisDataSourceRemoteClient = ExchangisLinkisRemoteClient.getLinkisDataSourceRemoteClient();
+        GetAllDataSourceTypesResult result;
+        try {
+            result = linkisDataSourceRemoteClient.getAllDataSourceTypes(GetAllDataSourceTypesAction.builder()
+                    .setUser(userName)
+                    .build()
+            );
+        } catch (Exception e) {
+            throw new ExchangisDataSourceException(ExchangisDataSourceExceptionCode.CLIENT_DATASOURCE_GET_TYPES_ERROR.getCode(), e.getMessage());
+        }
+
+        if (Objects.isNull(result)) {
+            throw new ExchangisDataSourceException(ExchangisDataSourceExceptionCode.CLIENT_DATASOURCE_GET_TYPES_ERROR.getCode(), "datasource get types null or empty");
+        }
+
+        List<DataSourceType> allDataSourceType = new ArrayList<>();
+        List<DataSourceType> dataSourceTypes = result.getAllDataSourceType();
+        for (int i = 0; i < dataSourceTypes.size(); i++) {
+            if (directType.contains(dataSourceTypes.get(i).getName())) {
+                allDataSourceType.add(dataSourceTypes.get(i));
+            }
+        }
+        if (Objects.isNull(allDataSourceType)) allDataSourceType = Collections.emptyList();
+
+        for (DataSourceType type : allDataSourceType) {
+            LOGGER.info("Current datasource Type is :{}", type.getName());
+            for (ExchangisDataSourceDefinition item : all) {
+                if (item.name().equalsIgnoreCase(type.getName())) {
+                    ExchangisDataSourceDefDTO dto = new ExchangisDataSourceDefDTO(
+                            type.getId(),
+                            type.getClassifier(),
+                            item.name(),
+                            item.structClassifier()
+                    );
+                    // use linkis datasource table field to fill the dto bean
+                    dto.setIcon(type.getIcon());
+                    dto.setDescription(type.getDescription());
+                    dto.setOption(type.getOption());
+                    dtos.add(dto);
+                }
+            }
+        }
+
+        return Message.ok().data("list", dtos);
+    }
+
+    @Transactional
+    public Message create(HttpServletRequest request, /*String type, */DataSourceCreateVO vo) throws Exception {
+        //DataSourceCreateVO vo;
+        Map<String, Object> json;
+        try {
+            json = mapper.readValue(mapper.writeValueAsString(vo), Map.class);
+            json.put("labels", json.get("label"));
+        } catch (JsonProcessingException e) {
+            throw new ExchangisDataSourceException(ExchangisDataSourceExceptionCode.PARSE_JSON_ERROR.getCode(), e.getMessage());
+        }
+        String comment = vo.getComment();
+        String createSystem = vo.getCreateSystem();
+        if (Objects.isNull(comment)) {
+            throw new ExchangisDataSourceException(ExchangisDataSourceExceptionCode.PARAMETER_INVALID.getCode(), "parameter comment should not be null");
+        }
+
+        if (Strings.isNullOrEmpty(createSystem)) {
+            throw new ExchangisDataSourceException(ExchangisDataSourceExceptionCode.PARAMETER_INVALID.getCode(), "parameter createSystem should not be empty");
+        }
+
+
+        String user = UserUtils.getLoginUser(request);
+        LOGGER.info("createDatasource userName:" + user);
+
+        ExchangisDataSourceDefinition dsType = context.getExchangisDsDefinition(vo.getDataSourceTypeId());
+        if (Objects.isNull(dsType)) {
+            throw new ExchangisDataSourceException(CONTEXT_GET_DATASOURCE_NULL.getCode(), "exchangis context get datasource null");
+        }
+
+        LinkisDataSourceRemoteClient client = dsType.getDataSourceRemoteClient();
+        LOGGER.info("create datasource json as follows");
+        Set<Map.Entry<String, Object>> entries = json.entrySet();
+        for (Map.Entry<String, Object> entry : entries) {
+            LOGGER.info("key {} : value {}", entry.getKey(), entry.getValue());
+        }
+//        CreateDataSourceResult result;
+        String responseBody;
+        try {
+
+            Result execute = client.execute(CreateDataSourceAction.builder()
+                    .setUser(user)
+                    .addRequestPayloads(json)
+                    .build()
+            );
+            responseBody = execute.getResponseBody();
+        } catch (Exception e) {
+            throw new ExchangisDataSourceException(ExchangisDataSourceExceptionCode.CLIENT_DATASOURCE_CREATE_ERROR.getCode(), e.getMessage());
+        }
+
+//        if (Objects.isNull(result)) {
+        if (Strings.isNullOrEmpty(responseBody)) {
+            throw new ExchangisDataSourceException(ExchangisDataSourceExceptionCode.CLIENT_DATASOURCE_CREATE_ERROR.getCode(), "datasource create response null or empty");
+        }
+
+        CreateDataSourceSuccessResultDTO result = Json.fromJson(responseBody, CreateDataSourceSuccessResultDTO.class);
+        if (result.getStatus() != 0) {
+            throw new ExchangisDataSourceException(result.getStatus(), result.getMessage());
+        }
+//        Long dataSourceId = result.getInsert_id();
+        Long dataSourceId = result.getData().getId();
+        UpdateDataSourceParameterResult updateDataSourceParameterResult;
+        try {
+            // After creation, publish the data source parameters so that they form a version
+            updateDataSourceParameterResult = client.updateDataSourceParameter(
+                    UpdateDataSourceParameterAction.builder()
+                            .setUser(user)
+                            .setDataSourceId(Long.parseLong(dataSourceId + ""))
+                            .addRequestPayloads(json)
+                            .build()
+            );
+        } catch (Exception e) {
+            throw new ExchangisDataSourceException(ExchangisDataSourceExceptionCode.CLIENT_DATASOURCE_UPDATE_PARAMS_VERSION_ERROR.getCode(), e.getMessage());
+        }
+
+        if (Objects.isNull(updateDataSourceParameterResult)) {
+            throw new ExchangisDataSourceException(ExchangisDataSourceExceptionCode.CLIENT_DATASOURCE_UPDATE_PARAMS_VERSION_ERROR.getCode(), "datasource update params version null or empty");
+        }
+
+        if (updateDataSourceParameterResult.getStatus() != 0) {
+            throw new ExchangisDataSourceException(updateDataSourceParameterResult.getStatus(), updateDataSourceParameterResult.getMessage());
+        }
+        return Message.ok().data("id", dataSourceId);
+    }
+
+    @Transactional
+    public Message updateDataSource(HttpServletRequest request,/* String type,*/ Long id, DataSourceCreateVO vo) throws Exception {
+
+        Map<String, Object> json;
+        try {
+            json = mapper.readValue(mapper.writeValueAsString(vo), Map.class);
+            json.put("labels", json.get("label"));
+        } catch (JsonProcessingException e) {
+            throw new ExchangisDataSourceException(ExchangisDataSourceExceptionCode.PARSE_JSON_ERROR.getCode(), e.getMessage());
+        }
+        String comment = vo.getComment();
+        String createSystem = vo.getCreateSystem();
+        if (Objects.isNull(comment)) {
+            throw new ExchangisDataSourceException(ExchangisDataSourceExceptionCode.PARAMETER_INVALID.getCode(), "parameter comment should not be null");
+        }
+
+        if (Strings.isNullOrEmpty(createSystem)) {
+            throw new ExchangisDataSourceException(ExchangisDataSourceExceptionCode.PARAMETER_INVALID.getCode(), "parameter createSystem should not be empty");
+        }
+        String user = UserUtils.getLoginUser(request);
+        LOGGER.info("updateDataSource userName:" + user);
+
+        LOGGER.info("DataSourceTypeId:" + vo.getDataSourceTypeId());
+        ExchangisDataSourceDefinition dsType = 
context.getExchangisDsDefinition(vo.getDataSourceTypeId()); + if (Objects.isNull(dsType)) { + throw new ExchangisDataSourceException(30401, "exchangis.datasource.null"); + } + + LinkisDataSourceRemoteClient client = dsType.getDataSourceRemoteClient(); +// UpdateDataSourceResult updateDataSourceResult; + String responseBody; + try { + Result execute = client.execute(UpdateDataSourceAction.builder() + .setUser(user) + .setDataSourceId(Long.parseLong(id + "")) + .addRequestPayloads(json) + .build() + ); + responseBody = execute.getResponseBody(); + } catch (Exception e) { + throw new ExchangisDataSourceException(ExchangisDataSourceExceptionCode.CLIENT_DATASOURCE_UPDATE_ERROR.getCode(), e.getMessage()); + } + +// if (Objects.isNull(updateDataSourceResult)) { + if (Strings.isNullOrEmpty(responseBody)) { + throw new ExchangisDataSourceException(ExchangisDataSourceExceptionCode.CLIENT_DATASOURCE_UPDATE_ERROR.getCode(), "datasource update null or empty"); + } + + UpdateDataSourceSuccessResultDTO updateDataSourceResult = Json.fromJson(responseBody, UpdateDataSourceSuccessResultDTO.class); + + if (updateDataSourceResult.getStatus() != 0) { + throw new ExchangisDataSourceException(updateDataSourceResult.getStatus(), updateDataSourceResult.getMessage()); + } + + UpdateDataSourceParameterResult updateDataSourceParameterResult; + try { + updateDataSourceParameterResult = client.updateDataSourceParameter( + UpdateDataSourceParameterAction.builder() + .setDataSourceId(Long.parseLong(id + "")) + .setUser(user) + .addRequestPayloads(json) + .build() + ); + } catch (Exception e) { + throw new ExchangisDataSourceException(ExchangisDataSourceExceptionCode.CLIENT_DATASOURCE_UPDATE_PARAMS_VERSION_ERROR.getCode(), e.getMessage()); + } + + if (Objects.isNull(updateDataSourceParameterResult)) { + throw new ExchangisDataSourceException(ExchangisDataSourceExceptionCode.CLIENT_DATASOURCE_UPDATE_PARAMS_VERSION_ERROR.getCode(), "datasource update params version null or empty"); + } + + if (updateDataSourceParameterResult.getStatus() != 0) { + throw new ExchangisDataSourceException(updateDataSourceParameterResult.getStatus(), updateDataSourceParameterResult.getMessage()); + } + + return Message.ok(); + } + + @Transactional + public Message deleteDataSource(HttpServletRequest request, /*String type,*/ Long id) throws Exception { + + QueryWrapper condition = new QueryWrapper<>(); + condition.eq("source_ds_id", id).or().eq("sink_ds_id", id); + Long inUseCount = Long.valueOf(this.exchangisJobDsBindMapper.selectCount(condition)); + if (inUseCount > 0) { + throw new ExchangisDataSourceException(ExchangisDataSourceExceptionCode.CLIENT_DATASOURCE_DELETE_ERROR.getCode(), "目前存在引用依赖"); + } + + LinkisDataSourceRemoteClient dataSourceRemoteClient = ExchangisLinkisRemoteClient.getLinkisDataSourceRemoteClient(); + // DeleteDataSourceResult result; + + String responseBody; + try { + String user = UserUtils.getLoginUser(request); + LOGGER.info("deleteDataSource userName:" + user); +// result = dataSourceRemoteClient.deleteDataSource( +// new DeleteDataSourceAction.Builder().setUser(user).setResourceId(id+"").builder() +// ); + + Result execute = dataSourceRemoteClient.execute( + new DeleteDataSourceAction.Builder().setUser(user).setDataSourceId(Long.parseLong(id + "")).builder() + ); + responseBody = execute.getResponseBody(); + + } catch (Exception e) { + throw new ExchangisDataSourceException(ExchangisDataSourceExceptionCode.CLIENT_DATASOURCE_DELETE_ERROR.getCode(), e.getMessage()); + } + +// if (Objects.isNull(result)) { + if 
(Strings.isNullOrEmpty(responseBody)) { + throw new ExchangisDataSourceException(ExchangisDataSourceExceptionCode.CLIENT_DATASOURCE_DELETE_ERROR.getCode(), "datasource delete null or empty"); + } + + DeleteDataSourceSuccessResultDTO result = Json.fromJson(responseBody, DeleteDataSourceSuccessResultDTO.class); + + if (result.getStatus() != 0) { + throw new ExchangisDataSourceException(result.getStatus(), result.getMessage()); + } +// return Message.ok().data("id", result.getRemove_id()); + return Message.ok().data("id", result.getData().getId()); + } + + public Message queryDataSourceDBs(HttpServletRequest request, String type, Long id) throws Exception { + ExchangisDataSourceDefinition definition = context.getExchangisDsDefinition(type); + LinkisMetaDataRemoteClient metaDataRemoteClient = definition.getMetaDataRemoteClient(); + + String userName = UserUtils.getLoginUser(request); + LOGGER.info("queryDataSourceDBs userName:" + userName); + MetadataGetDatabasesResult databases; + try { + databases = metaDataRemoteClient.getDatabases(MetadataGetDatabasesAction.builder() +// .setSystem("system") + .setSystem(type) + .setDataSourceId(id) + .setUser(userName) + .build()); + } catch (Exception e) { + throw new ExchangisDataSourceException(ExchangisDataSourceExceptionCode.CLIENT_METADATA_GET_DATABASES_ERROR.getCode(), e.getMessage()); + } + + if (Objects.isNull(databases)) { + throw new ExchangisDataSourceException(ExchangisDataSourceExceptionCode.CLIENT_METADATA_GET_DATABASES_ERROR.getCode(), "metadata get databases null or empty"); + } + + List dbs = Optional.ofNullable(databases.getDbs()).orElse(new ArrayList<>()); + + return Message.ok().data("dbs", dbs); + } + + public Message queryDataSourceDBTables(HttpServletRequest request, String type, Long id, String dbName) throws Exception { + String user = UserUtils.getLoginUser(request); + LOGGER.info("queryDataSourceDBTables userName:" + user); + + ExchangisDataSourceDefinition definition = context.getExchangisDsDefinition(type); + MetadataGetTablesResult tables; + try { + LinkisMetaDataRemoteClient metaDataRemoteClient = definition.getMetaDataRemoteClient(); + tables = metaDataRemoteClient.getTables(MetadataGetTablesAction.builder() + .setSystem(type) + .setDataSourceId(id) + .setDatabase(dbName) + .setUser(user) + .build() + ); + } catch (Exception e) { + throw new ExchangisDataSourceException(ExchangisDataSourceExceptionCode.CLIENT_METADATA_GET_TABLES_ERROR.getCode(), e.getMessage()); + } + + if (Objects.isNull(tables)) { + throw new ExchangisDataSourceException(ExchangisDataSourceExceptionCode.CLIENT_METADATA_GET_TABLES_ERROR.getCode(), "metadata get tables null or empty"); + } + + List tbs = Optional.ofNullable(tables.getTables()).orElse(new ArrayList<>()); + + return Message.ok().data("tbs", tbs); + } + + public Message getJobDataSourceParamsUI(Long jobId) { + if (Objects.isNull(jobId)) { + return null; + } + + ExchangisJobEntity job; + try { + job = this.jobOpenService.getJobById(jobId, false); + } catch (ExchangisJobException e) { + throw new ExchangisDataSourceException + .Runtime(CONTEXT_GET_DATASOURCE_NULL.getCode(), "Fail to get job entity (获得任务信息失败)", e); + } + + if (Objects.isNull(job)) { + return null; + } + + List jobInfoContents = this.parseJobContent(job.getJobContent()); + List uis = new ArrayList<>(); + for (ExchangisJobInfoContent cnt : jobInfoContents) { + uis.add(this.buildDataSourceParamsUI(cnt)); + } + + return Message.ok().data("ui", uis); + } + + public Message getJobDataSourceTransformsUI(Long jobId) { + if 
(Objects.isNull(jobId)) { + return null; + } + + ExchangisJobEntity job; + try { + job = this.jobOpenService.getJobById(jobId, false); + } catch (ExchangisJobException e) { + throw new ExchangisDataSourceException + .Runtime(CONTEXT_GET_DATASOURCE_NULL.getCode(), "Fail to get job entity (获得任务信息失败)", e); + } + if (Objects.isNull(job)) { + return null; + } + + String jobContent = job.getJobContent(); + ExchangisJobInfoContent content; + // 转换 content + if (Strings.isNullOrEmpty(jobContent)) { + content = new ExchangisJobInfoContent(); + } else { + try { + content = this.mapper.readValue(jobContent, ExchangisJobInfoContent.class); + } catch (JsonProcessingException e) { + content = new ExchangisJobInfoContent(); + } + } + + // ----------- 构建 dataSourceTransformsUI + ExchangisJobTransformsContent transforms = content.getTransforms(); + + return Message.ok().data("ui", transforms); + } + + public Message getJobDataSourceSettingsUI(Long jobId, String jobName) throws Exception { + if (Objects.isNull(jobId) || Strings.isNullOrEmpty(jobName)) { + return null; + } + + ExchangisJobEntity job; + try { + job = this.jobOpenService.getJobById(jobId, false); + } catch (ExchangisJobException e) { + throw new ExchangisDataSourceException + .Runtime(CONTEXT_GET_DATASOURCE_NULL.getCode(), "Fail to get job entity (获得任务信息失败)", e); + } + if (Objects.isNull(job)) { + return null; + } + + List contents = this.parseJobContent(job.getJobContent()); + + for (ExchangisJobInfoContent content : contents) { + if (content.getSubJobName().equalsIgnoreCase(jobName)) { + List> uis = this.buildJobSettingsUI(job.getEngineType(), content); + return Message.ok().data("uis", uis); + } + } + + return Message.ok().data("ui", Collections.emptyList()); + + } + + public Message queryDataSourceDBTableFields(HttpServletRequest request, String type, Long id, String dbName, String tableName) throws Exception { + ExchangisDataSourceDefinition definition = context.getExchangisDsDefinition(type); + LinkisMetaDataRemoteClient metaDataRemoteClient = definition.getMetaDataRemoteClient(); + + String user = UserUtils.getLoginUser(request); + LOGGER.info("queryDataSourceDBTableFields userName:" + user); + List allColumns; + try { + MetadataGetColumnsResult columns = metaDataRemoteClient.getColumns(MetadataGetColumnsAction.builder() + .setSystem(type) + .setDataSourceId(id) + .setDatabase(dbName) + .setTable(tableName) + .setUser(user) + .build()); + + allColumns = columns.getAllColumns(); + } catch (Exception e) { + throw new ExchangisDataSourceException(CLIENT_METADATA_GET_COLUMNS_ERROR.getCode(), e.getMessage()); + } + + if (Objects.isNull(allColumns)) { + throw new ExchangisDataSourceException(CLIENT_METADATA_GET_COLUMNS_ERROR.getCode(), "metadata get columns null or empty"); + } + + List list = new ArrayList<>(); + allColumns.forEach(col -> { + DataSourceDbTableColumnDTO item = new DataSourceDbTableColumnDTO(); + item.setName(col.getName()); + item.setType(col.getType()); + list.add(item); + }); + + return Message.ok().data("columns", list); + } + + public Message queryDataSources(HttpServletRequest request, DataSourceQueryVO vo) throws Exception { + if (null == vo) { + vo = new DataSourceQueryVO(); + } + String username = UserUtils.getLoginUser(request); + LOGGER.info("queryDataSources userName:" + username); + Integer page = Objects.isNull(vo.getPage()) ? 1 : vo.getPage(); + Integer pageSize = Objects.isNull(vo.getPageSize()) ? 100 : vo.getPageSize(); + + String dataSourceName = Objects.isNull(vo.getName()) ? 
"" : vo.getName().replace("_", "\\_"); + LinkisDataSourceRemoteClient linkisDataSourceRemoteClient = ExchangisLinkisRemoteClient.getLinkisDataSourceRemoteClient(); + QueryDataSourceResult result; + int totalPage = 0; + try { + QueryDataSourceAction.Builder builder = QueryDataSourceAction.builder() + .setSystem("system") + .setName(dataSourceName) + .setIdentifies("") + .setCurrentPage(page) + .setUser(username) + .setPageSize(pageSize); + + Long typeId = vo.getTypeId(); + if (!Objects.isNull(typeId)) { + builder.setTypeId(typeId); + } +// if (!Strings.isNullOrEmpty(dataSourceName)) { +// builder.setName(dataSourceName); +// } + if (!Strings.isNullOrEmpty(vo.getTypeName())) { + builder.setSystem(vo.getTypeName()); + } + + QueryDataSourceAction action = builder.build(); + result = linkisDataSourceRemoteClient.queryDataSource(action); + totalPage = result.getTotalPage(); + } catch (Exception e) { + if (e instanceof ErrorException) { + ErrorException ee = (ErrorException) e; + throw new ExchangisDataSourceException(ExchangisDataSourceExceptionCode.CLIENT_QUERY_DATASOURCE_ERROR.getCode(), e.getMessage(), ee.getIp(), ee.getPort(), ee.getServiceKind()); + } else { + throw new ExchangisDataSourceException(ExchangisDataSourceExceptionCode.CLIENT_QUERY_DATASOURCE_ERROR.getCode(), e.getMessage()); + } + } + if (Objects.isNull(result)) { + throw new ExchangisDataSourceException(ExchangisDataSourceExceptionCode.CLIENT_QUERY_DATASOURCE_ERROR.getCode(), "datasource query response body null or empty"); + } + + List allDataSource = result.getAllDataSource(); + + List originDataSources = new ArrayList<>(); + List dataSources = new ArrayList<>(); + allDataSource.forEach(ds -> { + ExchangisDataSourceDTO item = new ExchangisDataSourceDTO(); + item.setId(ds.getId()); + item.setCreateIdentify(ds.getCreateIdentify()); + item.setName(ds.getDataSourceName()); + item.setType(ds.getCreateSystem()); + item.setCreateSystem(ds.getCreateSystem()); + item.setDataSourceTypeId(ds.getDataSourceTypeId()); + item.setLabels(ds.getLabels()); + item.setLabel(ds.getLabels()); + item.setDesc(ds.getDataSourceDesc()); + item.setCreateUser(ds.getCreateUser()); + item.setModifyUser(ds.getModifyUser()); + item.setModifyTime(ds.getModifyTime()); + item.setVersionId(ds.getVersionId()); + item.setExpire(ds.isExpire()); + item.setReadAble(true); + item.setWriteAble(true); + item.setAuthDbs(""); + item.setAuthTbls(""); + originDataSources.add(item); + }); + + String direct = vo.getDirect(); + LOGGER.info("direct is: {}", direct); + LOGGER.info("originDatasource is: {}", originDataSources); + if (direct!=null) { + if ("source".equals(direct)) { + for (ExchangisDataSourceDTO originDataSource : originDataSources) { + if (originDataSource.isReadAble()) { + dataSources.add(originDataSource); + } + } + } else if ("sink".equals(direct)) { + for (ExchangisDataSourceDTO originDataSource : originDataSources) { + if (originDataSource.isReadAble()) { + dataSources.add(originDataSource); + } + } + } + } + else { + dataSources.addAll(originDataSources); + } + Message message = Message.ok(); + message.data("list", dataSources); + message.data("total", totalPage); + return message; + //return Message.ok().data("list", dataSources); + } + + public Message listAllDataSources(HttpServletRequest request, String typeName, Long typeId, Integer page, Integer pageSize) throws ExchangisDataSourceException { + String userName = UserUtils.getLoginUser(request); + LOGGER.info("listAllDataSources userName:" + userName); + + LinkisDataSourceRemoteClient 
linkisDataSourceRemoteClient = ExchangisLinkisRemoteClient.getLinkisDataSourceRemoteClient(); + QueryDataSourceAction.Builder builder = QueryDataSourceAction.builder() + .setSystem("system") + .setIdentifies("") + .setUser(userName); + + if (!Strings.isNullOrEmpty(typeName)) { + builder.setName(typeName); + } + if (!Objects.isNull(typeId)) { + builder.setTypeId(typeId); + } + if (!Objects.isNull(page)) { + builder.setCurrentPage(page); + } else { + builder.setCurrentPage(1); + } + if (!Objects.isNull(pageSize)) { + builder.setPageSize(pageSize); + } else { + builder.setPageSize(200); + } + + List allDataSource; + try { + QueryDataSourceResult result = linkisDataSourceRemoteClient.queryDataSource(builder.build()); + allDataSource = result.getAllDataSource(); + } catch (Exception e) { + throw new ExchangisDataSourceException(ExchangisDataSourceExceptionCode.CLIENT_QUERY_DATASOURCE_ERROR.getCode(), e.getMessage()); + } + List dataSources = new ArrayList<>(); + if (!Objects.isNull(allDataSource)) { + allDataSource.forEach(ds -> { + ExchangisDataSourceDTO item = new ExchangisDataSourceDTO(); + item.setId(ds.getId()); + item.setCreateIdentify(ds.getCreateIdentify()); + item.setName(ds.getDataSourceName()); + item.setType(ds.getDataSourceType().getName()); + item.setDataSourceTypeId(ds.getDataSourceTypeId()); + item.setModifyTime(ds.getModifyTime()); + item.setModifyUser(ds.getModifyUser()); + item.setCreateSystem(ds.getCreateSystem()); + item.setCreateIdentify(ds.getCreateIdentify()); + item.setCreateUser(ds.getCreateUser()); + item.setExpire(ds.isExpire()); + item.setLabels(ds.getLabels()); + item.setDesc(ds.getDataSourceDesc()); + item.setVersionId(ds.getVersionId()); + dataSources.add(item); + }); + } + return Message.ok().data("list", dataSources); + } + + public Message getDataSource(HttpServletRequest request, Long id, String versionId) throws ErrorException { + String userName = UserUtils.getLoginUser(request); + LOGGER.info("getDataSource userName:" + userName); + GetDataSourceInfoResultDTO result; + if (Strings.isNullOrEmpty(versionId)) { + result = getDataSource(userName, id); + } else { + result = getDataSourceByIdAndVersionId(userName, id, versionId); + } +// GetDataSourceInfoResultDTO result = getDataSource(userName, id); + return Message.ok().data("info", result.getData().getInfo()); + } + + public GetDataSourceInfoResultDTO getDataSourceByIdAndVersionId(String userName, Long id, String versionId) throws ErrorException { + LinkisDataSourceRemoteClient linkisDataSourceRemoteClient = ExchangisLinkisRemoteClient.getLinkisDataSourceRemoteClient(); + try { + Result execute = linkisDataSourceRemoteClient.execute( + GetDataSourceInfoByIdAndVersionIdAction.builder().setSystem("system").setUser(userName).setDataSourceId(id).setVersionId(versionId).build() + ); + String responseBody = execute.getResponseBody(); + GetDataSourceInfoResultDTO result = Json.fromJson(responseBody, GetDataSourceInfoResultDTO.class); + if (result.getStatus() != 0) { + throw new ExchangisDataSourceException(result.getStatus(), result.getMessage()); + } + return result; + } catch (Exception e) { + if (e instanceof ErrorException) { + ErrorException ee = (ErrorException) e; + throw new ExchangisDataSourceException(ExchangisDataSourceExceptionCode.CLIENT_QUERY_DATASOURCE_ERROR.getCode(), e.getMessage(), ee.getIp(), ee.getPort(), ee.getServiceKind()); + } else { + throw new ExchangisDataSourceException(ExchangisDataSourceExceptionCode.CLIENT_QUERY_DATASOURCE_ERROR.getCode(), e.getMessage()); + } + } + + } + + + public 
GetDataSourceInfoResultDTO getDataSource(String userName, Long id) throws ErrorException { + LinkisDataSourceRemoteClient linkisDataSourceRemoteClient = ExchangisLinkisRemoteClient.getLinkisDataSourceRemoteClient(); + try { + Result execute = linkisDataSourceRemoteClient.execute( + GetInfoByDataSourceIdAction.builder().setSystem("system").setUser(userName).setDataSourceId(id).build() + ); + String responseBody = execute.getResponseBody(); + GetDataSourceInfoResultDTO result = Json.fromJson(responseBody, GetDataSourceInfoResultDTO.class); + if (result.getStatus() != 0) { + throw new ExchangisDataSourceException(result.getStatus(), result.getMessage()); + } + return result; + } catch (Exception e) { + if (e instanceof ErrorException) { + ErrorException ee = (ErrorException) e; + throw new ExchangisDataSourceException(ExchangisDataSourceExceptionCode.CLIENT_QUERY_DATASOURCE_ERROR.getCode(), e.getMessage(), ee.getIp(), ee.getPort(), ee.getServiceKind()); + } else { + throw new ExchangisDataSourceException(ExchangisDataSourceExceptionCode.CLIENT_QUERY_DATASOURCE_ERROR.getCode(), e.getMessage()); + } + } + } + + public Map getMetadata(String username, Long id, String type, String database, String table) throws ErrorException { + Map metas = new HashMap<>(); + GetDataSourceInfoResultDTO datasource = this.getDataSource(username, id); + Optional.ofNullable(datasource).ifPresent(ds -> { + Optional.ofNullable(ds.getData()).ifPresent(data -> { + Optional.ofNullable(data.getInfo()).ifPresent(info -> { + Optional.of(info.getConnectParams()).ifPresent(metas::putAll); + }); + }); + }); + + MetadataGetColumnsResultDTO columns = this.getDatasourceColumns(username, id, database, table); + + StringBuilder primaryKeys = new StringBuilder(); + Optional.ofNullable(columns).ifPresent(c -> { + Optional.ofNullable(c.getData()).ifPresent(data -> { + Optional.ofNullable(data.getColumns()).ifPresent(_cs -> { + _cs.stream().filter(MetadataGetColumnsResultDTO.Column::isPrimaryKey).forEach(_c -> { + primaryKeys.append(_c.getName()).append(","); + }); + }); + }); + }); + if (primaryKeys.length() > 0) { + metas.put("primary-keys", primaryKeys.toString().substring(0, primaryKeys.length() - 1)); + } + + MetadataGetTablePropsResultDTO metadata = this.getDatasourceMetadata(username, id, database, table); + Optional.ofNullable(metadata).ifPresent(meta -> { + Optional.ofNullable(meta.getData()).ifPresent(data -> { + Optional.ofNullable(data.getProps()).ifPresent(props -> { + props.forEach((k, v) -> { + switch (k) { + case "columns.types": + metas.put("columns-types", v); + break; + case "transient_lastDdlTime": + metas.put("transient-last-ddl-time", v); + break; + case "partition_columns.types": + metas.put("partition-columns-types", v); + break; + case "columns.comments": + metas.put("columns-comments", v); + break; + case "bucket_count": + metas.put("bucket-count", v); + break; + case "serialization.ddl": + metas.put("serialization-ddl", v); + break; + case "file.outputformat": + metas.put("file-outputformat", v); + break; + case "partition_columns": + metas.put("partition-columns", v); + break; + case "serialization.lib": + metas.put("serialization-lib", v); + break; + case "file.inputformat": + metas.put("file-inputformat", v); + break; + case "serialization.format": + metas.put("serialization-format", v); + break; + case "column.name.delimiter": + metas.put("column-name-delimiter", v); + break; + default: + metas.put(k, v); + } + }); + }); + }); + }); + + return metas; + } + + private MetadataGetColumnsResultDTO 
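+/*
+ * Key normalization in getMetadata above: Hive-style table property keys are
+ * rewritten to hyphenated form, e.g. "transient_lastDdlTime" ->
+ * "transient-last-ddl-time", "columns.types" -> "columns-types",
+ * "partition_columns" -> "partition-columns"; unrecognized keys pass through
+ * unchanged, and primary-key column names are comma-joined under "primary-keys".
+ * So an input props map {"partition_columns": "ds", "foo": "bar"} surfaces as
+ * {"partition-columns": "ds", "foo": "bar"} (illustrative values).
+ */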
getDatasourceColumns(String username, Long id, String database, String table) throws ExchangisDataSourceException { + LinkisDataSourceRemoteClient linkisDataSourceRemoteClient = ExchangisLinkisRemoteClient.getLinkisDataSourceRemoteClient(); + Result execute = linkisDataSourceRemoteClient.execute( + MetadataGetColumnsAction.builder() + .setSystem("system") + .setUser(username) + .setDataSourceId(id) + .setDatabase(database) + .setTable(table) + .build() + ); + String responseBody = execute.getResponseBody(); + + MetadataGetColumnsResultDTO result = null; + try { + result = Json.fromJson(responseBody, MetadataGetColumnsResultDTO.class); + if (result.getStatus() != 0) { + throw new ExchangisDataSourceException(result.getStatus(), result.getMessage()); + } + } catch (JsonErrorException e) { + throw new ExchangisDataSourceException(CLIENT_METADATA_GET_COLUMNS_ERROR.getCode(), + "Fail to deserialize the columns resultSet", e); + } + + return result; + } + + private MetadataGetTablePropsResultDTO getDatasourceMetadata(String username, Long id, String database, String table) throws ExchangisDataSourceException { + + LinkisDataSourceRemoteClient linkisDataSourceRemoteClient = ExchangisLinkisRemoteClient.getLinkisDataSourceRemoteClient(); + Result execute = linkisDataSourceRemoteClient.execute( + MetadataGetTablePropsAction.builder() + .setSystem("system") + .setUser(username) + .setDataSourceId(id) + .setDatabase(database) + .setTable(table) + .build() + ); + String responseBody = execute.getResponseBody(); + + MetadataGetTablePropsResultDTO result = null; + try { + result = Json.fromJson(responseBody, MetadataGetTablePropsResultDTO.class); + if (result.getStatus() != 0) { + throw new ExchangisDataSourceException(result.getStatus(), result.getMessage()); + } + } catch (JsonErrorException e) { + throw new ExchangisDataSourceException(CLIENT_METADATA_GET_TABLES_ERROR.getCode(), + "Fail to deserialize the properties resultSet of table: [" + table + "], database: [" + database +"]", e); + } + return result; + + } + + public Message getDataSourceVersionsById(HttpServletRequest request, Long id) throws ErrorException { + LinkisDataSourceRemoteClient linkisDataSourceRemoteClient = ExchangisLinkisRemoteClient.getLinkisDataSourceRemoteClient(); + String userName = UserUtils.getLoginUser(request); + LOGGER.info("getDataSourceVersionsById userName:" + userName); +// GetInfoByDataSourceIdResult result; + GetDataSourceInfoResultDTO result; + try { + // 先根据ID获取数据源详情 +// result = linkisDataSourceRemoteClient.getInfoByDataSourceId( +// GetInfoByDataSourceIdAction.builder().setSystem("system").setUser(userName).setDataSourceId(id).build() +// ); + + Result execute = linkisDataSourceRemoteClient.execute( + GetInfoByDataSourceIdAction.builder().setSystem("system").setUser(userName).setDataSourceId(id).build() + ); + String responseBody = execute.getResponseBody(); + + result = Json.fromJson(responseBody, GetDataSourceInfoResultDTO.class); + +// GetInfoByDataSourceIdResult result = linkisDataSourceRemoteClient.getInfoByDataSourceId( +// GetInfoByDataSourceIdAction.builder().setSystem("system").setUser(userName).setDataSourceId(id).build() +// ); + if (result.getStatus() != 0) { + throw new ExchangisDataSourceException(result.getStatus(), result.getMessage()); + } +// return Message.ok().data("info", result.getData().getInfo()); + + } catch (Exception e) { + if (e instanceof ErrorException) { + ErrorException ee = (ErrorException) e; + throw new ExchangisDataSourceException(CLIENT_GET_DATASOURCE_ERROR.getCode(), 
e.getMessage(), ee.getIp(), ee.getPort(), ee.getServiceKind()); + } else { + throw new ExchangisDataSourceException(CLIENT_GET_DATASOURCE_ERROR.getCode(), e.getMessage()); + } + } +// if (Objects.isNull(result)) { +// throw new ExchangisDataSourceException(ExchangisDataSourceExceptionCode.CLIENT_GET_DATASOURCE_ERROR.getCode(), "response body null or empty"); +// } + + if (result.getStatus() != 0) { + throw new ExchangisDataSourceException(result.getStatus(), result.getMessage()); + } + + GetDataSourceInfoResultDTO.DataSourceItemDTO info = result.getData().getInfo(); + Integer publishedVersionId = info.getPublishedVersionId(); +// Long publishedVersionId = null; +// Map info = result.getInfo(); + +// if (!Objects.isNull(info)) { +// publishedVersionId = Long.parseLong(info.getOrDefault("publishedVersionId", "-1").toString()); +// } + + GetDataSourceVersionsResult versionsResult; + try { + versionsResult = linkisDataSourceRemoteClient.getDataSourceVersions( + new GetDataSourceVersionsAction.Builder().setUser(userName).setDataSourceId(Long.parseLong(id + "")).build() + ); + } catch (Exception e) { + if (e instanceof ErrorException) { + ErrorException ee = (ErrorException) e; + throw new ExchangisDataSourceException(CLIENT_GET_DATASOURCE_VERSION_ERROR.getCode(), e.getMessage(), ee.getIp(), ee.getPort(), ee.getServiceKind()); + } else { + throw new ExchangisDataSourceException(CLIENT_GET_DATASOURCE_VERSION_ERROR.getCode(), e.getMessage()); + } + } + if (Objects.isNull(versionsResult)) { + throw new ExchangisDataSourceException(CLIENT_GET_DATASOURCE_VERSION_ERROR.getCode(), "datasource version response body null or empty"); + } + + if (versionsResult.getStatus() != 0) { + throw new ExchangisDataSourceException(versionsResult.getStatus(), versionsResult.getMessage()); + } + + + List> versions = versionsResult.getVersions(); + + if (!Objects.isNull(versions) && !Objects.isNull(publishedVersionId)) { + for (Map version : versions) { + Object versionId = version.get("versionId"); + if (Objects.isNull(versionId)) { + continue; + } + int vid = Integer.parseInt(versionId.toString()); + if (vid == publishedVersionId) { + version.put("published", true); + } + } + } + + versions.sort((o1, o2) -> { + Object vid1 = o1.get("versionId"); + Object vid2 = o2.get("versionId"); + int a1 = 0, a2 = 0; + if (Objects.nonNull(vid1)) { + a1 = Integer.parseInt(vid1.toString()); + } + if (Objects.nonNull(vid2)) { + a2 = Integer.parseInt(vid2.toString()); + } + return a2 - a1; + }); + return Message.ok().data("versions", versions); + } + + public Message testConnect(HttpServletRequest request, Long id, Long version) throws ErrorException { + LinkisDataSourceRemoteClient linkisDataSourceRemoteClient = ExchangisLinkisRemoteClient.getLinkisDataSourceRemoteClient(); + String userName = UserUtils.getLoginUser(request); + LOGGER.info("testConnect userName:" + userName); + DataSourceTestConnectResult result; + try { + result = linkisDataSourceRemoteClient.getDataSourceTestConnect( + new DataSourceTestConnectAction.Builder().setUser(userName).setDataSourceId(Long.parseLong(id + "")).setVersion(version + "").build() + ); + } catch (Exception e) { + if (e instanceof ErrorException) { + ErrorException ee = (ErrorException) e; + throw new ExchangisDataSourceException(ExchangisDataSourceExceptionCode.CLIENT_DATASOURCE_TEST_CONNECTION_ERROR.getCode(), e.getMessage(), ee.getIp(), ee.getPort(), ee.getServiceKind()); + } else { + throw new 
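+/*
+ * The version sort in getDataSourceVersionsById above orders newest-first by
+ * parsing versionId to int, with missing ids treated as 0. An equivalent, more
+ * declarative sketch:
+ *   versions.sort(Comparator.comparingInt(
+ *           (Map<String, Object> v) -> Objects.isNull(v.get("versionId"))
+ *                   ? 0 : Integer.parseInt(v.get("versionId").toString()))
+ *       .reversed());
+ */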
ExchangisDataSourceException(ExchangisDataSourceExceptionCode.CLIENT_DATASOURCE_TEST_CONNECTION_ERROR.getCode(), e.getMessage());
+            }
+        }
+
+        if (Objects.isNull(result)) {
+            throw new ExchangisDataSourceException(ExchangisDataSourceExceptionCode.CLIENT_DATASOURCE_TEST_CONNECTION_ERROR.getCode(), "datasource test connection response body null or empty");
+        }
+
+        if (result.getStatus() != 0) {
+            throw new ExchangisDataSourceException(result.getStatus(), result.getMessage());
+        }
+
+        return Message.ok();
+    }
+
+    public Message testConnectByVo(HttpServletRequest request, DataSourceCreateVO vo) throws ErrorException {
+        LinkisDataSourceRemoteClient linkisDataSourceRemoteClient = ExchangisLinkisRemoteClient.getLinkisDataSourceRemoteClient();
+        String userName = UserUtils.getLoginUser(request);
+        LOGGER.info("testConnectByVo userName:" + userName);
+
+        Map<String, Object> json;
+        try {
+            json = mapper.readValue(mapper.writeValueAsString(vo), Map.class);
+            // Linkis expects the label under the "labels" key
+            json.put("labels", json.get("label"));
+        } catch (JsonProcessingException e) {
+            throw new ExchangisDataSourceException(ExchangisDataSourceExceptionCode.PARSE_JSON_ERROR.getCode(), e.getMessage());
+        }
+        ParamsTestConnectResult result;
+        try {
+            result = (ParamsTestConnectResult) linkisDataSourceRemoteClient.execute(
+                    new ParamsTestConnectAction(json, userName)
+            );
+        } catch (Exception e) {
+            if (e instanceof ErrorException) {
+                ErrorException ee = (ErrorException) e;
+                throw new ExchangisDataSourceException(ExchangisDataSourceExceptionCode.CLIENT_DATASOURCE_TEST_CONNECTION_ERROR.getCode(), e.getMessage(), ee.getIp(), ee.getPort(), ee.getServiceKind());
+            } else {
+                throw new ExchangisDataSourceException(ExchangisDataSourceExceptionCode.CLIENT_DATASOURCE_TEST_CONNECTION_ERROR.getCode(), e.getMessage());
+            }
+        }
+
+        if (Objects.isNull(result)) {
+            throw new ExchangisDataSourceException(ExchangisDataSourceExceptionCode.CLIENT_DATASOURCE_TEST_CONNECTION_ERROR.getCode(), "datasource test connection response body null or empty");
+        }
+
+        if (result.getStatus() != 0) {
+            throw new ExchangisDataSourceException(result.getStatus(), result.getMessage());
+        }
+
+        return Message.ok();
+    }
+
+    public Message publishDataSource(HttpServletRequest request, Long id, Long version) throws ErrorException {
+        LinkisDataSourceRemoteClient linkisDataSourceRemoteClient = ExchangisLinkisRemoteClient.getLinkisDataSourceRemoteClient();
+
+        String userName = UserUtils.getLoginUser(request);
+        LOGGER.info("publishDataSource userName:" + userName);
+        PublishDataSourceVersionResult result;
+        try {
+            result = linkisDataSourceRemoteClient.publishDataSourceVersion(
+                    new PublishDataSourceVersionAction.Builder().setUser(userName).setDataSourceId(id).setVersion(version).build()
+            );
+        } catch (Exception e) {
+            if (e instanceof ErrorException) {
+                ErrorException ee = (ErrorException) e;
+                throw new ExchangisDataSourceException(ExchangisDataSourceExceptionCode.CLIENT_DATASOURCE_PUBLISH_VERSION_ERROR.getCode(), e.getMessage(), ee.getIp(), ee.getPort(), ee.getServiceKind());
+            } else {
+                throw new ExchangisDataSourceException(ExchangisDataSourceExceptionCode.CLIENT_DATASOURCE_PUBLISH_VERSION_ERROR.getCode(), e.getMessage());
+            }
+        }
+        if (Objects.isNull(result)) {
+            throw new ExchangisDataSourceException(ExchangisDataSourceExceptionCode.CLIENT_DATASOURCE_PUBLISH_VERSION_ERROR.getCode(), "datasource publish version response body null or empty");
+        }
+
+        if (result.getStatus() != 0) {
+            throw new ExchangisDataSourceException(result.getStatus(), result.getMessage());
+        }
+
+        return Message.ok();
+    }
+
+    public Message getDataSourceConnectParamsById(HttpServletRequest request, Long id) throws ErrorException {
+        String userName = UserUtils.getLoginUser(request);
+        LOGGER.info("getDataSourceConnectParamsById userName:" + userName);
+        GetConnectParamsByDataSourceIdResult result = getDataSourceConnectParamsById(userName, id);
+        return Message.ok().data("info", Objects.isNull(result.getConnectParams()) ? null : result.getConnectParams());
+    }
+
+    @Transactional
+    public Message expireDataSource(HttpServletRequest request, Long id) throws ErrorException {
+        LinkisDataSourceRemoteClient linkisDataSourceRemoteClient = ExchangisLinkisRemoteClient.getLinkisDataSourceRemoteClient();
+
+        String responseBody;
+        String userName = UserUtils.getLoginUser(request);
+        LOGGER.info("expireDataSource userName:" + userName);
+        try {
+            Result execute = linkisDataSourceRemoteClient.execute(
+                    new ExpireDataSourceAction.Builder().setUser(userName).setDataSourceId(id).build()
+            );
+            responseBody = execute.getResponseBody();
+        } catch (Exception e) {
+            if (e instanceof ErrorException) {
+                ErrorException ee = (ErrorException) e;
+                throw new ExchangisDataSourceException(ExchangisDataSourceExceptionCode.CLIENT_DATASOURCE_EXPIRE_ERROR.getCode(), e.getMessage(), ee.getIp(), ee.getPort(), ee.getServiceKind());
+            } else {
+                throw new ExchangisDataSourceException(ExchangisDataSourceExceptionCode.CLIENT_DATASOURCE_EXPIRE_ERROR.getCode(), e.getMessage());
+            }
+        }
+        if (Strings.isNullOrEmpty(responseBody)) {
+            throw new ExchangisDataSourceException(ExchangisDataSourceExceptionCode.CLIENT_DATASOURCE_EXPIRE_ERROR.getCode(), "datasource expire response body null or empty");
+        }
+
+        ExpireDataSourceSuccessResultDTO result = Json.fromJson(responseBody, ExpireDataSourceSuccessResultDTO.class);
+
+        if (result.getStatus() != 0) {
+            throw new ExchangisDataSourceException(result.getStatus(), result.getMessage());
+        }
+
+        return Message.ok();
+    }
+
+    public GetConnectParamsByDataSourceIdResult getDataSourceConnectParamsById(String userName, Long id) throws ErrorException {
+        LinkisDataSourceRemoteClient linkisDataSourceRemoteClient = ExchangisLinkisRemoteClient.getLinkisDataSourceRemoteClient();
+        LOGGER.info("getDataSourceConnectParamsById userName:" + userName);
+        GetConnectParamsByDataSourceIdResult result;
+        try {
+            result = linkisDataSourceRemoteClient.getConnectParams(
+                    GetConnectParamsByDataSourceIdAction.builder().setSystem("system").setUser(userName).setDataSourceId(id).build()
+            );
+        } catch (Exception e) {
+            if (e instanceof ErrorException) {
+                ErrorException ee = (ErrorException) e;
+                throw new ExchangisDataSourceException(ExchangisDataSourceExceptionCode.CLIENT_DATASOURCE_PARAMS_GET_ERROR.getCode(), e.getMessage(), ee.getIp(), ee.getPort(), ee.getServiceKind());
+            } else {
+                throw new ExchangisDataSourceException(ExchangisDataSourceExceptionCode.CLIENT_DATASOURCE_PARAMS_GET_ERROR.getCode(), e.getMessage());
+            }
+        }
+        if (Objects.isNull(result)) {
+            throw new ExchangisDataSourceException(ExchangisDataSourceExceptionCode.CLIENT_DATASOURCE_PARAMS_GET_ERROR.getCode(), "datasource params get response body null or empty");
+        }
+
+        if (result.getStatus() != 0) {
+            throw new
ExchangisDataSourceException(result.getStatus(), result.getMessage());
+        }
+        return result;
+    }
+
+    public Message getDataSourceKeyDefine(HttpServletRequest request, Long dataSourceTypeId) throws ErrorException {
+        if (Objects.isNull(dataSourceTypeId)) {
+            throw new ExchangisDataSourceException(ExchangisDataSourceExceptionCode.PARAMETER_INVALID.getCode(), "dataSourceType id should not be null");
+        }
+        Message message = Message.ok();
+        LinkisDataSourceRemoteClient linkisDataSourceRemoteClient = ExchangisLinkisRemoteClient.getLinkisDataSourceRemoteClient();
+
+        String userName = UserUtils.getLoginUser(request);
+        LOGGER.info("getDataSourceKeyDefine userName:" + userName);
+        GetKeyTypeDatasourceResult result;
+        try {
+            GetKeyTypeDatasourceAction action = new GetKeyTypeDatasourceAction.Builder().setUser(userName).setDataSourceTypeId(dataSourceTypeId).build();
+            result = linkisDataSourceRemoteClient.getKeyDefinitionsByType(action);
+        } catch (Exception e) {
+            if (e instanceof ErrorException) {
+                ErrorException ee = (ErrorException) e;
+                throw new ExchangisDataSourceException(ExchangisDataSourceExceptionCode.CLIENT_DATASOURCE_GET_KEY_DEFINES_ERROR.getCode(), e.getMessage(), ee.getIp(), ee.getPort(), ee.getServiceKind());
+            } else {
+                throw new ExchangisDataSourceException(ExchangisDataSourceExceptionCode.CLIENT_DATASOURCE_GET_KEY_DEFINES_ERROR.getCode(), e.getMessage());
+            }
+        }
+
+        if (Objects.isNull(result)) {
+            throw new ExchangisDataSourceException(ExchangisDataSourceExceptionCode.CLIENT_DATASOURCE_GET_KEY_DEFINES_ERROR.getCode(), "get datasource type key defines response body null or empty");
+        }
+
+        if (result.getStatus() != 0) {
+            throw new ExchangisDataSourceException(result.getStatus(), result.getMessage());
+        }
+
+        message.data("list", Objects.isNull(result.getKeyDefine()) ? null : result.getKeyDefine());
+        return message;
+    }
+
+    public void checkDSSupportDegree(String engine, String sourceDsType, String sinkDsType) throws ExchangisDataSourceException {
+        switch (engine) {
+            case "SQOOP":
+                this.checkSqoopDSSupportDegree(sourceDsType, sinkDsType);
+                break;
+            case "DATAX":
+                this.checkDataXDSSupportDegree(sourceDsType, sinkDsType);
+                break;
+            default:
+                throw new ExchangisDataSourceException(ExchangisDataSourceExceptionCode.UNSUPPORTEd_ENGINE.getCode(), "Unsupported engine: " + engine);
+        }
+    }
+
+    private void checkSqoopDSSupportDegree(String sourceDsType, String sinkDsType) throws ExchangisDataSourceException {
+        if (!("HIVE".equals(sourceDsType) || "HIVE".equals(sinkDsType))) {
+            throw new ExchangisDataSourceException(ExchangisDataSourceExceptionCode.DS_MAPPING_MUST_CONTAIN_HIVE.getCode(), "Either the source or the sink of a SQOOP job must be a HIVE data source");
+        }
+        if (sourceDsType.equals(sinkDsType)) {
+            throw new ExchangisDataSourceException(ExchangisDataSourceExceptionCode.DS_TYPE_MUST_DIFFERENT.getCode(), "The source and sink types of a SQOOP job must be different");
+        }
+    }
+
+    private void checkDataXDSSupportDegree(String sourceDsType, String sinkDsType) throws ExchangisDataSourceException {
+        // DataX currently imposes no restriction on the source/sink type combination
+    }
+
+    /**
+     * TODO: the mapping rule is hard-coded for Hive here; abstract it to support all data source types
+     * @param request http request
+     * @param vo field mapping query
+     * @return message carrying the source/sink fields and the deduced field mappings
+     * @throws Exception
+     */
+    @SuppressWarnings("unchecked")
+    public Message queryDataSourceDBTableFieldsMapping(HttpServletRequest request, FieldMappingVO vo) throws Exception {
+
+        this.checkDSSupportDegree(vo.getEngine(), vo.getSourceTypeId(), vo.getSinkTypeId());
+        boolean containHive = "HIVE".equals(vo.getSourceTypeId()) || "HIVE".equals(vo.getSinkTypeId());
+
+        Message message = Message.ok();
+        message.data("addEnable", !containHive);
+
+        Message sourceMessage = this.queryDataSourceDBTableFields(request, vo.getSourceTypeId(), vo.getSourceDataSourceId(), vo.getSourceDataBase(), vo.getSourceTable());
+        List<DataSourceDbTableColumnDTO> sourceFields = (List<DataSourceDbTableColumnDTO>) sourceMessage.getData().get("columns");
+        for (int i = 0; i < sourceFields.size(); i++) {
+            DataSourceDbTableColumnDTO field = sourceFields.get(i);
+            field.setFieldIndex(i);
+            field.setFieldEditable(!"HIVE".equals(vo.getSourceTypeId()) && !"ELASTICSEARCH".equals(vo.getSourceTypeId()));
+        }
+        message.data("sourceFields", sourceFields);
+
+        Message sinkMessage = this.queryDataSourceDBTableFields(request, vo.getSinkTypeId(), vo.getSinkDataSourceId(), vo.getSinkDataBase(), vo.getSinkTable());
+        List<DataSourceDbTableColumnDTO> sinkFields = (List<DataSourceDbTableColumnDTO>) sinkMessage.getData().get("columns");
+        for (int i = 0; i < sinkFields.size(); i++) {
+            DataSourceDbTableColumnDTO field = sinkFields.get(i);
+            field.setFieldEditable(!"HIVE".equals(vo.getSinkTypeId()) && !"ELASTICSEARCH".equals(vo.getSinkTypeId()));
+        }
+        message.data("sinkFields", sinkFields);
+
+        // Field mapping deduction: iterate every column of the HIVE side, cycling through the other side
+        List<Map<String, Object>> deductions = new ArrayList<>();
+        List<DataSourceDbTableColumnDTO> left = sourceFields;
+        List<DataSourceDbTableColumnDTO> right = sinkFields;
+        boolean exchanged = false;
+        if (containHive && "HIVE".equals(vo.getSourceTypeId())) {
+            left = sinkFields;
+            right = sourceFields;
+            exchanged = true;
+        }
+
+        // Both field lists must be present, and the left one non-empty (it is indexed with a modulo below)
+        if (!Objects.isNull(left) && !left.isEmpty() && !Objects.isNull(right)) {
+            for (int i = 0; i < right.size(); i++) {
+                DataSourceDbTableColumnDTO leftElement = left.get(i % left.size());
+                DataSourceDbTableColumnDTO rightElement = right.get(i);
+                Map<String, Object> deduction = new HashMap<>();
+                deduction.put("source", exchanged ? rightElement : leftElement);
+                deduction.put("sink", exchanged ?
leftElement : rightElement);
+                deduction.put("deleteEnable", true);
+                deductions.add(deduction);
+            }
+        }
+
+        message.data("deductions", deductions);
+        message.data("transformEnable", true);
+
+        return message;
+    }
+
+    public Message encryptConnectInfo(String encryStr) throws Exception {
+        if (Objects.isNull(encryStr)) {
+            throw new ExchangisDataSourceException(ExchangisDataSourceExceptionCode.PARAMETER_INVALID.getCode(), "dataSourceType connect parameter should not be null");
+        }
+
+        String publicKeyStr = RSAUtil.PUBLIC_KEY_STR.getValue();
+
+        LOGGER.info("publicKeyStr is: {}", publicKeyStr);
+        PublicKey publicKey = RSAUtil.string2PublicKey(publicKeyStr);
+        // Encrypt with the public key
+        byte[] publicEncrypt = RSAUtil.publicEncrypt(encryStr.getBytes(), publicKey);
+        // Base64-encode the encrypted bytes
+        String byte2Base64 = RSAUtil.byte2Base64(publicEncrypt);
+        Message message = new Message();
+        message.data("encryStr", byte2Base64);
+        return message;
+    }
+
+    public Message decryptConnectInfo(String sinkStr) throws Exception {
+        if (Objects.isNull(sinkStr)) {
+            throw new ExchangisDataSourceException(ExchangisDataSourceExceptionCode.PARAMETER_INVALID.getCode(), "dataSourceType connect parameter should not be null");
+        }
+
+        String privateKeyStr = RSAUtil.PRIVATE_KEY_STR.getValue();
+        PrivateKey privateKey = RSAUtil.string2PrivateKey(privateKeyStr);
+        // Base64-decode the encrypted content
+        byte[] base642Byte = RSAUtil.base642Byte(sinkStr);
+        // Decrypt with the private key
+        byte[] privateDecrypt = RSAUtil.privateDecrypt(base642Byte, privateKey);
+        String decryptStr = new String(privateDecrypt);
+        Message message = new Message();
+        message.data("decryptStr", decryptStr);
+        return message;
+    }
+}
diff --git a/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/service/impl/DefaultDataSourceRenderService.java b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/service/impl/DefaultDataSourceRenderService.java
new file mode 100644
index 000000000..6452c8071
--- /dev/null
+++ b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/service/impl/DefaultDataSourceRenderService.java
@@ -0,0 +1,48 @@
+package com.webank.wedatasphere.exchangis.datasource.service.impl;
+
+import com.webank.wedatasphere.exchangis.datasource.core.exception.ExchangisDataSourceException;
+import com.webank.wedatasphere.exchangis.datasource.core.service.MetadataInfoService;
+import com.webank.wedatasphere.exchangis.datasource.core.ui.ElementUI;
+import com.webank.wedatasphere.exchangis.datasource.core.ui.builder.ElementUIFactory;
+import com.webank.wedatasphere.exchangis.datasource.service.DataSourceRenderService;
+import org.springframework.stereotype.Service;
+
+import javax.annotation.Resource;
+import java.util.*;
+
+/**
+ * Default render service
+ */
+@Service
+public class DefaultDataSourceRenderService implements DataSourceRenderService {
+
+    /**
+     * Default placeholders stored in the system
+     */
+    private final static String[] DEFAULT_PLACEHOLDER =
+            new String[]{"${timestamp}", "${yyyyMMdd}", "${yyyy-MM-dd}", "${run_date}", "${run_date-1}", "${run_month_begin}", "${run_month_begin-1}"};
+
+    /**
+     * Metadata info service
+     */
+    @Resource
+    private MetadataInfoService metadataInfoService;
+
+    /**
+     * Element factory
+     */
+    @Resource
+    private ElementUIFactory elementUIFactory;
+
+    @Override
+    public ElementUI<?> getPartitionAndRender(String userName,
+                                              Long dataSourceId, String database, String table, ElementUI.Type uiType, boolean
tableNotExist) throws ExchangisDataSourceException {
+        Map<String, Object> renderParams = new LinkedHashMap<>();
+        if (!tableNotExist) {
+            List<String> partitionKeys = metadataInfoService.getPartitionKeys(userName, dataSourceId, database, table);
+            List<String> placeHolder = Arrays.asList(DEFAULT_PLACEHOLDER);
+            // Offer the default placeholders as candidate values for each partition key
+            partitionKeys.forEach(partition -> renderParams.putIfAbsent(partition, placeHolder));
+        }
+        return elementUIFactory.createElement(uiType.name(), renderParams, Map.class);
+    }
+}
diff --git a/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/vo/DataSourceCreateVO.java b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/vo/DataSourceCreateVO.java
new file mode 100644
index 000000000..2edfcb1cf
--- /dev/null
+++ b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/vo/DataSourceCreateVO.java
@@ -0,0 +1,185 @@
+package com.webank.wedatasphere.exchangis.datasource.vo;
+
+import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
+
+import javax.validation.constraints.Size;
+import java.util.Date;
+import java.util.Map;
+
+@JsonIgnoreProperties(ignoreUnknown = true)
+public class DataSourceCreateVO {
+    @Size(min=0,max=100,message="Length of dataSource name should be between 0 and 100(数据源名字的长度应该在0和100之间)")
+    private String dataSourceName;
+
+    private Long dataSourceTypeId;
+
+    @Size(min=0,max=200,message="Length of dataSource description should be between 0 and 200(数据源描述的长度应该在0和200之间)")
+    private String dataSourceDesc;
+
+    private String createIdentify;
+
+    private String createSystem;
+
+    private String createUser;
+
+    private String modifyUser;
+
+    private Date createTime;
+
+    private Date modifyTime;
+
+    private Long versionId;
+
+    @Size(min=0,max=200,message="Length of labels should be between 0 and 200(标签的长度应该在0和200之间)")
+    private String label;
+
+    private Map<String, Object> labels;
+
+    private Long publishedVersionId;
+
+    private Boolean expire = false;
+
+    private String comment;
+
+    private Map<String, Object> connectParams;
+
+    public String getDataSourceName() {
+        return dataSourceName;
+    }
+
+    public void setDataSourceName(String dataSourceName) {
+        this.dataSourceName = dataSourceName;
+    }
+
+    public Long getDataSourceTypeId() {
+        return dataSourceTypeId;
+    }
+
+    public void setDataSourceTypeId(Long dataSourceTypeId) {
+        this.dataSourceTypeId = dataSourceTypeId;
+    }
+
+    public String getDataSourceDesc() {
+        return dataSourceDesc;
+    }
+
+    public void setDataSourceDesc(String dataSourceDesc) {
+        this.dataSourceDesc = dataSourceDesc;
+    }
+
+    public String getCreateIdentify() {
+        return createIdentify;
+    }
+
+    public void setCreateIdentify(String createIdentify) {
+        this.createIdentify = createIdentify;
+    }
+
+    public String getCreateSystem() {
+        return createSystem;
+    }
+
+    public void setCreateSystem(String createSystem) {
+        this.createSystem = createSystem;
+    }
+
+    public String getCreateUser() {
+        return createUser;
+    }
+
+    public void setCreateUser(String createUser) {
+        this.createUser = createUser;
+    }
+
+    public String getModifyUser() {
+        return modifyUser;
+    }
+
+    public void setModifyUser(String modifyUser) {
+        this.modifyUser = modifyUser;
+    }
+
+    public Date getCreateTime() {
+        return createTime;
+    }
+
+    public void setCreateTime(Date createTime) {
+        this.createTime = createTime;
+ } + + public Date getModifyTime() { + return modifyTime; + } + + public void setModifyTime(Date modifyTime) { + this.modifyTime = modifyTime; + } + + public Long getVersionId() { + return versionId; + } + + public void setVersionId(Long versionId) { + this.versionId = versionId; + } + + public String getLabel() { + return label; + } + + public void setLabel(String label) { + this.label = label; + } + + public Long getPublishedVersionId() { + return publishedVersionId; + } + + public void setPublishedVersionId(Long publishedVersionId) { + this.publishedVersionId = publishedVersionId; + } + + public Boolean getExpire() { + return expire; + } + + public void setExpire(Boolean expire) { + this.expire = expire; + } + + + + public Map getConnectParams() { + return connectParams; + } + + public void setConnectParams(Map connectParams) { + this.connectParams = connectParams; + } + + public String getComment() { + return comment; + } + + public void setComment(String comment) { + this.comment = comment; + } + + public Map getLabels() { + return labels; + } + + public void setLabels(Map labels) { + this.labels = labels; + } +} diff --git a/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/vo/DataSourceQueryVO.java b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/vo/DataSourceQueryVO.java new file mode 100644 index 000000000..3b50117f3 --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/vo/DataSourceQueryVO.java @@ -0,0 +1,69 @@ +package com.webank.wedatasphere.exchangis.datasource.vo; + +import java.util.Map; + +public class DataSourceQueryVO { + private Integer page; + private Integer pageSize; + private Long typeId; + private String typeName; + private String name; + private String direct; + private Map labels; + + public Map getLabels() { + return labels; + } + + public void setLabels(Map labels) { + this.labels = labels; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public Integer getPage() { + return page; + } + + public void setPage(Integer page) { + this.page = page; + } + + public Integer getPageSize() { + return pageSize; + } + + public void setPageSize(Integer pageSize) { + this.pageSize = pageSize; + } + + public String getTypeName() { + return typeName; + } + + public void setTypeName(String typeName) { + this.typeName = typeName; + } + + public Long getTypeId() { + return typeId; + } + + public void setTypeId(Long typeId) { + this.typeId = typeId; + } + + public String getDirect() { + return direct; + } + + public void setDirect(String direct) { + this.direct = direct; + } +} diff --git a/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/vo/DataSourceUpdateVO.java b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/vo/DataSourceUpdateVO.java new file mode 100644 index 000000000..c401c1ef1 --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/vo/DataSourceUpdateVO.java @@ -0,0 +1,167 @@ +package com.webank.wedatasphere.exchangis.datasource.vo; + +import java.util.Date; +import java.util.Map; + +public class DataSourceUpdateVO { + + private Long id; + + private String dataSourceName; + + private Long dataSourceTypeId; + + private 
String dataSourceDesc; + + private String createIdentify; + + private String createSystem; + + private String createUser; + + private String modifyUser; + + private Date createTime; + + private Date modifyTime; + + private Long versionId; + + private String labels; + + private Long publishedVersionId; + + private Boolean expire = false; + + private String comment; + + public String getDataSourceName() { + return dataSourceName; + } + + public void setDataSourceName(String dataSourceName) { + this.dataSourceName = dataSourceName; + } + + public Long getDataSourceTypeId() { + return dataSourceTypeId; + } + + public void setDataSourceTypeId(Long dataSourceTypeId) { + this.dataSourceTypeId = dataSourceTypeId; + } + + public String getDataSourceDesc() { + return dataSourceDesc; + } + + public void setDataSourceDesc(String dataSourceDesc) { + this.dataSourceDesc = dataSourceDesc; + } + + public String getCreateIdentify() { + return createIdentify; + } + + public void setCreateIdentify(String createIdentify) { + this.createIdentify = createIdentify; + } + + public String getCreateSystem() { + return createSystem; + } + + public void setCreateSystem(String createSystem) { + this.createSystem = createSystem; + } + + public String getCreateUser() { + return createUser; + } + + public void setCreateUser(String createUser) { + this.createUser = createUser; + } + + public String getModifyUser() { + return modifyUser; + } + + public void setModifyUser(String modifyUser) { + this.modifyUser = modifyUser; + } + + public Date getCreateTime() { + return createTime; + } + + public void setCreateTime(Date createTime) { + this.createTime = createTime; + } + + public Date getModifyTime() { + return modifyTime; + } + + public void setModifyTime(Date modifyTime) { + this.modifyTime = modifyTime; + } + + public Long getVersionId() { + return versionId; + } + + public void setVersionId(Long versionId) { + this.versionId = versionId; + } + + public String getLabels() { + return labels; + } + + public void setLabels(String labels) { + this.labels = labels; + } + + public Long getPublishedVersionId() { + return publishedVersionId; + } + + public void setPublishedVersionId(Long publishedVersionId) { + this.publishedVersionId = publishedVersionId; + } + + public Boolean getExpire() { + return expire; + } + + public void setExpire(Boolean expire) { + this.expire = expire; + } + + private Map connectParams; + + public Map getConnectParams() { + return connectParams; + } + + public void setConnectParams(Map connectParams) { + this.connectParams = connectParams; + } + + public String getComment() { + return comment; + } + + public void setComment(String comment) { + this.comment = comment; + } + + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } +} diff --git a/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/vo/FieldMappingVO.java b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/vo/FieldMappingVO.java new file mode 100644 index 000000000..1170df09c --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/vo/FieldMappingVO.java @@ -0,0 +1,99 @@ +package com.webank.wedatasphere.exchangis.datasource.vo; + +import java.util.Map; + +public class FieldMappingVO { + + private String engine; + + private String sourceTypeId; + private Long sourceDataSourceId; + private String sourceDataBase; + 
private String sourceTable; + + private String sinkTypeId; + private Long sinkDataSourceId; + private String sinkDataBase; + private String sinkTable; + private Map labels; + + public Map getLabels() { + return labels; + } + + public void setLabels(Map labels) { + this.labels = labels; + } + + public String getEngine() { + return engine; + } + + public void setEngine(String engine) { + this.engine = engine; + } + + public String getSourceTypeId() { + return sourceTypeId; + } + + public void setSourceTypeId(String sourceTypeId) { + this.sourceTypeId = sourceTypeId; + } + + public Long getSourceDataSourceId() { + return sourceDataSourceId; + } + + public void setSourceDataSourceId(Long sourceDataSourceId) { + this.sourceDataSourceId = sourceDataSourceId; + } + + public String getSourceDataBase() { + return sourceDataBase; + } + + public void setSourceDataBase(String sourceDataBase) { + this.sourceDataBase = sourceDataBase; + } + + public String getSourceTable() { + return sourceTable; + } + + public void setSourceTable(String sourceTable) { + this.sourceTable = sourceTable; + } + + public String getSinkTypeId() { + return sinkTypeId; + } + + public void setSinkTypeId(String sinkTypeId) { + this.sinkTypeId = sinkTypeId; + } + + public Long getSinkDataSourceId() { + return sinkDataSourceId; + } + + public void setSinkDataSourceId(Long sinkDataSourceId) { + this.sinkDataSourceId = sinkDataSourceId; + } + + public String getSinkDataBase() { + return sinkDataBase; + } + + public void setSinkDataBase(String sinkDataBase) { + this.sinkDataBase = sinkDataBase; + } + + public String getSinkTable() { + return sinkTable; + } + + public void setSinkTable(String sinkTable) { + this.sinkTable = sinkTable; + } +} diff --git a/exchangis-datasource/exchangis-datasource-service/src/main/scala/com/webank/wedatasphere/exchangis/datasource/GetDataSourceInfoByIdAndVersionIdAction.scala b/exchangis-datasource/exchangis-datasource-service/src/main/scala/com/webank/wedatasphere/exchangis/datasource/GetDataSourceInfoByIdAndVersionIdAction.scala new file mode 100644 index 000000000..ac84231cd --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-service/src/main/scala/com/webank/wedatasphere/exchangis/datasource/GetDataSourceInfoByIdAndVersionIdAction.scala @@ -0,0 +1,65 @@ +package com.webank.wedatasphere.exchangis.datasource + +import org.apache.linkis.datasource.client.config.DatasourceClientConfig.DATA_SOURCE_SERVICE_MODULE +import org.apache.linkis.datasource.client.exception.DataSourceClientBuilderException +import org.apache.linkis.datasource.client.request.DataSourceAction +import org.apache.linkis.httpclient.request.GetAction + + +class GetDataSourceInfoByIdAndVersionIdAction extends GetAction with DataSourceAction { + private var user: String = _ + private var dataSourceId: Long = _ + private var versionId: String = _ + + override def setUser(user: String): Unit = this.user = user + + override def getUser: String = this.user + + override def suffixURLs: Array[String] = Array(DATA_SOURCE_SERVICE_MODULE.getValue, "info", dataSourceId.toString, versionId) +} + + +object GetDataSourceInfoByIdAndVersionIdAction { + def builder(): Builder = new Builder + + class Builder private[GetDataSourceInfoByIdAndVersionIdAction]() { + private var dataSourceId: Long = _ + private var versionId: String = _ + private var system:String = _ + private var user: String = _ + + def setUser(user: String): Builder = { + this.user = user + this + } + + def setDataSourceId(dataSourceId: Long): Builder = { + this.dataSourceId = 
dataSourceId
+      this
+    }
+
+    def setSystem(system: String): Builder = {
+      this.system = system
+      this
+    }
+
+    def setVersionId(versionId: String): Builder = {
+      this.versionId = versionId
+      this
+    }
+
+    def build(): GetDataSourceInfoByIdAndVersionIdAction = {
+      if (dataSourceId == 0L) throw new DataSourceClientBuilderException("dataSourceId is needed!")
+      if (versionId == null) throw new DataSourceClientBuilderException("versionId is needed!")
+      if (system == null) throw new DataSourceClientBuilderException("system is needed!")
+      if (user == null) throw new DataSourceClientBuilderException("user is needed!")
+
+      val action = new GetDataSourceInfoByIdAndVersionIdAction
+      action.dataSourceId = this.dataSourceId
+      action.setParameter("system", system)
+      action.versionId = this.versionId
+      action.setUser(user)
+      action
+    }
+  }
+}
\ No newline at end of file
diff --git a/exchangis-datasource/exchangis-datasource-service/src/test/java/com/webank/wedatasphere/exchangis/TestDataSourceClient.java b/exchangis-datasource/exchangis-datasource-service/src/test/java/com/webank/wedatasphere/exchangis/TestDataSourceClient.java
new file mode 100644
index 000000000..7a0b2b888
--- /dev/null
+++ b/exchangis-datasource/exchangis-datasource-service/src/test/java/com/webank/wedatasphere/exchangis/TestDataSourceClient.java
@@ -0,0 +1,35 @@
+package com.webank.wedatasphere.exchangis;
+
+import org.apache.linkis.datasource.client.impl.LinkisDataSourceRemoteClient;
+import org.apache.linkis.datasource.client.request.QueryDataSourceAction;
+import org.apache.linkis.datasource.client.response.QueryDataSourceResult;
+
+public class TestDataSourceClient {
+
+    public static void main(String[] args) {
+        LinkisDataSourceRemoteClient linkisDataSourceRemoteClient = TestExchangisLinkisRemoteClient.getLinkisDataSourceRemoteClient();
+
+        QueryDataSourceAction.Builder builder = QueryDataSourceAction.builder()
+                .setSystem("system")
+                .setIdentifies("")
+                .setCurrentPage(1)
+                .setPageSize(500);
+        QueryDataSourceAction action = builder.build();
+        QueryDataSourceResult result = linkisDataSourceRemoteClient.queryDataSource(action);
+        System.out.println(result.getResponseBody());
+        System.out.println(result.getAllDataSource());
+    }
+
+}
diff --git a/exchangis-datasource/exchangis-datasource-service/src/test/scala/com/webank/wedatasphere/exchangis/TestExchangisLinkisRemoteClient.scala b/exchangis-datasource/exchangis-datasource-service/src/test/scala/com/webank/wedatasphere/exchangis/TestExchangisLinkisRemoteClient.scala
new file mode 100644
index 000000000..d8f0889f1
--- /dev/null
+++ b/exchangis-datasource/exchangis-datasource-service/src/test/scala/com/webank/wedatasphere/exchangis/TestExchangisLinkisRemoteClient.scala
@@ -0,0 +1,162 @@
+package com.webank.wedatasphere.exchangis
+
+import java.util.concurrent.TimeUnit
+
+import org.apache.linkis.datasource.client.impl.{LinkisDataSourceRemoteClient, LinkisMetaDataRemoteClient}
+import org.apache.linkis.datasource.client.request.{GetAllDataSourceTypesAction,
GetConnectParamsByDataSourceIdAction, MetadataGetColumnsAction, MetadataGetDatabasesAction, MetadataGetTablesAction, QueryDataSourceAction} +import org.apache.linkis.datasource.client.response.{GetConnectParamsByDataSourceIdResult, MetadataGetColumnsResult, MetadataGetDatabasesResult, MetadataGetTablesResult, QueryDataSourceResult} +import org.apache.linkis.datasourcemanager.common.domain.{DataSource, DataSourceType} +import org.apache.linkis.httpclient.dws.authentication.StaticAuthenticationStrategy +import org.apache.linkis.httpclient.dws.config.{DWSClientConfig, DWSClientConfigBuilder} + +object TestExchangisLinkisRemoteClient { + //Linkis Datasource Client Config + // val clientConfig = DWSClientConfigBuilder.newBuilder() + // .addServerUrl(serverUrl) + // .connectionTimeout(connectionTimeout) + // .discoveryEnabled(discoveryEnabled) + // .discoveryFrequency(1,TimeUnit.MINUTES) + // .loadbalancerEnabled(loadbalancerEnabled) + // .maxConnectionSize(maxConnectionSize) + // .retryEnabled(retryEnabled) + // .readTimeout(readTimeout) + // .setAuthenticationStrategy(new StaticAuthenticationStrategy()) + // .setAuthTokenKey(authTokenKey) + // .setAuthTokenValue(authTokenValue) + // .setDWSVersion(dwsVersion) + // .build() + + val clientConfig: DWSClientConfig = DWSClientConfigBuilder.newBuilder() + .addServerUrl("http://dss.shineweng.com:20088") + .connectionTimeout(30000L) + .discoveryEnabled(true) + .discoveryFrequency(1L, TimeUnit.MINUTES) + .loadbalancerEnabled(true) + .maxConnectionSize(5) + .retryEnabled(false) + .readTimeout(30000L) + .setAuthenticationStrategy(new StaticAuthenticationStrategy()) + .setAuthTokenKey("hdfs") + .setAuthTokenValue("hdfs") + .setDWSVersion("v1") + .build() + + val dataSourceClient = new LinkisDataSourceRemoteClient(clientConfig) + + val metaDataClient = new LinkisMetaDataRemoteClient(clientConfig) + + def getLinkisDataSourceRemoteClient: LinkisDataSourceRemoteClient = { + dataSourceClient + } + + def getLinkisMetadataRemoteClient: LinkisMetaDataRemoteClient = { + metaDataClient + } + + def close(): Unit = { + dataSourceClient.close() + metaDataClient.close() + } + + def queryDataSource(linkisDatasourceName: String): QueryDataSourceResult = { + dataSourceClient.queryDataSource(QueryDataSourceAction.builder() + .setSystem("") + .setName(linkisDatasourceName) + .setTypeId(1) + .setIdentifies("") + .setCurrentPage(1) + .setUser("hadoop") + .setPageSize(1).build() + ) + } + +// def createDataSource() = { +// dataSourceClient.execute().asInstanceOf[] +// } + + /** + * get datasourceConnect information + * + * @param dataSourceId id + * @param system dssSystem + * @param user username + * @return + */ + def queryConnectParams(dataSourceId: Long, system: String, user: String): GetConnectParamsByDataSourceIdResult = { + dataSourceClient.getConnectParams(GetConnectParamsByDataSourceIdAction.builder() + .setDataSourceId(dataSourceId) + .setSystem(system) + .setUser(user) + .build() + ) + } + + /** + * get all DataSourceTypes + * + * @param user user + * @return + */ + def queryDataSourceTypes(user: String): java.util.List[DataSourceType] = { + dataSourceClient.getAllDataSourceTypes(GetAllDataSourceTypesAction.builder() + .setUser(user) + .build() + ).getAllDataSourceType + } + + + def queryClusterByDataSourceType(system: String, name: String, typeId: Long, user: String): java.util.List[DataSource] = { + dataSourceClient.queryDataSource(QueryDataSourceAction.builder() + .setSystem(system) + .setName(name) + .setTypeId(typeId) + .setIdentifies("") + .setCurrentPage(1) + 
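+      // paging is fixed small for this smoke test; raise pageSize when the cluster has more data sources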
.setPageSize(10) + .setUser(user) + .build() + ).getAllDataSource + } + + + /** + * get DataBases list + * + * @param system + * @param dataSourceId + * @param user + * @return list + */ + def queryDataBasesByCuster(system: String, dataSourceId: Long, user: String): MetadataGetDatabasesResult = { + metaDataClient.getDatabases(MetadataGetDatabasesAction.builder() + .setSystem(system) + .setDataSourceId(dataSourceId) + .setUser(user) + .build() + ) + } + + def queryTablesByDataBase(system: String, dataSourceId: Long, dataBase: String, user: String): MetadataGetTablesResult = { + metaDataClient.getTables(MetadataGetTablesAction.builder() + .setSystem(system) + .setDataSourceId(dataSourceId) + .setDatabase(dataBase) + .setUser(user) + .build() + ) + } + + def queryColumnsByTable(system: String, dataSourceId: Long, dataBase: String, table: String, user: String): MetadataGetColumnsResult = { + metaDataClient.getColumns(MetadataGetColumnsAction.builder() + .setSystem(system) + .setDataSourceId(dataSourceId) + .setDatabase(dataBase) + .setTable(table) + .setUser(user) + .build() + ) + } + + +} diff --git a/exchangis-datasource/exchangis-datasource-streamis/pom.xml b/exchangis-datasource/exchangis-datasource-streamis/pom.xml new file mode 100644 index 000000000..bf63d0db6 --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-streamis/pom.xml @@ -0,0 +1,50 @@ + + + + exchangis + com.webank.wedatasphere.exchangis + ${revision} + ../../pom.xml + + 4.0.0 + + exchangis-datasource-streamis + + + 8 + 8 + + + + + com.webank.wedatasphere.exchangis + exchangis-datasource-core + ${project.version} + + + com.webank.wedatasphere.exchangis + exchangis-datasource-linkis + ${project.version} + + + + + + + org.apache.maven.plugins + maven-deploy-plugin + + + + net.alchim31.maven + scala-maven-plugin + + + org.apache.maven.plugins + maven-jar-plugin + + + + \ No newline at end of file diff --git a/exchangis-datasource/exchangis-datasource-streamis/src/main/java/com/webank/wedatasphere/exchangis/datasource/streamis/ExchangisStreamisDataSource.java b/exchangis-datasource/exchangis-datasource-streamis/src/main/java/com/webank/wedatasphere/exchangis/datasource/streamis/ExchangisStreamisDataSource.java new file mode 100644 index 000000000..71b614f5a --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-streamis/src/main/java/com/webank/wedatasphere/exchangis/datasource/streamis/ExchangisStreamisDataSource.java @@ -0,0 +1,22 @@ +package com.webank.wedatasphere.exchangis.datasource.streamis; + +import com.webank.wedatasphere.exchangis.datasource.core.AbstractExchangisDataSourceDefinition; +import com.webank.wedatasphere.exchangis.datasource.linkis.ExchangisLinkisRemoteClient; +import org.apache.linkis.datasource.client.impl.LinkisDataSourceRemoteClient; +import org.apache.linkis.datasource.client.impl.LinkisMetaDataRemoteClient; + +/** + * Exchangis streamis data source + */ +public abstract class ExchangisStreamisDataSource extends AbstractExchangisDataSourceDefinition { + + @Override + public LinkisDataSourceRemoteClient getDataSourceRemoteClient() { + return ExchangisLinkisRemoteClient.getLinkisDataSourceRemoteClient(); + } + + @Override + public LinkisMetaDataRemoteClient getMetaDataRemoteClient() { + return ExchangisLinkisRemoteClient.getLinkisMetadataRemoteClient(); + } +} diff --git a/exchangis-datasource/extension-datasources/exchangis-datasource-ext-elasticsearch/pom.xml b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-elasticsearch/pom.xml new file mode 100644 index 
000000000..0ea794a53 --- /dev/null +++ b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-elasticsearch/pom.xml @@ -0,0 +1,55 @@ + + + + exchangis + com.webank.wedatasphere.exchangis + ${revision} + ../../../pom.xml + + 4.0.0 + + exchangis-datasource-ext-elasticsearch + + + 8 + 8 + + + + + com.webank.wedatasphere.exchangis + exchangis-dao + ${project.version} + + + com.webank.wedatasphere.exchangis + exchangis-datasource-linkis + ${project.version} + + + com.webank.wedatasphere.exchangis + exchangis-datasource-core + ${project.version} + + + + + + + org.apache.maven.plugins + maven-deploy-plugin + + + + net.alchim31.maven + scala-maven-plugin + + + org.apache.maven.plugins + maven-jar-plugin + + + + \ No newline at end of file diff --git a/exchangis-datasource/extension-datasources/exchangis-datasource-ext-elasticsearch/src/main/java/com/webank/wedatasphere/exchangis/extension/datasource/mysql/ExchangisESDataSource.java b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-elasticsearch/src/main/java/com/webank/wedatasphere/exchangis/extension/datasource/mysql/ExchangisESDataSource.java new file mode 100644 index 000000000..a049d07ce --- /dev/null +++ b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-elasticsearch/src/main/java/com/webank/wedatasphere/exchangis/extension/datasource/mysql/ExchangisESDataSource.java @@ -0,0 +1,37 @@ +package com.webank.wedatasphere.exchangis.extension.datasource.mysql; + +import com.webank.wedatasphere.exchangis.datasource.core.domain.ExchangisDataSourceType; +import com.webank.wedatasphere.exchangis.datasource.core.domain.StructClassifier; +import com.webank.wedatasphere.exchangis.datasource.linkis.ExchangisBatchDataSource; + +/** + * Note: ES data source + */ +public class ExchangisESDataSource extends ExchangisBatchDataSource { + + @Override + protected ExchangisDataSourceType type() { + return ExchangisDataSourceType.ELASTICSEARCH; + } + + @Override + public String structClassifier() { + return StructClassifier.NON_STRUCTURED.name; + } + + @Override + public String description() { + return "This is ES DataSource"; + } + + @Override + public String option() { + return "ES无结构化存储"; + } + + @Override + public String icon() { + return "icon-es"; + } + +} \ No newline at end of file diff --git a/exchangis-datasource/extension-datasources/exchangis-datasource-ext-hive/pom.xml b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-hive/pom.xml new file mode 100644 index 000000000..3f4323ab1 --- /dev/null +++ b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-hive/pom.xml @@ -0,0 +1,56 @@ + + + + exchangis + com.webank.wedatasphere.exchangis + ${revision} + ../../../pom.xml + + 4.0.0 + + exchangis-datasource-ext-hive + + + 8 + 8 + + + + + + com.webank.wedatasphere.exchangis + exchangis-dao + ${project.version} + + + com.webank.wedatasphere.exchangis + exchangis-datasource-linkis + ${project.version} + + + com.webank.wedatasphere.exchangis + exchangis-dao + ${project.version} + + + + + + + org.apache.maven.plugins + maven-deploy-plugin + + + + net.alchim31.maven + scala-maven-plugin + + + org.apache.maven.plugins + maven-jar-plugin + + + + \ No newline at end of file diff --git a/exchangis-datasource/extension-datasources/exchangis-datasource-ext-hive/src/main/java/com/webank/wedatasphere/exchangis/extension/datasource/hive/ExchangisHiveDataSource.java 
b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-hive/src/main/java/com/webank/wedatasphere/exchangis/extension/datasource/hive/ExchangisHiveDataSource.java new file mode 100644 index 000000000..c59cfe277 --- /dev/null +++ b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-hive/src/main/java/com/webank/wedatasphere/exchangis/extension/datasource/hive/ExchangisHiveDataSource.java @@ -0,0 +1,37 @@ +package com.webank.wedatasphere.exchangis.extension.datasource.hive; + +import com.webank.wedatasphere.exchangis.datasource.core.domain.ExchangisDataSourceType; +import com.webank.wedatasphere.exchangis.datasource.core.domain.StructClassifier; +import com.webank.wedatasphere.exchangis.datasource.linkis.ExchangisBatchDataSource; + +/** + * Note: Hive data source + */ +public class ExchangisHiveDataSource extends ExchangisBatchDataSource { + + @Override + protected ExchangisDataSourceType type() { + return ExchangisDataSourceType.HIVE; + } + + @Override + public String structClassifier() { + return StructClassifier.SEMI_STRUCTURED.name; + } + + @Override + public String description() { + return "This is Hive DataSource"; + } + + @Override + public String option() { + return "hive"; + } + + @Override + public String icon() { + return "icon-hive"; + } + +} diff --git a/exchangis-datasource/extension-datasources/exchangis-datasource-ext-mongodb/pom.xml b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-mongodb/pom.xml new file mode 100644 index 000000000..7bbd306c7 --- /dev/null +++ b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-mongodb/pom.xml @@ -0,0 +1,55 @@ + + + + exchangis + com.webank.wedatasphere.exchangis + ${revision} + ../../../pom.xml + + 4.0.0 + + exchangis-datasource-ext-mongodb + + + 8 + 8 + + + + + com.webank.wedatasphere.exchangis + exchangis-dao + ${project.version} + + + com.webank.wedatasphere.exchangis + exchangis-datasource-linkis + ${project.version} + + + com.webank.wedatasphere.exchangis + exchangis-datasource-core + ${project.version} + + + + + + + org.apache.maven.plugins + maven-deploy-plugin + + + + net.alchim31.maven + scala-maven-plugin + + + org.apache.maven.plugins + maven-jar-plugin + + + + \ No newline at end of file diff --git a/exchangis-datasource/extension-datasources/exchangis-datasource-ext-mongodb/src/main/java/com/webank/wedatasphere/exchangis/extension/datasource/mysql/ExchangisMongoDbDataSource.java b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-mongodb/src/main/java/com/webank/wedatasphere/exchangis/extension/datasource/mysql/ExchangisMongoDbDataSource.java new file mode 100644 index 000000000..0fec6da36 --- /dev/null +++ b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-mongodb/src/main/java/com/webank/wedatasphere/exchangis/extension/datasource/mysql/ExchangisMongoDbDataSource.java @@ -0,0 +1,37 @@ +package com.webank.wedatasphere.exchangis.extension.datasource.mysql; + +import com.webank.wedatasphere.exchangis.datasource.core.domain.ExchangisDataSourceType; +import com.webank.wedatasphere.exchangis.datasource.core.domain.StructClassifier; +import com.webank.wedatasphere.exchangis.datasource.linkis.ExchangisBatchDataSource; + +/** + * Note: MongoDB data source + */ +public class ExchangisMongoDbDataSource extends ExchangisBatchDataSource { + + @Override + protected ExchangisDataSourceType type() { + return ExchangisDataSourceType.MONGODB; + } + + @Override + public String structClassifier() { + return 
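+                // Hive tables are schema-on-read over files, so Hive is treated as semi-structured here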
StructClassifier.SEMI_STRUCTURED.name; + } + + @Override + public String description() { + return "This is MongoDB DataSource"; + } + + @Override + public String option() { + return "mongodb无结构存储"; + } + + @Override + public String icon() { + return "icon-mongodb"; + } + +} \ No newline at end of file diff --git a/exchangis-datasource/extension-datasources/exchangis-datasource-ext-mysql/pom.xml b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-mysql/pom.xml new file mode 100644 index 000000000..6b8bc388f --- /dev/null +++ b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-mysql/pom.xml @@ -0,0 +1,55 @@ + + + + exchangis + com.webank.wedatasphere.exchangis + ${revision} + ../../../pom.xml + + 4.0.0 + + exchangis-datasource-ext-mysql + + + 8 + 8 + + + + + com.webank.wedatasphere.exchangis + exchangis-dao + ${project.version} + + + com.webank.wedatasphere.exchangis + exchangis-datasource-linkis + ${project.version} + + + com.webank.wedatasphere.exchangis + exchangis-datasource-core + ${project.version} + + + + + + + org.apache.maven.plugins + maven-deploy-plugin + + + + net.alchim31.maven + scala-maven-plugin + + + org.apache.maven.plugins + maven-jar-plugin + + + + \ No newline at end of file diff --git a/exchangis-datasource/extension-datasources/exchangis-datasource-ext-mysql/src/main/java/com/webank/wedatasphere/exchangis/extension/datasource/mysql/ExchangisMySQLDataSource.java b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-mysql/src/main/java/com/webank/wedatasphere/exchangis/extension/datasource/mysql/ExchangisMySQLDataSource.java new file mode 100644 index 000000000..d818a1f65 --- /dev/null +++ b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-mysql/src/main/java/com/webank/wedatasphere/exchangis/extension/datasource/mysql/ExchangisMySQLDataSource.java @@ -0,0 +1,39 @@ +package com.webank.wedatasphere.exchangis.extension.datasource.mysql; + +import com.webank.wedatasphere.exchangis.datasource.core.domain.ExchangisDataSourceType; +import com.webank.wedatasphere.exchangis.datasource.core.domain.StructClassifier; +import com.webank.wedatasphere.exchangis.datasource.linkis.ExchangisBatchDataSource; + + +/** + * Note: MYSQL data source + */ +public class ExchangisMySQLDataSource extends ExchangisBatchDataSource { + + @Override + protected ExchangisDataSourceType type() { + return ExchangisDataSourceType.MYSQL; + } + + @Override + public String structClassifier() { + return StructClassifier.STRUCTURED.name; + } + + @Override + public String description() { + return "This is MySQL DataSource"; + } + + @Override + public String option() { + return "MySQL数据库"; + } + + @Override + public String icon() { + return "icon-mysql"; + } + + +} \ No newline at end of file diff --git a/exchangis-datasource/extension-datasources/exchangis-datasource-ext-oracle/pom.xml b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-oracle/pom.xml new file mode 100644 index 000000000..8741dfdd7 --- /dev/null +++ b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-oracle/pom.xml @@ -0,0 +1,55 @@ + + + + exchangis + com.webank.wedatasphere.exchangis + ${revision} + ../../../pom.xml + + 4.0.0 + + exchangis-datasource-ext-oracle + + + 8 + 8 + + + + + com.webank.wedatasphere.exchangis + exchangis-dao + ${project.version} + + + com.webank.wedatasphere.exchangis + exchangis-datasource-linkis + ${project.version} + + + com.webank.wedatasphere.exchangis + exchangis-datasource-core + ${project.version} + + + + + 
+ + org.apache.maven.plugins + maven-deploy-plugin + + + + net.alchim31.maven + scala-maven-plugin + + + org.apache.maven.plugins + maven-jar-plugin + + + + \ No newline at end of file diff --git a/exchangis-datasource/extension-datasources/exchangis-datasource-ext-oracle/src/main/java/com/webank/wedatasphere/exchangis/extension/datasource/oracle/ExchangisOracleDataSource.java b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-oracle/src/main/java/com/webank/wedatasphere/exchangis/extension/datasource/oracle/ExchangisOracleDataSource.java new file mode 100644 index 000000000..38d0c1a8b --- /dev/null +++ b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-oracle/src/main/java/com/webank/wedatasphere/exchangis/extension/datasource/oracle/ExchangisOracleDataSource.java @@ -0,0 +1,44 @@ +package com.webank.wedatasphere.exchangis.extension.datasource.oracle; + +import com.webank.wedatasphere.exchangis.dao.domain.ExchangisJobParamConfig; +import com.webank.wedatasphere.exchangis.datasource.core.domain.Classifier; +import com.webank.wedatasphere.exchangis.datasource.core.domain.ExchangisDataSourceType; +import com.webank.wedatasphere.exchangis.datasource.core.domain.StructClassifier; +import com.webank.wedatasphere.exchangis.datasource.linkis.ExchangisBatchDataSource; + +import java.util.List; + +/** + * @author jefftlin + * @create 2022-09-14 + **/ +public class ExchangisOracleDataSource extends ExchangisBatchDataSource { + + @Override + protected ExchangisDataSourceType type() { + return ExchangisDataSourceType.ORACLE; + } + + @Override + public String description() { + return "This is Oracle DataSource"; + } + + @Override + public String option() { + return "Oracle数据库"; + } + + + @Override + public String structClassifier() { + return StructClassifier.STRUCTURED.name; + } + + @Override + public String icon() { + return "icon-oracle"; + } + + +} diff --git a/exchangis-datasource/extension-datasources/exchangis-datasource-ext-sftp/pom.xml b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-sftp/pom.xml new file mode 100644 index 000000000..f8b375288 --- /dev/null +++ b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-sftp/pom.xml @@ -0,0 +1,55 @@ + + + + exchangis + com.webank.wedatasphere.exchangis + ${revision} + ../../../pom.xml + + 4.0.0 + + exchangis-datasource-ext-sftp + + + 8 + 8 + + + + + com.webank.wedatasphere.exchangis + exchangis-dao + ${project.version} + + + com.webank.wedatasphere.exchangis + exchangis-datasource-linkis + ${project.version} + + + com.webank.wedatasphere.exchangis + exchangis-datasource-core + ${project.version} + + + + + + + org.apache.maven.plugins + maven-deploy-plugin + + + + net.alchim31.maven + scala-maven-plugin + + + org.apache.maven.plugins + maven-jar-plugin + + + + \ No newline at end of file diff --git a/exchangis-datasource/extension-datasources/exchangis-datasource-ext-sftp/src/main/java/com/webank/wedatasphere/exchangis/extension/datasource/sftp/ExchangisSftpDataSource.java b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-sftp/src/main/java/com/webank/wedatasphere/exchangis/extension/datasource/sftp/ExchangisSftpDataSource.java new file mode 100644 index 000000000..b9ecdfbb4 --- /dev/null +++ b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-sftp/src/main/java/com/webank/wedatasphere/exchangis/extension/datasource/sftp/ExchangisSftpDataSource.java @@ -0,0 +1,43 @@ +package com.webank.wedatasphere.exchangis.extension.datasource.sftp; + 
+import com.webank.wedatasphere.exchangis.dao.domain.ExchangisJobParamConfig; +import com.webank.wedatasphere.exchangis.datasource.core.domain.Classifier; +import com.webank.wedatasphere.exchangis.datasource.core.domain.ExchangisDataSourceType; +import com.webank.wedatasphere.exchangis.datasource.core.domain.StructClassifier; +import com.webank.wedatasphere.exchangis.datasource.linkis.ExchangisBatchDataSource; + +import java.util.List; + +public class ExchangisSftpDataSource extends ExchangisBatchDataSource { + + @Override + protected ExchangisDataSourceType type() { + return ExchangisDataSourceType.SFTP; + } + + @Override + public String structClassifier() { + return StructClassifier.NON_STRUCTURED.name; + } + + @Override + public String description() { + return "This is sftp"; + } + + @Override + public String option() { + return "SFTP"; + } + + @Override + public String icon() { + return "icon-sftp"; + } + + @Override + public List getDataSourceParamConfigs() { + return super.getDataSourceParamConfigs(ExchangisDataSourceType.SFTP.name); + } + +} diff --git a/exchangis-datasource/extension-datasources/exchangis-datasource-ext-starrocks/pom.xml b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-starrocks/pom.xml new file mode 100644 index 000000000..d04f4d53f --- /dev/null +++ b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-starrocks/pom.xml @@ -0,0 +1,54 @@ + + + + exchangis + com.webank.wedatasphere.exchangis + ${revision} + ../../../pom.xml + + 4.0.0 + + exchangis-datasource-ext-starrocks + + + 8 + 8 + + + + + com.webank.wedatasphere.exchangis + exchangis-dao + ${project.version} + + + com.webank.wedatasphere.exchangis + exchangis-datasource-linkis + ${project.version} + + + com.webank.wedatasphere.exchangis + exchangis-datasource-core + ${project.version} + + + + + + + org.apache.maven.plugins + maven-deploy-plugin + + + net.alchim31.maven + scala-maven-plugin + + + org.apache.maven.plugins + maven-jar-plugin + + + + \ No newline at end of file diff --git a/exchangis-datasource/extension-datasources/exchangis-datasource-ext-starrocks/src/main/java/com/webank/wedatasphere/exchangis/extension/datasource/starrocks/ExchangisStarRocksDataSource.java b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-starrocks/src/main/java/com/webank/wedatasphere/exchangis/extension/datasource/starrocks/ExchangisStarRocksDataSource.java new file mode 100644 index 000000000..f0732ba53 --- /dev/null +++ b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-starrocks/src/main/java/com/webank/wedatasphere/exchangis/extension/datasource/starrocks/ExchangisStarRocksDataSource.java @@ -0,0 +1,46 @@ +package com.webank.wedatasphere.exchangis.extension.datasource.starrocks; + +import com.webank.wedatasphere.exchangis.dao.domain.ExchangisJobParamConfig; +import com.webank.wedatasphere.exchangis.datasource.core.domain.ExchangisDataSourceType; +import com.webank.wedatasphere.exchangis.datasource.core.domain.StructClassifier; +import com.webank.wedatasphere.exchangis.datasource.linkis.ExchangisBatchDataSource; + +import java.util.List; + +/** + * @author jefftlin + * @date 2024/5/14 + */ +public class ExchangisStarRocksDataSource extends ExchangisBatchDataSource { + + @Override + protected ExchangisDataSourceType type() { + return ExchangisDataSourceType.STARROCKS; + } + @Override + public String description() { + return "This is StarRocks DataSource"; + } + + @Override + public String option() { + return "StarRocks数据库"; + } + + @Override + 
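+    // StarRocks speaks the MySQL protocol and stores relational tables, hence classified as structured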
public String structClassifier() { + return StructClassifier.STRUCTURED.name; + } + + @Override + public String icon() { + return "icon-starrocks"; + } + + @Override + public List getDataSourceParamConfigs() { + return super.getDataSourceParamConfigs(ExchangisDataSourceType.STARROCKS.name); + } + + +} diff --git a/exchangis-datasource/extension-datasources/exchangis-datasource-ext-tdsql/pom.xml b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-tdsql/pom.xml new file mode 100644 index 000000000..7ba9ba944 --- /dev/null +++ b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-tdsql/pom.xml @@ -0,0 +1,38 @@ + + + + exchangis + com.webank.wedatasphere.exchangis + ${revision} + ../../../pom.xml + + 4.0.0 + + exchangis-datasource-ext-tdsql + + + 8 + 8 + + + + + com.webank.wedatasphere.exchangis + exchangis-dao + ${revision} + + + com.webank.wedatasphere.exchangis + exchangis-datasource-linkis + ${revision} + + + com.webank.wedatasphere.exchangis + exchangis-datasource-core + ${revision} + + + + \ No newline at end of file diff --git a/exchangis-datasource/extension-datasources/exchangis-datasource-ext-tdsql/src/main/java/com/webank/wedatasphere/exchangis/extension/datasource/tdsql/ExchangisTdsqlDataSource.java b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-tdsql/src/main/java/com/webank/wedatasphere/exchangis/extension/datasource/tdsql/ExchangisTdsqlDataSource.java new file mode 100644 index 000000000..2f5908022 --- /dev/null +++ b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-tdsql/src/main/java/com/webank/wedatasphere/exchangis/extension/datasource/tdsql/ExchangisTdsqlDataSource.java @@ -0,0 +1,36 @@ +package com.webank.wedatasphere.exchangis.extension.datasource.tdsql; + +import com.webank.wedatasphere.exchangis.datasource.core.domain.ExchangisDataSourceType; +import com.webank.wedatasphere.exchangis.datasource.core.domain.StructClassifier; +import com.webank.wedatasphere.exchangis.datasource.linkis.ExchangisBatchDataSource; + +/** + * @author jefftlin + * @date 2024/5/27 + */ +public class ExchangisTdsqlDataSource extends ExchangisBatchDataSource { + + @Override + protected ExchangisDataSourceType type() { + return ExchangisDataSourceType.TDSQL; + } + @Override + public String description() { + return "This is tdsql DataSource"; + } + + @Override + public String option() { + return "Tdsql数据库"; + } + + @Override + public String structClassifier() { + return StructClassifier.STRUCTURED.name; + } + + @Override + public String icon() { + return "icon-tdsql"; + } +} \ No newline at end of file diff --git a/exchangis-datasource/pom.xml b/exchangis-datasource/pom.xml new file mode 100644 index 000000000..b22e15b49 --- /dev/null +++ b/exchangis-datasource/pom.xml @@ -0,0 +1,38 @@ + + + + exchangis + com.webank.wedatasphere.exchangis + ${revision} + ../pom.xml + + 4.0.0 + + exchangis-datasource + pom + + + exchangis-datasource-core + exchangis-datasource-loader + exchangis-datasource-linkis + exchangis-datasource-streamis + exchangis-datasource-service + extension-datasources/exchangis-datasource-ext-mysql + extension-datasources/exchangis-datasource-ext-hive + extension-datasources/exchangis-datasource-ext-sftp + extension-datasources/exchangis-datasource-ext-elasticsearch + extension-datasources/exchangis-datasource-ext-mongodb + extension-datasources/exchangis-datasource-ext-oracle + extension-datasources/exchangis-datasource-ext-starrocks + extension-datasources/exchangis-datasource-ext-tdsql + 
exchangis-datasource-server + + + + 8 + 8 + + + \ No newline at end of file diff --git a/exchangis-engines/engineconn-plugins/datax/pom.xml b/exchangis-engines/engineconn-plugins/datax/pom.xml new file mode 100644 index 000000000..cd9b069df --- /dev/null +++ b/exchangis-engines/engineconn-plugins/datax/pom.xml @@ -0,0 +1,225 @@ + + + + exchangis + com.webank.wedatasphere.exchangis + ${revision} + ../../../pom.xml + + 4.0.0 + + linkis-engineplugin-datax + + + 1.4.0 + 3.0.0-Plus-2 + 3.0.0 + 3.3.4 + 1.0.15 + 2.1.9 + 0.3 + 1.0.5 + 2.0 + 1.2 + 1.10 + + + + com.webank.wedatasphere.exchangis + datax-core + ${datax.engine.version} + + + hadoop-annotations + org.apache.hadoop + + + fastjson + com.alibaba + + + hadoop-common + org.apache.hadoop + + + + + net.sourceforge.javacsv + javacsv + ${csv.version} + + + com.alibaba + druid + ${druid.version} + + + org.codehaus.groovy + groovy-all + ${groovy.version} + + + io.airlift + aircompressor + ${aircompressor.version} + + + org.anarres.lzo + lzo-core + ${lzo.version} + + + org.apache.linkis + linkis-once-engineconn + ${linkis.version} + + + org.apache.linkis + linkis-engineconn-plugin-core + ${linkis.version} + + + org.apache.linkis + linkis-storage + ${linkis.version} + provided + + + org.apache.linkis + linkis-common + ${linkis.version} + + + commons-cli + commons-cli + ${commons-cli-version} + + + commons-configuration + commons-configuration + ${commons-configuration-version} + + + org.apache.linkis + linkis-computation-engineconn + 1.4.0 + + + org.apache.linkis + linkis-udf-common + 1.4.0 + + + org.apache.linkis + linkis-udf-client + 1.4.0 + + + org.apache.linkis + linkis-module + + + org.springframework.cloud + spring-cloud-commons + + + + + + org.jasypt + jasypt + ${jasypt.version} + + + com.fasterxml.jackson.core + jackson-core + ${jackson-core.version} + + + + + + com.diffplug.spotless + spotless-maven-plugin + + + + check + + none + + + + + org.apache.maven.plugins + maven-deploy-plugin + + + net.alchim31.maven + scala-maven-plugin + + + org.apache.maven.plugins + maven-jar-plugin + + + org.apache.maven.plugins + maven-assembly-plugin + 2.3 + false + + + make-assembly + package + + single + + + + src/main/assembly/distribution.xml + + + + + + false + out + false + false + + src/main/assembly/distribution.xml + + + + + + + src/main/java + + **/*.xml + **/*.properties + + + + src/main/resources + + **/application.yml + **/bootstrap.yml + + + + + + + + apache.snapshots + Apache Snapshot Repository + https://repository.apache.org/content/repositories/snapshots/ + + true + + + + \ No newline at end of file diff --git a/exchangis-engines/engineconn-plugins/datax/src/main/assembly/distribution.xml b/exchangis-engines/engineconn-plugins/datax/src/main/assembly/distribution.xml new file mode 100644 index 000000000..dee47b9d9 --- /dev/null +++ b/exchangis-engines/engineconn-plugins/datax/src/main/assembly/distribution.xml @@ -0,0 +1,320 @@ + + + + datax + + dir + zip + + true + datax + + + + + + /dist/${datax.version}/lib + true + true + false + false + true + + antlr:antlr:jar + aopalliance:aopalliance:jar + asm:asm:jar + cglib:cglib:jar + com.amazonaws:aws-java-sdk-autoscaling:jar + com.amazonaws:aws-java-sdk-core:jar + com.amazonaws:aws-java-sdk-ec2:jar + com.amazonaws:aws-java-sdk-route53:jar + com.amazonaws:aws-java-sdk-sts:jar + com.amazonaws:jmespath-java:jar + com.fasterxml.jackson.core:jackson-annotations:jar + com.fasterxml.jackson.core:jackson-core:jar + com.fasterxml.jackson.core:jackson-databind:jar + 
com.fasterxml.jackson.dataformat:jackson-dataformat-cbor:jar + com.fasterxml.jackson.datatype:jackson-datatype-jdk8:jar + com.fasterxml.jackson.datatype:jackson-datatype-jsr310:jar + com.fasterxml.jackson.jaxrs:jackson-jaxrs-base:jar + com.fasterxml.jackson.jaxrs:jackson-jaxrs-json-provider:jar + com.fasterxml.jackson.module:jackson-module-jaxb-annotations:jar + com.fasterxml.jackson.module:jackson-module-parameter-names:jar + com.fasterxml.jackson.module:jackson-module-paranamer:jar + com.fasterxml.jackson.module:jackson-module-scala_2.11:jar + com.github.andrewoma.dexx:dexx-collections:jar + com.github.vlsi.compactmap:compactmap:jar + com.google.code.findbugs:annotations:jar + com.google.code.findbugs:jsr305:jar + com.google.code.gson:gson:jar + com.google.guava:guava:jar + com.google.inject:guice:jar + com.google.protobuf:protobuf-java:jar + com.netflix.archaius:archaius-core:jar + com.netflix.eureka:eureka-client:jar + com.netflix.eureka:eureka-core:jar + com.netflix.hystrix:hystrix-core:jar + com.netflix.netflix-commons:netflix-commons-util:jar + com.netflix.netflix-commons:netflix-eventbus:jar + com.netflix.netflix-commons:netflix-infix:jar + com.netflix.netflix-commons:netflix-statistics:jar + com.netflix.ribbon:ribbon:jar + com.netflix.ribbon:ribbon-core:jar + com.netflix.ribbon:ribbon-eureka:jar + com.netflix.ribbon:ribbon-httpclient:jar + com.netflix.ribbon:ribbon-loadbalancer:jar + com.netflix.ribbon:ribbon-transport:jar + com.netflix.servo:servo-core:jar + com.ning:async-http-client:jar + com.sun.jersey.contribs:jersey-apache-client4:jar + com.sun.jersey:jersey-client:jar + com.sun.jersey:jersey-core:jar + com.sun.jersey:jersey-json:jar + com.sun.jersey:jersey-server:jar + com.sun.jersey:jersey-servlet:jar + com.sun.xml.bind:jaxb-impl:jar + com.thoughtworks.paranamer:paranamer:jar + com.thoughtworks.xstream:xstream:jar + org.apache.linkis:linkis-common:jar + org.apache.linkis:linkis-module:jar + commons-beanutils:commons-beanutils:jar + commons-beanutils:commons-beanutils-core:jar + commons-cli:commons-cli:jar + commons-codec:commons-codec:jar + commons-collections:commons-collections:jar + commons-configuration:commons-configuration:jar + commons-daemon:commons-daemon:jar + commons-dbcp:commons-dbcp:jar + commons-digester:commons-digester:jar + commons-httpclient:commons-httpclient:jar + commons-io:commons-io:jar + commons-jxpath:commons-jxpath:jar + commons-lang:commons-lang:jar + commons-logging:commons-logging:jar + commons-net:commons-net:jar + commons-pool:commons-pool:jar + io.micrometer:micrometer-core:jar + io.netty:netty:jar + io.netty:netty-all:jar + io.netty:netty-buffer:jar + io.netty:netty-codec:jar + io.netty:netty-codec-http:jar + io.netty:netty-common:jar + io.netty:netty-handler:jar + io.netty:netty-transport:jar + io.netty:netty-transport-native-epoll:jar + io.reactivex:rxjava:jar + io.reactivex:rxnetty:jar + io.reactivex:rxnetty-contexts:jar + io.reactivex:rxnetty-servo:jar + javax.activation:activation:jar + javax.annotation:javax.annotation-api:jar + javax.inject:javax.inject:jar + javax.servlet:javax.servlet-api:jar + javax.servlet.jsp:jsp-api:jar + javax.validation:validation-api:jar + javax.websocket:javax.websocket-api:jar + javax.ws.rs:javax.ws.rs-api:jar + javax.xml.bind:jaxb-api:jar + javax.xml.stream:stax-api:jar + joda-time:joda-time:jar + log4j:log4j:jar + mysql:mysql-connector-java:jar + net.databinder.dispatch:dispatch-core_2.11:jar + net.databinder.dispatch:dispatch-json4s-jackson_2.11:jar + org.antlr:antlr-runtime:jar + 
org.antlr:stringtemplate:jar + org.apache.commons:commons-compress:jar + org.apache.commons:commons-math:jar + org.apache.commons:commons-math3:jar + org.apache.curator:curator-client:jar + org.apache.curator:curator-framework:jar + org.apache.curator:curator-recipes:jar + org.apache.directory.api:api-asn1-api:jar + org.apache.directory.api:api-util:jar + org.apache.directory.server:apacheds-i18n:jar + org.apache.directory.server:apacheds-kerberos-codec:jar + org.apache.hadoop:hadoop-annotations:jar + org.apache.hadoop:hadoop-auth:jar + org.apache.hadoop:hadoop-common:jar + org.apache.hadoop:hadoop-hdfs:jar + org.apache.htrace:htrace-core:jar + org.apache.httpcomponents:httpclient:jar + org.apache.httpcomponents:httpcore:jar + org.apache.logging.log4j:log4j-api:jar + org.apache.logging.log4j:log4j-core:jar + org.apache.logging.log4j:log4j-jul:jar + org.apache.logging.log4j:log4j-slf4j-impl:jar + org.apache.zookeeper:zookeeper:jar + org.aspectj:aspectjweaver:jar + org.bouncycastle:bcpkix-jdk15on:jar + org.bouncycastle:bcprov-jdk15on:jar + org.codehaus.jackson:jackson-jaxrs:jar + org.codehaus.jackson:jackson-xc:jar + org.codehaus.jettison:jettison:jar + org.codehaus.woodstox:stax2-api:jar + org.codehaus.woodstox:woodstox-core-asl:jar + org.eclipse.jetty:jetty-annotations:jar + org.eclipse.jetty:jetty-client:jar + org.eclipse.jetty:jetty-continuation:jar + org.eclipse.jetty:jetty-http:jar + org.eclipse.jetty:jetty-io:jar + org.eclipse.jetty:jetty-jndi:jar + org.eclipse.jetty:jetty-plus:jar + org.eclipse.jetty:jetty-security:jar + org.eclipse.jetty:jetty-server:jar + org.eclipse.jetty:jetty-servlet:jar + org.eclipse.jetty:jetty-servlets:jar + org.eclipse.jetty:jetty-util:jar + org.eclipse.jetty:jetty-webapp:jar + org.eclipse.jetty:jetty-xml:jar + org.eclipse.jetty.websocket:javax-websocket-client-impl:jar + org.eclipse.jetty.websocket:javax-websocket-server-impl:jar + org.eclipse.jetty.websocket:websocket-api:jar + org.eclipse.jetty.websocket:websocket-client:jar + org.eclipse.jetty.websocket:websocket-common:jar + org.eclipse.jetty.websocket:websocket-server:jar + org.eclipse.jetty.websocket:websocket-servlet:jar + org.fusesource.leveldbjni:leveldbjni-all:jar + org.glassfish.hk2:class-model:jar + org.glassfish.hk2:config-types:jar + org.glassfish.hk2.external:aopalliance-repackaged:jar + org.glassfish.hk2.external:asm-all-repackaged:jar + org.glassfish.hk2.external:bean-validator:jar + org.glassfish.hk2.external:javax.inject:jar + org.glassfish.hk2:hk2:jar + org.glassfish.hk2:hk2-api:jar + org.glassfish.hk2:hk2-config:jar + org.glassfish.hk2:hk2-core:jar + org.glassfish.hk2:hk2-locator:jar + org.glassfish.hk2:hk2-runlevel:jar + org.glassfish.hk2:hk2-utils:jar + org.glassfish.hk2:osgi-resource-locator:jar + org.glassfish.hk2:spring-bridge:jar + org.glassfish.jersey.bundles:jaxrs-ri:jar + org.glassfish.jersey.bundles.repackaged:jersey-guava:jar + org.glassfish.jersey.containers:jersey-container-servlet:jar + org.glassfish.jersey.containers:jersey-container-servlet-core:jar + org.glassfish.jersey.core:jersey-client:jar + org.glassfish.jersey.core:jersey-common:jar + org.glassfish.jersey.core:jersey-server:jar + org.glassfish.jersey.ext:jersey-entity-filtering:jar + org.glassfish.jersey.ext:jersey-spring3:jar + org.glassfish.jersey.media:jersey-media-jaxb:jar + org.glassfish.jersey.media:jersey-media-json-jackson:jar + org.glassfish.jersey.media:jersey-media-multipart:jar + org.hdrhistogram:HdrHistogram:jar + org.javassist:javassist:jar + org.json4s:json4s-ast_2.11:jar + 
org.json4s:json4s-core_2.11:jar + org.json4s:json4s-jackson_2.11:jar + org.jsoup:jsoup:jar + org.jvnet.mimepull:mimepull:jar + org.jvnet:tiger-types:jar + org.latencyutils:LatencyUtils:jar + org.mortbay.jasper:apache-el:jar + org.mortbay.jetty:jetty:jar + org.mortbay.jetty:jetty-util:jar + org.ow2.asm:asm-analysis:jar + org.ow2.asm:asm-commons:jar + org.ow2.asm:asm-tree:jar + org.reflections:reflections:jar + org.scala-lang.modules:scala-parser-combinators_2.11:jar + org.scala-lang.modules:scala-xml_2.11:jar + org.scala-lang:scala-compiler:jar + org.scala-lang:scala-library:jar + org.scala-lang:scala-reflect:jar + org.scala-lang:scalap:jar + org.slf4j:jul-to-slf4j:jar + org.slf4j:slf4j-api:jar + org.springframework.boot:spring-boot:jar + org.springframework.boot:spring-boot-actuator:jar + org.springframework.boot:spring-boot-actuator-autoconfigure:jar + org.springframework.boot:spring-boot-autoconfigure:jar + org.springframework.boot:spring-boot-starter:jar + org.springframework.boot:spring-boot-starter-actuator:jar + org.springframework.boot:spring-boot-starter-aop:jar + org.springframework.boot:spring-boot-starter-jetty:jar + org.springframework.boot:spring-boot-starter-json:jar + org.springframework.boot:spring-boot-starter-log4j2:jar + org.springframework.boot:spring-boot-starter-web:jar + org.springframework.cloud:spring-cloud-commons:jar + org.springframework.cloud:spring-cloud-config-client:jar + org.springframework.cloud:spring-cloud-context:jar + org.springframework.cloud:spring-cloud-netflix-archaius:jar + org.springframework.cloud:spring-cloud-netflix-core:jar + org.springframework.cloud:spring-cloud-netflix-eureka-client:jar + org.springframework.cloud:spring-cloud-netflix-ribbon:jar + org.springframework.cloud:spring-cloud-starter:jar + org.springframework.cloud:spring-cloud-starter-config:jar + org.springframework.cloud:spring-cloud-starter-eureka:jar + org.springframework.cloud:spring-cloud-starter-netflix-archaius:jar + org.springframework.cloud:spring-cloud-starter-netflix-eureka-client:jar + org.springframework.cloud:spring-cloud-starter-netflix-ribbon:jar + org.springframework.security:spring-security-crypto:jar + org.springframework.security:spring-security-rsa:jar + org.springframework:spring-aop:jar + org.springframework:spring-beans:jar + org.springframework:spring-context:jar + org.springframework:spring-core:jar + org.springframework:spring-expression:jar + org.springframework:spring-jcl:jar + org.springframework:spring-web:jar + org.springframework:spring-webmvc:jar + org.tukaani:xz:jar + org.yaml:snakeyaml:jar + software.amazon.ion:ion-java:jar + xerces:xercesImpl:jar + xmlenc:xmlenc:jar + xmlpull:xmlpull:jar + xpp3:xpp3_min:jar + + + + + + + ${basedir}/src/main/resources + + * + + 0777 + 0755 + /dist/${datax.version}/conf + unix + + + ${basedir}/target + + *.jar + + + *doc.jar + + 0777 + /plugin/${datax.version} + + + + + diff --git a/exchangis-engines/engineconn-plugins/datax/src/main/java/org/apache/linkis/engineconnplugin/datax/exception/DataxJobExecutionException.java b/exchangis-engines/engineconn-plugins/datax/src/main/java/org/apache/linkis/engineconnplugin/datax/exception/DataxJobExecutionException.java new file mode 100644 index 000000000..a92a59395 --- /dev/null +++ b/exchangis-engines/engineconn-plugins/datax/src/main/java/org/apache/linkis/engineconnplugin/datax/exception/DataxJobExecutionException.java @@ -0,0 +1,21 @@ +package org.apache.linkis.engineconnplugin.datax.exception; + +import org.apache.linkis.common.exception.ErrorException; + +public 
class DataxJobExecutionException extends ErrorException { + + public static final int ERROR_CODE = 16023; + + public DataxJobExecutionException(String message) { + super(ERROR_CODE, message); + } + + public DataxJobExecutionException(int errCode, String desc) { + super(errCode, desc); + } + + public DataxJobExecutionException(String message, Throwable e) { + super(ERROR_CODE, message); + this.initCause(e); + } +} diff --git a/exchangis-engines/engineconn-plugins/datax/src/main/java/org/apache/linkis/engineconnplugin/datax/exception/DataxPluginLoadException.java b/exchangis-engines/engineconn-plugins/datax/src/main/java/org/apache/linkis/engineconnplugin/datax/exception/DataxPluginLoadException.java new file mode 100644 index 000000000..908b55bf9 --- /dev/null +++ b/exchangis-engines/engineconn-plugins/datax/src/main/java/org/apache/linkis/engineconnplugin/datax/exception/DataxPluginLoadException.java @@ -0,0 +1,16 @@ +package org.apache.linkis.engineconnplugin.datax.exception; + +import org.apache.linkis.common.exception.ErrorException; + +/** + * Plugin load exception + */ +public class DataxPluginLoadException extends ErrorException { + + public static final int ERROR_CODE = 16022; + + public DataxPluginLoadException(String message, String desc) { + super(ERROR_CODE, message); + + } +} diff --git a/exchangis-engines/engineconn-plugins/datax/src/main/java/org/apache/linkis/engineconnplugin/datax/plugin/PluginBmlResource.java b/exchangis-engines/engineconn-plugins/datax/src/main/java/org/apache/linkis/engineconnplugin/datax/plugin/PluginBmlResource.java new file mode 100644 index 000000000..dd90b03e4 --- /dev/null +++ b/exchangis-engines/engineconn-plugins/datax/src/main/java/org/apache/linkis/engineconnplugin/datax/plugin/PluginBmlResource.java @@ -0,0 +1,36 @@ +package org.apache.linkis.engineconnplugin.datax.plugin; + +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; + +/** + * BML resources + */ +@JsonIgnoreProperties(ignoreUnknown = true) +public class PluginBmlResource extends PluginResource{ + + /** + * Resource id + */ + private String resourceId; + + /** + * Version + */ + private String version; + + public String getResourceId() { + return resourceId; + } + + public void setResourceId(String resourceId) { + this.resourceId = resourceId; + } + + public String getVersion() { + return version; + } + + public void setVersion(String version) { + this.version = version; + } +} diff --git a/exchangis-engines/engineconn-plugins/datax/src/main/java/org/apache/linkis/engineconnplugin/datax/plugin/PluginResource.java b/exchangis-engines/engineconn-plugins/datax/src/main/java/org/apache/linkis/engineconnplugin/datax/plugin/PluginResource.java new file mode 100644 index 000000000..d2c69d818 --- /dev/null +++ b/exchangis-engines/engineconn-plugins/datax/src/main/java/org/apache/linkis/engineconnplugin/datax/plugin/PluginResource.java @@ -0,0 +1,62 @@ +package org.apache.linkis.engineconnplugin.datax.plugin; + +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; + +/** + * Define the plugin resource + */ +@JsonIgnoreProperties(ignoreUnknown = true) +public class PluginResource { + + /** + * Resource name + */ + protected String name; + + /** + * Resource type + */ + protected String type; + + /** + * Resource path + */ + protected String path; + + /** + * Resource creator + */ + protected String creator; + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public String getType() { + return type; + } + + 
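+    /*
+     * These resource beans are meant to be bound from JSON (note the
+     * @JsonIgnoreProperties(ignoreUnknown = true) on the class), e.g. with Jackson's
+     * com.fasterxml.jackson.databind.ObjectMapper. A hypothetical read, shown only
+     * as an illustration:
+     *
+     *   PluginResource res = new ObjectMapper().readValue(json, PluginBmlResource.class);
+     */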
+    public void setType(String type) {
+        this.type = type;
+    }
+
+    public String getPath() {
+        return path;
+    }
+
+    public void setPath(String path) {
+        this.path = path;
+    }
+
+    public String getCreator() {
+        return creator;
+    }
+
+    public void setCreator(String creator) {
+        this.creator = creator;
+    }
+}
diff --git a/exchangis-engines/engineconn-plugins/datax/src/main/java/org/apache/linkis/engineconnplugin/datax/utils/SecretUtils.java b/exchangis-engines/engineconn-plugins/datax/src/main/java/org/apache/linkis/engineconnplugin/datax/utils/SecretUtils.java
new file mode 100644
index 000000000..3fb28566b
--- /dev/null
+++ b/exchangis-engines/engineconn-plugins/datax/src/main/java/org/apache/linkis/engineconnplugin/datax/utils/SecretUtils.java
@@ -0,0 +1,32 @@
+package org.apache.linkis.engineconnplugin.datax.utils;
+
+import com.alibaba.datax.common.util.Configuration;
+import org.apache.commons.lang3.StringUtils;
+
+import java.util.Set;
+
+/**
+ * Security utilities
+ */
+public class SecretUtils {
+    /**
+     * Extracted from the DataX 'Engine' class: masks values whose keys end with a
+     * sensitive suffix before the configuration is logged.
+     */
+    public static Configuration filterSensitiveConfiguration(Configuration configuration) {
+        Set<String> keys = configuration.getKeys();
+        String[] sensitiveSuffixes = new String[]{"password", "accessKey", "path"};
+        for (final String key : keys) {
+            boolean isSensitive = false;
+            for (String suffix : sensitiveSuffixes) {
+                if (StringUtils.endsWithIgnoreCase(key, suffix)) {
+                    isSensitive = true;
+                    break;
+                }
+            }
+            if (isSensitive && configuration.get(key) instanceof String) {
+                configuration.set(key, configuration.getString(key).replaceAll("[\\s\\S]", "*"));
+            }
+        }
+        return configuration;
+    }
+}
diff --git a/exchangis-engines/engineconn-plugins/datax/src/main/resources/linkis-engineconn.properties b/exchangis-engines/engineconn-plugins/datax/src/main/resources/linkis-engineconn.properties
new file mode 100644
index 000000000..e836a184a
--- /dev/null
+++ b/exchangis-engines/engineconn-plugins/datax/src/main/resources/linkis-engineconn.properties
@@ -0,0 +1,44 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#     http://www.apache.org/licenses/LICENSE-2.0
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
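+#
+# Note: the DataX engine conn resolves its keys through CommonVars, so the
+# "core.*" / "setting.*" defaults below can also be overridden in this file or
+# per job. Illustrative overrides (values are examples, not shipped defaults):
+#
+#   wds.linkis.engineconn.datax.fetch.status.interval=10s
+#   setting.speed.byte=2097152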
+# + +wds.linkis.server.version=v1 +#wds.linkis.engineconn.debug.enable=true +#wds.linkis.keytab.enable=true +wds.linkis.engineconn.plugin.default.class=org.apache.linkis.engineconnplugin.datax.DataxEngineConnPlugin +wds.linkis.engine.connector.hooks=org.apache.linkis.engineconn.once.executor.hook.OnceEngineConnHook + +# Core configuration +#core.transport.type=record +#core.transport.channel.speed.byte=5242880 +#core.transport.channel.speed.record=10000 +#core.transport.channel.flowControlInterval=20 +#core.transport.channel.capacity=512 +#core.transport.channel.byteCapacity=67108864 +#core.transport.record.channel.class=com.alibaba.datax.core.transport.channel.memory.MemoryRecordChannel +#core.transport.record.exchanger.class=com.alibaba.datax.core.plugin.BufferedRecordExchanger +#core.transport.record.exchanger.bufferSize=32 +#core.transport.stream.channel.class=com.webank.wedatasphere.exchangis.datax.core.transport.channel.memory.MemoryStreamChannel +#core.transport.stream.channel.bufferSize=8192 +#core.container.job.reportInterval=5000 +#core.container.job.sleepInterval=5000 +#core.container.taskGroup.reportInterval=5000 +#core.container.taskGroup.sleepInterval=100 +#core.container.taskGroup.channel=5 +#core.container.trace.enable=false +#core.statistics.collector.plugin.taskClass=com.alibaba.datax.core.statistics.plugin.task.StdoutPluginCollector +#core.statistics.collector.plugin.maxDirtyNumber=10 +#core.processor.loader.plugin.class=com.webank.wedatasphere.exchangis.datax.core.processor.loader.plugin.DefaultPluginProcessorLoader +#core.processor.loader.plugin.package=com.webank.wedatasphere.exchangis.datax.core.processor.impl +#core.processor.loader.plugin.sourcePath=proc/src \ No newline at end of file diff --git a/exchangis-engines/engineconn-plugins/datax/src/main/resources/log4j2.xml b/exchangis-engines/engineconn-plugins/datax/src/main/resources/log4j2.xml new file mode 100644 index 000000000..3b45ae2a1 --- /dev/null +++ b/exchangis-engines/engineconn-plugins/datax/src/main/resources/log4j2.xml @@ -0,0 +1,82 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/DataxEngineConnPlugin.scala b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/DataxEngineConnPlugin.scala new file mode 100644 index 000000000..996e4da90 --- /dev/null +++ b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/DataxEngineConnPlugin.scala @@ -0,0 +1,67 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.engineconnplugin.datax + +import org.apache.linkis.engineconnplugin.datax.factory.DataxEngineConnFactory +import org.apache.linkis.engineconnplugin.datax.launch.DataxEngineConnLaunchBuilder +import org.apache.linkis.engineconnplugin.datax.resource.DataxEngineConnResourceFactory +import org.apache.linkis.manager.engineplugin.common.EngineConnPlugin +import org.apache.linkis.manager.engineplugin.common.creation.EngineConnFactory +import org.apache.linkis.manager.engineplugin.common.launch.EngineConnLaunchBuilder +import org.apache.linkis.manager.engineplugin.common.resource.EngineResourceFactory +import org.apache.linkis.manager.label.entity.Label + +import java.util.List +import java.util.ArrayList + +class DataxEngineConnPlugin extends EngineConnPlugin { + + private var engineResourceFactory: EngineResourceFactory = _ + private val engineResourceFactoryLocker = new Array[Byte](0) + + private var engineConnFactory: EngineConnFactory = _ + private val engineConnFactoryLocker = new Array[Byte](0) + + override def init(params: java.util.Map[_root_.scala.Predef.String, scala.AnyRef]): Unit = {} + + override def getEngineResourceFactory: EngineResourceFactory = { + if (null == engineResourceFactory) engineResourceFactoryLocker.synchronized { + if (null == engineResourceFactory) { + engineResourceFactory = new DataxEngineConnResourceFactory + } + } + engineResourceFactory + } + + override def getEngineConnLaunchBuilder: EngineConnLaunchBuilder = { + new DataxEngineConnLaunchBuilder + } + + override def getEngineConnFactory: EngineConnFactory = { + if (null == engineConnFactory) engineConnFactoryLocker.synchronized { + if (null == engineConnFactory) { + engineConnFactory = new DataxEngineConnFactory + } + } + engineConnFactory + } + + override def getDefaultLabels: List[Label[_]] = { + new ArrayList[Label[_]]() + } +} diff --git a/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/config/DataxConfiguration.scala b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/config/DataxConfiguration.scala new file mode 100644 index 000000000..1c84c5499 --- /dev/null +++ b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/config/DataxConfiguration.scala @@ -0,0 +1,52 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.engineconnplugin.datax.config + +import org.apache.linkis.common.conf.{CommonVars, TimeType} + +/** + * Datax basic config + */ +object DataxConfiguration { + + val CONFIG_PREFIX: String = "_datax_." 
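+  /*
+   * All of the keys below are resolved through linkis-common's CommonVars, so they
+   * can be supplied via linkis-engineconn.properties or JVM system properties.
+   * A typical read, as used elsewhere in this plugin (illustrative only):
+   *
+   *   val intervalMs: Long = DataxConfiguration.STATUS_FETCH_INTERVAL.getValue.toLong
+   *   val execId: String = DataxConfiguration.JOB_EXECUTION_ID.getValue
+   */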
+ /** + * Environment config name + */ + val ENV_CONFIG_NAME: CommonVars[String] = CommonVars[String]("datax.env.config.name", "entry.environment") + + /** + * Fetch interval + */ + val STATUS_FETCH_INTERVAL: CommonVars[TimeType] = CommonVars("wds.linkis.engineconn.datax.fetch.status.interval", new TimeType("5s")) + + /** + * Execution id + */ + val JOB_EXECUTION_ID: CommonVars[String] = CommonVars[String]("wds.linkis.engineconn.datax.execution.id", "") + + /** + * Plugin resources + */ + val PLUGIN_RESOURCES: CommonVars[String] = CommonVars[String]("wds.linkis.engineconn.datax.bml.resources", "") + + /** + * Security manager + */ + val SECURITY_MANAGER_CLASSES: CommonVars[String] = CommonVars[String]("wds.linkis.engineconn.datax.security.manager", "") +} diff --git a/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/config/DataxCoreConfiguration.scala b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/config/DataxCoreConfiguration.scala new file mode 100644 index 000000000..29aa81749 --- /dev/null +++ b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/config/DataxCoreConfiguration.scala @@ -0,0 +1,160 @@ +package org.apache.linkis.engineconnplugin.datax.config + +import com.alibaba.datax.core.util.container.CoreConstant +import org.apache.linkis.common.conf.CommonVars + +/** + * Core configuration in datax + */ +object DataxCoreConfiguration { + + /** + * Format for 'datetime' column + */ + val COMMON_COLUMN_DATETIME_FORMAT: CommonVars[String] = CommonVars("common.column.datetimeFormat", "yyyy-MM-dd HH:mm:ss"); + + /** + * Format for 'time' column + */ + val COMMON_COLUMN_TIME_FORMAT: CommonVars[String] = CommonVars("common.column.timeFormat", "HH:mm:ss") + + /** + * Format for 'date' column + */ + val COMMON_COLUMN_DATE_FORMAT: CommonVars[String] = CommonVars("common.column.dateFormat", "yyyy-MM-dd") + + /** + * Extra format for 'date','datetime' and 'time' + */ + val COMMON_COLUMN_EXTRA_FORMATS: CommonVars[String] = CommonVars("common.column.extraFormats", "yyyy-MM-dd") + + /** + * TimeZone + */ + val COMMON_COLUMN_TIMEZONE: CommonVars[String] = CommonVars("common.column.timeZone", "GMT+8") + + /** + * Encoding + */ + val COMMON_COLUMN_ENCODING: CommonVars[String] = CommonVars("common.column.encoding", "utf-8") + + /** + * Container model + */ + val CORE_CONTAINER_MODEL: CommonVars[String] = CommonVars(CoreConstant.DATAX_CORE_CONTAINER_MODEL, "job") + + /** + * Transport type + */ + val CORE_TRANSPORT_TYPE: CommonVars[String] = CommonVars(CoreConstant.DATAX_CORE_TRANSPORT_TYPE, "record") + + /** + * Channel speed in byte + */ + val CORE_TRANSPORT_CHANNEL_SPEED_BYTE: CommonVars[Int] = CommonVars(CoreConstant.DATAX_CORE_TRANSPORT_CHANNEL_SPEED_BYTE, 5242880) + + /** + * Channel speed in record + */ + val CORE_TRANSPORT_CHANNEL_SPEED_RECORD: CommonVars[Int] = CommonVars(CoreConstant.DATAX_CORE_TRANSPORT_CHANNEL_SPEED_RECORD, 10000) + + /** + * Flow control interval + */ + val CORE_TRANSPORT_CHANNEL_FLOW_CONTROL_INTERNAL: CommonVars[Int] = CommonVars(CoreConstant.DATAX_CORE_TRANSPORT_CHANNEL_FLOWCONTROLINTERVAL, 20) + + /** + * Channel capacity in record(s) + */ + val CORE_TRANSPORT_CHANNEL_CAPACITY: CommonVars[Int] = CommonVars(CoreConstant.DATAX_CORE_TRANSPORT_CHANNEL_CAPACITY, 512) + + /** + * Channel capacity in byte(s) + */ + val CORE_TRANSPORT_CHANNEL_BYTE_CAPACITY: CommonVars[Int] = 
CommonVars(CoreConstant.DATAX_CORE_TRANSPORT_CHANNEL_CAPACITY_BYTE, 67108864) + + /** + * Record channel class + */ + val CORE_TRANSPORT_RECORD_CHANNEL_CLASS: CommonVars[String] = CommonVars(CoreConstant.DATAX_CORE_TRANSPORT_RECORD_CHANNEL_CLASS, "com.alibaba.datax.core.transport.channel.memory.MemoryRecordChannel") + + /** + * Record exchanger class + */ + val CORE_TRANSPORT_RECORD_EXCHANGER_CLASS: CommonVars[String] = CommonVars("core.transport.record.exchanger.class", "com.alibaba.datax.core.plugin.BufferedRecordExchanger") + + /** + * Buffer size of record exchanger + */ + val CORE_TRANSPORT_RECORD_EXCHANGER_BUFFER_SIZE: CommonVars[Int] = CommonVars(CoreConstant.DATAX_CORE_TRANSPORT_RECORD_EXCHANGER_BUFFERSIZE, 32) + + /** + * Stream channel class + */ + val CORE_TRANSPORT_STREAM_CHANNEL_CLASS: CommonVars[String] = CommonVars(CoreConstant.DATAX_CORE_TRANSPORT_STREAM_CHANNEL_CLASS, "com.alibaba.datax.core.transport.channel.memory.MemoryStreamChannel") + + /** + * Block size of stream channel + */ + val CORE_TRANSPORT_STREAM_CHANNEL_BLOCK_SIZE: CommonVars[Int] = CommonVars("core.transport.stream.channel.bufferSize", 8192) + + /** + * Job report interval + */ + val CORE_CONTAINER_JOB_REPORT_INTERVAL: CommonVars[Int] = CommonVars(CoreConstant.DATAX_CORE_CONTAINER_JOB_REPORTINTERVAL, 5000) + + /** + * Job sleep interval + */ + val CORE_CONTAINER_JOB_SLEEP_INTERNAL: CommonVars[Int] = CommonVars(CoreConstant.DATAX_CORE_CONTAINER_JOB_SLEEPINTERVAL, 5000) + + /** + * Task group report interval + */ + val CORE_CONTAINER_TASK_GROUP_REPORT_INTERVAL: CommonVars[Int] = CommonVars(CoreConstant.DATAX_CORE_CONTAINER_TASKGROUP_REPORTINTERVAL, 5000) + + /** + * Task group sleep interval + */ + val CORE_CONTAINER_TASK_GROUP_SLEEP_INTERNAL: CommonVars[Int] = CommonVars(CoreConstant.DATAX_CORE_CONTAINER_TASKGROUP_SLEEPINTERVAL, 100) + + /** + * Channel number for task group + */ + val CORE_CONTAINER_TASK_GROUP_CHANNEL: CommonVars[Int] = CommonVars(CoreConstant.DATAX_CORE_CONTAINER_TASKGROUP_CHANNEL, 5) + + /** + * Trace switch + */ + val CORE_CONTAINER_TRACE_ENABLE: CommonVars[Boolean] = CommonVars(CoreConstant.DATAX_CORE_CONTAINER_TRACE_ENABLE, false) + + /** + * Plugin collector task class + */ + val CORE_STATISTICS_COLLECTOR_PLUGIN_TASK_CLASS: CommonVars[String] = CommonVars(CoreConstant.DATAX_CORE_STATISTICS_COLLECTOR_PLUGIN_TASKCLASS, "com.alibaba.datax.core.statistics.plugin.task.StdoutPluginCollector") + + /** + * Max dirty record number + */ + val CORE_STATISTICS_COLLECTOR_PLUGIN_MAX_DIRTY_NUMBER: CommonVars[Int] = CommonVars(CoreConstant.DATAX_CORE_STATISTICS_COLLECTOR_PLUGIN_MAXDIRTYNUM, 10) + + /** + * Reporter class (EC use DataxEngineConnCommunicateReporter) + */ + val CORE_STATISTICS_REPORTER_PLUGIN_CLASS: CommonVars[String] = CommonVars(CoreConstant.DATAX_CORE_STATISTICS_REPORTER_PLUGIN_CLASS, "org.apache.linkis.engineconnplugin.datax.report.DataxEngineConnCommunicateReporter") + /** + * Processor loader plugin class + */ + val CORE_PROCESSOR_LOADER_PLUGIN_CLASS: CommonVars[String] = CommonVars(CoreConstant.DATAX_CORE_PROCESSOR_LOADER_PLUGIN_CLASS, "com.webank.wedatasphere.exchangis.datax.core.processor.loader.plugin.DefaultPluginProcessorLoader") + + /** + * Package name of processor loader plugin + */ + val CORE_PROCESSOR_LOADER_PLUGIN_PACKAGE: CommonVars[String] = CommonVars(CoreConstant.DATAX_CORE_PROCESSOR_LOADER_PLUGIN_PACKAGE, "com.webank.wedatasphere.exchangis.datax.core.processor.impl") + + /** + * Source path for processor loader plugin + */ + val 
CORE_PROCESSOR_LOADER_PLUGIN_SOURCE_PATH: CommonVars[String] = CommonVars(CoreConstant.DATAX_CORE_PROCESSOR_LODAER_PLUGIN_SOURCEPATH, "proc/src") + +} diff --git a/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/config/DataxResourceConfiguration.scala b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/config/DataxResourceConfiguration.scala new file mode 100644 index 000000000..3f6ef46f4 --- /dev/null +++ b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/config/DataxResourceConfiguration.scala @@ -0,0 +1,25 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.engineconnplugin.datax.config + +import org.apache.linkis.common.conf.CommonVars + +object DataxResourceConfiguration { + + +} diff --git a/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/config/DataxSettingConfiguration.scala b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/config/DataxSettingConfiguration.scala new file mode 100644 index 000000000..25e2ed4d2 --- /dev/null +++ b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/config/DataxSettingConfiguration.scala @@ -0,0 +1,49 @@ +package org.apache.linkis.engineconnplugin.datax.config + +import org.apache.linkis.common.conf.CommonVars + +/** + * Datax setting configuration + */ +object DataxSettingConfiguration { + + /** + * Sync meta + */ + val SETTING_SYNC_META: CommonVars[Boolean] = CommonVars("setting.syncMeta", false) + + /** + * Transport type + */ + val SETTING_TRANSPORT_TYPE: CommonVars[String] = CommonVars("setting.transport.type", "record") + + /** + * Key version for encrypt + */ + val SETTING_KEY_VERSION: CommonVars[String] = CommonVars("setting.keyVersion", "") + + /** + * Speed limit in byte(s) + */ + val SETTING_SPEED_BYTE: CommonVars[Int] = CommonVars("setting.speed.byte", 1048576) + + /** + * Speed limit in record(s) + */ + val SETTING_SPEED_RECORD: CommonVars[Int] = CommonVars("setting.speed.record", 100000) + + /** + * Speed limit in channel(s) + */ + val SETTING_SPEED_CHANNEL: CommonVars[Int] = CommonVars("setting.speed.channel", 0) + + /** + * Error limit in record + */ + val SETTING_ERROR_LIMIT_RECORD: CommonVars[Int] = CommonVars("setting.errorLimit.record", 0) + + /** + * If use processor + */ + val SETTING_USE_PROCESSOR: CommonVars[Boolean] = CommonVars("setting.useProcessor", false) +} diff --git a/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/config/DataxSpringConfiguration.scala 
b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/config/DataxSpringConfiguration.scala new file mode 100644 index 000000000..06609c615 --- /dev/null +++ b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/config/DataxSpringConfiguration.scala @@ -0,0 +1,23 @@ +package org.apache.linkis.engineconnplugin.datax.config + +import org.apache.linkis.common.utils.Logging +import org.apache.linkis.engineconn.acessible.executor.info.NodeHeartbeatMsgManager +import org.apache.linkis.engineconnplugin.datax.service.DataxHeartbeatMsgManager +import org.springframework.context.annotation.{Bean, Configuration, Primary} + +/** + * Spring configuration for datax + */ +@Configuration +class DataxSpringConfiguration extends Logging { + + /** + * Override the heartbeat manager + * @return + */ + @Bean + @Primary + def nodeHeartbeatMsgManager(): NodeHeartbeatMsgManager = { + new DataxHeartbeatMsgManager() + } +} diff --git a/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/context/DataxEngineConnContext.scala b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/context/DataxEngineConnContext.scala new file mode 100644 index 000000000..10bd12b16 --- /dev/null +++ b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/context/DataxEngineConnContext.scala @@ -0,0 +1,44 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.engineconnplugin.datax.context + +import com.alibaba.datax.common.util.Configuration +import java.util +/** + * Datax engine conn context + */ +class DataxEngineConnContext(settings: Configuration, coreConfig: Configuration, pluginDefinitions: util.List[DataxPluginDefinition]) { + + /** + * Plugin definition + * @return + */ + def getPluginDefinitions: util.List[DataxPluginDefinition] = pluginDefinitions + + /** + * Settings configuration + * @return + */ + def getSettings: Configuration = settings + + /** + * Core configuration + * @return + */ + def getCoreConfig: Configuration = coreConfig +} diff --git a/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/context/DataxPluginDefinition.scala b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/context/DataxPluginDefinition.scala new file mode 100644 index 000000000..fa0726768 --- /dev/null +++ b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/context/DataxPluginDefinition.scala @@ -0,0 +1,47 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.engineconnplugin.datax.context + +import com.alibaba.datax.common.util.Configuration + +/** + * Plugin definitions + * @param pluginName plugin name + * @param pluginPath plugin path + * @param pluginConf plugin conf + */ +class DataxPluginDefinition(pluginName: String, pluginPath: String, pluginConf: Configuration) { + + /** + * Plugin name + * @return + */ + def getPluginName: String = pluginName + + /** + * Plugin path + * @return + */ + def getPluginPath: String = pluginPath + + /** + * Plugin configuration + * @return + */ + def getPluginConf: Configuration = pluginConf +} diff --git a/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/executor/DataxContainerOnceExecutor.scala b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/executor/DataxContainerOnceExecutor.scala new file mode 100644 index 000000000..5b118110a --- /dev/null +++ b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/executor/DataxContainerOnceExecutor.scala @@ -0,0 +1,355 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.engineconnplugin.datax.executor + +import com.alibaba.datax.common.element.ColumnCast +import com.alibaba.datax.common.exception.DataXException +import com.alibaba.datax.common.statistics.{PerfTrace, VMInfo} +import com.alibaba.datax.common.util.Configuration +import com.alibaba.datax.core.AbstractContainer +import com.alibaba.datax.core.util.container.{CoreConstant, LoadUtil} +import com.alibaba.datax.core.util.{ConfigurationValidate, ExceptionTracker, FrameworkErrorCode, SecretUtil} +import org.apache.commons.lang3.StringUtils +import org.apache.linkis.common.utils.{ClassUtils, Utils} +import org.apache.linkis.engineconn.acessible.executor.service.{ExecutorHeartbeatService, ExecutorHeartbeatServiceHolder} +import org.apache.linkis.engineconn.common.creation.EngineCreationContext +import org.apache.linkis.engineconn.executor.service.ManagerService +import org.apache.linkis.engineconn.once.executor.{OnceExecutorExecutionContext, OperableOnceExecutor} +import org.apache.linkis.engineconnplugin.datax.config.DataxConfiguration +import org.apache.linkis.engineconnplugin.datax.exception.{DataxJobExecutionException, DataxPluginLoadException} +import org.apache.linkis.engineconnplugin.datax.executor.DataxContainerOnceExecutor.{CODE_NAME, JOB_CONTENT_NAME} +import org.apache.linkis.engineconnplugin.datax.report.{BasicDataxReportReceiver, DataxReportReceiver} +import org.apache.linkis.engineconnplugin.datax.utils.SecretUtils +import org.apache.linkis.manager.common.entity.enumeration.NodeStatus +import org.apache.linkis.protocol.engine.JobProgressInfo +import org.apache.linkis.scheduler.executer.ErrorExecuteResponse + +import java.util +import java.util.concurrent.{Future, TimeUnit} +import scala.collection.JavaConverters._ + +/** + * Once executor for datax container + */ +abstract class DataxContainerOnceExecutor extends DataxOnceExecutor with OperableOnceExecutor { + /** + * Executor configuration + */ + private var execConfiguration: Configuration = _ + /** + * Future + */ + private var future: Future[_] = _ + private var daemonThread: Future[_] = _ + + /** + * Report receiver + */ + private var reportReceiver: DataxReportReceiver = _ + + /** + * Container + */ + private var container: AbstractContainer = _ + override def getId: String = "DataxOnceApp_" + getContainerName + "_" + id + + override def doSubmit(onceExecutorExecutionContext: OnceExecutorExecutionContext, options: Map[String, String]): Unit = { + if (StringUtils.isNotBlank(DataxConfiguration.SECURITY_MANAGER_CLASSES.getValue)) { + // Set the security manager + System.setSecurityManager(ClassUtils.getClassInstance(DataxConfiguration.SECURITY_MANAGER_CLASSES.getValue)) + } + // Init the report receiver + if (Option(reportReceiver).isEmpty) reportReceiver = new BasicDataxReportReceiver() + var isFailed = false + future = Utils.defaultScheduler.submit(new Runnable { + override def run(): Unit = { + val params: util.Map[String, Object] = onceExecutorExecutionContext.getOnceExecutorContent.getJobContent + val result = execute(params, onceExecutorExecutionContext.getEngineCreationContext) + 
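+        // execute(...) returns an (exitCode, cause) tuple: a zero exit code means the
+        // DataX container finished successfully; otherwise the cause is wrapped into
+        // a DataxJobExecutionException below.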
info(s"The executor: [${getId}] has been finished, now to stop DataxEngineConn.") + closeDaemon() + if (result._1 != 0) { + isFailed = true + val message = s"Exec Datax engine conn occurred error, with exit code: [${result._1}]" + setResponse(ErrorExecuteResponse(message, new DataxJobExecutionException(message, result._2))) + tryFailed() + } + if (!isFailed) { + // Try to heartbeat at last + tryToHeartbeat() + trySucceed() + } + this synchronized notify() + } + }) + } + + /** + * Wait to running + */ + override protected def waitToRunning(): Unit = { + if (!isClosed) daemonThread = Utils.defaultScheduler.scheduleAtFixedRate(new Runnable { + override def run(): Unit = { + if (!(future.isDone || future.isCancelled)) { + trace(s"The executor: [$getId] has been still running") + // Heartbeat action interval + tryToHeartbeat() + } + } + }, DataxConfiguration.STATUS_FETCH_INTERVAL.getValue.toLong, + DataxConfiguration.STATUS_FETCH_INTERVAL.getValue.toLong, TimeUnit.MILLISECONDS) + } + + /** + * Get report receiver + * @return + */ + def getReportReceiver: DataxReportReceiver = this.reportReceiver + + /** + * Get container + * @return + */ + def getContainer: AbstractContainer = this.container + override def getProgress: Float = { + Option(this.reportReceiver) match { + case Some(_) => this.reportReceiver.getProgress + case _ => 0f + } + } + + override def getProgressInfo: Array[JobProgressInfo] = { + Option(this.reportReceiver) match { + case Some(_) => this.reportReceiver.getProgressInfo + case _ => Array() + } + } + + override def getMetrics: util.Map[String, Any] = { + val metrics = Option(this.reportReceiver) match { + case Some(_) => this.reportReceiver.getMetrics + case _ => new util.HashMap[String, Any]() + } + // Report the resource + metrics.put("NodeResourceJson", getCurrentNodeResource().getUsedResource.toJson) + metrics + } + + def getMessage(key: String):util.Map[String, util.List[String]] = { + null + } + + override def getDiagnosis: util.Map[String, Any] = { + // Not support diagnosis + new util.HashMap[String, Any]() + } + + override def isClosed: Boolean = { + NodeStatus.isCompleted(getStatus) + } + + override def tryFailed(): Boolean = { +// Option(this.container).foreach(_.shutdown()) + super.tryFailed() + } + + /** + * Try to send heartbeat message to ecm + */ + private def tryToHeartbeat(): Unit = { + logger.trace("heartbeat and record to linkis manager") + ExecutorHeartbeatServiceHolder.getDefaultHeartbeatService() match { + case heartbeatService: ExecutorHeartbeatService => + val heartbeatMsg = heartbeatService.generateHeartBeatMsg(this) + ManagerService.getManagerService.heartbeatReport(heartbeatMsg) + logger.trace(s"Succeed to report heartbeatMsg: [${heartbeatMsg}]") + } + } + /** + * Execute with job content + * @param jobContent job content + * @param engineCreateContext engine create context + * @return + */ + private def execute(jobContent: util.Map[String, Object], engineCreateContext: EngineCreationContext):(Int, Throwable) = { + var exitCode: Int = 0 + var throwable: Throwable = null + Utils.tryCatch { + trace("Begin to decrypt the job content") + var fullConfig: Configuration = Configuration.from(jobContent) + fullConfig = SecretUtil.decryptSecretKey(fullConfig) + // Add the settings to job content + mergeConfig(fullConfig, dataxEngineConnContext.getSettings, CODE_NAME, updateWhenConflict = false) + // Add the core configuration to job content + mergeConfig(fullConfig, dataxEngineConnContext.getCoreConfig, "", updateWhenConflict = true) + // Print VM information 
+ // Set plugin configuration + setPluginConfig(fullConfig) + Option(VMInfo.getVmInfo) match { + case Some(vm) => info(vm.toString) + case _ => + } + info(s"Try to launch executor: [${getId}] with job content: \n ${maskJobContent(fullConfig)}.\n") + // Seems that it is not important? + ConfigurationValidate.doValidate(fullConfig) + // Init environment settings + initEnvWithConfig(fullConfig) + // Store the full configuration + this.execConfiguration = fullConfig + execute(this.execConfiguration, engineCreateContext) + } { + e: Throwable => + exitCode = 1 + throwable = e + error(s"The possible reason of problem is : \n ${ExceptionTracker.trace(e)}") + e match { + case dataxE: DataXException => + val errorCode = dataxE.getErrorCode + errorCode match { + case code: FrameworkErrorCode => + exitCode = code.toExitValue + case _ => + } + case _ => + } + } + (exitCode, throwable) + } + + /** + * Execute with configuration + * @param self configuration + * @param engineCreateContext engine create context + */ + private def execute(self: Configuration, engineCreateContext: EngineCreationContext): Unit = { + // PrefTrace + val traceEnable = self.getBool(CoreConstant.DATAX_CORE_CONTAINER_TRACE_ENABLE, true) + val perfReportEnable = self.getBool(CoreConstant.DATAX_CORE_REPORT_DATAX_PERFLOG, true) + val jobInfo = self.getConfiguration(CoreConstant.DATAX_JOB_JOBINFO) + val channelNumber = self.getInt(CoreConstant.DATAX_CORE_CONTAINER_TASKGROUP_CHANNEL) + val isJob = this.isInstanceOf[DataxJobOnceExecutor] + val taskGroupId: Int = if (isJob) -1 else self.getInt(CoreConstant.DATAX_CORE_CONTAINER_TASKGROUP_ID) + val perfTrace = PerfTrace.getInstance(isJob, self.getLong(CoreConstant.DATAX_CORE_CONTAINER_JOB_ID), taskGroupId, 0, traceEnable) + perfTrace.setJobInfo(jobInfo, perfReportEnable, channelNumber) + Option(createContainer(self, engineCreateContext)).foreach(container => { + this.container = container + container.start() + }) + } + /** + * Set plugin configuration + * @param self self configuration + */ + private def setPluginConfig(self: Configuration): Unit = { + val plugins: util.Map[String, Configuration] = dataxEngineConnContext + .getPluginDefinitions.asScala.map(define => (define.getPluginName, define.getPluginConf)).toMap.asJava + info(s"content is ${dataxEngineConnContext.toString}") + dataxEngineConnContext.getPluginDefinitions.asScala.foreach { definition => + info(s"PluginName: ${definition.getPluginName}, pluginConf: ${definition.getPluginConf}, pluginPath: ${definition.getPluginPath}") + } + val pluginsNeed: util.Map[String, Configuration] = new util.HashMap() + Option(self.getString(CoreConstant.DATAX_JOB_CONTENT_READER_NAME)).foreach(readerPlugin => pluginsNeed.put(readerPlugin, plugins.get(readerPlugin))) + Option(self.getString(CoreConstant.DATAX_JOB_CONTENT_WRITER_NAME)).foreach(writerPlugin => pluginsNeed.put(writerPlugin, plugins.get(writerPlugin))) + Option(self.getString(CoreConstant.DATAX_JOB_PREHANDLER_PLUGINNAME)).foreach(prePlugin => pluginsNeed.put(prePlugin, plugins.get(prePlugin))) + Option(self.getString(CoreConstant.DATAX_JOB_POSTHANDLER_PLUGINNAME)).foreach(postPlugin => pluginsNeed.put(postPlugin, plugins.get(postPlugin))) + val noLoadPlugin = pluginsNeed.asScala.filter(entry => entry._2 == null).toMap + if (noLoadPlugin.nonEmpty){ + throw new DataxPluginLoadException(s"The specific plugins have not been loaded: [${noLoadPlugin.keys.mkString(",")}]", null) + } + pluginsNeed.asScala.foreach(entry => { + val pluginName = entry._1 + if (pluginName.endsWith("reader")){ + 
self.set(s"plugin.reader.${pluginName}", entry._2) + } else if (pluginName.endsWith("writer")){ + self.set(s"plugin.writer.${pluginName}", entry._2) + } else { + throw new DataxPluginLoadException(s"Unrecognized plugin name: [${pluginName}], please redefine it", null) + } + }) + } + /** + * Merge configuration + * @param self self configuration + * @param another another configuration + * @param pathPrefix path prefix + * @param updateWhenConflict update when conflict + * @return + */ + private def mergeConfig(self: Configuration, another: Configuration, pathPrefix: String, + updateWhenConflict: Boolean): Unit = { + val keys = another.getKeys + keys.asScala.foreach(key => { + val combineKey: String = if (StringUtils.isNotBlank(pathPrefix)) + StringUtils.join(util.Arrays.asList(pathPrefix, key), ".") else key + if (updateWhenConflict){ + self.set(combineKey, another.get(key)) + } else { + Option(self.get(combineKey)) match { + case Some(_) => + case _ => self.set(combineKey, another.get(key)) + } + } + }) + } + + /** + * Init the environment with configuration + * @param self self + */ + private def initEnvWithConfig(self: Configuration): Unit = { + ColumnCast.bind(self) + LoadUtil.bind(self) + } + /** + * Mask the job content + * @param self self configuration + * @return + */ + private def maskJobContent(self: Configuration): String = { + val contentWithSettings = self.getConfiguration(CODE_NAME).clone() + val content: Configuration = contentWithSettings.getConfiguration(JOB_CONTENT_NAME) + SecretUtils.filterSensitiveConfiguration(content) + contentWithSettings.set(JOB_CONTENT_NAME, content) + contentWithSettings.beautify() + } + + protected def closeDaemon(): Unit = { + if (daemonThread != null) daemonThread.cancel(true) + } + /** + * Container name + * @return + */ + def getContainerName: String + + + /** + * Create container + * @param config container configuration + * @param engineCreateContext engine create context + */ + def createContainer(config: Configuration, engineCreateContext: EngineCreationContext): AbstractContainer +} + +object DataxContainerOnceExecutor{ + + val CODE_NAME: String = "job" + + val JOB_CONTENT_NAME = "content" + + +} diff --git a/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/executor/DataxExecutor.scala b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/executor/DataxExecutor.scala new file mode 100644 index 000000000..d82685f19 --- /dev/null +++ b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/executor/DataxExecutor.scala @@ -0,0 +1,43 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.engineconnplugin.datax.executor + +import org.apache.linkis.engineconn.executor.entity.{LabelExecutor, ResourceExecutor} +import org.apache.linkis.engineconnplugin.datax.context.DataxEngineConnContext +import org.apache.linkis.engineconnplugin.datax.exception.DataxJobExecutionException +import org.apache.linkis.manager.common.entity.resource.NodeResource +import org.apache.linkis.manager.label.entity.Label + +import java.util + +/** + * Datax executor with label + */ +trait DataxExecutor extends LabelExecutor with ResourceExecutor { + + private var executorLabels: util.List[Label[_]] = new util.ArrayList[Label[_]] + + override def getExecutorLabels(): util.List[Label[_]] = executorLabels + override def setExecutorLabels(labels: util.List[Label[_]]): Unit = this.executorLabels = labels + + override def requestExpectedResource(expectedResource: NodeResource): NodeResource = throw new DataxJobExecutionException("Not support method for requestExpectedResource.") + + protected val dataxEngineConnContext: DataxEngineConnContext + + +} diff --git a/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/executor/DataxJobOnceExecutor.scala b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/executor/DataxJobOnceExecutor.scala new file mode 100644 index 000000000..a8794a53f --- /dev/null +++ b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/executor/DataxJobOnceExecutor.scala @@ -0,0 +1,38 @@ +package org.apache.linkis.engineconnplugin.datax.executor + +import com.alibaba.datax.common.util.Configuration +import com.alibaba.datax.core.AbstractContainer +import com.alibaba.datax.core.job.JobContainer +import com.alibaba.datax.core.util.container.CoreConstant +import org.apache.commons.lang3.StringUtils +import org.apache.linkis.engineconn.common.creation.EngineCreationContext +import org.apache.linkis.engineconnplugin.datax.context.DataxEngineConnContext + +import java.util +import scala.collection.JavaConverters.asScalaSetConverter + +/** + * + * @param id id + * @param dataxEngineConnContext datax engine conn context + */ +class DataxJobOnceExecutor(override val id: Long, + override protected val dataxEngineConnContext: DataxEngineConnContext) extends DataxContainerOnceExecutor { + /** + * Container name + * + * @return + */ + override def getContainerName: String = "Job-Container" + + /** + * Container entity + * + * @param config container configuration + * @param engineCreateContext engine create context + */ + override def createContainer(config: Configuration, engineCreateContext: EngineCreationContext): AbstractContainer = { + config.set(CoreConstant.DATAX_CORE_CONTAINER_JOB_MODE, "engineConn") + new JobContainer(config) + } +} \ No newline at end of file diff --git a/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/executor/DataxOnceExecutor.scala b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/executor/DataxOnceExecutor.scala new file mode 100644 index 000000000..69fb4d69c --- /dev/null +++ b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/executor/DataxOnceExecutor.scala @@ -0,0 +1,77 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
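The `waitToRunning` method in `DataxContainerOnceExecutor` above schedules a fixed-rate daemon that heartbeats while the container future is incomplete, and `closeDaemon` cancels it on completion. The following standalone sketch reproduces that lifecycle with a plain `ScheduledExecutorService`; the interval constant and the printed heartbeat stand in for `DataxConfiguration.STATUS_FETCH_INTERVAL` and `tryToHeartbeat`, and are assumptions for illustration only:

```scala
import java.util.concurrent.{CompletableFuture, Executors, ScheduledFuture, TimeUnit}

object StatusPollerSketch {
  def main(args: Array[String]): Unit = {
    // Stand-in for the running DataX container
    val work = CompletableFuture.runAsync(() => Thread.sleep(3000))
    val scheduler = Executors.newSingleThreadScheduledExecutor()
    val intervalMs = 500L // stand-in for STATUS_FETCH_INTERVAL
    val poller: ScheduledFuture[_] = scheduler.scheduleAtFixedRate(new Runnable {
      override def run(): Unit = {
        if (!(work.isDone || work.isCancelled)) {
          println("executor is still running, sending heartbeat")
        }
      }
    }, intervalMs, intervalMs, TimeUnit.MILLISECONDS)
    work.join()
    poller.cancel(true) // mirrors closeDaemon()
    scheduler.shutdown()
  }
}
```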
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.engineconnplugin.datax.executor + +import org.apache.linkis.common.utils.Utils +import org.apache.linkis.engineconn.core.EngineConnObject +import org.apache.linkis.engineconn.once.executor.{ManageableOnceExecutor, OnceExecutorExecutionContext} +import org.apache.linkis.engineconnplugin.datax.context.DataxEngineConnContext +import org.apache.linkis.manager.common.entity.resource.{CommonNodeResource, LoadResource, NodeResource} +import org.apache.linkis.manager.engineplugin.common.conf.EngineConnPluginConf + +import scala.collection.JavaConversions.mapAsScalaMap + +trait DataxOnceExecutor extends ManageableOnceExecutor with DataxExecutor { + + val id: Long + + /** + * Submit entrance + * @param onceExecutorExecutionContext execution context + */ + override protected def submit(onceExecutorExecutionContext: OnceExecutorExecutionContext): Unit = { + val options = onceExecutorExecutionContext.getOnceExecutorContent.getJobContent.map { + case (k, v: String) => k -> v + case (k, v) if v != null => k -> v.toString + case (k, _) => k -> null + }.toMap + doSubmit(onceExecutorExecutionContext, options) + } + + def doSubmit(onceExecutorExecutionContext: OnceExecutorExecutionContext, options: Map[String, String]): Unit + + override protected val dataxEngineConnContext: DataxEngineConnContext + + override def getCurrentNodeResource(): NodeResource = { + val properties = EngineConnObject.getEngineCreationContext.getOptions + val resource = new LoadResource( + EngineConnPluginConf.JAVA_ENGINE_REQUEST_MEMORY.getValue(properties).toLong, + EngineConnPluginConf.JAVA_ENGINE_REQUEST_CORES.getValue(properties) + ) + val engineResource = new CommonNodeResource + engineResource.setUsedResource(resource) + engineResource + } + + override def ensureAvailable[A](f: => A): A = { + // Not need to throws exception + Utils.tryQuietly{ super.ensureAvailable(f) } + } + +} + + + + + + + + + + + diff --git a/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/executor/DataxTaskGroupOnceExecutor.scala b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/executor/DataxTaskGroupOnceExecutor.scala new file mode 100644 index 000000000..6ad374adf --- /dev/null +++ b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/executor/DataxTaskGroupOnceExecutor.scala @@ -0,0 +1,27 @@ +package org.apache.linkis.engineconnplugin.datax.executor +import com.alibaba.datax.common.util.Configuration +import com.alibaba.datax.core.AbstractContainer +import com.alibaba.datax.core.taskgroup.TaskGroupContainer +import org.apache.linkis.engineconn.common.creation.EngineCreationContext +import org.apache.linkis.engineconnplugin.datax.context.DataxEngineConnContext + +class 
DataxTaskGroupOnceExecutor(override val id: Long, + override protected val dataxEngineConnContext: DataxEngineConnContext) extends DataxContainerOnceExecutor { + /** + * Container name + * + * @return + */ + override def getContainerName: String = "TaskGroup-Container" + + /** + * Container entity + * + * @param config container configuration + * @param engineCreateContext engine create context + */ + override def createContainer(config: Configuration, engineCreateContext: EngineCreationContext): AbstractContainer = { + new TaskGroupContainer(config) + } + +} diff --git a/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/factory/DataxCodeExecutorFactory.scala b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/factory/DataxCodeExecutorFactory.scala new file mode 100644 index 000000000..7f9d8b1f3 --- /dev/null +++ b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/factory/DataxCodeExecutorFactory.scala @@ -0,0 +1,50 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
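The `submit` method of `DataxOnceExecutor` above normalizes the incoming `jobContent` (a `util.Map[String, Object]`) into a `Map[String, String]`, keeping strings as-is, stringifying non-null values, and preserving nulls. A minimal sketch of that conversion, with hypothetical demo keys:

```scala
import java.util

object JobContentNormalizeSketch {
  /** Mirror of the submit() conversion: keep strings, stringify other values, preserve nulls. */
  def normalize(jobContent: util.Map[String, Object]): Map[String, String] = {
    import scala.collection.JavaConverters._
    jobContent.asScala.map {
      case (k, v: String) => k -> v
      case (k, v) if v != null => k -> v.toString
      case (k, _) => k -> (null: String)
    }.toMap
  }

  def main(args: Array[String]): Unit = {
    val content = new util.HashMap[String, Object]()
    content.put("job.name", "demo")
    content.put("channel", Integer.valueOf(3))
    content.put("empty", null)
    println(normalize(content)) // Map(job.name -> demo, channel -> 3, empty -> null)
  }
}
```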
+ */ + +package org.apache.linkis.engineconnplugin.datax.factory + +import com.alibaba.datax.core.util.container.CoreConstant +import org.apache.linkis.engineconn.common.creation.EngineCreationContext +import org.apache.linkis.engineconn.common.engineconn.EngineConn +import org.apache.linkis.engineconn.once.executor.OnceExecutor +import org.apache.linkis.engineconn.once.executor.creation.OnceExecutorFactory +import org.apache.linkis.engineconnplugin.datax.context.DataxEngineConnContext +import org.apache.linkis.engineconnplugin.datax.executor.{DataxContainerOnceExecutor, DataxJobOnceExecutor, DataxTaskGroupOnceExecutor} +import org.apache.linkis.manager.label.entity.Label +import org.apache.linkis.manager.label.entity.engine.RunType +import org.apache.linkis.manager.label.entity.engine.RunType.{JAVA, RunType, SCALA} + +class DataxCodeExecutorFactory extends OnceExecutorFactory { + protected override def newExecutor(id: Int, + engineCreationContext: EngineCreationContext, + engineConn: EngineConn, + labels: Array[Label[_]]): OnceExecutor = { + engineConn.getEngineConnSession match { + case context: DataxEngineConnContext => + val isJob = !("taskGroup".equalsIgnoreCase(context.getCoreConfig + .getString(CoreConstant.DATAX_CORE_CONTAINER_MODEL))) + if (isJob) + new DataxJobOnceExecutor(id, context) + else new DataxTaskGroupOnceExecutor(id, context) + case _ => null + } + } + + override protected def getSupportRunTypes: Array[String] = Array(SCALA.toString, JAVA.toString) + + override protected def getRunType: RunType = RunType.SCALA +} diff --git a/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/factory/DataxEngineConnFactory.scala b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/factory/DataxEngineConnFactory.scala new file mode 100644 index 000000000..624944720 --- /dev/null +++ b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/factory/DataxEngineConnFactory.scala @@ -0,0 +1,141 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
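The factory's `newExecutor` above picks the executor type from the container-model string: only an explicit (case-insensitive) `"taskGroup"` yields a task-group executor, so a missing or unrecognized value falls back to the whole-job container. A small sketch of that null-safe decision, with illustrative type names:

```scala
object ExecutorModeDispatchSketch {
  sealed trait Mode
  case object Job extends Mode
  case object TaskGroup extends Mode

  /** Null-safe version of the factory decision: anything but "taskGroup" means a whole-job container. */
  def modeOf(containerModel: String): Mode =
    if ("taskGroup".equalsIgnoreCase(containerModel)) TaskGroup else Job

  def main(args: Array[String]): Unit = {
    println(modeOf("taskGroup")) // TaskGroup
    println(modeOf(null))        // Job: equalsIgnoreCase(null) is false, so missing config falls back
  }
}
```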
+ */ + +package org.apache.linkis.engineconnplugin.datax.factory + +import com.alibaba.datax.common.util.Configuration +import com.alibaba.datax.core.util.container.CoreConstant +import org.apache.commons.lang3.StringUtils +import org.apache.linkis.common.conf.CommonVars +import org.apache.linkis.common.utils.Logging +import org.apache.linkis.engineconn.common.creation.EngineCreationContext +import org.apache.linkis.engineconnplugin.datax.config.DataxConfiguration +import org.apache.linkis.engineconnplugin.datax.config.DataxConfiguration.CONFIG_PREFIX +import org.apache.linkis.engineconnplugin.datax.config.DataxCoreConfiguration._ +import org.apache.linkis.engineconnplugin.datax.config.DataxSettingConfiguration._ +import org.apache.linkis.engineconnplugin.datax.context.DataxEngineConnContext +import org.apache.linkis.engineconnplugin.datax.factory.DataxEngineConnFactory.{CORE_ARRAY_CONFIGS, CORE_VALUE_CONFIGS, SETTING_VALUE_CONFIGS} +import org.apache.linkis.engineconnplugin.datax.plugin.{DataxPluginDefinitionLoader, LocalDataxPluginDefinitionLoader} +import org.apache.linkis.manager.engineplugin.common.creation.{ExecutorFactory, MultiExecutorEngineConnFactory} +import org.apache.linkis.manager.label.entity.engine.EngineType +import org.apache.linkis.manager.label.entity.engine.EngineType.EngineType + +import java.util +import scala.collection.JavaConverters._ + +/** + * Datax engine conn factory + */ +class DataxEngineConnFactory extends MultiExecutorEngineConnFactory with Logging { + + /** + * Plugin loader + */ + private val pluginLoader: DataxPluginDefinitionLoader = LocalDataxPluginDefinitionLoader() + + override protected def getEngineConnType: EngineType = EngineType.DATAX + + override protected def createEngineConnSession(engineCreationContext: EngineCreationContext): Any = { + var options = engineCreationContext.getOptions + options = options.asScala.map{ + case (key, value) => + if (key.startsWith(CONFIG_PREFIX)){ + (key.replaceFirst(CONFIG_PREFIX, ""), value) + } else (key, value) + }.asJava + engineCreationContext.setOptions(options) + val coreConfig = createCoreConfiguration(engineCreationContext) + val settings = createSettingsConfiguration(engineCreationContext) + new DataxEngineConnContext(settings, coreConfig, pluginLoader.loadPlugin(engineCreationContext)) + } + + /** + * Core configuration + * @param engineCreationContext engine create context + * @return + */ + private def createCoreConfiguration(engineCreationContext: EngineCreationContext): Configuration = { + val configuration = Configuration.from("{}") + val options = engineCreationContext.getOptions + CORE_VALUE_CONFIGS.foreach(config => config.getValue(options) match { + case v: Any => configuration.set(config.key, v) + case _ => //Ignore the unexpected value + }) + CORE_ARRAY_CONFIGS.foreach(config => config.getValue(options) match { + case array: Array[String] => configuration.set(config.key, array) + case str: String => if (StringUtils.isNotBlank(str)) + configuration.set(config.key, util.Arrays.asList(str.split(","))) + case _ => //Ignore the unrecognized value + }) + Option(DataxConfiguration.JOB_EXECUTION_ID.getValue(options)) match { + case Some(executionId: String) => + configuration.set(CoreConstant.DATAX_CORE_CONTAINER_JOB_ID, executionId) + case _ => + } + configuration + } + + /** + * Settings configuration + * @param engineCreationContext engine create context + * @return + */ + private def createSettingsConfiguration(engineCreationContext: EngineCreationContext): Configuration = { + val configuration 
= Configuration.from("{}") + SETTING_VALUE_CONFIGS.foreach(config => config.getValue(engineCreationContext.getOptions) match { + case v: Any => configuration.set(config.key, v) + case _ => //Ignore the unexpected value + }) + configuration + } + override protected def getDefaultExecutorFactoryClass: Class[_ <: ExecutorFactory] = { + classOf[DataxCodeExecutorFactory] + } + + override def getExecutorFactories: Array[ExecutorFactory] = { + val executorFactoryArray = Array[ExecutorFactory](new DataxCodeExecutorFactory) + executorFactoryArray + } + +} + +object DataxEngineConnFactory{ + /** + * Settings + */ + val SETTING_VALUE_CONFIGS: Array[CommonVars[_]] = Array(SETTING_SYNC_META, SETTING_TRANSPORT_TYPE, + SETTING_KEY_VERSION, SETTING_SPEED_BYTE, SETTING_SPEED_RECORD, + SETTING_SPEED_CHANNEL, SETTING_ERROR_LIMIT_RECORD, SETTING_USE_PROCESSOR + ) + + /** + * Core + */ + val CORE_VALUE_CONFIGS: Array[CommonVars[_]] = + Array(CORE_STATISTICS_REPORTER_PLUGIN_CLASS, COMMON_COLUMN_DATETIME_FORMAT, COMMON_COLUMN_TIME_FORMAT, COMMON_COLUMN_DATE_FORMAT, + COMMON_COLUMN_TIMEZONE, COMMON_COLUMN_ENCODING, CORE_TRANSPORT_TYPE, CORE_TRANSPORT_CHANNEL_SPEED_BYTE, + CORE_TRANSPORT_CHANNEL_SPEED_RECORD, CORE_TRANSPORT_CHANNEL_FLOW_CONTROL_INTERNAL, CORE_TRANSPORT_CHANNEL_CAPACITY, + CORE_TRANSPORT_CHANNEL_BYTE_CAPACITY, CORE_TRANSPORT_RECORD_CHANNEL_CLASS, CORE_TRANSPORT_RECORD_EXCHANGER_CLASS, + CORE_TRANSPORT_RECORD_EXCHANGER_BUFFER_SIZE, CORE_TRANSPORT_STREAM_CHANNEL_CLASS, CORE_TRANSPORT_STREAM_CHANNEL_BLOCK_SIZE, + CORE_CONTAINER_JOB_REPORT_INTERVAL, CORE_CONTAINER_JOB_SLEEP_INTERNAL, CORE_CONTAINER_TASK_GROUP_REPORT_INTERVAL, + CORE_CONTAINER_TASK_GROUP_SLEEP_INTERNAL, CORE_CONTAINER_TASK_GROUP_CHANNEL, CORE_CONTAINER_TRACE_ENABLE, + CORE_STATISTICS_COLLECTOR_PLUGIN_TASK_CLASS, CORE_STATISTICS_COLLECTOR_PLUGIN_MAX_DIRTY_NUMBER, + CORE_PROCESSOR_LOADER_PLUGIN_CLASS, CORE_PROCESSOR_LOADER_PLUGIN_PACKAGE, CORE_PROCESSOR_LOADER_PLUGIN_SOURCE_PATH, CORE_CONTAINER_MODEL + ) + + val CORE_ARRAY_CONFIGS: Array[CommonVars[_]] = Array(COMMON_COLUMN_EXTRA_FORMATS) +} \ No newline at end of file diff --git a/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/launch/DataxEngineConnLaunchBuilder.scala b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/launch/DataxEngineConnLaunchBuilder.scala new file mode 100644 index 000000000..da194c0bb --- /dev/null +++ b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/launch/DataxEngineConnLaunchBuilder.scala @@ -0,0 +1,81 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.engineconnplugin.datax.launch + +import org.apache.commons.lang3.StringUtils +import org.apache.linkis.common.utils.JsonUtils +import org.apache.linkis.engineconnplugin.datax.config.DataxConfiguration +import org.apache.linkis.engineconnplugin.datax.plugin.{PluginBmlResource, PluginResource} +import org.apache.linkis.manager.common.protocol.bml.BmlResource +import org.apache.linkis.manager.engineplugin.common.launch.entity.EngineConnBuildRequest +import org.apache.linkis.manager.engineplugin.common.launch.process.Environment.{PWD, variable} +import org.apache.linkis.manager.engineplugin.common.launch.process.JavaProcessEngineConnLaunchBuilder + +import java.util +import java.util.Base64 +import scala.collection.mutable.ArrayBuffer + +/** + * Datax engine conn launch builder + * (use public module lib) + */ +class DataxEngineConnLaunchBuilder extends JavaProcessEngineConnLaunchBuilder { + + protected override def getCommands(implicit engineConnBuildRequest: EngineConnBuildRequest): Array[String] = { + // CD to the worker space directory + var commands = new ArrayBuffer[String]() + commands += "cd" + commands += variable(PWD) + commands += "&&" + commands = commands ++ super.getCommands + commands.toArray + } + + protected override def getBmlResources(implicit engineConnBuildRequest: EngineConnBuildRequest): util.List[BmlResource] = { + val bmlResources = new util.ArrayList[BmlResource](super.getBmlResources) + val props = engineConnBuildRequest.engineConnCreationDesc.properties + DataxConfiguration.PLUGIN_RESOURCES.getValue(props) match { + case resources: String => + if (StringUtils.isNotBlank(resources)) { + val mapper = JsonUtils.jackson + val pluginBmlResources: Array[PluginBmlResource] = mapper.readValue(resources, + mapper.getTypeFactory.constructArrayType(classOf[PluginBmlResource])) + Option(pluginBmlResources).foreach(pluginBmlResources => pluginBmlResources.foreach(pluginBmlResource => { + // Convert to bml resources + val bmlResource = new BmlResource + bmlResource.setFileName(pluginBmlResource.getName) + bmlResource.setResourceId(pluginBmlResource.getResourceId) + bmlResource.setVersion(pluginBmlResource.getVersion) + bmlResource.setOwner(pluginBmlResource.getCreator) + pluginBmlResource.getPath match { + case "." => + bmlResource.setVisibility(BmlResource.BmlResourceVisibility.Private) + case _ => + // Importance: major module must be a public bml resource + bmlResource.setVisibility(BmlResource.BmlResourceVisibility.Public) + } + bmlResources.add(bmlResource) + })) + // Encoding the resources json + props.put(DataxConfiguration.PLUGIN_RESOURCES.key, Base64.getEncoder.encodeToString(resources.getBytes("utf-8"))) + } + } + bmlResources + } + +} \ No newline at end of file diff --git a/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/params/DataxParamsResolver.scala b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/params/DataxParamsResolver.scala new file mode 100644 index 000000000..74472795b --- /dev/null +++ b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/params/DataxParamsResolver.scala @@ -0,0 +1,35 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
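The launch builder above Base64-encodes the plugin-resources JSON before it is written back into the engine properties, and `LocalDataxPluginDefinitionLoader` (later in this diff) decodes the same string on the engine side. A self-contained round-trip sketch of that wire format, using a made-up resource entry:

```scala
import java.nio.charset.StandardCharsets
import java.util.Base64

object PluginResourceCodecSketch {
  def encode(resourcesJson: String): String =
    Base64.getEncoder.encodeToString(resourcesJson.getBytes(StandardCharsets.UTF_8))

  def decode(encoded: String): String =
    new String(Base64.getDecoder.decode(encoded), StandardCharsets.UTF_8)

  def main(args: Array[String]): Unit = {
    val json = """[{"name":"mysqlreader","resourceId":"res-1","version":"v1","path":"plugin/reader"}]"""
    val wire = encode(json)
    assert(decode(wire) == json) // the loader must see exactly what the builder wrote
    println(wire)
  }
}
```

Encoding keeps the JSON safe to pass through property maps and command-line environments that would otherwise mangle quotes and braces.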
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.engineconnplugin.datax.params + +import org.apache.linkis.engineconn.common.creation.EngineCreationContext + +import java.util + +/** + * Resolve the engine job params + */ +trait DataxParamsResolver { + + /** + * main method + * @param params input + * @return + */ + def resolve(params: util.Map[String, Object], context: EngineCreationContext): util.Map[String, Object] +} diff --git a/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/plugin/DataxPluginDefinitionLoader.scala b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/plugin/DataxPluginDefinitionLoader.scala new file mode 100644 index 000000000..e8fc57e0c --- /dev/null +++ b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/plugin/DataxPluginDefinitionLoader.scala @@ -0,0 +1,17 @@ +package org.apache.linkis.engineconnplugin.datax.plugin + +import org.apache.linkis.engineconn.common.creation.EngineCreationContext +import org.apache.linkis.engineconnplugin.datax.context.DataxPluginDefinition +import java.util +/** + * Plugin definition loader + */ +trait DataxPluginDefinitionLoader { + + /** + * Load plugin + * @param engineCreationContext engine create context + * @return + */ + def loadPlugin(engineCreationContext: EngineCreationContext): util.List[DataxPluginDefinition] +} diff --git a/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/plugin/LocalDataxPluginDefinitionLoader.scala b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/plugin/LocalDataxPluginDefinitionLoader.scala new file mode 100644 index 000000000..3ee0f4774 --- /dev/null +++ b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/plugin/LocalDataxPluginDefinitionLoader.scala @@ -0,0 +1,88 @@ +package org.apache.linkis.engineconnplugin.datax.plugin +import com.alibaba.datax.common.util.Configuration +import org.apache.commons.lang3.StringUtils +import org.apache.linkis.common.conf.CommonVars +import org.apache.linkis.common.utils.{JsonUtils, Logging} +import org.apache.linkis.engineconn.common.creation.EngineCreationContext +import org.apache.linkis.engineconnplugin.datax.config.DataxConfiguration +import org.apache.linkis.engineconnplugin.datax.context.DataxPluginDefinition +import org.apache.linkis.engineconnplugin.datax.exception.DataxPluginLoadException +import org.apache.linkis.engineconnplugin.datax.plugin.LocalDataxPluginDefinitionLoader.{PLUGIN_JSON_NAME, PLUGIN_NAME, PLUGIN_PATH} +import org.apache.linkis.manager.engineplugin.common.launch.process.Environment + +import java.io.File +import java.util +import java.util.Base64 +/** + * Local plugin definition loader + */ +class LocalDataxPluginDefinitionLoader extends DataxPluginDefinitionLoader with Logging{ + /** + 
* Load plugin + * + * @param engineCreationContext engine create context + * @return + */ + override def loadPlugin(engineCreationContext: EngineCreationContext): util.List[DataxPluginDefinition] = { + val options = engineCreationContext.getOptions + val plugins = new util.ArrayList[DataxPluginDefinition]() + val pluginDefineSet: util.Set[String] = new util.HashSet[String]() + DataxConfiguration.PLUGIN_RESOURCES.getValue(options) match { + case encryptRes: String => + if (StringUtils.isNotBlank(encryptRes)) { + // First to decode the resources + val resources = new String(Base64.getDecoder.decode(encryptRes), "utf-8"); + val mapper = JsonUtils.jackson + val pluginResources: Array[PluginResource] = mapper.readValue(resources, + mapper.getTypeFactory.constructArrayType(classOf[PluginResource])) + val workDir = CommonVars(Environment.PWD.toString, "").getValue + if (StringUtils.isBlank(workDir)) { + throw new DataxPluginLoadException(s"Cannot get the working directory from variable: 'PWD' in datax engine conn environment", null) + } + Option(pluginResources).foreach(resources => resources.foreach( + resource => Option(convertPluginResourceToDefine(pluginDefineSet, resource, workDir)) + .foreach(definition => plugins.add(definition)))) + } + case _ => + } + plugins + } + + private def convertPluginResourceToDefine(pluginDefineSet: util.Set[String], resource: PluginResource, workDir: String): DataxPluginDefinition = { + // Skip the path has value '.' + resource.getPath match { + case "." => null + case _ => + // Search and load the resource definition at work directory + val resLocalFile = new File(workDir, new File(resource.getPath).getName) + if (resLocalFile.isDirectory) { + val pluginConf: Configuration = Configuration.from(new File(resLocalFile.getPath, PLUGIN_JSON_NAME)) + val pluginName: String = pluginConf.getString(PLUGIN_NAME) + var pluginPath: String = pluginConf.getString(PLUGIN_PATH) + if (pluginDefineSet.contains(pluginName)) { + throw new DataxPluginLoadException(s"Fail to load plugin [name: ${pluginName}, path: ${pluginPath}], duplicated plugin exists", null) + } + pluginDefineSet.add(pluginName) + if (StringUtils.isBlank(pluginPath)) { + pluginPath = resLocalFile.getPath + pluginConf.set(PLUGIN_PATH, pluginPath) + } + new DataxPluginDefinition(pluginName, pluginPath, pluginConf) + } else { + warn(s"Cannot find the plugin resource in path: [${resLocalFile.getPath}], please examine the working directory: [${workDir}]") + null + } + } + } +} + +object LocalDataxPluginDefinitionLoader{ + + val PLUGIN_JSON_NAME = "plugin.json" + + val PLUGIN_PATH = "path" + + val PLUGIN_NAME = "name" + def apply(): LocalDataxPluginDefinitionLoader = new LocalDataxPluginDefinitionLoader() + +} diff --git a/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/report/BasicDataxReportReceiver.scala b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/report/BasicDataxReportReceiver.scala new file mode 100644 index 000000000..1e1f36052 --- /dev/null +++ b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/report/BasicDataxReportReceiver.scala @@ -0,0 +1,81 @@ +package org.apache.linkis.engineconnplugin.datax.report +import com.alibaba.datax.core.statistics.communication.{Communication, CommunicationTool} +import org.apache.commons.lang3.StringUtils +import org.apache.linkis.protocol.engine.JobProgressInfo + +import java.util + +/** + * Basic datax report receiver + 

+ */
+class BasicDataxReportReceiver extends DataxReportReceiver {
+
+  private var jobId: String = _
+
+  /**
+   * Just store the last communication
+   */
+  private var lastCommunication: Communication = _
+
+  /**
+   * Receive communication
+   *
+   * @param communication communication
+   */
+  override def receive(jobId: String, communication: Communication): Unit = {
+    if (StringUtils.isNotBlank(jobId)) {
+      this.jobId = jobId
+    }
+    // Update
+    this.lastCommunication = communication
+  }
+
+  /**
+   * Progress value
+   *
+   * @return
+   */
+  override def getProgress: Float = {
+    Option(this.lastCommunication) match {
+      case Some(communication) =>
+        communication.getDoubleCounter(CommunicationTool.PERCENTAGE).floatValue()
+      case _ => 0f
+    }
+  }
+
+  /**
+   * Progress info
+   *
+   * @return
+   */
+  override def getProgressInfo: Array[JobProgressInfo] = {
+    // DataX does not report failed tasks, so the failed count is always 0
+    var totalTask: Long = 0
+    var finishTask: Long = 0
+    Option(this.lastCommunication) match {
+      case Some(communication) =>
+        // Only the finished stage count is reported directly
+        finishTask = communication.getLongCounter(CommunicationTool.STAGE)
+        // Derive the total from the percentage, guarding against division by zero
+        val percentage = communication.getDoubleCounter(CommunicationTool.PERCENTAGE)
+        totalTask = if (percentage > 0) (finishTask.toDouble / percentage).toLong else finishTask
+      case _ =>
+    }
+    Array(JobProgressInfo(this.jobId, totalTask.toInt, (totalTask - finishTask).toInt, 0, finishTask.toInt))
+  }
+
+  /**
+   * Metrics info
+   *
+   * @return
+   */
+  override def getMetrics: util.Map[String, Any] = {
+    // Convert the whole counter in communication
+    Option(this.lastCommunication) match {
+      case Some(communication) =>
+        val counter = communication.getCounter
+        counter.asInstanceOf[util.Map[String, Any]]
+      case _ => new util.HashMap[String, Any]()
+    }
+  }
+
+}
diff --git a/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/report/DataxEngineConnCommunicateReporter.scala b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/report/DataxEngineConnCommunicateReporter.scala
new file mode 100644
index 000000000..356bf421f
--- /dev/null
+++ b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/report/DataxEngineConnCommunicateReporter.scala
@@ -0,0 +1,40 @@
+package org.apache.linkis.engineconnplugin.datax.report
+
+import com.alibaba.datax.common.util.Configuration
+import com.alibaba.datax.core.statistics.communication.{Communication, LocalTGCommunicationManager}
+import com.alibaba.datax.core.statistics.container.report.AbstractReporter
+import org.apache.linkis.engineconn.once.executor.creation.OnceExecutorManager
+import org.apache.linkis.engineconnplugin.datax.executor.DataxContainerOnceExecutor
+
+import java.lang
+
+/**
+ * Communication reporter for datax engine conn
+ */
+class DataxEngineConnCommunicateReporter(configuration: Configuration) extends AbstractReporter {
+
+  /**
+   * Report the job communication
+   * @param jobId job id
+   * @param communication communication
+   */
+  override def reportJobCommunication(jobId: lang.Long, communication: Communication): Unit = {
+    OnceExecutorManager.getInstance.getReportExecutor match {
+      case executor: DataxContainerOnceExecutor =>
+        executor.getReportReceiver.receive(jobId.toString, communication)
+      case _ =>
+    }
+  }
+
+  /**
+   * Report the task group communication
+   * @param taskGroupId task group id
+   * @param communication communication
+   */
+  override def reportTGCommunication(taskGroupId: Integer, communication: Communication): Unit = {
LocalTGCommunicationManager.updateTaskGroupCommunication(taskGroupId, communication) + } + + def getConfiguration: Configuration = this.configuration +} diff --git a/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/report/DataxReportQuota.scala b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/report/DataxReportQuota.scala new file mode 100644 index 000000000..1b304ce11 --- /dev/null +++ b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/report/DataxReportQuota.scala @@ -0,0 +1,28 @@ +package org.apache.linkis.engineconnplugin.datax.report + +import org.apache.linkis.protocol.engine.JobProgressInfo +import java.util +/** + * Quota interface + */ +trait DataxReportQuota { + + /** + * Progress value + * @return + */ + def getProgress: Float + + /** + * Progress info + * @return + */ + def getProgressInfo: Array[JobProgressInfo] + + /** + * Metrics info + * @return + */ + def getMetrics: util.Map[String, Any] + +} diff --git a/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/report/DataxReportReceiver.scala b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/report/DataxReportReceiver.scala new file mode 100644 index 000000000..1e539c0d5 --- /dev/null +++ b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/report/DataxReportReceiver.scala @@ -0,0 +1,15 @@ +package org.apache.linkis.engineconnplugin.datax.report + +import com.alibaba.datax.core.statistics.communication.Communication + +/** + * Report receiver + */ +trait DataxReportReceiver extends DataxReportQuota { + /** + * Receive communication + * @param communication communication + */ + def receive(jobId: String, communication: Communication): Unit + +} diff --git a/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/resource/DataxEngineConnResourceFactory.scala b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/resource/DataxEngineConnResourceFactory.scala new file mode 100644 index 000000000..8bb7cdf22 --- /dev/null +++ b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/resource/DataxEngineConnResourceFactory.scala @@ -0,0 +1,36 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
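`BasicDataxReportReceiver.getProgressInfo` above derives the total task count back from the finished stage count and the reported completion percentage. A small sketch of that derivation in isolation, including the zero-percentage guard:

```scala
object ProgressDerivationSketch {
  /** Derive (total, running) task counts from a finished count and a completion percentage. */
  def derive(finished: Long, percentage: Double): (Long, Long) = {
    val total = if (percentage > 0) (finished.toDouble / percentage).toLong else finished
    (total, total - finished)
  }

  def main(args: Array[String]): Unit = {
    println(derive(3, 0.25)) // (12, 9): 3 finished stages at 25% imply 12 in total
    println(derive(0, 0.0))  // (0, 0): avoid dividing by zero before any report arrives
  }
}
```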
+ */ + +package org.apache.linkis.engineconnplugin.datax.resource + +import org.apache.linkis.manager.common.entity.resource.{LoadInstanceResource, Resource} +import org.apache.linkis.manager.engineplugin.common.conf.EngineConnPluginConf +import org.apache.linkis.manager.engineplugin.common.resource.AbstractEngineResourceFactory + +import java.util + +/** + * Resource factory + */ +class DataxEngineConnResourceFactory extends AbstractEngineResourceFactory { + + override protected def getRequestResource(properties: util.Map[String, String]): Resource = { + // Just use local resource + new LoadInstanceResource(EngineConnPluginConf.JAVA_ENGINE_REQUEST_MEMORY.getValue(properties).toLong, + EngineConnPluginConf.JAVA_ENGINE_REQUEST_CORES.getValue(properties), 1) + } +} diff --git a/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/service/DataxHeartbeatMsgManager.scala b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/service/DataxHeartbeatMsgManager.scala new file mode 100644 index 000000000..26b26392c --- /dev/null +++ b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/service/DataxHeartbeatMsgManager.scala @@ -0,0 +1,30 @@ +package org.apache.linkis.engineconnplugin.datax.service + +import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.engineconn.acessible.executor.info.NodeHeartbeatMsgManager +import org.apache.linkis.engineconn.executor.entity.Executor +import org.apache.linkis.engineconnplugin.datax.executor.DataxContainerOnceExecutor +import org.apache.linkis.server.BDPJettyServerHelper + +import scala.collection.JavaConverters.mapAsScalaMapConverter + +/** + * Datax heartbeat message (include: metric, error message) + */ +class DataxHeartbeatMsgManager extends NodeHeartbeatMsgManager with Logging{ + override def getHeartBeatMsg(executor: Executor): String = { + executor match { + case dataxExecutor: DataxContainerOnceExecutor => + val metric = dataxExecutor.getMetrics + Utils.tryCatch(BDPJettyServerHelper.gson.toJson(metric)) { case e: Exception => + val mV = metric.asScala + .map { case (k, v) => if (null == v) s"${k}->null" else s"${k}->${v.toString}" } + .mkString(",") + val errMsg = e.getMessage + logger.error(s"Convert metric to json failed because : ${errMsg}, metric values : {${mV}}") + "{\"errorMsg\":\"Convert metric to json failed because : " + errMsg + "\"}" + } + case _ => "{}" + } + } +} diff --git a/exchangis-engines/engineconn-plugins/sqoop/pom.xml b/exchangis-engines/engineconn-plugins/sqoop/pom.xml new file mode 100644 index 000000000..bd6afb7d2 --- /dev/null +++ b/exchangis-engines/engineconn-plugins/sqoop/pom.xml @@ -0,0 +1,275 @@ + + + + + + exchangis + com.webank.wedatasphere.exchangis + ${revision} + ../../../pom.xml + + 4.0.0 + + linkis-engineplugin-sqoop + + 1.4.6 + 3.1.3 + 3.3.4 + 1.4.0 + + + + + org.apache.commons + commons-exec + provided + 1.3 + + + org.apache.sqoop + sqoop + hadoop200 + ${sqoop.version} + + + org.apache.avro + avro + provided + 1.10.2 + + + com.lmax + disruptor + provided + 3.4.3 + + + org.apache.hive + hive-common + provided + ${hive.version} + + + * + * + + + + + org.apache.hadoop + hadoop-mapreduce-client-core + ${hadoop.version} + + + servlet-api + javax.servlet + + + + + org.apache.linkis + linkis-once-engineconn + ${linkis.version} + + + commons-logging + commons-logging + + + + + org.apache.linkis + linkis-computation-engineconn + ${linkis.version} + + + 
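`DataxHeartbeatMsgManager` above serializes the executor's metrics map to JSON and, when serialization fails, falls back to a flattened key-value dump inside an error payload. Below is a hedged sketch of the try-JSON-then-fallback shape using Gson directly (the project's `BDPJettyServerHelper.gson` is a Gson instance; the simplified fallback payload here is illustrative, not the exact production message):

```scala
import com.google.gson.Gson

object HeartbeatMsgSketch {
  private val gson = new Gson()

  def toHeartbeatMsg(metrics: java.util.Map[String, Any]): String =
    try gson.toJson(metrics)
    catch {
      case e: Exception =>
        s"""{"errorMsg":"Convert metric to json failed because : ${e.getMessage}"}"""
    }

  def main(args: Array[String]): Unit = {
    val metrics = new java.util.HashMap[String, Any]()
    metrics.put("totalRecords", 1024L)
    println(toHeartbeatMsg(metrics))
  }
}
```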
org.apache.hadoop + hadoop-client + ${hadoop.version} + + + log4j + log4j + + + org.mortbay.jetty + jetty + + + org.mortbay.jetty + jetty-util + + + com.sun.jersey + jersey-core + + + com.sun.jersey + jersey-server + + + com.sun.jersey + jersey-json + + + jsr311-api + javax.ws.rs + + + net.java.dev.jets3t + jets3t + + + com.jcraft + jsch + + + com.google.code.findbugs + jsr305 + + + xmlenc + xmlenc + + + net.java.dev.jets3t + jets3t + + + org.apache.avro + avro + + + org.apache.hadoop + hadoop-auth + + + com.jcraft + jsch + + + com.google.code.findbugs + jsr305 + + + servlet-api + javax.servlet + + + org.slf4j + slf4j-log4j12 + + + hadoop-hdfs + org.apache.hadoop + + + org.apache.hadoop + hadoop-hdfs + + + + + org.apache.linkis + linkis-engineconn-plugin-core + ${linkis.version} + + + org.apache.linkis + linkis-rpc + ${linkis.version} + provided + + + org.apache.linkis + linkis-storage + ${linkis.version} + provided + + + org.apache.linkis + linkis-common + ${linkis.version} + provided + + + + + + + org.apache.maven.plugins + maven-deploy-plugin + + + net.alchim31.maven + scala-maven-plugin + + + org.apache.maven.plugins + maven-jar-plugin + + + org.apache.maven.plugins + maven-assembly-plugin + 2.3 + false + + + make-assembly + package + + single + + + + src/main/assembly/distribution.xml + + + + + + false + out + false + false + + src/main/assembly/distribution.xml + + + + + + + src/main/java + + **/*.xml + **/*.properties + + + + src/main/resources + + **/application.yml + **/bootstrap.yml + + + + + \ No newline at end of file diff --git a/exchangis-engines/engineconn-plugins/sqoop/src/main/assembly/distribution.xml b/exchangis-engines/engineconn-plugins/sqoop/src/main/assembly/distribution.xml new file mode 100644 index 000000000..a88001e18 --- /dev/null +++ b/exchangis-engines/engineconn-plugins/sqoop/src/main/assembly/distribution.xml @@ -0,0 +1,324 @@ + + + + sqoop + + dir + + true + sqoop + + + + + + /dist/${sqoop.version}/lib + true + true + false + false + true + + + antlr:antlr:jar + aopalliance:aopalliance:jar + asm:asm:jar + cglib:cglib:jar + com.amazonaws:aws-java-sdk-autoscaling:jar + com.amazonaws:aws-java-sdk-core:jar + com.amazonaws:aws-java-sdk-ec2:jar + com.amazonaws:aws-java-sdk-route53:jar + com.amazonaws:aws-java-sdk-sts:jar + com.amazonaws:jmespath-java:jar + com.fasterxml.jackson.core:jackson-annotations:jar + com.fasterxml.jackson.core:jackson-core:jar + com.fasterxml.jackson.core:jackson-databind:jar + com.fasterxml.jackson.dataformat:jackson-dataformat-cbor:jar + com.fasterxml.jackson.datatype:jackson-datatype-jdk8:jar + com.fasterxml.jackson.datatype:jackson-datatype-jsr310:jar + com.fasterxml.jackson.jaxrs:jackson-jaxrs-base:jar + com.fasterxml.jackson.jaxrs:jackson-jaxrs-json-provider:jar + com.fasterxml.jackson.module:jackson-module-jaxb-annotations:jar + com.fasterxml.jackson.module:jackson-module-parameter-names:jar + com.fasterxml.jackson.module:jackson-module-paranamer:jar + com.fasterxml.jackson.module:jackson-module-scala_2.11:jar + com.github.andrewoma.dexx:dexx-collections:jar + com.github.vlsi.compactmap:compactmap:jar + com.google.code.findbugs:annotations:jar + com.google.code.findbugs:jsr305:jar + com.google.code.gson:gson:jar + com.google.guava:guava:jar + com.google.inject:guice:jar + com.google.protobuf:protobuf-java:jar + com.netflix.archaius:archaius-core:jar + com.netflix.eureka:eureka-client:jar + com.netflix.eureka:eureka-core:jar + com.netflix.hystrix:hystrix-core:jar + com.netflix.netflix-commons:netflix-commons-util:jar + 
com.netflix.netflix-commons:netflix-eventbus:jar + com.netflix.netflix-commons:netflix-infix:jar + com.netflix.netflix-commons:netflix-statistics:jar + com.netflix.ribbon:ribbon:jar + com.netflix.ribbon:ribbon-core:jar + com.netflix.ribbon:ribbon-eureka:jar + com.netflix.ribbon:ribbon-httpclient:jar + com.netflix.ribbon:ribbon-loadbalancer:jar + com.netflix.ribbon:ribbon-transport:jar + com.netflix.servo:servo-core:jar + com.ning:async-http-client:jar + com.sun.jersey.contribs:jersey-apache-client4:jar + com.sun.jersey:jersey-client:jar + com.sun.jersey:jersey-core:jar + com.sun.jersey:jersey-json:jar + com.sun.jersey:jersey-server:jar + com.sun.jersey:jersey-servlet:jar + com.sun.xml.bind:jaxb-impl:jar + com.thoughtworks.paranamer:paranamer:jar + com.thoughtworks.xstream:xstream:jar + org.apache.linkis:linkis-common:jar + org.apache.linkis:linkis-module:jar + commons-beanutils:commons-beanutils:jar + commons-beanutils:commons-beanutils-core:jar + commons-cli:commons-cli:jar + commons-codec:commons-codec:jar + commons-collections:commons-collections:jar + commons-configuration:commons-configuration:jar + commons-daemon:commons-daemon:jar + commons-dbcp:commons-dbcp:jar + commons-digester:commons-digester:jar + commons-httpclient:commons-httpclient:jar + commons-io:commons-io:jar + commons-jxpath:commons-jxpath:jar + commons-lang:commons-lang:jar + commons-logging:commons-logging:jar + commons-net:commons-net:jar + commons-pool:commons-pool:jar + io.micrometer:micrometer-core:jar + io.netty:netty:jar + io.netty:netty-all:jar + io.netty:netty-buffer:jar + io.netty:netty-codec:jar + io.netty:netty-codec-http:jar + io.netty:netty-common:jar + io.netty:netty-handler:jar + io.netty:netty-transport:jar + io.netty:netty-transport-native-epoll:jar + io.reactivex:rxjava:jar + io.reactivex:rxnetty:jar + io.reactivex:rxnetty-contexts:jar + io.reactivex:rxnetty-servo:jar + javax.activation:activation:jar + javax.annotation:javax.annotation-api:jar + javax.inject:javax.inject:jar + javax.servlet:javax.servlet-api:jar + javax.servlet.jsp:jsp-api:jar + javax.validation:validation-api:jar + javax.websocket:javax.websocket-api:jar + javax.ws.rs:javax.ws.rs-api:jar + javax.xml.bind:jaxb-api:jar + javax.xml.stream:stax-api:jar + joda-time:joda-time:jar + log4j:log4j:jar + mysql:mysql-connector-java:jar + net.databinder.dispatch:dispatch-core_2.11:jar + net.databinder.dispatch:dispatch-json4s-jackson_2.11:jar + org.antlr:antlr-runtime:jar + org.antlr:stringtemplate:jar + org.apache.commons:commons-compress:jar + org.apache.commons:commons-math:jar + org.apache.commons:commons-math3:jar + org.apache.curator:curator-client:jar + org.apache.curator:curator-framework:jar + org.apache.curator:curator-recipes:jar + org.apache.directory.api:api-asn1-api:jar + org.apache.directory.api:api-util:jar + org.apache.directory.server:apacheds-i18n:jar + org.apache.directory.server:apacheds-kerberos-codec:jar + org.apache.hadoop:hadoop-annotations:jar + org.apache.hadoop:hadoop-auth:jar + org.apache.hadoop:hadoop-common:jar + org.apache.hadoop:hadoop-hdfs:jar + org.apache.htrace:htrace-core:jar + org.apache.httpcomponents:httpclient:jar + org.apache.httpcomponents:httpcore:jar + org.apache.logging.log4j:log4j-api:jar + org.apache.logging.log4j:log4j-core:jar + org.apache.logging.log4j:log4j-jul:jar + org.apache.logging.log4j:log4j-slf4j-impl:jar + org.apache.zookeeper:zookeeper:jar + org.aspectj:aspectjweaver:jar + org.bouncycastle:bcpkix-jdk15on:jar + org.bouncycastle:bcprov-jdk15on:jar + 
org.codehaus.jackson:jackson-jaxrs:jar + org.codehaus.jackson:jackson-xc:jar + org.codehaus.jettison:jettison:jar + org.codehaus.woodstox:stax2-api:jar + org.codehaus.woodstox:woodstox-core-asl:jar + org.eclipse.jetty:jetty-annotations:jar + org.eclipse.jetty:jetty-client:jar + org.eclipse.jetty:jetty-continuation:jar + org.eclipse.jetty:jetty-http:jar + org.eclipse.jetty:jetty-io:jar + org.eclipse.jetty:jetty-jndi:jar + org.eclipse.jetty:jetty-plus:jar + org.eclipse.jetty:jetty-security:jar + org.eclipse.jetty:jetty-server:jar + org.eclipse.jetty:jetty-servlet:jar + org.eclipse.jetty:jetty-servlets:jar + org.eclipse.jetty:jetty-util:jar + org.eclipse.jetty:jetty-webapp:jar + org.eclipse.jetty:jetty-xml:jar + org.eclipse.jetty.websocket:javax-websocket-client-impl:jar + org.eclipse.jetty.websocket:javax-websocket-server-impl:jar + org.eclipse.jetty.websocket:websocket-api:jar + org.eclipse.jetty.websocket:websocket-client:jar + org.eclipse.jetty.websocket:websocket-common:jar + org.eclipse.jetty.websocket:websocket-server:jar + org.eclipse.jetty.websocket:websocket-servlet:jar + org.fusesource.leveldbjni:leveldbjni-all:jar + org.glassfish.hk2:class-model:jar + org.glassfish.hk2:config-types:jar + org.glassfish.hk2.external:aopalliance-repackaged:jar + org.glassfish.hk2.external:asm-all-repackaged:jar + org.glassfish.hk2.external:bean-validator:jar + org.glassfish.hk2.external:javax.inject:jar + org.glassfish.hk2:hk2:jar + org.glassfish.hk2:hk2-api:jar + org.glassfish.hk2:hk2-config:jar + org.glassfish.hk2:hk2-core:jar + org.glassfish.hk2:hk2-locator:jar + org.glassfish.hk2:hk2-runlevel:jar + org.glassfish.hk2:hk2-utils:jar + org.glassfish.hk2:osgi-resource-locator:jar + org.glassfish.hk2:spring-bridge:jar + org.glassfish.jersey.bundles:jaxrs-ri:jar + org.glassfish.jersey.bundles.repackaged:jersey-guava:jar + org.glassfish.jersey.containers:jersey-container-servlet:jar + org.glassfish.jersey.containers:jersey-container-servlet-core:jar + org.glassfish.jersey.core:jersey-client:jar + org.glassfish.jersey.core:jersey-common:jar + org.glassfish.jersey.core:jersey-server:jar + org.glassfish.jersey.ext:jersey-entity-filtering:jar + org.glassfish.jersey.ext:jersey-spring3:jar + org.glassfish.jersey.media:jersey-media-jaxb:jar + org.glassfish.jersey.media:jersey-media-json-jackson:jar + org.glassfish.jersey.media:jersey-media-multipart:jar + org.hdrhistogram:HdrHistogram:jar + org.javassist:javassist:jar + org.json4s:json4s-ast_2.11:jar + org.json4s:json4s-core_2.11:jar + org.json4s:json4s-jackson_2.11:jar + org.jsoup:jsoup:jar + org.jvnet.mimepull:mimepull:jar + org.jvnet:tiger-types:jar + org.latencyutils:LatencyUtils:jar + org.mortbay.jasper:apache-el:jar + org.mortbay.jetty:jetty:jar + org.mortbay.jetty:jetty-util:jar + org.ow2.asm:asm-analysis:jar + org.ow2.asm:asm-commons:jar + org.ow2.asm:asm-tree:jar + org.reflections:reflections:jar + org.scala-lang.modules:scala-parser-combinators_2.11:jar + org.scala-lang.modules:scala-xml_2.11:jar + org.scala-lang:scala-compiler:jar + org.scala-lang:scala-library:jar + org.scala-lang:scala-reflect:jar + org.scala-lang:scalap:jar + org.slf4j:jul-to-slf4j:jar + org.slf4j:slf4j-api:jar + org.springframework.boot:spring-boot:jar + org.springframework.boot:spring-boot-actuator:jar + org.springframework.boot:spring-boot-actuator-autoconfigure:jar + org.springframework.boot:spring-boot-autoconfigure:jar + org.springframework.boot:spring-boot-starter:jar + org.springframework.boot:spring-boot-starter-actuator:jar + 
org.springframework.boot:spring-boot-starter-aop:jar + org.springframework.boot:spring-boot-starter-jetty:jar + org.springframework.boot:spring-boot-starter-json:jar + org.springframework.boot:spring-boot-starter-log4j2:jar + org.springframework.boot:spring-boot-starter-web:jar + org.springframework.cloud:spring-cloud-commons:jar + org.springframework.cloud:spring-cloud-config-client:jar + org.springframework.cloud:spring-cloud-context:jar + org.springframework.cloud:spring-cloud-netflix-archaius:jar + org.springframework.cloud:spring-cloud-netflix-core:jar + org.springframework.cloud:spring-cloud-netflix-eureka-client:jar + org.springframework.cloud:spring-cloud-netflix-ribbon:jar + org.springframework.cloud:spring-cloud-starter:jar + org.springframework.cloud:spring-cloud-starter-config:jar + org.springframework.cloud:spring-cloud-starter-eureka:jar + org.springframework.cloud:spring-cloud-starter-netflix-archaius:jar + org.springframework.cloud:spring-cloud-starter-netflix-eureka-client:jar + org.springframework.cloud:spring-cloud-starter-netflix-ribbon:jar + org.springframework.security:spring-security-crypto:jar + org.springframework.security:spring-security-rsa:jar + org.springframework:spring-aop:jar + org.springframework:spring-beans:jar + org.springframework:spring-context:jar + org.springframework:spring-core:jar + org.springframework:spring-expression:jar + org.springframework:spring-jcl:jar + org.springframework:spring-web:jar + org.springframework:spring-webmvc:jar + org.tukaani:xz:jar + org.yaml:snakeyaml:jar + software.amazon.ion:ion-java:jar + xerces:xercesImpl:jar + xmlenc:xmlenc:jar + xmlpull:xmlpull:jar + xpp3:xpp3_min:jar + + + + + + + + ${basedir}/src/main/resources + + * + + 0777 + 0755 + /dist/v${sqoop.version}/conf + unix + + + + ${basedir}/target + + *.jar + + + *doc.jar + + 0777 + /plugin/${sqoop.version} + + + + + + + diff --git a/exchangis-engines/engineconn-plugins/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/LinkisSqoopClient.java b/exchangis-engines/engineconn-plugins/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/LinkisSqoopClient.java new file mode 100644 index 000000000..9d364be57 --- /dev/null +++ b/exchangis-engines/engineconn-plugins/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/LinkisSqoopClient.java @@ -0,0 +1,229 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
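The `LinkisSqoopClient` class, whose diff follows, bridges into Sqoop through a dedicated `JarLoader` and reflective static calls, always restoring the previous context classloader afterwards. A self-contained sketch of that pattern with a plain `URLClassLoader` (the helper name and the JDK demo target are illustrative):

```scala
import java.net.{URL, URLClassLoader}

object IsolatedInvokeSketch {
  /** Load `className` in an isolated URLClassLoader and invoke its static no-arg `methodName`. */
  def invokeStatic(jars: Array[URL], className: String, methodName: String): Any = {
    val loader = new URLClassLoader(jars, getClass.getClassLoader)
    val previous = Thread.currentThread().getContextClassLoader
    try {
      // Swap the context classloader for the duration of the call, then restore it
      Thread.currentThread().setContextClassLoader(loader)
      val clazz = loader.loadClass(className)
      clazz.getDeclaredMethod(methodName).invoke(null)
    } finally Thread.currentThread().setContextClassLoader(previous)
  }

  def main(args: Array[String]): Unit = {
    // Invoke a JDK method reflectively just to exercise the helper
    println(invokeStatic(Array.empty[URL], "java.lang.System", "lineSeparator"))
  }
}
```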
+ */ + +package org.apache.linkis.engineconnplugin.sqoop.client; + +import org.apache.linkis.common.exception.ErrorException; +import org.apache.linkis.engineconnplugin.sqoop.client.utils.JarLoader; +import org.apache.linkis.protocol.engine.JobProgressInfo; + +import org.apache.sqoop.SqoopOptions; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.PrintWriter; +import java.io.StringWriter; +import java.io.Writer; +import java.lang.reflect.InvocationTargetException; +import java.lang.reflect.Method; +import java.util.Map; +import java.util.function.Consumer; + +public class LinkisSqoopClient { + private static Class<?> sqoopEngineClass; + private static final Logger logger = LoggerFactory.getLogger(LinkisSqoopClient.class); + private static JarLoader jarLoader; + + public static int run(Map<String, String> params) { + try { + jarLoader = + new JarLoader( + new String[] { + LinkisSqoopClient.class + .getProtectionDomain() + .getCodeSource() + .getLocation() + .getPath() + }); + // Load the sqoop class redefined in this project; note that it is loaded without being resolved + jarLoader.loadClass("org.apache.sqoop.mapreduce.JobBase", false); + // Add the sqoop-{version}.jar to the class path + jarLoader.addJarURL( + SqoopOptions.class + .getProtectionDomain() + .getCodeSource() + .getLocation() + .getPath()); + // Update the context loader + Thread.currentThread().setContextClassLoader(jarLoader); + sqoopEngineClass = + jarLoader.loadClass("org.apache.linkis.engineconnplugin.sqoop.client.Sqoop"); + Method method = sqoopEngineClass.getDeclaredMethod("main", Map.class); + return (Integer) method.invoke(null, params); + } catch (Throwable e) { + logger.error("Run Error Message:" + getLog(e), e); + return -1; + } + } + + /** Close the running job and release the client */ + public static void close() { + operateInClassLoader( + jarLoader, + () -> { + Method method = sqoopEngineClass.getDeclaredMethod("close"); + method.invoke(null); + return null; + }, + e -> logger.error("Close Error Message: {}", getLog(e))); + } + + /** + * Fetch application id + * + * @return application id + */ + public static String getApplicationId() { + return operateInClassLoader( + jarLoader, + () -> { + Method method = sqoopEngineClass.getDeclaredMethod("getApplicationId"); + return (String) method.invoke(null); + }, + e -> logger.error("Linkis SqoopClient getApplicationId: {}", getLog(e))); + } + + /** + * Fetch application url + * + * @return url + */ + public static String getApplicationURL() { + return operateInClassLoader( + jarLoader, + () -> { + Method method = sqoopEngineClass.getDeclaredMethod("getApplicationURL"); + return (String) method.invoke(null); + }, + e -> logger.error("Linkis SqoopClient getApplicationURL: {}", getLog(e))); + } + + /** + * Progress value + * + * @return progress + */ + public static Float progress() { + return operateInClassLoader( + jarLoader, + () -> { + Method method = sqoopEngineClass.getDeclaredMethod("progress"); + return (Float) method.invoke(null); + }, + e -> logger.error("Linkis SqoopClient progress: {}", getLog(e))); + } + + /** + * Progress info + * + * @return progress info of the job + */ + @SuppressWarnings("unchecked") + public static JobProgressInfo getProgressInfo() { + return operateInClassLoader( + jarLoader, + () -> { + Method method = sqoopEngineClass.getDeclaredMethod("getProgressInfo"); + return (JobProgressInfo) method.invoke(null); + }, + e -> logger.error("Linkis SqoopClient getProgressInfo: {}", getLog(e))); + } + + /** + * Get metrics + * + * @return map value + */ + @SuppressWarnings("unchecked") + public static Map getMetrics() { + return operateInClassLoader( + jarLoader, + () -> { + Method method = sqoopEngineClass.getDeclaredMethod("getMetrics"); + return (Map) method.invoke(null); + }, + e -> logger.error("Linkis SqoopClient getMetrics: {}", getLog(e))); + } + + /** + * Get diagnosis + * + * @return map value + */ + @SuppressWarnings("unchecked") + public static Map getDiagnosis() { + return operateInClassLoader( + jarLoader, + () -> { + Method method = sqoopEngineClass.getDeclaredMethod("getDiagnosis"); + return (Map) method.invoke(null); + }, + e -> logger.error("Linkis SqoopClient getDiagnosis: {}", getLog(e))); + } + + /** + * Render the throwable's full stack trace as a string for logging + * + * @param e throwable + * @return log + */ + private static String getLog(Throwable e) { + Writer result = new StringWriter(); + PrintWriter printWriter = new PrintWriter(result); + e.printStackTrace(printWriter); + return result.toString(); + } + + /** + * Operate in a special classloader + * + * @param classLoader classloader + * @param operation operation + * @param resolver resolver + * @param <R> return type + * @return return + */ + private static <R> R operateInClassLoader( + ClassLoader classLoader, ClientOperation<R> operation, Consumer<Exception> resolver) { + ClassLoader currentLoader = Thread.currentThread().getContextClassLoader(); + R result = null; + try { + Thread.currentThread().setContextClassLoader(classLoader); + result = operation.operate(); + } catch (Exception t) { + resolver.accept(t); + } finally { + Thread.currentThread().setContextClassLoader(currentLoader); + } + return result; + } + + @FunctionalInterface + interface ClientOperation<T> { + + /** + * Operate + * + * @return T + * @throws ErrorException error exception + */ + T operate() + throws ErrorException, NoSuchMethodException, InvocationTargetException, + IllegalAccessException; + } +} diff --git a/exchangis-engines/engineconn-plugins/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/Sqoop.java b/exchangis-engines/engineconn-plugins/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/Sqoop.java new file mode 100644 index 000000000..bb1e0c093 --- /dev/null +++ b/exchangis-engines/engineconn-plugins/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/Sqoop.java @@ -0,0 +1,551 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
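For orientation, here is a minimal driver sketch for LinkisSqoopClient. The parameter keys below are hypothetical placeholders; the real keys are defined by ParamsMapping and SqoopParamsConfiguration, and run() blocks until the underlying tool returns.

    import java.util.HashMap;
    import java.util.Map;

    // Minimal sketch (assumed keys): submit a job through the isolated
    // classloader, report the outcome, then release the MapReduce handle.
    public class LinkisSqoopClientDemo {
        public static void main(String[] args) {
            Map<String, String> params = new HashMap<>();
            params.put("sqoop.mode", "import");                            // hypothetical key
            params.put("sqoop.args.connect", "jdbc:mysql://db:3306/test"); // hypothetical key

            int exitCode = LinkisSqoopClient.run(params); // blocks until the tool finishes
            System.out.println("exit=" + exitCode
                    + " appId=" + LinkisSqoopClient.getApplicationId()
                    + " progress=" + LinkisSqoopClient.progress());
            LinkisSqoopClient.close();
        }
    }
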
+ */ + +package org.apache.linkis.engineconnplugin.sqoop.client; + +import org.apache.linkis.engineconnplugin.sqoop.client.config.ParamsMapping; +import org.apache.linkis.engineconnplugin.sqoop.client.exception.JobClosableException; +import org.apache.linkis.engineconnplugin.sqoop.context.SqoopEnvConfiguration; +import org.apache.linkis.engineconnplugin.sqoop.context.SqoopParamsConfiguration; +import org.apache.linkis.protocol.engine.JobProgressInfo; + +import org.apache.commons.lang.StringUtils; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.conf.Configured; +import org.apache.hadoop.mapred.TIPStatus; +import org.apache.hadoop.mapreduce.*; +import org.apache.hadoop.util.Tool; +import org.apache.hadoop.util.ToolRunner; +import org.apache.sqoop.manager.SqlManager; +import org.apache.sqoop.manager.oracle.OraOopManagerFactory; +import org.apache.sqoop.util.LoggingUtils; + +import com.cloudera.sqoop.SqoopOptions; +import com.cloudera.sqoop.manager.DefaultManagerFactory; +import com.cloudera.sqoop.tool.SqoopTool; +import com.cloudera.sqoop.util.OptionsFileUtil; + +import java.io.File; +import java.io.IOException; +import java.lang.reflect.Field; +import java.net.MalformedURLException; +import java.nio.file.Paths; +import java.sql.SQLException; +import java.util.*; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicReference; + +/** + * Main entry-point for Sqoop Usage: hadoop jar (this_jar_name) com.cloudera.sqoop.Sqoop (options) + * See the SqoopOptions class for options. + */ +public class Sqoop extends Configured implements Tool { + + public static final Log LOG = LogFactory.getLog(Sqoop.class.getName()); + public static volatile AtomicReference job = new AtomicReference<>(); + public static SqlManager sqlManager; + public static final String[] DEFAULT_FACTORY_CLASS_NAMES_ARR = { + OraOopManagerFactory.class.getName(), DefaultManagerFactory.class.getName(), + }; + public static final String FACTORY_CLASS_NAMES_KEY = "sqoop.connection.factories"; + public static final String METRICS_RUN_TIME = "MetricsRunTime"; + private static Float progress = 0.0f; + + /** + * If this System property is set, always throw an exception, do not just exit with status 1. + */ + public static final String SQOOP_RETHROW_PROPERTY = "sqoop.throwOnError"; + + /** The option to specify an options file from which other options to the tool are read. */ + public static final String SQOOP_OPTIONS_FILE_SPECIFIER = "--options-file"; + + static { + Configuration.addDefaultResource("sqoop-site.xml"); + } + + private SqoopTool tool; + private SqoopOptions options; + private String[] childPrgmArgs; + + /** + * Creates a new instance of Sqoop set to run the supplied SqoopTool with the default + * configuration. + * + * @param tool the SqoopTool to run in the main body of Sqoop. + */ + public Sqoop(SqoopTool tool) { + this(tool, (Configuration) null); + } + + /** + * Creates a new instance of Sqoop set to run the supplied SqoopTool with the provided + * configuration. + * + * @param tool the SqoopTool to run in the main body of Sqoop. + * @param conf the Configuration to use (e.g., from ToolRunner). + */ + public Sqoop(SqoopTool tool, Configuration conf) { + this(tool, conf, new SqoopOptions()); + } + + /** + * Creates a new instance of Sqoop set to run the supplied SqoopTool with the provided + * configuration and SqoopOptions. 
+ * + * @param tool the SqoopTool to run in the main body of Sqoop. + * @param conf the Configuration to use (e.g., from ToolRunner). + * @param opts the SqoopOptions which control the tool's parameters. + */ + public Sqoop(SqoopTool tool, Configuration conf, SqoopOptions opts) { + /*LOG.info("Running Sqoop version: " + new SqoopVersion().VERSION);*/ + + if (null != conf) { + setConf(conf); + } + + this.options = opts; + this.options.setConf(getConf()); + + this.tool = tool; + } + + /** @return the SqoopOptions used in this Sqoop instance. */ + public SqoopOptions getOptions() { + return this.options; + } + + /** @return the SqoopTool used in this Sqoop instance. */ + public SqoopTool getTool() { + return this.tool; + } + + /** Actual main entry-point for the program. */ + @Override + public int run(String[] args) { + if (options.getConf() == null) { + options.setConf(getConf()); + } + options.getConf().setStrings(FACTORY_CLASS_NAMES_KEY, DEFAULT_FACTORY_CLASS_NAMES_ARR); + try { + options = tool.parseArguments(args, null, options, false); + tool.appendArgs(this.childPrgmArgs); + tool.validateOptions(options); + if (options.getVerbose()) { + LoggingUtils.setDebugLevel(); + } + } catch (Exception e) { + LOG.error(e.getMessage(), e); + System.err.println(e.getMessage()); + return 1; + } + return tool.run(options); + } + + /** + * SqoopTools sometimes pass arguments to a child program (e.g., mysqldump). Users can specify + * additional args to these programs by preceding the additional arguments with a standalone + * '--'; but ToolRunner/GenericOptionsParser will cull out this argument. We remove the + * child-program arguments in advance, and store them to be re-added later. + * + * @param argv the argv passed in to the SqoopTool + * @return the argv with a "--" and any subsequent arguments removed. + */ + private String[] stashChildPrgmArgs(String[] argv) { + for (int i = 0; i < argv.length; i++) { + if ("--".equals(argv[i])) { + this.childPrgmArgs = Arrays.copyOfRange(argv, i, argv.length); + return Arrays.copyOfRange(argv, 0, i); + } + } + + // Didn't find child-program arguments. + return argv; + } + + /** + * Given a Sqoop object and a set of arguments to deliver to its embedded SqoopTool, run the + * tool, wrapping the call to ToolRunner. This entry-point is preferred to ToolRunner.run() + * because it has a chance to stash child program arguments before GenericOptionsParser would + * remove them. + */ + public static int runSqoop(Sqoop sqoop, String[] args) { + String[] toolArgs = sqoop.stashChildPrgmArgs(args); + try { + return ToolRunner.run(sqoop.getConf(), sqoop, toolArgs); + } catch (Exception e) { + LOG.error("Got exception running Sqoop: " + e.toString()); + e.printStackTrace(); + rethrowIfRequired(toolArgs, e); + return 1; + } + } + + public static void rethrowIfRequired(String[] toolArgs, Exception ex) { + final RuntimeException exceptionToThrow; + if (ex instanceof RuntimeException) { + exceptionToThrow = (RuntimeException) ex; + } else { + exceptionToThrow = new RuntimeException(ex); + } + + throw exceptionToThrow; + } + + /** + * Entry-point that parses the correct SqoopTool to use from the args, but does not call + * System.exit() as main() will. 
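The "--" convention above is easy to get wrong, so a small self-contained illustration of the splitting rule may help; it re-implements the behaviour of stashChildPrgmArgs outside the class, for demonstration only.

    import java.util.Arrays;

    // Everything from the standalone "--" onward is stashed for the child
    // program (e.g. mysqldump); only the leading portion reaches ToolRunner.
    public class StashArgsDemo {
        public static void main(String[] args) {
            String[] argv = {"--table", "t1", "--", "--single-transaction"};
            int i = Arrays.asList(argv).indexOf("--");
            String[] toolArgs  = i < 0 ? argv : Arrays.copyOfRange(argv, 0, i);
            String[] childArgs = i < 0 ? new String[0] : Arrays.copyOfRange(argv, i, argv.length);
            System.out.println("tool:  " + Arrays.toString(toolArgs));  // [--table, t1]
            System.out.println("child: " + Arrays.toString(childArgs)); // [--, --single-transaction]
        }
    }
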
+ */ + public static int runTool(Map<String, String> argsMap, Configuration conf) { + + // Expand the options + String[] expandedArgs = null; + try { + String[] flatArgs = convertParamsMapToArray(argsMap, conf); + expandedArgs = OptionsFileUtil.expandArguments(flatArgs); + } catch (Exception ex) { + LOG.error("Error while expanding arguments", ex); + System.err.println(ex.getMessage()); + System.err.println("Try 'sqoop help' for usage."); + return 1; + } + + String toolName = expandedArgs[0]; + Configuration pluginConf = SqoopTool.loadPlugins(conf); + SqoopTool tool = SqoopTool.getTool(toolName); + if (null == tool) { + System.err.println("No such sqoop tool: " + toolName + ". See 'sqoop help'."); + return 1; + } + + Sqoop sqoop = new Sqoop(tool, pluginConf); + return runSqoop(sqoop, Arrays.copyOfRange(expandedArgs, 1, expandedArgs.length)); + } + + private static String[] convertParamsMapToArray( + Map<String, String> paramsMap, Configuration conf) throws Exception { + List<String> paramsList = new ArrayList<>(); + + for (Map.Entry<String, String> entry : paramsMap.entrySet()) { + if (StringUtils.isNotBlank(entry.getKey())) { + String key = entry.getKey().toLowerCase(); + if (key.equals(SqoopParamsConfiguration.SQOOP_PARAM_MODE().getValue())) { + paramsList.add(0, entry.getValue()); + continue; + } + if (key.startsWith(SqoopParamsConfiguration.SQOOP_PARAM_ENV_PREFIX().getValue())) { + key = + key.substring( + SqoopParamsConfiguration.SQOOP_PARAM_ENV_PREFIX() + .getValue() + .length()); + conf.set(key, entry.getValue()); + continue; + } + String conKey = ParamsMapping.mapping.get(key); + if (conKey != null) { + if (entry.getValue() != null && entry.getValue().length() != 0) { + paramsList.add(conKey); + paramsList.add(entry.getValue()); + } else { + paramsList.add(conKey); + } + } else { + // Ignore the unrecognized params + LOG.warn("The key " + entry.getKey() + " is not supported"); + } + } + } + return paramsList.toArray(new String[0]); + } + + /** + * Entry-point that parses the correct SqoopTool to use from the args, but does not call + * System.exit() as main() will. 
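To make the key translation above concrete, the following sketch mimics what convertParamsMapToArray produces for a small job map. The "sqoop.args." prefix is an assumption standing in for SqoopParamsConfiguration.SQOOP_PARAM_PREFIX(); the real table is ParamsMapping.mapping.

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.LinkedHashMap;
    import java.util.List;
    import java.util.Map;

    // Stand-in translation table; entries mirror ParamsMapping.
    public class ParamsTranslationDemo {
        public static void main(String[] args) {
            Map<String, String> mapping = new HashMap<>();
            mapping.put("sqoop.args.connect", "--connect");         // assumed prefix
            mapping.put("sqoop.args.num.mappers", "--num-mappers"); // assumed prefix

            Map<String, String> job = new LinkedHashMap<>();
            job.put("sqoop.args.connect", "jdbc:mysql://db:3306/test");
            job.put("sqoop.args.num.mappers", "4");

            List<String> argv = new ArrayList<>();
            job.forEach((k, v) -> {
                String flag = mapping.get(k.toLowerCase());
                if (flag != null) {
                    argv.add(flag);
                    if (v != null && !v.isEmpty()) {
                        argv.add(v); // flags without a value are emitted alone
                    }
                }
            });
            // [--connect, jdbc:mysql://db:3306/test, --num-mappers, 4]
            System.out.println(argv);
        }
    }
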
+ */ + public static int runTool(Map params) { + Configuration conf = new Configuration(); + try { + for (String fileName : + SqoopEnvConfiguration.SQOOP_HADOOP_SITE_FILE().getValue().split(";")) { + File resourceFile = Paths.get(fileName).toFile(); + if (resourceFile.exists()) { + LOG.info("Append resource: [" + resourceFile.getPath() + "] to configuration"); + conf.addResource(resourceFile.toURI().toURL()); + } + } + + } catch (MalformedURLException e) { + e.printStackTrace(); + System.exit(1); + } + return runTool(params, conf); + } + + public static int main(Map code) { + return runTool(code); + } + + /** + * Close method + * + * @throws JobClosableException + */ + public static void close() throws JobClosableException { + Job runnableJob = job.get(); + try { + if (Objects.nonNull(runnableJob)) { + runnableJob.killJob(); + } + if (sqlManager != null && sqlManager.getConnection() != null) { + sqlManager.getConnection().close(); + } + } catch (IllegalStateException se) { + if (isJobReady(runnableJob)) { + LOG.warn( + "Unable to close the mapReduce job, it seems that the job isn't connected to the cluster"); + } else if (Objects.nonNull(runnableJob)) { + String cluster = "UNKNOWN"; + try { + cluster = runnableJob.getCluster().getFileSystem().getCanonicalServiceName(); + } catch (Exception e) { + // Ignore + } + throw new JobClosableException( + "Unable to close the mapReduce job related to cluster [" + cluster + "]", + se); + } + } catch (IOException | SQLException e) { + throw new JobClosableException("Error in closing sqoop client", e); + } + } + + /** + * Get application id + * + * @return string value + */ + public static String getApplicationId() { + String applicationId = ""; + try { + Job runnableJob = job.get(); + if (Objects.nonNull(runnableJob)) { + JobID jobId = runnableJob.getJobID(); + if (Objects.nonNull(jobId)) { + applicationId = jobId.toString(); + } + } + } catch (Exception e) { + // Not throw exception + LOG.error("GetApplicationId in sqoop Error", e); + } + return applicationId; + } + + /** + * Get application url + * + * @return url + */ + public static String getApplicationURL() { + String applicationUrl = ""; + Job runnableJob = job.get(); + try { + if (Objects.nonNull(runnableJob)) { + return runnableJob.getTrackingURL(); + } + } catch (Exception e) { + if (e instanceof IllegalStateException && !isJobReady(runnableJob)) { + LOG.trace("The mapReduce job is not ready, wait for the job status to be Running"); + } else { + LOG.error("GetApplicationURL in sqoop Error", e); + } + } + return applicationUrl; + } + + /** + * Get progress value + * + * @return float value + */ + public static Float progress() { + Job runnableJob = job.get(); + try { + if (Objects.nonNull(runnableJob)) { + // Count by two paragraphs + progress = (runnableJob.mapProgress() + runnableJob.reduceProgress()) / 2.0f; + } + } catch (Exception e) { + if (e instanceof IllegalStateException && !isJobReady(runnableJob)) { + LOG.trace("The mapReduce job is not ready, the value of progress is 0.0 always"); + } else { + LOG.error("Get progress in sqoop Error", e); + } + } + return progress; + } + + /** + * Get progress info + * + * @return info + */ + public static JobProgressInfo getProgressInfo() { + Job runnableJob = job.get(); + try { + if (Objects.nonNull(runnableJob)) { + AtomicInteger totalTasks = new AtomicInteger(); + AtomicInteger failedTasks = new AtomicInteger(); + AtomicInteger runTasks = new AtomicInteger(); + AtomicInteger successTasks = new AtomicInteger(); + TaskType[] analyzeTypes = new 
TaskType[] {TaskType.MAP, TaskType.REDUCE}; + for (TaskType taskType : analyzeTypes) { + TaskReport[] taskReports = runnableJob.getTaskReports(taskType); + Optional.ofNullable(taskReports) + .ifPresent( + reports -> { + totalTasks.addAndGet(reports.length); + for (TaskReport report : reports) { + TIPStatus tipStatus = report.getCurrentStatus(); + switch (tipStatus) { + case FAILED: + case KILLED: + failedTasks.getAndIncrement(); + break; + case PENDING: + case RUNNING: + runTasks.getAndIncrement(); + break; + case COMPLETE: + successTasks.getAndIncrement(); + break; + default: + } + } + }); + } + return new JobProgressInfo( + getApplicationId(), + totalTasks.get(), + runTasks.get(), + failedTasks.get(), + successTasks.get()); + } + } catch (Exception e) { + if (e instanceof IllegalStateException && !isJobReady(runnableJob)) { + LOG.trace( + "The mapReduce job is not ready, the value of progressInfo is always empty"); + } else { + LOG.error("Get progress info in sqoop Error", e); + } + } + return new JobProgressInfo(getApplicationId(), 0, 0, 0, 0); + } + + /** + * Get metrics + * + * @return metrics map + */ + public static Map getMetrics() { + Job runnableJob = job.get(); + // Actual the counter map + Map metricsMap = new HashMap<>(); + try { + if (Objects.nonNull(runnableJob)) { + Counters counters = runnableJob.getCounters(); + counters.forEach( + group -> + metricsMap.computeIfAbsent( + group.getName(), + (groupName) -> { + Map map = new HashMap<>(); + group.forEach( + counter -> + map.put( + counter.getName(), + counter.getValue())); + return map; + })); + long startTime = runnableJob.getStartTime(); + long endTime = + runnableJob.getFinishTime() > 0 + ? runnableJob.getFinishTime() + : System.currentTimeMillis(); + // Analyze the run time + metricsMap.put(METRICS_RUN_TIME, startTime > 0 ? 
endTime - startTime : 0); + } + } catch (Exception e) { + if (e instanceof IllegalStateException && !isJobReady(runnableJob)) { + LOG.trace( + "The mapReduce job is not ready, the value of metricsMap is always empty"); + } else { + LOG.error("Get metrics info in sqoop Error", e); + } + } + return metricsMap; + } + + /** + * Get diagnosis + * + * @return + */ + public static Map getDiagnosis() { + Job runnableJob = job.get(); + Map diagnosis = new HashMap<>(); + try { + if (Objects.nonNull(runnableJob)) { + TaskType[] analyzeTypes = new TaskType[] {TaskType.MAP, TaskType.REDUCE}; + List listReports = new ArrayList<>(); + for (TaskType taskType : analyzeTypes) { + listReports.addAll(Arrays.asList(runnableJob.getTaskReports(taskType))); + } + listReports.forEach( + report -> diagnosis.put(report.getTaskId(), report.getDiagnostics())); + } + } catch (Exception e) { + if (e instanceof IllegalStateException && !isJobReady(runnableJob)) { + LOG.trace("The mapReduce job is not ready, the value of diagnosis is always empty"); + } else { + LOG.error("Get diagnosis info in sqoop Error", e); + } + } + return diagnosis; + } + + /** + * If the job is ready + * + * @param runnableJob job + * @return + */ + private static boolean isJobReady(Job runnableJob) { + boolean ready = false; + try { + Field stateField = Job.class.getDeclaredField("state"); + stateField.setAccessible(true); + Job.JobState state = (Job.JobState) stateField.get(runnableJob); + ready = state.equals(Job.JobState.RUNNING); + } catch (NoSuchFieldException | IllegalAccessException e) { + // Ignore + } + return ready; + } +} diff --git a/exchangis-engines/engineconn-plugins/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/config/ExecutionContext.java b/exchangis-engines/engineconn-plugins/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/config/ExecutionContext.java new file mode 100644 index 000000000..4dde08a76 --- /dev/null +++ b/exchangis-engines/engineconn-plugins/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/config/ExecutionContext.java @@ -0,0 +1,28 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
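isJobReady() above has to read Job's private "state" field reflectively because Hadoop exposes no public "is submitted" probe. Below is a generic rendering of that reflective read, a sketch that assumes plain setAccessible still succeeds (on JDK 9+ the module system may deny it).

    import java.lang.reflect.Field;

    // Generic private-field probe in the style of isJobReady(); returns null
    // when the field is missing or inaccessible instead of throwing.
    final class FieldProbe {
        static Object read(Object target, String fieldName) {
            try {
                Field f = target.getClass().getDeclaredField(fieldName);
                f.setAccessible(true); // may be denied by the module system on JDK 9+
                return f.get(target);
            } catch (ReflectiveOperationException e) {
                return null;
            }
        }

        private static class Box { private final int value = 42; }

        public static void main(String[] args) {
            System.out.println(read(new Box(), "value")); // 42
        }
    }
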
+ */ + +package org.apache.linkis.engineconnplugin.sqoop.client.config; + +import org.apache.linkis.engineconn.common.creation.EngineCreationContext; + +public class ExecutionContext { + private final EngineCreationContext environmentContext; + + public ExecutionContext(EngineCreationContext environmentContext) { + this.environmentContext = environmentContext; + } +} diff --git a/exchangis-engines/engineconn-plugins/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/config/ParamsMapping.java b/exchangis-engines/engineconn-plugins/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/config/ParamsMapping.java new file mode 100644 index 000000000..d52d68176 --- /dev/null +++ b/exchangis-engines/engineconn-plugins/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/config/ParamsMapping.java @@ -0,0 +1,144 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.engineconnplugin.sqoop.client.config; + +import org.apache.linkis.engineconnplugin.sqoop.context.SqoopParamsConfiguration; + +import java.util.HashMap; +import java.util.Map; + +/** Params mapping */ +public final class ParamsMapping { + public static Map mapping; + + static { + String paramPrefix = SqoopParamsConfiguration.SQOOP_PARAM_PREFIX().getValue(); + mapping = new HashMap<>(); + mapping.put(paramPrefix + "connect", "--connect"); + mapping.put(paramPrefix + "connection.manager", "--connection-manager"); + mapping.put(paramPrefix + "connection.param.file", "--connection-param-file"); + mapping.put(paramPrefix + "driver", "--driver"); + mapping.put(paramPrefix + "hadoop.home", "--hadoop-home"); + mapping.put(paramPrefix + "hadoop.mapred.home", "--hadoop-mapred-home"); + mapping.put(paramPrefix + "help", "help"); + mapping.put(paramPrefix + "password", "--password"); + mapping.put(paramPrefix + "password.alias", "--password-alias"); + mapping.put(paramPrefix + "password.file", "--password-file"); + mapping.put(paramPrefix + "relaxed.isolation", "--relaxed-isolation"); + mapping.put(paramPrefix + "skip.dist.cache", "--skip-dist-cache"); + mapping.put(paramPrefix + "username", "--username"); + mapping.put(paramPrefix + "verbose", "--verbose"); + mapping.put(paramPrefix + "append", "--append"); + mapping.put(paramPrefix + "as.avrodatafile", "--as-avrodatafile"); + mapping.put(paramPrefix + "as.parquetfile", "--as-parquetfile"); + mapping.put(paramPrefix + "as.sequencefile", "--as-sequencefile"); + mapping.put(paramPrefix + "as.textfile", "--as-textfile"); + mapping.put(paramPrefix + "autoreset.to.one.mapper", "--autoreset-to-one-mapper"); + mapping.put(paramPrefix + "boundary.query", "--boundary-query"); + mapping.put(paramPrefix + "case.insensitive", "--case-insensitive"); + mapping.put(paramPrefix + "columns", 
"--columns"); + mapping.put(paramPrefix + "compression.codec", "--compression-codec"); + mapping.put(paramPrefix + "delete.target.dir", "--delete-target-dir"); + mapping.put(paramPrefix + "direct", "--direct"); + mapping.put(paramPrefix + "direct.split.size", "--direct-split-size"); + mapping.put(paramPrefix + "query", "--query"); + mapping.put(paramPrefix + "fetch.size", "--fetch-size"); + mapping.put(paramPrefix + "inline.lob.limit", "--inline-lob-limit"); + mapping.put(paramPrefix + "num.mappers", "--num-mappers"); + mapping.put(paramPrefix + "mapreduce.job.name", "--mapreduce-job-name"); + mapping.put(paramPrefix + "merge.key", "--merge-key"); + mapping.put(paramPrefix + "split.by", "--split-by"); + mapping.put(paramPrefix + "table", "--table"); + mapping.put(paramPrefix + "target.dir", "--target-dir"); + mapping.put(paramPrefix + "validate", "--validate"); + mapping.put(paramPrefix + "validation.failurehandler", "--validation-failurehandler"); + mapping.put(paramPrefix + "validation.threshold", " --validation-threshold"); + mapping.put(paramPrefix + "validator", "--validator"); + mapping.put(paramPrefix + "warehouse.dir", "--warehouse-dir"); + mapping.put(paramPrefix + "where", "--where"); + mapping.put(paramPrefix + "compress", "--compress"); + mapping.put(paramPrefix + "check.column", "--check-column"); + mapping.put(paramPrefix + "incremental", "--incremental"); + mapping.put(paramPrefix + "last.value", "--last-value"); + mapping.put(paramPrefix + "enclosed.by", "--enclosed-by"); + mapping.put(paramPrefix + "escaped.by", "--escaped-by"); + mapping.put(paramPrefix + "fields.terminated.by", "--fields-terminated-by"); + mapping.put(paramPrefix + "lines.terminated.by", "--lines-terminated-by"); + mapping.put(paramPrefix + "mysql.delimiters", "--mysql-delimiters"); + mapping.put(paramPrefix + "optionally.enclosed.by", "--optionally-enclosed-by"); + mapping.put(paramPrefix + "input.enclosed.by", "--input-enclosed-by"); + mapping.put(paramPrefix + "input.escaped.by", "--input-escaped-by"); + mapping.put(paramPrefix + "input.fields.terminated.by", "--input-fields-terminated-by"); + mapping.put(paramPrefix + "input.lines.terminated.by", "--input-lines-terminated-by"); + mapping.put(paramPrefix + "input.optionally.enclosed.by", "--input-optionally-enclosed-by"); + mapping.put(paramPrefix + "create.hive.table", "--create-hive-table"); + mapping.put(paramPrefix + "hive.delims.replacement", "--hive-delims-replacement"); + mapping.put(paramPrefix + "hive.database", "--hive-database"); + mapping.put(paramPrefix + "hive.drop.import.delims", "--hive-drop-import-delims"); + mapping.put(paramPrefix + "hive.home", "--hive-home"); + mapping.put(paramPrefix + "hive.import", "--hive-import"); + mapping.put(paramPrefix + "hive.overwrite", "--hive-overwrite"); + mapping.put(paramPrefix + "hive.partition.value", "--hive-partition-value"); + mapping.put(paramPrefix + "hive.table", "--hive-table"); + mapping.put(paramPrefix + "column.family", "--column-family"); + mapping.put(paramPrefix + "hbase.bulkload", "--hbase-bulkload"); + mapping.put(paramPrefix + "hbase.create.table", "--hbase-create-table"); + mapping.put(paramPrefix + "hbase.row.key", "--hbase-row-key"); + mapping.put(paramPrefix + "hbase.table", "--hbase-table"); + mapping.put(paramPrefix + "hcatalog.database", "--hcatalog-database"); + mapping.put(paramPrefix + "hcatalog.home", "--hcatalog-home"); + mapping.put(paramPrefix + "hcatalog.partition.keys", "--hcatalog-partition-keys"); + mapping.put(paramPrefix + "hcatalog.partition.values", 
"--hcatalog-partition-values"); + mapping.put(paramPrefix + "hcatalog.table", "--hcatalog-table"); + mapping.put(paramPrefix + "hive.partition.key", "--hive-partition-key"); + mapping.put(paramPrefix + "map.column.hive", "--map-column-hive"); + mapping.put(paramPrefix + "create.hcatalog.table", "--create-hcatalog-table"); + mapping.put(paramPrefix + "hcatalog.storage.stanza", "--hcatalog-storage-stanza"); + mapping.put(paramPrefix + "accumulo.batch.size", "--accumulo-batch-size"); + mapping.put(paramPrefix + "accumulo.column.family", "--accumulo-column-family"); + mapping.put(paramPrefix + "accumulo.create.table", "--accumulo-create-table"); + mapping.put(paramPrefix + "accumulo.instance", "--accumulo-instance"); + mapping.put(paramPrefix + "accumulo.max.latency", "--accumulo-max-latency"); + mapping.put(paramPrefix + "accumulo.password", "--accumulo-password"); + mapping.put(paramPrefix + "accumulo.row.key", "--accumulo-row-key"); + mapping.put(paramPrefix + "accumulo.table", "--accumulo-table"); + mapping.put(paramPrefix + "accumulo.user", "--accumulo-user"); + mapping.put(paramPrefix + "accumulo.visibility", "--accumulo-visibility"); + mapping.put(paramPrefix + "accumulo.zookeepers", "--accumulo-zookeepers"); + mapping.put(paramPrefix + "bindir", "--bindir"); + mapping.put(paramPrefix + "class.name", "--class-name"); + mapping.put(paramPrefix + "input.null.non.string", "--input-null-non-string"); + mapping.put(paramPrefix + "input.null.string", "--input-null-string"); + mapping.put(paramPrefix + "jar.file", "--jar-file"); + mapping.put(paramPrefix + "map.column.java", "--map-column-java"); + mapping.put(paramPrefix + "null.non.string", "--null-non-string"); + mapping.put(paramPrefix + "null.string", "--null-string"); + mapping.put(paramPrefix + "outdir", "--outdir"); + mapping.put(paramPrefix + "package.name", "--package-name"); + mapping.put(paramPrefix + "conf", "-conf"); + mapping.put(paramPrefix + "D", "-D"); + mapping.put(paramPrefix + "fs", "-fs"); + mapping.put(paramPrefix + "jt", "-jt"); + mapping.put(paramPrefix + "files", "-files"); + mapping.put(paramPrefix + "libjars", "-libjars"); + mapping.put(paramPrefix + "archives", "-archives"); + mapping.put(paramPrefix + "update.key", "--update-key"); + mapping.put(paramPrefix + "update.mode", "--update-mode"); + mapping.put(paramPrefix + "export.dir", "--export-dir"); + } +} diff --git a/exchangis-engines/engineconn-plugins/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/exception/JobClosableException.java b/exchangis-engines/engineconn-plugins/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/exception/JobClosableException.java new file mode 100644 index 000000000..efbcd0e6b --- /dev/null +++ b/exchangis-engines/engineconn-plugins/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/exception/JobClosableException.java @@ -0,0 +1,36 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.engineconnplugin.sqoop.client.exception; + +import org.apache.linkis.common.exception.ErrorException; + +/** Exception in closing/destroying the job */ +public class JobClosableException extends ErrorException { + private static final long serialVersionUID = 1L; + + public static final int ERROR_CODE = 16025; + + public JobClosableException(String message) { + super(ERROR_CODE, message); + } + + public JobClosableException(String message, Throwable e) { + super(ERROR_CODE, message); + this.initCause(e); + } +} diff --git a/exchangis-engines/engineconn-plugins/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/exception/JobExecutionException.java b/exchangis-engines/engineconn-plugins/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/exception/JobExecutionException.java new file mode 100644 index 000000000..168d54ff3 --- /dev/null +++ b/exchangis-engines/engineconn-plugins/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/exception/JobExecutionException.java @@ -0,0 +1,35 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.engineconnplugin.sqoop.client.exception; + +import org.apache.linkis.common.exception.ErrorException; + +public class JobExecutionException extends ErrorException { + private static final long serialVersionUID = 1L; + + public static final int ERROR_CODE = 16023; + + public JobExecutionException(String message) { + super(ERROR_CODE, message); + } + + public JobExecutionException(String message, Throwable e) { + super(ERROR_CODE, message); + this.initCause(e); + } +} diff --git a/exchangis-engines/engineconn-plugins/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/utils/JarLoader.java b/exchangis-engines/engineconn-plugins/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/utils/JarLoader.java new file mode 100644 index 000000000..6140e68e8 --- /dev/null +++ b/exchangis-engines/engineconn-plugins/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/utils/JarLoader.java @@ -0,0 +1,170 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
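Both client exceptions carry a fixed Linkis error code (16025 for close failures, 16023 for execution failures), so callers can branch on the code rather than on message text. A minimal sketch:

    import org.apache.linkis.engineconnplugin.sqoop.client.Sqoop;
    import org.apache.linkis.engineconnplugin.sqoop.client.exception.JobClosableException;

    // Surface the structured error code when a close fails.
    public class CloseErrorDemo {
        public static void main(String[] args) {
            try {
                Sqoop.close(); // no-op when no job has been submitted yet
            } catch (JobClosableException e) {
                System.err.println("close failed, code=" + JobClosableException.ERROR_CODE
                        + ": " + e.getMessage());
            }
        }
    }
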
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.engineconnplugin.sqoop.client.utils; + +import org.apache.commons.lang3.Validate; + +import java.io.File; +import java.io.FileFilter; +import java.net.MalformedURLException; +import java.net.URL; +import java.net.URLClassLoader; +import java.security.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Objects; +import java.util.stream.Collectors; + +public class JarLoader extends URLClassLoader { + private AccessControlContext acc; + + public JarLoader(String[] paths) { + this(paths, false); + } + + public JarLoader(String[] paths, boolean recursive) { + this(paths, recursive, JarLoader.class.getClassLoader()); + } + + public JarLoader(String[] paths, boolean recursive, ClassLoader parent) { + super(getURLs(paths, recursive), parent); + } + + private static URL[] getURLs(String[] paths, boolean recursive) { + List<URL> urls = new ArrayList<>(); + if (recursive) { + List<String> dirs = new ArrayList<>(); + for (String path : paths) { + dirs.add(path); + collectDirs(path, dirs); + } + for (String path : dirs) { + urls.addAll(doGetURLs(path)); + } + } else { + // For a plain classpath entry, the classloader recurses automatically + urls.addAll( + Arrays.stream(paths) + .map(File::new) + .filter(File::exists) + .map( + f -> { + try { + return f.toURI().toURL(); + } catch (MalformedURLException e) { + // Ignore + return null; + } + }) + .filter(Objects::nonNull) + .collect(Collectors.toList())); + } + return urls.toArray(new URL[0]); + } + + public void addJarURL(String path) { + // Single jar + File singleJar = new File(path); + if (singleJar.exists() && singleJar.isFile()) { + try { + this.addURL(singleJar.toURI().toURL()); + } catch (MalformedURLException e) { + // Ignore + } + } + } + + private static void collectDirs(String path, List<String> collector) { + + File current = new File(path); + if (!current.exists() || !current.isDirectory()) { + return; + } + + if (null != current.listFiles()) { + for (File child : Objects.requireNonNull(current.listFiles())) { + if (!child.isDirectory()) { + continue; + } + + collector.add(child.getAbsolutePath()); + collectDirs(child.getAbsolutePath(), collector); + } + } + } + + private static List<URL> doGetURLs(final String path) { + + File jarPath = new File(path); + + Validate.isTrue(jarPath.exists() && jarPath.isDirectory(), "The jar path must exist and be a directory."); + + /* set filter */ + FileFilter jarFilter = pathname -> pathname.getName().endsWith(".jar"); + + /* iterate all jars */ + File[] allJars = new File(path).listFiles(jarFilter); + assert allJars != null; + List<URL> jarURLs = new ArrayList<>(allJars.length); + + for (File allJar : allJars) { + try { + jarURLs.add(allJar.toURI().toURL()); + } catch (Exception e) { + // Ignore + } + } + + return jarURLs; + } + + /** + * Change the class loading order: try this loader's own jars first (child-first) + * + * @param name class name + * @param resolve whether to resolve the class + * @return the loaded class + * @throws ClassNotFoundException + */ + @Override + public Class<?> loadClass(String name, boolean resolve) throws 
ClassNotFoundException { + synchronized (getClassLoadingLock(name)) { + // First, check if the class has already been loaded + Class c = findLoadedClass(name); + if (c == null) { + long t0 = System.nanoTime(); + try { + // invoke findClass in this class + c = findClass(name); + } catch (ClassNotFoundException e) { + // ClassNotFoundException thrown if class not found + } + if (c == null) { + return super.loadClass(name, resolve); + } + // For compatibility with higher versions > java 1.8.0_141 + // sun.misc.PerfCounter.getFindClasses().addElapsedTimeFrom(t0); + // sun.misc.PerfCounter.getFindClasses().increment(); + } + if (resolve) { + resolveClass(c); + } + return c; + } + } +} diff --git a/exchangis-engines/engineconn-plugins/sqoop/src/main/java/org/apache/sqoop/mapreduce/JobBase.java b/exchangis-engines/engineconn-plugins/sqoop/src/main/java/org/apache/sqoop/mapreduce/JobBase.java new file mode 100644 index 000000000..6703c303c --- /dev/null +++ b/exchangis-engines/engineconn-plugins/sqoop/src/main/java/org/apache/sqoop/mapreduce/JobBase.java @@ -0,0 +1,410 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.sqoop.mapreduce; + +import org.apache.linkis.engineconnplugin.sqoop.client.Sqoop; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.mapreduce.InputFormat; +import org.apache.hadoop.mapreduce.Job; +import org.apache.hadoop.mapreduce.Mapper; +import org.apache.hadoop.mapreduce.OutputFormat; +import org.apache.hadoop.util.StringUtils; +import org.apache.sqoop.config.ConfigurationConstants; + +import com.cloudera.sqoop.SqoopOptions; +import com.cloudera.sqoop.config.ConfigurationHelper; +import com.cloudera.sqoop.manager.ConnManager; +import com.cloudera.sqoop.tool.SqoopTool; +import com.cloudera.sqoop.util.ClassLoaderStack; +import com.cloudera.sqoop.util.Jars; + +import java.io.File; +import java.io.IOException; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; + +/** + * Base class for configuring and running a MapReduce job. Allows dependency injection, etc, for + * easy customization of import job types. 
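Since JarLoader inverts the usual parent-first delegation, a short hypothetical usage may help; the plugin directory and class name below are placeholders.

    import org.apache.linkis.engineconnplugin.sqoop.client.utils.JarLoader;

    // Hypothetical child-first loading: classes found in the plugin directory
    // shadow the application's copies for code run under this loader.
    public class JarLoaderDemo {
        public static void main(String[] args) throws Exception {
            JarLoader loader = new JarLoader(new String[] {"/opt/plugins/sqoop"}, true); // recursive scan
            ClassLoader previous = Thread.currentThread().getContextClassLoader();
            try {
                Thread.currentThread().setContextClassLoader(loader);
                Class<?> entry = loader.loadClass("org.example.PluginEntry", true); // placeholder class
                System.out.println("loaded by " + entry.getClassLoader());
            } finally {
                Thread.currentThread().setContextClassLoader(previous); // always restore
            }
        }
    }
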
+ */ +public class JobBase { + + public static final Log LOG = LogFactory.getLog(JobBase.class.getName()); + + public static final String SERIALIZE_SQOOPOPTIONS = "sqoop.jobbase.serialize.sqoopoptions"; + public static final boolean SERIALIZE_SQOOPOPTIONS_DEFAULT = false; + public static final String HADOOP_MAP_TASK_MAX_ATTEMTPS = "mapreduce.map.maxattempts"; + public static final String HADOOP_REDUCE_TASK_MAX_ATTEMTPS = "mapreduce.reduce.maxattempts"; + + protected SqoopOptions options; + protected Class mapperClass; + protected Class inputFormatClass; + protected Class outputFormatClass; + + private Job mrJob; + + private ClassLoader prevClassLoader = null; + protected final boolean isHCatJob; + + public static final String PROPERTY_VERBOSE = "sqoop.verbose"; + + public JobBase() { + this(null); + } + + public JobBase(final SqoopOptions opts) { + this(opts, null, null, null); + } + + public JobBase( + final SqoopOptions opts, + final Class mapperClass, + final Class inputFormatClass, + final Class outputFormatClass) { + System.out.println(SqoopOptions.class.getClassLoader()); + this.options = opts; + this.mapperClass = mapperClass; + this.inputFormatClass = inputFormatClass; + this.outputFormatClass = outputFormatClass; + isHCatJob = options.getHCatTableName() != null; + } + + /** @return the mapper class to use for the job. */ + protected Class getMapperClass() throws ClassNotFoundException { + return this.mapperClass; + } + + /** @return the inputformat class to use for the job. */ + protected Class getInputFormatClass() throws ClassNotFoundException { + return this.inputFormatClass; + } + + /** @return the outputformat class to use for the job. */ + protected Class getOutputFormatClass() throws ClassNotFoundException { + return this.outputFormatClass; + } + + /** Set the OutputFormat class to use for this job. */ + public void setOutputFormatClass(Class cls) { + this.outputFormatClass = cls; + } + + /** Set the InputFormat class to use for this job. */ + public void setInputFormatClass(Class cls) { + this.inputFormatClass = cls; + } + + /** Set the Mapper class to use for this job. */ + public void setMapperClass(Class cls) { + this.mapperClass = cls; + } + + /** Set the SqoopOptions configuring this job. */ + public void setOptions(SqoopOptions opts) { + this.options = opts; + } + + /** + * Put jar files required by Sqoop into the DistributedCache. + * + * @param job the Job being submitted. + * @param mgr the ConnManager to use. + */ + protected void cacheJars(Job job, ConnManager mgr) throws IOException { + if (options.isSkipDistCache()) { + LOG.info("Not adding sqoop jars to distributed cache as requested"); + return; + } + + Configuration conf = job.getConfiguration(); + FileSystem fs = FileSystem.getLocal(conf); + Set localUrls = new HashSet(); + + addToCache(Jars.getSqoopJarPath(), fs, localUrls); + if (null != mgr) { + addToCache(Jars.getDriverClassJar(mgr), fs, localUrls); + addToCache(Jars.getJarPathForClass(mgr.getClass()), fs, localUrls); + } + + SqoopTool tool = this.options.getActiveSqoopTool(); + if (null != tool) { + // Make sure the jar for the tool itself is on the classpath. (In case + // this is a third-party plugin tool.) + addToCache(Jars.getJarPathForClass(tool.getClass()), fs, localUrls); + List toolDeps = tool.getDependencyJars(); + if (null != toolDeps) { + for (String depFile : toolDeps) { + addToCache(depFile, fs, localUrls); + } + } + } + + // If the user specified a particular jar file name, + + // Add anything in $SQOOP_HOME/lib, if this is set. 
+ String sqoopHome = System.getenv("SQOOP_HOME"); + if (null != sqoopHome) { + File sqoopHomeFile = new File(sqoopHome); + File sqoopLibFile = new File(sqoopHomeFile, "lib"); + if (sqoopLibFile.exists()) { + addDirToCache(sqoopLibFile, fs, localUrls); + } + } else { + LOG.warn("SQOOP_HOME is unset. May not be able to find " + "all job dependencies."); + } + + // If the user run import into hive as Parquet file, + // Add anything in $HIVE_HOME/lib. + if (options.doHiveImport() + && (options.getFileLayout() == SqoopOptions.FileLayout.ParquetFile)) { + String hiveHome = options.getHiveHome(); + if (null != hiveHome) { + File hiveHomeFile = new File(hiveHome); + File hiveLibFile = new File(hiveHomeFile, "lib"); + if (hiveLibFile.exists()) { + addDirToCache(hiveLibFile, fs, localUrls); + } + } else { + LOG.warn("HIVE_HOME is unset. Cannot add hive libs as dependencies."); + } + } + + String tmpjars = conf.get(ConfigurationConstants.MAPRED_DISTCACHE_CONF_PARAM); + StringBuilder sb = new StringBuilder(); + + // If we didn't put anything in our set, then there's nothing to cache. + if (localUrls.isEmpty() && (org.apache.commons.lang.StringUtils.isEmpty(tmpjars))) { + return; + } + + if (null != tmpjars) { + String[] tmpjarsElements = tmpjars.split(","); + for (String jarElement : tmpjarsElements) { + if (jarElement.isEmpty()) { + warn("Empty input is invalid and was removed from tmpjars."); + } else { + sb.append(jarElement); + sb.append(","); + } + } + } + + int lastComma = sb.lastIndexOf(","); + if (localUrls.isEmpty() && lastComma >= 0) { + sb.deleteCharAt(lastComma); + } + + // Add these to the 'tmpjars' array, which the MR JobSubmitter + // will upload to HDFS and put in the DistributedCache libjars. + sb.append(StringUtils.arrayToString(localUrls.toArray(new String[0]))); + conf.set(ConfigurationConstants.MAPRED_DISTCACHE_CONF_PARAM, sb.toString()); + } + + protected void warn(String message) { + LOG.warn(message); + } + + private void addToCache(String file, FileSystem fs, Set localUrls) { + if (null == file) { + return; + } + + Path p = new Path(file); + String qualified = p.makeQualified(fs).toString(); + LOG.debug("Adding to job classpath: " + qualified); + localUrls.add(qualified); + } + + /** Add the .jar elements of a directory to the DCache classpath, nonrecursively. */ + private void addDirToCache(File dir, FileSystem fs, Set localUrls) { + if (null == dir) { + return; + } + + for (File libfile : dir.listFiles()) { + if (libfile.exists() && !libfile.isDirectory() && libfile.getName().endsWith("jar")) { + addToCache(libfile.toString(), fs, localUrls); + } + } + } + + /** If jars must be loaded into the local environment, do so here. */ + protected void loadJars(Configuration conf, String ormJarFile, String tableClassName) + throws IOException { + + boolean isLocal = + "local".equals(conf.get("mapreduce.jobtracker.address")) + || "local".equals(conf.get("mapred.job.tracker")); + if (isLocal) { + // If we're using the LocalJobRunner, then instead of using the compiled + // jar file as the job source, we're running in the current thread. Push + // on another classloader that loads from that jar in addition to + // everything currently on the classpath. + this.prevClassLoader = ClassLoaderStack.addJarFile(ormJarFile, tableClassName); + } + } + + /** If any classloader was invoked by loadJars, free it here. */ + protected void unloadJars() { + if (null != this.prevClassLoader) { + // unload the special classloader for this jar. 
+ ClassLoaderStack.setCurrentClassLoader(this.prevClassLoader); + } + } + + /** Configure the inputformat to use for the job. */ + protected void configureInputFormat( + Job job, String tableName, String tableClassName, String splitByCol) + throws ClassNotFoundException, IOException { + // TODO: 'splitByCol' is import-job specific; lift it out of this API. + Class ifClass = getInputFormatClass(); + LOG.debug("Using InputFormat: " + ifClass); + job.setInputFormatClass(ifClass); + } + + /** Configure the output format to use for the job. */ + protected void configureOutputFormat(Job job, String tableName, String tableClassName) + throws ClassNotFoundException, IOException { + Class ofClass = getOutputFormatClass(); + LOG.debug("Using OutputFormat: " + ofClass); + job.setOutputFormatClass(ofClass); + } + + /** + * Set the mapper class implementation to use in the job, as well as any related configuration + * (e.g., map output types). + */ + protected void configureMapper(Job job, String tableName, String tableClassName) + throws ClassNotFoundException, IOException { + job.setMapperClass(getMapperClass()); + } + + /** + * Configure the number of map/reduce tasks to use in the job, returning the number of map tasks + * for backward compatibility. + */ + protected int configureNumTasks(Job job) throws IOException { + int numMapTasks = configureNumMapTasks(job); + configureNumReduceTasks(job); + return numMapTasks; + } + + /** Configure the number of map tasks to use in the job. */ + protected int configureNumMapTasks(Job job) throws IOException { + int numMapTasks = options.getNumMappers(); + if (numMapTasks < 1) { + numMapTasks = SqoopOptions.DEFAULT_NUM_MAPPERS; + LOG.warn("Invalid mapper count; using " + numMapTasks + " mappers."); + } + ConfigurationHelper.setJobNumMaps(job, numMapTasks); + return numMapTasks; + } + + /** Configure the number of reduce tasks to use in the job. */ + protected int configureNumReduceTasks(Job job) throws IOException { + job.setNumReduceTasks(0); + return 0; + } + + /** Set the main job that will be run. */ + protected void setJob(Job job) { + LOG.info("Customize JobBase Set The Job"); + mrJob = job; + Sqoop.job.set(job); + } + + /** @return the main MapReduce job that is being run, or null if no job has started. */ + public Job getJob() { + return mrJob; + } + + /** + * Create new Job object in unified way for all types of jobs. + * + * @param configuration Hadoop configuration that should be used + * @return New job object, created object won't be persisted in the instance + */ + public Job createJob(Configuration configuration) throws IOException { + // Put the SqoopOptions into job if requested + if (configuration.getBoolean(SERIALIZE_SQOOPOPTIONS, SERIALIZE_SQOOPOPTIONS_DEFAULT)) { + putSqoopOptionsToConfiguration(options, configuration); + } + + return new Job(configuration); + } + + /** + * Iterates over serialized form of SqoopOptions and put them into Configuration object. + * + * @param opts SqoopOptions that should be serialized + * @param configuration Target configuration object + */ + public void putSqoopOptionsToConfiguration(SqoopOptions opts, Configuration configuration) { + for (Map.Entry e : opts.writeProperties().entrySet()) { + String key = (String) e.getKey(); + String value = (String) e.getValue(); + + // We don't need to do if(value is empty) because that is already done + // for us by the SqoopOptions.writeProperties() method. + configuration.set("sqoop.opt." + key, value); + } + } + + /** Actually run the MapReduce job. 
*/ + protected boolean runJob(Job job) + throws ClassNotFoundException, IOException, InterruptedException { + return job.waitForCompletion(true); + } + + /** + * Display a notice on the log that the current MapReduce job has been retired, and thus + * Counters are unavailable. + * + * @param log the Log to display the info to. + */ + protected void displayRetiredJobNotice(Log log) { + log.info("The MapReduce job has already been retired. Performance"); + log.info("counters are unavailable. To get this information, "); + log.info("you will need to enable the completed job store on "); + log.info("the jobtracker with:"); + log.info("mapreduce.jobtracker.persist.jobstatus.active = true"); + log.info("mapreduce.jobtracker.persist.jobstatus.hours = 1"); + log.info("A jobtracker restart is required for these settings"); + log.info("to take effect."); + } + + /** + * Save interesting options to constructed job. Goal here is to propagate some of them to the + * job itself, so that they can be easily accessed. We're propagating only interesting global + * options (like verbose flag). + * + * @param job Destination job to save options + */ + protected void propagateOptionsToJob(Job job) { + Configuration configuration = job.getConfiguration(); + + // So far, propagate only verbose flag + configuration.setBoolean(PROPERTY_VERBOSE, options.getVerbose()); + } +} diff --git a/exchangis-engines/engineconn-plugins/sqoop/src/main/resources/linkis-engineconn.properties b/exchangis-engines/engineconn-plugins/sqoop/src/main/resources/linkis-engineconn.properties new file mode 100644 index 000000000..99b76eaea --- /dev/null +++ b/exchangis-engines/engineconn-plugins/sqoop/src/main/resources/linkis-engineconn.properties @@ -0,0 +1,22 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
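putSqoopOptionsToConfiguration() above flattens every serialized option under a "sqoop.opt." prefix so that cluster-side tasks can read it back from the job Configuration. A sketch of that round trip, with an assumed property name since the exact keys come from SqoopOptions.writeProperties():

    import org.apache.hadoop.conf.Configuration;

    // Write-then-read round trip over the "sqoop.opt." namespace used by JobBase.
    public class SqoopOptRoundTrip {
        public static void main(String[] args) {
            Configuration conf = new Configuration(false);
            conf.set("sqoop.opt.verbose", "true"); // assumed serialized option name
            System.out.println(conf.get("sqoop.opt.verbose")); // true
        }
    }
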
+# + + +wds.linkis.server.version=v1 + +wds.linkis.engineconn.plugin.default.class=org.apache.linkis.engineconnplugin.sqoop.SqoopEngineConnPlugin + +wds.linkis.engine.connector.hooks=org.apache.linkis.engineconn.computation.executor.hook.ComputationEngineConnHook +# wds.linkis.hadoop.site.xml= \ No newline at end of file diff --git a/exchangis-engines/engineconn-plugins/sqoop/src/main/resources/log4j2.xml b/exchangis-engines/engineconn-plugins/sqoop/src/main/resources/log4j2.xml new file mode 100644 index 000000000..3b45ae2a1 --- /dev/null +++ b/exchangis-engines/engineconn-plugins/sqoop/src/main/resources/log4j2.xml @@ -0,0 +1,82 @@ [log4j2.xml body lost in extraction: an 82-line Log4j2 XML configuration for this engine conn; only the diff header is recoverable] diff --git a/exchangis-engines/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/SqoopEngineConnPlugin.scala b/exchangis-engines/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/SqoopEngineConnPlugin.scala new file mode 100644 index 000000000..ae322a3f5 --- /dev/null +++ b/exchangis-engines/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/SqoopEngineConnPlugin.scala @@ -0,0 +1,68 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.engineconnplugin.sqoop + +import org.apache.linkis.manager.engineplugin.common.EngineConnPlugin +import org.apache.linkis.manager.engineplugin.common.creation.EngineConnFactory +import org.apache.linkis.manager.engineplugin.common.launch.EngineConnLaunchBuilder +import org.apache.linkis.manager.engineplugin.common.resource.{EngineResourceFactory, GenericEngineResourceFactory} +import org.apache.linkis.manager.label.entity.Label +import org.apache.linkis.engineconnplugin.sqoop.factory.SqoopEngineConnFactory +import org.apache.linkis.engineconnplugin.sqoop.launch.SqoopEngineConnLaunchBuilder + +import java.util.Map +import java.util.List + +class SqoopEngineConnPlugin extends EngineConnPlugin{ + private val EP_CONTEXT_CONSTRUCTOR_LOCK = new Object() + private var engineResourceFactory: EngineResourceFactory = _ + private var engineConnLaunchBuilder: EngineConnLaunchBuilder = _ + private var engineConnFactory: EngineConnFactory = _ + override def init(params: Map[String, AnyRef]): Unit = {} + + override def getEngineResourceFactory: EngineResourceFactory = { + EP_CONTEXT_CONSTRUCTOR_LOCK.synchronized{ + if(null == engineResourceFactory){ + engineResourceFactory = new GenericEngineResourceFactory + } + engineResourceFactory + } + } + + override def getEngineConnLaunchBuilder: EngineConnLaunchBuilder = { + EP_CONTEXT_CONSTRUCTOR_LOCK.synchronized { + if (null == engineConnLaunchBuilder) { + engineConnLaunchBuilder = new SqoopEngineConnLaunchBuilder() + } + engineConnLaunchBuilder + } + } + + + override def getEngineConnFactory: EngineConnFactory = { + EP_CONTEXT_CONSTRUCTOR_LOCK.synchronized { + if (null == engineConnFactory) { + engineConnFactory = new SqoopEngineConnFactory + } + engineConnFactory + } + } + + override def getDefaultLabels: List[Label[_]] = new java.util.ArrayList[Label[_]] + +} diff --git a/exchangis-engines/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/context/SqoopEngineConnContext.scala b/exchangis-engines/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/context/SqoopEngineConnContext.scala new file mode 100644 index 000000000..be1d340db --- /dev/null +++ b/exchangis-engines/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/context/SqoopEngineConnContext.scala @@ -0,0 +1,29 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
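Editor's note: the plugin above guards each factory field with EP_CONTEXT_CONSTRUCTOR_LOCK plus a null check, i.e. hand-rolled lazy initialization. In Scala a `lazy val` gives the same thread-safe, initialize-once behavior with less ceremony; a minimal sketch with simplified stand-in types (not the real Linkis interfaces):

```scala
// Stand-ins for the Linkis factory types, for illustration only.
trait EngineResourceFactory
class GenericEngineResourceFactory extends EngineResourceFactory

class PluginSketch {
  // Equivalent to: LOCK.synchronized { if (field == null) field = new ...; field }
  lazy val engineResourceFactory: EngineResourceFactory = new GenericEngineResourceFactory
}

object PluginSketch extends App {
  val p = new PluginSketch
  // Both reads return the same instance; initialization ran exactly once.
  assert(p.engineResourceFactory eq p.engineResourceFactory)
  println("single instance: " + p.engineResourceFactory)
}
```

The explicit lock in the PR is still a valid choice where the field must stay a Java-visible `var`; the sketch only shows the idiomatic alternative.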
+ */ + +package org.apache.linkis.engineconnplugin.sqoop.context + +import org.apache.linkis.engineconnplugin.sqoop.client.config.ExecutionContext + +class SqoopEngineConnContext{ + private var executionContext: ExecutionContext = _ + + def getExecutionContext: ExecutionContext = executionContext + + def setExecutionContext(executionContext: ExecutionContext): Unit = this.executionContext = executionContext + +} diff --git a/exchangis-engines/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/context/SqoopEnvConfiguration.scala b/exchangis-engines/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/context/SqoopEnvConfiguration.scala new file mode 100644 index 000000000..63417dbca --- /dev/null +++ b/exchangis-engines/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/context/SqoopEnvConfiguration.scala @@ -0,0 +1,39 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.engineconnplugin.sqoop.context + +import org.apache.linkis.common.conf.{CommonVars, TimeType} + +object SqoopEnvConfiguration { + + val SQOOP_HADOOP_SITE_FILE: CommonVars[String] = CommonVars("wds.linkis.hadoop.site.xml", "core-site.xml;hdfs-site.xml;yarn-site.xml;mapred-site.xml") + + val SQOOP_STATUS_FETCH_INTERVAL: CommonVars[TimeType] = CommonVars("sqoop.fetch.status.interval", new TimeType("5s")) + + val LINKIS_DATASOURCE_SERVICE_NAME: CommonVars[String] = CommonVars("wds.linkis.datasource.service.name", "linkis-ps-data-source-manager") + + val SQOOP_HOME: CommonVars[String] = CommonVars("SQOOP_HOME", "") + + val SQOOP_CONF_DIR: CommonVars[String] = CommonVars("SQOOP_CONF_DIR", "") + + val SQOOP_HCAT_HOME: CommonVars[String] = CommonVars("HCAT_HOME", "") + + val SQOOP_HBASE_HOME: CommonVars[String] = CommonVars("HBASE_HOME", "") + + val SQOOP_ZOOCFGDIR: CommonVars[String] = CommonVars("ZOOCFGDIR", "") +} diff --git a/exchangis-engines/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/context/SqoopParamsConfiguration.scala b/exchangis-engines/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/context/SqoopParamsConfiguration.scala new file mode 100644 index 000000000..0c449a9d4 --- /dev/null +++ b/exchangis-engines/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/context/SqoopParamsConfiguration.scala @@ -0,0 +1,42 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.engineconnplugin.sqoop.context + +import org.apache.linkis.common.conf.CommonVars + +/** + * Sqoop Params Configuration + */ +object SqoopParamsConfiguration { + + val SQOOP_PARAM_MODE: CommonVars[String] = CommonVars("sqoop.params.name.mode", "sqoop.mode") + + val SQOOP_PARAM_HOST: CommonVars[String] = CommonVars("sqoop.params.name.host", "sqoop.args.host") + + val SQOOP_PARAM_PORT: CommonVars[String] = CommonVars("sqoop.params.name.port", "sqoop.args.port") + + val SQOOP_PARAM_CONNECT_PARAMS: CommonVars[String] = CommonVars("sqoop.params.name.connect-params", "sqoop.args.params") + + val SQOOP_PARAM_CONNECT: CommonVars[String] = CommonVars("sqoop.params.name.connect", "sqoop.args.connect") + + val SQOOP_PARAM_DATA_SOURCE: CommonVars[String] = CommonVars("sqoop.params.name.data-source", "sqoop.args.datasource.name") + + val SQOOP_PARAM_PREFIX: CommonVars[String] = CommonVars("sqoop.params.name.prefix", "sqoop.args.") + + val SQOOP_PARAM_ENV_PREFIX: CommonVars[String] = CommonVars("sqoop.params.name.env.prefix", "sqoop.env.") +} diff --git a/exchangis-engines/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/context/SqoopResourceConfiguration.scala b/exchangis-engines/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/context/SqoopResourceConfiguration.scala new file mode 100644 index 000000000..710d28cc2 --- /dev/null +++ b/exchangis-engines/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/context/SqoopResourceConfiguration.scala @@ -0,0 +1,30 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
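Editor's note: SQOOP_PARAM_PREFIX above implies the convention that job params whose keys start with "sqoop.args." carry the Sqoop CLI arguments, while the other sqoop.params.name.* vars only name the special keys (mode, host, port, ...). A hedged sketch of how such a map could be flattened into argument form; the exact flattening rule is my assumption, not code from this PR:

```scala
// Assumed convention: every "sqoop.args.<name>" entry becomes "--<name> <value>",
// and a blank value yields a bare flag. Not the PR's actual resolver.
object SqoopArgsBuilder {
  val ArgPrefix = "sqoop.args."

  def toCliArgs(params: Map[String, String]): Seq[String] =
    params.toSeq.collect {
      case (k, v) if k.startsWith(ArgPrefix) =>
        val name = k.stripPrefix(ArgPrefix)
        if (v == null || v.isEmpty) Seq("--" + name) else Seq("--" + name, v)
    }.flatten

  def main(args: Array[String]): Unit = {
    val params = Map(
      "sqoop.mode" -> "import", // no "sqoop.args." prefix: not a CLI argument
      "sqoop.args.connect" -> "jdbc:mysql://host:3306/db",
      "sqoop.args.verbose" -> "")
    println(toCliArgs(params).mkString(" "))
  }
}
```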
+ */ + +package org.apache.linkis.engineconnplugin.sqoop.context + +import org.apache.linkis.common.conf.CommonVars + + +object SqoopResourceConfiguration { + + val LINKIS_SQOOP_TASK_MAP_MEMORY: CommonVars[Int] = CommonVars[Int]("sqoop.task.map.memory", 2) + + val LINKIS_SQOOP_TASK_MAP_CPU_CORES: CommonVars[Int] = CommonVars[Int]("sqoop.task.map.cpu.cores", 1) + + val LINKIS_QUEUE_NAME: CommonVars[String] = CommonVars[String]("wds.linkis.rm.yarnqueue", "default") +} diff --git a/exchangis-engines/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/executor/SqoopExecutor.scala b/exchangis-engines/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/executor/SqoopExecutor.scala new file mode 100644 index 000000000..5c43366ba --- /dev/null +++ b/exchangis-engines/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/executor/SqoopExecutor.scala @@ -0,0 +1,50 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.engineconnplugin.sqoop.executor + +import org.apache.linkis.engineconn.executor.entity.{LabelExecutor, ResourceExecutor, YarnExecutor} +import org.apache.linkis.engineconnplugin.sqoop.client.Sqoop +import org.apache.linkis.engineconnplugin.sqoop.context.SqoopResourceConfiguration.LINKIS_QUEUE_NAME +import org.apache.linkis.manager.common.entity.resource.NodeResource +import org.apache.linkis.manager.label.entity.Label +import java.util + +import org.apache.linkis.engineconnplugin.sqoop.client.exception.JobExecutionException +import org.apache.linkis.engineconnplugin.sqoop.context.SqoopEngineConnContext + +trait SqoopExecutor extends YarnExecutor with LabelExecutor with ResourceExecutor{ + private var yarnMode: String = "Client" + private var executorLabels: util.List[Label[_]] = new util.ArrayList[Label[_]] + override def getApplicationId: String = Sqoop.getApplicationId + + override def getApplicationURL: String = Sqoop.getApplicationURL + + override def getYarnMode: String = yarnMode + def setYarnMode(yarnMode: String): Unit = this.yarnMode = yarnMode + + override def getQueue: String = LINKIS_QUEUE_NAME.getValue + + override def getExecutorLabels(): util.List[Label[_]] = executorLabels + + override def setExecutorLabels(labels: util.List[Label[_]]): Unit = this.executorLabels = labels + + override def requestExpectedResource(expectedResource: NodeResource): NodeResource = throw new JobExecutionException("Method requestExpectedResource is not supported.") + + protected val sqoopEngineConnContext: SqoopEngineConnContext +} diff --git 
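Editor's note: SqoopExecutor above is the Yarn-facing surface of the executor; application id, tracking URL and queue are all delegated to the static Sqoop client. A small stand-alone sketch of how a monitor might consume that surface (the trait here is a simplified stand-in, not the Linkis YarnExecutor, and the values are fabricated):

```scala
// Simplified stand-in for the Yarn-facing executor surface.
trait YarnAppInfo {
  def getApplicationId: String
  def getApplicationURL: String
  def getQueue: String
}

object YarnAppMonitor {
  def report(app: YarnAppInfo): String =
    s"yarn app ${app.getApplicationId} on queue ${app.getQueue}: ${app.getApplicationURL}"

  def main(args: Array[String]): Unit = {
    // Fabricated values purely for demonstration.
    val fake = new YarnAppInfo {
      def getApplicationId = "application_1700000000000_0001"
      def getApplicationURL = "http://rm:8088/cluster/app/application_1700000000000_0001"
      def getQueue = "default"
    }
    println(report(fake))
  }
}
```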
a/exchangis-engines/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/executor/SqoopOnceCodeExecutor.scala b/exchangis-engines/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/executor/SqoopOnceCodeExecutor.scala new file mode 100644 index 000000000..1d7cb6af3 --- /dev/null +++ b/exchangis-engines/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/executor/SqoopOnceCodeExecutor.scala @@ -0,0 +1,142 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.engineconnplugin.sqoop.executor + +import org.apache.linkis.common.utils.{JsonUtils, OverloadUtils, Utils} +import org.apache.linkis.engineconn.once.executor.{OnceExecutorExecutionContext, OperableOnceExecutor} +import org.apache.linkis.engineconnplugin.sqoop.client.{LinkisSqoopClient, Sqoop} +import org.apache.linkis.engineconnplugin.sqoop.context.SqoopResourceConfiguration.{LINKIS_QUEUE_NAME, LINKIS_SQOOP_TASK_MAP_CPU_CORES, LINKIS_SQOOP_TASK_MAP_MEMORY} +import org.apache.linkis.engineconnplugin.sqoop.context.{SqoopEngineConnContext, SqoopParamsConfiguration} +import org.apache.linkis.manager.common.entity.resource.{CommonNodeResource, DriverAndYarnResource, LoadInstanceResource, NodeResource, YarnResource} +import org.apache.linkis.scheduler.executer.ErrorExecuteResponse +import java.util +import java.util.concurrent.{Future, TimeUnit} + +import org.apache.linkis.engineconn.common.creation.EngineCreationContext +import org.apache.linkis.engineconn.core.EngineConnObject +import org.apache.linkis.manager.engineplugin.common.conf.EngineConnPluginConf +import org.apache.linkis.protocol.engine.JobProgressInfo +import org.apache.linkis.engineconnplugin.sqoop.client.LinkisSqoopClient +import org.apache.linkis.engineconnplugin.sqoop.client.exception.JobExecutionException +import org.apache.linkis.engineconnplugin.sqoop.context.{SqoopEngineConnContext, SqoopEnvConfiguration} +import org.apache.linkis.engineconnplugin.sqoop.params.SqoopParamsResolver + + +class SqoopOnceCodeExecutor(override val id: Long, + override protected val sqoopEngineConnContext: SqoopEngineConnContext) extends SqoopOnceExecutor with OperableOnceExecutor{ + + + private var params: util.Map[String, String] = _ + private var future: Future[_] = _ + private var daemonThread: Future[_] = _ + private val paramsResolvers: Array[SqoopParamsResolver] = Array() + + override def doSubmit(onceExecutorExecutionContext: OnceExecutorExecutionContext, options: Map[String, String]): Unit = { + var isFailed = false + future = Utils.defaultScheduler.submit(new Runnable { + override def run(): Unit = { + // TODO filter job content + params = 
onceExecutorExecutionContext.getOnceExecutorContent.getJobContent.asInstanceOf[util.Map[String, String]] + info("Trying to execute with params: " + params) + if(runSqoop(params, onceExecutorExecutionContext.getEngineCreationContext) != 0) { + isFailed = true + tryFailed() + setResponse(ErrorExecuteResponse("Run code failed!", new JobExecutionException("Exec Sqoop Code Error"))) + } + info("All codes completed, now stopping SqoopEngineConn.") + closeDaemon() + if (!isFailed) { + trySucceed() + } + this synchronized notify() + } + }) + } + protected def runSqoop(params: util.Map[String, String], context: EngineCreationContext): Int = { + Utils.tryCatch { + val finalParams = paramsResolvers.foldLeft(params) { + case (newParam, resolver) => resolver.resolve(newParam, context) + } + LinkisSqoopClient.run(finalParams) + }{ + case e: Exception => + error(s"Run error message: ${e.getMessage}", e) + -1 + } + + } + + override protected def waitToRunning(): Unit = { + if (!isCompleted) daemonThread = Utils.defaultScheduler.scheduleAtFixedRate(new Runnable { + override def run(): Unit = { + if (!(future.isDone || future.isCancelled)) { + info("The Sqoop process is still running") + } + } + }, SqoopEnvConfiguration.SQOOP_STATUS_FETCH_INTERVAL.getValue.toLong, + SqoopEnvConfiguration.SQOOP_STATUS_FETCH_INTERVAL.getValue.toLong, TimeUnit.MILLISECONDS) + } + override def getCurrentNodeResource(): NodeResource = { + val memorySuffix = "g" + val properties = EngineConnObject.getEngineCreationContext.getOptions + Option(properties.get(EngineConnPluginConf.JAVA_ENGINE_REQUEST_MEMORY.key)).foreach(memory => { + if (! memory.toLowerCase.endsWith(memorySuffix)) { + properties.put(EngineConnPluginConf.JAVA_ENGINE_REQUEST_MEMORY.key, memory + memorySuffix) + } + }) + val resource = new DriverAndYarnResource( + new LoadInstanceResource(EngineConnPluginConf.JAVA_ENGINE_REQUEST_MEMORY.getValue(properties).toLong, + EngineConnPluginConf.JAVA_ENGINE_REQUEST_CORES.getValue(properties), + EngineConnPluginConf.JAVA_ENGINE_REQUEST_INSTANCE), + new YarnResource(LINKIS_SQOOP_TASK_MAP_MEMORY.getValue * getNumTasks, LINKIS_SQOOP_TASK_MAP_CPU_CORES.getValue * getNumTasks, 0, LINKIS_QUEUE_NAME.getValue) + ) + val engineResource = new CommonNodeResource + engineResource.setUsedResource(resource) + engineResource + } + + def getNumTasks: Int = { + if (params != null) { + params.getOrDefault("sqoop.args.num.mappers", "1").toInt + } else { + 0 + } + } + protected def closeDaemon(): Unit = { + if (daemonThread != null) daemonThread.cancel(true) + } + + override def getProgress: Float = LinkisSqoopClient.progress() + + override def getProgressInfo: Array[JobProgressInfo] = { + val progressInfo = LinkisSqoopClient.getProgressInfo + info(s"Progress Info, id: ${progressInfo.id}, total: ${progressInfo.totalTasks}, running: ${progressInfo.runningTasks}," + + s" succeed: ${progressInfo.succeedTasks}, fail: ${progressInfo.failedTasks}") + Array(progressInfo) + } + + + override def getMetrics: util.Map[String, Any] = { + val metrics = LinkisSqoopClient.getMetrics.asInstanceOf[util.Map[String, Any]] + // Report the resource + metrics.put("NodeResourceJson", getCurrentNodeResource().getUsedResource.toJson) + metrics + } + + override def getDiagnosis: util.Map[String, Any] = LinkisSqoopClient.getDiagnosis.asInstanceOf[util.Map[String, Any]] +} diff --git a/exchangis-engines/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/executor/SqoopOnceExecutor.scala 
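Editor's note: worth calling out in getCurrentNodeResource above: the YARN share is simply the per-mapper memory and cores scaled by the requested mapper count, falling back to one mapper exactly as getNumTasks does. A compact sketch of that arithmetic with stand-in types (the case class is not a Linkis entity):

```scala
// Stand-in for the YARN part of DriverAndYarnResource.
case class YarnShare(memoryGb: Int, cores: Int, queue: String)

object SqoopResourceMath {
  val MapMemoryGb = 2 // mirrors the sqoop.task.map.memory default above
  val MapCores    = 1 // mirrors the sqoop.task.map.cpu.cores default above

  def yarnShare(params: Map[String, String], queue: String = "default"): YarnShare = {
    // Same fallback as getNumTasks: default to one mapper when unset.
    val numTasks = params.getOrElse("sqoop.args.num.mappers", "1").toInt
    YarnShare(MapMemoryGb * numTasks, MapCores * numTasks, queue)
  }

  def main(args: Array[String]): Unit = {
    // Four mappers ask for 8 GB and 4 vcores in total.
    println(yarnShare(Map("sqoop.args.num.mappers" -> "4")))
  }
}
```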
b/exchangis-engines/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/executor/SqoopOnceExecutor.scala new file mode 100644 index 000000000..727d1f640 --- /dev/null +++ b/exchangis-engines/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/executor/SqoopOnceExecutor.scala @@ -0,0 +1,67 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.engineconnplugin.sqoop.executor + +import org.apache.linkis.common.utils.Utils +import org.apache.linkis.engineconn.core.hook.ShutdownHook +import org.apache.linkis.engineconn.once.executor.{ManageableOnceExecutor, OnceExecutorExecutionContext} +import org.apache.linkis.manager.common.entity.enumeration.NodeStatus +import org.apache.linkis.engineconnplugin.sqoop.client.{LinkisSqoopClient, Sqoop} + +import scala.collection.convert.WrapAsScala._ + + +trait SqoopOnceExecutor extends ManageableOnceExecutor with SqoopExecutor{ + protected def submit(onceExecutorExecutionContext: OnceExecutorExecutionContext): Unit = { + val options = onceExecutorExecutionContext.getOnceExecutorContent.getJobContent.map { + case (k, v: String) => k -> v + case (k, v) if v != null => k -> v.toString + case (k, _) => k -> null + }.toMap + doSubmit(onceExecutorExecutionContext, options) + } + def doSubmit(onceExecutorExecutionContext: OnceExecutorExecutionContext, options: Map[String, String]): Unit + + val id: Long + + override def getId: String = "SqoopOnceApp_" + id + override def close(): Unit = { + Sqoop.close() + super.close() + } + override def trySucceed(): Boolean = { + super.trySucceed() + } + + + override def ensureAvailable[A](f: => A): A = { + // No need to throw the exception here; degrade quietly + Utils.tryQuietly{ super.ensureAvailable(f) } + } + + override def tryFailed(): Boolean = { + LinkisSqoopClient.close() + super.tryFailed() + } + + override def supportCallBackLogs(): Boolean = true + + + protected def isCompleted: Boolean = isClosed || NodeStatus.isCompleted(getStatus) +} diff --git a/exchangis-engines/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/factory/SqoopEngineConnFactory.scala b/exchangis-engines/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/factory/SqoopEngineConnFactory.scala new file mode 100644 index 000000000..25666df2a --- /dev/null +++ b/exchangis-engines/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/factory/SqoopEngineConnFactory.scala @@ -0,0 +1,45 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.engineconnplugin.sqoop.factory + +import org.apache.linkis.common.utils.Logging +import org.apache.linkis.engineconn.common.creation.EngineCreationContext +import org.apache.linkis.engineconnplugin.sqoop.context.SqoopEngineConnContext +import org.apache.linkis.engineconnplugin.sqoop.util.ClassUtil +import org.apache.linkis.manager.engineplugin.common.creation.{ExecutorFactory, MultiExecutorEngineConnFactory} +import org.apache.linkis.manager.label.entity.engine.EngineType +import org.apache.linkis.manager.label.entity.engine.EngineType.EngineType +import org.apache.linkis.engineconnplugin.sqoop.context.SqoopEngineConnContext +import org.apache.linkis.engineconnplugin.sqoop.util.ClassUtil + +class SqoopEngineConnFactory extends MultiExecutorEngineConnFactory with Logging{ + override def getExecutorFactories: Array[ExecutorFactory] = executorFactoryArray + + override protected def getDefaultExecutorFactoryClass: Class[_ <: ExecutorFactory] = classOf[SqoopExecutorFactory] + + override protected def getEngineConnType: EngineType = EngineType.SQOOP + + override protected def createEngineConnSession(engineCreationContext: EngineCreationContext): Any = { + //val environmentContext = createEnvironmentContext(engineCreationContext) + val sqoopEngineConnContext = new SqoopEngineConnContext() + sqoopEngineConnContext + } + + + private val executorFactoryArray = Array[ExecutorFactory](ClassUtil.getInstance(classOf[SqoopExecutorFactory], new SqoopExecutorFactory)) +} diff --git a/exchangis-engines/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/factory/SqoopExecutorFactory.scala b/exchangis-engines/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/factory/SqoopExecutorFactory.scala new file mode 100644 index 000000000..4b39fa0f0 --- /dev/null +++ b/exchangis-engines/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/factory/SqoopExecutorFactory.scala @@ -0,0 +1,43 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
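Editor's note: SqoopOnceExecutor.submit above normalizes the loosely typed job content into a Map[String, String] before calling doSubmit. The same conversion as a stand-alone, testable function; this uses scala.jdk.CollectionConverters, which assumes Scala 2.13 (on 2.12 the older JavaConverters import would be needed):

```scala
import scala.jdk.CollectionConverters._ // Scala 2.13; on 2.12: scala.collection.JavaConverters._

// Sketch of the job-content normalization: a loosely typed java.util.Map
// is folded into Map[String, String], stringifying non-null values and
// keeping nulls as-is, mirroring the three match cases above.
object JobContentNormalizer {
  def normalize(jobContent: java.util.Map[String, Object]): Map[String, String] =
    jobContent.asScala.map {
      case (k, v: String) => k -> v
      case (k, v) if v != null => k -> v.toString
      case (k, _) => k -> (null: String)
    }.toMap

  def main(args: Array[String]): Unit = {
    val raw = new java.util.HashMap[String, Object]()
    raw.put("sqoop.mode", "import")
    raw.put("sqoop.args.num.mappers", Integer.valueOf(4))
    raw.put("sqoop.args.verbose", null)
    println(normalize(raw)) // the Integer is stringified, the null survives
  }
}
```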
+ */ + +package org.apache.linkis.engineconnplugin.sqoop.factory + +import org.apache.linkis.engineconn.common.creation.EngineCreationContext +import org.apache.linkis.engineconn.common.engineconn.EngineConn +import org.apache.linkis.engineconn.once.executor.OnceExecutor +import org.apache.linkis.engineconn.once.executor.creation.OnceExecutorFactory +import org.apache.linkis.engineconnplugin.sqoop.client.exception.JobExecutionException +import org.apache.linkis.engineconnplugin.sqoop.context.SqoopEngineConnContext +import org.apache.linkis.engineconnplugin.sqoop.executor.SqoopOnceCodeExecutor +import org.apache.linkis.manager.label.entity.Label +import org.apache.linkis.manager.label.entity.engine.RunType.{APPCONN, RunType} + +class SqoopExecutorFactory extends OnceExecutorFactory{ + + + override protected def getRunType: RunType = APPCONN + + override protected def newExecutor(id: Int, engineCreationContext: EngineCreationContext, engineConn: EngineConn, labels: Array[Label[_]]): OnceExecutor = { + engineConn.getEngineConnSession match { + case context: SqoopEngineConnContext => + new SqoopOnceCodeExecutor(id, context) + case _ => + throw new JobExecutionException("Invalid engineConn session, a SqoopEngineConnContext is expected.") + } + } +} diff --git a/exchangis-engines/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/launch/SqoopEngineConnLaunchBuilder.scala b/exchangis-engines/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/launch/SqoopEngineConnLaunchBuilder.scala new file mode 100644 index 000000000..a643b792c --- /dev/null +++ b/exchangis-engines/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/launch/SqoopEngineConnLaunchBuilder.scala @@ -0,0 +1,92 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.engineconnplugin.sqoop.launch + +import java.nio.file.Paths +import java.util +import java.util.concurrent.TimeUnit + +import org.apache.linkis.engineconnplugin.sqoop.context.SqoopEnvConfiguration._ +import org.apache.linkis.manager.engineplugin.common.launch.entity.EngineConnBuildRequest +import org.apache.linkis.manager.engineplugin.common.launch.process.Environment.{variable, _} +import org.apache.linkis.manager.engineplugin.common.launch.process.JavaProcessEngineConnLaunchBuilder +import org.apache.linkis.manager.engineplugin.common.launch.process.LaunchConstants._ +import org.apache.commons.io.IOUtils +import org.apache.commons.lang3.StringUtils + +import scala.collection.JavaConverters._ + +class SqoopEngineConnLaunchBuilder extends JavaProcessEngineConnLaunchBuilder{ + + override protected def getEnvironment(implicit engineConnBuildRequest: EngineConnBuildRequest): util.Map[String, String] = { + val environment = super.getEnvironment + // Basic classpath + addPathToClassPath(environment, variable(HADOOP_CONF_DIR)) + addExistPathToClassPath(environment, Seq(SQOOP_CONF_DIR.getValue)) + if (StringUtils.isNotBlank(SQOOP_HOME.getValue)) { + addPathToClassPath(environment, Seq(SQOOP_HOME.getValue, "/*")) + addPathToClassPath(environment, Seq(SQOOP_HOME.getValue, "/lib/*")) + } + // HBase classpath + if (StringUtils.isNotBlank(SQOOP_HBASE_HOME.getValue) && Paths.get(SQOOP_HBASE_HOME.getValue).toFile.exists()) { + resolveCommandToClassPath(environment, SQOOP_HBASE_HOME.getValue + "/bin/hbase classpath") + } + // HCat classpath + if (StringUtils.isNotBlank(SQOOP_HCAT_HOME.getValue) && Paths.get(SQOOP_HCAT_HOME.getValue).toFile.exists()) { + resolveCommandToClassPath(environment, SQOOP_HCAT_HOME.getValue + "/bin/hcat -classpath") + } + addExistPathToClassPath(environment, Seq(SQOOP_ZOOCFGDIR.getValue)) + environment + } + + + override protected def getNecessaryEnvironment(implicit engineConnBuildRequest: EngineConnBuildRequest): Array[String] = { + // To submit a mapReduce job, we should load the configuration from hadoop config dir + Array(HADOOP_CONF_DIR.toString, SQOOP_HOME.key) + } + + private def addExistPathToClassPath(env: util.Map[String, String], path: String): Unit = { + if (StringUtils.isNotBlank(path) && Paths.get(path).toFile.exists()) { + addPathToClassPath(env, path) + } + } + private def resolveCommandToClassPath(env: util.Map[String, String], command: String): Unit = { + trace(s"Invoke command [${command}] to get class path sequence") + val builder = new ProcessBuilder(Array("/bin/bash", "-c", command): _*) + // Set the environment + builder.environment.putAll(sys.env.asJava) + builder.redirectErrorStream(false) + val process = builder.start() + if(process.waitFor(5, TimeUnit.SECONDS) && + process.waitFor() == 0) { + val jarPathSerial = IOUtils.toString(process.getInputStream).trim() + // TODO we should decide separator in different environment + val separatorChar = ":" + val jarPathList = StringUtils.split(jarPathSerial, separatorChar).filterNot(jarPath => { + val splitIndex = jarPath.lastIndexOf("/") + val jarName = if (splitIndex >= 0) jarPath.substring(splitIndex + 1) else jarPath + jarName.matches("^jasper-compiler-[\\s\\S]+?\\.jar$") || jarName.matches("^jsp-[\\s\\S]+?\\.jar$") || jarName.matches("^disruptor-[\\s\\S]+?\\.jar") + }).toList + addPathToClassPath(env, StringUtils.join(jarPathList.asJava, separatorChar)) + } + // Release the process + process.destroy(); + } + private implicit def buildPath(paths: Seq[String]): String = 
Paths.get(paths.head, paths.tail: _*).toFile.getPath + +} diff --git a/exchangis-engines/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/params/SqoopParamsResolver.scala b/exchangis-engines/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/params/SqoopParamsResolver.scala new file mode 100644 index 000000000..464fc3922 --- /dev/null +++ b/exchangis-engines/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/params/SqoopParamsResolver.scala @@ -0,0 +1,34 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.engineconnplugin.sqoop.params + +import java.util + +import org.apache.linkis.engineconn.common.creation.EngineCreationContext +/** + * Resolve the engine job params + */ +trait SqoopParamsResolver { + + /** + * Resolve the input job params against the engine creation context. + * @param params input params + * @param context engine creation context + * @return the resolved params + */ + def resolve(params: util.Map[String, String], context: EngineCreationContext): util.Map[String, String] +} diff --git a/exchangis-engines/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/resource/SqoopEngineConnResourceFactory.scala b/exchangis-engines/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/resource/SqoopEngineConnResourceFactory.scala new file mode 100644 index 000000000..1634e8b7e --- /dev/null +++ b/exchangis-engines/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/resource/SqoopEngineConnResourceFactory.scala @@ -0,0 +1,31 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
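Editor's note: resolveCommandToClassPath above shells out to tools like `hbase classpath` and then drops jasper-compiler, jsp-* and disruptor jars before re-joining the remainder. The filtering step is pure and easy to isolate; a sketch (the ':' separator matches the Unix assumption the TODO in that method already flags):

```scala
// Isolated version of the jar-filtering step; the exclusion patterns mirror
// the ones used in resolveCommandToClassPath above.
object ClasspathFilter {
  private val Excluded = Seq("^jasper-compiler-.+\\.jar$", "^jsp-.+\\.jar$", "^disruptor-.+\\.jar$")

  def filter(raw: String, sep: String = ":"): String =
    raw.trim.split(sep).filterNot { path =>
      val name = path.substring(path.lastIndexOf('/') + 1)
      Excluded.exists(name.matches)
    }.mkString(sep)

  def main(args: Array[String]): Unit = {
    // Fabricated sample of what `hbase classpath` could print.
    val raw = "/opt/hbase/lib/hbase-client-2.4.9.jar:/opt/hbase/lib/jsp-2.1-6.1.14.jar:/opt/hbase/lib/disruptor-3.3.6.jar"
    println(filter(raw)) // keeps only hbase-client-2.4.9.jar
  }
}
```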
+ */ + +package org.apache.linkis.engineconnplugin.sqoop.resource + +import org.apache.linkis.manager.common.entity.resource.{LoadInstanceResource, Resource} +import org.apache.linkis.manager.engineplugin.common.resource.AbstractEngineResourceFactory + +import java.util + +class SqoopEngineConnResourceFactory extends AbstractEngineResourceFactory{ + override protected def getRequestResource(properties: util.Map[String, String]): Resource = { + new LoadInstanceResource(1, + 1, + 1) + } +} diff --git a/exchangis-engines/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/util/ClassUtil.scala b/exchangis-engines/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/util/ClassUtil.scala new file mode 100644 index 000000000..c3476821f --- /dev/null +++ b/exchangis-engines/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/util/ClassUtil.scala @@ -0,0 +1,40 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.engineconnplugin.sqoop.util + +import org.apache.linkis.common.utils.{ClassUtils, Utils} +import org.apache.linkis.engineconnplugin.sqoop.client.exception.JobExecutionException + +import scala.collection.convert.wrapAsScala._ + +object ClassUtil { + + def getInstance[T](clazz: Class[T], defaultValue: T): T = { + val classes = ClassUtils.reflections.getSubTypesOf(clazz).filterNot(ClassUtils.isInterfaceOrAbstract).toArray + if(classes.length <= 1) defaultValue + else if(classes.length == 2) { + val realClass = if(classes(0) == defaultValue.getClass) classes(1) else classes(0) + Utils.tryThrow(realClass.newInstance) { t => + new JobExecutionException(s"Failed to create an instance of ${clazz.getSimpleName}!", t) + } + } else { + throw new JobExecutionException(s"Too many subclasses of ${clazz.getSimpleName}, list: ${classes.mkString(", ")}.") + } + } + +} diff --git a/exchangis-engines/engines/datax/datax-assembly/package.xml b/exchangis-engines/engines/datax/datax-assembly/package.xml new file mode 100644 index 000000000..d88b54980 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-assembly/package.xml @@ -0,0 +1,66 @@ [assembly descriptor markup stripped in extraction; recoverable content: id core, format zip, includeBaseDirectory false; fileSets copying ../datax-core/target/core and the target/plugin directories of datax-hdfsreader, datax-hdfswriter, datax-textfilereader, datax-textfilewriter, datax-ftpreader, datax-ftpwriter, datax-elasticsearchwriter, datax-mysqlreader, datax-mysqlwriter, datax-oraclereader and datax-oraclewriter into /; a runtime dependencySet into lib] \ No newline at end of file diff 
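Editor's note: ClassUtil.getInstance implements a convention worth spelling out: if classpath scanning finds only the default implementation, use the supplied default; exactly one extra subclass wins as an override; more than one is ambiguous and rejected. A simplified model with an explicit candidate list instead of Reflections scanning (types here are illustrative):

```scala
object OverrideResolver {
  // Pick the override when exactly one non-default candidate exists;
  // otherwise fall back to the default or reject the ambiguity.
  def resolve[T](candidates: Seq[Class[_ <: T]], default: T): T =
    candidates.filterNot(_ == default.getClass) match {
      case Seq()       => default
      case Seq(single) => single.getDeclaredConstructor().newInstance()
      case many => throw new IllegalStateException(
        s"Too many overrides: ${many.map(_.getName).mkString(", ")}")
    }
}

class DefaultFactory { override def toString = "default" }
class CustomFactory extends DefaultFactory { override def toString = "custom" }

object OverrideResolverDemo extends App {
  val default = new DefaultFactory
  // Only the default on the classpath -> the default instance wins.
  println(OverrideResolver.resolve[DefaultFactory](Seq(classOf[DefaultFactory]), default))
  // One override found -> it is instantiated instead.
  println(OverrideResolver.resolve[DefaultFactory](
    Seq(classOf[DefaultFactory], classOf[CustomFactory]), default))
}
```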
--git a/assembly/pom.xml b/exchangis-engines/engines/datax/datax-assembly/pom.xml similarity index 60% rename from assembly/pom.xml rename to exchangis-engines/engines/datax/datax-assembly/pom.xml index 9a8450e67..7f9873a45 100644 --- a/assembly/pom.xml +++ b/exchangis-engines/engines/datax/datax-assembly/pom.xml [pom.xml hunks stripped in extraction; recoverable changes: the parent switches from exchangis 0.5.0.RELEASE to exchangis-engine-datax com.webank.wedatasphere.exchangis ${revision} (../pom.xml), the artifactId from exchangis-assembly to datax-assembly, the assembly finalName from wedatasphere-${project.parent.artifactId}-${project.parent.version} to datax, and the output directory from ../build to ${project.parent.build.directory}; the descriptor stays ${basedir}/package.xml] diff --git a/exchangis-engines/engines/datax/datax-core/pom.xml b/exchangis-engines/engines/datax/datax-core/pom.xml new file mode 100644 index 000000000..d7d17f09d --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/pom.xml @@ -0,0 +1,212 @@ [pom.xml markup stripped in extraction; recoverable content: jar packaging under parent exchangis-engine-datax/com.webank.wedatasphere.exchangis/${revision}, artifactId datax-core with version 3.0.0-Plus-2; dependencies: druid 1.0.15, jackson-core, jackson-databind, commons-configuration, commons-lang3, commons-cli, commons-pool, commons-beanutils, httpclient 4.4, fluent-hc 4.4, janino 2.5.16, groovy-all 2.1.9, javacsv 2.0, lzo-core 1.0.5, logback-classic (provided), slf4j-api, aircompressor 0.3, hadoop-apache2 0.3 (provided), junit 4.12 (test), commons-math3, commons-compress, commons-io, guava, gson, system-scoped jars Dm7JdbcDriver16.jar, jconn3-1.0.0-SNAPSHOT.jar and edb-jdbc16.jar from src/main/lib, and hadoop-common 3.3.4 excluding commons-compress and slf4j-log4j12; build: resources from src/main/resources, maven-assembly-plugin 2.2.1 bound to the install phase with mainClass com.alibaba.datax.core.Engine, descriptor src/main/assembly/package.xml and finalName core, and maven-compiler-plugin 2.5.1 pinned to ${jdk.compile.version}] diff --git a/exchangis-engines/engines/datax/datax-core/src/main/assembly/package.xml b/exchangis-engines/engines/datax/datax-core/src/main/assembly/package.xml new file mode 100644 index 000000000..4c909689f --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/assembly/package.xml @@ -0,0 +1,30 @@ [assembly descriptor markup stripped in extraction; recoverable content: id core, format dir, includeBaseDirectory false; fileSets copying src/main/resources to conf, src/main/bin to bin and src/main/log to log; a runtime dependencySet into lib] \ No newline at end of file diff --git a/exchangis-engines/engines/datax/datax-core/src/main/bin/datax.py b/exchangis-engines/engines/datax/datax-core/src/main/bin/datax.py new file mode 
100644 index 000000000..bb183ddda --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/bin/datax.py @@ -0,0 +1,238 @@ +#!/usr/bin/env python +# -*- coding:utf-8 -*- + +import sys +import os +import signal +import subprocess +import time +import re +import socket +import json +from optparse import OptionParser +from optparse import OptionGroup +from string import Template +import codecs +import platform + +JAVA_HOME = "/nemo/jdk8" +DATAX_HOME = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) +DATAX_VERSION = 'DATAX-OPENSOURCE-3.0' + + +def isWindows(): + return platform.system() == 'Windows' + + +if isWindows(): + codecs.register(lambda name: name == 'cp65001' and codecs.lookup('utf-8') or None) + CLASS_PATH = "%s/lib/*" % DATAX_HOME +else: + CLASS_PATH = "%s/lib/*:." % DATAX_HOME +LOGBACK_FILE = "%s/conf/log/logback.xml" % DATAX_HOME +DEFAULT_JVM = "-XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=%s/log/ " % DATAX_HOME +DEFAULT_PROPERTY_CONF = "-Dfile.encoding=UTF-8 " \ "-Dlogback.statusListenerClass=ch.qos.logback.core.status.NopStatusListener " \ "-Djava.security.egd=file:///dev/urandom -Ddatax.home=%s -Dlogback.configurationFile=%s" % ( + DATAX_HOME, LOGBACK_FILE) +ENGINE_COMMAND = "%s/bin/java -server ${jvm} %s -classpath %s " \ "${params} com.alibaba.datax.core.Engine -mode ${mode} -jobid ${jobid} -job ${job}" % ( + JAVA_HOME, DEFAULT_PROPERTY_CONF, CLASS_PATH) +REMOTE_DEBUG_CONFIG = "-Xdebug -Xrunjdwp:transport=dt_socket,server=y,address=9999" + +RET_STATE = { + "KILL": 143, + "FAIL": -1, + "OK": 0, + "RUN": 1, + "RETRY": 2 +} + + +def getLocalIp(): + try: + return socket.gethostbyname(socket.getfqdn(socket.gethostname())) + except: + return "Unknown" + + +def suicide(signum, e): + global child_process + print >> sys.stderr, "[Error] DataX received unexpected signal %d, starting to shut down." % (signum) + + if child_process: + child_process.send_signal(signal.SIGQUIT) + time.sleep(1) + child_process.kill() + print >> sys.stderr, "DataX process was killed. Did you kill it?" + sys.exit(RET_STATE["KILL"]) + + +def register_signal(): + if not isWindows(): + global child_process + signal.signal(2, suicide) + signal.signal(3, suicide) + signal.signal(15, suicide) + + +def getOptionParser(): + usage = "usage: %prog [options] job-url-or-path" + parser = OptionParser(usage=usage) + + prodEnvOptionGroup = OptionGroup(parser, "Product Env Options", + "Normal users use these options to set jvm parameters, job runtime mode etc. " + "Make sure these options can be used in Product Env.") + prodEnvOptionGroup.add_option("-j", "--jvm", metavar="", dest="jvmParameters", action="store", + default=DEFAULT_JVM, help="Set jvm parameters if necessary.") + prodEnvOptionGroup.add_option("--jobid", metavar="", dest="jobid", action="store", default="-1", + help="Set job unique id when running by Distribute/Local Mode.") + prodEnvOptionGroup.add_option("-m", "--mode", metavar="", + action="store", default="standalone", + help="Set job runtime mode such as: standalone, local, distribute. " + "Default mode is standalone.") + prodEnvOptionGroup.add_option("-p", "--params", metavar="", + action="store", dest="params", + help='Set job parameter, eg: the source tableName you want to set it by command, ' + 'then you can use like this: -p"-DtableName=your-table-name", ' + 'if you have multiple parameters: -p"-DtableName=your-table-name ' + '-DcolumnName=your-column-name". 
' + 'Note: you should configure tableName in your job with ${tableName}.') + prodEnvOptionGroup.add_option("-r", "--reader", metavar="", + action="store", dest="reader", type="string", + help='View job config[reader] template, eg: mysqlreader,streamreader') + prodEnvOptionGroup.add_option("-w", "--writer", metavar="", + action="store", dest="writer", type="string", + help='View job config[writer] template, eg: mysqlwriter,streamwriter') + parser.add_option_group(prodEnvOptionGroup) + + devEnvOptionGroup = OptionGroup(parser, "Develop/Debug Options", + "Developers use these options to trace more details of DataX.") + devEnvOptionGroup.add_option("-d", "--debug", dest="remoteDebug", action="store_true", + help="Set to remote debug mode.") + devEnvOptionGroup.add_option("--loglevel", metavar="", dest="loglevel", action="store", + default="info", help="Set log level such as: debug, info, all etc.") + parser.add_option_group(devEnvOptionGroup) + return parser + + +def generateJobConfigTemplate(reader, writer): + global writerPar, readerPar + readerRef = "Please refer to the %s document:\n https://github.com/alibaba/DataX/blob/master/%s/doc/%s.md \n" % ( + reader, reader, reader) + writerRef = "Please refer to the %s document:\n https://github.com/alibaba/DataX/blob/master/%s/doc/%s.md \n " % ( + writer, writer, writer) + print readerRef + print writerRef + jobGuid = 'Please save the following configuration as a json file and use\n ' \ 'python {DATAX_HOME}/bin/datax.py {JSON_FILE_NAME}.json \nto run the job.\n' + print(jobGuid) + jobTemplate = { + "job": { + "setting": { + "speed": { + "channel": "" + } + }, + "content": [ + { + "reader": {}, + "writer": {} + } + ] + } + } + readerTemplatePath = "%s/plugin/reader/%s/plugin_job_template.json" % (DATAX_HOME, reader) + writerTemplatePath = "%s/plugin/writer/%s/plugin_job_template.json" % (DATAX_HOME, writer) + try: + readerPar = readPluginTemplate(readerTemplatePath) + except Exception, e: + print "Read reader[%s] template error: can\'t find file %s" % (reader, readerTemplatePath) + try: + writerPar = readPluginTemplate(writerTemplatePath) + except Exception, e: + print "Read writer[%s] template error: can\'t find file %s" % (writer, writerTemplatePath) + jobTemplate['job']['content'][0]['reader'] = readerPar + jobTemplate['job']['content'][0]['writer'] = writerPar + print json.dumps(jobTemplate, indent=4, sort_keys=True) + + +def readPluginTemplate(plugin): + with open(plugin, 'r') as f: + return json.load(f) + + +def isUrl(path): + if not path: + return False + + assert (isinstance(path, str)) + m = re.match(r"^http[s]?://\S+\w*", path.lower()) + if m: + return True + else: + return False + + +def buildStartCommand(options, args): + commandMap = {} + tempJVMCommand = DEFAULT_JVM + if options.jvmParameters: + tempJVMCommand = tempJVMCommand + " " + options.jvmParameters + + if options.remoteDebug: + tempJVMCommand = tempJVMCommand + " " + REMOTE_DEBUG_CONFIG + print 'local ip: ', getLocalIp() + + if options.loglevel: + tempJVMCommand = tempJVMCommand + " " + ("-Dloglevel=%s" % (options.loglevel)) + + if options.mode: + commandMap["mode"] = options.mode + + # jobResource may be a URL, or a local file path (relative or absolute) + jobResource = args[0] + if not isUrl(jobResource): + jobResource = os.path.abspath(jobResource) + if jobResource.lower().startswith("file://"): + jobResource = jobResource[len("file://"):] + + # jobParams = "-Dlog.file.name=%s" % (jobResource[-20:].replace('/', '_').replace('.', '_')) + jobParams = "" + if options.params: + jobParams = jobParams + " " + 
options.params + + if options.jobid: + commandMap["jobid"] = options.jobid + + commandMap["jvm"] = tempJVMCommand + commandMap["params"] = jobParams + commandMap["job"] = jobResource + return Template(ENGINE_COMMAND).substitute(**commandMap) + + +def printCopyright(): + print ''' +DataX (%s), From Alibaba ! +Copyright (C) 2010-2017, Alibaba Group. All Rights Reserved. + +''' % DATAX_VERSION + sys.stdout.flush() + + +if __name__ == "__main__": + # printCopyright() + parser = getOptionParser() + options, args = parser.parse_args(sys.argv[1:]) + if options.reader is not None and options.writer is not None: + generateJobConfigTemplate(options.reader, options.writer) + sys.exit(RET_STATE['OK']) + if len(args) != 1: + parser.print_help() + sys.exit(RET_STATE['FAIL']) + startCommand = buildStartCommand(options, args) + # print startCommand + child_process = subprocess.Popen(startCommand, shell=True, env=os.environ.copy()) + register_signal() + (stdout, stderr) = child_process.communicate() + sys.exit(child_process.returncode) diff --git a/exchangis-engines/engines/datax/datax-core/src/main/bin/dxprof.py b/exchangis-engines/engines/datax/datax-core/src/main/bin/dxprof.py new file mode 100644 index 000000000..181bf9008 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/bin/dxprof.py @@ -0,0 +1,191 @@ +#! /usr/bin/env python +# vim: set expandtab tabstop=4 shiftwidth=4 foldmethod=marker nu: + +import re +import sys +import time + +REG_SQL_WAKE = re.compile(r'Begin\s+to\s+read\s+record\s+by\s+Sql', re.IGNORECASE) +REG_SQL_DONE = re.compile(r'Finished\s+read\s+record\s+by\s+Sql', re.IGNORECASE) +REG_SQL_PATH = re.compile(r'from\s+(\w+)(\s+where|\s*$)', re.IGNORECASE) +REG_SQL_JDBC = re.compile(r'jdbcUrl:\s*\[(.+?)\]', re.IGNORECASE) +REG_SQL_UUID = re.compile(r'(\d+\-)+reader') +REG_COMMIT_UUID = re.compile(r'(\d+\-)+writer') +REG_COMMIT_WAKE = re.compile(r'begin\s+to\s+commit\s+blocks', re.IGNORECASE) +REG_COMMIT_DONE = re.compile(r'commit\s+blocks\s+ok', re.IGNORECASE) + +# {{{ function parse_timestamp() # +def parse_timestamp(line): + try: + ts = int(time.mktime(time.strptime(line[0:19], '%Y-%m-%d %H:%M:%S'))) + except: + ts = 0 + + return ts + +# }}} # + +# {{{ function parse_query_host() # +def parse_query_host(line): + ori = REG_SQL_JDBC.search(line) + if (not ori): + return '' + + ori = ori.group(1).split('?')[0] + off = ori.find('@') + if (off > -1): + ori = ori[off+1:len(ori)] + else: + off = ori.find('//') + if (off > -1): + ori = ori[off+2:len(ori)] + + return ori.lower() +# }}} # + +# {{{ function parse_query_table() # +def parse_query_table(line): + ori = REG_SQL_PATH.search(line) + return (ori and ori.group(1).lower()) or '' +# }}} # + +# {{{ function parse_reader_task() # +def parse_task(fname): + global LAST_SQL_UUID + global LAST_COMMIT_UUID + global DATAX_JOBDICT + global DATAX_JOBDICT_COMMIT + global UNIXTIME + LAST_SQL_UUID = '' + DATAX_JOBDICT = {} + LAST_COMMIT_UUID = '' + DATAX_JOBDICT_COMMIT = {} + + UNIXTIME = int(time.time()) + with open(fname, 'r') as f: + for line in f.readlines(): + line = line.strip() + + if (LAST_SQL_UUID and (LAST_SQL_UUID in DATAX_JOBDICT)): + DATAX_JOBDICT[LAST_SQL_UUID]['host'] = parse_query_host(line) + LAST_SQL_UUID = '' + + if line.find('CommonRdbmsReader$Task') > 0: + parse_read_task(line) + elif line.find('commit blocks') > 0: + parse_write_task(line) + else: + continue +# }}} # + +# {{{ function parse_read_task() # +def parse_read_task(line): + ser = REG_SQL_UUID.search(line) + if not ser: + return + + LAST_SQL_UUID = 
ser.group() + if REG_SQL_WAKE.search(line): + DATAX_JOBDICT[LAST_SQL_UUID] = { + 'stat' : 'R', + 'wake' : parse_timestamp(line), + 'done' : UNIXTIME, + 'host' : parse_query_host(line), + 'path' : parse_query_table(line) + } + elif ((LAST_SQL_UUID in DATAX_JOBDICT) and REG_SQL_DONE.search(line)): + DATAX_JOBDICT[LAST_SQL_UUID]['stat'] = 'D' + DATAX_JOBDICT[LAST_SQL_UUID]['done'] = parse_timestamp(line) +# }}} # + +# {{{ function parse_write_task() # +def parse_write_task(line): + ser = REG_COMMIT_UUID.search(line) + if not ser: + return + + LAST_COMMIT_UUID = ser.group() + if REG_COMMIT_WAKE.search(line): + DATAX_JOBDICT_COMMIT[LAST_COMMIT_UUID] = { + 'stat' : 'R', + 'wake' : parse_timestamp(line), + 'done' : UNIXTIME, + } + elif ((LAST_COMMIT_UUID in DATAX_JOBDICT_COMMIT) and REG_COMMIT_DONE.search(line)): + DATAX_JOBDICT_COMMIT[LAST_COMMIT_UUID]['stat'] = 'D' + DATAX_JOBDICT_COMMIT[LAST_COMMIT_UUID]['done'] = parse_timestamp(line) +# }}} # + +# {{{ function result_analyse() # +def result_analyse(): + def compare(a, b): + return b['cost'] - a['cost'] + + tasklist = [] + hostsmap = {} + statvars = {'sum' : 0, 'cnt' : 0, 'svr' : 0, 'max' : 0, 'min' : int(time.time())} + tasklist_commit = [] + statvars_commit = {'sum' : 0, 'cnt' : 0} + + for idx in DATAX_JOBDICT: + item = DATAX_JOBDICT[idx] + item['uuid'] = idx; + item['cost'] = item['done'] - item['wake'] + tasklist.append(item); + + if (not (item['host'] in hostsmap)): + hostsmap[item['host']] = 1 + statvars['svr'] += 1 + + if (item['cost'] > -1 and item['cost'] < 864000): + statvars['sum'] += item['cost'] + statvars['cnt'] += 1 + statvars['max'] = max(statvars['max'], item['done']) + statvars['min'] = min(statvars['min'], item['wake']) + + for idx in DATAX_JOBDICT_COMMIT: + itemc = DATAX_JOBDICT_COMMIT[idx] + itemc['uuid'] = idx + itemc['cost'] = itemc['done'] - itemc['wake'] + tasklist_commit.append(itemc) + + if (itemc['cost'] > -1 and itemc['cost'] < 864000): + statvars_commit['sum'] += itemc['cost'] + statvars_commit['cnt'] += 1 + + ttl = (statvars['max'] - statvars['min']) or 1 + idx = float(statvars['cnt']) / (statvars['sum'] or ttl) + + tasklist.sort(compare) + for item in tasklist: + print '%s\t%s.%s\t%s\t%s\t% 4d\t% 2.1f%%\t% .2f' %(item['stat'], item['host'], item['path'], + time.strftime('%H:%M:%S', time.localtime(item['wake'])), + (('D' == item['stat']) and time.strftime('%H:%M:%S', time.localtime(item['done']))) or '--', + item['cost'], 100 * item['cost'] / ttl, idx * item['cost']) + + if (not len(tasklist) or not statvars['cnt']): + return + + print '\n--- DataX Profiling Statistics ---' + print '%d task(s) on %d server(s), Total elapsed %d second(s), %.2f second(s) per task in average' %(statvars['cnt'], + statvars['svr'], statvars['sum'], float(statvars['sum']) / statvars['cnt']) + print 'Actually cost %d second(s) (%s - %s), task concurrency: %.2f, tilt index: %.2f' %(ttl, + time.strftime('%H:%M:%S', time.localtime(statvars['min'])), + time.strftime('%H:%M:%S', time.localtime(statvars['max'])), + float(statvars['sum']) / ttl, idx * tasklist[0]['cost']) + + idx_commit = float(statvars_commit['cnt']) / (statvars_commit['sum'] or ttl) + tasklist_commit.sort(compare) + print '%d task(s) done odps comit, Total elapsed %d second(s), %.2f second(s) per task in average, tilt index: %.2f' % ( + statvars_commit['cnt'], + statvars_commit['sum'], float(statvars_commit['sum']) / statvars_commit['cnt'], + idx_commit * tasklist_commit[0]['cost']) + +# }}} # + +if (len(sys.argv) < 2): + print "Usage: %s filename" %(sys.argv[0]) + quit(1) 
+else:
+    parse_task(sys.argv[1])
+    result_analyse()
\ No newline at end of file
diff --git a/exchangis-engines/engines/datax/datax-core/src/main/bin/perftrace.py b/exchangis-engines/engines/datax/datax-core/src/main/bin/perftrace.py
new file mode 100644
index 000000000..f6c4074df
--- /dev/null
+++ b/exchangis-engines/engines/datax/datax-core/src/main/bin/perftrace.py
@@ -0,0 +1,400 @@
+#!/usr/bin/env python
+# -*- coding:utf-8 -*-
+
+
+"""
+ Life's short, Python more.
+"""
+
+import re
+import os
+import sys
+import json
+import uuid
+import signal
+import time
+import subprocess
+from optparse import OptionParser
+reload(sys)
+sys.setdefaultencoding('utf8')
+
+##begin cli & help logic
+def getOptionParser():
+    usage = getUsage()
+    parser = OptionParser(usage = usage)
+    #rdbms reader and writer
+    parser.add_option('-r', '--reader', action='store', dest='reader', help='trace datasource read performance with specified !json! string')
+    parser.add_option('-w', '--writer', action='store', dest='writer', help='trace datasource write performance with specified !json! string')
+
+    parser.add_option('-c', '--channel', action='store', dest='channel', default='1', help='the number of concurrent sync threads, the default is 1')
+    parser.add_option('-f', '--file', action='store', help='an existing datax configuration file, including reader and writer params')
+    parser.add_option('-t', '--type', action='store', default='reader', help='trace which side\'s performance; works with the -f --file param and must be reader or writer')
+    parser.add_option('-d', '--delete', action='store', default='true', help='delete temporary files, the default value is true')
+    #parser.add_option('-h', '--help', action='store', default='true', help='print usage information')
+    return parser
+
+def getUsage():
+    return '''
+The following params are available for -r --reader:
+    [these params are for the rdbms reader, used to trace rdbms read performance; they are the same keys datax uses]
+    *datasourceType: datasource type, may be mysql|drds|oracle|ads|sqlserver|postgresql|db2 etc...
+    *jdbcUrl:        datasource jdbc connection string, mysql as an example: jdbc:mysql://ip:port/database
+    *username:       username for datasource
+    *password:       password for datasource
+    *table:          table name for read data
+    column:          column to be read, the default value is ['*']
+    splitPk:         the splitPk column of the rdbms table
+    where:           limit the scope of the performance data set
+    fetchSize:       how many rows are fetched per round trip
+
+    [these params are for the stream reader, used to trace rdbms write performance]
+    reader-sliceRecordCount:  how many test records to mock (each channel), the default value is 10000
+    reader-column:   columns the stream reader generates as test data (type supports: string|long|date|double|bool|bytes; supports constant values and the random function), demo: [{"type":"string","value":"abc"},{"type":"string","random":"10,20"}]
+
+The following params are available for -w --writer:
+    [these params are for the rdbms writer, used to trace rdbms write performance; they are the same keys datax uses]
+    datasourceType:  datasource type, may be mysql|drds|oracle|ads|sqlserver|postgresql|db2 etc...
+    *jdbcUrl:        datasource jdbc connection string, mysql as an example: jdbc:mysql://ip:port/database
+    *username:       username for datasource
+    *password:       password for datasource
+    *table:          table name for write data
+    column:          column to be written, the default value is ['*']
+    batchSize:       how many rows are stored per round trip, the default value is 512
+    preSql:          prepare sql to be executed before writing data, the default value is ''
+    postSql:         post sql to be executed at the end of writing data, the default value is ''
+    url:             required for ads, pattern is ip:port
+    schema:          required for ads, ads database name
+
+    [these params are for the stream writer, used to trace rdbms read performance]
+    writer-print:    true means print the data read from the source datasource, the default value is false
+
+The following params are available for global control:
+    -c --channel: the number of concurrent tasks, the default value is 1
+    -f --file:    path of an existing complete dataX configuration file
+    -t --type:    test read or write performance for a datasource; must be reader or writer, in collaboration with -f --file
+    -h --help:    print help message
+
+some demos:
+perftrace.py --channel=10 --reader='{"jdbcUrl":"jdbc:mysql://127.0.0.1:3306/database", "username":"", "password":"", "table": "", "where":"", "splitPk":"", "writer-print":"false"}'
+perftrace.py --channel=10 --writer='{"jdbcUrl":"jdbc:mysql://127.0.0.1:3306/database", "username":"", "password":"", "table": "", "reader-sliceRecordCount": "10000", "reader-column": [{"type":"string","value":"abc"},{"type":"string","random":"10,20"}]}'
+perftrace.py --file=/tmp/datax.job.json --type=reader --reader='{"writer-print": "false"}'
+perftrace.py --file=/tmp/datax.job.json --type=writer --writer='{"reader-sliceRecordCount": "10000", "reader-column": [{"type":"string","value":"abc"},{"type":"string","random":"10,20"}]}'
+
+some example jdbc url patterns that may help:
+jdbc:oracle:thin:@ip:port:database
+jdbc:mysql://ip:port/database
+jdbc:sqlserver://ip:port;DatabaseName=database
+jdbc:postgresql://ip:port/database
+warn: the ads url pattern is ip:port
+warn: testing write performance will write data into your table, so use a temporary table just for the test.
+'''
+
+def printCopyright():
+    DATAX_VERSION = 'UNKNOWN_DATAX_VERSION'
+    print '''
+DataX Util Tools (%s), From Alibaba!
+Copyright (C) 2010-2016, Alibaba Group. All Rights Reserved.''' % DATAX_VERSION
+    sys.stdout.flush()
+
+
+def yesNoChoice():
+    yes = set(['yes', 'y', 'ye', ''])
+    no = set(['no', 'n'])
+    choice = raw_input().lower()
+    if choice in yes:
+        return True
+    elif choice in no:
+        return False
+    else:
+        # any other input falls through and returns None, which the caller treats as "no"
+        sys.stdout.write("Please respond with 'yes' or 'no'")
+##end cli & help logic
+
+
+##begin process logic
+def suicide(signum, e):
+    global childProcess
+    print >> sys.stderr, "[Error] Received unexpected signal %d, shutting down." % (signum)
+    if childProcess:
+        childProcess.send_signal(signal.SIGQUIT)
+        time.sleep(1)
+        childProcess.kill()
+    print >> sys.stderr, "DataX process was killed -- was that you?"
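+    # A hedged equivalent of registerSignal() below, using named constants
+    # instead of raw numbers (2=SIGINT, 3=SIGQUIT, 15=SIGTERM); not part of
+    # the original script:
+    #
+    #     for sig in (signal.SIGINT, signal.SIGQUIT, signal.SIGTERM):
+    #         signal.signal(sig, suicide)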
+    sys.exit(-1)
+
+
+def registerSignal():
+    global childProcess
+    # 2 = SIGINT, 3 = SIGQUIT, 15 = SIGTERM
+    signal.signal(2, suicide)
+    signal.signal(3, suicide)
+    signal.signal(15, suicide)
+
+
+def fork(command, isShell=False):
+    global childProcess
+    childProcess = subprocess.Popen(command, shell = isShell)
+    registerSignal()
+    (stdout, stderr) = childProcess.communicate()
+    # block until the child process exits (communicate() above already waits,
+    # so this is a harmless safeguard)
+    childProcess.wait()
+    return childProcess.returncode
+##end process logic
+
+
+##begin datax json generate logic
+#warn: `if not ''` -> true and `if not None` -> true, i.e. this is a falsy check, not a None check
+def notNone(obj, context):
+    if not obj:
+        raise Exception("Configuration property [%s] cannot be blank!" % (context))
+
+def attributeNotNone(obj, attributes):
+    for key in attributes:
+        notNone(obj.get(key), key)
+
+def isBlank(value):
+    if value is None or len(value.strip()) == 0:
+        return True
+    return False
+
+def parsePluginName(jdbcUrl, pluginType):
+    import re
+    #warn: drds
+    name = 'pluginName'
+    mysqlRegex = re.compile('jdbc:(mysql)://.*')
+    if (mysqlRegex.match(jdbcUrl)):
+        name = 'mysql'
+    postgresqlRegex = re.compile('jdbc:(postgresql)://.*')
+    if (postgresqlRegex.match(jdbcUrl)):
+        name = 'postgresql'
+    oracleRegex = re.compile('jdbc:(oracle):.*')
+    if (oracleRegex.match(jdbcUrl)):
+        name = 'oracle'
+    sqlserverRegex = re.compile('jdbc:(sqlserver)://.*')
+    if (sqlserverRegex.match(jdbcUrl)):
+        name = 'sqlserver'
+    db2Regex = re.compile('jdbc:(db2)://.*')
+    if (db2Regex.match(jdbcUrl)):
+        name = 'db2'
+    return "%s%s" % (name, pluginType)
+
+def renderDataXJson(paramsDict, readerOrWriter = 'reader', channel = 1):
+    dataxTemplate = {
+        "job": {
+            "setting": {
+                "speed": {
+                    "channel": 1
+                }
+            },
+            "content": [
+                {
+                    "reader": {
+                        "name": "",
+                        "parameter": {
+                            "username": "",
+                            "password": "",
+                            "sliceRecordCount": "10000",
+                            "column": [
+                                "*"
+                            ],
+                            "connection": [
+                                {
+                                    "table": [],
+                                    "jdbcUrl": []
+                                }
+                            ]
+                        }
+                    },
+                    "writer": {
+                        "name": "",
+                        "parameter": {
+                            "print": "false",
+                            "connection": [
+                                {
+                                    "table": [],
+                                    "jdbcUrl": ''
+                                }
+                            ]
+                        }
+                    }
+                }
+            ]
+        }
+    }
+    dataxTemplate['job']['setting']['speed']['channel'] = channel
+    dataxTemplateContent = dataxTemplate['job']['content'][0]
+
+    pluginName = ''
+    if paramsDict.get('datasourceType'):
+        pluginName = '%s%s' % (paramsDict['datasourceType'], readerOrWriter)
+    elif paramsDict.get('jdbcUrl'):
+        pluginName = parsePluginName(paramsDict['jdbcUrl'], readerOrWriter)
+    elif paramsDict.get('url'):
+        pluginName = 'adswriter'
+
+    theOtherSide = 'writer' if readerOrWriter == 'reader' else 'reader'
+    dataxPluginParamsContent = dataxTemplateContent.get(readerOrWriter).get('parameter')
+    dataxPluginParamsContent.update(paramsDict)
+
+    dataxPluginParamsContentOtherSide = dataxTemplateContent.get(theOtherSide).get('parameter')
+
+    if readerOrWriter == 'reader':
+        dataxTemplateContent.get('reader')['name'] = pluginName
+        dataxTemplateContent.get('writer')['name'] = 'streamwriter'
+        if paramsDict.get('writer-print'):
+            dataxPluginParamsContentOtherSide['print'] = paramsDict['writer-print']
+            del dataxPluginParamsContent['writer-print']
+        del dataxPluginParamsContentOtherSide['connection']
+    if readerOrWriter == 'writer':
+        dataxTemplateContent.get('reader')['name'] = 'streamreader'
+        dataxTemplateContent.get('writer')['name'] = pluginName
+        if paramsDict.get('reader-column'):
+            dataxPluginParamsContentOtherSide['column'] = paramsDict['reader-column']
+            del dataxPluginParamsContent['reader-column']
+        if paramsDict.get('reader-sliceRecordCount'):
+            dataxPluginParamsContentOtherSide['sliceRecordCount'] = paramsDict['reader-sliceRecordCount']
+            del dataxPluginParamsContent['reader-sliceRecordCount']
+        del dataxPluginParamsContentOtherSide['connection']
+
+    if paramsDict.get('jdbcUrl'):
+        if readerOrWriter == 'reader':
+            dataxPluginParamsContent['connection'][0]['jdbcUrl'].append(paramsDict['jdbcUrl'])
+        else:
+            dataxPluginParamsContent['connection'][0]['jdbcUrl'] = paramsDict['jdbcUrl']
+    if paramsDict.get('table'):
+        dataxPluginParamsContent['connection'][0]['table'].append(paramsDict['table'])
+
+
+    traceJobJson = json.dumps(dataxTemplate, indent = 4)
+    return traceJobJson
+
+def isUrl(path):
+    if not path:
+        return False
+    if not isinstance(path, str):
+        raise Exception('The configuration file path must be a string; what you configured is: %s' % path)
+    m = re.match(r"^http[s]?://\S+\w*", path.lower())
+    if m:
+        return True
+    else:
+        return False
+
+
+def readJobJsonFromLocal(jobConfigPath):
+    jobConfigContent = None
+    jobConfigPath = os.path.abspath(jobConfigPath)
+    file = open(jobConfigPath)
+    try:
+        jobConfigContent = file.read()
+    finally:
+        file.close()
+    if not jobConfigContent:
+        raise Exception("Reading your job configuration file returned an empty result; please check that the configuration is valid, path: [%s]\nconfiguration:\n%s" % (jobConfigPath, str(jobConfigContent)))
+    return jobConfigContent
+
+
+def readJobJsonFromRemote(jobConfigPath):
+    import urllib
+    conn = urllib.urlopen(jobConfigPath)
+    jobJson = conn.read()
+    return jobJson
+
+def parseJson(strConfig, context):
+    try:
+        return json.loads(strConfig)
+    except Exception, e:
+        import traceback
+        traceback.print_exc()
+        sys.stdout.flush()
+        print >> sys.stderr, '%s %s must be valid JSON' % (context, strConfig)
+        sys.exit(-1)
+
+def convert(options, args):
+    traceJobJson = ''
+    if options.file:
+        if isUrl(options.file):
+            traceJobJson = readJobJsonFromRemote(options.file)
+        else:
+            traceJobJson = readJobJsonFromLocal(options.file)
+        traceJobDict = parseJson(traceJobJson, '%s content' % options.file)
+        attributeNotNone(traceJobDict, ['job'])
+        attributeNotNone(traceJobDict['job'], ['content'])
+        attributeNotNone(traceJobDict['job']['content'][0], ['reader', 'writer'])
+        attributeNotNone(traceJobDict['job']['content'][0]['reader'], ['name', 'parameter'])
+        attributeNotNone(traceJobDict['job']['content'][0]['writer'], ['name', 'parameter'])
+        if options.type == 'reader':
+            traceJobDict['job']['content'][0]['writer']['name'] = 'streamwriter'
+            if options.reader:
+                traceReaderDict = parseJson(options.reader, 'reader config')
+                if traceReaderDict.get('writer-print') is not None:
+                    traceJobDict['job']['content'][0]['writer']['parameter']['print'] = traceReaderDict.get('writer-print')
+                else:
+                    traceJobDict['job']['content'][0]['writer']['parameter']['print'] = 'false'
+            else:
+                traceJobDict['job']['content'][0]['writer']['parameter']['print'] = 'false'
+        elif options.type == 'writer':
+            traceJobDict['job']['content'][0]['reader']['name'] = 'streamreader'
+            if options.writer:
+                traceWriterDict = parseJson(options.writer, 'writer config')
+                if traceWriterDict.get('reader-column'):
+                    traceJobDict['job']['content'][0]['reader']['parameter']['column'] = traceWriterDict['reader-column']
+                if traceWriterDict.get('reader-sliceRecordCount'):
+                    traceJobDict['job']['content'][0]['reader']['parameter']['sliceRecordCount'] = traceWriterDict['reader-sliceRecordCount']
+            else:
+                columnSize = len(traceJobDict['job']['content'][0]['writer']['parameter']['column'])
+                streamReaderColumn = []
+                for i in range(columnSize):
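+                    # Illustrative: each descriptor appended below asks the stream reader
+                    # to mock one column with a random long between 2 and 10 per record,
+                    # mirroring the reader-column syntax shown in the usage text above.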
+                    streamReaderColumn.append({"type": "long", "random": "2,10"})
+                traceJobDict['job']['content'][0]['reader']['parameter']['column'] = streamReaderColumn
+                traceJobDict['job']['content'][0]['reader']['parameter']['sliceRecordCount'] = 10000
+        else:
+            pass  # do nothing
+        return json.dumps(traceJobDict, indent = 4)
+    elif options.reader:
+        traceReaderDict = parseJson(options.reader, 'reader config')
+        return renderDataXJson(traceReaderDict, 'reader', options.channel)
+    elif options.writer:
+        traceWriterDict = parseJson(options.writer, 'writer config')
+        return renderDataXJson(traceWriterDict, 'writer', options.channel)
+    else:
+        print getUsage()
+        sys.exit(-1)
+    #dataxParams = {}
+    #for opt, value in options.__dict__.items():
+    #    dataxParams[opt] = value
+##end datax json generate logic
+
+
+if __name__ == "__main__":
+    printCopyright()
+    parser = getOptionParser()
+
+    options, args = parser.parse_args(sys.argv[1:])
+    #print options, args
+    dataxTraceJobJson = convert(options, args)
+
+    # generated from the MAC address, the current timestamp and a random number,
+    # so the name is globally unique
+    dataxJobPath = os.path.join(os.getcwd(), "perftrace-" + str(uuid.uuid1()))
+    jobConfigOk = True
+    if os.path.exists(dataxJobPath):
+        print "file already exists, truncate and rewrite it? %s" % dataxJobPath
+        if yesNoChoice():
+            jobConfigOk = True
+        else:
+            print "exit failed, because of file conflict"
+            sys.exit(-1)
+    fileWriter = open(dataxJobPath, 'w')
+    fileWriter.write(dataxTraceJobJson)
+    fileWriter.close()
+
+
+    print "trace environments:"
+    print "dataxJobPath:  %s" % dataxJobPath
+    dataxHomePath = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+    print "dataxHomePath: %s" % dataxHomePath
+
+    dataxCommand = "%s %s" % (os.path.join(dataxHomePath, "bin", "datax.py"), dataxJobPath)
+    print "dataxCommand:  %s" % dataxCommand
+
+    returncode = fork(dataxCommand, True)
+    if options.delete == 'true':
+        os.remove(dataxJobPath)
+    sys.exit(returncode)
diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/base/BaseObject.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/base/BaseObject.java
new file mode 100644
index 000000000..79c70858a
--- /dev/null
+++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/base/BaseObject.java
@@ -0,0 +1,25 @@
+package com.alibaba.datax.common.base;
+
+import org.apache.commons.lang3.builder.EqualsBuilder;
+import org.apache.commons.lang3.builder.HashCodeBuilder;
+import org.apache.commons.lang3.builder.ToStringBuilder;
+import org.apache.commons.lang3.builder.ToStringStyle;
+
+public class BaseObject {
+
+    @Override
+    public int hashCode() {
+        return HashCodeBuilder.reflectionHashCode(this, false);
+    }
+
+    @Override
+    public boolean equals(Object object) {
+        return EqualsBuilder.reflectionEquals(this, object, false);
+    }
+
+    @Override
+    public String toString() {
+        return ToStringBuilder.reflectionToString(this,
+                ToStringStyle.MULTI_LINE_STYLE);
+    }
+}
diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/constant/CommonConstant.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/constant/CommonConstant.java
new file mode 100644
index 000000000..3f019198b
--- /dev/null
+++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/constant/CommonConstant.java
@@ -0,0 +1,12 @@
+package com.alibaba.datax.common.constant;
+
+public final class CommonConstant {
+    /**
+     * Used by a plugin to mark, for each task produced by its own split, the resource
+     * the task uses, so that when the core assembles the tasks after the reader/writer
+     * split it can shuffle them more meaningfully according to this resource mark
+     */
+    public static final String LOAD_BALANCE_RESOURCE_MARK = "loadBalanceResourceMark";
+
+    public static final String TEMP_SUFFIX = "-channel%s.tmp";
+
+    public static final String TEMP_PREFIX = ".udes_%s_";
+}
diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/constant/PluginType.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/constant/PluginType.java
new file mode 100644
index 000000000..ceee089e9
--- /dev/null
+++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/constant/PluginType.java
@@ -0,0 +1,20 @@
+package com.alibaba.datax.common.constant;
+
+/**
+ * Created by jingxing on 14-8-31.
+ */
+public enum PluginType {
+    // pluginType also stands for a resource directory, so it is hard to extend -- or rather,
+    // it should only be extended when truly necessary; HANDLER is marked for now (it is
+    // really the same as TRANSFORMER) pending further discussion
+    READER("reader"), TRANSFORMER("transformer"), WRITER("writer"), HANDLER("handler");
+
+    private String pluginType;
+
+    private PluginType(String pluginType) {
+        this.pluginType = pluginType;
+    }
+
+    @Override
+    public String toString() {
+        return this.pluginType;
+    }
+}
diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/element/BoolColumn.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/element/BoolColumn.java
new file mode 100644
index 000000000..ee5fca453
--- /dev/null
+++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/element/BoolColumn.java
@@ -0,0 +1,115 @@
+package com.alibaba.datax.common.element;
+
+import com.alibaba.datax.common.exception.CommonErrorCode;
+import com.alibaba.datax.common.exception.DataXException;
+
+import java.math.BigDecimal;
+import java.math.BigInteger;
+import java.util.Date;
+
+/**
+ * Created by jingxing on 14-8-24.
+ */
+public class BoolColumn extends Column {
+
+    public BoolColumn(Boolean bool) {
+        super(bool, Column.Type.BOOLEAN, 1);
+    }
+
+    public BoolColumn(final String data) {
+        this(true);
+        this.validate(data);
+        if (null == data) {
+            this.setRawData(null);
+            this.setByteSize(0);
+        } else {
+            this.setRawData(Boolean.valueOf(data));
+            this.setByteSize(1);
+        }
+        return;
+    }
+
+    public BoolColumn() {
+        super(null, Column.Type.BOOLEAN, 1);
+    }
+
+    @Override
+    public Boolean asBoolean() {
+        if (null == super.getRawData()) {
+            return null;
+        }
+
+        return (Boolean) super.getRawData();
+    }
+
+    @Override
+    public Long asLong() {
+        if (null == this.getRawData()) {
+            return null;
+        }
+
+        return this.asBoolean() ? 1L : 0L;
+    }
+
+    @Override
+    public Double asDouble() {
+        if (null == this.getRawData()) {
+            return null;
+        }
+
+        return this.asBoolean() ? 1.0d : 0.0d;
+    }
+
+    @Override
+    public String asString() {
+        if (null == super.getRawData()) {
+            return null;
+        }
+
+        return this.asBoolean() ?
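+        // A hedged usage sketch of the conversion contract (not part of the
+        // original file):
+        //   Column c = new BoolColumn("true");
+        //   c.asLong();   // 1L
+        //   c.asDouble(); // 1.0d
+        //   c.asDate();   // throws DataXException(CONVERT_NOT_SUPPORT)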
"true" : "false"; + } + + @Override + public BigInteger asBigInteger() { + if (null == this.getRawData()) { + return null; + } + + return BigInteger.valueOf(this.asLong()); + } + + @Override + public BigDecimal asBigDecimal() { + if (null == this.getRawData()) { + return null; + } + + return BigDecimal.valueOf(this.asLong()); + } + + @Override + public Date asDate() { + throw DataXException.asDataXException( + CommonErrorCode.CONVERT_NOT_SUPPORT, "Bool类型不能转为Date ."); + } + + @Override + public byte[] asBytes() { + throw DataXException.asDataXException( + CommonErrorCode.CONVERT_NOT_SUPPORT, "Boolean类型不能转为Bytes ."); + } + + private void validate(final String data) { + if (null == data) { + return; + } + + if ("true".equalsIgnoreCase(data) || "false".equalsIgnoreCase(data)) { + return; + } + + throw DataXException.asDataXException( + CommonErrorCode.CONVERT_NOT_SUPPORT, + String.format("String[%s]不能转为Bool .", data)); + } +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/element/BytesColumn.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/element/BytesColumn.java new file mode 100644 index 000000000..961ac4c25 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/element/BytesColumn.java @@ -0,0 +1,84 @@ +package com.alibaba.datax.common.element; + +import com.alibaba.datax.common.exception.CommonErrorCode; +import com.alibaba.datax.common.exception.DataXException; +import org.apache.commons.lang3.ArrayUtils; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.util.Date; + +/** + * Created by jingxing on 14-8-24. + */ +public class BytesColumn extends Column { + + public BytesColumn() { + this(null); + } + + public BytesColumn(byte[] bytes) { + super(ArrayUtils.clone(bytes), Column.Type.BYTES, null == bytes ? 
0 + : bytes.length); + } + + @Override + public byte[] asBytes() { + if (null == this.getRawData()) { + return null; + } + + return (byte[]) this.getRawData(); + } + + @Override + public String asString() { + if (null == this.getRawData()) { + return null; + } + + try { + return ColumnCast.bytes2String(this); + } catch (Exception e) { + throw DataXException.asDataXException( + CommonErrorCode.CONVERT_NOT_SUPPORT, + String.format("Bytes[%s]不能转为String .", this.toString())); + } + } + + @Override + public Long asLong() { + throw DataXException.asDataXException( + CommonErrorCode.CONVERT_NOT_SUPPORT, "Bytes类型不能转为Long ."); + } + + @Override + public BigDecimal asBigDecimal() { + throw DataXException.asDataXException( + CommonErrorCode.CONVERT_NOT_SUPPORT, "Bytes类型不能转为BigDecimal ."); + } + + @Override + public BigInteger asBigInteger() { + throw DataXException.asDataXException( + CommonErrorCode.CONVERT_NOT_SUPPORT, "Bytes类型不能转为BigInteger ."); + } + + @Override + public Double asDouble() { + throw DataXException.asDataXException( + CommonErrorCode.CONVERT_NOT_SUPPORT, "Bytes类型不能转为Long ."); + } + + @Override + public Date asDate() { + throw DataXException.asDataXException( + CommonErrorCode.CONVERT_NOT_SUPPORT, "Bytes类型不能转为Date ."); + } + + @Override + public Boolean asBoolean() { + throw DataXException.asDataXException( + CommonErrorCode.CONVERT_NOT_SUPPORT, "Bytes类型不能转为Boolean ."); + } +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/element/Column.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/element/Column.java new file mode 100644 index 000000000..829fd520c --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/element/Column.java @@ -0,0 +1,76 @@ +package com.alibaba.datax.common.element; + + +import com.webank.wedatasphere.exchangis.datax.util.Json; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.util.Date; + +/** + * Created by jingxing on 14-8-24. + *
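+ * Base class of a single field value: it holds the raw object, a {@link Type}
+ * tag and an approximate byte size, and declares the as*() conversion
+ * contract implemented by the typed subclasses in this package.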

+ */ +public abstract class Column { + + private Type type; + + private Object rawData; + + private int byteSize; + + public Column(final Object object, final Type type, int byteSize) { + this.rawData = object; + this.type = type; + this.byteSize = byteSize; + } + + public Object getRawData() { + return this.rawData; + } + + public Type getType() { + return this.type; + } + + public int getByteSize() { + return this.byteSize; + } + + protected void setType(Type type) { + this.type = type; + } + + protected void setRawData(Object rawData) { + this.rawData = rawData; + } + + protected void setByteSize(int byteSize) { + this.byteSize = byteSize; + } + + public abstract Long asLong(); + + public abstract Double asDouble(); + + public abstract String asString(); + + public abstract Date asDate(); + + public abstract byte[] asBytes(); + + public abstract Boolean asBoolean(); + + public abstract BigDecimal asBigDecimal(); + + public abstract BigInteger asBigInteger(); + + @Override + public String toString() { + return Json.toJson(this, null); + } + + public enum Type { + BAD, NULL, INT, LONG, DOUBLE, STRING, BOOLEAN, DATE, BYTES + } +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/element/ColumnCast.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/element/ColumnCast.java new file mode 100644 index 000000000..e112269e1 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/element/ColumnCast.java @@ -0,0 +1,202 @@ +package com.alibaba.datax.common.element; + +import com.alibaba.datax.common.exception.CommonErrorCode; +import com.alibaba.datax.common.exception.DataXException; +import com.alibaba.datax.common.util.Configuration; +import org.apache.commons.lang3.time.DateFormatUtils; +import org.apache.commons.lang3.time.FastDateFormat; + +import java.io.UnsupportedEncodingException; +import java.text.ParseException; +import java.util.Collections; +import java.util.Date; +import java.util.List; +import java.util.TimeZone; + +public final class ColumnCast { + + public static void bind(final Configuration configuration) { + StringCast.init(configuration); + DateCast.init(configuration); + BytesCast.init(configuration); + } + + public static Date string2Date(final StringColumn column) + throws ParseException { + return StringCast.asDate(column); + } + + public static byte[] string2Bytes(final StringColumn column) + throws UnsupportedEncodingException { + return StringCast.asBytes(column); + } + + public static String date2String(final DateColumn column) { + return DateCast.asString(column); + } + + public static String bytes2String(final BytesColumn column) + throws UnsupportedEncodingException { + return BytesCast.asString(column); + } +} + +class StringCast { + static String datetimeFormat = "yyyy-MM-dd HH:mm:ss"; + + static String dateFormat = "yyyy-MM-dd"; + + static String timeFormat = "HH:mm:ss"; + + static List extraFormats = Collections.emptyList(); + + static String timeZone = "GMT+8"; + + static FastDateFormat dateFormatter; + + static FastDateFormat timeFormatter; + + static FastDateFormat datetimeFormatter; + + static TimeZone timeZoner; + + static String encoding = "UTF-8"; + + static void init(final Configuration configuration) { + StringCast.datetimeFormat = configuration.getString( + "common.column.datetimeFormat", StringCast.datetimeFormat); + StringCast.dateFormat = configuration.getString( + "common.column.dateFormat", StringCast.dateFormat); + 
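+        // A hedged example of the job settings read by this init(); the keys are
+        // the ones used below and the values shown are the defaults above:
+        //   "common": { "column": {
+        //       "datetimeFormat": "yyyy-MM-dd HH:mm:ss", "dateFormat": "yyyy-MM-dd",
+        //       "timeFormat": "HH:mm:ss", "extraFormats": [],
+        //       "timeZone": "GMT+8", "encoding": "UTF-8" } }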
StringCast.timeFormat = configuration.getString( + "common.column.timeFormat", StringCast.timeFormat); + StringCast.extraFormats = configuration.getList( + "common.column.extraFormats", Collections.emptyList(), String.class); + + StringCast.timeZone = configuration.getString("common.column.timeZone", + StringCast.timeZone); + StringCast.timeZoner = TimeZone.getTimeZone(StringCast.timeZone); + + StringCast.datetimeFormatter = FastDateFormat.getInstance( + StringCast.datetimeFormat, StringCast.timeZoner); + StringCast.dateFormatter = FastDateFormat.getInstance( + StringCast.dateFormat, StringCast.timeZoner); + StringCast.timeFormatter = FastDateFormat.getInstance( + StringCast.timeFormat, StringCast.timeZoner); + + StringCast.encoding = configuration.getString("common.column.encoding", + StringCast.encoding); + } + + static Date asDate(final StringColumn column) throws ParseException { + if (null == column.asString()) { + return null; + } + + try { + return StringCast.datetimeFormatter.parse(column.asString()); + } catch (ParseException ignored) { + } + + try { + return StringCast.dateFormatter.parse(column.asString()); + } catch (ParseException ignored) { + } + + ParseException e; + try { + return StringCast.timeFormatter.parse(column.asString()); + } catch (ParseException ignored) { + e = ignored; + } + + for (String format : StringCast.extraFormats) { + try { + return FastDateFormat.getInstance(format, StringCast.timeZoner).parse(column.asString()); + } catch (ParseException ignored) { + e = ignored; + } + } + throw e; + } + + static byte[] asBytes(final StringColumn column) + throws UnsupportedEncodingException { + if (null == column.asString()) { + return null; + } + + return column.asString().getBytes(StringCast.encoding); + } +} + +/** + * 后续为了可维护性,可以考虑直接使用 apache 的DateFormatUtils. + *

+ * 迟南已经修复了该问题,但是为了维护性,还是直接使用apache的内置函数 + */ +class DateCast { + + static String datetimeFormat = "yyyy-MM-dd HH:mm:ss"; + + static String dateFormat = "yyyy-MM-dd"; + + static String timeFormat = "HH:mm:ss"; + + static String timeZone = "GMT+8"; + + static TimeZone timeZoner = TimeZone.getTimeZone(DateCast.timeZone); + + static void init(final Configuration configuration) { + DateCast.datetimeFormat = configuration.getString( + "common.column.datetimeFormat", datetimeFormat); + DateCast.timeFormat = configuration.getString( + "common.column.timeFormat", timeFormat); + DateCast.dateFormat = configuration.getString( + "common.column.dateFormat", dateFormat); + DateCast.timeZone = configuration.getString("common.column.timeZone", + DateCast.timeZone); + DateCast.timeZoner = TimeZone.getTimeZone(DateCast.timeZone); + return; + } + + static String asString(final DateColumn column) { + if (null == column.asDate()) { + return null; + } + + switch (column.getSubType()) { + case DATE: + return DateFormatUtils.format(column.asDate(), DateCast.dateFormat, + DateCast.timeZoner); + case TIME: + return DateFormatUtils.format(column.asDate(), DateCast.timeFormat, + DateCast.timeZoner); + case DATETIME: + return DateFormatUtils.format(column.asDate(), + DateCast.datetimeFormat, DateCast.timeZoner); + default: + throw DataXException + .asDataXException(CommonErrorCode.CONVERT_NOT_SUPPORT, + "时间类型出现不支持类型,目前仅支持DATE/TIME/DATETIME。该类型属于编程错误,请反馈给DataX开发团队 ."); + } + } +} + +class BytesCast { + static String encoding = "utf-8"; + + static void init(final Configuration configuration) { + BytesCast.encoding = configuration.getString("common.column.encoding", + BytesCast.encoding); + return; + } + + static String asString(final BytesColumn column) + throws UnsupportedEncodingException { + if (null == column.asBytes()) { + return null; + } + + return new String(column.asBytes(), encoding); + } +} \ No newline at end of file diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/element/DateColumn.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/element/DateColumn.java new file mode 100644 index 000000000..a6a8198f5 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/element/DateColumn.java @@ -0,0 +1,130 @@ +package com.alibaba.datax.common.element; + +import com.alibaba.datax.common.exception.CommonErrorCode; +import com.alibaba.datax.common.exception.DataXException; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.util.Date; + +/** + * Created by jingxing on 14-8-24. + */ +public class DateColumn extends Column { + + private DateType subType = DateType.DATETIME; + + public static enum DateType { + DATE, TIME, DATETIME + } + + /** + * 构建值为null的DateColumn,使用Date子类型为DATETIME + */ + public DateColumn() { + this((Long) null); + } + + /** + * 构建值为stamp(Unix时间戳)的DateColumn,使用Date子类型为DATETIME + * 实际存储有date改为long的ms,节省存储 + */ + public DateColumn(final Long stamp) { + super(stamp, Column.Type.DATE, (null == stamp ? 0 : 8)); + } + + /** + * 构建值为date(java.util.Date)的DateColumn,使用Date子类型为DATETIME + */ + public DateColumn(final Date date) { + this(date == null ? null : date.getTime()); + } + + /** + * 构建值为date(java.sql.Date)的DateColumn,使用Date子类型为DATE,只有日期,没有时间 + */ + public DateColumn(final java.sql.Date date) { + this(date == null ? 
null : date.getTime()); + this.setSubType(DateType.DATE); + } + + /** + * 构建值为time(java.sql.Time)的DateColumn,使用Date子类型为TIME,只有时间,没有日期 + */ + public DateColumn(final java.sql.Time time) { + this(time == null ? null : time.getTime()); + this.setSubType(DateType.TIME); + } + + /** + * 构建值为ts(java.sql.Timestamp)的DateColumn,使用Date子类型为DATETIME + */ + public DateColumn(final java.sql.Timestamp ts) { + this(ts == null ? null : ts.getTime()); + this.setSubType(DateType.DATETIME); + } + + @Override + public Long asLong() { + + return (Long) this.getRawData(); + } + + @Override + public String asString() { + try { + return ColumnCast.date2String(this); + } catch (Exception e) { + throw DataXException.asDataXException( + CommonErrorCode.CONVERT_NOT_SUPPORT, + String.format("Date[%s]类型不能转为String .", this.toString())); + } + } + + @Override + public Date asDate() { + if (null == this.getRawData()) { + return null; + } + + return new Date((Long) this.getRawData()); + } + + @Override + public byte[] asBytes() { + throw DataXException.asDataXException( + CommonErrorCode.CONVERT_NOT_SUPPORT, "Date类型不能转为Bytes ."); + } + + @Override + public Boolean asBoolean() { + throw DataXException.asDataXException( + CommonErrorCode.CONVERT_NOT_SUPPORT, "Date类型不能转为Boolean ."); + } + + @Override + public Double asDouble() { + throw DataXException.asDataXException( + CommonErrorCode.CONVERT_NOT_SUPPORT, "Date类型不能转为Double ."); + } + + @Override + public BigInteger asBigInteger() { + throw DataXException.asDataXException( + CommonErrorCode.CONVERT_NOT_SUPPORT, "Date类型不能转为BigInteger ."); + } + + @Override + public BigDecimal asBigDecimal() { + throw DataXException.asDataXException( + CommonErrorCode.CONVERT_NOT_SUPPORT, "Date类型不能转为BigDecimal ."); + } + + public DateType getSubType() { + return subType; + } + + public void setSubType(DateType subType) { + this.subType = subType; + } +} \ No newline at end of file diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/element/DoubleColumn.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/element/DoubleColumn.java new file mode 100644 index 000000000..04c799aa0 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/element/DoubleColumn.java @@ -0,0 +1,159 @@ +package com.alibaba.datax.common.element; + +import com.alibaba.datax.common.exception.CommonErrorCode; +import com.alibaba.datax.common.exception.DataXException; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.util.Date; + +public class DoubleColumn extends Column { + + public DoubleColumn(final String data) { + this(data, null == data ? 0 : data.length()); + this.validate(data); + } + + public DoubleColumn(Long data) { + this(data == null ? (String) null : String.valueOf(data)); + } + + public DoubleColumn(Integer data) { + this(data == null ? (String) null : String.valueOf(data)); + } + + /** + * Double无法表示准确的小数数据,我们不推荐使用该方法保存Double数据,建议使用String作为构造入参 + */ + public DoubleColumn(final Double data) { + this(data == null ? (String) null + : new BigDecimal(String.valueOf(data)).toPlainString()); + } + + /** + * Float无法表示准确的小数数据,我们不推荐使用该方法保存Float数据,建议使用String作为构造入参 + */ + public DoubleColumn(final Float data) { + this(data == null ? (String) null + : new BigDecimal(String.valueOf(data)).toPlainString()); + } + + public DoubleColumn(final BigDecimal data) { + this(null == data ? 
(String) null : data.toPlainString()); + } + + public DoubleColumn(final BigInteger data) { + this(null == data ? (String) null : data.toString()); + } + + public DoubleColumn() { + this((String) null); + } + + private DoubleColumn(final String data, int byteSize) { + super(data, Column.Type.DOUBLE, byteSize); + } + + @Override + public BigDecimal asBigDecimal() { + if (null == this.getRawData()) { + return null; + } + + try { + return new BigDecimal((String) this.getRawData()); + } catch (NumberFormatException e) { + throw DataXException.asDataXException( + CommonErrorCode.CONVERT_NOT_SUPPORT, + String.format("String[%s] 无法转换为Double类型 .", + (String) this.getRawData())); + } + } + + @Override + public Double asDouble() { + if (null == this.getRawData()) { + return null; + } + + String string = (String) this.getRawData(); + + boolean isDoubleSpecific = string.equals("NaN") + || string.equals("-Infinity") || string.equals("+Infinity"); + if (isDoubleSpecific) { + return Double.valueOf(string); + } + + BigDecimal result = this.asBigDecimal(); + OverFlowUtil.validateDoubleNotOverFlow(result); + + return result.doubleValue(); + } + + @Override + public Long asLong() { + if (null == this.getRawData()) { + return null; + } + + BigDecimal result = this.asBigDecimal(); + OverFlowUtil.validateLongNotOverFlow(result.toBigInteger()); + + return result.longValue(); + } + + @Override + public BigInteger asBigInteger() { + if (null == this.getRawData()) { + return null; + } + + return this.asBigDecimal().toBigInteger(); + } + + @Override + public String asString() { + if (null == this.getRawData()) { + return null; + } + return (String) this.getRawData(); + } + + @Override + public Boolean asBoolean() { + throw DataXException.asDataXException( + CommonErrorCode.CONVERT_NOT_SUPPORT, "Double类型无法转为Bool ."); + } + + @Override + public Date asDate() { + throw DataXException.asDataXException( + CommonErrorCode.CONVERT_NOT_SUPPORT, "Double类型无法转为Date类型 ."); + } + + @Override + public byte[] asBytes() { + throw DataXException.asDataXException( + CommonErrorCode.CONVERT_NOT_SUPPORT, "Double类型无法转为Bytes类型 ."); + } + + private void validate(final String data) { + if (null == data) { + return; + } + + if (data.equalsIgnoreCase("NaN") || data.equalsIgnoreCase("-Infinity") + || data.equalsIgnoreCase("Infinity")) { + return; + } + + try { + new BigDecimal(data); + } catch (Exception e) { + throw DataXException.asDataXException( + CommonErrorCode.CONVERT_NOT_SUPPORT, + String.format("String[%s]无法转为Double类型 .", data)); + } + } + +} \ No newline at end of file diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/element/LongColumn.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/element/LongColumn.java new file mode 100644 index 000000000..ffade789e --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/element/LongColumn.java @@ -0,0 +1,134 @@ +package com.alibaba.datax.common.element; + +import com.alibaba.datax.common.exception.CommonErrorCode; +import com.alibaba.datax.common.exception.DataXException; +import org.apache.commons.lang3.math.NumberUtils; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.util.Date; + +public class LongColumn extends Column { + + /** + * 从整形字符串表示转为LongColumn,支持Java科学计数法 + *
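+ * [Translation of the note above and below] Builds a LongColumn from the
+ * string form of an integer; Java scientific notation is supported. If data
+ * is the string form of a floating-point number, precision will be lost --
+ * use DoubleColumn for floating-point strings.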

+ * NOTE:
+ * 如果data为浮点类型的字符串表示,数据将会失真,请使用DoubleColumn对接浮点字符串 + */ + public LongColumn(final String data) { + super(null, Column.Type.LONG, 0); + if (null == data) { + return; + } + + try { + BigInteger rawData = NumberUtils.createBigDecimal(data) + .toBigInteger(); + super.setRawData(rawData); + + // 当 rawData 为[0-127]时,rawData.bitLength() < 8,导致其 byteSize = 0,简单起见,直接认为其长度为 domain.length() + // super.setByteSize(rawData.bitLength() / 8); + super.setByteSize(data.length()); + } catch (Exception e) { + throw DataXException.asDataXException( + CommonErrorCode.CONVERT_NOT_SUPPORT, + String.format("String[%s]不能转为Long .", data)); + } + } + + public LongColumn(Long data) { + this(null == data ? (BigInteger) null : BigInteger.valueOf(data)); + } + + public LongColumn(Integer data) { + this(null == data ? (BigInteger) null : BigInteger.valueOf(data)); + } + + public LongColumn(BigInteger data) { + this(data, null == data ? 0 : 8); + } + + private LongColumn(BigInteger data, int byteSize) { + super(data, Column.Type.LONG, byteSize); + } + + public LongColumn() { + this((BigInteger) null); + } + + @Override + public BigInteger asBigInteger() { + if (null == this.getRawData()) { + return null; + } + + return (BigInteger) this.getRawData(); + } + + @Override + public Long asLong() { + BigInteger rawData = (BigInteger) this.getRawData(); + if (null == rawData) { + return null; + } + + OverFlowUtil.validateLongNotOverFlow(rawData); + + return rawData.longValue(); + } + + @Override + public Double asDouble() { + if (null == this.getRawData()) { + return null; + } + + BigDecimal decimal = this.asBigDecimal(); + OverFlowUtil.validateDoubleNotOverFlow(decimal); + + return decimal.doubleValue(); + } + + @Override + public Boolean asBoolean() { + if (null == this.getRawData()) { + return null; + } + + return this.asBigInteger().compareTo(BigInteger.ZERO) != 0 ? 
true + : false; + } + + @Override + public BigDecimal asBigDecimal() { + if (null == this.getRawData()) { + return null; + } + + return new BigDecimal(this.asBigInteger()); + } + + @Override + public String asString() { + if (null == this.getRawData()) { + return null; + } + return ((BigInteger) this.getRawData()).toString(); + } + + @Override + public Date asDate() { + if (null == this.getRawData()) { + return null; + } + return new Date(this.asLong()); + } + + @Override + public byte[] asBytes() { + throw DataXException.asDataXException( + CommonErrorCode.CONVERT_NOT_SUPPORT, "Long类型不能转为Bytes ."); + } + +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/element/OverFlowUtil.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/element/OverFlowUtil.java new file mode 100644 index 000000000..17a3e95d3 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/element/OverFlowUtil.java @@ -0,0 +1,62 @@ +package com.alibaba.datax.common.element; + +import com.alibaba.datax.common.exception.CommonErrorCode; +import com.alibaba.datax.common.exception.DataXException; + +import java.math.BigDecimal; +import java.math.BigInteger; + +public final class OverFlowUtil { + public static final BigInteger MAX_LONG = BigInteger + .valueOf(Long.MAX_VALUE); + + public static final BigInteger MIN_LONG = BigInteger + .valueOf(Long.MIN_VALUE); + + public static final BigDecimal MIN_DOUBLE_POSITIVE = new BigDecimal( + String.valueOf(Double.MIN_VALUE)); + + public static final BigDecimal MAX_DOUBLE_POSITIVE = new BigDecimal( + String.valueOf(Double.MAX_VALUE)); + + public static boolean isLongOverflow(final BigInteger integer) { + return (integer.compareTo(OverFlowUtil.MAX_LONG) > 0 || integer + .compareTo(OverFlowUtil.MIN_LONG) < 0); + + } + + public static void validateLongNotOverFlow(final BigInteger integer) { + boolean isOverFlow = OverFlowUtil.isLongOverflow(integer); + + if (isOverFlow) { + throw DataXException.asDataXException( + CommonErrorCode.CONVERT_OVER_FLOW, + String.format("[%s] 转为Long类型出现溢出 .", integer.toString())); + } + } + + public static boolean isDoubleOverFlow(final BigDecimal decimal) { + if (decimal.signum() == 0) { + return false; + } + + BigDecimal newDecimal = decimal; + boolean isPositive = decimal.signum() == 1; + if (!isPositive) { + newDecimal = decimal.negate(); + } + + return (newDecimal.compareTo(MIN_DOUBLE_POSITIVE) < 0 || newDecimal + .compareTo(MAX_DOUBLE_POSITIVE) > 0); + } + + public static void validateDoubleNotOverFlow(final BigDecimal decimal) { + boolean isOverFlow = OverFlowUtil.isDoubleOverFlow(decimal); + if (isOverFlow) { + throw DataXException.asDataXException( + CommonErrorCode.CONVERT_OVER_FLOW, + String.format("[%s]转为Double类型出现溢出 .", + decimal.toPlainString())); + } + } +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/element/Record.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/element/Record.java new file mode 100644 index 000000000..59d81c58b --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/element/Record.java @@ -0,0 +1,31 @@ +package com.alibaba.datax.common.element; + +import com.webank.wedatasphere.exchangis.datax.core.transport.channel.ChannelElement; + +import java.util.List; + +/** + * Created by jingxing on 14-8-24. 
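+ * A record is the unit that flows through a channel: an ordered list of
+ * {@link Column} values plus a unique id, produced by readers and consumed
+ * by writers.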
+ */ + +public interface Record extends ChannelElement { + + void addColumn(Column column); + + void setColumn(int i, final Column column); + + Column getColumn(int i); + + @Override + String toString(); + + int getColumnNumber(); + + List getColumns(); + + /** + * Unique id + * @return + */ + String uid(); +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/element/StringColumn.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/element/StringColumn.java new file mode 100644 index 000000000..398f54cdc --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/element/StringColumn.java @@ -0,0 +1,163 @@ +package com.alibaba.datax.common.element; + +import com.alibaba.datax.common.exception.CommonErrorCode; +import com.alibaba.datax.common.exception.DataXException; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.util.Date; + +/** + * Created by jingxing on 14-8-24. + */ + +public class StringColumn extends Column { + + public StringColumn() { + this((String) null); + } + + public StringColumn(final String rawData) { + super(rawData, Column.Type.STRING, (null == rawData ? 0 : rawData + .length())); + } + + @Override + public String asString() { + if (null == this.getRawData()) { + return null; + } + + return (String) this.getRawData(); + } + + private void validateDoubleSpecific(final String data) { + if ("NaN".equals(data) || "Infinity".equals(data) + || "-Infinity".equals(data)) { + throw DataXException.asDataXException( + CommonErrorCode.CONVERT_NOT_SUPPORT, + String.format("String[\"%s\"]属于Double特殊类型,不能转为其他类型 .", data)); + } + + return; + } + + @Override + public BigInteger asBigInteger() { + if (null == this.getRawData()) { + return null; + } + + this.validateDoubleSpecific((String) this.getRawData()); + + try { + return this.asBigDecimal().toBigInteger(); + } catch (Exception e) { + throw DataXException.asDataXException( + CommonErrorCode.CONVERT_NOT_SUPPORT, String.format( + "String[\"%s\"]不能转为BigInteger .", this.asString())); + } + } + + @Override + public Long asLong() { + if (null == this.getRawData()) { + return null; + } + + this.validateDoubleSpecific((String) this.getRawData()); + + try { + BigInteger integer = this.asBigInteger(); + OverFlowUtil.validateLongNotOverFlow(integer); + return integer.longValue(); + } catch (Exception e) { + throw DataXException.asDataXException( + CommonErrorCode.CONVERT_NOT_SUPPORT, + String.format("String[\"%s\"]不能转为Long .", this.asString())); + } + } + + @Override + public BigDecimal asBigDecimal() { + if (null == this.getRawData()) { + return null; + } + + this.validateDoubleSpecific((String) this.getRawData()); + + try { + return new BigDecimal(this.asString()); + } catch (Exception e) { + throw DataXException.asDataXException( + CommonErrorCode.CONVERT_NOT_SUPPORT, String.format( + "String [\"%s\"] 不能转为BigDecimal .", this.asString())); + } + } + + @Override + public Double asDouble() { + if (null == this.getRawData()) { + return null; + } + + String data = (String) this.getRawData(); + if ("NaN".equals(data)) { + return Double.NaN; + } + + if ("Infinity".equals(data)) { + return Double.POSITIVE_INFINITY; + } + + if ("-Infinity".equals(data)) { + return Double.NEGATIVE_INFINITY; + } + + BigDecimal decimal = this.asBigDecimal(); + OverFlowUtil.validateDoubleNotOverFlow(decimal); + + return decimal.doubleValue(); + } + + @Override + public Boolean asBoolean() { + if (null == this.getRawData()) { 
+ return null; + } + + if ("true".equalsIgnoreCase(this.asString())) { + return true; + } + + if ("false".equalsIgnoreCase(this.asString())) { + return false; + } + + throw DataXException.asDataXException( + CommonErrorCode.CONVERT_NOT_SUPPORT, + String.format("String[\"%s\"]不能转为Bool .", this.asString())); + } + + @Override + public Date asDate() { + try { + return ColumnCast.string2Date(this); + } catch (Exception e) { + throw DataXException.asDataXException( + CommonErrorCode.CONVERT_NOT_SUPPORT, + String.format("String[\"%s\"]不能转为Date .", this.asString())); + } + } + + @Override + public byte[] asBytes() { + try { + return ColumnCast.string2Bytes(this); + } catch (Exception e) { + throw DataXException.asDataXException( + CommonErrorCode.CONVERT_NOT_SUPPORT, + String.format("String[\"%s\"]不能转为Bytes .", this.asString())); + } + } +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/exception/CommonErrorCode.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/exception/CommonErrorCode.java new file mode 100644 index 000000000..0cd4ad7e2 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/exception/CommonErrorCode.java @@ -0,0 +1,46 @@ +package com.alibaba.datax.common.exception; + +import com.alibaba.datax.common.spi.ErrorCode; + +/** + * + */ +public enum CommonErrorCode implements ErrorCode { + + CONFIG_ERROR("Common-00", "您提供的配置文件存在错误信息,请检查您的作业配置 ."), + CONVERT_NOT_SUPPORT("Common-01", "同步数据出现业务脏数据情况,数据类型转换错误 ."), + CONVERT_OVER_FLOW("Common-02", "同步数据出现业务脏数据情况,数据类型转换溢出 ."), + RETRY_FAIL("Common-10", "方法调用多次仍旧失败 ."), + RUNTIME_ERROR("Common-11", "运行时内部调用错误 ."), + HOOK_INTERNAL_ERROR("Common-12", "Hook运行错误 ."), + SHUT_DOWN_TASK("Common-20", "Task收到了shutdown指令,为failover做准备"), + WAIT_TIME_EXCEED("Common-21", "等待时间超出范围"), + TASK_HUNG_EXPIRED("Common-22", "任务hung住,Expired"), + UNSUPPORTED_METHOD("Commmon-23", "暂不支持该方法"); + + private final String code; + + private final String describe; + + private CommonErrorCode(String code, String describe) { + this.code = code; + this.describe = describe; + } + + @Override + public String getCode() { + return this.code; + } + + @Override + public String getDescription() { + return this.describe; + } + + @Override + public String toString() { + return String.format("Code:[%s], Describe:[%s]", this.code, + this.describe); + } + +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/exception/DataXException.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/exception/DataXException.java new file mode 100644 index 000000000..f360e6990 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/exception/DataXException.java @@ -0,0 +1,62 @@ +package com.alibaba.datax.common.exception; + +import com.alibaba.datax.common.spi.ErrorCode; + +import java.io.PrintWriter; +import java.io.StringWriter; + +public class DataXException extends RuntimeException { + + private static final long serialVersionUID = 1L; + + private ErrorCode errorCode; + + public DataXException(ErrorCode errorCode, String errorMessage) { + super(errorCode.toString() + " - " + errorMessage); + this.errorCode = errorCode; + } + + private DataXException(ErrorCode errorCode, String errorMessage, Throwable cause) { + super(errorCode.toString() + " - " + getMessage(errorMessage) + " - " + getMessage(cause), cause); + + this.errorCode = errorCode; + } 
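+    // A hedged usage sketch (CommonErrorCode is defined earlier in this diff;
+    // not part of the original file):
+    //   throw DataXException.asDataXException(
+    //           CommonErrorCode.RUNTIME_ERROR, "connect failed", cause);
+    // Note that the factories below return an existing DataXException cause as-is.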
+ + public static DataXException asDataXException(ErrorCode errorCode, String message) { + return new DataXException(errorCode, message); + } + + public static DataXException asDataXException(ErrorCode errorCode, String message, Throwable cause) { + if (cause instanceof DataXException) { + return (DataXException) cause; + } + return new DataXException(errorCode, message, cause); + } + + public static DataXException asDataXException(ErrorCode errorCode, Throwable cause) { + if (cause instanceof DataXException) { + return (DataXException) cause; + } + return new DataXException(errorCode, getMessage(cause), cause); + } + + public ErrorCode getErrorCode() { + return this.errorCode; + } + + private static String getMessage(Object obj) { + if (obj == null) { + return ""; + } + + if (obj instanceof Throwable) { + StringWriter str = new StringWriter(); + PrintWriter pw = new PrintWriter(str); + ((Throwable) obj).printStackTrace(pw); + return str.toString(); + // return ((Throwable) obj).getMessage(); + } else { + return obj.toString(); + } + } +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/exception/ExceptionTracker.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/exception/ExceptionTracker.java new file mode 100644 index 000000000..f6d3732e2 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/exception/ExceptionTracker.java @@ -0,0 +1,15 @@ +package com.alibaba.datax.common.exception; + +import java.io.PrintWriter; +import java.io.StringWriter; + +public final class ExceptionTracker { + public static final int STRING_BUFFER = 1024; + + public static String trace(Throwable ex) { + StringWriter sw = new StringWriter(STRING_BUFFER); + PrintWriter pw = new PrintWriter(sw); + ex.printStackTrace(pw); + return sw.toString(); + } +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/plugin/AbstractJobPlugin.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/plugin/AbstractJobPlugin.java new file mode 100644 index 000000000..3820ee7ab --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/plugin/AbstractJobPlugin.java @@ -0,0 +1,35 @@ +package com.alibaba.datax.common.plugin; + +import com.webank.wedatasphere.exchangis.datax.common.constant.TransportType; + +/** + * Created by jingxing on 14-8-24. 
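+ * Job-side plugin base class: it carries the TransportType of the current job
+ * and the JobPluginCollector through which the custom messages reported by
+ * task plugins are gathered.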
+ */ +public abstract class AbstractJobPlugin extends AbstractPlugin { + private TransportType transportType; + + public void setTransportType(TransportType transportType){ + this.transportType = transportType; + } + + public TransportType getTransportType(){ + return this.transportType; + } + /** + * @return the jobPluginCollector + */ + public JobPluginCollector getJobPluginCollector() { + return jobPluginCollector; + } + + /** + * @param jobPluginCollector the jobPluginCollector to set + */ + public void setJobPluginCollector( + JobPluginCollector jobPluginCollector) { + this.jobPluginCollector = jobPluginCollector; + } + + private JobPluginCollector jobPluginCollector; + +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/plugin/AbstractPlugin.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/plugin/AbstractPlugin.java new file mode 100644 index 000000000..4b4cb3433 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/plugin/AbstractPlugin.java @@ -0,0 +1,92 @@ +package com.alibaba.datax.common.plugin; + +import com.alibaba.datax.common.base.BaseObject; +import com.alibaba.datax.common.util.Configuration; + +import java.util.ArrayList; +import java.util.List; + +public abstract class AbstractPlugin extends BaseObject implements Pluginable { + + //Job configuration + private Configuration pluginJobConf; + + //Plugin config + private Configuration pluginConf; + + //Support multiply peers + private List peerPluginJobConfList = new ArrayList<>(); + + private List peerPluginNames = new ArrayList<>(); + + @Override + public String getPluginName() { + assert null != this.pluginConf; + return this.pluginConf.getString("name"); + } + + @Override + public String getDeveloper() { + assert null != this.pluginConf; + return this.pluginConf.getString("developer"); + } + + @Override + public String getDescription() { + assert null != this.pluginConf; + return this.pluginConf.getString("description"); + } + + @Override + public Configuration getPluginJobConf() { + return pluginJobConf; + } + + @Override + public void setPluginJobConf(Configuration pluginJobConf) { + this.pluginJobConf = pluginJobConf; + } + + @Override + public void setPluginConf(Configuration pluginConf) { + this.pluginConf = pluginConf; + } + + @Override + public List getPeerPluginJobConfList() { + return peerPluginJobConfList; + } + + @Override + public void addPeerPluginJobConf(Configuration peerPluginJobConf) { + this.peerPluginJobConfList.add(peerPluginJobConf); + } + + @Override + public List getPeerPluginNameList() { + return peerPluginNames; + } + + @Override + public void addPeerPluginName(String peerPluginName) { + this.peerPluginNames.add(peerPluginName); + } + + public void preCheck() { + } + + public void prepare() { + } + + public void post() { + } + + public void preHandler(Configuration jobConfiguration) { + + } + + public void postHandler(Configuration jobConfiguration) { + + } + +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/plugin/AbstractTaskPlugin.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/plugin/AbstractTaskPlugin.java new file mode 100644 index 000000000..39fbbe9b5 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/plugin/AbstractTaskPlugin.java @@ -0,0 +1,37 @@ +package com.alibaba.datax.common.plugin; + +/** + * Created by jingxing 
on 14-8-24.
+ */
+public abstract class AbstractTaskPlugin extends AbstractPlugin {
+
+    //A TaskPlugin must carry its taskId
+    private int taskGroupId;
+    private int taskId;
+    private TaskPluginCollector taskPluginCollector;
+
+    public TaskPluginCollector getTaskPluginCollector() {
+        return taskPluginCollector;
+    }
+
+    public void setTaskPluginCollector(
+            TaskPluginCollector taskPluginCollector) {
+        this.taskPluginCollector = taskPluginCollector;
+    }
+
+    public int getTaskId() {
+        return taskId;
+    }
+
+    public void setTaskId(int taskId) {
+        this.taskId = taskId;
+    }
+
+    public int getTaskGroupId() {
+        return taskGroupId;
+    }
+
+    public void setTaskGroupId(int taskGroupId) {
+        this.taskGroupId = taskGroupId;
+    }
+}
diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/plugin/BasicDataReceiver.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/plugin/BasicDataReceiver.java
new file mode 100644
index 000000000..4b5156bb9
--- /dev/null
+++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/plugin/BasicDataReceiver.java
@@ -0,0 +1,18 @@
+package com.alibaba.datax.common.plugin;
+
+/**
+ * @author davidhua
+ * 2019/8/20
+ */
+public interface BasicDataReceiver<T> {
+    /**
+     * get data from the reader (actually from the channel)
+     * @return the next data object read from the channel
+     */
+    T getFromReader();
+
+    /**
+     * shutdown the channel
+     */
+    void shutdown();
+}
diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/plugin/JobPluginCollector.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/plugin/JobPluginCollector.java
new file mode 100644
index 000000000..58bdc4563
--- /dev/null
+++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/plugin/JobPluginCollector.java
@@ -0,0 +1,20 @@
+package com.alibaba.datax.common.plugin;
+
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Created by jingxing on 14-9-9.
+ */
+public interface JobPluginCollector extends PluginCollector {
+
+    /**
+     * Fetch all custom messages collected from the tasks
+     */
+    Map<String, List<String>> getMessage();
+
+    /**
+     * Fetch the custom messages collected from the tasks for the given key
+     */
+    List<String> getMessage(String key);
+}
diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/plugin/PluginCollector.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/plugin/PluginCollector.java
new file mode 100644
index 000000000..b58cd77ad
--- /dev/null
+++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/plugin/PluginCollector.java
@@ -0,0 +1,9 @@
+package com.alibaba.datax.common.plugin;
+
+
+/**
+ * This is only a marker interface
+ */
+public interface PluginCollector {
+
+}
diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/plugin/PluginProcessorLoader.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/plugin/PluginProcessorLoader.java
new file mode 100644
index 000000000..8a5516e19
--- /dev/null
+++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/plugin/PluginProcessorLoader.java
@@ -0,0 +1,25 @@
+package com.alibaba.datax.common.plugin;
+
+/**
+ * @author davidhua
+ * 2019/8/26
+ */
+public interface PluginProcessorLoader {
+
+    /**
+     * load
+     * @param fullClassName full class name
+     * @param javaCode java source code
+     * @param classpath classpath
+     * @return whether the class was loaded successfully
+     */
+    boolean load(String fullClassName, String javaCode, String classpath);
+
+    /**
+     * load
+     * @param fullClassName full class name
+     * @param javaCode java source code
+     * @return whether the class was loaded successfully
+     */
+    boolean load(String fullClassName, String javaCode);
+}
diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/plugin/Pluginable.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/plugin/Pluginable.java
new file mode 100644
index 000000000..412b943bb
--- /dev/null
+++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/plugin/Pluginable.java
@@ -0,0 +1,32 @@
+package com.alibaba.datax.common.plugin;
+
+import com.alibaba.datax.common.util.Configuration;
+
+import java.util.List;
+
+public interface Pluginable {
+    String getDeveloper();
+
+    String getDescription();
+
+    void setPluginConf(Configuration pluginConf);
+
+    void init();
+
+    void destroy();
+
+    String getPluginName();
+
+    Configuration getPluginJobConf();
+
+    List<Configuration> getPeerPluginJobConfList();
+
+    List<String> getPeerPluginNameList();
+
+    void setPluginJobConf(Configuration jobConf);
+
+    void addPeerPluginJobConf(Configuration peerPluginJobConf);
+
+    void addPeerPluginName(String peerPluginName);
+
+}
diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/plugin/RecordReceiver.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/plugin/RecordReceiver.java
new file mode 100644
index 000000000..6a055e8b6
--- /dev/null
+++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/plugin/RecordReceiver.java
@@ -0,0 +1,23 @@
+/**
+ * (C) 2010-2013 Alibaba Group Holding Limited.
+ *

+ * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + *

+ * http://www.apache.org/licenses/LICENSE-2.0 + *

+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.alibaba.datax.common.plugin;
+
+import com.alibaba.datax.common.element.Record;
+
+public interface RecordReceiver extends BasicDataReceiver<Record> {
+
+}
diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/plugin/RecordSender.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/plugin/RecordSender.java
new file mode 100644
index 000000000..ddc2486a8
--- /dev/null
+++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/plugin/RecordSender.java
@@ -0,0 +1,34 @@
+/**
+ * (C) 2010-2013 Alibaba Group Holding Limited.
+ *

+ * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + *

+ * http://www.apache.org/licenses/LICENSE-2.0 + *

+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.alibaba.datax.common.plugin; + +import com.alibaba.datax.common.element.Record; + +public interface RecordSender { + + Record createRecord(); + + void sendToWriter(Record record); + + void flush(); + + void terminate(); + + void shutdown(); + +// void setCheckPointId(String id); +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/plugin/TaskPluginCollector.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/plugin/TaskPluginCollector.java new file mode 100644 index 000000000..466d73c09 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/plugin/TaskPluginCollector.java @@ -0,0 +1,57 @@ +package com.alibaba.datax.common.plugin; + + import com.alibaba.datax.common.element.Record; + +/** + * 该接口提供给Task Plugin用来记录脏数据和自定义信息。
+ *
+ * 1. Dirty data: the TaskPluginCollector provides several adapters for recording dirty data,
+ *    including local output, centralized reporting, and so on
+ * 2. Custom messages: every task plugin can collect messages through the TaskPluginCollector
+ *    while it runs, and the Job plugin fetches them via the getMessage() interface during the POST phase
+ */
+public abstract class TaskPluginCollector implements PluginCollector {
+    /**
+     * Collect a dirty record
+     *
+     * @param dirtyRecord the dirty record
+     * @param t the exception
+     * @param errorMessage the error message
+     */
+    public abstract void collectDirtyRecord(final Record dirtyRecord,
+                                            final Throwable t, final String errorMessage);
+
+    /**
+     * Collect a dirty record
+     *
+     * @param dirtyRecord the dirty record
+     * @param errorMessage the error message
+     */
+    public void collectDirtyRecord(final Record dirtyRecord,
+                                   final String errorMessage) {
+        this.collectDirtyRecord(dirtyRecord, null, errorMessage);
+    }
+
+    /**
+     * Collect a dirty record
+     *
+     * @param dirtyRecord the dirty record
+     * @param t the exception
+     */
+    public void collectDirtyRecord(final Record dirtyRecord, final Throwable t) {
+        this.collectDirtyRecord(dirtyRecord, t, "");
+    }
+
+    /**
+     * Collect a custom message; the Job plugin can fetch it via getMessage().
+     * When several values are collected under the same key, a List records all values for that key.
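+     * <p>
+     * For example (an illustrative sketch only; the key and message texts are invented):
+     * <pre>
+     * // inside a task plugin:
+     * taskPluginCollector.collectDirtyRecord(record, e, "field exceeds the allowed length");
+     * taskPluginCollector.collectMessage("overLengthCount", "1");
+     * // later, inside the Job plugin's post():
+     * List<String> counts = getJobPluginCollector().getMessage("overLengthCount");
+     * </pre>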
+     */
+    public abstract void collectMessage(final String key, final String value);
+
+    /**
+     * Collect task's parameter
+     * @param key param key
+     * @param value string value
+     * @param isUnique if is a unique key
+     */
+    public abstract void collectParameter(final String key, final String value, boolean isUnique);
+}
diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/spi/ErrorCode.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/spi/ErrorCode.java
new file mode 100644
index 000000000..c26857ff6
--- /dev/null
+++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/spi/ErrorCode.java
@@ -0,0 +1,33 @@
+package com.alibaba.datax.common.spi;
+
+/**
+ * Note: a toString() implementation should always be provided. For example:
+ *

+ *

+ *
+ * @Override
+ * public String toString() {
+ * 	return String.format("Code:[%s], Description:[%s]. ", this.code, this.describe);
+ * }
+ * 
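+ * A minimal enum implementing this interface might look like the following (an
+ * illustrative sketch; the enum name and message texts are invented for the example):
+ * <pre>
+ * public enum MyPluginErrorCode implements ErrorCode {
+ *     BAD_CONFIG("MyPlugin-00", "the configuration is invalid");
+ *
+ *     private final String code;
+ *     private final String describe;
+ *
+ *     MyPluginErrorCode(String code, String describe) {
+ *         this.code = code;
+ *         this.describe = describe;
+ *     }
+ *
+ *     @Override
+ *     public String getCode() { return this.code; }
+ *
+ *     @Override
+ *     public String getDescription() { return this.describe; }
+ *
+ *     @Override
+ *     public String toString() {
+ *         return String.format("Code:[%s], Description:[%s]. ", this.code, this.describe);
+ *     }
+ * }
+ * </pre>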
+ */
+public interface ErrorCode {
+    // the error code
+    String getCode();
+
+    // the description of the error code
+    String getDescription();
+
+    /**
+     * A toString() implementation must be provided
+     *

+ *

+     * @Override
+     * public String toString() {
+     * 	return String.format("Code:[%s], Description:[%s]. ", this.code, this.describe);
+     * }
+     * 
+     */
+    @Override
+    String toString();
+}
diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/spi/Hook.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/spi/Hook.java
new file mode 100644
index 000000000..d510f57c1
--- /dev/null
+++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/spi/Hook.java
@@ -0,0 +1,27 @@
+package com.alibaba.datax.common.spi;
+
+import com.alibaba.datax.common.util.Configuration;
+
+import java.util.Map;
+
+/**
+ * Created by xiafei.qiuxf on 14/12/17.
+ */
+public interface Hook {
+
+    /**
+     * Return the hook's name
+     *
+     * @return the name of this hook
+     */
+    public String getName();
+
+    /**
+     * TODO: documentation
+     *
+     * @param jobConf the job configuration
+     * @param msg the message map
+     */
+    public void invoke(Configuration jobConf, Map msg);
+
+}
diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/spi/Reader.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/spi/Reader.java
new file mode 100644
index 000000000..433f3274e
--- /dev/null
+++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/spi/Reader.java
@@ -0,0 +1,44 @@
+package com.alibaba.datax.common.spi;
+
+import com.alibaba.datax.common.base.BaseObject;
+import com.alibaba.datax.common.plugin.RecordSender;
+import com.alibaba.datax.common.util.Configuration;
+import com.webank.wedatasphere.exchangis.datax.common.spi.EnhancedReader;
+
+import java.util.List;
+
+/**
+ * Every Reader plugin implements the Job and Task inner classes inside itself.
+ */
+public abstract class Reader extends BaseObject {
+
+    /**
+     * Every Reader plugin must implement the Job inner class.
+     */
+    public static abstract class Job extends EnhancedReader.Job {
+
+        /**
+         * Split the job into tasks
+         *
+         * @param adviceNumber note that adviceNumber is the number of tasks the framework advises
+         *                     the plugin to split into; plugin developers should preferably split
+         *                     into a task count >=
+         *                     adviceNumber.
+         *
+         * The advice exists to give users the best possible throughput. For example, if the
+         * framework determines that the user's data store can sustain 100 concurrent connections
+         * and the user wants a concurrency of 100, a plugin that follows the rule above and splits
+         * into >= 100 connection-level tasks lets DataX start 100 channels at once, which gives
+         * the user the best throughput.
+         * Likewise, when a user synchronizes a single MySQL table and expects a concurrency of 10,
+         * the plugin should split that table, e.g. by primary-key ranges; if the final task count
+         * is >= 10, we can deliver the maximum throughput to the user.
+         *
+         * Of course this is only an advised value; a Reader plugin may split by its own rules, but
+         * we strongly recommend splitting according to the value advised by the framework.
+         *
+         * For ODPS writing into OTS, pre-sorting/pre-partitioning constraints may force the split
+         * to follow the partition information and prevent any finer-grained split; in such cases
+         * the split can only follow the physical layout of the source.
+         *
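+         * A sketch of one possible split, purely for illustration (the "splitPk.min"/"splitPk.max"
+         * and "range.*" configuration keys below are invented for the example):
+         * <pre>
+         * public List<Configuration> split(int adviceNumber) {
+         *     List<Configuration> confs = new ArrayList<>();
+         *     long min = getPluginJobConf().getLong("splitPk.min");
+         *     long max = getPluginJobConf().getLong("splitPk.max");
+         *     long step = Math.max(1, (max - min + 1) / adviceNumber);
+         *     for (long start = min; start <= max; start += step) {
+         *         Configuration conf = getPluginJobConf().clone();
+         *         conf.set("range.start", start);
+         *         conf.set("range.end", Math.min(start + step - 1, max));
+         *         confs.add(conf);
+         *     }
+         *     return confs;
+         * }
+         * </pre>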
+         */
+        public abstract List<Configuration> split(int adviceNumber);
+    }
+
+    public static abstract class Task extends EnhancedReader.Task {
+        public abstract void startRead(RecordSender recordSender);
+    }
+}
diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/spi/Writer.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/spi/Writer.java
new file mode 100644
index 000000000..101dd7382
--- /dev/null
+++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/spi/Writer.java
@@ -0,0 +1,42 @@
+package com.alibaba.datax.common.spi;
+
+import com.alibaba.datax.common.base.BaseObject;
+import com.alibaba.datax.common.plugin.RecordReceiver;
+import com.alibaba.datax.common.util.Configuration;
+import com.webank.wedatasphere.exchangis.datax.common.spi.EnhancedWriter;
+
+import java.util.List;
+
+/**
+ * Every Writer plugin implements the Writer class and the Job and Task inner classes inside it.
+ */
+public abstract class Writer extends BaseObject {
+    /**
+     * Every Writer plugin must implement the Job inner class
+     */
+    public abstract static class Job extends EnhancedWriter.Job {
+
+        /**
+         * Split the job into tasks.
+         *
+         * @param mandatoryNumber to keep the Reader and Writer task counts equal, the Writer
+         *                        plugin must split into exactly the number of tasks given by the
+         *                        source side; otherwise the framework reports an error!
+         */
+        public abstract List<Configuration> split(int mandatoryNumber);
+
+    }
+
+    /**
+     * Every Writer plugin must implement the Task inner class
+     */
+    public abstract static class Task extends EnhancedWriter.Task {
+
+        public abstract void startWrite(RecordReceiver lineReceiver);
+
+        public boolean supportFailOver() {
+            return false;
+        }
+
+    }
+}
diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/statistics/PerfRecord.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/statistics/PerfRecord.java
new file mode 100644
index 000000000..cf59205f3
--- /dev/null
+++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/statistics/PerfRecord.java
@@ -0,0 +1,273 @@
+package com.alibaba.datax.common.statistics;
+
+import com.alibaba.datax.common.util.HostUtils;
+import org.apache.commons.lang3.time.DateFormatUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.Date;
+import java.util.concurrent.atomic.AtomicLong;
+
+/**
+ * Created by liqiang on 15/8/23.
+ */
+@SuppressWarnings("NullableProblems")
+public class PerfRecord implements Comparable<PerfRecord> {
+    private static Logger perf = LoggerFactory.getLogger(PerfRecord.class);
+    private static String datetimeFormat = "yyyy-MM-dd HH:mm:ss";
+
+
+    public enum PHASE {
+        /**
+         * Total task run time; the first 10 phases are framework statistics, the later ones
+         * are statistics specific to some plugins
+         */
+        TASK_TOTAL(0),
+
+        READ_TASK_INIT(1),
+        READ_TASK_PREPARE(2),
+        READ_TASK_DATA(3),
+        READ_TASK_POST(4),
+        READ_TASK_DESTROY(5),
+
+        WRITE_TASK_INIT(6),
+        WRITE_TASK_PREPARE(7),
+        WRITE_TASK_DATA(8),
+        WRITE_TASK_POST(9),
+        WRITE_TASK_DESTROY(10),
+
+        /**
+         * SQL_QUERY: the sql query phase, a statistic specific to some readers
+         */
+        SQL_QUERY(100),
+        /**
+         * All data has been read out of the sql result
+         */
+        RESULT_NEXT_ALL(101),
+
+        /**
+         * only odps block close
+         */
+        ODPS_BLOCK_CLOSE(102),
+
+        WAIT_READ_TIME(103),
+
+        WAIT_WRITE_TIME(104),
+
+        TRANSFORMER_TIME(201);
+
+        private int val;
+
+        PHASE(int val) {
+            this.val = val;
+        }
+
+        public int toInt() {
+            return val;
+        }
+    }
+
+    public enum ACTION {
+        start,
+        end
+    }
+
+    private final int taskGroupId;
+    private final int taskId;
+    private final PHASE phase;
+    private ACTION action;
+    private Date startTime;
+    private volatile long elapsedTimeInNs = -1;
+    private AtomicLong count = new AtomicLong(0);
+    private AtomicLong size = new AtomicLong(0);
+
+    private volatile long startTimeInNs;
+    private volatile boolean isReport = false;
+
+    public PerfRecord(int taskGroupId, int taskId, PHASE phase) {
+        this.taskGroupId = taskGroupId;
+        this.taskId = taskId;
+        this.phase = phase;
+    }
+
+    public static void addPerfRecord(int taskGroupId, int taskId, PHASE phase, long startTime, long elapsedTimeInNs) {
+        if (PerfTrace.getInstance().isEnable()) {
+            PerfRecord perfRecord = new PerfRecord(taskGroupId, taskId, phase);
+            perfRecord.elapsedTimeInNs = elapsedTimeInNs;
+            perfRecord.action = ACTION.end;
+            perfRecord.startTime = new Date(startTime);
+            //register with PerfTrace
+            PerfTrace.getInstance().tracePerfRecord(perfRecord);
+            perf.info(perfRecord.toString());
+        }
+    }
+
+    public void start() {
+        if (PerfTrace.getInstance().isEnable()) {
+            this.startTime = new Date();
+            this.startTimeInNs = System.nanoTime();
+            this.action = ACTION.start;
+            //register with PerfTrace
+            PerfTrace.getInstance().tracePerfRecord(this);
+            perf.info(toString());
+        }
+    }
+
+    public void addCount(long count) {
+        this.count.addAndGet(count);
+    }
+
+    public void addSize(long size) {
+        //accumulate into the size counter
+        this.size.addAndGet(size);
+    }
+
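+    // Typical lifecycle, as an illustrative sketch:
+    //   PerfRecord record = new PerfRecord(taskGroupId, taskId, PHASE.READ_TASK_DATA);
+    //   record.start();
+    //   ... do the work, calling record.addCount(n) / record.addSize(bytes) ...
+    //   record.end();   // computes the elapsed time and reports to PerfTrace
+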
public void end() { + if (PerfTrace.getInstance().isEnable()) { + this.elapsedTimeInNs = System.nanoTime() - startTimeInNs; + this.action = ACTION.end; + PerfTrace.getInstance().tracePerfRecord(this); + perf.info(toString()); + } + } + + public void end(long elapsedTimeInNs) { + if (PerfTrace.getInstance().isEnable()) { + this.elapsedTimeInNs = elapsedTimeInNs; + this.action = ACTION.end; + PerfTrace.getInstance().tracePerfRecord(this); + perf.info(toString()); + } + } + + @Override + public String toString() { + return String.format("%s,%s,%s,%s,%s,%s,%s,%s,%s,%s" + , getInstId(), taskGroupId, taskId, phase, action, + DateFormatUtils.format(startTime, datetimeFormat), elapsedTimeInNs, count.get(), size.get(), getHostIP()); + } + + + @Override + public int compareTo(PerfRecord o) { + if (o == null) { + return 1; + } + return this.elapsedTimeInNs > o.elapsedTimeInNs ? 1 : this.elapsedTimeInNs == o.elapsedTimeInNs ? 0 : -1; + } + + @Override + public int hashCode() { + long jobId = getInstId(); + int result = (int) (jobId ^ (jobId >>> 32)); + result = 31 * result + taskGroupId; + result = 31 * result + taskId; + result = 31 * result + phase.toInt(); + result = 31 * result + (startTime != null ? startTime.hashCode() : 0); + return result; + } + + @Override + public boolean equals(Object o) { + if(this == o) { + return true; + } + if (!(o instanceof PerfRecord)) { + return false; + } + + PerfRecord dst = (PerfRecord) o; + + if (this.getInstId() != dst.getInstId()){ + return false; + } + if(this.taskGroupId != dst.taskGroupId){ + return false; + } + if(this.taskId != dst.taskId){ + return false; + } + if(phase != null ? !phase.equals(dst.phase) : dst.phase != null){ + return false; + } + if(startTime != null ? !startTime.equals(dst.startTime) : dst.startTime != null){ + return false; + } + return true; + } + + public PerfRecord copy() { + PerfRecord copy = new PerfRecord(this.taskGroupId, this.getTaskId(), this.phase); + copy.action = this.action; + copy.startTime = this.startTime; + copy.elapsedTimeInNs = this.elapsedTimeInNs; + copy.count = this.count; + copy.size = this.size; + return copy; + } + + public int getTaskGroupId() { + return taskGroupId; + } + + public int getTaskId() { + return taskId; + } + + public PHASE getPhase() { + return phase; + } + + public ACTION getAction() { + return action; + } + + public long getElapsedTimeInNs() { + return elapsedTimeInNs; + } + + public long getCount() { + return count.get(); + } + + public long getSize() { + return size.get(); + } + + public long getInstId() { + return PerfTrace.getInstance().getInstId(); + } + + public String getHostIP() { + return HostUtils.IP; + } + + public String getHostName() { + return HostUtils.HOSTNAME; + } + + public Date getStartTime() { + return startTime; + } + + public long getStartTimeInMs() { + return startTime.getTime(); + } + + public long getStartTimeInNs() { + return startTimeInNs; + } + + public String getDatetime() { + if (startTime == null) { + return "null time"; + } + return DateFormatUtils.format(startTime, datetimeFormat); + } + + public boolean isReport() { + return isReport; + } + + public void setIsReport(boolean isReport) { + this.isReport = isReport; + } +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/statistics/PerfTrace.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/statistics/PerfTrace.java new file mode 100644 index 000000000..6640474c0 --- /dev/null +++ 
b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/statistics/PerfTrace.java @@ -0,0 +1,912 @@ +package com.alibaba.datax.common.statistics; + +import com.alibaba.datax.common.statistics.PerfRecord.PHASE; +import com.alibaba.datax.common.util.Configuration; +import com.alibaba.datax.common.util.HostUtils; +import org.apache.commons.lang3.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.text.ParseException; +import java.text.SimpleDateFormat; +import java.util.*; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.TimeUnit; + +/** + * PerfTrace 记录 job(local模式),taskGroup(distribute模式),因为这2种都是jvm,即一个jvm里只需要有1个PerfTrace。 + */ + +public class PerfTrace { + + private static Logger LOG = LoggerFactory.getLogger(PerfTrace.class); + private volatile static PerfTrace instance; + private static final Object lock = new Object(); + private String perfTraceId; + private volatile boolean enable; + private volatile boolean isJob; + private long instId; + private long jobId; + private long jobVersion; + private int taskGroupId; + private int channelNumber; + + private int priority; + private int batchSize = 500; + private volatile boolean perfReportEnable = true; + + //jobid_jobversion,instanceid,taskid, src_mark, dst_mark, + private Map taskDetails = new ConcurrentHashMap(); + //PHASE => PerfRecord + private ConcurrentHashMap perfRecordMaps4print = new ConcurrentHashMap(); + // job_phase => SumPerf4Report + private SumPerf4Report sumPerf4Report = new SumPerf4Report(); + private SumPerf4Report sumPerf4Report4NotEnd; + private Configuration jobInfo; + private final Set needReportPool4NotEnd = new HashSet(); + private final List totalEndReport = new ArrayList(); + + /** + * 单实例 + * + * @param isJob + * @param jobId + * @param taskGroupId + * @return + */ + public static PerfTrace getInstance(boolean isJob, long jobId, int taskGroupId, int priority, boolean enable) { + + if (instance == null) { + synchronized (lock) { + if (instance == null) { + instance = new PerfTrace(isJob, jobId, taskGroupId, priority, enable); + } + } + } + return instance; + } + + /** + * 因为一个JVM只有一个,因此在getInstance(isJob,jobId,taskGroupId)调用完成实例化后,方便后续调用,直接返回该实例 + * + * @return + */ + public static PerfTrace getInstance() { + if (instance == null) { + LOG.error("PerfTrace instance not be init! must have some error! "); + synchronized (lock) { + if (instance == null) { + instance = new PerfTrace(false, -1111, -1111, 0, false); + } + } + } + return instance; + } + + private PerfTrace(boolean isJob, long jobId, int taskGroupId, int priority, boolean enable) { + try { + this.perfTraceId = isJob ? "job_" + jobId : String.format("taskGroup_%s_%s", jobId, taskGroupId); + this.enable = enable; + this.isJob = isJob; + this.taskGroupId = taskGroupId; + this.instId = jobId; + this.priority = priority; + LOG.info(String.format("PerfTrace traceId=%s, isEnable=%s, priority=%s", this.perfTraceId, this.enable, this.priority)); + + } catch (Exception e) { + // do nothing + this.enable = false; + } + } + + public void addTaskDetails(int taskId, String detail) { + if (enable) { + String before = ""; + int index = detail.indexOf("?"); + String current = detail.substring(0, index == -1 ? 
detail.length() : index); + if (current.indexOf("[") >= 0) { + current += "]"; + } + if (taskDetails.containsKey(taskId)) { + before = taskDetails.get(taskId).trim(); + } + if (StringUtils.isEmpty(before)) { + before = ""; + } else { + before += ","; + } + this.taskDetails.put(taskId, before + current); + } + } + + public void tracePerfRecord(PerfRecord perfRecord) { + try { + if (enable) { + long curNanoTime = System.nanoTime(); + //ArrayList非线程安全 + switch (perfRecord.getAction()) { + case end: + synchronized (totalEndReport) { + totalEndReport.add(perfRecord); + + if (totalEndReport.size() > batchSize * 10) { + sumPerf4EndPrint(totalEndReport); + } + } + + if (perfReportEnable && needReport(perfRecord)) { + synchronized (needReportPool4NotEnd) { + sumPerf4Report.add(curNanoTime, perfRecord); + needReportPool4NotEnd.remove(perfRecord); + } + } + + break; + case start: + if (perfReportEnable && needReport(perfRecord)) { + synchronized (needReportPool4NotEnd) { + needReportPool4NotEnd.add(perfRecord); + } + } + break; + } + } + } catch (Exception e) { + // do nothing + } + } + + private boolean needReport(PerfRecord perfRecord) { + switch (perfRecord.getPhase()) { + case TASK_TOTAL: + case SQL_QUERY: + case RESULT_NEXT_ALL: + case ODPS_BLOCK_CLOSE: + return true; + } + return false; + } + + public String summarizeNoException() { + String res; + try { + res = summarize(); + } catch (Exception e) { + res = "PerfTrace summarize has Exception " + e.getMessage(); + } + return res; + } + + //任务结束时,对当前的perf总汇总统计 + private synchronized String summarize() { + if (!enable) { + return "PerfTrace not enable!"; + } + + if (totalEndReport.size() > 0) { + sumPerf4EndPrint(totalEndReport); + } + + StringBuilder info = new StringBuilder(); + info.append("\n === total summarize info === \n"); + info.append("\n 1. all phase average time info and max time task info: \n\n"); + info.append(String.format("%-20s | %18s | %18s | %18s | %18s | %-100s\n", "PHASE", "AVERAGE USED TIME", "ALL TASK NUM", "MAX USED TIME", "MAX TASK ID", "MAX TASK INFO")); + + List keys = new ArrayList(perfRecordMaps4print.keySet()); + Collections.sort(keys, new Comparator() { + @Override + public int compare(PHASE o1, PHASE o2) { + return o1.toInt() - o2.toInt(); + } + }); + for (PHASE phase : keys) { + SumPerfRecord4Print sumPerfRecord = perfRecordMaps4print.get(phase); + if (sumPerfRecord == null) { + continue; + } + long averageTime = sumPerfRecord.getAverageTime(); + long maxTime = sumPerfRecord.getMaxTime(); + int maxTaskId = sumPerfRecord.maxTaskId; + int maxTaskGroupId = sumPerfRecord.getMaxTaskGroupId(); + info.append(String.format("%-20s | %18s | %18s | %18s | %18s | %-100s\n", + phase, unitTime(averageTime), sumPerfRecord.totalCount, unitTime(maxTime), jobId + "-" + maxTaskGroupId + "-" + maxTaskId, taskDetails.get(maxTaskId))); + } + + //SumPerfRecord4Print countSumPerf = Optional.fromNullable(perfRecordMaps4print.get(PHASE.READ_TASK_DATA)).or(new SumPerfRecord4Print()); + + SumPerfRecord4Print countSumPerf = perfRecordMaps4print.get(PHASE.READ_TASK_DATA); + if (countSumPerf == null) { + countSumPerf = new SumPerfRecord4Print(); + } + + long averageRecords = countSumPerf.getAverageRecords(); + long averageBytes = countSumPerf.getAverageBytes(); + long maxRecord = countSumPerf.getMaxRecord(); + long maxByte = countSumPerf.getMaxByte(); + int maxTaskId4Records = countSumPerf.getMaxTaskId4Records(); + int maxTGID4Records = countSumPerf.getMaxTGID4Records(); + + info.append("\n\n 2. 
record average count and max count task info :\n\n"); + info.append(String.format("%-20s | %18s | %18s | %18s | %18s | %18s | %-100s\n", "PHASE", "AVERAGE RECORDS", "AVERAGE BYTES", "MAX RECORDS", "MAX RECORD`S BYTES", "MAX TASK ID", "MAX TASK INFO")); + if (maxTaskId4Records > -1) { + info.append(String.format("%-20s | %18s | %18s | %18s | %18s | %18s | %-100s\n" + , PHASE.READ_TASK_DATA, averageRecords, unitSize(averageBytes), maxRecord, unitSize(maxByte), jobId + "-" + maxTGID4Records + "-" + maxTaskId4Records, taskDetails.get(maxTaskId4Records))); + + } + return info.toString(); + } + + //缺省传入的时间是nano + public static String unitTime(long time) { + return unitTime(time, TimeUnit.NANOSECONDS); + } + + public static String unitTime(long time, TimeUnit timeUnit) { + return String.format("%,.3fs", ((float) timeUnit.toNanos(time)) / 1000000000); + } + + public static String unitSize(long size) { + if (size > 1000000000) { + return String.format("%,.2fG", (float) size / 1000000000); + } else if (size > 1000000) { + return String.format("%,.2fM", (float) size / 1000000); + } else if (size > 1000) { + return String.format("%,.2fK", (float) size / 1000); + } else { + return size + "B"; + } + } + + + public synchronized ConcurrentHashMap getPerfRecordMaps4print() { + if (totalEndReport.size() > 0) { + sumPerf4EndPrint(totalEndReport); + } + return perfRecordMaps4print; + } + + public SumPerf4Report getSumPerf4Report() { + return sumPerf4Report; + } + + public Set getNeedReportPool4NotEnd() { + return needReportPool4NotEnd; + } + + public List getTotalEndReport() { + return totalEndReport; + } + + public Map getTaskDetails() { + return taskDetails; + } + + public boolean isEnable() { + return enable; + } + + public boolean isJob() { + return isJob; + } + + private String cluster; + private String jobDomain; + private String srcType; + private String dstType; + private String srcGuid; + private String dstGuid; + private Date windowStart; + private Date windowEnd; + private Date jobStartTime; + + public void setJobInfo(Configuration jobInfo, boolean perfReportEnable, int channelNumber) { + try { + this.jobInfo = jobInfo; + if (jobInfo != null && perfReportEnable) { + + cluster = jobInfo.getString("cluster"); + + String srcDomain = jobInfo.getString("srcDomain", "null"); + String dstDomain = jobInfo.getString("dstDomain", "null"); + jobDomain = srcDomain + "|" + dstDomain; + srcType = jobInfo.getString("srcType"); + dstType = jobInfo.getString("dstType"); + srcGuid = jobInfo.getString("srcGuid"); + dstGuid = jobInfo.getString("dstGuid"); + windowStart = getWindow(jobInfo.getString("windowStart"), true); + windowEnd = getWindow(jobInfo.getString("windowEnd"), false); + String jobIdStr = jobInfo.getString("jobId"); + jobId = StringUtils.isEmpty(jobIdStr) ? (long) -5 : Long.parseLong(jobIdStr); + String jobVersionStr = jobInfo.getString("jobVersion"); + jobVersion = StringUtils.isEmpty(jobVersionStr) ? 
(long) -4 : Long.parseLong(jobVersionStr); + jobStartTime = new Date(); + } + this.perfReportEnable = perfReportEnable; + this.channelNumber = channelNumber; + } catch (Exception e) { + this.perfReportEnable = false; + } + } + + private Date getWindow(String windowStr, boolean startWindow) { + SimpleDateFormat sdf1 = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); + SimpleDateFormat sdf2 = new SimpleDateFormat("yyyy-MM-dd 00:00:00"); + if (StringUtils.isNotEmpty(windowStr)) { + try { + return sdf1.parse(windowStr); + } catch (ParseException e) { + // do nothing + } + } + + if (startWindow) { + try { + return sdf2.parse(sdf2.format(new Date())); + } catch (ParseException e1) { + //do nothing + } + } + + return null; + } + + public long getInstId() { + return instId; + } + + public Configuration getJobInfo() { + return jobInfo; + } + + public void setBatchSize(int batchSize) { + this.batchSize = batchSize; + } + + public synchronized JobStatisticsDto2 getReports(String mode) { + + try { + if (!enable || !perfReportEnable) { + return null; + } + + if (("job".equalsIgnoreCase(mode) && !isJob) || "tg".equalsIgnoreCase(mode) && isJob) { + return null; + } + + //每次将未完成的task的统计清空 + sumPerf4Report4NotEnd = new SumPerf4Report(); + Set needReportPool4NotEndTmp = null; + synchronized (needReportPool4NotEnd) { + needReportPool4NotEndTmp = new HashSet(needReportPool4NotEnd); + } + + long curNanoTime = System.nanoTime(); + for (PerfRecord perfRecord : needReportPool4NotEndTmp) { + sumPerf4Report4NotEnd.add(curNanoTime, perfRecord); + } + + JobStatisticsDto2 jdo = new JobStatisticsDto2(); + jdo.setInstId(this.instId); + if (isJob) { + jdo.setTaskGroupId(-6); + } else { + jdo.setTaskGroupId(this.taskGroupId); + } + jdo.setJobId(this.jobId); + jdo.setJobVersion(this.jobVersion); + jdo.setWindowStart(this.windowStart); + jdo.setWindowEnd(this.windowEnd); + jdo.setJobStartTime(jobStartTime); + jdo.setJobRunTimeMs(System.currentTimeMillis() - jobStartTime.getTime()); + jdo.setJobPriority(this.priority); + jdo.setChannelNum(this.channelNumber); + jdo.setCluster(this.cluster); + jdo.setJobDomain(this.jobDomain); + jdo.setSrcType(this.srcType); + jdo.setDstType(this.dstType); + jdo.setSrcGuid(this.srcGuid); + jdo.setDstGuid(this.dstGuid); + jdo.setHostAddress(HostUtils.IP); + + //sum + jdo.setTaskTotalTimeMs(sumPerf4Report4NotEnd.totalTaskRunTimeInMs + sumPerf4Report.totalTaskRunTimeInMs); + jdo.setOdpsBlockCloseTimeMs(sumPerf4Report4NotEnd.odpsCloseTimeInMs + sumPerf4Report.odpsCloseTimeInMs); + jdo.setSqlQueryTimeMs(sumPerf4Report4NotEnd.sqlQueryTimeInMs + sumPerf4Report.sqlQueryTimeInMs); + jdo.setResultNextTimeMs(sumPerf4Report4NotEnd.resultNextTimeInMs + sumPerf4Report.resultNextTimeInMs); + + return jdo; + } catch (Exception e) { + // do nothing + } + + return null; + } + + private void sumPerf4EndPrint(List totalEndReport) { + if (!enable || totalEndReport == null) { + return; + } + + for (PerfRecord perfRecord : totalEndReport) { + SumPerfRecord4Print oldPrefRecord4Print = perfRecordMaps4print.putIfAbsent(perfRecord.getPhase(), new SumPerfRecord4Print()); + if(oldPrefRecord4Print != null){ + oldPrefRecord4Print.add(perfRecord); + }else { + perfRecordMaps4print.get(perfRecord.getPhase()).add(perfRecord); + } + } + + totalEndReport.clear(); + } + + public void setChannelNumber(int needChannelNumber) { + this.channelNumber = needChannelNumber; + } + + + public static class SumPerf4Report { + long totalTaskRunTimeInMs = 0L; + long odpsCloseTimeInMs = 0L; + long sqlQueryTimeInMs = 0L; + long resultNextTimeInMs = 
0L; + + public void add(long curNanoTime, PerfRecord perfRecord) { + try { + long runTimeEndInMs; + if (perfRecord.getElapsedTimeInNs() == -1) { + runTimeEndInMs = (curNanoTime - perfRecord.getStartTimeInNs()) / 1000000; + } else { + runTimeEndInMs = perfRecord.getElapsedTimeInNs() / 1000000; + } + switch (perfRecord.getPhase()) { + case TASK_TOTAL: + totalTaskRunTimeInMs += runTimeEndInMs; + break; + case SQL_QUERY: + sqlQueryTimeInMs += runTimeEndInMs; + break; + case RESULT_NEXT_ALL: + resultNextTimeInMs += runTimeEndInMs; + break; + case ODPS_BLOCK_CLOSE: + odpsCloseTimeInMs += runTimeEndInMs; + break; + } + } catch (Exception e) { + //do nothing + } + } + + public long getTotalTaskRunTimeInMs() { + return totalTaskRunTimeInMs; + } + + public long getOdpsCloseTimeInMs() { + return odpsCloseTimeInMs; + } + + public long getSqlQueryTimeInMs() { + return sqlQueryTimeInMs; + } + + public long getResultNextTimeInMs() { + return resultNextTimeInMs; + } + } + + public static class SumPerfRecord4Print { + private long perfTimeTotal = 0; + private long averageTime = 0; + private long maxTime = 0; + private int maxTaskId = -1; + private int maxTaskGroupId = -1; + private int totalCount = 0; + + private long recordsTotal = 0; + private long sizesTotal = 0; + private long averageRecords = 0; + private long averageBytes = 0; + private long maxRecord = 0; + private long maxByte = 0; + private int maxTaskId4Records = -1; + private int maxTGID4Records = -1; + + public void add(PerfRecord perfRecord) { + if (perfRecord == null) { + return; + } + perfTimeTotal += perfRecord.getElapsedTimeInNs(); + if (perfRecord.getElapsedTimeInNs() >= maxTime) { + maxTime = perfRecord.getElapsedTimeInNs(); + maxTaskId = perfRecord.getTaskId(); + maxTaskGroupId = perfRecord.getTaskGroupId(); + } + + recordsTotal += perfRecord.getCount(); + sizesTotal += perfRecord.getSize(); + if (perfRecord.getCount() >= maxRecord) { + maxRecord = perfRecord.getCount(); + maxByte = perfRecord.getSize(); + maxTaskId4Records = perfRecord.getTaskId(); + maxTGID4Records = perfRecord.getTaskGroupId(); + } + + totalCount++; + } + + public long getPerfTimeTotal() { + return perfTimeTotal; + } + + public long getAverageTime() { + if (totalCount > 0) { + averageTime = perfTimeTotal / totalCount; + } + return averageTime; + } + + public long getMaxTime() { + return maxTime; + } + + public int getMaxTaskId() { + return maxTaskId; + } + + public int getMaxTaskGroupId() { + return maxTaskGroupId; + } + + public long getRecordsTotal() { + return recordsTotal; + } + + public long getSizesTotal() { + return sizesTotal; + } + + public long getAverageRecords() { + if (totalCount > 0) { + averageRecords = recordsTotal / totalCount; + } + return averageRecords; + } + + public long getAverageBytes() { + if (totalCount > 0) { + averageBytes = sizesTotal / totalCount; + } + return averageBytes; + } + + public long getMaxRecord() { + return maxRecord; + } + + public long getMaxByte() { + return maxByte; + } + + public int getMaxTaskId4Records() { + return maxTaskId4Records; + } + + public int getMaxTGID4Records() { + return maxTGID4Records; + } + + public int getTotalCount() { + return totalCount; + } + } + + class JobStatisticsDto2 { + + private Long id; + private Date gmtCreate; + private Date gmtModified; + private Long instId; + private Long jobId; + private Long jobVersion; + private Integer taskGroupId; + private Date windowStart; + private Date windowEnd; + private Date jobStartTime; + private Date jobEndTime; + private Long jobRunTimeMs; + private 
Integer jobPriority; + private Integer channelNum; + private String cluster; + private String jobDomain; + private String srcType; + private String dstType; + private String srcGuid; + private String dstGuid; + private Long records; + private Long bytes; + private Long speedRecord; + private Long speedByte; + private String stagePercent; + private Long errorRecord; + private Long errorBytes; + private Long waitReadTimeMs; + private Long waitWriteTimeMs; + private Long odpsBlockCloseTimeMs; + private Long sqlQueryTimeMs; + private Long resultNextTimeMs; + private Long taskTotalTimeMs; + private String hostAddress; + + public Long getId() { + return id; + } + + public Date getGmtCreate() { + return gmtCreate; + } + + public Date getGmtModified() { + return gmtModified; + } + + public Long getInstId() { + return instId; + } + + public Long getJobId() { + return jobId; + } + + public Long getJobVersion() { + return jobVersion; + } + + public Integer getTaskGroupId() { + return taskGroupId; + } + + public Date getWindowStart() { + return windowStart; + } + + public Date getWindowEnd() { + return windowEnd; + } + + public Date getJobStartTime() { + return jobStartTime; + } + + public Date getJobEndTime() { + return jobEndTime; + } + + public Long getJobRunTimeMs() { + return jobRunTimeMs; + } + + public Integer getJobPriority() { + return jobPriority; + } + + public Integer getChannelNum() { + return channelNum; + } + + public String getCluster() { + return cluster; + } + + public String getJobDomain() { + return jobDomain; + } + + public String getSrcType() { + return srcType; + } + + public String getDstType() { + return dstType; + } + + public String getSrcGuid() { + return srcGuid; + } + + public String getDstGuid() { + return dstGuid; + } + + public Long getRecords() { + return records; + } + + public Long getBytes() { + return bytes; + } + + public Long getSpeedRecord() { + return speedRecord; + } + + public Long getSpeedByte() { + return speedByte; + } + + public String getStagePercent() { + return stagePercent; + } + + public Long getErrorRecord() { + return errorRecord; + } + + public Long getErrorBytes() { + return errorBytes; + } + + public Long getWaitReadTimeMs() { + return waitReadTimeMs; + } + + public Long getWaitWriteTimeMs() { + return waitWriteTimeMs; + } + + public Long getOdpsBlockCloseTimeMs() { + return odpsBlockCloseTimeMs; + } + + public Long getSqlQueryTimeMs() { + return sqlQueryTimeMs; + } + + public Long getResultNextTimeMs() { + return resultNextTimeMs; + } + + public Long getTaskTotalTimeMs() { + return taskTotalTimeMs; + } + + public String getHostAddress() { + return hostAddress; + } + + public void setId(Long id) { + this.id = id; + } + + public void setGmtCreate(Date gmtCreate) { + this.gmtCreate = gmtCreate; + } + + public void setGmtModified(Date gmtModified) { + this.gmtModified = gmtModified; + } + + public void setInstId(Long instId) { + this.instId = instId; + } + + public void setJobId(Long jobId) { + this.jobId = jobId; + } + + public void setJobVersion(Long jobVersion) { + this.jobVersion = jobVersion; + } + + public void setTaskGroupId(Integer taskGroupId) { + this.taskGroupId = taskGroupId; + } + + public void setWindowStart(Date windowStart) { + this.windowStart = windowStart; + } + + public void setWindowEnd(Date windowEnd) { + this.windowEnd = windowEnd; + } + + public void setJobStartTime(Date jobStartTime) { + this.jobStartTime = jobStartTime; + } + + public void setJobEndTime(Date jobEndTime) { + this.jobEndTime = jobEndTime; + } + + public 
void setJobRunTimeMs(Long jobRunTimeMs) { + this.jobRunTimeMs = jobRunTimeMs; + } + + public void setJobPriority(Integer jobPriority) { + this.jobPriority = jobPriority; + } + + public void setChannelNum(Integer channelNum) { + this.channelNum = channelNum; + } + + public void setCluster(String cluster) { + this.cluster = cluster; + } + + public void setJobDomain(String jobDomain) { + this.jobDomain = jobDomain; + } + + public void setSrcType(String srcType) { + this.srcType = srcType; + } + + public void setDstType(String dstType) { + this.dstType = dstType; + } + + public void setSrcGuid(String srcGuid) { + this.srcGuid = srcGuid; + } + + public void setDstGuid(String dstGuid) { + this.dstGuid = dstGuid; + } + + public void setRecords(Long records) { + this.records = records; + } + + public void setBytes(Long bytes) { + this.bytes = bytes; + } + + public void setSpeedRecord(Long speedRecord) { + this.speedRecord = speedRecord; + } + + public void setSpeedByte(Long speedByte) { + this.speedByte = speedByte; + } + + public void setStagePercent(String stagePercent) { + this.stagePercent = stagePercent; + } + + public void setErrorRecord(Long errorRecord) { + this.errorRecord = errorRecord; + } + + public void setErrorBytes(Long errorBytes) { + this.errorBytes = errorBytes; + } + + public void setWaitReadTimeMs(Long waitReadTimeMs) { + this.waitReadTimeMs = waitReadTimeMs; + } + + public void setWaitWriteTimeMs(Long waitWriteTimeMs) { + this.waitWriteTimeMs = waitWriteTimeMs; + } + + public void setOdpsBlockCloseTimeMs(Long odpsBlockCloseTimeMs) { + this.odpsBlockCloseTimeMs = odpsBlockCloseTimeMs; + } + + public void setSqlQueryTimeMs(Long sqlQueryTimeMs) { + this.sqlQueryTimeMs = sqlQueryTimeMs; + } + + public void setResultNextTimeMs(Long resultNextTimeMs) { + this.resultNextTimeMs = resultNextTimeMs; + } + + public void setTaskTotalTimeMs(Long taskTotalTimeMs) { + this.taskTotalTimeMs = taskTotalTimeMs; + } + + public void setHostAddress(String hostAddress) { + this.hostAddress = hostAddress; + } + } +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/statistics/VMInfo.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/statistics/VMInfo.java new file mode 100644 index 000000000..b8f652c56 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/statistics/VMInfo.java @@ -0,0 +1,416 @@ +package com.alibaba.datax.common.statistics; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.lang.management.GarbageCollectorMXBean; +import java.lang.management.MemoryPoolMXBean; +import java.lang.management.OperatingSystemMXBean; +import java.lang.management.RuntimeMXBean; +import java.lang.reflect.Method; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +/** + * Created by liqiang on 15/11/12. + */ +public class VMInfo { + private static final Logger LOG = LoggerFactory.getLogger(VMInfo.class); + static final long MB = 1024L * 1024L; + static final long GB = 1024L * 1024L * 1024L; + public static final Object LOCK = new Object(); + private volatile static VMInfo vmInfo; + + /** + * @return null or vmInfo. null is something error, job no care it. 
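+     * <p>
+     * Typical usage (an illustrative sketch; LOG stands for any slf4j logger):
+     * <pre>
+     * VMInfo vmInfo = VMInfo.getVmInfo();
+     * if (vmInfo != null) {
+     *     LOG.info(vmInfo.toString());
+     *     vmInfo.getDelta(true);
+     * }
+     * </pre>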
+ */ + public static VMInfo getVmInfo() { + if (vmInfo == null) { + synchronized (LOCK) { + if (vmInfo == null) { + try { + vmInfo = new VMInfo(); + } catch (Exception e) { + LOG.warn("no need care, the fail is ignored : vmInfo init failed " + e.getMessage(), e); + } + } + } + + } + return vmInfo; + } + + // 数据的MxBean + private final OperatingSystemMXBean osMXBean; + private final RuntimeMXBean runtimeMXBean; + private final List garbageCollectorMXBeanList; + private final List memoryPoolMXBeanList; + /** + * 静态信息 + */ + private final String osInfo; + private final String jvmInfo; + + /** + * cpu个数 + */ + private final int totalProcessorCount; + + /** + * 机器的各个状态,用于中间打印和统计上报 + */ + private final PhyOSStatus startPhyOSStatus; + private final ProcessCpuStatus processCpuStatus = new ProcessCpuStatus(); + private final ProcessGCStatus processGCStatus = new ProcessGCStatus(); + private final ProcessMemoryStatus processMomoryStatus = new ProcessMemoryStatus(); + //ms + private long lastUpTime = 0; + //nano + private long lastProcessCpuTime = 0; + + + private VMInfo() { + //初始化静态信息 + osMXBean = java.lang.management.ManagementFactory.getOperatingSystemMXBean(); + runtimeMXBean = java.lang.management.ManagementFactory.getRuntimeMXBean(); + garbageCollectorMXBeanList = java.lang.management.ManagementFactory.getGarbageCollectorMXBeans(); + memoryPoolMXBeanList = java.lang.management.ManagementFactory.getMemoryPoolMXBeans(); + + osInfo = runtimeMXBean.getVmVendor() + " " + runtimeMXBean.getSpecVersion() + " " + runtimeMXBean.getVmVersion(); + jvmInfo = osMXBean.getName() + " " + osMXBean.getArch() + " " + osMXBean.getVersion(); + totalProcessorCount = osMXBean.getAvailableProcessors(); + + //构建startPhyOSStatus + startPhyOSStatus = new PhyOSStatus(); + LOG.info("VMInfo# operatingSystem class => " + osMXBean.getClass().getName()); + if (VMInfo.isSunOsMBean(osMXBean)) { + { + startPhyOSStatus.totalPhysicalMemory = VMInfo.getLongFromOperatingSystem(osMXBean, "getTotalPhysicalMemorySize"); + startPhyOSStatus.freePhysicalMemory = VMInfo.getLongFromOperatingSystem(osMXBean, "getFreePhysicalMemorySize"); + startPhyOSStatus.maxFileDescriptorCount = VMInfo.getLongFromOperatingSystem(osMXBean, "getMaxFileDescriptorCount"); + startPhyOSStatus.currentOpenFileDescriptorCount = VMInfo.getLongFromOperatingSystem(osMXBean, "getOpenFileDescriptorCount"); + } + } + + //初始化processGCStatus; + for (GarbageCollectorMXBean garbage : garbageCollectorMXBeanList) { + GCStatus gcStatus = new GCStatus(); + gcStatus.name = garbage.getName(); + processGCStatus.gcStatusMap.put(garbage.getName(), gcStatus); + } + + //初始化processMemoryStatus + if (memoryPoolMXBeanList != null && !memoryPoolMXBeanList.isEmpty()) { + for (MemoryPoolMXBean pool : memoryPoolMXBeanList) { + MemoryStatus memoryStatus = new MemoryStatus(); + memoryStatus.name = pool.getName(); + memoryStatus.initSize = pool.getUsage().getInit(); + memoryStatus.maxSize = pool.getUsage().getMax(); + processMomoryStatus.memoryStatusMap.put(pool.getName(), memoryStatus); + } + } + } + + @Override + public String toString() { + return "the machine info => \n\n" + + "\tosInfo:\t" + osInfo + "\n" + + "\tjvmInfo:\t" + jvmInfo + "\n" + + "\tcpu num:\t" + totalProcessorCount + "\n\n" + + startPhyOSStatus.toString() + "\n" + + processGCStatus.toString() + "\n" + + processMomoryStatus.toString() + "\n"; + } + + public String totalString() { + return (processCpuStatus.getTotalString() + processGCStatus.getTotalString()); + } + + public void getDelta() { + getDelta(true); + } + + public 
synchronized void getDelta(boolean print) { + + try { + if (VMInfo.isSunOsMBean(osMXBean)) { + long curUptime = runtimeMXBean.getUptime(); + long curProcessTime = getLongFromOperatingSystem(osMXBean, "getProcessCpuTime"); + //百分比, uptime是ms,processTime是nano + if ((curUptime > lastUpTime) && (curProcessTime >= lastProcessCpuTime)) { + float curDeltaCpu = (float) (curProcessTime - lastProcessCpuTime) / ((curUptime - lastUpTime) * totalProcessorCount * 10000); + processCpuStatus.setMaxMinCpu(curDeltaCpu); + processCpuStatus.averageCpu = (float) curProcessTime / (curUptime * totalProcessorCount * 10000); + + lastUpTime = curUptime; + lastProcessCpuTime = curProcessTime; + } + } + + for (GarbageCollectorMXBean garbage : garbageCollectorMXBeanList) { + + GCStatus gcStatus = processGCStatus.gcStatusMap.get(garbage.getName()); + if (gcStatus == null) { + gcStatus = new GCStatus(); + gcStatus.name = garbage.getName(); + processGCStatus.gcStatusMap.put(garbage.getName(), gcStatus); + } + + long curTotalGcCount = garbage.getCollectionCount(); + gcStatus.setCurTotalGcCount(curTotalGcCount); + + long curtotalGcTime = garbage.getCollectionTime(); + gcStatus.setCurTotalGcTime(curtotalGcTime); + } + + if (memoryPoolMXBeanList != null && !memoryPoolMXBeanList.isEmpty()) { + for (MemoryPoolMXBean pool : memoryPoolMXBeanList) { + + MemoryStatus memoryStatus = processMomoryStatus.memoryStatusMap.get(pool.getName()); + if (memoryStatus == null) { + memoryStatus = new MemoryStatus(); + memoryStatus.name = pool.getName(); + processMomoryStatus.memoryStatusMap.put(pool.getName(), memoryStatus); + } + memoryStatus.commitedSize = pool.getUsage().getCommitted(); + memoryStatus.setMaxMinUsedSize(pool.getUsage().getUsed()); + long maxMemory = memoryStatus.commitedSize > 0 ? memoryStatus.commitedSize : memoryStatus.maxSize; + memoryStatus.setMaxMinPercent(maxMemory > 0 ? 
(float) 100 * memoryStatus.usedSize / maxMemory : -1); + } + } + + if (print) { + LOG.info(processCpuStatus.getDeltaString() + processMomoryStatus.getDeltaString() + processGCStatus.getDeltaString()); + } + + } catch (Exception e) { + LOG.warn("no need care, the fail is ignored : vmInfo getDelta failed " + e.getMessage(), e); + } + } + + public static boolean isSunOsMBean(OperatingSystemMXBean operatingSystem) { + final String className = operatingSystem.getClass().getName(); + + return "com.sun.management.UnixOperatingSystem".equals(className); + } + + public static long getLongFromOperatingSystem(OperatingSystemMXBean operatingSystem, String methodName) { + try { + final Method method = operatingSystem.getClass().getMethod(methodName, (Class[]) null); + method.setAccessible(true); + return (Long) method.invoke(operatingSystem, (Object[]) null); + } catch (final Exception e) { + LOG.info(String.format("OperatingSystemMXBean %s failed, Exception = %s ", methodName, e.getMessage())); + } + + return -1; + } + + private class PhyOSStatus { + long totalPhysicalMemory = -1; + long freePhysicalMemory = -1; + long maxFileDescriptorCount = -1; + long currentOpenFileDescriptorCount = -1; + + @Override + public String toString() { + return String.format("\ttotalPhysicalMemory:\t%,.2fG\n" + + "\tfreePhysicalMemory:\t%,.2fG\n" + + "\tmaxFileDescriptorCount:\t%s\n" + + "\tcurrentOpenFileDescriptorCount:\t%s\n", + (float) totalPhysicalMemory / GB, (float) freePhysicalMemory / GB, maxFileDescriptorCount, currentOpenFileDescriptorCount); + } + } + + private class ProcessCpuStatus { + // 百分比的值 比如30.0 表示30.0% + float maxDeltaCpu = -1; + float minDeltaCpu = -1; + float curDeltaCpu = -1; + float averageCpu = -1; + + public void setMaxMinCpu(float curCpu) { + this.curDeltaCpu = curCpu; + if (maxDeltaCpu < curCpu) { + maxDeltaCpu = curCpu; + } + + if (minDeltaCpu == -1 || minDeltaCpu > curCpu) { + minDeltaCpu = curCpu; + } + } + + public String getDeltaString() { + StringBuilder sb = new StringBuilder(); + sb.append("\n\t [delta cpu info] => \n"); + sb.append("\t\t"); + sb.append(String.format("%-30s | %-30s | %-30s | %-30s \n", "curDeltaCpu", "averageCpu", "maxDeltaCpu", "minDeltaCpu")); + sb.append("\t\t"); + sb.append(String.format("%-30s | %-30s | %-30s | %-30s \n", + String.format("%,.2f%%", processCpuStatus.curDeltaCpu), + String.format("%,.2f%%", processCpuStatus.averageCpu), + String.format("%,.2f%%", processCpuStatus.maxDeltaCpu), + String.format("%,.2f%%\n", processCpuStatus.minDeltaCpu))); + + return sb.toString(); + } + + public String getTotalString() { + StringBuilder sb = new StringBuilder(); + sb.append("\n\t [total cpu info] => \n"); + sb.append("\t\t"); + sb.append(String.format("%-30s | %-30s | %-30s \n", "averageCpu", "maxDeltaCpu", "minDeltaCpu")); + sb.append("\t\t"); + sb.append(String.format("%-30s | %-30s | %-30s \n", + String.format("%,.2f%%", processCpuStatus.averageCpu), + String.format("%,.2f%%", processCpuStatus.maxDeltaCpu), + String.format("%,.2f%%\n", processCpuStatus.minDeltaCpu))); + + return sb.toString(); + } + + } + + private class ProcessGCStatus { + final Map gcStatusMap = new HashMap(); + + @Override + public String toString() { + return "\tGC Names\t" + gcStatusMap.keySet() + "\n"; + } + + public String getDeltaString() { + StringBuilder sb = new StringBuilder(); + sb.append("\n\t [delta gc info] => \n"); + sb.append("\t\t "); + sb.append(String.format("%-20s | %-18s | %-18s | %-18s | %-18s | %-18s | %-18s | %-18s | %-18s \n", "NAME", "curDeltaGCCount", "totalGCCount", 
"maxDeltaGCCount", "minDeltaGCCount", "curDeltaGCTime", "totalGCTime", "maxDeltaGCTime", "minDeltaGCTime")); + for (GCStatus gc : gcStatusMap.values()) { + sb.append("\t\t "); + sb.append(String.format("%-20s | %-18s | %-18s | %-18s | %-18s | %-18s | %-18s | %-18s | %-18s \n", + gc.name, gc.curDeltaGCCount, gc.totalGCCount, gc.maxDeltaGCCount, gc.minDeltaGCCount, + String.format("%,.3fs", (float) gc.curDeltaGCTime / 1000), + String.format("%,.3fs", (float) gc.totalGCTime / 1000), + String.format("%,.3fs", (float) gc.maxDeltaGCTime / 1000), + String.format("%,.3fs", (float) gc.minDeltaGCTime / 1000))); + + } + return sb.toString(); + } + + public String getTotalString() { + StringBuilder sb = new StringBuilder(); + sb.append("\n\t [total gc info] => \n"); + sb.append("\t\t "); + sb.append(String.format("%-20s | %-18s | %-18s | %-18s | %-18s | %-18s | %-18s \n", "NAME", "totalGCCount", "maxDeltaGCCount", "minDeltaGCCount", "totalGCTime", "maxDeltaGCTime", "minDeltaGCTime")); + for (GCStatus gc : gcStatusMap.values()) { + sb.append("\t\t "); + sb.append(String.format("%-20s | %-18s | %-18s | %-18s | %-18s | %-18s | %-18s \n", + gc.name, gc.totalGCCount, gc.maxDeltaGCCount, gc.minDeltaGCCount, + String.format("%,.3fs", (float) gc.totalGCTime / 1000), + String.format("%,.3fs", (float) gc.maxDeltaGCTime / 1000), + String.format("%,.3fs", (float) gc.minDeltaGCTime / 1000))); + + } + return sb.toString(); + } + } + + private class ProcessMemoryStatus { + final Map memoryStatusMap = new HashMap(); + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("\t"); + sb.append(String.format("%-30s | %-30s | %-30s \n", "MEMORY_NAME", "allocation_size", "init_size")); + for (MemoryStatus ms : memoryStatusMap.values()) { + sb.append("\t"); + sb.append(String.format("%-30s | %-30s | %-30s \n", + ms.name, String.format("%,.2fMB", (float) ms.maxSize / MB), String.format("%,.2fMB", (float) ms.initSize / MB))); + } + return sb.toString(); + } + + public String getDeltaString() { + StringBuilder sb = new StringBuilder(); + sb.append("\n\t [delta memory info] => \n"); + sb.append("\t\t "); + sb.append(String.format("%-30s | %-30s | %-30s | %-30s | %-30s \n", "NAME", "used_size", "used_percent", "max_used_size", "max_percent")); + for (MemoryStatus ms : memoryStatusMap.values()) { + sb.append("\t\t "); + sb.append(String.format("%-30s | %-30s | %-30s | %-30s | %-30s \n", + ms.name, String.format("%,.2f", (float) ms.usedSize / MB) + "MB", + String.format("%,.2f", (float) ms.percent) + "%", + String.format("%,.2f", (float) ms.maxUsedSize / MB) + "MB", + String.format("%,.2f", (float) ms.maxpercent) + "%")); + + } + return sb.toString(); + } + } + + private class GCStatus { + String name; + long maxDeltaGCCount = -1; + long minDeltaGCCount = -1; + long curDeltaGCCount; + long totalGCCount = 0; + long maxDeltaGCTime = -1; + long minDeltaGCTime = -1; + long curDeltaGCTime; + long totalGCTime = 0; + + public void setCurTotalGcCount(long curTotalGcCount) { + this.curDeltaGCCount = curTotalGcCount - totalGCCount; + this.totalGCCount = curTotalGcCount; + + if (maxDeltaGCCount < curDeltaGCCount) { + maxDeltaGCCount = curDeltaGCCount; + } + + if (minDeltaGCCount == -1 || minDeltaGCCount > curDeltaGCCount) { + minDeltaGCCount = curDeltaGCCount; + } + } + + public void setCurTotalGcTime(long curTotalGcTime) { + this.curDeltaGCTime = curTotalGcTime - totalGCTime; + this.totalGCTime = curTotalGcTime; + + if (maxDeltaGCTime < curDeltaGCTime) { + maxDeltaGCTime = curDeltaGCTime; + } + + if 
(minDeltaGCTime == -1 || minDeltaGCTime > curDeltaGCTime) { + minDeltaGCTime = curDeltaGCTime; + } + } + } + + private class MemoryStatus { + String name; + long initSize; + long maxSize; + long commitedSize; + long usedSize; + float percent; + long maxUsedSize = -1; + float maxpercent = 0; + + void setMaxMinUsedSize(long curUsedSize) { + if (maxUsedSize < curUsedSize) { + maxUsedSize = curUsedSize; + } + this.usedSize = curUsedSize; + } + + void setMaxMinPercent(float curPercent) { + if (maxpercent < curPercent) { + maxpercent = curPercent; + } + this.percent = curPercent; + } + } + +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/util/Configuration.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/util/Configuration.java new file mode 100644 index 000000000..c6d5d2a1a --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/util/Configuration.java @@ -0,0 +1,1087 @@ +package com.alibaba.datax.common.util; + +import com.alibaba.datax.common.exception.CommonErrorCode; +import com.alibaba.datax.common.exception.DataXException; +import com.alibaba.datax.common.spi.ErrorCode; +import com.webank.wedatasphere.exchangis.datax.util.Json; +import org.apache.commons.io.IOUtils; +import org.apache.commons.lang3.CharUtils; +import org.apache.commons.lang3.StringUtils; +import org.apache.commons.lang3.builder.ToStringBuilder; + +import java.io.*; +import java.util.*; + +/** + * Configuration 提供多级JSON配置信息无损存储
+ *
+ * 实例代码:
+ * <pre>
+ * // 获取job的配置信息
+ * Configuration configuration = Configuration.from(new File("Config.json"));
+ * String jobContainerClass = configuration.getString("core.container.job.class");
+ *
+ * // 设置多级List
+ * configuration.set("job.reader.parameter.jdbcUrl", Arrays.asList(new String[]{"jdbc", "jdbc"}));
+ *
+ * // 合并Configuration
+ * configuration.merge(another);
+ * </pre>
+ *
+ * Configuration 存在两种较好的实现方式:
+ * 第一种是将JSON配置信息中所有的Key全部打平,用a.b.c的级联方式作为Map的Key,内部使用一个Map保存信息;
+ * 第二种是将JSON的对象直接使用结构化树形结构保存。
+ *
+ * 目前使用的是第二种实现方式,使用第一种的问题在于:
+ * 1. 插入新对象比较难处理。例如a.b.c="bazhen",此时如果需要插入a="bazhen",也即是根目录下第一层所有类型全部要废弃,
+ *    使用"bazhen"作为value;第一种方式使用字符串表示key,难以处理这类问题。
+ * 2. 返回树形结构。例如 a.b.c.d = "bazhen",如果返回"a"下的所有元素,实际上是一个Map,需要合并处理。
+ * 3. 输出JSON。将上述对象转为JSON,要把上述Map的多级key转为树形结构,并输出为JSON。
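+ *
+ * 补充示例(仅为示意:其中的文件名 "job.json" 与各级key均为假设,并非本类的固定约定),展示带默认值的类型化读取与叶子key遍历:
+ * <pre>
+ * Configuration conf = Configuration.from(new File("job.json"));
+ * int channel = conf.getInt("job.setting.speed.channel", 1);
+ * for (Object key : conf.getKeys()) {
+ *     System.out.println(key + " = " + conf.getString(String.valueOf(key)));
+ * }
+ * </pre>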
+ */ +public class Configuration { + + /** + * 对于加密的keyPath,需要记录下来 + * 为的是后面分布式情况下将该值加密后抛到DataXServer中 + */ + private Set secretKeyPathSet = + new HashSet(); + + private Object root = null; + + /** + * 初始化空白的Configuration + */ + public static Configuration newDefault() { + return Configuration.from("{}"); + } + + /** + * 从JSON字符串加载Configuration + */ + public static Configuration from(String json) { + json = StrUtil.replaceVariable(json); + checkJSON(json); + + try { + return new Configuration(json); + } catch (Exception e) { + throw DataXException.asDataXException(CommonErrorCode.CONFIG_ERROR, + e); + } + + } + + /** + * 从包括json的File对象加载Configuration + */ + public static Configuration from(File file) { + try { + return Configuration.from(IOUtils + .toString(new FileInputStream(file))); + } catch (FileNotFoundException e) { + throw DataXException.asDataXException(CommonErrorCode.CONFIG_ERROR, + String.format("配置信息错误,您提供的配置文件[%s]不存在. 请检查您的配置文件.", file.getAbsolutePath())); + } catch (IOException e) { + throw DataXException.asDataXException( + CommonErrorCode.CONFIG_ERROR, + String.format("配置信息错误. 您提供配置文件[%s]读取失败,错误原因: %s. 请检查您的配置文件的权限设置.", + file.getAbsolutePath(), e)); + } + } + + /** + * 从包括json的InputStream对象加载Configuration + */ + public static Configuration from(InputStream is) { + try { + return Configuration.from(IOUtils.toString(is)); + } catch (IOException e) { + throw DataXException.asDataXException(CommonErrorCode.CONFIG_ERROR, + String.format("请检查您的配置文件. 您提供的配置文件读取失败,错误原因: %s. 请检查您的配置文件的权限设置.", e)); + } + } + + /** + * 从Map对象加载Configuration + */ + public static Configuration from(final Map object) { + return Configuration.from(Configuration.toJSONString(object)); + } + + /** + * 从List对象加载Configuration + */ + public static Configuration from(final List object) { + return Configuration.from(Configuration.toJSONString(object)); + } + + public String getNecessaryValue(String key, ErrorCode errorCode) { + String value = this.getString(key, null); + if (StringUtils.isBlank(value)) { + throw DataXException.asDataXException(errorCode, + String.format("您提供配置文件有误,[%s]是必填参数,不允许为空或者留白 .", key)); + } + + return value; + } + + public String getUnnecessaryValue(String key, String defaultValue, ErrorCode errorCode) { + String value = this.getString(key, defaultValue); + if (StringUtils.isBlank(value)) { + value = defaultValue; + } + return value; + } + + public Boolean getNecessaryBool(String key, ErrorCode errorCode) { + Boolean value = this.getBool(key); + if (value == null) { + throw DataXException.asDataXException(errorCode, + String.format("您提供配置文件有误,[%s]是必填参数,不允许为空或者留白 .", key)); + } + + return value; + } + + /** + * 根据用户提供的json path,寻址具体的对象。 + *
+ * NOTE: 目前仅支持Map以及List下标寻址, 例如:
+ *
+ * 对于如下JSON:
+ * <pre>
+ * {"a": {"b": {"c": [0,1,2,3]}}}
+ * </pre>
+ * config.get("")        返回整个Map
+ * config.get("a")       返回a下属整个Map
+ * config.get("a.b.c")   返回c对应的数组List
+ * config.get("a.b.c[0]") 返回数字0 + * + * @return Java表示的JSON对象,如果path不存在或者对象不存在,均返回null。 + */ + public Object get(final String path) { + this.checkPath(path); + try { + return this.findObject(path); + } catch (Exception e) { + return null; + } + } + + /** + * 用户指定部分path,获取Configuration的子集 + *
+ *
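+ * 示意用法(路径仅为假设的job配置节点,并非本类约定):
+ * <pre>
+ * Configuration readerConf = conf.getConfiguration("job.content[0].reader");
+ * </pre>
+ *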
+ * 如果path获取的路径或者对象不存在,返回null + */ + public Configuration getConfiguration(final String path) { + Object object = this.get(path); + if (null == object) { + return null; + } + + return Configuration.from(Configuration.toJSONString(object)); + } + + /** + * 根据用户提供的json path,寻址String对象 + * + * @return String对象,如果path不存在或者String不存在,返回null + */ + public String getString(final String path) { + Object string = this.get(path); + if (null == string) { + return null; + } + Class clazz = string.getClass(); + if(clazz.equals(String.class) || + clazz.isPrimitive() || isWrapClass(clazz)){ + return String.valueOf(string); + } + return Json.toJson(string, null); + } + + /** + * 根据用户提供的json path,寻址String对象,如果对象不存在,返回默认字符串 + * + * @return String对象,如果path不存在或者String不存在,返回默认字符串 + */ + public String getString(final String path, final String defaultValue) { + String result = this.getString(path); + + if (null == result) { + return defaultValue; + } + + return result; + } + + /** + * 根据用户提供的json path,寻址Character对象 + * + * @return Character对象,如果path不存在或者Character不存在,返回null + */ + public Character getChar(final String path) { + String result = this.getString(path); + if (null == result) { + return null; + } + + try { + return CharUtils.toChar(result); + } catch (Exception e) { + throw DataXException.asDataXException( + CommonErrorCode.CONFIG_ERROR, + String.format("任务读取配置文件出错. 因为配置文件路径[%s] 值非法,期望是字符类型: %s. 请检查您的配置并作出修改.", path, + e.getMessage())); + } + } + + /** + * 根据用户提供的json path,寻址Boolean对象,如果对象不存在,返回默认Character对象 + * + * @return Character对象,如果path不存在或者Character不存在,返回默认Character对象 + */ + public Character getChar(final String path, char defaultValue) { + Character result = this.getChar(path); + if (null == result) { + return defaultValue; + } + return result; + } + + /** + * 根据用户提供的json path,寻址Boolean对象 + * + * @return Boolean对象,如果path值非true,false ,将报错.特别注意:当 path 不存在时,会返回:null. + */ + public Boolean getBool(final String path) { + String result = this.getString(path); + + if (null == result) { + return null; + } else if ("true".equalsIgnoreCase(result)) { + return Boolean.TRUE; + } else if ("false".equalsIgnoreCase(result)) { + return Boolean.FALSE; + } else { + throw DataXException.asDataXException(CommonErrorCode.CONFIG_ERROR, + String.format("您提供的配置信息有误,因为从[%s]获取的值[%s]无法转换为bool类型. 请检查源表的配置并且做出相应的修改.", + path, result)); + } + + } + + /** + * 根据用户提供的json path,寻址Boolean对象,如果对象不存在,返回默认Boolean对象 + * + * @return Boolean对象,如果path不存在或者Boolean不存在,返回默认Boolean对象 + */ + public Boolean getBool(final String path, boolean defaultValue) { + Boolean result = this.getBool(path); + if (null == result) { + return defaultValue; + } + return result; + } + + /** + * 根据用户提供的json path,寻址Integer对象 + * + * @return Integer对象,如果path不存在或者Integer不存在,返回null + */ + public Integer getInt(final String path) { + String result = this.getString(path); + if (null == result) { + return null; + } + + try { + return Integer.valueOf(result); + } catch (Exception e) { + throw DataXException.asDataXException( + CommonErrorCode.CONFIG_ERROR, + String.format("任务读取配置文件出错. 配置文件路径[%s] 值非法, 期望是整数类型: %s. 
请检查您的配置并作出修改.", path, + e.getMessage())); + } + } + + /** + * 根据用户提供的json path,寻址Integer对象,如果对象不存在,返回默认Integer对象 + * + * @return Integer对象,如果path不存在或者Integer不存在,返回默认Integer对象 + */ + public Integer getInt(final String path, int defaultValue) { + Integer object = this.getInt(path); + if (null == object) { + return defaultValue; + } + return object; + } + + /** + * 根据用户提供的json path,寻址Long对象 + * + * @return Long对象,如果path不存在或者Long不存在,返回null + */ + public Long getLong(final String path) { + String result = this.getString(path); + if (StringUtils.isBlank(result)) { + return null; + } + + try { + return Long.valueOf(result); + } catch (Exception e) { + throw DataXException.asDataXException( + CommonErrorCode.CONFIG_ERROR, + String.format("任务读取配置文件出错. 配置文件路径[%s] 值非法, 期望是整数类型: %s. 请检查您的配置并作出修改.", path, + e.getMessage())); + } + } + + /** + * 根据用户提供的json path,寻址Long对象,如果对象不存在,返回默认Long对象 + * + * @return Long对象,如果path不存在或者Integer不存在,返回默认Long对象 + */ + public Long getLong(final String path, long defaultValue) { + Long result = this.getLong(path); + if (null == result) { + return defaultValue; + } + return result; + } + + /** + * 根据用户提供的json path,寻址Double对象 + * + * @return Double对象,如果path不存在或者Double不存在,返回null + */ + public Double getDouble(final String path) { + String result = this.getString(path); + if (StringUtils.isBlank(result)) { + return null; + } + + try { + return Double.valueOf(result); + } catch (Exception e) { + throw DataXException.asDataXException( + CommonErrorCode.CONFIG_ERROR, + String.format("任务读取配置文件出错. 配置文件路径[%s] 值非法, 期望是浮点类型: %s. 请检查您的配置并作出修改.", path, + e.getMessage())); + } + } + + /** + * 根据用户提供的json path,寻址Double对象,如果对象不存在,返回默认Double对象 + * + * @return Double对象,如果path不存在或者Double不存在,返回默认Double对象 + */ + public Double getDouble(final String path, double defaultValue) { + Double result = this.getDouble(path); + if (null == result) { + return defaultValue; + } + return result; + } + + /** + * 根据用户提供的json path,寻址List对象,如果对象不存在,返回null + */ + @SuppressWarnings("unchecked") + public List getList(final String path) { + List list = this.get(path, List.class); + if (null == list) { + return null; + } + return list; + } + + /** + * 根据用户提供的json path,寻址List对象,如果对象不存在,返回null + */ + @SuppressWarnings("unchecked") + public List getList(final String path, Class t) { + Object object = this.get(path, List.class); + if (null == object) { + return null; + } + + List result = new ArrayList(); + + List origin = (List) object; + for (final Object each : origin) { + result.add((T) each); + } + + return result; + } + + /** + * 根据用户提供的json path,寻址List对象,如果对象不存在,返回默认List + */ + @SuppressWarnings("unchecked") + public List getList(final String path, + final List defaultList) { + Object object = this.getList(path); + if (null == object) { + return defaultList; + } + return (List) object; + } + + /** + * 根据用户提供的json path,寻址List对象,如果对象不存在,返回默认List + */ + public List getList(final String path, final List defaultList, + Class t) { + List list = this.getList(path, t); + if (null == list) { + return defaultList; + } + return list; + } + + /** + * 根据用户提供的json path,寻址包含Configuration的List,如果对象不存在,返回默认null + */ + public List getListConfiguration(final String path) { + List lists = getList(path); + if (lists == null) { + return null; + } + + List result = new ArrayList(); + for (final Object object : lists) { + result.add(Configuration.from(Configuration.toJSONString(object))); + } + return result; + } + + /** + * 根据用户提供的json path,寻址Map对象,如果对象不存在,返回null + */ + @SuppressWarnings("unchecked") + public Map getMap(final String 
path) { + Map result = this.get(path, Map.class); + if (null == result) { + return null; + } + return result; + } + + /** + * 根据用户提供的json path,寻址Map对象,如果对象不存在,返回null; + */ + @SuppressWarnings("unchecked") + public Map getMap(final String path, Class t) { + Map map = this.get(path, Map.class); + if (null == map) { + return null; + } + + Map result = new HashMap(); + for (final String key : map.keySet()) { + result.put(key, (T) map.get(key)); + } + + return result; + } + + /** + * 根据用户提供的json path,寻址Map对象,如果对象不存在,返回默认map + */ + @SuppressWarnings("unchecked") + public Map getMap(final String path, + final Map defaultMap) { + Object object = this.getMap(path); + if (null == object) { + return defaultMap; + } + return (Map) object; + } + + /** + * 根据用户提供的json path,寻址Map对象,如果对象不存在,返回默认map + */ + public Map getMap(final String path, + final Map defaultMap, Class t) { + Map result = getMap(path, t); + if (null == result) { + return defaultMap; + } + return result; + } + + /** + * 根据用户提供的json path,寻址包含Configuration的Map,如果对象不存在,返回默认null + */ + @SuppressWarnings("unchecked") + public Map getMapConfiguration(final String path) { + Map map = this.get(path, Map.class); + if (null == map) { + return null; + } + + Map result = new HashMap(); + for (final String key : map.keySet()) { + result.put(key, Configuration.from(Configuration.toJSONString(map + .get(key)))); + } + + return result; + } + + /** + * 根据用户提供的json path,寻址具体的对象,并转为用户提供的类型 + *
+ * NOTE: 目前仅支持Map以及List下标寻址, 例如:
+ *
+ * 对于如下JSON:
+ * <pre>
+ * {"a": {"b": {"c": [0,1,2,3]}}}
+ * </pre>
+ * config.get("")        返回整个Map
+ * config.get("a")       返回a下属整个Map
+ * config.get("a.b.c")   返回c对应的数组List
+ * config.get("a.b.c[0]") 返回数字0 + * + * @return Java表示的JSON对象,如果转型失败,将抛出异常 + */ + @SuppressWarnings("unchecked") + public T get(final String path, Class clazz) { + this.checkPath(path); + return (T) this.get(path); + } + + /** + * 格式化Configuration输出 + */ + public String beautify() { + return Json.toJson(this.getInternal(), null, true); + } + + /** + * 根据用户提供的json path,插入指定对象,并返回之前存在的对象(如果存在) + *
+ * 目前仅支持.以及数组下标寻址, 例如:
+ * <pre>
+ * config.set("a.b.c[3]", object);
+ * </pre>
+ *
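+ * 补充示意(键名仅为假设):对超出当前长度的下标赋值时,List会自动扩容并以null补齐空位:
+ * <pre>
+ * Configuration conf = Configuration.newDefault();
+ * conf.set("a.b[2]", "x");   // 得到 {"a":{"b":[null,null,"x"]}}
+ * </pre>
+ *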
+ * 对于插入对象,Configuration不做任何限制,但是请务必保证该对象是简单对象(包括Map、List),不要使用自定义对象,否则后续对于JSON序列化等情况会出现未定义行为。 + * + * @param path JSON path对象 + * @param object 需要插入的对象 + * @return Java表示的JSON对象 + */ + public Object set(final String path, final Object object) { + checkPath(path); + + Object result = this.get(path); + + setObject(path, extractConfiguration(object)); + + return result; + } + + /** + * 获取Configuration下所有叶子节点的key + *
+ * 对于
+ * <pre>
+ * {"a": {"b": {"c": [0,1,2,3]}}, "x": "y"}
+ * </pre>
+ * 下属的key包括: a.b.c[0],a.b.c[1],a.b.c[2],a.b.c[3],x + */ + public Set getKeys() { + Set collect = new HashSet(); + this.getKeysRecursive(this.getInternal(), "", collect); + return collect; + } + + /** + * 删除path对应的值,如果path不存在,将抛出异常。 + */ + public Object remove(final String path) { + final Object result = this.get(path); + if (null == result) { + throw DataXException.asDataXException( + CommonErrorCode.RUNTIME_ERROR, + String.format("配置文件对应Key[%s]并不存在,该情况是代码编程错误. 请联系DataX团队的同学.", path)); + } + + this.set(path, null); + return result; + } + + /** + * 合并其他Configuration,并修改两者冲突的KV配置 + * + * @param another 合并加入的第三方Configuration + * @param updateWhenConflict 当合并双方出现KV冲突时候,选择更新当前KV,或者忽略该KV + * @return 返回合并后对象 + */ + public Configuration merge(final Configuration another, + boolean updateWhenConflict) { + Set keys = another.getKeys(); + + for (final String key : keys) { + // 如果使用更新策略,凡是another存在的key,均需要更新 + if (updateWhenConflict) { + this.set(key, another.get(key)); + continue; + } + + // 使用忽略策略,只有another Configuration存在但是当前Configuration不存在的key,才需要更新 + boolean isCurrentExists = this.get(key) != null; + if (isCurrentExists) { + continue; + } + + this.set(key, another.get(key)); + } + return this; + } + + @Override + public String toString() { + return this.toJSON(); + } + + /** + * 将Configuration作为JSON输出 + */ + public String toJSON() { + return Configuration.toJSONString(this.getInternal()); + } + + /** + * 拷贝当前Configuration,注意,这里使用了深拷贝,避免冲突 + */ + @Override + public Configuration clone() { + Configuration config = Configuration + .from(Configuration.toJSONString(this.getInternal())); + config.addSecretKeyPath(this.secretKeyPathSet); + return config; + } + + /** + * 按照configuration要求格式的path + * 比如: + * a.b.c + * a.b[2].c + * + * @param path + */ + public void addSecretKeyPath(String path) { + if (StringUtils.isNotBlank(path)) { + this.secretKeyPathSet.add(path); + } + } + + public void addSecretKeyPath(Set pathSet) { + if (pathSet != null) { + this.secretKeyPathSet.addAll(pathSet); + } + } + + public void setSecretKeyPathSet(Set keyPathSet) { + if (keyPathSet != null) { + this.secretKeyPathSet = keyPathSet; + } + } + + public boolean isSecretPath(String path) { + return this.secretKeyPathSet.contains(path); + } + + @SuppressWarnings("unchecked") + void getKeysRecursive(final Object current, String path, Set collect) { + boolean isRegularElement = !(current instanceof Map || current instanceof List); + if (isRegularElement) { + collect.add(path); + return; + } + + boolean isMap = current instanceof Map; + if (isMap) { + Map mapping = ((Map) current); + for (final String key : mapping.keySet()) { + if (StringUtils.isBlank(path)) { + getKeysRecursive(mapping.get(key), key.trim(), collect); + } else { + getKeysRecursive(mapping.get(key), path + "." 
+ key.trim(), + collect); + } + } + return; + } + + boolean isList = current instanceof List; + if (isList) { + List lists = (List) current; + for (int i = 0; i < lists.size(); i++) { + getKeysRecursive(lists.get(i), path + String.format("[%d]", i), + collect); + } + return; + } + + return; + } + + public Object getInternal() { + return this.root; + } + + private void setObject(final String path, final Object object) { + Object newRoot = setObjectRecursive(this.root, split2List(path), 0, + object); + + if (isSuitForRoot(newRoot)) { + this.root = newRoot; + return; + } + + throw DataXException.asDataXException(CommonErrorCode.RUNTIME_ERROR, + String.format("值[%s]无法适配您提供[%s], 该异常代表系统编程错误, 请联系DataX开发团队!", + ToStringBuilder.reflectionToString(object), path)); + } + + @SuppressWarnings("unchecked") + private Object extractConfiguration(final Object object) { + if (object instanceof Configuration) { + return extractFromConfiguration(object); + } + + if (object instanceof List) { + List result = new ArrayList(); + for (final Object each : (List) object) { + result.add(extractFromConfiguration(each)); + } + return result; + } + + if (object instanceof Map) { + Map result = new HashMap(); + for (final String key : ((Map) object).keySet()) { + result.put(key, + extractFromConfiguration(((Map) object) + .get(key))); + } + return result; + } + + return object; + } + + private Object extractFromConfiguration(final Object object) { + if (object instanceof Configuration) { + return ((Configuration) object).getInternal(); + } + + return object; + } + + Object buildObject(final List paths, final Object object) { + if (null == paths) { + throw DataXException.asDataXException( + CommonErrorCode.RUNTIME_ERROR, + "Path不能为null,该异常代表系统编程错误, 请联系DataX开发团队 !"); + } + + if (1 == paths.size() && StringUtils.isBlank(paths.get(0))) { + return object; + } + + Object child = object; + for (int i = paths.size() - 1; i >= 0; i--) { + String path = paths.get(i); + + if (isPathMap(path)) { + Map mapping = new HashMap(); + mapping.put(path, child); + child = mapping; + continue; + } + + if (isPathList(path)) { + List lists = new ArrayList( + this.getIndex(path) + 1); + expand(lists, this.getIndex(path) + 1); + lists.set(this.getIndex(path), child); + child = lists; + continue; + } + + throw DataXException.asDataXException( + CommonErrorCode.RUNTIME_ERROR, String.format( + "路径[%s]出现非法值类型[%s],该异常代表系统编程错误, 请联系DataX开发团队! 
.", + StringUtils.join(paths, "."), path)); + } + + return child; + } + + @SuppressWarnings("unchecked") + Object setObjectRecursive(Object current, final List paths, + int index, final Object value) { + + // 如果是已经超出path,我们就返回value即可,作为最底层叶子节点 + boolean isLastIndex = index == paths.size(); + if (isLastIndex) { + return value; + } + + String path = paths.get(index).trim(); + boolean isNeedMap = isPathMap(path); + if (isNeedMap) { + Map mapping; + + // 当前不是map,因此全部替换为map,并返回新建的map对象 + boolean isCurrentMap = current instanceof Map; + if (!isCurrentMap) { + mapping = new HashMap(); + mapping.put( + path, + buildObject(paths.subList(index + 1, paths.size()), + value)); + return mapping; + } + + // 当前是map,但是没有对应的key,也就是我们需要新建对象插入该map,并返回该map + mapping = ((Map) current); + boolean hasSameKey = mapping.containsKey(path); + if (!hasSameKey) { + mapping.put( + path, + buildObject(paths.subList(index + 1, paths.size()), + value)); + return mapping; + } + + // 当前是map,而且还竟然存在这个值,好吧,继续递归遍历 + current = mapping.get(path); + mapping.put(path, + setObjectRecursive(current, paths, index + 1, value)); + return mapping; + } + + boolean isNeedList = isPathList(path); + if (isNeedList) { + List lists; + int listIndexer = getIndex(path); + + // 当前是list,直接新建并返回即可 + boolean isCurrentList = current instanceof List; + if (!isCurrentList) { + lists = expand(new ArrayList(), listIndexer + 1); + lists.set( + listIndexer, + buildObject(paths.subList(index + 1, paths.size()), + value)); + return lists; + } + + // 当前是list,但是对应的indexer是没有具体的值,也就是我们新建对象然后插入到该list,并返回该List + lists = (List) current; + lists = expand(lists, listIndexer + 1); + + boolean hasSameIndex = lists.get(listIndexer) != null; + if (!hasSameIndex) { + lists.set( + listIndexer, + buildObject(paths.subList(index + 1, paths.size()), + value)); + return lists; + } + + // 当前是list,并且存在对应的index,没有办法继续递归寻找 + current = lists.get(listIndexer); + lists.set(listIndexer, + setObjectRecursive(current, paths, index + 1, value)); + return lists; + } + + throw DataXException.asDataXException(CommonErrorCode.RUNTIME_ERROR, + "该异常代表系统编程错误, 请联系DataX开发团队 !"); + } + + private Object findObject(final String path) { + boolean isRootQuery = StringUtils.isBlank(path); + if (isRootQuery) { + return this.root; + } + + Object target = this.root; + + for (final String each : split2List(path)) { + if (isPathMap(each)) { + target = findObjectInMap(target, each); + continue; + } else { + target = findObjectInList(target, each); + continue; + } + } + + return target; + } + + @SuppressWarnings("unchecked") + private Object findObjectInMap(final Object target, final String index) { + boolean isMap = (target instanceof Map); + if (!isMap) { + throw new IllegalArgumentException(String.format( + "您提供的配置文件有误. 路径[%s]需要配置Json格式的Map对象,但该节点发现实际类型是[%s]. 请检查您的配置并作出修改.", + index, target.getClass().toString())); + } + + Object result = ((Map) target).get(index); + if (null == result) { + throw new IllegalArgumentException(String.format( + "您提供的配置文件有误. 路径[%s]值为null,datax无法识别该配置. 请检查您的配置并作出修改.", index)); + } + + return result; + } + + @SuppressWarnings({"unchecked"}) + private Object findObjectInList(final Object target, final String each) { + boolean isList = (target instanceof List); + if (!isList) { + throw new IllegalArgumentException(String.format( + "您提供的配置文件有误. 路径[%s]需要配置Json格式的Map对象,但该节点发现实际类型是[%s]. 
请检查您的配置并作出修改.",
+                    each, target.getClass().toString()));
+        }
+
+        String index = each.replace("[", "").replace("]", "");
+        if (!StringUtils.isNumeric(index)) {
+            throw new IllegalArgumentException(
+                    String.format(
+                            "系统编程错误,列表下标必须为数字类型,但该节点发现实际类型是[%s] ,该异常代表系统编程错误, 请联系DataX开发团队 !",
+                            index));
+        }
+
+        return ((List) target).get(Integer.valueOf(index));
+    }
+
+    private List expand(List list, int size) {
+        int expand = size - list.size();
+        while (expand-- > 0) {
+            list.add(null);
+        }
+        return list;
+    }
+
+    private boolean isPathList(final String path) {
+        return path.contains("[") && path.contains("]");
+    }
+
+    private boolean isPathMap(final String path) {
+        return StringUtils.isNotBlank(path) && !isPathList(path);
+    }
+
+    private int getIndex(final String index) {
+        return Integer.valueOf(index.replace("[", "").replace("]", ""));
+    }
+
+    private boolean isSuitForRoot(final Object object) {
+        if (null != object && (object instanceof List || object instanceof Map)) {
+            return true;
+        }
+
+        return false;
+    }
+
+    private String split(final String path) {
+        return StringUtils.replace(path, "[", ".[");
+    }
+
+    private List split2List(final String path) {
+        return Arrays.asList(StringUtils.split(split(path), "."));
+    }
+
+    private void checkPath(final String path) {
+        if (null == path) {
+            throw new IllegalArgumentException(
+                    "系统编程错误, 该异常代表系统编程错误, 请联系DataX开发团队!.");
+        }
+
+        for (final String each : StringUtils.split(path, ".")) {
+            if (StringUtils.isBlank(each)) {
+                throw new IllegalArgumentException(String.format(
+                        "系统编程错误, 路径[%s]不合法, 路径层次之间不能出现空白字符 .", path));
+            }
+        }
+    }
+
+    @SuppressWarnings("unused")
+    private String toJSONPath(final String path) {
+        return (StringUtils.isBlank(path) ? "$" : "$." + path).replace("$.[", "$[");
+    }
+
+    private static void checkJSON(final String json) {
+        if (StringUtils.isBlank(json)) {
+            throw DataXException.asDataXException(CommonErrorCode.CONFIG_ERROR,
+                    "配置信息错误. 因为您提供的配置信息不是合法的JSON格式, JSON不能为空白. 请按照标准json格式提供配置信息. ");
+        }
+    }
+
+    private Configuration(final String json) {
+        try {
+            this.root = Json.fromJson(json, Object.class);
+        } catch (Exception e) {
+            throw DataXException.asDataXException(CommonErrorCode.CONFIG_ERROR,
+                    String.format("配置信息错误. 您提供的配置信息不是合法的JSON格式: %s . 请按照标准json格式提供配置信息. ", e.getMessage()));
+        }
+    }
+
+    private static String toJSONString(final Object object) {
+        return Json.toJson(object, null);
+    }
+
+    public Set getSecretKeyPathSet() {
+        return secretKeyPathSet;
+    }
+
+    private static boolean isWrapClass(Class clz) {
+        try {
+            return ((Class) clz.getField("TYPE").get(null)).isPrimitive();
+        } catch (Exception e) {
+            return false;
+        }
+    }
+}
diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/util/FilterUtil.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/util/FilterUtil.java
new file mode 100644
index 000000000..82de00ec7
--- /dev/null
+++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/util/FilterUtil.java
@@ -0,0 +1,53 @@
+package com.alibaba.datax.common.util;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.regex.Pattern;
+
+/**
+ * 提供从 List 中根据 regular 过滤的通用工具(返回值已经去重). 
使用场景,比如:odpsreader + * 的分区筛选,hdfsreader/txtfilereader的路径筛选等 + */ +public final class FilterUtil { + + //已经去重 + public static List filterByRegular(List allStrs, + String regular) { + List matchedValues = new ArrayList(); + + // 语法习惯上的兼容处理(pt=* 实际正则应该是:pt=.*) + String newReqular = regular.replace(".*", "*").replace("*", ".*"); + + Pattern p = Pattern.compile(newReqular); + + for (String partition : allStrs) { + if (p.matcher(partition).matches()) { + if (!matchedValues.contains(partition)) { + matchedValues.add(partition); + } + } + } + + return matchedValues; + } + + //已经去重 + public static List filterByRegulars(List allStrs, + List regulars) { + List matchedValues = new ArrayList(); + + List tempMatched = null; + for (String regular : regulars) { + tempMatched = filterByRegular(allStrs, regular); + if (null != tempMatched && !tempMatched.isEmpty()) { + for (String temp : tempMatched) { + if (!matchedValues.contains(temp)) { + matchedValues.add(temp); + } + } + } + } + + return matchedValues; + } +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/util/HostUtils.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/util/HostUtils.java new file mode 100644 index 000000000..2ed8f1019 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/util/HostUtils.java @@ -0,0 +1,49 @@ +package com.alibaba.datax.common.util; + +import org.apache.commons.io.IOUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.net.InetAddress; +import java.net.UnknownHostException; + +/** + * Created by liqiang on 15/8/25. + */ +public class HostUtils { + + public static final String IP; + public static final String HOSTNAME; + private static final Logger log = LoggerFactory.getLogger(HostUtils.class); + + static { + String ip; + String hostname; + try { + InetAddress addr = InetAddress.getLocalHost(); + ip = addr.getHostAddress(); + hostname = addr.getHostName(); + } catch (UnknownHostException e) { + log.error("Can't find out address: " + e.getMessage()); + ip = "UNKNOWN"; + hostname = "UNKNOWN"; + } + if (ip.equals("127.0.0.1") || ip.equals("::1") || ip.equals("UNKNOWN")) { + try { + Process process = Runtime.getRuntime().exec("hostname -i"); + if (process.waitFor() == 0) { + ip = new String(IOUtils.toByteArray(process.getInputStream()), "UTF8"); + } + process = Runtime.getRuntime().exec("hostname"); + if (process.waitFor() == 0) { + hostname = (new String(IOUtils.toByteArray(process.getInputStream()), "UTF8")).trim(); + } + } catch (Exception e) { + log.warn("get hostname failed {}", e.getMessage()); + } + } + IP = ip; + HOSTNAME = hostname; + log.info("IP {} HOSTNAME {}", IP, HOSTNAME); + } +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/util/ListUtil.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/util/ListUtil.java new file mode 100644 index 000000000..d7a5b7646 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/util/ListUtil.java @@ -0,0 +1,139 @@ +package com.alibaba.datax.common.util; + +import com.alibaba.datax.common.exception.CommonErrorCode; +import com.alibaba.datax.common.exception.DataXException; +import org.apache.commons.lang3.StringUtils; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; + +/** + * 提供针对 DataX中使用的 List 较为常见的一些封装。 比如:checkIfValueDuplicate 可以用于检查用户配置的 writer + 
* 的列不能重复。makeSureNoValueDuplicate亦然,只是会严格报错。 + */ +public final class ListUtil { + + public static boolean checkIfValueDuplicate(List aList, + boolean caseSensitive) { + if (null == aList || aList.isEmpty()) { + throw DataXException.asDataXException(CommonErrorCode.CONFIG_ERROR, + "您提供的作业配置有误,List不能为空."); + } + + try { + makeSureNoValueDuplicate(aList, caseSensitive); + } catch (Exception e) { + return true; + } + return false; + } + + public static void makeSureNoValueDuplicate(List aList, + boolean caseSensitive) { + if (null == aList || aList.isEmpty()) { + throw new IllegalArgumentException("您提供的作业配置有误, List不能为空."); + } + + if (1 == aList.size()) { + return; + } else { + List list = null; + if (!caseSensitive) { + list = valueToLowerCase(aList); + } else { + list = new ArrayList(aList); + } + + Collections.sort(list); + + for (int i = 0, len = list.size() - 1; i < len; i++) { + if (list.get(i).equals(list.get(i + 1))) { + throw DataXException + .asDataXException( + CommonErrorCode.CONFIG_ERROR, + String.format( + "您提供的作业配置信息有误, String:[%s] 不允许重复出现在列表中: [%s].", + list.get(i), + StringUtils.join(aList, ","))); + } + } + } + } + + public static boolean checkIfBInA(List aList, List bList, + boolean caseSensitive) { + if (null == aList || aList.isEmpty() || null == bList + || bList.isEmpty()) { + throw new IllegalArgumentException("您提供的作业配置有误, List不能为空."); + } + + try { + makeSureBInA(aList, bList, caseSensitive); + } catch (Exception e) { + return false; + } + return true; + } + + public static void makeSureBInA(List aList, List bList, + boolean caseSensitive) { + if (null == aList || aList.isEmpty() || null == bList + || bList.isEmpty()) { + throw new IllegalArgumentException("您提供的作业配置有误, List不能为空."); + } + + List all = null; + List part = null; + + if (!caseSensitive) { + all = valueToLowerCase(aList); + part = valueToLowerCase(bList); + } else { + all = new ArrayList(aList); + part = new ArrayList(bList); + } + + for (String oneValue : part) { + if (!all.contains(oneValue)) { + throw DataXException + .asDataXException( + CommonErrorCode.CONFIG_ERROR, + String.format( + "您提供的作业配置信息有误, String:[%s] 不存在于列表中:[%s].", + oneValue, StringUtils.join(aList, ","))); + } + } + + } + + public static boolean checkIfValueSame(List aList) { + if (null == aList || aList.isEmpty()) { + throw new IllegalArgumentException("您提供的作业配置有误, List不能为空."); + } + + if (1 == aList.size()) { + return true; + } else { + Boolean firstValue = aList.get(0); + for (int i = 1, len = aList.size(); i < len; i++) { + if (firstValue.booleanValue() != aList.get(i).booleanValue()) { + return false; + } + } + return true; + } + } + + public static List valueToLowerCase(List aList) { + if (null == aList || aList.isEmpty()) { + throw new IllegalArgumentException("您提供的作业配置有误, List不能为空."); + } + List result = new ArrayList(aList.size()); + for (String oneValue : aList) { + result.add(null != oneValue ? 
oneValue.toLowerCase() : null); + } + + return result; + } +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/util/RangeSplitUtil.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/util/RangeSplitUtil.java new file mode 100644 index 000000000..791f9ea12 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/util/RangeSplitUtil.java @@ -0,0 +1,209 @@ +package com.alibaba.datax.common.util; + +import org.apache.commons.lang3.tuple.ImmutablePair; +import org.apache.commons.lang3.tuple.Pair; + +import java.math.BigInteger; +import java.util.*; + +/** + * 提供通用的根据数字范围、字符串范围等进行切分的通用功能. + */ +public final class RangeSplitUtil { + + public static String[] doAsciiStringSplit(String left, String right, int expectSliceNumber) { + int radix = 128; + + BigInteger[] tempResult = doBigIntegerSplit(stringToBigInteger(left, radix), + stringToBigInteger(right, radix), expectSliceNumber); + String[] result = new String[tempResult.length]; + + //处理第一个字符串(因为:在转换为数字,再还原的时候,如果首字符刚好是 basic,则不知道应该添加多少个 basic) + result[0] = left; + result[tempResult.length - 1] = right; + + for (int i = 1, len = tempResult.length - 1; i < len; i++) { + result[i] = bigIntegerToString(tempResult[i], radix); + } + + return result; + } + + + public static long[] doLongSplit(long left, long right, int expectSliceNumber) { + BigInteger[] result = doBigIntegerSplit(BigInteger.valueOf(left), + BigInteger.valueOf(right), expectSliceNumber); + long[] returnResult = new long[result.length]; + for (int i = 0, len = result.length; i < len; i++) { + returnResult[i] = result[i].longValue(); + } + return returnResult; + } + + public static BigInteger[] doBigIntegerSplit(BigInteger left, BigInteger right, int expectSliceNumber) { + if (expectSliceNumber < 1) { + throw new IllegalArgumentException(String.format( + "切分份数不能小于1. 此处:expectSliceNumber=[%s].", expectSliceNumber)); + } + + if (null == left || null == right) { + throw new IllegalArgumentException(String.format( + "对 BigInteger 进行切分时,其左右区间不能为 null. 此处:left=[%s],right=[%s].", left, right)); + } + + if (left.compareTo(right) == 0) { + return new BigInteger[]{left, right}; + } else { + // 调整大小顺序,确保 left < right + if (left.compareTo(right) > 0) { + BigInteger temp = left; + left = right; + right = temp; + } + + //left < right + BigInteger endAndStartGap = right.subtract(left); + + BigInteger step = endAndStartGap.divide(BigInteger.valueOf(expectSliceNumber)); + BigInteger remainder = endAndStartGap.remainder(BigInteger.valueOf(expectSliceNumber)); + + //remainder 不可能超过expectSliceNumber,所以不需要检查remainder的 Integer 的范围 + + // 这里不能 step.intValue()==0,因为可能溢出 + if (step.compareTo(BigInteger.ZERO) == 0) { + expectSliceNumber = remainder.intValue(); + } + + BigInteger[] result = new BigInteger[expectSliceNumber + 1]; + result[0] = left; + result[expectSliceNumber] = right; + + BigInteger lowerBound; + BigInteger upperBound = left; + for (int i = 1; i < expectSliceNumber; i++) { + lowerBound = upperBound; + upperBound = lowerBound.add(step); + upperBound = upperBound.add((remainder.compareTo(BigInteger.valueOf(i)) >= 0) + ? 
BigInteger.ONE : BigInteger.ZERO); + result[i] = upperBound; + } + + return result; + } + } + + private static void checkIfBetweenRange(int value, int left, int right) { + if (value < left || value > right) { + throw new IllegalArgumentException(String.format("parameter can not <[%s] or >[%s].", + left, right)); + } + } + + /** + * 由于只支持 ascii 码对应字符,所以radix 范围为[1,128] + */ + public static BigInteger stringToBigInteger(String aString, int radix) { + if (null == aString) { + throw new IllegalArgumentException("参数 bigInteger 不能为空."); + } + + checkIfBetweenRange(radix, 1, 128); + + BigInteger result = BigInteger.ZERO; + BigInteger radixBigInteger = BigInteger.valueOf(radix); + + int tempChar; + int k = 0; + + for (int i = aString.length() - 1; i >= 0; i--) { + tempChar = aString.charAt(i); + if (tempChar >= 128) { + throw new IllegalArgumentException(String.format("根据字符串进行切分时仅支持 ASCII 字符串,而字符串:[%s]非 ASCII 字符串.", aString)); + } + result = result.add(BigInteger.valueOf(tempChar).multiply(radixBigInteger.pow(k))); + k++; + } + + return result; + } + + /** + * 把BigInteger 转换为 String.注意:radix 和 basic 范围都为[1,128], radix + basic 的范围也必须在[1,128]. + */ + private static String bigIntegerToString(BigInteger bigInteger, int radix) { + if (null == bigInteger) { + throw new IllegalArgumentException("参数 bigInteger 不能为空."); + } + + checkIfBetweenRange(radix, 1, 128); + + StringBuilder resultStringBuilder = new StringBuilder(); + + List list = new ArrayList(); + BigInteger radixBigInteger = BigInteger.valueOf(radix); + BigInteger currentValue = bigInteger; + + BigInteger quotient = currentValue.divide(radixBigInteger); + while (quotient.compareTo(BigInteger.ZERO) > 0) { + list.add(currentValue.remainder(radixBigInteger).intValue()); + currentValue = currentValue.divide(radixBigInteger); + quotient = currentValue; + } + Collections.reverse(list); + + if (list.isEmpty()) { + list.add(0, bigInteger.remainder(radixBigInteger).intValue()); + } + + Map map = new HashMap(); + for (int i = 0; i < radix; i++) { + map.put(i, (char) (i)); + } + +// String msg = String.format("%s 转为 %s 进制,结果为:%s", bigInteger.longValue(), radix, list); +// System.out.println(msg); + + for (Integer aList : list) { + resultStringBuilder.append(map.get(aList)); + } + + return resultStringBuilder.toString(); + } + + /** + * 获取字符串中的最小字符和最大字符(依据 ascii 进行判断).要求字符串必须非空,并且为 ascii 字符串. + * 返回的Pair,left=最小字符,right=最大字符. + */ + public static Pair getMinAndMaxCharacter(String aString) { + if (!isPureAscii(aString)) { + throw new IllegalArgumentException(String.format("根据字符串进行切分时仅支持 ASCII 字符串,而字符串:[%s]非 ASCII 字符串.", aString)); + } + + char min = aString.charAt(0); + char max = min; + + char temp; + for (int i = 1, len = aString.length(); i < len; i++) { + temp = aString.charAt(i); + min = min < temp ? min : temp; + max = max > temp ? 
max : temp; + } + + return new ImmutablePair(min, max); + } + + private static boolean isPureAscii(String aString) { + if (null == aString) { + return false; + } + + for (int i = 0, len = aString.length(); i < len; i++) { + char ch = aString.charAt(i); + if (ch >= 127 || ch < 0) { + return false; + } + } + return true; + } + +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/util/RetryUtil.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/util/RetryUtil.java new file mode 100644 index 000000000..f01423812 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/util/RetryUtil.java @@ -0,0 +1,208 @@ +package com.alibaba.datax.common.util; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.List; +import java.util.concurrent.*; + +public final class RetryUtil { + + private static final Logger LOG = LoggerFactory.getLogger(RetryUtil.class); + + private static final long MAX_SLEEP_MILLISECOND = 256L * 1000L; + + /** + * 重试次数工具方法. + * + * @param callable 实际逻辑 + * @param retryTimes 最大重试次数(>1) + * @param sleepTimeInMilliSecond 运行失败后休眠对应时间再重试 + * @param exponential 休眠时间是否指数递增 + * @param 返回值类型 + * @return 经过重试的callable的执行结果 + */ + public static T executeWithRetry(Callable callable, + int retryTimes, + long sleepTimeInMilliSecond, + boolean exponential) throws Exception { + Retry retry = new Retry(); + return retry.doRetry(callable, retryTimes, sleepTimeInMilliSecond, exponential, null); + } + + /** + * 重试次数工具方法. + * + * @param callable 实际逻辑 + * @param retryTimes 最大重试次数(>1) + * @param sleepTimeInMilliSecond 运行失败后休眠对应时间再重试 + * @param exponential 休眠时间是否指数递增 + * @param 返回值类型 + * @param retryExceptionClasss 出现指定的异常类型时才进行重试 + * @return 经过重试的callable的执行结果 + */ + public static T executeWithRetry(Callable callable, + int retryTimes, + long sleepTimeInMilliSecond, + boolean exponential, + List> retryExceptionClasss) throws Exception { + Retry retry = new Retry(); + return retry.doRetry(callable, retryTimes, sleepTimeInMilliSecond, exponential, retryExceptionClasss); + } + + /** + * 在外部线程执行并且重试。每次执行需要在timeoutMs内执行完,不然视为失败。 + * 执行异步操作的线程池从外部传入,线程池的共享粒度由外部控制。比如,HttpClientUtil共享一个线程池。 + *
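+ * 示意用法(其中的 doHttpGet 与 url 为假设的业务代码,仅用于说明参数含义与顺序):
+ * <pre>
+ * ThreadPoolExecutor pool = RetryUtil.createThreadPoolExecutor();
+ * String result = RetryUtil.asyncExecuteWithRetry(
+ *         () -> doHttpGet(url),   // 每次执行须在5000ms内完成
+ *         3,                      // 最多重试3次
+ *         1000L,                  // 失败后休眠1000ms再重试
+ *         true,                   // 休眠时间指数递增
+ *         5000L, pool);
+ * </pre>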
+ * 限制条件:仅仅能够在阻塞的时候interrupt线程 + * + * @param callable 实际逻辑 + * @param retryTimes 最大重试次数(>1) + * @param sleepTimeInMilliSecond 运行失败后休眠对应时间再重试 + * @param exponential 休眠时间是否指数递增 + * @param timeoutMs callable执行超时时间,毫秒 + * @param executor 执行异步操作的线程池 + * @param 返回值类型 + * @return 经过重试的callable的执行结果 + */ + public static T asyncExecuteWithRetry(Callable callable, + int retryTimes, + long sleepTimeInMilliSecond, + boolean exponential, + long timeoutMs, + ThreadPoolExecutor executor) throws Exception { + Retry retry = new AsyncRetry(timeoutMs, executor); + return retry.doRetry(callable, retryTimes, sleepTimeInMilliSecond, exponential, null); + } + + /** + * 创建异步执行的线程池。特性如下: + * core大小为0,初始状态下无线程,无初始消耗。 + * max大小为5,最多五个线程。 + * 60秒超时时间,闲置超过60秒线程会被回收。 + * 使用SynchronousQueue,任务不会排队,必须要有可用线程才能提交成功,否则会RejectedExecutionException。 + * + * @return 线程池 + */ + public static ThreadPoolExecutor createThreadPoolExecutor() { + return new ThreadPoolExecutor(0, 5, + 60L, TimeUnit.SECONDS, + new SynchronousQueue()); + } + + + private static class Retry { + + public T doRetry(Callable callable, int retryTimes, long sleepTimeInMilliSecond, boolean exponential, List> retryExceptionClasss) + throws Exception { + + if (null == callable) { + throw new IllegalArgumentException("系统编程错误, 入参callable不能为空 ! "); + } + + if (retryTimes < 1) { + throw new IllegalArgumentException(String.format( + "系统编程错误, 入参retrytime[%d]不能小于1 !", retryTimes)); + } + + Exception saveException = null; + for (int i = 0; i < retryTimes; i++) { + try { + return call(callable); + } catch (Exception e) { + saveException = e; + if (i == 0) { + LOG.error(String.format("Exception when calling callable, 异常Msg:%s", saveException.getMessage()), saveException); + } + + if (null != retryExceptionClasss && !retryExceptionClasss.isEmpty()) { + boolean needRetry = false; + for (Class eachExceptionClass : retryExceptionClasss) { + if (eachExceptionClass == e.getClass()) { + needRetry = true; + break; + } + } + if (!needRetry) { + throw saveException; + } + } + + if (i + 1 < retryTimes && sleepTimeInMilliSecond > 0) { + long startTime = System.currentTimeMillis(); + + long timeToSleep; + if (exponential) { + timeToSleep = sleepTimeInMilliSecond * (long) Math.pow(2, i); + if (timeToSleep >= MAX_SLEEP_MILLISECOND) { + timeToSleep = MAX_SLEEP_MILLISECOND; + } + } else { + timeToSleep = sleepTimeInMilliSecond; + if (timeToSleep >= MAX_SLEEP_MILLISECOND) { + timeToSleep = MAX_SLEEP_MILLISECOND; + } + } + + try { + Thread.sleep(timeToSleep); + } catch (InterruptedException ignored) { + } + + long realTimeSleep = System.currentTimeMillis() - startTime; + + LOG.error(String.format("Exception when calling callable, 即将尝试执行第%s次重试.本次重试计划等待[%s]ms,实际等待[%s]ms, 异常Msg:[%s]", + i + 1, timeToSleep, realTimeSleep, e.getMessage())); + + } + } + } + throw saveException; + } + + protected T call(Callable callable) throws Exception { + return callable.call(); + } + } + + private static class AsyncRetry extends Retry { + + private long timeoutMs; + private ThreadPoolExecutor executor; + + public AsyncRetry(long timeoutMs, ThreadPoolExecutor executor) { + this.timeoutMs = timeoutMs; + this.executor = executor; + } + + /** + * 使用传入的线程池异步执行任务,并且等待。 + *
+ * future.get()方法,等待指定的毫秒数。如果任务在超时时间内结束,则正常返回。 + * 如果抛异常(可能是执行超时、执行异常、被其他线程cancel或interrupt),都记录日志并且网上抛异常。 + * 正常和非正常的情况都会判断任务是否结束,如果没有结束,则cancel任务。cancel参数为true,表示即使 + * 任务正在执行,也会interrupt线程。 + * + * @param callable + * @param + * @return + * @throws Exception + */ + @Override + protected T call(Callable callable) throws Exception { + Future future = executor.submit(callable); + try { + return future.get(timeoutMs, TimeUnit.MILLISECONDS); + } catch (Exception e) { + LOG.warn("Try once failed", e); + throw e; + } finally { + if (!future.isDone()) { + future.cancel(true); + LOG.warn("Try once task not done, cancel it, active count: " + executor.getActiveCount()); + } + } + } + } + +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/util/StrUtil.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/util/StrUtil.java new file mode 100644 index 000000000..29400e2d4 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/util/StrUtil.java @@ -0,0 +1,85 @@ +package com.alibaba.datax.common.util; + +import org.apache.commons.lang3.StringUtils; +import org.apache.commons.lang3.Validate; + +import java.text.DecimalFormat; +import java.util.HashMap; +import java.util.Map; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +public class StrUtil { + + private final static long KB_IN_BYTES = 1024; + + private final static long MB_IN_BYTES = 1024 * KB_IN_BYTES; + + private final static long GB_IN_BYTES = 1024 * MB_IN_BYTES; + + private final static long TB_IN_BYTES = 1024 * GB_IN_BYTES; + + private final static DecimalFormat df = new DecimalFormat("0.00"); + + private static final Pattern VARIABLE_PATTERN = Pattern + .compile("(\\$)\\{?(\\w+)\\}?"); + + private static String SYSTEM_ENCODING = System.getProperty("file.encoding"); + + static { + if (SYSTEM_ENCODING == null) { + SYSTEM_ENCODING = "UTF-8"; + } + } + + private StrUtil() { + } + + public static String stringify(long byteNumber) { + if (byteNumber / TB_IN_BYTES > 0) { + return df.format((double) byteNumber / (double) TB_IN_BYTES) + "TB"; + } else if (byteNumber / GB_IN_BYTES > 0) { + return df.format((double) byteNumber / (double) GB_IN_BYTES) + "GB"; + } else if (byteNumber / MB_IN_BYTES > 0) { + return df.format((double) byteNumber / (double) MB_IN_BYTES) + "MB"; + } else if (byteNumber / KB_IN_BYTES > 0) { + return df.format((double) byteNumber / (double) KB_IN_BYTES) + "KB"; + } else { + return String.valueOf(byteNumber) + "B"; + } + } + + + public static String replaceVariable(final String param) { + Map mapping = new HashMap(); + + Matcher matcher = VARIABLE_PATTERN.matcher(param); + while (matcher.find()) { + String variable = matcher.group(2); + String value = System.getProperty(variable); + if (StringUtils.isBlank(value)) { + value = matcher.group(); + } + mapping.put(matcher.group(), value); + } + + String retString = param; + for (final String key : mapping.keySet()) { + retString = retString.replace(key, mapping.get(key)); + } + + return retString; + } + + public static String compressMiddle(String s, int headLength, int tailLength) { + Validate.notNull(s, "Input string must not be null"); + Validate.isTrue(headLength > 0, "Head length must be larger than 0"); + Validate.isTrue(tailLength > 0, "Tail length must be larger than 0"); + + if (headLength + tailLength >= s.length()) { + return s; + } + return s.substring(0, headLength) + "..." 
+ s.substring(s.length() - tailLength); + } + +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/AbstractContainer.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/AbstractContainer.java new file mode 100644 index 000000000..362457ea4 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/AbstractContainer.java @@ -0,0 +1,38 @@ +package com.alibaba.datax.core; + +import com.alibaba.datax.common.util.Configuration; +import com.alibaba.datax.core.statistics.container.communicator.AbstractContainerCommunicator; +import org.apache.commons.lang.Validate; + +/** + * 执行容器的抽象类,持有该容器全局的配置 configuration + */ +public abstract class AbstractContainer { + protected Configuration configuration; + + protected AbstractContainerCommunicator containerCommunicator; + + public AbstractContainer(Configuration configuration) { + Validate.notNull(configuration, "Configuration can not be null."); + + this.configuration = configuration; + } + + public Configuration getConfiguration() { + return configuration; + } + + public AbstractContainerCommunicator getContainerCommunicator() { + return containerCommunicator; + } + + public void setContainerCommunicator(AbstractContainerCommunicator containerCommunicator) { + this.containerCommunicator = containerCommunicator; + } + + public abstract void start(); + + public void shutdown(){ + + } +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/Engine.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/Engine.java new file mode 100644 index 000000000..b2e346b4d --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/Engine.java @@ -0,0 +1,274 @@ +package com.alibaba.datax.core; + +import com.alibaba.datax.common.element.ColumnCast; +import com.alibaba.datax.common.exception.DataXException; +import com.alibaba.datax.common.spi.ErrorCode; +import com.alibaba.datax.common.statistics.PerfTrace; +import com.alibaba.datax.common.statistics.VMInfo; +import com.alibaba.datax.common.util.Configuration; +import com.alibaba.datax.core.job.JobContainer; +import com.alibaba.datax.core.taskgroup.TaskGroupContainer; +import com.alibaba.datax.core.util.ConfigParser; +import com.alibaba.datax.core.util.ConfigurationValidate; +import com.alibaba.datax.core.util.ExceptionTracker; +import com.alibaba.datax.core.util.FrameworkErrorCode; +import com.alibaba.datax.core.util.container.CoreConstant; +import com.alibaba.datax.core.util.container.LoadUtil; +import com.webank.wedatasphere.exchangis.datax.core.ThreadLocalSecurityManager; +import org.apache.commons.cli.BasicParser; +import org.apache.commons.cli.CommandLine; +import org.apache.commons.cli.Options; +import org.apache.commons.io.FileUtils; +import org.apache.commons.lang3.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.IOException; +import java.lang.management.ManagementFactory; +import java.util.Arrays; +import java.util.List; +import java.util.Set; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +/** + * Engine是DataX入口类,该类负责初始化Job或者Task的运行容器,并运行插件的Job或者Task逻辑 + */ +public class Engine { + private static final Logger LOG = LoggerFactory.getLogger(Engine.class); + private static final String ENV_TASK_HOME = "datax.task.home"; + + private static String RUNTIME_MODE; + + /* check job model (job/task) first */ + 
public void start(Configuration allConf) { + + //todo 放在DataxOnceExecutor实现 + + // 绑定column转换信息 + ColumnCast.bind(allConf); + + /** + * 初始化PluginLoader,可以获取各种插件配置 + */ + LoadUtil.bind(allConf); + + boolean isJob = !("taskGroup".equalsIgnoreCase(allConf + .getString(CoreConstant.DATAX_CORE_CONTAINER_MODEL))); + //JobContainer会在schedule后再行进行设置和调整值 + int channelNumber =0; + AbstractContainer container; + long instanceId; + int taskGroupId = -1; + if (isJob) { + allConf.set(CoreConstant.DATAX_CORE_CONTAINER_JOB_MODE, RUNTIME_MODE); + container = new JobContainer(allConf); + instanceId = allConf.getLong( + CoreConstant.DATAX_CORE_CONTAINER_JOB_ID, 0); + + } else { + container = new TaskGroupContainer(allConf); + instanceId = allConf.getLong( + CoreConstant.DATAX_CORE_CONTAINER_JOB_ID); + taskGroupId = allConf.getInt( + CoreConstant.DATAX_CORE_CONTAINER_TASKGROUP_ID); + channelNumber = allConf.getInt( + CoreConstant.DATAX_CORE_CONTAINER_TASKGROUP_CHANNEL); + } + + //缺省打开perfTrace + boolean traceEnable = allConf.getBool(CoreConstant.DATAX_CORE_CONTAINER_TRACE_ENABLE, true); + boolean perfReportEnable = allConf.getBool(CoreConstant.DATAX_CORE_REPORT_DATAX_PERFLOG, true); + + //standlone模式的datax shell任务不进行汇报 + if(instanceId == -1){ + perfReportEnable = false; + } + + int priority = 0; + try { + priority = Integer.parseInt(System.getenv("SKYNET_PRIORITY")); + }catch (NumberFormatException e){ + LOG.warn("prioriy set to 0, because NumberFormatException, the value is: "+System.getProperty("PROIORY")); + } + + Configuration jobInfoConfig = allConf.getConfiguration(CoreConstant.DATAX_JOB_JOBINFO); + //初始化PerfTrace + PerfTrace perfTrace = PerfTrace.getInstance(isJob, instanceId, taskGroupId, priority, traceEnable); + perfTrace.setJobInfo(jobInfoConfig,perfReportEnable,channelNumber); + container.start(); + + } + + + // 注意屏蔽敏感信息 + public static String filterJobConfiguration(final Configuration configuration) { + Configuration jobConfWithSetting = configuration.getConfiguration("job").clone(); + + Configuration jobContent = jobConfWithSetting.getConfiguration("content"); + + filterSensitiveConfiguration(jobContent); + + jobConfWithSetting.set("content",jobContent); + + return jobConfWithSetting.beautify(); + } + + public static Configuration filterSensitiveConfiguration(Configuration configuration){ + Set keys = configuration.getKeys(); + for (final String key : keys) { + boolean isSensitive = StringUtils.endsWithIgnoreCase(key, "password") + || StringUtils.endsWithIgnoreCase(key, "accessKey"); + if (isSensitive && configuration.get(key) instanceof String) { + configuration.set(key, configuration.getString(key).replaceAll("[\\s\\S]", "*")); + } + } + return configuration; + } + + public static void entry(final String[] args) throws Throwable { + Options options = new Options(); + options.addOption("job", true, "Job config."); + options.addOption("jobid", true, "Job unique id."); + options.addOption("mode", true, "Job runtime mode."); + + BasicParser parser = new BasicParser(); + CommandLine cl = parser.parse(options, args); + + String jobPath = cl.getOptionValue("job"); + + // 如果用户没有明确指定jobid, 则 datax.py 会指定 jobid 默认值为-1 + String jobIdString = cl.getOptionValue("jobid"); + RUNTIME_MODE = cl.getOptionValue("mode"); + + Configuration configuration = ConfigParser.parse(jobPath); + + long jobId; + if (!"-1".equalsIgnoreCase(jobIdString)) { + jobId = Long.parseLong(jobIdString); + } else { + // only for dsc & ds & datax 3 update + String dscJobUrlPatternString = "/instance/(\\d{1,})/config.xml"; + String 
dsJobUrlPatternString = "/inner/job/(\\d{1,})/config"; + String dsTaskGroupUrlPatternString = "/inner/job/(\\d{1,})/taskGroup/"; + List patternStringList = Arrays.asList(dscJobUrlPatternString, + dsJobUrlPatternString, dsTaskGroupUrlPatternString); + jobId = parseJobIdFromUrl(patternStringList, jobPath); + } + + boolean isStandAloneMode = "standalone".equalsIgnoreCase(RUNTIME_MODE); + if (!isStandAloneMode && jobId == -1) { + // 如果不是 standalone 模式,那么 jobId 一定不能为-1 + throw DataXException.asDataXException(FrameworkErrorCode.CONFIG_ERROR, "非 standalone 模式必须在 URL 中提供有效的 jobId."); + } + configuration.set(CoreConstant.DATAX_CORE_CONTAINER_JOB_ID, jobId); + + //打印vmInfo + VMInfo vmInfo = VMInfo.getVmInfo(); + if (vmInfo != null) { + LOG.info(vmInfo.toString()); + } + + LOG.info("\n" + Engine.filterJobConfiguration(configuration) + "\n"); + + LOG.debug(configuration.toJSON()); + + ConfigurationValidate.doValidate(configuration); + Engine engine = new Engine(); + engine.start(configuration); + } + + + /** + * -1 表示未能解析到 jobId + * + * only for dsc & ds & datax 3 update + */ + private static long parseJobIdFromUrl(List patternStringList, String url) { + long result = -1; + for (String patternString : patternStringList) { + result = doParseJobIdFromUrl(patternString, url); + if (result != -1) { + return result; + } + } + return result; + } + + private static long doParseJobIdFromUrl(String patternString, String url) { + Pattern pattern = Pattern.compile(patternString); + Matcher matcher = pattern.matcher(url); + if (matcher.find()) { + return Long.parseLong(matcher.group(1)); + } + + return -1; + } + + /** + * Save PID file + * Added by davidhua@webank.com + * @param workDir + * @throws IOException + */ + private static void savePID(String workDir) throws IOException{ + String name = ManagementFactory.getRuntimeMXBean().getName(); + String pid = name.split("@")[0].trim(); + File pidFile = new File(workDir + "/pid"); + FileUtils.write(pidFile, pid); + } + + /** + * Delete PID file + * Added by davidhua@webank.com + * @param workDir + * @throws IOException + */ + private static void removePID(String workDir) throws IOException{ + FileUtils.forceDelete(new File(workDir + "/pid")); + } + + public static void main(String[] args) { + int exitCode = 0; + LOG.info("start to run"); + String workDir = System.getProperty("user.dir"); + if(StringUtils.isBlank(workDir)){ + exitCode = 1; + }else { + try { + //Write current process's pid into file + savePID(workDir); + //Set security manager + System.setSecurityManager(new ThreadLocalSecurityManager()); + Engine.entry(args); + removePID(workDir); + } catch (Throwable e) { + exitCode = 1; + LOG.error("\n\n该任务最可能的错误原因是:\n" + ExceptionTracker.trace(e)); + if (e instanceof DataXException) { + DataXException tempException = (DataXException) e; + ErrorCode errorCode = tempException.getErrorCode(); + if (errorCode instanceof FrameworkErrorCode) { + FrameworkErrorCode tempErrorCode = (FrameworkErrorCode) errorCode; + exitCode = tempErrorCode.toExitValue(); + } + } + //Throw OutOfMemoryError + Throwable cause = e; + try { + while (null != cause) { + if (cause instanceof OutOfMemoryError) { + throw (OutOfMemoryError) cause; + } + cause = cause.getCause(); + } + }finally{ + System.exit(exitCode); + } + } + } + System.exit(exitCode); + } + +} \ No newline at end of file diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/container/util/HookInvoker.java 
b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/container/util/HookInvoker.java new file mode 100644 index 000000000..6e0ef1782 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/container/util/HookInvoker.java @@ -0,0 +1,91 @@ +package com.alibaba.datax.core.container.util; + +/** + * Created by xiafei.qiuxf on 14/12/17. + */ + +import com.alibaba.datax.common.exception.CommonErrorCode; +import com.alibaba.datax.common.exception.DataXException; +import com.alibaba.datax.common.spi.Hook; +import com.alibaba.datax.common.util.Configuration; +import com.alibaba.datax.core.util.FrameworkErrorCode; +import com.alibaba.datax.core.util.container.JarLoader; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FilenameFilter; +import java.util.HashMap; +import java.util.Iterator; +import java.util.Map; +import java.util.ServiceLoader; + +/** + * 扫描给定目录的所有一级子目录,每个子目录当作一个Hook的目录。 + * 对于每个子目录,必须符合ServiceLoader的标准目录格式,见http://docs.oracle.com/javase/6/docs/api/java/util/ServiceLoader.html。 + * 加载里头的jar,使用ServiceLoader机制调用。 + */ +public class HookInvoker { + + private static final Logger LOG = LoggerFactory.getLogger(HookInvoker.class); + private final Map msg; + private final Configuration conf; + + private File baseDir; + + public HookInvoker(String baseDirName, Configuration conf, Map msg) { + this.baseDir = new File(baseDirName); + this.conf = conf; + this.msg = msg; + } + + public void invokeAll() { + if (!baseDir.exists() || baseDir.isFile()) { + LOG.info("No hook invoked, because base dir not exists or is a file: " + baseDir.getAbsolutePath()); + return; + } + + String[] subDirs = baseDir.list(new FilenameFilter() { + @Override + public boolean accept(File dir, String name) { + return new File(dir, name).isDirectory(); + } + }); + + if (subDirs == null) { + throw DataXException.asDataXException(FrameworkErrorCode.HOOK_LOAD_ERROR, "获取HOOK子目录返回null"); + } + + for (String subDir : subDirs) { + doInvoke(new File(baseDir, subDir).getAbsolutePath()); + } + + } + + private void doInvoke(String path) { + ClassLoader oldClassLoader = Thread.currentThread().getContextClassLoader(); + try { + JarLoader jarLoader = new JarLoader(new String[]{path}); + Thread.currentThread().setContextClassLoader(jarLoader); + Iterator hookIt = ServiceLoader.load(Hook.class).iterator(); + if (!hookIt.hasNext()) { + LOG.warn("No hook defined under path: " + path); + } else { + Hook hook = hookIt.next(); + LOG.info("Invoke hook [{}], path: {}", hook.getName(), path); + hook.invoke(conf, msg); + } + } catch (Exception e) { + LOG.error("Exception when invoke hook", e); + throw DataXException.asDataXException( + CommonErrorCode.HOOK_INTERNAL_ERROR, "Exception when invoke hook", e); + } finally { + Thread.currentThread().setContextClassLoader(oldClassLoader); + } + } + + public static void main(String[] args) { + new HookInvoker("/Users/xiafei/workspace/datax3/target/datax/datax/hook", + null, new HashMap()).invokeAll(); + } +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/container/util/JobAssignUtil.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/container/util/JobAssignUtil.java new file mode 100644 index 000000000..8f0d17afe --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/container/util/JobAssignUtil.java @@ -0,0 +1,181 @@ +package 
com.alibaba.datax.core.container.util;
+
+import com.alibaba.datax.common.constant.CommonConstant;
+import com.alibaba.datax.common.util.Configuration;
+import com.alibaba.datax.core.util.container.CoreConstant;
+import org.apache.commons.lang.Validate;
+import org.apache.commons.lang3.StringUtils;
+
+import java.util.*;
+
+public final class JobAssignUtil {
+    private JobAssignUtil() {
+    }
+
+    /**
+     * Assign tasks fairly to the corresponding taskGroups.
+     * "Fair" means the load-balance resource marks carried by the tasks are taken
+     * into account so that the work is spread more evenly across resources.
+     * TODO: document this with a concrete example
+     */
+    public static List<Configuration> assignFairly(Configuration configuration, int channelNumber, int channelsPerTaskGroup) {
+        Validate.isTrue(configuration != null, "The job configuration received by the framework must not be null.");
+
+        List<Configuration> contentConfig = configuration.getListConfiguration(CoreConstant.DATAX_JOB_CONTENT);
+        Validate.isTrue(contentConfig.size() > 0, "The split job received by the framework has no content.");
+
+        Validate.isTrue(channelNumber > 0 && channelsPerTaskGroup > 0,
+                "channelNumber and channelsPerTaskGroup must both be positive");
+
+        int taskGroupNumber = (int) Math.ceil(1.0 * channelNumber / channelsPerTaskGroup);
+
+        Configuration aTaskConfig = contentConfig.get(0);
+
+        String readerResourceMark = aTaskConfig.getString(CoreConstant.JOB_READER_PARAMETER + "."
+                + CommonConstant.LOAD_BALANCE_RESOURCE_MARK);
+        String writerResourceMark = aTaskConfig.getString(String.format(CoreConstant.JOB_WRITER_PARAMETER, 0) + "."
+                + CommonConstant.LOAD_BALANCE_RESOURCE_MARK);
+
+        boolean hasLoadBalanceResourceMark = StringUtils.isNotBlank(readerResourceMark) ||
+                StringUtils.isNotBlank(writerResourceMark);
+
+        if (!hasLoadBalanceResourceMark) {
+            // Fake a fixed key as the resource mark (either the reader or the writer side would do; the reader side is used here)
+            for (Configuration conf : contentConfig) {
+                conf.set(CoreConstant.JOB_READER_PARAMETER + "."
+                        + CommonConstant.LOAD_BALANCE_RESOURCE_MARK, "aFakeResourceMarkForLoadBalance");
+            }
+            // Shuffle once at random to compensate for plugins that set no resource mark
+            Collections.shuffle(contentConfig, new Random(System.currentTimeMillis()));
+        }
+
+        LinkedHashMap<String, List<Integer>> resourceMarkAndTaskIdMap = parseAndGetResourceMarkAndTaskIdMap(contentConfig);
+        List<Configuration> taskGroupConfig = doAssign(resourceMarkAndTaskIdMap, configuration, taskGroupNumber);
+
+        // Adjust the RecordChannel count of each taskGroup (an optimization)
+        adjustChannelNumPerTaskGroup(taskGroupConfig, channelNumber);
+        return taskGroupConfig;
+    }
+
+    private static void adjustChannelNumPerTaskGroup(List<Configuration> taskGroupConfig, int channelNumber) {
+        int taskGroupNumber = taskGroupConfig.size();
+        int avgChannelsPerTaskGroup = channelNumber / taskGroupNumber;
+        int remainderChannelCount = channelNumber % taskGroupNumber;
+        // remainderChannelCount taskGroups get avgChannelsPerTaskGroup + 1 RecordChannels each;
+        // the other (taskGroupNumber - remainderChannelCount) taskGroups get avgChannelsPerTaskGroup each
+
+        int i = 0;
+        for (; i < remainderChannelCount; i++) {
+            taskGroupConfig.get(i).set(CoreConstant.DATAX_CORE_CONTAINER_TASKGROUP_CHANNEL, avgChannelsPerTaskGroup + 1);
+        }
+
+        for (int j = 0; j < taskGroupNumber - remainderChannelCount; j++) {
+            taskGroupConfig.get(i + j).set(CoreConstant.DATAX_CORE_CONTAINER_TASKGROUP_CHANNEL, avgChannelsPerTaskGroup);
+        }
+    }
+
+    /**
+     * From the task configurations, build the mapping:
+     * resource mark --> list of taskIds
+     */
+    private static LinkedHashMap<String, List<Integer>> parseAndGetResourceMarkAndTaskIdMap(List<Configuration> contentConfig) {
+        // key: resourceMark, value: taskId
+        LinkedHashMap<String, List<Integer>> readerResourceMarkAndTaskIdMap = new LinkedHashMap<>();
+        LinkedHashMap<String, List<Integer>> writerResourceMarkAndTaskIdMap = new LinkedHashMap<>();
+
+        for (Configuration aTaskConfig : contentConfig) {
+            int taskId = aTaskConfig.getInt(CoreConstant.TASK_ID);
+            // Record readerResourceMark in readerResourceMarkAndTaskIdMap
+            String readerResourceMark = aTaskConfig.getString(CoreConstant.JOB_READER_PARAMETER + "." + CommonConstant.LOAD_BALANCE_RESOURCE_MARK);
+            readerResourceMarkAndTaskIdMap.computeIfAbsent(readerResourceMark, k -> new LinkedList<>());
+            readerResourceMarkAndTaskIdMap.get(readerResourceMark).add(taskId);
+
+            // Record writerResourceMark in writerResourceMarkAndTaskIdMap
+            List<String> writerResourceMarks = new ArrayList<>();
+            List<Object> taskWriterList = aTaskConfig.getList(CoreConstant.JOB_WRITER);
+            for (int i = 0; i < taskWriterList.size(); i++) {
+                writerResourceMarks.add(aTaskConfig.getString(String.format(CoreConstant.JOB_WRITER_PARAMETER, i)
+                        + "." + CommonConstant.LOAD_BALANCE_RESOURCE_MARK));
+            }
+            String[] resourceMarksArray = new String[writerResourceMarks.size()];
+            // Join all task writers' resource marks
+            String writerResourceMark = StringUtils.join(writerResourceMarks.toArray(resourceMarksArray), "-");
+            writerResourceMarkAndTaskIdMap.computeIfAbsent(writerResourceMark, k -> new LinkedList<>());
+            writerResourceMarkAndTaskIdMap.get(writerResourceMark).add(taskId);
+        }
+
+        if (readerResourceMarkAndTaskIdMap.size() >= writerResourceMarkAndTaskIdMap.size()) {
+            // Shuffle using the reader-side resource marks
+            return readerResourceMarkAndTaskIdMap;
+        } else {
+            // Shuffle using the writer-side resource marks
+            return writerResourceMarkAndTaskIdMap;
+        }
+    }
+
+
+    /**
+     * The intended effect, shown by example:
+     *
+     * Database a has tables: 0, 1, 2
+     * Database b has tables: 3, 4
+     * Database c has tables: 5, 6, 7
+     *
+     * With 4 taskGroups,
+     * the result of assign is:
+     * taskGroup-0: 0,  4,
+     * taskGroup-1: 3,  6,
+     * taskGroup-2: 5,  2,
+     * taskGroup-3: 1,  7
+     *
+     * 
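+     * (How this interleaving arises: each pass of the assignment loop below takes at
+     * most one pending task from every resource mark in turn and advances the target
+     * taskGroup index on every take, so consecutive tasks inside one taskGroup tend
+     * to come from different databases.)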
+ */ + private static List doAssign(LinkedHashMap> resourceMarkAndTaskIdMap, Configuration jobConfiguration, int taskGroupNumber) { + List contentConfig = jobConfiguration.getListConfiguration(CoreConstant.DATAX_JOB_CONTENT); + + Configuration taskGroupTemplate = jobConfiguration.clone(); + taskGroupTemplate.remove(CoreConstant.DATAX_JOB_CONTENT); + + List result = new LinkedList(); + + List> taskGroupConfigList = new ArrayList>(taskGroupNumber); + for (int i = 0; i < taskGroupNumber; i++) { + taskGroupConfigList.add(new LinkedList()); + } + + int mapValueMaxLength = -1; + + List resourceMarks = new ArrayList(); + for (Map.Entry> entry : resourceMarkAndTaskIdMap.entrySet()) { + resourceMarks.add(entry.getKey()); + if (entry.getValue().size() > mapValueMaxLength) { + mapValueMaxLength = entry.getValue().size(); + } + } + + int taskGroupIndex = 0; + for (int i = 0; i < mapValueMaxLength; i++) { + for (String resourceMark : resourceMarks) { + if (resourceMarkAndTaskIdMap.get(resourceMark).size() > 0) { + int taskId = resourceMarkAndTaskIdMap.get(resourceMark).get(0); + taskGroupConfigList.get(taskGroupIndex % taskGroupNumber).add(contentConfig.get(taskId)); + taskGroupIndex++; + + resourceMarkAndTaskIdMap.get(resourceMark).remove(0); + } + } + } + + Configuration tempTaskGroupConfig; + for (int i = 0; i < taskGroupNumber; i++) { + tempTaskGroupConfig = taskGroupTemplate.clone(); + tempTaskGroupConfig.set(CoreConstant.DATAX_JOB_CONTENT, taskGroupConfigList.get(i)); + tempTaskGroupConfig.set(CoreConstant.DATAX_CORE_CONTAINER_TASKGROUP_ID, i); + + result.add(tempTaskGroupConfig); + } + + return result; + } + +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/job/JobContainer.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/job/JobContainer.java new file mode 100644 index 000000000..73d1aceba --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/job/JobContainer.java @@ -0,0 +1,1221 @@ +package com.alibaba.datax.core.job; + +import com.alibaba.datax.common.constant.PluginType; +import com.webank.wedatasphere.exchangis.datax.common.constant.TransportType; +import com.alibaba.datax.common.exception.DataXException; +import com.alibaba.datax.common.plugin.AbstractJobPlugin; +import com.alibaba.datax.common.plugin.JobPluginCollector; +import com.alibaba.datax.common.plugin.PluginProcessorLoader; +import com.alibaba.datax.common.spi.Reader; +import com.alibaba.datax.common.spi.Writer; +import com.alibaba.datax.common.statistics.PerfTrace; +import com.alibaba.datax.common.statistics.VMInfo; +import com.alibaba.datax.common.util.Configuration; +import com.webank.wedatasphere.exchangis.datax.common.GsonUtil; +import com.alibaba.datax.common.util.StrUtil; +import com.alibaba.datax.core.AbstractContainer; +import com.alibaba.datax.core.Engine; +import com.alibaba.datax.core.container.util.HookInvoker; +import com.alibaba.datax.core.container.util.JobAssignUtil; +import com.webank.wedatasphere.exchangis.datax.core.job.meta.MetaSchema; +import com.alibaba.datax.core.job.scheduler.AbstractScheduler; +import com.alibaba.datax.core.job.scheduler.processinner.StandAloneScheduler; +import com.webank.wedatasphere.exchangis.datax.core.processor.loader.JavaSrcUtils; +import com.alibaba.datax.core.statistics.communication.Communication; +import com.alibaba.datax.core.statistics.communication.CommunicationTool; +import 
com.alibaba.datax.core.statistics.container.communicator.AbstractContainerCommunicator; +import com.alibaba.datax.core.statistics.container.communicator.job.StandAloneJobContainerCommunicator; +import com.alibaba.datax.core.statistics.plugin.DefaultJobPluginCollector; +import com.alibaba.datax.core.util.ClassUtil; +import com.alibaba.datax.core.util.ErrorRecordChecker; +import com.alibaba.datax.core.util.FrameworkErrorCode; +import com.alibaba.datax.core.util.HttpClientUtil; +import com.alibaba.datax.core.util.container.ClassLoaderSwapper; +import com.alibaba.datax.core.util.container.CoreConstant; +import com.alibaba.datax.core.util.container.LoadUtil; +import com.alibaba.datax.dataxservice.face.domain.enums.ExecuteMode; +import com.webank.wedatasphere.exchangis.datax.util.Json; +import org.apache.commons.io.FileUtils; +import org.apache.commons.io.filefilter.FileFileFilter; +import org.apache.commons.lang3.StringUtils; +import org.apache.commons.lang3.Validate; +import org.apache.http.client.methods.HttpPost; +import org.apache.http.entity.StringEntity; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.IOException; +import java.text.SimpleDateFormat; +import java.util.*; + +/** + * Created by jingxing on 14-8-24. + *

+ * job实例运行在jobContainer容器中,它是所有任务的master,负责初始化、拆分、调度、运行、回收、监控和汇报 + * 但它并不做实际的数据同步操作 + */ +public class JobContainer extends AbstractContainer { + private static final Logger LOG = LoggerFactory + .getLogger(JobContainer.class); + + private ClassLoaderSwapper classLoaderSwapper = ClassLoaderSwapper + .newCurrentThreadClassLoaderSwapper(); + + private long jobId; + + private String readerPluginName; + + private String[] writerPluginNames; + + /** + * reader和writer jobContainer的实例 + */ + private Reader.Job jobReader; + + private List jobWriters = new ArrayList<>(); + + private long startTimeStamp; + + private long endTimeStamp; + + private long startTransferTimeStamp; + + private long endTransferTimeStamp; + + private int needChannelNumber; + + private int totalStage = 1; + + private ErrorRecordChecker errorLimit; + + private TransportType transportType; + + private AbstractScheduler taskGroupScheduler; + + public JobContainer(Configuration configuration) { + super(configuration); + //Adjust configuration + adjustJobConfiguration(this.configuration); + this.transportType = TransportType.valueOf( + this.configuration.getString(CoreConstant.DATAX_JOB_SETTING_TRANSPORT_TYPE, "record").toUpperCase()); + errorLimit = new ErrorRecordChecker(configuration); + } + + /** + * jobContainer主要负责的工作全部在start()里面,包括init、prepare、split、scheduler、 + * post以及destroy和statistics + */ + @Override + public void start() { + LOG.info("DataX jobContainer starts job."); + + boolean hasException = false; + boolean isDryRun = false; + try { + this.startTimeStamp = System.currentTimeMillis(); + isDryRun = configuration.getBool(CoreConstant.DATAX_JOB_SETTING_DRYRUN, false); + if (isDryRun) { + LOG.info("jobContainer starts to do preCheck ..."); + this.preCheck(); + } else { +// userConf = configuration.clone(); + LOG.debug("jobContainer starts to do preHandle ..."); + this.preHandle(); + LOG.debug("jobContainer starts to do init ..."); + this.init(); + if(configuration.getBool(CoreConstant.DATAX_JOB_SETTING_SYNCMETA, false)){ + LOG.info("jobContainer starts to do syncMetaData ..."); + this.syncMetaData(); + } + LOG.info("jobContainer starts to do prepare ..."); + this.prepare(); + LOG.info("jobContainer starts to do split ..."); + this.totalStage = this.split(); + if(this.totalStage > 0) { + LOG.info("jobContainer starts to do schedule ..."); + this.schedule(); + } + LOG.debug("jobContainer starts to do post ..."); + this.post(); + + LOG.debug("jobContainer starts to do postHandle ..."); + this.postHandle(); + LOG.info("DataX jobId [{}] completed successfully.", this.jobId); + + this.invokeHooks(); + } + } catch (Throwable e) { + LOG.error("Exception when job run", e); + + hasException = true; + + if (e instanceof OutOfMemoryError) { + try { + this.destroy(); + }catch(Exception e1){ + //ignore + } + System.gc(); + } + + + if (super.getContainerCommunicator() == null) { + // 由于 containerCollector 是在 scheduler() 中初始化的,所以当在 scheduler() 之前出现异常时,需要在此处对 containerCollector 进行初始化 + + AbstractContainerCommunicator tempContainerCollector; + // standalone + tempContainerCollector = new StandAloneJobContainerCommunicator(configuration); + + super.setContainerCommunicator(tempContainerCollector); + } + + Communication communication = super.getContainerCommunicator().collect(); + // 汇报前的状态,不需要手动进行设置 + // communication.setState(State.FAILED); + communication.setThrowable(e); + communication.setTimestamp(this.endTimeStamp); + + Communication tempComm = new Communication(); + tempComm.setTimestamp(this.startTransferTimeStamp); + + 
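+                // Build a delta report from this failure snapshot and push it through the
+                // communicator before rethrowing the error as a framework RUNTIME_ERROR.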
Communication reportCommunication = CommunicationTool.getReportCommunication(communication, tempComm, this.totalStage); + super.getContainerCommunicator().report(reportCommunication); + + throw DataXException.asDataXException( + FrameworkErrorCode.RUNTIME_ERROR, e); + } finally { + if (!isDryRun) { + + this.destroy(); + this.endTimeStamp = System.currentTimeMillis(); + if (!hasException) { + //最后打印cpu的平均消耗,GC的统计 + VMInfo vmInfo = VMInfo.getVmInfo(); + if (vmInfo != null) { + vmInfo.getDelta(false); + LOG.info(vmInfo.totalString()); + } + + LOG.info(PerfTrace.getInstance().summarizeNoException()); + this.logStatistics(); + } + } + } + } + + @Override + public void shutdown() { + LOG.info("shutdown job container and clean the dirty data"); + if(null != taskGroupScheduler){ + taskGroupScheduler.stopSchedule(); + } + LOG.info("invoke destroy method"); + this.destroy(); + + } + + private void preCheck() { + this.preCheckInit(); + this.adjustChannelNumber(); + + if (this.needChannelNumber <= 0) { + this.needChannelNumber = 1; + } + this.preCheckReader(); + this.preCheckWriter(); + LOG.info("PreCheck通过"); + } + + private void preCheckInit() { + this.jobId = this.configuration.getLong( + CoreConstant.DATAX_CORE_CONTAINER_JOB_ID, -1); + + if (this.jobId < 0) { + LOG.info("Set jobId = 0"); + this.jobId = 0; + this.configuration.set(CoreConstant.DATAX_CORE_CONTAINER_JOB_ID, + this.jobId); + } + + Thread.currentThread().setName("job-" + this.jobId); + + JobPluginCollector jobPluginCollector = new DefaultJobPluginCollector( + this.getContainerCommunicator()); + this.jobReader = this.preCheckReaderInit(jobPluginCollector); + this.jobWriters = this.preCheckWriterInit(jobPluginCollector); + } + + private Reader.Job preCheckReaderInit(JobPluginCollector jobPluginCollector) { + this.readerPluginName = this.configuration.getString( + CoreConstant.DATAX_JOB_CONTENT_READER_NAME); + classLoaderSwapper.setCurrentThreadClassLoader(LoadUtil.getJarLoader( + PluginType.READER, this.readerPluginName)); + + Reader.Job jobReader = (Reader.Job) LoadUtil.loadJobPlugin( + PluginType.READER, this.readerPluginName); + // Set reader parameters in configuration + jobReader.setPluginJobConf(this.configuration.getConfiguration( + CoreConstant.DATAX_JOB_CONTENT_READER_PARAMETER)); + //Set writer parameters in configuration + List writerConfList = getJobWriterConfigList(this.configuration); + writerConfList.forEach(jobReader::addPeerPluginJobConf); + jobReader.setJobPluginCollector(jobPluginCollector); + classLoaderSwapper.restoreCurrentThreadClassLoader(); + return jobReader; + } + + + private List preCheckWriterInit(JobPluginCollector jobPluginCollector) { + List writerConfList = getJobWriterConfigList(this.configuration); + List writerJobs = new ArrayList<>(); + this.writerPluginNames = new String[writerConfList.size()]; + for(int i = 0; i < writerConfList.size(); i ++){ + Configuration writerConf = writerConfList.get(i); + this.writerPluginNames[i] = this.configuration.getString( + String.format(CoreConstant.DATAX_JOB_CONTENT_WRITER_ARRAY_NAME, i)); + classLoaderSwapper.setCurrentThreadClassLoader(LoadUtil.getJarLoader( + PluginType.WRITER, this.writerPluginNames[i] + )); + Writer.Job jobWriter = (Writer.Job) LoadUtil.loadJobPlugin( + PluginType.WRITER, this.writerPluginNames[i]); + //Set writer parameter in configuration + jobWriter.setPluginJobConf(writerConf); + //Set reader parameter in configuration + jobWriter.addPeerPluginJobConf(this.configuration.getConfiguration( + CoreConstant.DATAX_JOB_CONTENT_READER_PARAMETER)); 
+ //Set peer plugin name + jobWriter.addPeerPluginName(this.readerPluginName); + jobWriter.setJobPluginCollector(jobPluginCollector); + writerJobs.add(jobWriter); + classLoaderSwapper.restoreCurrentThreadClassLoader(); + } + //Remain JVM to collect garbage? + writerConfList.clear(); + return writerJobs; + } + + private void preCheckReader() { + classLoaderSwapper.setCurrentThreadClassLoader(LoadUtil.getJarLoader( + PluginType.READER, this.readerPluginName)); + LOG.info(String.format("DataX Reader.Job [%s] do preCheck work .", + this.readerPluginName)); + this.jobReader.preCheck(); + classLoaderSwapper.restoreCurrentThreadClassLoader(); + } + + private void preCheckWriter() { + LOG.info("DataX Writer.Jobs [" + StringUtils.join(this.writerPluginNames, ",") + "] do preCheck work"); + this.jobWriters.forEach( jobWriter -> { + classLoaderSwapper.setCurrentThreadClassLoader(LoadUtil.getJarLoader( + PluginType.WRITER, jobWriter.getPluginName())); + LOG.info("Writer.Job:[" + jobWriter.getPluginName() + "] start to pre check"); + jobWriter.preCheck(); + classLoaderSwapper.restoreCurrentThreadClassLoader(); + }); + } + + /** + * reader和writer的初始化 + */ + private void init() { + this.jobId = this.configuration.getLong( + CoreConstant.DATAX_CORE_CONTAINER_JOB_ID, -1); + + if (this.jobId < 0) { + LOG.info("Set jobId = 0"); + this.jobId = 0; + this.configuration.set(CoreConstant.DATAX_CORE_CONTAINER_JOB_ID, + this.jobId); + } + + Thread.currentThread().setName("job-" + this.jobId); + + JobPluginCollector jobPluginCollector = new DefaultJobPluginCollector( + this.getContainerCommunicator()); + //add shutdown hook to jvm in order to clear dirty data + Runtime.getRuntime().addShutdownHook(new Thread(this::shutdown)); + //必须先Reader ,后Writer + this.jobReader = this.initJobReader(jobPluginCollector); + this.jobWriters = this.initJobWriter(jobPluginCollector); + } + + /** + * Sync metadata + * Added by davidhua@webankcom + */ + private void syncMetaData(){ + classLoaderSwapper.setCurrentThreadClassLoader(LoadUtil + .getJarLoader(PluginType.READER, this.readerPluginName)); + LOG.info("Reader.Job [{}] do get meta schema", this.readerPluginName); + MetaSchema metaSchema = this.jobReader.syncMetaData(); + LOG.info("Meta schema: [{}]", GsonUtil.toJson(metaSchema)); + classLoaderSwapper.restoreCurrentThreadClassLoader(); + if(null != metaSchema) { + this.jobWriters.forEach(jobWriter -> { + classLoaderSwapper.setCurrentThreadClassLoader(LoadUtil + .getJarLoader(PluginType.WRITER, jobWriter.getPluginName())); + LOG.info("Writer.Job [{}] do sync meta schema", jobWriter.getPluginName()); + jobWriter.syncMetaData(metaSchema); + classLoaderSwapper.restoreCurrentThreadClassLoader(); + }); + } + + } + private void prepare() { + this.prepareJobReader(); + this.prepareJobWriter(); + } + + private void preHandle() { + String handlerPluginTypeStr = this.configuration.getString( + CoreConstant.DATAX_JOB_PREHANDLER_PLUGINTYPE); + if (!StringUtils.isNotEmpty(handlerPluginTypeStr)) { + return; + } + PluginType handlerPluginType; + try { + handlerPluginType = PluginType.valueOf(handlerPluginTypeStr.toUpperCase()); + } catch (IllegalArgumentException e) { + throw DataXException.asDataXException( + FrameworkErrorCode.CONFIG_ERROR, + String.format("Job preHandler's pluginType(%s) set error, reason(%s)", handlerPluginTypeStr.toUpperCase(), e.getMessage())); + } + + String handlerPluginName = this.configuration.getString( + CoreConstant.DATAX_JOB_PREHANDLER_PLUGINNAME); + + 
classLoaderSwapper.setCurrentThreadClassLoader(LoadUtil.getJarLoader( + handlerPluginType, handlerPluginName)); + + AbstractJobPlugin handler = LoadUtil.loadJobPlugin( + handlerPluginType, handlerPluginName); + + JobPluginCollector jobPluginCollector = new DefaultJobPluginCollector( + this.getContainerCommunicator()); + handler.setJobPluginCollector(jobPluginCollector); + + //todo configuration的安全性,将来必须保证 + handler.preHandler(configuration); + classLoaderSwapper.restoreCurrentThreadClassLoader(); + + LOG.info("After PreHandler: \n" + Engine.filterJobConfiguration(configuration) + "\n"); + } + + private void postHandle() { + String handlerPluginTypeStr = this.configuration.getString( + CoreConstant.DATAX_JOB_POSTHANDLER_PLUGINTYPE); + + if (!StringUtils.isNotEmpty(handlerPluginTypeStr)) { + return; + } + PluginType handlerPluginType; + try { + handlerPluginType = PluginType.valueOf(handlerPluginTypeStr.toUpperCase()); + } catch (IllegalArgumentException e) { + throw DataXException.asDataXException( + FrameworkErrorCode.CONFIG_ERROR, + String.format("Job postHandler's pluginType(%s) set error, reason(%s)", handlerPluginTypeStr.toUpperCase(), e.getMessage())); + } + + String handlerPluginName = this.configuration.getString( + CoreConstant.DATAX_JOB_POSTHANDLER_PLUGINNAME); + + classLoaderSwapper.setCurrentThreadClassLoader(LoadUtil.getJarLoader( + handlerPluginType, handlerPluginName)); + + AbstractJobPlugin handler = LoadUtil.loadJobPlugin( + handlerPluginType, handlerPluginName); + + JobPluginCollector jobPluginCollector = new DefaultJobPluginCollector( + this.getContainerCommunicator()); + handler.setJobPluginCollector(jobPluginCollector); + + handler.postHandler(configuration); + classLoaderSwapper.restoreCurrentThreadClassLoader(); + } + + + /** + * 执行reader和writer最细粒度的切分,需要注意的是,writer的切分结果要参照reader的切分结果, + * 达到切分后数目相等,才能满足1:1的通道模型,所以这里可以将reader和writer的配置整合到一起, + * 然后,为避免顺序给读写端带来长尾影响,将整合的结果shuffler掉 + */ + private int split() { + this.adjustChannelNumber(); + + if (this.needChannelNumber <= 0) { + this.needChannelNumber = 1; + } + + List readerTaskConfigs = this + .doReaderSplit(this.needChannelNumber); + if(readerTaskConfigs.isEmpty()){ + return 0; + } + int taskNumber = readerTaskConfigs.size(); + List writerTaskConfigs = this + .doWriterSplit(taskNumber); + //adjust the speed limitation of channel + if(taskNumber <= needChannelNumber){ + needChannelNumber = taskNumber; + adjustChannelSpeedByNumber(needChannelNumber); + } + LOG.info("Job final Channel-Number: [" + needChannelNumber + "]"); + //change the channel speed when channel speed * taskNumber + List transformerList = this.configuration.getListConfiguration(CoreConstant.DATAX_JOB_CONTENT_TRANSFORMER); + + LOG.debug("transformer configuration: " + Json.toJson(transformerList, null)); + //input: reader parameter list and writer task list(contain properties: parameter, name and processor) + //output: "content" array + List contentConfig = mergeReaderAndWriterTaskConfigs( + readerTaskConfigs, writerTaskConfigs, transformerList); + + + LOG.debug("contentConfig configuration: " + Json.toJson(contentConfig, null)); + + this.configuration.set(CoreConstant.DATAX_JOB_CONTENT, contentConfig); + + return contentConfig.size(); + } + + private void adjustChannelNumber() { + int needChannelNumberByByte = Integer.MAX_VALUE; + int needChannelNumberByRecord = Integer.MAX_VALUE; + + boolean isByteLimit = (this.configuration.getLong( + CoreConstant.DATAX_JOB_SETTING_SPEED_BYTE, 0) > 0); + if (isByteLimit) { + long globalLimitedByteSpeed = 
this.configuration.getLong( + CoreConstant.DATAX_JOB_SETTING_SPEED_BYTE, 10 * 1024L * 1024L); + + // 在byte流控情况下,单个Channel流量最大值必须设置,否则报错! + Long channelLimitedByteSpeed = this.configuration + .getLong(CoreConstant.DATAX_CORE_TRANSPORT_CHANNEL_SPEED_BYTE); + if (channelLimitedByteSpeed == null || channelLimitedByteSpeed <= 0) { + DataXException.asDataXException( + FrameworkErrorCode.CONFIG_ERROR, + "在有总bps限速条件下,单个channel的bps值不能为空,也不能为非正数"); + } + + needChannelNumberByByte = + (int) (globalLimitedByteSpeed / channelLimitedByteSpeed); + if(needChannelNumberByByte <= 0){ + needChannelNumberByByte = 1; + //globalLimitedByteSpeed < channelLimitedByteSpeed + this.configuration.set(CoreConstant.DATAX_CORE_TRANSPORT_CHANNEL_SPEED_BYTE, globalLimitedByteSpeed); + } + LOG.info("Job set Max-Byte-Speed to " + globalLimitedByteSpeed + " bytes."); + } + + boolean isRecordLimit = (this.configuration.getLong( + CoreConstant.DATAX_JOB_SETTING_SPEED_RECORD, 0)) > 0; + if (isRecordLimit) { + long globalLimitedRecordSpeed = this.configuration.getLong( + CoreConstant.DATAX_JOB_SETTING_SPEED_RECORD, 100000); + + Long channelLimitedRecordSpeed = this.configuration.getLong( + CoreConstant.DATAX_CORE_TRANSPORT_CHANNEL_SPEED_RECORD); + if (channelLimitedRecordSpeed == null || channelLimitedRecordSpeed <= 0) { + DataXException.asDataXException(FrameworkErrorCode.CONFIG_ERROR, + "在有总tps限速条件下,单个channel的tps值不能为空,也不能为非正数"); + } + + needChannelNumberByRecord = + (int) (globalLimitedRecordSpeed / channelLimitedRecordSpeed); + if(needChannelNumberByRecord <= 0){ + needChannelNumberByRecord = 1; + //globalLimitedRecordSpeed < channelLimitedRecordSpeed + this.configuration.set(CoreConstant.DATAX_CORE_TRANSPORT_CHANNEL_SPEED_RECORD, globalLimitedRecordSpeed); + } + LOG.info("Job set Max-Record-Speed to " + globalLimitedRecordSpeed + " records."); + } + + // 取较小值 + this.needChannelNumber = needChannelNumberByByte < needChannelNumberByRecord ? 
+ needChannelNumberByByte : needChannelNumberByRecord; + boolean isChannelLimit = (this.configuration.getLong( + CoreConstant.DATAX_JOB_SETTING_SPEED_CHANNEL, 0) > 0); + if (isChannelLimit) { + //limit the max number of channel + int maxChannelNumber = this.configuration.getInt( + CoreConstant.DATAX_JOB_SETTING_SPEED_CHANNEL); + if(this.needChannelNumber > maxChannelNumber){ + this.needChannelNumber = maxChannelNumber; + } + } + if(needChannelNumber >= Integer.MAX_VALUE) { + throw DataXException.asDataXException( + FrameworkErrorCode.CONFIG_ERROR, + "Job运行速度必须设置"); + } + adjustChannelSpeedByNumber(needChannelNumber); + } + + /** + * schedule首先完成的工作是把上一步reader和writer split的结果整合到具体taskGroupContainer中, + * 同时不同的执行模式调用不同的调度策略,将所有任务调度起来 + */ + private void schedule() { + /** + * 这里的全局speed和每个channel的速度设置为B/s + */ + int channelsPerTaskGroup = this.configuration.getInt( + CoreConstant.DATAX_CORE_CONTAINER_TASKGROUP_CHANNEL, 5); + int taskNumber = this.configuration.getList( + CoreConstant.DATAX_JOB_CONTENT).size(); + + this.needChannelNumber = Math.min(this.needChannelNumber, taskNumber); + PerfTrace.getInstance().setChannelNumber(needChannelNumber); + + /** + * 通过获取配置信息得到每个taskGroup需要运行哪些tasks任务 + */ + + List taskGroupConfigs = JobAssignUtil.assignFairly(this.configuration, + this.needChannelNumber, channelsPerTaskGroup); + + LOG.info("Scheduler starts [{}] taskGroups.", taskGroupConfigs.size()); + + ExecuteMode executeMode = null; + AbstractScheduler scheduler; + try { + executeMode = ExecuteMode.STANDALONE; + scheduler = initStandaloneScheduler(this.configuration); + //设置 executeMode 和 transportType + for (Configuration taskGroupConfig : taskGroupConfigs) { + taskGroupConfig.set(CoreConstant.DATAX_CORE_CONTAINER_JOB_MODE, executeMode.getValue()); + taskGroupConfig.set(CoreConstant.DATAX_CORE_TRANSPORT_TYPE, this.configuration.getString(CoreConstant.DATAX_JOB_SETTING_TRANSPORT_TYPE, "record")); + } + + LOG.info("Running by {} Mode.", executeMode); + + this.startTransferTimeStamp = System.currentTimeMillis(); + this.taskGroupScheduler = scheduler; + scheduler.schedule(taskGroupConfigs); + + this.endTransferTimeStamp = System.currentTimeMillis(); + } catch (Exception e) { + LOG.error("运行scheduler 模式[{}]出错.", executeMode); + this.endTransferTimeStamp = System.currentTimeMillis(); + throw DataXException.asDataXException( + FrameworkErrorCode.RUNTIME_ERROR, e); + } + + /** + * 检查任务执行情况 + */ + this.checkLimit(); + } + + + private AbstractScheduler initStandaloneScheduler(Configuration configuration) { + AbstractContainerCommunicator containerCommunicator = new StandAloneJobContainerCommunicator(configuration); + super.setContainerCommunicator(containerCommunicator); + + return new StandAloneScheduler(containerCommunicator); + } + + private void post() { + this.postJobWriter(); + this.postJobReader(); + } + + private void destroy() { + if (!this.jobWriters.isEmpty()) { + jobWriters.removeIf(jobWriter ->{ + classLoaderSwapper.setCurrentThreadClassLoader( + LoadUtil.getJarLoader(PluginType.WRITER, jobWriter.getPluginName()) + ); + jobWriter.destroy(); + classLoaderSwapper.restoreCurrentThreadClassLoader(); + return true; + }); + } + if (this.jobReader != null) { + classLoaderSwapper.setCurrentThreadClassLoader( + LoadUtil.getJarLoader(PluginType.READER, this.readerPluginName) + ); + this.jobReader.destroy(); + this.jobReader = null; + classLoaderSwapper.restoreCurrentThreadClassLoader(); + } + } + + private void logStatistics() { + long totalCosts = (this.endTimeStamp - this.startTimeStamp) / 1000; + 
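+        // Durations below are in seconds; transferCosts is clamped to 1s right after this
+        // so that the byte/record speed divisions can never divide by zero.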
long transferCosts = (this.endTransferTimeStamp - this.startTransferTimeStamp) / 1000; + if (0L == transferCosts) { + transferCosts = 1L; + } + + if (super.getContainerCommunicator() == null) { + return; + } + Communication communication = super.getContainerCommunicator().collect(); + communication.setTimestamp(this.endTimeStamp); + + Communication tempComm = new Communication(); + tempComm.setTimestamp(this.startTransferTimeStamp); + + Communication reportCommunication = CommunicationTool.getReportCommunication(communication, tempComm, this.totalStage); + + // 字节速率 + long byteSpeedPerSecond = communication.getLongCounter(CommunicationTool.READ_SUCCEED_BYTES) + / transferCosts; + + long recordSpeedPerSecond = communication.getLongCounter(CommunicationTool.READ_SUCCEED_RECORDS) + / transferCosts; + + reportCommunication.setLongCounter(CommunicationTool.BYTE_SPEED, byteSpeedPerSecond); + reportCommunication.setLongCounter(CommunicationTool.RECORD_SPEED, recordSpeedPerSecond); + + super.getContainerCommunicator().report(reportCommunication); + + SimpleDateFormat dateFormat = new SimpleDateFormat( + "yyyy-MM-dd HH:mm:ss"); + String msg = String.format( + "\n" + "%-26s: %-18s\n" + "%-26s: %-18s\n" + "%-26s: %19s\n" + + "%-26s: %19s\n" + "%-26s: %19s\n" + "%-26s: %19s\n" + + "%-26s: %19s\n", + "任务启动时刻", + dateFormat.format(startTimeStamp), + + "任务结束时刻", + dateFormat.format(endTimeStamp), + + "任务总计耗时", + String.valueOf(totalCosts) + "s", + "任务平均流量", + StrUtil.stringify(byteSpeedPerSecond) + + "/s", + "记录写入速度", + String.valueOf(recordSpeedPerSecond) + + "rec/s", + transportType == TransportType.RECORD?"读出记录总数":"读出数据总数", + transportType == TransportType.RECORD? + String.valueOf(CommunicationTool.getTotalReadRecords(communication)) : + String.valueOf(CommunicationTool.getTotalReadBytes(communication)) +"(Bytes)", + "读写失败总数", + transportType == TransportType.RECORD? 
+ String.valueOf(CommunicationTool.getTotalErrorRecords(communication)):"" + ); + LOG.info(msg); + + if (communication.getLongCounter(CommunicationTool.TRANSFORMER_SUCCEED_RECORDS) > 0 + || communication.getLongCounter(CommunicationTool.TRANSFORMER_FAILED_RECORDS) > 0 + || communication.getLongCounter(CommunicationTool.TRANSFORMER_FILTER_RECORDS) > 0) { + String tmsg = String.format( + "\n" + "%-26s: %19s\n" + "%-26s: %19s\n" + "%-26s: %19s\n", + "Transformer成功记录总数", + communication.getLongCounter(CommunicationTool.TRANSFORMER_SUCCEED_RECORDS), + + "Transformer失败记录总数", + communication.getLongCounter(CommunicationTool.TRANSFORMER_FAILED_RECORDS), + + "Transformer过滤记录总数", + communication.getLongCounter(CommunicationTool.TRANSFORMER_FILTER_RECORDS) + ); + LOG.info(tmsg); + } + + //report to server + if (StringUtils.isNotBlank(configuration.getString(CoreConstant.DATAX_CORE_DATAXSERVER_ENDPOINT_REPORT))) { + try { + HttpClientUtil httpClientUtil = HttpClientUtil.getHttpClientUtil(); + Map report = new HashMap<>(10); + report.put("id", configuration.getLong(CoreConstant.DATAX_CORE_CONTAINER_JOB_ID)); + report.put("byteSpeedPerSecond", byteSpeedPerSecond); + report.put("recordSpeedPerSecond", recordSpeedPerSecond); + report.put("totalCosts", totalCosts); + report.put("totalErrorRecords", CommunicationTool.getTotalErrorRecords(communication)); + report.put("totalReadRecords", CommunicationTool.getTotalReadRecords(communication)); + report.put("totalReadBytes", CommunicationTool.getTotalReadBytes(communication)); + report.put("transformerFailedRecords", communication.getLongCounter(CommunicationTool.TRANSFORMER_FAILED_RECORDS)); + report.put("transformerFilterRecords", communication.getLongCounter(CommunicationTool.TRANSFORMER_FILTER_RECORDS)); + report.put("transformerTotalRecords", communication.getLongCounter(CommunicationTool.TRANSFORMER_SUCCEED_RECORDS)); + StringEntity entity = new StringEntity(Json.toJson(report, null)); + entity.setContentEncoding("UTF-8"); + entity.setContentType("application/json"); + HttpPost post = HttpClientUtil.getPostRequest(configuration.getString(CoreConstant.DATAX_CORE_DATAXSERVER_PROTOCOL) + + "://" + configuration.getString(CoreConstant.DATAX_CORE_DATAXSERVER_ADDRESS) + + configuration.getString(CoreConstant.DATAX_CORE_DATAXSERVER_ENDPOINT_REPORT), + entity, + "Content-Type", "application/json;charset=UTF-8"); + String response = httpClientUtil.executeAndGet(post, String.class); + LOG.info("Send report respone,{}", response); + } catch (Exception e) { + LOG.error("Post report error", e); + } + } + } + + /** + * reader job的初始化,返回Reader.Job + * + * @return + */ + private Reader.Job initJobReader( + JobPluginCollector jobPluginCollector) { + this.readerPluginName = this.configuration.getString( + CoreConstant.DATAX_JOB_CONTENT_READER_NAME); + classLoaderSwapper.setCurrentThreadClassLoader(LoadUtil.getJarLoader( + PluginType.READER, this.readerPluginName)); + + Reader.Job jobReader = (Reader.Job) LoadUtil.loadJobPlugin( + PluginType.READER, this.readerPluginName); + if(this.transportType == TransportType.STREAM && !jobReader.isSupportStream()){ + throw DataXException.asDataXException(FrameworkErrorCode.CONFIG_ERROR, + this.readerPluginName + " don't support transport type STREAM"); + } + jobReader.setTransportType(this.transportType); + // Set reader parameters in configuration + jobReader.setPluginJobConf(this.configuration.getConfiguration( + CoreConstant.DATAX_JOB_CONTENT_READER_PARAMETER)); + //Set writer parameters in configuration + List writerConfList = 
getJobWriterConfigList(this.configuration); + writerConfList.forEach(jobReader::addPeerPluginJobConf); + jobReader.setJobPluginCollector(jobPluginCollector); + jobReader.init(); + classLoaderSwapper.restoreCurrentThreadClassLoader(); + return jobReader; + } + + /** + * writer job的初始化,返回Writer.Job + * + * @return + */ + private List initJobWriter( + JobPluginCollector jobPluginCollector) { + List writerConfList = getJobWriterConfigList(this.configuration); + List writerJobs = new ArrayList<>(); + this.writerPluginNames = new String[writerConfList.size()]; + for(int i = 0; i < writerConfList.size(); i++){ + Configuration writerConf = writerConfList.get(i); + this.writerPluginNames[i] = this.configuration.getString( + String.format(CoreConstant.DATAX_JOB_CONTENT_WRITER_ARRAY_NAME, i)); + classLoaderSwapper.setCurrentThreadClassLoader(LoadUtil.getJarLoader( + PluginType.WRITER, this.writerPluginNames[i] + )); + //Load processor before the writer plugin initialization + //TODO Use plugin name as namespace + List processors = doLoadProcessor(""); + Writer.Job jobWriter = (Writer.Job) LoadUtil.loadJobPlugin( + PluginType.WRITER, this.writerPluginNames[i]); + if(this.transportType == TransportType.STREAM && !jobWriter.isSupportStream()){ + throw DataXException.asDataXException(FrameworkErrorCode.CONFIG_ERROR, + this.writerPluginNames[i] + " don't support transport type STREAM"); + } + jobWriter.setTransportType(transportType); + //Set writer parameter in configuration + jobWriter.setPluginJobConf(writerConf); + //Set reader parameter in configuration + jobWriter.addPeerPluginJobConf(this.configuration.getConfiguration( + CoreConstant.DATAX_JOB_CONTENT_READER_PARAMETER)); + //Set peer plugin name + jobWriter.addPeerPluginName(this.readerPluginName); + jobWriter.setJobPluginCollector(jobPluginCollector); + if(!processors.isEmpty()){ + //Choose the first one + jobWriter.setProcessor(processors.get(0)); + } + jobWriter.init(); + writerJobs.add(jobWriter); + classLoaderSwapper.restoreCurrentThreadClassLoader(); + } + //Remain JVM to collect garbage? 
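+        // (Presumably: clear the temporary writer configuration list so it becomes
+        // eligible for garbage collection; each writer job already holds its own copy.)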
+ writerConfList.clear(); + return writerJobs; + } + + private void prepareJobReader() { + classLoaderSwapper.setCurrentThreadClassLoader(LoadUtil.getJarLoader( + PluginType.READER, this.readerPluginName)); + LOG.info(String.format("DataX Reader.Job [%s] do prepare work .", + this.readerPluginName)); + this.jobReader.prepare(); + classLoaderSwapper.restoreCurrentThreadClassLoader(); + } + + private void prepareJobWriter() { + this.jobWriters.forEach(jobWriter -> { + classLoaderSwapper.setCurrentThreadClassLoader(LoadUtil + .getJarLoader(PluginType.WRITER, jobWriter.getPluginName())); + LOG.info(String.format("DataX Writer.Job [%s] do prepare work .", + jobWriter.getPluginName())); + jobWriter.prepare(); + classLoaderSwapper.restoreCurrentThreadClassLoader(); + }); + } + + private List doReaderSplit(int adviceNumber) { + classLoaderSwapper.setCurrentThreadClassLoader(LoadUtil.getJarLoader( + PluginType.READER, this.readerPluginName)); + List readerTaskConfigs = new ArrayList<>(); + List readerSlicesConfigs = + this.jobReader.split(adviceNumber); + LOG.info("DataX Reader.Job [{}] splits to [{}] tasks.", + this.readerPluginName, readerSlicesConfigs.size()); + //Wrap as task configuration + readerSlicesConfigs.forEach( readerSlices -> { + Configuration readerTaskConfig = Configuration.newDefault(); + readerTaskConfig.set(CoreConstant.TASK_NAME, this.readerPluginName); + readerTaskConfig.set(CoreConstant.TASK_PARAMETER, readerSlices); + readerTaskConfigs.add(readerTaskConfig); + }); + classLoaderSwapper.restoreCurrentThreadClassLoader(); + return readerTaskConfigs; + } + + private List doWriterSplit(int readerTaskNumber) { + List writerTaskConfigs = new ArrayList<>(); + int[] counter = new int[readerTaskNumber]; + this.jobWriters.forEach(jobWriter ->{ + classLoaderSwapper.setCurrentThreadClassLoader(LoadUtil.getJarLoader( + PluginType.WRITER, jobWriter.getPluginName())); + List writerSlicesConfigs = jobWriter.split(readerTaskNumber); + if (writerSlicesConfigs == null || writerSlicesConfigs.size() <= 0) { + throw DataXException.asDataXException( + FrameworkErrorCode.PLUGIN_SPLIT_ERROR, + "writer切分的task不能小于等于0"); + } + if (writerSlicesConfigs.size() != readerTaskNumber) { + throw DataXException.asDataXException( + FrameworkErrorCode.PLUGIN_SPLIT_ERROR, + String.format("reader切分的task数目[%d]不等于writer切分的task数目[%d].", + readerTaskNumber, writerSlicesConfigs.size()) + ); + } + for(int i = 0 ; i < writerSlicesConfigs.size(); i++){ + if(i >= writerTaskConfigs.size()){ + writerTaskConfigs.add(Configuration.from("[]")); + } + Configuration writerTaskConfig = writerTaskConfigs.get(i); + Configuration writerSlicesConfig = writerSlicesConfigs.get(i); + Configuration taskConfigElement = Configuration.newDefault(); + //Build writer task configuration + taskConfigElement.set(CoreConstant.TASK_NAME, jobWriter.getPluginName()); + taskConfigElement.set(CoreConstant.TASK_PARAMETER, writerSlicesConfig); + taskConfigElement.set(CoreConstant.TASK_PROCESSOR, jobWriter.getProcessors()); + writerTaskConfig.set("[" + counter[i] + "]", taskConfigElement); + counter[i] ++; + } + LOG.info("DataX Writer.Job [{}] splits to [{}] tasks.", + jobWriter.getPluginName(), writerSlicesConfigs.size()); + classLoaderSwapper.restoreCurrentThreadClassLoader(); + }); + return writerTaskConfigs; + } + + /** + * 按顺序整合reader和writer的配置,这里的顺序不能乱! 
输入是reader、writer级别的配置,输出是一个完整task的配置 + * + * @param readerTasksConfigs + * @param writerTasksConfigs + * @return + */ + private List mergeReaderAndWriterTaskConfigs( + List readerTasksConfigs, + List writerTasksConfigs) { + return mergeReaderAndWriterTaskConfigs(readerTasksConfigs, writerTasksConfigs, null); + } + + private List mergeReaderAndWriterTaskConfigs( + List readerTasksConfigs, + List writerTasksConfigs, + List transformerConfigs) { + + + List contentConfigs = new ArrayList<>(); + for (int i = 0; i < readerTasksConfigs.size(); i++) { + Configuration taskConfig = Configuration.newDefault(); + taskConfig.set(CoreConstant.JOB_READER, readerTasksConfigs.get(i)); + taskConfig.set(CoreConstant.JOB_WRITER, writerTasksConfigs.get(i)); + if (transformerConfigs != null && transformerConfigs.size() > 0) { + taskConfig.set(CoreConstant.JOB_TRANSFORMER, transformerConfigs); + } + + taskConfig.set(CoreConstant.TASK_ID, i); + contentConfigs.add(taskConfig); + } + + return contentConfigs; + } + + /** + * 这里比较复杂,分两步整合 1. tasks到channel 2. channel到taskGroup + * 合起来考虑,其实就是把tasks整合到taskGroup中,需要满足计算出的channel数,同时不能多起channel + *

+     * example:
+     *
+     * Premise: the split produced 1024 table shards, the user asks for a total
+     * rate of 1000M/s with 3M/s per channel,
+     * and each taskGroup runs 7 channels.
+     *
+     * Calculation: total channels = 1000M/s / 3M/s =
+     * 333. Spreading tasks evenly, 308 channels carry 3 tasks each and 25 channels carry 4 tasks each.
+     * Required taskGroups = 333 / 7 =
+     * 47 remainder 4, i.e. 48 taskGroups: 47 of them run 7 channels each and one runs the remaining 4 channels.
+     *
+     * Handling: first dispose of the taskGroup responsible for the 4 leftover channels:
+     * pick 4 channels at roughly 3 tasks each and give them taskGroupId 0,
+     * then deal the remaining tasks round-robin, like dealing cards, into the taskGroups that hold the average channel count.
+     *
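+     * A compact sketch of the same arithmetic, using the variable names of the
+     * method below (illustrative only):
+     * <pre>
+     *   int taskGroupNumber = channelNumber / channelsPerTaskGroup;    // 333 / 7 = 47
+     *   int leftChannelNumber = channelNumber % channelsPerTaskGroup;  // 333 % 7 = 4
+     *   if (leftChannelNumber > 0) { taskGroupNumber += 1; }           // 48 taskGroups
+     * </pre>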

+ * TODO delete it + * + * @param averTaskPerChannel + * @param channelNumber + * @param channelsPerTaskGroup + * @return 每个taskGroup独立的全部配置 + */ + @SuppressWarnings("serial") + private List distributeTasksToTaskGroup( + int averTaskPerChannel, int channelNumber, + int channelsPerTaskGroup) { + Validate.isTrue(averTaskPerChannel > 0 && channelNumber > 0 + && channelsPerTaskGroup > 0, + "每个channel的平均task数[averTaskPerChannel],channel数目[channelNumber],每个taskGroup的平均channel数[channelsPerTaskGroup]都应该为正数"); + List taskConfigs = this.configuration + .getListConfiguration(CoreConstant.DATAX_JOB_CONTENT); + int taskGroupNumber = channelNumber / channelsPerTaskGroup; + int leftChannelNumber = channelNumber % channelsPerTaskGroup; + if (leftChannelNumber > 0) { + taskGroupNumber += 1; + } + + /** + * 如果只有一个taskGroup,直接打标返回 + */ + if (taskGroupNumber == 1) { + final Configuration taskGroupConfig = this.configuration.clone(); + /** + * configure的clone不能clone出 + */ + taskGroupConfig.set(CoreConstant.DATAX_JOB_CONTENT, this.configuration + .getListConfiguration(CoreConstant.DATAX_JOB_CONTENT)); + taskGroupConfig.set(CoreConstant.DATAX_CORE_CONTAINER_TASKGROUP_CHANNEL, + channelNumber); + taskGroupConfig.set(CoreConstant.DATAX_CORE_CONTAINER_TASKGROUP_ID, 0); + List result = new ArrayList<>(); + result.add(taskGroupConfig); + return result; + } + + List taskGroupConfigs = new ArrayList(); + /** + * 将每个taskGroup中content的配置清空 + */ + for (int i = 0; i < taskGroupNumber; i++) { + Configuration taskGroupConfig = this.configuration.clone(); + List taskGroupJobContent = taskGroupConfig + .getListConfiguration(CoreConstant.DATAX_JOB_CONTENT); + taskGroupJobContent.clear(); + taskGroupConfig.set(CoreConstant.DATAX_JOB_CONTENT, taskGroupJobContent); + + taskGroupConfigs.add(taskGroupConfig); + } + + int taskConfigIndex = 0; + int channelIndex = 0; + int taskGroupConfigIndex = 0; + + /** + * 先处理掉taskGroup包含channel数不是平均值的taskGroup + */ + if (leftChannelNumber > 0) { + Configuration taskGroupConfig = taskGroupConfigs.get(taskGroupConfigIndex); + for (; channelIndex < leftChannelNumber; channelIndex++) { + for (int i = 0; i < averTaskPerChannel; i++) { + List taskGroupJobContent = taskGroupConfig + .getListConfiguration(CoreConstant.DATAX_JOB_CONTENT); + taskGroupJobContent.add(taskConfigs.get(taskConfigIndex++)); + taskGroupConfig.set(CoreConstant.DATAX_JOB_CONTENT, + taskGroupJobContent); + } + } + + taskGroupConfig.set(CoreConstant.DATAX_CORE_CONTAINER_TASKGROUP_CHANNEL, + leftChannelNumber); + taskGroupConfig.set(CoreConstant.DATAX_CORE_CONTAINER_TASKGROUP_ID, + taskGroupConfigIndex++); + } + + /** + * 下面需要轮询分配,并打上channel数和taskGroupId标记 + */ + int equalDivisionStartIndex = taskGroupConfigIndex; + for (; taskConfigIndex < taskConfigs.size() + && equalDivisionStartIndex < taskGroupConfigs.size(); ) { + for (taskGroupConfigIndex = equalDivisionStartIndex; taskGroupConfigIndex < taskGroupConfigs + .size() && taskConfigIndex < taskConfigs.size(); taskGroupConfigIndex++) { + Configuration taskGroupConfig = taskGroupConfigs.get(taskGroupConfigIndex); + List taskGroupJobContent = taskGroupConfig + .getListConfiguration(CoreConstant.DATAX_JOB_CONTENT); + taskGroupJobContent.add(taskConfigs.get(taskConfigIndex++)); + taskGroupConfig.set( + CoreConstant.DATAX_JOB_CONTENT, taskGroupJobContent); + } + } + + for (taskGroupConfigIndex = equalDivisionStartIndex; + taskGroupConfigIndex < taskGroupConfigs.size(); ) { + Configuration taskGroupConfig = taskGroupConfigs.get(taskGroupConfigIndex); + 
taskGroupConfig.set(CoreConstant.DATAX_CORE_CONTAINER_TASKGROUP_CHANNEL, + channelsPerTaskGroup); + taskGroupConfig.set(CoreConstant.DATAX_CORE_CONTAINER_TASKGROUP_ID, + taskGroupConfigIndex++); + } + + return taskGroupConfigs; + } + + private void postJobReader() { + classLoaderSwapper.setCurrentThreadClassLoader(LoadUtil.getJarLoader( + PluginType.READER, this.readerPluginName)); + LOG.info("DataX Reader.Job [{}] do post work.", + this.readerPluginName); + this.jobReader.post(); + classLoaderSwapper.restoreCurrentThreadClassLoader(); + } + + private void postJobWriter() { + this.jobWriters.forEach( jobWriter -> { + classLoaderSwapper.setCurrentThreadClassLoader(LoadUtil.getJarLoader( + PluginType.WRITER, jobWriter.getPluginName())); + LOG.info("DataX Writer.Job [{}] do post work.", + jobWriter.getPluginName()); + jobWriter.post(); + classLoaderSwapper.restoreCurrentThreadClassLoader(); + }); + } + + /** + * 检查最终结果是否超出阈值,如果阈值设定小于1,则表示百分数阈值,大于1表示条数阈值。 + * + * @param + */ + private void checkLimit() { + Communication communication = super.getContainerCommunicator().collect(); + errorLimit.checkRecordLimit(communication); + errorLimit.checkPercentageLimit(communication); + } + + /** + * 调用外部hook + */ + private void invokeHooks() { + AbstractContainerCommunicator tempContainerCollector = super.getContainerCommunicator(); + if (tempContainerCollector == null) { + tempContainerCollector = new StandAloneJobContainerCommunicator(configuration); + super.setContainerCommunicator(tempContainerCollector); + } + Communication comm = tempContainerCollector.collect(); + HookInvoker invoker = new HookInvoker(CoreConstant.DATAX_HOME + "/hook", configuration, comm.getCounter()); + invoker.invokeAll(); + } + + /** + * Adjust channel speed by channel number + * Added by davidhua@webank.com + * @param channelNumber channel number + */ + private void adjustChannelSpeedByNumber(int channelNumber){ + long globalLimitedByteSpeed = this.configuration.getLong( + CoreConstant.DATAX_JOB_SETTING_SPEED_BYTE, 0); + if(globalLimitedByteSpeed > 0){ + Long channelLimitedByteSpeed = this.configuration + .getLong(CoreConstant.DATAX_CORE_TRANSPORT_CHANNEL_SPEED_BYTE); + if(channelLimitedByteSpeed * channelNumber < globalLimitedByteSpeed){ + this.configuration.set(CoreConstant.DATAX_CORE_TRANSPORT_CHANNEL_SPEED_BYTE, + globalLimitedByteSpeed / channelNumber); + } + } + long globalLimitedRecordSpeed = this.configuration.getLong( + CoreConstant.DATAX_JOB_SETTING_SPEED_RECORD, 100000); + if(globalLimitedRecordSpeed > 0){ + Long channelLimitedRecordSpeed = this.configuration.getLong( + CoreConstant.DATAX_CORE_TRANSPORT_CHANNEL_SPEED_RECORD); + if(channelLimitedRecordSpeed * channelNumber < globalLimitedRecordSpeed){ + this.configuration.set(CoreConstant.DATAX_CORE_TRANSPORT_CHANNEL_SPEED_RECORD, + globalLimitedRecordSpeed / channelNumber); + } + } + } + + /** + * Get Job.Writer configuration list + * Added by davidhua@webank.com + * @param configuration root configuration + * @return list + */ + private List getJobWriterConfigList(Configuration configuration){ + List writerConfList = new ArrayList<>(); + //Try to get writer list + List writerList = configuration.getList(CoreConstant.DATAX_JOB_CONTENT_WRITER); + for(int i = 0; i < writerList.size(); i++){ + writerConfList.add(this.configuration.getConfiguration( + String.format(CoreConstant.DATAX_JOB_CONTENT_WRITER_ARRAY_PARAMETER, i))); + } + return writerConfList; + } + + /** + * Adjust job configuration to general structure + * Added by davidhua@webank.com + * @param 
configuration job configuration + */ + private void adjustJobConfiguration(Configuration configuration){ + //Change structure of 'CoreConstant.DATAX_JOB_CONTENT_WRITER' to list + try { + configuration.getList(CoreConstant.DATAX_JOB_CONTENT_WRITER); + }catch(Exception e){ + //Means that "CoreConstant.DATAX_JOB_CONTENT_WRITER" is not a list + Configuration emptyListConf = Configuration.from("[]"); + emptyListConf.set("[0]", configuration.getConfiguration(CoreConstant.DATAX_JOB_CONTENT_WRITER)); + configuration.set(CoreConstant.DATAX_JOB_CONTENT_WRITER, emptyListConf); + } + } + /** + * Load processor + * Added by davidhua@webank.com + * @param namespace space + * @return + */ + private List doLoadProcessor(String namespace){ + List processors = new ArrayList<>(); + PluginProcessorLoader processorLoader = ClassUtil.instantiate(configuration.getString( + CoreConstant.DATAX_CORE_PROCESSOR_LOADER_PLUGIN_CLASS + ), PluginProcessorLoader.class); + String sourcePath = configuration.getString(CoreConstant.DATAX_CORE_PROCESSOR_LODAER_PLUGIN_SOURCEPATH) + + namespace; + String packageName = configuration.getString(CoreConstant.DATAX_CORE_PROCESSOR_LOADER_PLUGIN_PACKAGE); + LOG.info("Loading processors, sourcePath: [" + sourcePath + "]"); + if(new File(sourcePath).exists()) { + Collection javaSourceFiles = FileUtils.listFiles(new File(sourcePath), FileFileFilter.FILE, FileFileFilter.FILE); + javaSourceFiles.forEach(javaSourceFile -> { + try { + String javaCode = FileUtils.readFileToString(javaSourceFile); + String javaFileName = JavaSrcUtils.parseJavaFileName(javaCode); + if (StringUtils.isBlank(javaFileName)) { + throw DataXException.asDataXException(FrameworkErrorCode.PROCESSOR_LOAD_ERROR, + "Error in loading processor [" + javaSourceFile.getName() + "], cannot find class Name"); + } + javaCode = JavaSrcUtils.addPackageName(javaCode, packageName); + String fullClassName = packageName + "." + javaFileName; + boolean result = processorLoader.load(fullClassName, javaCode); + if (!result) { + throw DataXException.asDataXException(FrameworkErrorCode.PROCESSOR_LOAD_ERROR, + "Loading processor [" + javaSourceFile.getName() + "] failed"); + } else { + processors.add(fullClassName); + } + } catch (IOException e) { + throw DataXException.asDataXException(FrameworkErrorCode.PROCESSOR_LOAD_ERROR, e); + } + }); + } + LOG.info("Loading processors finished, " + Json.toJson(processors, null)); + return processors; + } + +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/job/meta/ExecuteMode.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/job/meta/ExecuteMode.java new file mode 100644 index 000000000..6a608996b --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/job/meta/ExecuteMode.java @@ -0,0 +1,22 @@ +package com.alibaba.datax.core.job.meta; + +/** + * Created by liupeng on 15/12/21. 
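+ * Execution modes known to this engine; only standalone is defined in this build.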
+ */ +public enum ExecuteMode { + STANDALONE("standalone"),; + + String value; + + private ExecuteMode(String value) { + this.value = value; + } + + public String value() { + return this.value; + } + + public String getValue() { + return this.value; + } +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/job/meta/State.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/job/meta/State.java new file mode 100644 index 000000000..73c8071a0 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/job/meta/State.java @@ -0,0 +1,32 @@ +package com.alibaba.datax.core.job.meta; + +/** + * Created by liupeng on 15/12/21. + */ +public enum State { + SUBMITTING(10), + WAITING(20), + RUNNING(30), + KILLING(40), + KILLED(50), + FAILED(60), + SUCCEEDED(70),; + + int value; + + private State(int value) { + this.value = value; + } + + public int value() { + return this.value; + } + + public boolean isFinished() { + return this == KILLED || this == FAILED || this == SUCCEEDED; + } + + public boolean isRunning() { + return !this.isFinished(); + } +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/job/scheduler/AbstractScheduler.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/job/scheduler/AbstractScheduler.java new file mode 100644 index 000000000..39db68f59 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/job/scheduler/AbstractScheduler.java @@ -0,0 +1,188 @@ +package com.alibaba.datax.core.job.scheduler; + +import com.alibaba.datax.common.exception.DataXException; +import com.alibaba.datax.common.util.Configuration; +import com.webank.wedatasphere.exchangis.datax.core.job.scheduler.speed.DefaultVariableTaskGroupSpeedStrategy; +import com.webank.wedatasphere.exchangis.datax.core.job.scheduler.speed.VariableTaskGroupSpeedStrategy; +import com.alibaba.datax.core.statistics.communication.Communication; +import com.alibaba.datax.core.statistics.communication.CommunicationTool; +import com.alibaba.datax.core.statistics.container.communicator.AbstractContainerCommunicator; +import com.alibaba.datax.core.util.ErrorRecordChecker; +import com.alibaba.datax.core.util.FrameworkErrorCode; +import com.alibaba.datax.core.util.container.CoreConstant; +import com.alibaba.datax.dataxservice.face.domain.enums.State; +import org.apache.commons.lang.Validate; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.List; + +public abstract class AbstractScheduler { + private static final Logger LOG = LoggerFactory + .getLogger(AbstractScheduler.class); + + private ErrorRecordChecker errorLimit; + + private AbstractContainerCommunicator containerCommunicator; + + private VariableTaskGroupSpeedStrategy variableTaskGroupSpeedStrategy; + + private Long jobId; + + private boolean scheduable; + + public Long getJobId() { + return jobId; + } + + public AbstractScheduler(AbstractContainerCommunicator containerCommunicator) { + this(containerCommunicator, new DefaultVariableTaskGroupSpeedStrategy()); + } + + public AbstractScheduler(AbstractContainerCommunicator containerCommunicator, + VariableTaskGroupSpeedStrategy variableTaskGroupSpeedStrategy){ + this.containerCommunicator = containerCommunicator; + this.variableTaskGroupSpeedStrategy = variableTaskGroupSpeedStrategy; + } + + public void schedule(List configurations) { + Validate.notNull(configurations, + 
"scheduler配置不能为空"); + int jobReportIntervalInMillSec = configurations.get(0).getInt( + CoreConstant.DATAX_CORE_CONTAINER_JOB_REPORTINTERVAL, 30000); + int jobSleepIntervalInMillSec = configurations.get(0).getInt( + CoreConstant.DATAX_CORE_CONTAINER_JOB_SLEEPINTERVAL, 10000); + + this.jobId = configurations.get(0).getLong( + CoreConstant.DATAX_CORE_CONTAINER_JOB_ID); + + errorLimit = new ErrorRecordChecker(configurations.get(0)); + + /** + * 给 taskGroupContainer 的 Communication 注册 + */ + this.containerCommunicator.registerCommunication(configurations); + + int totalTasks = calculateTaskCount(configurations); + startAllTaskGroup(configurations); + + Communication lastJobContainerCommunication = new Communication(); + + long lastReportTimeStamp = System.currentTimeMillis(); + scheduable = true; + try { + while (scheduable) { + /* + step 1: collect job stat + step 2: getReport info, then report it + step 3: errorLimit do check + step 4: dealSucceedStat(); + step 5: dealKillingStat(); + step 6: dealFailedStat(); + step 7: refresh last job stat, and then sleep for next while + + above steps, some ones should report info to DS + + */ + Communication nowJobContainerCommunication = this.containerCommunicator.collect(); + nowJobContainerCommunication.setTimestamp(System.currentTimeMillis()); + LOG.debug(nowJobContainerCommunication.toString()); + //reporting cycle + long now = System.currentTimeMillis(); + if (now - lastReportTimeStamp > jobReportIntervalInMillSec) { + Communication reportCommunication = CommunicationTool + .getReportCommunication(nowJobContainerCommunication, lastJobContainerCommunication, totalTasks); + + this.containerCommunicator.report(reportCommunication); + lastReportTimeStamp = now; + lastJobContainerCommunication = nowJobContainerCommunication; + } + + errorLimit.checkRecordLimit(nowJobContainerCommunication); + + if (nowJobContainerCommunication.getState() == State.SUCCEEDED) { + LOG.info("Scheduler accomplished all tasks."); + break; + } + + if (isJobKilling(this.getJobId())) { + dealKillingStat(this.containerCommunicator, totalTasks); + } else if (nowJobContainerCommunication.getState() == State.FAILED) { + dealFailedStat(this.containerCommunicator, nowJobContainerCommunication.getThrowable()); + } + Configuration configuration = this.containerCommunicator.getConfiguration(); + boolean adjust = variableTaskGroupSpeedStrategy.adjustSpeed(nowJobContainerCommunication, configuration); + if(adjust){ + adjustTaskGroupSpeed(configuration.getLong(CoreConstant.DATAX_CORE_TRANSPORT_CHANNEL_SPEED_BYTE, 0), + configuration.getLong(CoreConstant.DATAX_CORE_TRANSPORT_CHANNEL_SPEED_RECORD, 0)); + } + Thread.sleep(jobSleepIntervalInMillSec); + } + } catch (InterruptedException e) { + LOG.error("Catch InterruptedException exception", e); + Thread.currentThread().interrupt(); + throw DataXException.asDataXException( + FrameworkErrorCode.RUNTIME_ERROR, e); + } + + } + + /** + * stop schedule + */ + public void stopSchedule(){ + scheduable = false; + cancelAllTaskGroup(); + } + + /** + * start all task groups + * @param configurations + */ + protected abstract void startAllTaskGroup(List configurations); + + /** + * cancel all task groups + */ + protected abstract void cancelAllTaskGroup(); + + /** + * deal the fail status + * @param frameworkCollector + * @param throwable + */ + protected abstract void dealFailedStat(AbstractContainerCommunicator frameworkCollector, Throwable throwable); + + /** + * deal the kill status + * @param frameworkCollector + * @param totalTasks + */ + protected 
abstract void dealKillingStat(AbstractContainerCommunicator frameworkCollector, int totalTasks); + + /** + * adjust speed + * @param byteSpeed + * @param recordSpeed + */ + protected abstract void adjustTaskGroupSpeed(long byteSpeed, long recordSpeed); + + + /** + * if the job has been killed + * @param jobId + * @return + */ + protected abstract boolean isJobKilling(Long jobId); + + private int calculateTaskCount(List configurations) { + int totalTasks = 0; + for (Configuration taskGroupConfiguration : configurations) { + totalTasks += taskGroupConfiguration.getListConfiguration( + CoreConstant.DATAX_JOB_CONTENT).size(); + } + + return totalTasks; + } + +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/job/scheduler/processinner/ProcessInnerScheduler.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/job/scheduler/processinner/ProcessInnerScheduler.java new file mode 100644 index 000000000..348ff9a37 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/job/scheduler/processinner/ProcessInnerScheduler.java @@ -0,0 +1,76 @@ +package com.alibaba.datax.core.job.scheduler.processinner; + +import com.alibaba.datax.common.exception.DataXException; +import com.alibaba.datax.common.util.Configuration; +import com.alibaba.datax.core.job.scheduler.AbstractScheduler; +import com.alibaba.datax.core.statistics.container.communicator.AbstractContainerCommunicator; +import com.alibaba.datax.core.taskgroup.TaskGroupContainer; +import com.alibaba.datax.core.taskgroup.runner.TaskGroupContainerRunner; +import com.alibaba.datax.core.util.FrameworkErrorCode; + +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.ThreadPoolExecutor; + +public abstract class ProcessInnerScheduler extends AbstractScheduler { + + private ExecutorService taskGroupContainerExecutorService; + + private ThreadPoolExecutor threadPoolExecutor; + + private List taskGroupContainers = new ArrayList<>(); + + ProcessInnerScheduler(AbstractContainerCommunicator containerCommunicator) { + super(containerCommunicator); + } + + @Override + public void startAllTaskGroup(List configurations) { + this.taskGroupContainerExecutorService = Executors + .newFixedThreadPool(configurations.size()); + + for (Configuration taskGroupConfiguration : configurations) { + TaskGroupContainerRunner taskGroupContainerRunner = newTaskGroupContainerRunner(taskGroupConfiguration); + //store task group containers + taskGroupContainers.add(taskGroupContainerRunner.getTaskGroupContainer()); + this.taskGroupContainerExecutorService.execute(taskGroupContainerRunner); + } + + this.taskGroupContainerExecutorService.shutdown(); + } + + @Override + public void dealFailedStat(AbstractContainerCommunicator frameworkCollector, Throwable throwable) { + this.taskGroupContainerExecutorService.shutdownNow(); + throw DataXException.asDataXException( + FrameworkErrorCode.PLUGIN_RUNTIME_ERROR, throwable); + } + + + @Override + public void dealKillingStat(AbstractContainerCommunicator frameworkCollector, int totalTasks) { + this.taskGroupContainerExecutorService.shutdownNow(); + } + + @Override + protected void cancelAllTaskGroup() { + //shutdown each task group container + taskGroupContainers.forEach(TaskGroupContainer::shutdown); + //then to close the thread pool + this.taskGroupContainerExecutorService.shutdownNow(); + } + + @Override + 
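+    // Fans the adjusted byte/record speed limits out to every running task
+    // group container (each container applies them via TaskGroupContainer#adjustSpeed).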
protected void adjustTaskGroupSpeed(long byteSpeed, long recordSpeed) {
+        taskGroupContainers.forEach(taskGroupContainer -> taskGroupContainer.adjustSpeed(byteSpeed, recordSpeed));
+    }
+
+    private TaskGroupContainerRunner newTaskGroupContainerRunner(
+            Configuration configuration) {
+        TaskGroupContainer taskGroupContainer = new TaskGroupContainer(configuration);
+        return new TaskGroupContainerRunner(taskGroupContainer);
+    }
+
+}
diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/job/scheduler/processinner/StandAloneScheduler.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/job/scheduler/processinner/StandAloneScheduler.java
new file mode 100644
index 000000000..b6b19261f
--- /dev/null
+++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/job/scheduler/processinner/StandAloneScheduler.java
@@ -0,0 +1,19 @@
+package com.alibaba.datax.core.job.scheduler.processinner;
+
+import com.alibaba.datax.core.statistics.container.communicator.AbstractContainerCommunicator;
+
+/**
+ * Created by hongjiao.hj on 2014/12/22.
+ */
+public class StandAloneScheduler extends ProcessInnerScheduler {
+
+    public StandAloneScheduler(AbstractContainerCommunicator containerCommunicator) {
+        super(containerCommunicator);
+    }
+
+    @Override
+    protected boolean isJobKilling(Long jobId) {
+        return false;
+    }
+
+}
diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/communication/Communication.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/communication/Communication.java
new file mode 100644
index 000000000..cd3554ebb
--- /dev/null
+++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/communication/Communication.java
@@ -0,0 +1,327 @@
+package com.alibaba.datax.core.statistics.communication;
+
+import com.alibaba.datax.common.base.BaseObject;
+import com.alibaba.datax.dataxservice.face.domain.enums.State;
+import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang.Validate;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.concurrent.ConcurrentHashMap;
+
+/**
+ * Carrier of all DataX state and statistics: job, taskGroup and task
+ * progress reports all go through this class.
+ */
+public class Communication extends BaseObject implements Cloneable {
+    /**
+     * All numeric key-value counters
+     */
+    private Map<String, Number> counter;
+
+    /**
+     * Running state
+     */
+    private State state;
+
+    /**
+     * Recorded throwable
+     */
+    private Throwable throwable;
+
+    /**
+     * Timestamp of this record
+     */
+    private long timestamp;
+
+    /**
+     * Messages from tasks to the job
+     */
+    Map<String, List<String>> message;
+
+    /**
+     * Task parameters
+     */
+    Map<String, Object> params;
+
+    public Communication() {
+        this.init();
+    }
+
+    public synchronized void reset() {
+        this.init();
+    }
+
+    private void init() {
+        this.counter = new ConcurrentHashMap<>();
+        this.state = State.RUNNING;
+        this.throwable = null;
+        this.message = new ConcurrentHashMap<>();
+        this.timestamp = System.currentTimeMillis();
+        this.params = new ConcurrentHashMap<>();
+    }
+
+    public Map<String, Number> getCounter() {
+        return this.counter;
+    }
+
+    public synchronized State getState() {
+        return this.state;
+    }
+
+    public synchronized void setState(State state, boolean isForce) {
+        if (!isForce && this.state.equals(State.FAILED)) {
+            return;
+        }
+
+        this.state = state;
+    }
+
+    public synchronized void setState(State state) {
+        setState(state, false);
+    }
+
+    public Throwable getThrowable() {
+        return this.throwable;
+    }
+
+    public synchronized String getThrowableMessage() {
+        return this.throwable == null ? "" : this.throwable.getMessage();
+    }
+
+    public void setThrowable(Throwable throwable) {
+        setThrowable(throwable, false);
+    }
+
+    public synchronized void setThrowable(Throwable throwable, boolean isForce) {
+        if (isForce) {
+            this.throwable = throwable;
+        } else {
+            this.throwable = this.throwable == null ? throwable : this.throwable;
+        }
+    }
+
+    public long getTimestamp() {
+        return this.timestamp;
+    }
+
+    public void setTimestamp(long timestamp) {
+        this.timestamp = timestamp;
+    }
+
+    public Map<String, List<String>> getMessage() {
+        return this.message;
+    }
+
+    public List<String> getMessage(final String key) {
+        return message.get(key);
+    }
+
+    public synchronized void addMessage(final String key, final String value) {
+        Validate.isTrue(StringUtils.isNotBlank(key), "Message key must not be blank");
+        List<String> valueList = this.message.computeIfAbsent(key, k -> new ArrayList<>());
+        valueList.add(value);
+    }
+
+    /**
+     * Add a parameter.
+     *
+     * @param key      key
+     * @param value    value
+     * @param isUnique whether the new value should replace any existing one
+     */
+    @SuppressWarnings("unchecked")
+    public synchronized void addParameter(final String key, final String value, boolean isUnique) {
+        Validate.isTrue(StringUtils.isNotBlank(key), "Parameter key must not be blank");
+        if (isUnique) {
+            this.params.put(key, value);
+        } else {
+            Object current = this.params.computeIfAbsent(key, k -> value);
+            if (current instanceof List) {
+                ((List<String>) current).add(value);
+            } else if (!current.equals(value)) {
+                List<String> valueList = new ArrayList<>();
+                valueList.add(value);
+                this.params.put(key, valueList);
+            }
+        }
+    }
+
+    /**
+     * Get a parameter.
+     *
+     * @param key key
+     * @return value object
+     */
+    public Object getParameter(final String key) {
+        return this.params.get(key);
+    }
+
+    /**
+     * Get the parameter map.
+     *
+     * @return value map
+     */
+    public Map<String, Object> getParameters() {
+        return params;
+    }
+
+    public synchronized Long getLongCounter(final String key) {
+        Number value = this.counter.get(key);
+
+        return value == null ? 0 : value.longValue();
+    }
+
+    public synchronized void setLongCounter(final String key, final long value) {
+        Validate.isTrue(StringUtils.isNotBlank(key), "Counter key must not be blank");
+        this.counter.put(key, value);
+    }
+
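+    /*
+     * Illustrative counter round trip (a sketch, not part of the original
+     * patch):
+     *
+     *   Communication comm = new Communication();
+     *   comm.setLongCounter("readSucceedRecords", 10L);
+     *   comm.increaseCounter("readSucceedRecords", 5L);
+     *   comm.getLongCounter("readSucceedRecords");   // -> 15
+     */
+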
+    public synchronized Double getDoubleCounter(final String key) {
+        Number value = this.counter.get(key);
+
+        return value == null ? 0.0d : value.doubleValue();
+    }
+
+    public synchronized void setDoubleCounter(final String key, final double value) {
+        Validate.isTrue(StringUtils.isNotBlank(key), "Counter key must not be blank");
+        this.counter.put(key, value);
+    }
+
+    public synchronized void increaseCounter(final String key, final long deltaValue) {
+        Validate.isTrue(StringUtils.isNotBlank(key), "Counter key must not be blank");
+
+        long value = this.getLongCounter(key);
+
+        this.counter.put(key, value + deltaValue);
+    }
+
+    @Override
+    public Communication clone() {
+        Communication communication = new Communication();
+
+        /* clone counters */
+        if (this.counter != null) {
+            for (Map.Entry<String, Number> entry : this.counter.entrySet()) {
+                String key = entry.getKey();
+                Number value = entry.getValue();
+                if (value instanceof Long) {
+                    communication.setLongCounter(key, (Long) value);
+                } else if (value instanceof Double) {
+                    communication.setDoubleCounter(key, (Double) value);
+                }
+            }
+        }
+
+        communication.setState(this.state, true);
+        communication.setThrowable(this.throwable, true);
+        communication.setTimestamp(this.timestamp);
+
+        /* clone messages */
+        if (this.message != null) {
+            for (final Map.Entry<String, List<String>> entry : this.message.entrySet()) {
+                String key = entry.getKey();
+                List<String> value = new ArrayList<>(entry.getValue());
+                communication.getMessage().put(key, value);
+            }
+        }
+
+        return communication;
+    }
+
+    @SuppressWarnings("unchecked")
+    public synchronized Communication mergeFrom(final Communication otherComm) {
+        if (otherComm == null) {
+            return this;
+        }
+
+        /*
+         * Merge counters: accumulate otherComm's values into this one,
+         * creating entries that do not exist here yet. Matching Long
+         * counters are summed as longs, everything else as doubles.
+         */
+        for (Entry<String, Number> entry : otherComm.getCounter().entrySet()) {
+            String key = entry.getKey();
+            Number otherValue = entry.getValue();
+            if (otherValue == null) {
+                continue;
+            }
+
+            Number value = this.counter.get(key);
+            if (value == null) {
+                value = otherValue;
+            } else {
+                if (value instanceof Long && otherValue instanceof Long) {
+                    value = value.longValue() + otherValue.longValue();
+                } else {
+                    value = value.doubleValue() + otherValue.doubleValue();
+                }
+            }
+
+            this.counter.put(key, value);
+        }
+
+        // merge state
+        mergeStateFrom(otherComm);
+
+        /*
+         * Merge the throwable: otherComm's throwable is adopted only when
+         * this one is still null.
+         */
+        this.throwable = this.throwable == null ? otherComm.getThrowable() : this.throwable;
+
+        /*
+         * The timestamp stands for the merge as a whole; it is not merged
+         * pairwise between communications.
+         */
+
+        /*
+         * Messages are merged by union, i.e. all entries are accumulated
+         * together.
+         */
+        for (Entry<String, List<String>> entry : otherComm.getMessage().entrySet()) {
+            String key = entry.getKey();
+            List<String> valueList = this.message.computeIfAbsent(key, k -> new ArrayList<>());
+            valueList.addAll(entry.getValue());
+        }
+
+        // merge parameters
+        otherComm.getParameters().forEach((key, otherValue) -> {
+            Object currentValue = this.params.computeIfAbsent(key, k -> new ArrayList<>());
+            List<String> valueList;
+            if (!(currentValue instanceof List)) {
+                valueList = new ArrayList<>();
+                this.params.put(key, valueList);
+                valueList.add(String.valueOf(currentValue));
+            } else {
+                valueList = (List<String>) currentValue;
+            }
+            if (otherValue instanceof List) {
+                valueList.addAll((List<String>) otherValue);
+            } else {
+                valueList.add(String.valueOf(otherValue));
+            }
+        });
+        return this;
+    }
+
+    /**
+     * Merge states with the priority (Failed | Killed) > Running > Success.
+     * A Killing state never shows up here: it only exists on the Job itself.
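+     *
+     * Illustrative example (not part of the original patch): merging a FAILED
+     * task-group communication into a running job communication marks the job
+     * as FAILED:
+     *
+     * <pre>{@code
+     * Communication job = new Communication();   // initial state: RUNNING
+     * Communication tg = new Communication();
+     * tg.setState(State.FAILED);
+     * job.mergeStateFrom(tg);                    // job.getState() == FAILED
+     * }</pre>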
+ */ + public synchronized State mergeStateFrom(final Communication otherComm) { + State retState = this.getState(); + if (otherComm == null) { + return retState; + } + + if (this.state == State.FAILED || otherComm.getState() == State.FAILED + || this.state == State.KILLED || otherComm.getState() == State.KILLED) { + retState = State.FAILED; + } else if (this.state.isRunning() || otherComm.state.isRunning()) { + retState = State.RUNNING; + } + + this.setState(retState); + return retState; + } + + public synchronized boolean isFinished() { + return this.state == State.SUCCEEDED || this.state == State.FAILED + || this.state == State.KILLED; + } + +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/communication/CommunicationTool.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/communication/CommunicationTool.java new file mode 100644 index 000000000..5a106c7e2 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/communication/CommunicationTool.java @@ -0,0 +1,289 @@ +package com.alibaba.datax.core.statistics.communication; + +import com.alibaba.datax.common.statistics.PerfTrace; +import com.alibaba.datax.common.util.StrUtil; +import com.webank.wedatasphere.exchangis.datax.util.Json; +import org.apache.commons.lang.Validate; + +import java.text.DecimalFormat; +import java.util.HashMap; +import java.util.Map; + +/** + * 这里主要是业务层面的处理 + */ +public final class CommunicationTool { + + public static final String CHANNEL_RUNNING = "channelRunning"; + + public static final String STAGE = "stage"; + public static final String BYTE_SPEED = "byteSpeed"; + public static final String RECORD_SPEED = "recordSpeed"; + public static final String PERCENTAGE = "percentage"; + + public static final String READ_SUCCEED_RECORDS = "readSucceedRecords"; + public static final String READ_SUCCEED_BYTES = "readSucceedBytes"; + + public static final String READ_FAILED_RECORDS = "readFailedRecords"; + public static final String READ_FAILED_BYTES = "readFailedBytes"; + + public static final String WRITE_RECEIVED_RECORDS = "writeReceivedRecords"; + public static final String WRITE_RECEIVED_BYTES = "writeReceivedBytes"; + + public static final String WRITE_FAILED_RECORDS = "writeFailedRecords"; + public static final String WRITE_FAILED_BYTES = "writeFailedBytes"; + + public static final String TOTAL_READ_RECORDS = "totalReadRecords"; + private static final String TOTAL_READ_BYTES = "totalReadBytes"; + + private static final String TOTAL_ERROR_RECORDS = "totalErrorRecords"; + private static final String TOTAL_ERROR_BYTES = "totalErrorBytes"; + + private static final String WRITE_SUCCEED_RECORDS = "writeSucceedRecords"; + private static final String WRITE_SUCCEED_BYTES = "writeSucceedBytes"; + + public static final String WAIT_WRITER_TIME = "waitWriterTime"; + + public static final String WAIT_READER_TIME = "waitReaderTime"; + + public static final String TASK_RUNNING_WRITERS = "taskRunningWriters"; + + public static final String TRANSFORMER_USED_TIME = "totalTransformerUsedTime"; + public static final String TRANSFORMER_SUCCEED_RECORDS = "totalTransformerSuccessRecords"; + public static final String TRANSFORMER_FAILED_RECORDS = "totalTransformerFailedRecords"; + public static final String TRANSFORMER_FILTER_RECORDS = "totalTransformerFilterRecords"; + public static final String TRANSFORMER_NAME_PREFIX = "usedTimeByTransformer_"; + + public static Communication 
getReportCommunication(Communication now, Communication old, int totalStage) {
+        Validate.isTrue(now != null && old != null,
+                "The new and old metrics prepared for reporting must not be null");
+
+        long totalReadRecords = getTotalReadRecords(now);
+        long totalReadBytes = getTotalReadBytes(now);
+        now.setLongCounter(TOTAL_READ_RECORDS, totalReadRecords);
+        now.setLongCounter(TOTAL_READ_BYTES, totalReadBytes);
+        now.setLongCounter(TOTAL_ERROR_RECORDS, getTotalErrorRecords(now));
+        now.setLongCounter(TOTAL_ERROR_BYTES, getTotalErrorBytes(now));
+        now.setLongCounter(WRITE_SUCCEED_RECORDS, getWriteSucceedRecords(now));
+        now.setLongCounter(WRITE_SUCCEED_BYTES, getWriteSucceedBytes(now));
+
+        long timeInterval = now.getTimestamp() - old.getTimestamp();
+        long sec = timeInterval <= 1000 ? 1 : timeInterval / 1000;
+        long bytesSpeed = (totalReadBytes - getTotalReadBytes(old)) / sec;
+        long recordsSpeed = (totalReadRecords - getTotalReadRecords(old)) / sec;
+
+        now.setLongCounter(BYTE_SPEED, bytesSpeed < 0 ? 0 : bytesSpeed);
+        now.setLongCounter(RECORD_SPEED, recordsSpeed < 0 ? 0 : recordsSpeed);
+        now.setDoubleCounter(PERCENTAGE, now.getLongCounter(STAGE) / (double) totalStage);
+
+        if (old.getThrowable() != null) {
+            now.setThrowable(old.getThrowable());
+        }
+
+        return now;
+    }
+
+    public static long getTotalReadRecords(final Communication communication) {
+        return communication.getLongCounter(READ_SUCCEED_RECORDS)
+                + communication.getLongCounter(READ_FAILED_RECORDS);
+    }
+
+    public static long getTotalReadBytes(final Communication communication) {
+        return communication.getLongCounter(READ_SUCCEED_BYTES)
+                + communication.getLongCounter(READ_FAILED_BYTES);
+    }
+
+    public static long getTotalErrorRecords(final Communication communication) {
+        return communication.getLongCounter(READ_FAILED_RECORDS)
+                + communication.getLongCounter(WRITE_FAILED_RECORDS);
+    }
+
+    public static long getTotalErrorBytes(final Communication communication) {
+        return communication.getLongCounter(READ_FAILED_BYTES)
+                + communication.getLongCounter(WRITE_FAILED_BYTES);
+    }
+
+    public static long getWriteSucceedRecords(final Communication communication) {
+        return communication.getLongCounter(WRITE_RECEIVED_RECORDS)
+                - communication.getLongCounter(WRITE_FAILED_RECORDS);
+    }
+
+    public static long getWriteSucceedBytes(final Communication communication) {
+        return communication.getLongCounter(WRITE_RECEIVED_BYTES)
+                - communication.getLongCounter(WRITE_FAILED_BYTES);
+    }
+
+    public static class Stringify {
+        private static final DecimalFormat df = new DecimalFormat("0.00");
+
+        public static String getSnapshot(final Communication communication) {
+            StringBuilder sb = new StringBuilder();
+            sb.append("Total ");
+            sb.append(getTotal(communication));
+            sb.append(" | ");
+            sb.append("Speed ");
+            sb.append(getSpeed(communication));
+            sb.append(" | ");
+            sb.append("Error ");
+            sb.append(getError(communication));
+            sb.append(" | ");
+            sb.append(" All task WaitWriterTime ");
+            sb.append(PerfTrace.unitTime(communication.getLongCounter(WAIT_WRITER_TIME)));
+            sb.append(" | ");
+            sb.append(" All task WaitReaderTime ");
+            sb.append(PerfTrace.unitTime(communication.getLongCounter(WAIT_READER_TIME)));
+            sb.append(" | ");
+            if (communication.getLongCounter(CommunicationTool.TRANSFORMER_USED_TIME) > 0
+                    || communication.getLongCounter(CommunicationTool.TRANSFORMER_SUCCEED_RECORDS) > 0
+                    || communication.getLongCounter(CommunicationTool.TRANSFORMER_FAILED_RECORDS) > 0
+                    || communication.getLongCounter(CommunicationTool.TRANSFORMER_FILTER_RECORDS) > 0) {
+                sb.append("Transformer 
Success "); + sb.append(String.format("%d records", communication.getLongCounter(CommunicationTool.TRANSFORMER_SUCCEED_RECORDS))); + sb.append(" | "); + sb.append("Transformer Error "); + sb.append(String.format("%d records", communication.getLongCounter(CommunicationTool.TRANSFORMER_FAILED_RECORDS))); + sb.append(" | "); + sb.append("Transformer Filter "); + sb.append(String.format("%d records", communication.getLongCounter(CommunicationTool.TRANSFORMER_FILTER_RECORDS))); + sb.append(" | "); + sb.append("Transformer usedTime "); + sb.append(PerfTrace.unitTime(communication.getLongCounter(CommunicationTool.TRANSFORMER_USED_TIME))); + sb.append(" | "); + } + sb.append("Percentage "); + sb.append(getPercentage(communication)); + return sb.toString(); + } + + private static String getTotal(final Communication communication) { + return String.format("%d records, %d bytes", + communication.getLongCounter(TOTAL_READ_RECORDS), + communication.getLongCounter(TOTAL_READ_BYTES)); + } + + private static String getSpeed(final Communication communication) { + return String.format("%s/s, %d records/s", + StrUtil.stringify(communication.getLongCounter(BYTE_SPEED)), + communication.getLongCounter(RECORD_SPEED)); + } + + private static String getError(final Communication communication) { + return String.format("%d records, %d bytes", + communication.getLongCounter(TOTAL_ERROR_RECORDS), + communication.getLongCounter(TOTAL_ERROR_BYTES)); + } + + private static String getPercentage(final Communication communication) { + return df.format(communication.getDoubleCounter(PERCENTAGE) * 100) + "%"; + } + } + + public static class Jsonify { + @SuppressWarnings("rawtypes") + public static String getSnapshot(Communication communication) { + Validate.notNull(communication); + + Map state = new HashMap(); + + Pair pair = getTotalBytes(communication); + state.put((String) pair.getKey(), pair.getValue()); + + pair = getTotalRecords(communication); + state.put((String) pair.getKey(), pair.getValue()); + + pair = getSpeedRecord(communication); + state.put((String) pair.getKey(), pair.getValue()); + + pair = getSpeedByte(communication); + state.put((String) pair.getKey(), pair.getValue()); + + pair = getStage(communication); + state.put((String) pair.getKey(), pair.getValue()); + + pair = getErrorRecords(communication); + state.put((String) pair.getKey(), pair.getValue()); + + pair = getErrorBytes(communication); + state.put((String) pair.getKey(), pair.getValue()); + + pair = getErrorMessage(communication); + state.put((String) pair.getKey(), pair.getValue()); + + pair = getPercentage(communication); + state.put((String) pair.getKey(), pair.getValue()); + + pair = getWaitReaderTime(communication); + state.put((String) pair.getKey(), pair.getValue()); + + pair = getWaitWriterTime(communication); + state.put((String) pair.getKey(), pair.getValue()); + + return Json.toJson(state, null); + } + + private static Pair getTotalBytes(final Communication communication) { + return new Pair("totalBytes", communication.getLongCounter(TOTAL_READ_BYTES)); + } + + private static Pair getTotalRecords(final Communication communication) { + return new Pair("totalRecords", communication.getLongCounter(TOTAL_READ_RECORDS)); + } + + private static Pair getSpeedByte(final Communication communication) { + return new Pair("speedBytes", communication.getLongCounter(BYTE_SPEED)); + } + + private static Pair getSpeedRecord(final Communication communication) { + return new Pair("speedRecords", communication.getLongCounter(RECORD_SPEED)); + } + + 
private static Pair getErrorRecords(final Communication communication) { + return new Pair("errorRecords", communication.getLongCounter(TOTAL_ERROR_RECORDS)); + } + + private static Pair getErrorBytes(final Communication communication) { + return new Pair("errorBytes", communication.getLongCounter(TOTAL_ERROR_BYTES)); + } + + private static Pair getStage(final Communication communication) { + return new Pair("stage", communication.getLongCounter(STAGE)); + } + + private static Pair getPercentage(final Communication communication) { + return new Pair("percentage", communication.getDoubleCounter(PERCENTAGE)); + } + + private static Pair getErrorMessage(final Communication communication) { + return new Pair("errorMessage", communication.getThrowableMessage()); + } + + private static Pair getWaitReaderTime(final Communication communication) { + return new Pair("waitReaderTime", communication.getLongCounter(CommunicationTool.WAIT_READER_TIME)); + } + + private static Pair getWaitWriterTime(final Communication communication) { + return new Pair("waitWriterTime", communication.getLongCounter(CommunicationTool.WAIT_WRITER_TIME)); + } + + static class Pair { + public Pair(final K key, final V value) { + this.key = key; + this.value = value; + } + + public K getKey() { + return key; + } + + public V getValue() { + return value; + } + + private K key; + + private V value; + } + } +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/communication/LocalTGCommunicationManager.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/communication/LocalTGCommunicationManager.java new file mode 100644 index 000000000..0b0529f82 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/communication/LocalTGCommunicationManager.java @@ -0,0 +1,62 @@ +package com.alibaba.datax.core.statistics.communication; + +import com.alibaba.datax.dataxservice.face.domain.enums.State; +import org.apache.commons.lang3.Validate; + +import java.util.Map; +import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; + +public final class LocalTGCommunicationManager { + private static Map taskGroupCommunicationMap = + new ConcurrentHashMap(); + + public static void registerTaskGroupCommunication( + int taskGroupId, Communication communication) { + taskGroupCommunicationMap.put(taskGroupId, communication); + } + + public static Communication getJobCommunication() { + Communication communication = new Communication(); + communication.setState(State.SUCCEEDED); + + for (Communication taskGroupCommunication : + taskGroupCommunicationMap.values()) { + communication.mergeFrom(taskGroupCommunication); + } + + return communication; + } + + /** + * 采用获取taskGroupId后再获取对应communication的方式, + * 防止map遍历时修改,同时也防止对map key-value对的修改 + * + * @return + */ + public static Set getTaskGroupIdSet() { + return taskGroupCommunicationMap.keySet(); + } + + public static Communication getTaskGroupCommunication(int taskGroupId) { + Validate.isTrue(taskGroupId >= 0, "taskGroupId不能小于0"); + + return taskGroupCommunicationMap.get(taskGroupId); + } + + public static void updateTaskGroupCommunication(final int taskGroupId, + final Communication communication) { + Validate.isTrue(taskGroupCommunicationMap.containsKey( + taskGroupId), String.format("taskGroupCommunicationMap中没有注册taskGroupId[%d]的Communication," + + "无法更新该taskGroup的信息", taskGroupId)); + taskGroupCommunicationMap.put(taskGroupId, communication); 
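+        // Note: the task group's Communication snapshot is replaced wholesale
+        // here rather than merged with the previous one.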
+ } + + public static void clear() { + taskGroupCommunicationMap.clear(); + } + + public static Map getTaskGroupCommunicationMap() { + return taskGroupCommunicationMap; + } +} \ No newline at end of file diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/container/collector/AbstractCollector.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/container/collector/AbstractCollector.java new file mode 100644 index 000000000..996949b4d --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/container/collector/AbstractCollector.java @@ -0,0 +1,73 @@ +package com.alibaba.datax.core.statistics.container.collector; + +import com.alibaba.datax.common.util.Configuration; +import com.alibaba.datax.core.statistics.communication.Communication; +import com.alibaba.datax.core.statistics.communication.LocalTGCommunicationManager; +import com.alibaba.datax.core.util.container.CoreConstant; +import com.alibaba.datax.dataxservice.face.domain.enums.State; + +import java.util.List; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; + +import static com.alibaba.datax.core.statistics.communication.CommunicationTool.TASK_RUNNING_WRITERS; + +public abstract class AbstractCollector { + private Map taskCommunicationMap = new ConcurrentHashMap(); + private Long jobId; + + public Map getTaskCommunicationMap() { + return taskCommunicationMap; + } + + public Long getJobId() { + return jobId; + } + + public void setJobId(Long jobId) { + this.jobId = jobId; + } + + public void registerTGCommunication(List taskGroupConfigurationList) { + for (Configuration config : taskGroupConfigurationList) { + int taskGroupId = config.getInt( + CoreConstant.DATAX_CORE_CONTAINER_TASKGROUP_ID); + LocalTGCommunicationManager.registerTaskGroupCommunication(taskGroupId, new Communication()); + } + } + + public void registerTaskCommunication(List taskConfigurationList) { + for (Configuration taskConfig : taskConfigurationList) { + int taskId = taskConfig.getInt(CoreConstant.TASK_ID); + Communication taskCommunication = new Communication(); + taskCommunication.setLongCounter(TASK_RUNNING_WRITERS, taskConfig.getList(CoreConstant.JOB_WRITER).size()); + this.taskCommunicationMap.put(taskId, taskCommunication); + } + } + + public Communication collectFromTask() { + Communication communication = new Communication(); + communication.setState(State.SUCCEEDED); + + for (Communication taskCommunication : + this.taskCommunicationMap.values()) { + communication.mergeFrom(taskCommunication); + } + + return communication; + } + + public abstract Communication collectFromTaskGroup(); + + public Map getTGCommunicationMap() { + return LocalTGCommunicationManager.getTaskGroupCommunicationMap(); + } + + public Communication getTGCommunication(Integer taskGroupId) { + return LocalTGCommunicationManager.getTaskGroupCommunication(taskGroupId); + } + + public Communication getTaskCommunication(Integer taskId) { + return this.taskCommunicationMap.get(taskId); + } +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/container/collector/ProcessInnerCollector.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/container/collector/ProcessInnerCollector.java new file mode 100644 index 000000000..530794b56 --- /dev/null +++ 
b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/container/collector/ProcessInnerCollector.java @@ -0,0 +1,17 @@ +package com.alibaba.datax.core.statistics.container.collector; + +import com.alibaba.datax.core.statistics.communication.Communication; +import com.alibaba.datax.core.statistics.communication.LocalTGCommunicationManager; + +public class ProcessInnerCollector extends AbstractCollector { + + public ProcessInnerCollector(Long jobId) { + super.setJobId(jobId); + } + + @Override + public Communication collectFromTaskGroup() { + return LocalTGCommunicationManager.getJobCommunication(); + } + +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/container/communicator/AbstractContainerCommunicator.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/container/communicator/AbstractContainerCommunicator.java new file mode 100644 index 000000000..09cc06957 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/container/communicator/AbstractContainerCommunicator.java @@ -0,0 +1,88 @@ +package com.alibaba.datax.core.statistics.container.communicator; + + +import com.alibaba.datax.common.statistics.VMInfo; +import com.alibaba.datax.common.util.Configuration; +import com.alibaba.datax.core.statistics.communication.Communication; +import com.alibaba.datax.core.statistics.container.collector.AbstractCollector; +import com.alibaba.datax.core.statistics.container.report.AbstractReporter; +import com.alibaba.datax.core.util.container.CoreConstant; +import com.alibaba.datax.dataxservice.face.domain.enums.State; + +import java.util.List; +import java.util.Map; + +public abstract class AbstractContainerCommunicator { + private Configuration configuration; + private AbstractCollector collector; + private AbstractReporter reporter; + + private Long jobId; + + private VMInfo vmInfo = VMInfo.getVmInfo(); + private long lastReportTime = System.currentTimeMillis(); + + + public Configuration getConfiguration() { + return this.configuration; + } + + public AbstractCollector getCollector() { + return collector; + } + + public AbstractReporter getReporter() { + return reporter; + } + + public void setCollector(AbstractCollector collector) { + this.collector = collector; + } + + public void setReporter(AbstractReporter reporter) { + this.reporter = reporter; + } + + public Long getJobId() { + return jobId; + } + + public AbstractContainerCommunicator(Configuration configuration) { + this.configuration = configuration; + this.jobId = configuration.getLong(CoreConstant.DATAX_CORE_CONTAINER_JOB_ID); + } + + + public abstract void registerCommunication(List configurationList); + + public abstract Communication collect(); + + public abstract void report(Communication communication); + + public abstract State collectState(); + + public abstract Communication getCommunication(Integer id); + + /** + * 当 实现是 TGContainerCommunicator 时,返回的 Map: key=taskId, value=Communication + * 当 实现是 JobContainerCommunicator 时,返回的 Map: key=taskGroupId, value=Communication + */ + public abstract Map getCommunicationMap(); + + public void resetCommunication(Integer id) { + Map map = getCommunicationMap(); + map.put(id, new Communication()); + } + + public void reportVmInfo() { + long now = System.currentTimeMillis(); + //每5分钟打印一次 + if (now - lastReportTime >= 300000) { + //当前仅打印 + if (vmInfo != null) { + vmInfo.getDelta(true); + } + 
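+            // Advance the throttle window: VM info is printed at most once
+            // every five minutes.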
lastReportTime = now; + } + } +} \ No newline at end of file diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/container/communicator/job/StandAloneJobContainerCommunicator.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/container/communicator/job/StandAloneJobContainerCommunicator.java new file mode 100644 index 000000000..b05ac38c0 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/container/communicator/job/StandAloneJobContainerCommunicator.java @@ -0,0 +1,67 @@ +package com.alibaba.datax.core.statistics.container.communicator.job; + +import com.alibaba.datax.common.util.Configuration; +import com.alibaba.datax.core.statistics.communication.Communication; +import com.alibaba.datax.core.statistics.communication.CommunicationTool; +import com.alibaba.datax.core.statistics.container.collector.ProcessInnerCollector; +import com.alibaba.datax.core.statistics.container.communicator.AbstractContainerCommunicator; +import com.alibaba.datax.core.statistics.container.report.AbstractReporter; +import com.alibaba.datax.core.statistics.container.report.ProcessInnerReporter; +import com.alibaba.datax.core.util.ClassUtil; +import com.alibaba.datax.core.util.container.CoreConstant; +import com.alibaba.datax.dataxservice.face.domain.enums.State; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.List; +import java.util.Map; + +public class StandAloneJobContainerCommunicator extends AbstractContainerCommunicator { + private static final Logger LOG = LoggerFactory + .getLogger(StandAloneJobContainerCommunicator.class); + + public StandAloneJobContainerCommunicator(Configuration configuration) { + super(configuration); + super.setCollector(new ProcessInnerCollector(configuration.getLong( + CoreConstant.DATAX_CORE_CONTAINER_JOB_ID))); + // Set the reporter defined in configuration + super.setReporter(ClassUtil.instantiate(configuration.getString(CoreConstant.DATAX_CORE_STATISTICS_REPORTER_PLUGIN_CLASS), + AbstractReporter.class, configuration)); + } + + @Override + public void registerCommunication(List configurationList) { + super.getCollector().registerTGCommunication(configurationList); + } + + @Override + public Communication collect() { + return super.getCollector().collectFromTaskGroup(); + } + + @Override + public State collectState() { + return this.collect().getState(); + } + + /** + * 和 DistributeJobContainerCollector 的 report 实现一样 + */ + @Override + public void report(Communication communication) { + super.getReporter().reportJobCommunication(super.getJobId(), communication); + String info = CommunicationTool.Stringify.getSnapshot(communication); + LOG.info(info); + reportVmInfo(); + } + + @Override + public Communication getCommunication(Integer taskGroupId) { + return super.getCollector().getTGCommunication(taskGroupId); + } + + @Override + public Map getCommunicationMap() { + return super.getCollector().getTGCommunicationMap(); + } +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/container/communicator/taskgroup/AbstractTGContainerCommunicator.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/container/communicator/taskgroup/AbstractTGContainerCommunicator.java new file mode 100644 index 000000000..cefcd7544 --- /dev/null +++ 
b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/container/communicator/taskgroup/AbstractTGContainerCommunicator.java @@ -0,0 +1,74 @@ +package com.alibaba.datax.core.statistics.container.communicator.taskgroup; + +import com.alibaba.datax.common.util.Configuration; +import com.alibaba.datax.core.statistics.communication.Communication; +import com.alibaba.datax.core.statistics.container.collector.ProcessInnerCollector; +import com.alibaba.datax.core.statistics.container.communicator.AbstractContainerCommunicator; +import com.alibaba.datax.core.util.container.CoreConstant; +import com.alibaba.datax.dataxservice.face.domain.enums.State; +import org.apache.commons.lang.Validate; + +import java.util.List; +import java.util.Map; + +/** + * 该类是用于处理 taskGroupContainer 的 communication 的收集汇报的父类 + * 主要是 taskCommunicationMap 记录了 taskExecutor 的 communication 属性 + */ +public abstract class AbstractTGContainerCommunicator extends AbstractContainerCommunicator { + + protected long jobId; + + /** + * 由于taskGroupContainer是进程内部调度 + * 其registerCommunication(),getCommunication(), + * getCommunications(),collect()等方法是一致的 + * 所有TG的Collector都是ProcessInnerCollector + */ + protected int taskGroupId; + + public AbstractTGContainerCommunicator(Configuration configuration) { + super(configuration); + this.jobId = configuration.getLong( + CoreConstant.DATAX_CORE_CONTAINER_JOB_ID); + super.setCollector(new ProcessInnerCollector(this.jobId)); + this.taskGroupId = configuration.getInt( + CoreConstant.DATAX_CORE_CONTAINER_TASKGROUP_ID); + } + + @Override + public void registerCommunication(List configurationList) { + super.getCollector().registerTaskCommunication(configurationList); + } + + @Override + public final Communication collect() { + return this.getCollector().collectFromTask(); + } + + @Override + public final State collectState() { + Communication communication = new Communication(); + communication.setState(State.SUCCEEDED); + + for (Communication taskCommunication : + super.getCollector().getTaskCommunicationMap().values()) { + communication.mergeStateFrom(taskCommunication); + } + + return communication.getState(); + } + + @Override + public final Communication getCommunication(Integer taskId) { + Validate.isTrue(taskId >= 0, "注册的taskId不能小于0"); + + return super.getCollector().getTaskCommunication(taskId); + } + + @Override + public final Map getCommunicationMap() { + return super.getCollector().getTaskCommunicationMap(); + } + +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/container/communicator/taskgroup/StandaloneTGContainerCommunicator.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/container/communicator/taskgroup/StandaloneTGContainerCommunicator.java new file mode 100644 index 000000000..8ea0ac5d6 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/container/communicator/taskgroup/StandaloneTGContainerCommunicator.java @@ -0,0 +1,24 @@ +package com.alibaba.datax.core.statistics.container.communicator.taskgroup; + +import com.alibaba.datax.common.util.Configuration; +import com.alibaba.datax.core.statistics.communication.Communication; +import com.alibaba.datax.core.statistics.container.report.AbstractReporter; +import com.alibaba.datax.core.statistics.container.report.ProcessInnerReporter; +import com.alibaba.datax.core.util.ClassUtil; +import 
com.alibaba.datax.core.util.container.CoreConstant; + +public class StandaloneTGContainerCommunicator extends AbstractTGContainerCommunicator { + + public StandaloneTGContainerCommunicator(Configuration configuration) { + super(configuration); + // Set the reporter defined in configuration + super.setReporter(ClassUtil.instantiate(configuration.getString(CoreConstant.DATAX_CORE_STATISTICS_REPORTER_PLUGIN_CLASS), + AbstractReporter.class, configuration)); + } + + @Override + public void report(Communication communication) { + super.getReporter().reportTGCommunication(super.taskGroupId, communication); + } + +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/container/report/AbstractReporter.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/container/report/AbstractReporter.java new file mode 100644 index 000000000..57f98587a --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/container/report/AbstractReporter.java @@ -0,0 +1,11 @@ +package com.alibaba.datax.core.statistics.container.report; + +import com.alibaba.datax.core.statistics.communication.Communication; + +public abstract class AbstractReporter { + + public abstract void reportJobCommunication(Long jobId, Communication communication); + + public abstract void reportTGCommunication(Integer taskGroupId, Communication communication); + +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/container/report/ProcessInnerReporter.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/container/report/ProcessInnerReporter.java new file mode 100644 index 000000000..34cc71122 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/container/report/ProcessInnerReporter.java @@ -0,0 +1,73 @@ +package com.alibaba.datax.core.statistics.container.report; + +import com.alibaba.datax.common.util.Configuration; +import com.webank.wedatasphere.exchangis.datax.common.GsonUtil; +import com.alibaba.datax.core.statistics.communication.Communication; +import com.alibaba.datax.core.statistics.communication.CommunicationTool; +import com.alibaba.datax.core.statistics.communication.LocalTGCommunicationManager; +import com.alibaba.datax.core.util.HttpClientUtil; +import com.alibaba.datax.core.util.container.CoreConstant; +import org.apache.http.client.methods.HttpPost; +import org.apache.http.entity.StringEntity; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.math.BigDecimal; +import java.util.HashMap; +import java.util.Map; + +public class ProcessInnerReporter extends AbstractReporter { + + private static final String REPORT_RESP_DATA_PATH = "data"; + + private static final Logger LOG = LoggerFactory.getLogger(ProcessInnerReporter.class); + + private Configuration configuration; + + public ProcessInnerReporter(Configuration configuration){ + this.configuration = configuration; + } + + /** + * Updated by davidhua@webank.com + * @param jobId + * @param communication + */ + @Override + @SuppressWarnings("unchecked") + public void reportJobCommunication(Long jobId, Communication communication) { + try { + Map requestBody = new HashMap<>(); + requestBody.put("currentByteSpeed", communication.getLongCounter(CommunicationTool.BYTE_SPEED)); + requestBody.put("taskId", 
configuration.getLong(CoreConstant.DATAX_CORE_CONTAINER_JOB_ID)); + StringEntity entity = new StringEntity(GsonUtil.toJson(requestBody)); + entity.setContentEncoding("UTF-8"); + entity.setContentType("application/json"); + HttpPost post = HttpClientUtil.getPostRequest(configuration.getString(CoreConstant.DATAX_CORE_DATAXSERVER_PROTOCOL) + + "://" + configuration.getString(CoreConstant.DATAX_CORE_DATAXSERVER_ADDRESS) + + configuration.getString(CoreConstant.DATAX_CORE_DATAXSERVER_ENDPOINT_REPORT_STATE), + entity, + "Content-Type", "application/json;charset=UTF-8"); + String body = HttpClientUtil.getHttpClientUtil().executeAndGet(post, String.class); + Map response = GsonUtil.fromJson(body, Map.class, String.class, Object.class); + if(response.get(REPORT_RESP_DATA_PATH) != null) { + Map runtimeParams = (Map) response.get(REPORT_RESP_DATA_PATH); + Long maxByteSpeed = new BigDecimal(String.valueOf(runtimeParams + .getOrDefault("maxByteSpeed", 0))).longValue(); + if (maxByteSpeed > 0) { + //Update the speed configuration + this.configuration.set(CoreConstant.DATAX_JOB_SETTING_SPEED_BYTE, maxByteSpeed); + } + } + }catch(Exception e){ + LOG.info(e.getMessage(), e); + //Do nothing + } + } + + @Override + public void reportTGCommunication(Integer taskGroupId, Communication communication) { + LocalTGCommunicationManager.updateTaskGroupCommunication(taskGroupId, communication); + } + +} \ No newline at end of file diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/plugin/DefaultJobPluginCollector.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/plugin/DefaultJobPluginCollector.java new file mode 100644 index 000000000..a06f89b32 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/plugin/DefaultJobPluginCollector.java @@ -0,0 +1,31 @@ +package com.alibaba.datax.core.statistics.plugin; + +import com.alibaba.datax.common.plugin.JobPluginCollector; +import com.alibaba.datax.core.statistics.communication.Communication; +import com.alibaba.datax.core.statistics.container.communicator.AbstractContainerCommunicator; + +import java.util.List; +import java.util.Map; + +/** + * Created by jingxing on 14-9-9. 
+ */ +public final class DefaultJobPluginCollector implements JobPluginCollector { + private AbstractContainerCommunicator jobCollector; + + public DefaultJobPluginCollector(AbstractContainerCommunicator containerCollector) { + this.jobCollector = containerCollector; + } + + @Override + public Map> getMessage() { + Communication totalCommunication = this.jobCollector.collect(); + return totalCommunication.getMessage(); + } + + @Override + public List getMessage(String key) { + Communication totalCommunication = this.jobCollector.collect(); + return totalCommunication.getMessage(key); + } +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/plugin/task/AbstractTaskPluginCollector.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/plugin/task/AbstractTaskPluginCollector.java new file mode 100644 index 000000000..9597c6bfc --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/plugin/task/AbstractTaskPluginCollector.java @@ -0,0 +1,81 @@ +package com.alibaba.datax.core.statistics.plugin.task; + +import com.alibaba.datax.common.constant.PluginType; +import com.alibaba.datax.common.element.Record; +import com.alibaba.datax.common.exception.DataXException; +import com.alibaba.datax.common.plugin.TaskPluginCollector; +import com.alibaba.datax.common.util.Configuration; +import com.alibaba.datax.core.statistics.communication.Communication; +import com.alibaba.datax.core.statistics.communication.CommunicationTool; +import com.alibaba.datax.core.util.FrameworkErrorCode; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Created by jingxing on 14-9-11. + */ +public abstract class AbstractTaskPluginCollector extends TaskPluginCollector { + private static final Logger LOG = LoggerFactory + .getLogger(AbstractTaskPluginCollector.class); + + private Communication communication; + + private Configuration configuration; + + private PluginType pluginType; + + public AbstractTaskPluginCollector(Configuration conf, Communication communication, + PluginType type) { + this.configuration = conf; + this.communication = communication; + this.pluginType = type; + } + + public Communication getCommunication() { + return communication; + } + + public Configuration getConfiguration() { + return configuration; + } + + public PluginType getPluginType() { + return pluginType; + } + + @Override + final public void collectMessage(String key, String value) { + this.communication.addMessage(key, value); + } + + @Override + public void collectParameter(String key, String value, boolean isUnique) { + this.communication.addParameter(key, value, isUnique); + } + + @Override + public void collectDirtyRecord(Record dirtyRecord, Throwable t, + String errorMessage) { + + if (null == dirtyRecord) { + LOG.warn("脏数据record=null."); + return; + } + + if (this.pluginType.equals(PluginType.READER)) { + this.communication.increaseCounter( + CommunicationTool.READ_FAILED_RECORDS, 1); + this.communication.increaseCounter( + CommunicationTool.READ_FAILED_BYTES, dirtyRecord.getByteSize()); + } else if (this.pluginType.equals(PluginType.WRITER)) { + this.communication.increaseCounter( + CommunicationTool.WRITE_FAILED_RECORDS, 1); + this.communication.increaseCounter( + CommunicationTool.WRITE_FAILED_BYTES, dirtyRecord.getByteSize()); + } else { + throw DataXException.asDataXException( + FrameworkErrorCode.RUNTIME_ERROR, + String.format("不知道的插件类型[%s].", this.pluginType)); + 
} + } +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/plugin/task/HttpPluginCollector.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/plugin/task/HttpPluginCollector.java new file mode 100644 index 000000000..e479fe2c1 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/plugin/task/HttpPluginCollector.java @@ -0,0 +1,23 @@ +package com.alibaba.datax.core.statistics.plugin.task; + +import com.alibaba.datax.common.constant.PluginType; +import com.alibaba.datax.common.element.Record; +import com.alibaba.datax.common.util.Configuration; +import com.alibaba.datax.core.statistics.communication.Communication; + +/** + * Created by jingxing on 14-9-9. + */ +public class HttpPluginCollector extends AbstractTaskPluginCollector { + public HttpPluginCollector(Configuration configuration, Communication Communication, + PluginType type) { + super(configuration, Communication, type); + } + + @Override + public void collectDirtyRecord(Record dirtyRecord, Throwable t, + String errorMessage) { + super.collectDirtyRecord(dirtyRecord, t, errorMessage); + } + +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/plugin/task/StdoutPluginCollector.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/plugin/task/StdoutPluginCollector.java new file mode 100644 index 000000000..a54ae4fb7 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/plugin/task/StdoutPluginCollector.java @@ -0,0 +1,71 @@ +package com.alibaba.datax.core.statistics.plugin.task; + +import com.alibaba.datax.common.constant.PluginType; +import com.alibaba.datax.common.element.Record; +import com.alibaba.datax.common.util.Configuration; +import com.alibaba.datax.core.statistics.communication.Communication; +import com.alibaba.datax.core.statistics.plugin.task.util.DirtyRecord; +import com.alibaba.datax.core.util.container.CoreConstant; +import com.webank.wedatasphere.exchangis.datax.util.Json; +import org.apache.commons.lang3.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.HashMap; +import java.util.Map; +import java.util.concurrent.atomic.AtomicInteger; + +/** + * Created by jingxing on 14-9-9. + */ +public class StdoutPluginCollector extends AbstractTaskPluginCollector { + private static final Logger LOG = LoggerFactory + .getLogger(StdoutPluginCollector.class); + + private static final int DEFAULT_MAX_DIRTYNUM = 128; + + private AtomicInteger maxLogNum = new AtomicInteger(0); + + private AtomicInteger currentLogNum = new AtomicInteger(0); + + public StdoutPluginCollector(Configuration configuration, Communication communication, + PluginType type) { + super(configuration, communication, type); + maxLogNum = new AtomicInteger( + configuration.getInt( + CoreConstant.DATAX_CORE_STATISTICS_COLLECTOR_PLUGIN_MAXDIRTYNUM, + DEFAULT_MAX_DIRTYNUM)); + } + + private String formatDirty(final Record dirty, final Throwable t, + final String msg) { + Map msgGroup = new HashMap(); + + msgGroup.put("type", super.getPluginType().toString()); + if (StringUtils.isNotBlank(msg)) { + msgGroup.put("message", msg); + } + if (null != t && StringUtils.isNotBlank(t.getMessage())) { + msgGroup.put("exception", t.getMessage()); + } + if (null != dirty) { + msgGroup.put("record", dirty instanceof DirtyRecord ? 
dirty.getColumns():DirtyRecord.asDirtyRecord(dirty) + .getColumns()); + } + + return Json.toJson(msgGroup, null); + } + + @Override + public void collectDirtyRecord(Record dirtyRecord, Throwable t, + String errorMessage) { + int logNum = currentLogNum.getAndIncrement(); + if (logNum == 0 && t != null) { + LOG.error("", t); + } + LOG.error("脏数据: \n" + + this.formatDirty(dirtyRecord, t, errorMessage)); + + super.collectDirtyRecord(dirtyRecord, t, errorMessage); + } +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/plugin/task/util/DirtyRecord.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/plugin/task/util/DirtyRecord.java new file mode 100644 index 000000000..4ced82ca5 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/statistics/plugin/task/util/DirtyRecord.java @@ -0,0 +1,161 @@ +package com.alibaba.datax.core.statistics.plugin.task.util; + +import com.alibaba.datax.common.element.Column; +import com.alibaba.datax.common.element.Record; +import com.alibaba.datax.common.exception.DataXException; +import com.alibaba.datax.core.util.FrameworkErrorCode; +import com.webank.wedatasphere.exchangis.datax.util.Json; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.util.ArrayList; +import java.util.Date; +import java.util.List; + +public class DirtyRecord implements Record { + private List columns = new ArrayList(); + + public static DirtyRecord asDirtyRecord(final Record record) { + DirtyRecord result = new DirtyRecord(); + for (int i = 0; i < record.getColumnNumber(); i++) { + result.addColumn(record.getColumn(i)); + } + + return result; + } + + @Override + public void addColumn(Column column) { + this.columns.add( + DirtyColumn.asDirtyColumn(column, this.columns.size())); + } + + @Override + public String toString() { + return Json.toJson(this.columns, null); + } + + @Override + public void setColumn(int i, Column column) { + throw DataXException.asDataXException(FrameworkErrorCode.RUNTIME_ERROR, + "该方法不支持!"); + } + + @Override + public Column getColumn(int i) { + throw DataXException.asDataXException(FrameworkErrorCode.RUNTIME_ERROR, + "该方法不支持!"); + } + + @Override + public int getColumnNumber() { + throw DataXException.asDataXException(FrameworkErrorCode.RUNTIME_ERROR, + "该方法不支持!"); + } + + @Override + public int getByteSize() { + return 0; + } + + @Override + public int getMemorySize() { + throw DataXException.asDataXException(FrameworkErrorCode.RUNTIME_ERROR, + "该方法不支持!"); + } + + @Override + public T copyElement() { + return (T)this; + } + + @Override + public List getColumns() { + return columns; + } + + @Override + public String uid() { + return ""; + } + + public void setColumns(List columns) { + this.columns = columns; + } + +} + +class DirtyColumn extends Column { + private int index; + + public static Column asDirtyColumn(final Column column, int index) { + return new DirtyColumn(column, index); + } + + private DirtyColumn(Column column, int index) { + this(null == column ? null : column.getRawData(), + null == column ? Column.Type.NULL : column.getType(), + null == column ? 
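+                // a null source column degrades to (null raw data, Type.NULL, 0 bytes),
+                // so dirty-record formatting never throws on missing input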
0 : column.getByteSize(), index); + } + + public int getIndex() { + return index; + } + + public void setIndex(int index) { + this.index = index; + } + + @Override + public Long asLong() { + throw DataXException.asDataXException(FrameworkErrorCode.RUNTIME_ERROR, + "该方法不支持!"); + } + + @Override + public Double asDouble() { + throw DataXException.asDataXException(FrameworkErrorCode.RUNTIME_ERROR, + "该方法不支持!"); + } + + @Override + public String asString() { + throw DataXException.asDataXException(FrameworkErrorCode.RUNTIME_ERROR, + "该方法不支持!"); + } + + @Override + public Date asDate() { + throw DataXException.asDataXException(FrameworkErrorCode.RUNTIME_ERROR, + "该方法不支持!"); + } + + @Override + public byte[] asBytes() { + throw DataXException.asDataXException(FrameworkErrorCode.RUNTIME_ERROR, + "该方法不支持!"); + } + + @Override + public Boolean asBoolean() { + throw DataXException.asDataXException(FrameworkErrorCode.RUNTIME_ERROR, + "该方法不支持!"); + } + + @Override + public BigDecimal asBigDecimal() { + throw DataXException.asDataXException(FrameworkErrorCode.RUNTIME_ERROR, + "该方法不支持!"); + } + + @Override + public BigInteger asBigInteger() { + throw DataXException.asDataXException(FrameworkErrorCode.RUNTIME_ERROR, + "该方法不支持!"); + } + + private DirtyColumn(Object object, Type type, int byteSize, int index) { + super(object, type, byteSize); + this.setIndex(index); + } +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/taskgroup/TaskGroupContainer.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/taskgroup/TaskGroupContainer.java new file mode 100644 index 000000000..76cfa836e --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/taskgroup/TaskGroupContainer.java @@ -0,0 +1,663 @@ +package com.alibaba.datax.core.taskgroup; + +import com.alibaba.datax.common.constant.PluginType; +import com.webank.wedatasphere.exchangis.datax.common.constant.TransportType; +import com.alibaba.datax.common.exception.CommonErrorCode; +import com.alibaba.datax.common.exception.DataXException; +import com.alibaba.datax.common.plugin.RecordSender; +import com.alibaba.datax.common.plugin.TaskPluginCollector; +import com.alibaba.datax.common.statistics.PerfRecord; +import com.alibaba.datax.common.statistics.PerfTrace; +import com.alibaba.datax.common.statistics.VMInfo; +import com.alibaba.datax.common.util.Configuration; +import com.alibaba.datax.core.AbstractContainer; +import com.alibaba.datax.core.statistics.communication.Communication; +import com.alibaba.datax.core.statistics.communication.CommunicationTool; +import com.alibaba.datax.core.statistics.container.communicator.taskgroup.StandaloneTGContainerCommunicator; +import com.alibaba.datax.core.statistics.plugin.task.AbstractTaskPluginCollector; +import com.alibaba.datax.core.taskgroup.runner.AbstractRunner; +import com.alibaba.datax.core.taskgroup.runner.ReaderRunner; +import com.alibaba.datax.core.taskgroup.runner.WriterRunner; +import com.alibaba.datax.core.transport.channel.RecordChannel; +import com.webank.wedatasphere.exchangis.datax.core.transport.channel.StreamChannel; +import com.alibaba.datax.core.transport.exchanger.BufferedRecordExchanger; +import com.alibaba.datax.core.transport.exchanger.BufferedRecordTransformerExchanger; +import com.webank.wedatasphere.exchangis.datax.core.transport.stream.ChannelInput; +import com.webank.wedatasphere.exchangis.datax.core.transport.stream.ChannelOutput; +import 
com.alibaba.datax.core.transport.transformer.TransformerExecution; +import com.alibaba.datax.core.util.ClassUtil; +import com.alibaba.datax.core.util.FrameworkErrorCode; +import com.alibaba.datax.core.util.TransformerUtil; +import com.alibaba.datax.core.util.container.CoreConstant; +import com.alibaba.datax.core.util.container.LoadUtil; +import com.alibaba.datax.dataxservice.face.domain.enums.State; +import com.webank.wedatasphere.exchangis.datax.util.Json; +import org.apache.commons.lang3.Validate; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.*; +import java.util.concurrent.CopyOnWriteArrayList; + +public class TaskGroupContainer extends AbstractContainer { + private static final Logger LOG = LoggerFactory + .getLogger(TaskGroupContainer.class); + + /** + * 当前taskGroup所属jobId + */ + private long jobId; + + /** + * 当前taskGroupId + */ + private int taskGroupId; + + /** + * 使用的record channel类 + */ + private String recordChannelClazz; + + private String streamChannelClazz; + /** + * task收集器使用的类 + */ + private String taskCollectorClass; + + private TransportType transportType; + + private TaskMonitor taskMonitor = TaskMonitor.getInstance(); + + private volatile boolean isShutdown = false; + /** + * running task + */ + private CopyOnWriteArrayList runTasks; + + public TaskGroupContainer(Configuration configuration) { + super(configuration); + initCommunicator(configuration); + + this.jobId = this.configuration.getLong( + CoreConstant.DATAX_CORE_CONTAINER_JOB_ID); + this.taskGroupId = this.configuration.getInt( + CoreConstant.DATAX_CORE_CONTAINER_TASKGROUP_ID); + + this.recordChannelClazz = this.configuration.getString( + CoreConstant.DATAX_CORE_TRANSPORT_RECORD_CHANNEL_CLASS); + + this.streamChannelClazz = this.configuration.getString( + CoreConstant.DATAX_CORE_TRANSPORT_STREAM_CHANNEL_CLASS); + + this.taskCollectorClass = this.configuration.getString( + CoreConstant.DATAX_CORE_STATISTICS_COLLECTOR_PLUGIN_TASKCLASS); + + this.transportType = TransportType.valueOf(this.configuration.getString( + CoreConstant.DATAX_CORE_TRANSPORT_TYPE).toUpperCase()); + + } + + private void initCommunicator(Configuration configuration) { + super.setContainerCommunicator(new StandaloneTGContainerCommunicator(configuration)); + + } + + public long getJobId() { + return jobId; + } + + public int getTaskGroupId() { + return taskGroupId; + } + + @Override + public void start() { + try { + /** + * 状态check时间间隔,较短,可以把任务及时分发到对应channel中 + */ + int sleepIntervalInMillSec = this.configuration.getInt( + CoreConstant.DATAX_CORE_CONTAINER_TASKGROUP_SLEEPINTERVAL, 100); + /** + * 状态汇报时间间隔,稍长,避免大量汇报 + */ + long reportIntervalInMillSec = this.configuration.getLong( + CoreConstant.DATAX_CORE_CONTAINER_TASKGROUP_REPORTINTERVAL, + 10000); + /** + * 2分钟汇报一次性能统计 + */ + + // 获取channel数目 + int channelNumber = this.configuration.getInt( + CoreConstant.DATAX_CORE_CONTAINER_TASKGROUP_CHANNEL); + + int taskMaxRetryTimes = this.configuration.getInt( + CoreConstant.DATAX_CORE_CONTAINER_TASK_FAILOVER_MAXRETRYTIMES, 1); + + long taskRetryIntervalInMsec = this.configuration.getLong( + CoreConstant.DATAX_CORE_CONTAINER_TASK_FAILOVER_RETRYINTERVALINMSEC, 10000); + + long taskMaxWaitInMsec = this.configuration.getLong(CoreConstant.DATAX_CORE_CONTAINER_TASK_FAILOVER_MAXWAITINMSEC, 60000); + + List taskConfigs = this.configuration + .getListConfiguration(CoreConstant.DATAX_JOB_CONTENT); + + if (LOG.isDebugEnabled()) { + LOG.debug("taskGroup[{}]'s task configs[{}]", this.taskGroupId, + Json.toJson(taskConfigs, null)); 
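+                // Main scheduling loop (below): 1) harvest finished tasks, re-queueing
+                // failover-capable failures; 2) abort the group on a FAILED/KILLED task;
+                // 3) start queued tasks while fewer than channelNumber are running;
+                // 4) exit once the queue drains and the collected state is SUCCEEDED;
+                // 5) report progress at most once per reportIntervalInMillSec.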
+ } + + int taskCountInThisTaskGroup = taskConfigs.size(); + LOG.info(String.format( + "taskGroupId=[%d] start [%d] channels for [%d] tasks.", + this.taskGroupId, channelNumber, taskCountInThisTaskGroup)); + + this.containerCommunicator.registerCommunication(taskConfigs); + + runTasks = new CopyOnWriteArrayList<>(); + //taskId与task配置 + Map taskConfigMap = buildTaskConfigMap(taskConfigs); + //待运行task列表 + List taskQueue = buildRemainTasks(taskConfigs); + //taskId与上次失败实例 + Map taskFailedExecutorMap = new HashMap<>(8); + //任务开始时间 + Map taskStartTimeMap = new HashMap<>(8); + + long lastReportTimeStamp = 0; + Communication lastTaskGroupContainerCommunication = new Communication(); + + while (!isShutdown) { + //1.判断task状态 + boolean failedOrKilled = false; + Map communicationMap = containerCommunicator.getCommunicationMap(); + for (Map.Entry entry : communicationMap.entrySet()) { + Integer taskId = entry.getKey(); + Communication taskCommunication = entry.getValue(); + if (!taskCommunication.isFinished()) { + continue; + } + TaskExecutor taskExecutor = removeTask(runTasks, taskId); + + //上面从runTasks里移除了,因此对应在monitor里移除 + taskMonitor.removeTask(taskId); + + //失败,看task是否支持failover,重试次数未超过最大限制 + if (taskCommunication.getState() == State.FAILED) { + taskFailedExecutorMap.put(taskId, taskExecutor); + if (null != taskExecutor && + taskExecutor.supportFailOver() && taskExecutor.getAttemptCount() < taskMaxRetryTimes) { + taskExecutor.shutdown(); //关闭老的executor + //将task的状态重置 + containerCommunicator.resetCommunication(taskId); + Configuration taskConfig = taskConfigMap.get(taskId); + //重新加入任务列表 + taskQueue.add(taskConfig); + } else { + failedOrKilled = true; + break; + } + } else if (taskCommunication.getState() == State.KILLED) { + failedOrKilled = true; + break; + } else if (taskCommunication.getState() == State.SUCCEEDED) { + Long taskStartTime = taskStartTimeMap.get(taskId); + if (taskStartTime != null) { + Long usedTime = System.currentTimeMillis() - taskStartTime; + LOG.info("taskGroup[{}] taskId[{}] is successed, used[{}]ms", + this.taskGroupId, taskId, usedTime); + //usedTime*1000*1000 转换成PerfRecord记录的ns,这里主要是简单登记,进行最长任务的打印。因此增加特定静态方法 + PerfRecord.addPerfRecord(taskGroupId, taskId, PerfRecord.PHASE.TASK_TOTAL, taskStartTime, usedTime * 1000L * 1000L); + taskStartTimeMap.remove(taskId); + taskConfigMap.remove(taskId); + } + } + } + + // 2.发现该taskGroup下taskExecutor的总状态失败则汇报错误 + if (failedOrKilled) { + lastTaskGroupContainerCommunication = reportTaskGroupCommunication( + lastTaskGroupContainerCommunication, taskCountInThisTaskGroup); + + throw DataXException.asDataXException( + FrameworkErrorCode.PLUGIN_RUNTIME_ERROR, lastTaskGroupContainerCommunication.getThrowable()); + } + + //3.有任务未执行,且正在运行的任务数小于最大通道限制 + Iterator iterator = taskQueue.iterator(); + while (iterator.hasNext() && null != runTasks && runTasks.size() < channelNumber) { + Configuration taskConfig = iterator.next(); + Integer taskId = taskConfig.getInt(CoreConstant.TASK_ID); + int attemptCount = 1; + TaskExecutor lastExecutor = taskFailedExecutorMap.get(taskId); + if (lastExecutor != null) { + attemptCount = lastExecutor.getAttemptCount() + 1; + long now = System.currentTimeMillis(); + long failedTime = lastExecutor.getTimeStamp(); + //未到等待时间,继续留在队列 + if (now - failedTime < taskRetryIntervalInMsec) { + continue; + } + //上次失败的task仍未结束 + if (!lastExecutor.isShutdown()) { + if (now - failedTime > taskMaxWaitInMsec) { + markCommunicationFailed(taskId); + reportTaskGroupCommunication(lastTaskGroupContainerCommunication, 
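+                                // flush the latest stats before aborting: this executor stayed
+                                // unresponsive longer than taskMaxWaitInMsec, so failover gives up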
taskCountInThisTaskGroup); + throw DataXException.asDataXException(CommonErrorCode.WAIT_TIME_EXCEED, "task failover等待超时"); + } else { + lastExecutor.shutdown(); //再次尝试关闭 + continue; + } + } else { + LOG.info("taskGroup[{}] taskId[{}] attemptCount[{}] has already shutdown", + this.taskGroupId, taskId, lastExecutor.getAttemptCount()); + } + } + Configuration taskConfigForRun = taskMaxRetryTimes > 1 ? taskConfig.clone() : taskConfig; + TaskExecutor taskExecutor = new TaskExecutor(taskConfigForRun, attemptCount); + taskStartTimeMap.put(taskId, System.currentTimeMillis()); + iterator.remove(); + //first to add into the list of running task, then start + runTasks.add(taskExecutor); + taskExecutor.doStart(); + //上面,增加task到runTasks列表,因此在monitor里注册。 + taskMonitor.registerTask(taskId, this.containerCommunicator.getCommunication(taskId)); + + taskFailedExecutorMap.remove(taskId); + LOG.info("taskGroup[{}] taskId[{}] attemptCount[{}] is started", + this.taskGroupId, taskId, attemptCount); + } + //4.任务列表为空,executor已结束, 搜集状态为success--->成功 + if (taskQueue.isEmpty() && isAllTaskDone(runTasks) && containerCommunicator.collectState() == State.SUCCEEDED) { + // 成功的情况下,也需要汇报一次。否则在任务结束非常快的情况下,采集的信息将会不准确 + lastTaskGroupContainerCommunication = reportTaskGroupCommunication( + lastTaskGroupContainerCommunication, taskCountInThisTaskGroup); + + LOG.info("taskGroup[{}] completed it's tasks.", this.taskGroupId); + break; + } + // 5.如果当前时间已经超出汇报时间的interval,那么我们需要马上汇报 + long now = System.currentTimeMillis(); + if (now - lastReportTimeStamp > reportIntervalInMillSec) { + lastTaskGroupContainerCommunication = reportTaskGroupCommunication( + lastTaskGroupContainerCommunication, taskCountInThisTaskGroup); + + lastReportTimeStamp = now; + + //taskMonitor对于正在运行的task,每reportIntervalInMillSec进行检查 + for (TaskExecutor taskExecutor : runTasks) { + taskMonitor.report(taskExecutor.getTaskId(), this.containerCommunicator.getCommunication(taskExecutor.getTaskId())); + } + + } + + Thread.sleep(sleepIntervalInMillSec); + } + + //6.最后还要汇报一次 + reportTaskGroupCommunication(lastTaskGroupContainerCommunication, taskCountInThisTaskGroup); + + + } catch (Throwable e) { + Communication nowTaskGroupContainerCommunication = this.containerCommunicator.collect(); + + if (nowTaskGroupContainerCommunication.getThrowable() == null) { + nowTaskGroupContainerCommunication.setThrowable(e); + } + nowTaskGroupContainerCommunication.setState(State.FAILED); + this.containerCommunicator.report(nowTaskGroupContainerCommunication); + + throw DataXException.asDataXException( + FrameworkErrorCode.RUNTIME_ERROR, e); + } finally { + if (!PerfTrace.getInstance().isJob()) { + //最后打印cpu的平均消耗,GC的统计 + VMInfo vmInfo = VMInfo.getVmInfo(); + if (vmInfo != null) { + vmInfo.getDelta(false); + LOG.info(vmInfo.totalString()); + } + + LOG.info(PerfTrace.getInstance().summarizeNoException()); + } + } + } + + @Override + public void shutdown() { + isShutdown = true; + if(null != runTasks && !runTasks.isEmpty()){ + runTasks.forEach(taskExecutor -> { + taskExecutor.shutdown(); + runTasks.remove(taskExecutor); + }); + if(runTasks.size() > 0){ + //maybe have new task executors + runTasks.forEach(TaskExecutor::shutdown); + runTasks.clear(); + } + } + } + + public void adjustSpeed(long byteSpeed, long recordSpeed){ + //first to update configuration + configuration.set(CoreConstant.DATAX_CORE_TRANSPORT_CHANNEL_SPEED_BYTE, byteSpeed); + configuration.set(CoreConstant.DATAX_CORE_TRANSPORT_CHANNEL_SPEED_RECORD, recordSpeed); + //adjust dynamically + runTasks.forEach(runTask -> 
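+                // push the new byte/record limits into every live executor's channels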
runTask.adjustChannelSpeed(byteSpeed, recordSpeed)); + } + + private Map buildTaskConfigMap(List configurations) { + Map map = new HashMap<>(); + for (Configuration taskConfig : configurations) { + int taskId = taskConfig.getInt(CoreConstant.TASK_ID); + map.put(taskId, taskConfig); + } + return map; + } + + private List buildRemainTasks(List configurations) { + List remainTasks = new LinkedList<>(); + for (Configuration taskConfig : configurations) { + remainTasks.add(taskConfig); + } + return remainTasks; + } + + private TaskExecutor removeTask(List taskList, int taskId) { + for(TaskExecutor taskExecutor : taskList){ + if(taskExecutor.getTaskId() == taskId){ + taskList.remove(taskExecutor); + return taskExecutor; + } + } + return null; + } + + private boolean isAllTaskDone(List taskList) { + for (TaskExecutor taskExecutor : taskList) { + if (!taskExecutor.isTaskFinished()) { + return false; + } + } + return true; + } + + private Communication reportTaskGroupCommunication(Communication lastTaskGroupContainerCommunication, int taskCount) { + Communication nowTaskGroupContainerCommunication = this.containerCommunicator.collect(); + nowTaskGroupContainerCommunication.setTimestamp(System.currentTimeMillis()); + Communication reportCommunication = CommunicationTool.getReportCommunication(nowTaskGroupContainerCommunication, + lastTaskGroupContainerCommunication, taskCount); + //mark the number of channel running + reportCommunication.setLongCounter(CommunicationTool.CHANNEL_RUNNING, runTasks.size()); + this.containerCommunicator.report(reportCommunication); + return reportCommunication; + } + + private void markCommunicationFailed(Integer taskId) { + Communication communication = containerCommunicator.getCommunication(taskId); + communication.setState(State.FAILED); + } + + /** + * TaskExecutor是一个完整task的执行器 + * 其中包括1:1的reader和writerlastTaskGroupContainerCommunication + */ + class TaskExecutor { + private Configuration taskConfig; + + private int taskId; + + private int attemptCount; + + private RecordChannel recordChannel; + + private StreamChannel streamChannel; + + private Thread readerThread; + + private List writerThreads = new ArrayList<>(); + + private ReaderRunner readerRunner; + + /** + * Support multiply writer runners in executor + */ + private List writerRunners = new ArrayList<>(); + + /** + * 该处的taskCommunication在多处用到: + * 1. recordChannel + * 2. readerRunner和writerRunner + * 3. 
reader和writer的taskPluginCollector + */ + private Communication taskCommunication; + + public TaskExecutor(Configuration taskConf, int attemptCount) { + // 获取该taskExecutor的配置 + this.taskConfig = taskConf; + Validate.isTrue(null != this.taskConfig.getConfiguration(CoreConstant.JOB_READER) + && null != this.taskConfig.getConfiguration(CoreConstant.JOB_WRITER), + "[reader|writer]的插件参数不能为空!"); + + // 得到taskId + this.taskId = this.taskConfig.getInt(CoreConstant.TASK_ID); + this.attemptCount = attemptCount; + + /** + * 由taskId得到该taskExecutor的Communication + * 要传给readerRunner和writerRunner,同时要传给channel作统计用 + */ + this.taskCommunication = containerCommunicator + .getCommunication(taskId); + Validate.notNull(this.taskCommunication, + String.format("taskId[%d]的Communication没有注册过", taskId)); + this.recordChannel = ClassUtil.instantiate(recordChannelClazz, + RecordChannel.class, configuration); + this.streamChannel = ClassUtil.instantiate(streamChannelClazz, + StreamChannel.class, configuration); + this.recordChannel.setCommunication(this.taskCommunication); + this.streamChannel.setCommunication(this.taskCommunication); + + /** + * 获取transformer的参数 + */ + + List transformerInfoExecs = TransformerUtil.buildTransformerInfo(taskConfig); + + /** + * 生成writerThread + */ + List writerTaskList = this.taskConfig.getListConfiguration(CoreConstant.JOB_WRITER); + writerTaskList.forEach(writerTask -> { + String writerPluginName = writerTask.getString(CoreConstant.TASK_NAME); + Configuration writerTaskConf = writerTask.getConfiguration(CoreConstant.TASK_PARAMETER); + System.out.println("writer: " + writerTaskConf.toJSON()); + WriterRunner writerRunner = (WriterRunner)generateRunner(PluginType.WRITER, writerPluginName, + writerTaskConf); + writerRunner.setProcessor(writerTask.getString(CoreConstant.TASK_PROCESSOR)); + Thread writerThread = new Thread(writerRunner, String.format("%d-%d-%d-%s", + jobId, taskGroupId, this.taskId, writerPluginName)); + //Set thread's contextClassLoader + writerThread.setContextClassLoader(LoadUtil.getJarLoader(PluginType.WRITER, writerPluginName)); + writerRunners.add(writerRunner); + writerThreads.add(writerThread); + }); + /** + * 生成readerThread + */ + String readPluginName = this.taskConfig.getString(CoreConstant.JOB_READER_NAME); + Configuration readerTaskConf = this.taskConfig.getConfiguration( + CoreConstant.JOB_READER_PARAMETER); + readerRunner = (ReaderRunner) generateRunner(PluginType.READER, readPluginName, readerTaskConf, transformerInfoExecs); + this.readerThread = new Thread(readerRunner, + String.format("%d-%d-%d-%s", + jobId, taskGroupId, this.taskId, readPluginName)); + /** + * 通过设置thread的contextClassLoader,即可实现同步和主程序不通的加载器 + */ + this.readerThread.setContextClassLoader(LoadUtil.getJarLoader( + PluginType.READER, this.taskConfig.getString( + CoreConstant.JOB_READER_NAME))); + } + + public void doStart() { + //Run writer threads + this.writerThreads.forEach(Thread::start); + + this.writerThreads.forEach(writerThread ->{ + if(!writerThread.isAlive() || this.taskCommunication.getState() == State.FAILED){ + throw DataXException.asDataXException(FrameworkErrorCode.RUNTIME_ERROR, + this.taskCommunication.getThrowable()); + } + }); + + //Run reader thread + this.readerThread.start(); + + // 这里reader可能很快结束 + if (!this.readerThread.isAlive() && this.taskCommunication.getState() == State.FAILED) { + // 这里有可能出现Reader线上启动即挂情况 对于这类情况 需要立刻抛出异常 + throw DataXException.asDataXException( + FrameworkErrorCode.RUNTIME_ERROR, + this.taskCommunication.getThrowable()); + } + } + + + private 
AbstractRunner generateRunner(PluginType pluginType, String pluginName, Configuration taskConf) { + return generateRunner(pluginType, pluginName, taskConf, null); + } + + private AbstractRunner generateRunner(PluginType pluginType, String pluginName, Configuration taskConf, List transformerInfoExecs) { + AbstractRunner newRunner = null; + TaskPluginCollector pluginCollector; + + switch (pluginType) { + case READER: + newRunner = LoadUtil.loadPluginRunner(pluginType, pluginName); + newRunner.setJobConf(taskConf); + if(null != transportType && transportType.equals(TransportType.STREAM)){ + ChannelOutput outputStream = new ChannelOutput(streamChannel); + ((ReaderRunner)newRunner).setChannelOutput(outputStream); + }else { + pluginCollector = ClassUtil.instantiate( + taskCollectorClass, AbstractTaskPluginCollector.class, + configuration, this.taskCommunication, + PluginType.READER); + + RecordSender recordSender; + if (transformerInfoExecs != null && transformerInfoExecs.size() > 0) { + recordSender = new BufferedRecordTransformerExchanger(taskGroupId, this.taskId, this.recordChannel, this.taskCommunication, pluginCollector, transformerInfoExecs); + } else { + recordSender = new BufferedRecordExchanger(this.recordChannel, pluginCollector); + } + /** + * 设置taskPlugin的collector,用来处理脏数据和job/task通信 + */ + newRunner.setTaskPluginCollector(pluginCollector); + ((ReaderRunner) newRunner).setRecordSender(recordSender); + } + break; + case WRITER: + newRunner = LoadUtil.loadPluginRunner(pluginType, pluginName); + newRunner.setJobConf(taskConf); + if(null != transportType && transportType.equals(TransportType.STREAM)){ + ChannelInput inputStream = new ChannelInput(streamChannel); + ((WriterRunner)newRunner).setChannelInput(inputStream); + //Increase consumer + streamChannel.incConsumer(); + }else { + pluginCollector = ClassUtil.instantiate( + taskCollectorClass, AbstractTaskPluginCollector.class, + configuration, this.taskCommunication, + PluginType.WRITER); + ((WriterRunner) newRunner).setRecordReceiver(new BufferedRecordExchanger( + this.recordChannel, pluginCollector)); + /** + * 设置taskPlugin的collector,用来处理脏数据和job/task通信 + */ + newRunner.setTaskPluginCollector(pluginCollector); + //Increase consumer + this.recordChannel.incConsumer(); + } + break; + default: + throw DataXException.asDataXException(FrameworkErrorCode.ARGUMENT_ERROR, "Cant generateRunner for:" + pluginType); + } + + newRunner.setTaskGroupId(taskGroupId); + newRunner.setTaskId(this.taskId); + newRunner.setRunnerCommunication(this.taskCommunication); + + return newRunner; + } + + /** + * 检查任务是否结束 + */ + private boolean isTaskFinished() { + // 如果reader 或 writer没有完成工作,那么直接返回工作没有完成 + if (readerThread.isAlive()) { + return false; + } + for(Thread writerThread : writerThreads){ + if(writerThread.isAlive()){ + return false; + } + } + return taskCommunication != null && taskCommunication.isFinished(); + } + + private int getTaskId() { + return taskId; + } + + private long getTimeStamp() { + return taskCommunication.getTimestamp(); + } + + private int getAttemptCount() { + return attemptCount; + } + + private boolean supportFailOver() { + for(WriterRunner runner : writerRunners){ + if(!runner.supportFailOver()){ + return false; + } + } + return true; + } + + private void adjustChannelSpeed(long byteSpeed, long dataSpeed){ + this.recordChannel.adjustRateLimit(byteSpeed, dataSpeed); + this.streamChannel.adjustRateLimit(byteSpeed, dataSpeed); + } + private void shutdown() { + writerRunners.forEach(WriterRunner::shutdown); + readerRunner.shutdown(); + 
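+            // cooperative shutdown first (runners close their channels); any thread
+            // still blocked on I/O is interrupted below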
writerThreads.forEach(writerThread -> { + if(writerThread.isAlive()){ + writerThread.interrupt(); + } + }); + if (readerThread.isAlive()) { + readerThread.interrupt(); + } + } + + private boolean isShutdown() { + if(readerThread.isAlive()){ + return false; + } + for(Thread writerThread : writerThreads){ + if(writerThread.isAlive()){ + return false; + } + } + return true; + } + } +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/taskgroup/TaskMonitor.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/taskgroup/TaskMonitor.java new file mode 100644 index 000000000..597940bfc --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/taskgroup/TaskMonitor.java @@ -0,0 +1,113 @@ +package com.alibaba.datax.core.taskgroup; + +import com.alibaba.datax.common.exception.CommonErrorCode; +import com.alibaba.datax.common.exception.DataXException; +import com.alibaba.datax.core.statistics.communication.Communication; +import com.alibaba.datax.core.statistics.communication.CommunicationTool; +import com.alibaba.datax.dataxservice.face.domain.enums.State; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.concurrent.ConcurrentHashMap; + +/** + * Created by liqiang on 15/7/23. + */ +public class TaskMonitor { + + private static final Logger LOG = LoggerFactory.getLogger(TaskMonitor.class); + private static final TaskMonitor instance = new TaskMonitor(); + private static long EXPIRED_TIME = 172800L * 1000; + + private ConcurrentHashMap tasks = new ConcurrentHashMap(); + + private TaskMonitor() { + } + + public static TaskMonitor getInstance() { + return instance; + } + + public void registerTask(Integer taskid, Communication communication) { + //如果task已经finish,直接返回 + if (communication.isFinished()) { + return; + } + tasks.putIfAbsent(taskid, new TaskCommunication(taskid, communication)); + } + + public void removeTask(Integer taskid) { + tasks.remove(taskid); + } + + public void report(Integer taskid, Communication communication) { + //如果task已经finish,直接返回 + if (communication.isFinished()) { + return; + } + if (!tasks.containsKey(taskid)) { + LOG.warn("unexpected: taskid({}) missed.", taskid); + tasks.putIfAbsent(taskid, new TaskCommunication(taskid, communication)); + } else { + tasks.get(taskid).report(communication); + } + } + + public TaskCommunication getTaskCommunication(Integer taskid) { + return tasks.get(taskid); + } + + + public static class TaskCommunication { + private Integer taskid; + //记录最后更新的communication + private long lastAllReadRecords = -1; + //只有第一次,或者统计变更时才会更新TS + private long lastUpdateComunicationTS; + private long ttl; + + private TaskCommunication(Integer taskid, Communication communication) { + this.taskid = taskid; + lastAllReadRecords = CommunicationTool.getTotalReadRecords(communication); + ttl = System.currentTimeMillis(); + lastUpdateComunicationTS = ttl; + } + + public void report(Communication communication) { + + ttl = System.currentTimeMillis(); + //采集的数量增长,则变更当前记录, 优先判断这个条件,因为目的是不卡住,而不是expired + if (CommunicationTool.getTotalReadRecords(communication) > lastAllReadRecords) { + lastAllReadRecords = CommunicationTool.getTotalReadRecords(communication); + lastUpdateComunicationTS = ttl; + } else if (isExpired(lastUpdateComunicationTS)) { + communication.setState(State.FAILED); + communication.setTimestamp(ttl); + communication.setThrowable(DataXException.asDataXException(CommonErrorCode.TASK_HUNG_EXPIRED, + 
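+                        // read counter unchanged for EXPIRED_TIME (48h): treat the task as hung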
String.format("task(%s) hung expired [allReadRecord(%s), elased(%s)]", taskid, lastAllReadRecords, (ttl - lastUpdateComunicationTS)))); + } + + + } + + private boolean isExpired(long lastUpdateComunicationTS) { + return System.currentTimeMillis() - lastUpdateComunicationTS > EXPIRED_TIME; + } + + public Integer getTaskid() { + return taskid; + } + + public long getLastAllReadRecords() { + return lastAllReadRecords; + } + + public long getLastUpdateComunicationTS() { + return lastUpdateComunicationTS; + } + + public long getTtl() { + return ttl; + } + } +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/taskgroup/runner/AbstractRunner.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/taskgroup/runner/AbstractRunner.java new file mode 100644 index 000000000..8de95be62 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/taskgroup/runner/AbstractRunner.java @@ -0,0 +1,119 @@ +package com.alibaba.datax.core.taskgroup.runner; + +import com.alibaba.datax.common.plugin.AbstractTaskPlugin; +import com.alibaba.datax.common.plugin.TaskPluginCollector; +import com.alibaba.datax.common.util.Configuration; +import com.alibaba.datax.core.statistics.communication.Communication; +import com.alibaba.datax.core.statistics.communication.CommunicationTool; +import com.alibaba.datax.dataxservice.face.domain.enums.State; +import org.apache.commons.lang.Validate; + +public abstract class AbstractRunner { + private AbstractTaskPlugin plugin; + + private Configuration jobConf; + + private Communication runnerCommunication; + + private int taskGroupId; + + private int taskId; + + public AbstractRunner(AbstractTaskPlugin taskPlugin) { + this.plugin = taskPlugin; + } + + public void destroy() { + if (this.plugin != null) { + this.plugin.destroy(); + } + } + + public State getRunnerState() { + return this.runnerCommunication.getState(); + } + + public AbstractTaskPlugin getPlugin() { + return plugin; + } + + public void setPlugin(AbstractTaskPlugin plugin) { + this.plugin = plugin; + } + + public Configuration getJobConf() { + return jobConf; + } + + public void setJobConf(Configuration jobConf) { + this.jobConf = jobConf; + this.plugin.setPluginJobConf(jobConf); + } + + public void setTaskPluginCollector(TaskPluginCollector pluginCollector) { + this.plugin.setTaskPluginCollector(pluginCollector); + } + + private void mark(State state) { + if (state == State.SUCCEEDED) { + this.runnerCommunication.increaseCounter(CommunicationTool.TASK_RUNNING_WRITERS, -1); + if(this.runnerCommunication.getLongCounter(CommunicationTool.TASK_RUNNING_WRITERS) <= 0){ + this.runnerCommunication.setState(state); + // 对 stage + 1 + this.runnerCommunication.setLongCounter(CommunicationTool.STAGE, + this.runnerCommunication.getLongCounter(CommunicationTool.STAGE) + 1); + } + } + this.runnerCommunication.setState(state); + } + + public void markRun() { + mark(State.RUNNING); + } + + public void markSuccess() { + mark(State.SUCCEEDED); + } + + public void markFail(final Throwable throwable) { + mark(State.FAILED); + this.runnerCommunication.setTimestamp(System.currentTimeMillis()); + this.runnerCommunication.setThrowable(throwable); + } + + /** + * @param taskGroupId the taskGroupId to set + */ + public void setTaskGroupId(int taskGroupId) { + this.taskGroupId = taskGroupId; + this.plugin.setTaskGroupId(taskGroupId); + } + + /** + * @return the taskGroupId + */ + public int getTaskGroupId() { + return taskGroupId; + } 
+ + public int getTaskId() { + return taskId; + } + + public void setTaskId(int taskId) { + this.taskId = taskId; + this.plugin.setTaskId(taskId); + } + + public void setRunnerCommunication(final Communication runnerCommunication) { + Validate.notNull(runnerCommunication, + "插件的Communication不能为空"); + this.runnerCommunication = runnerCommunication; + } + + public Communication getRunnerCommunication() { + return runnerCommunication; + } + + public abstract void shutdown(); +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/taskgroup/runner/ReaderRunner.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/taskgroup/runner/ReaderRunner.java new file mode 100644 index 000000000..b02801612 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/taskgroup/runner/ReaderRunner.java @@ -0,0 +1,126 @@ +package com.alibaba.datax.core.taskgroup.runner; + +import com.alibaba.datax.common.plugin.AbstractTaskPlugin; +import com.alibaba.datax.common.plugin.RecordSender; +import com.alibaba.datax.common.spi.Reader; +import com.alibaba.datax.common.statistics.PerfRecord; +import com.alibaba.datax.core.statistics.communication.CommunicationTool; +import com.webank.wedatasphere.exchangis.datax.core.transport.stream.ChannelOutput; +import com.alibaba.datax.dataxservice.face.domain.enums.State; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.InterruptedIOException; + +/** + * Created by jingxing on 14-9-1. + *
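 + * Executes the reader side of a single task slice: init -> prepare -> startRead -> post,
 + * each phase timed with a PerfRecord.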

+ * 单个slice的reader执行调用 + */ +public class ReaderRunner extends AbstractRunner implements Runnable { + + private static final Logger LOG = LoggerFactory + .getLogger(ReaderRunner.class); + + private RecordSender recordSender; + + private ChannelOutput channelOutput; + + private boolean shutdown; + + public void setRecordSender(RecordSender recordSender) { + this.recordSender = recordSender; + } + + public void setChannelOutput(ChannelOutput channelOutput){ + this.channelOutput = channelOutput; + } + public ReaderRunner(AbstractTaskPlugin abstractTaskPlugin) { + super(abstractTaskPlugin); + } + + @Override + public void run() { + Reader.Task taskReader = (Reader.Task) this.getPlugin(); + + //统计waitWriterTime,并且在finally才end。 + PerfRecord channelWaitWrite = new PerfRecord(getTaskGroupId(), getTaskId(), PerfRecord.PHASE.WAIT_WRITE_TIME); + try { + channelWaitWrite.start(); + + LOG.debug("task reader starts to do init ..."); + PerfRecord initPerfRecord = new PerfRecord(getTaskGroupId(), getTaskId(), PerfRecord.PHASE.READ_TASK_INIT); + initPerfRecord.start(); + taskReader.init(); + initPerfRecord.end(); + + LOG.debug("task reader starts to do prepare ..."); + PerfRecord preparePerfRecord = new PerfRecord(getTaskGroupId(), getTaskId(), PerfRecord.PHASE.READ_TASK_PREPARE); + preparePerfRecord.start(); + taskReader.prepare(); + preparePerfRecord.end(); + + LOG.debug("task reader starts to read ..."); + PerfRecord dataPerfRecord = new PerfRecord(getTaskGroupId(), getTaskId(), PerfRecord.PHASE.READ_TASK_DATA); + dataPerfRecord.start(); + if(null != recordSender) { + taskReader.startRead(recordSender); + recordSender.terminate(); + dataPerfRecord.addCount(CommunicationTool.getTotalReadRecords(super.getRunnerCommunication())); + }else if(null != channelOutput){ + taskReader.startRead(channelOutput); + channelOutput.close(); + } + dataPerfRecord.addSize(CommunicationTool.getTotalReadBytes(super.getRunnerCommunication())); + dataPerfRecord.end(); + LOG.debug("task reader starts to do post ..."); + PerfRecord postPerfRecord = new PerfRecord(getTaskGroupId(), getTaskId(), PerfRecord.PHASE.READ_TASK_POST); + postPerfRecord.start(); + taskReader.post(); + postPerfRecord.end(); + // automatic flush + // super.markSuccess(); 这里不能标记为成功,成功的标志由 writerRunner 来标志(否则可能导致 reader 先结束,而 writer 还没有结束的严重 bug) + } catch (Throwable e) { + if(shutdown){ + //have been shutdown by task group container + return; + } + Throwable cause = e; + do{ + if(cause instanceof InterruptedException || cause instanceof InterruptedIOException){ + this.getRunnerCommunication().setState(State.KILLED); + return; + } + cause = cause.getCause(); + }while(null != cause); + LOG.error("Reader runner Received Exceptions:", e); + super.markFail(e); + } finally { + LOG.debug("task reader starts to do destroy ..."); + PerfRecord desPerfRecord = new PerfRecord(getTaskGroupId(), getTaskId(), PerfRecord.PHASE.READ_TASK_DESTROY); + desPerfRecord.start(); + super.destroy(); + desPerfRecord.end(); + + channelWaitWrite.end(super.getRunnerCommunication().getLongCounter(CommunicationTool.WAIT_WRITER_TIME)); + + long transformerUsedTime = super.getRunnerCommunication().getLongCounter(CommunicationTool.TRANSFORMER_USED_TIME); + if (transformerUsedTime > 0) { + PerfRecord transformerRecord = new PerfRecord(getTaskGroupId(), getTaskId(), PerfRecord.PHASE.TRANSFORMER_TIME); + transformerRecord.start(); + transformerRecord.end(transformerUsedTime); + } + } + } + + @Override + public void shutdown() { + shutdown = true; + if(null != recordSender){ + 
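+            // wakes a reader blocked on a full record channel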
recordSender.shutdown(); + } + if(null != channelOutput){ + channelOutput.shutdown(); + } + } +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/taskgroup/runner/TaskGroupContainerRunner.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/taskgroup/runner/TaskGroupContainerRunner.java new file mode 100644 index 000000000..2a2416c3f --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/taskgroup/runner/TaskGroupContainerRunner.java @@ -0,0 +1,44 @@ +package com.alibaba.datax.core.taskgroup.runner; + +import com.alibaba.datax.common.exception.DataXException; +import com.alibaba.datax.core.taskgroup.TaskGroupContainer; +import com.alibaba.datax.core.util.FrameworkErrorCode; +import com.alibaba.datax.dataxservice.face.domain.enums.State; + +public class TaskGroupContainerRunner implements Runnable { + + private TaskGroupContainer taskGroupContainer; + + private State state; + + public TaskGroupContainerRunner(TaskGroupContainer taskGroup) { + this.taskGroupContainer = taskGroup; + this.state = State.SUCCEEDED; + } + + @Override + public void run() { + try { + Thread.currentThread().setName( + String.format("taskGroup-%d", this.taskGroupContainer.getTaskGroupId())); + this.taskGroupContainer.start(); + this.state = State.SUCCEEDED; + } catch (Throwable e) { + this.state = State.FAILED; + throw DataXException.asDataXException( + FrameworkErrorCode.RUNTIME_ERROR, e); + } + } + + public TaskGroupContainer getTaskGroupContainer() { + return taskGroupContainer; + } + + public State getState() { + return state; + } + + public void setState(State state) { + this.state = state; + } +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/taskgroup/runner/WriterRunner.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/taskgroup/runner/WriterRunner.java new file mode 100644 index 000000000..e0ecfbe45 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/taskgroup/runner/WriterRunner.java @@ -0,0 +1,260 @@ +package com.alibaba.datax.core.taskgroup.runner; + +import com.alibaba.datax.common.element.*; +import com.alibaba.datax.common.exception.DataXException; +import com.alibaba.datax.common.plugin.AbstractTaskPlugin; +import com.alibaba.datax.common.plugin.BasicDataReceiver; +import com.alibaba.datax.common.plugin.RecordReceiver; +import com.alibaba.datax.common.spi.Writer; +import com.alibaba.datax.common.statistics.PerfRecord; +import com.webank.wedatasphere.exchangis.datax.core.ThreadLocalSecurityManager; +import com.webank.wedatasphere.exchangis.datax.core.processor.Processor; +import com.webank.wedatasphere.exchangis.datax.core.processor.ProcessorSecurityManager; +import com.alibaba.datax.core.statistics.communication.CommunicationTool; +import com.webank.wedatasphere.exchangis.datax.core.transport.stream.ChannelInput; +import com.alibaba.datax.core.util.ClassUtil; +import com.alibaba.datax.core.util.FrameworkErrorCode; +import com.alibaba.datax.dataxservice.face.domain.enums.State; +import org.apache.commons.lang.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.InterruptedIOException; +import java.lang.reflect.ParameterizedType; +import java.lang.reflect.Type; +import java.util.ArrayList; +import java.util.List; +import java.util.function.Supplier; + +/** + * Created by jingxing on 14-9-1. + *
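 + * Executes the writer side of a single task slice. Task success is marked here
 + * (markSuccess), never in ReaderRunner, so a task only completes once the writer drains.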

+ * 单个slice的writer执行调用 + */ +public class WriterRunner extends AbstractRunner implements Runnable { + + private static final Logger LOG = LoggerFactory + .getLogger(WriterRunner.class); + + + private boolean shutdown; + + private RecordReceiver recordReceiver; + + private ChannelInput channelInput; + + private String processor; + + public void setRecordReceiver(RecordReceiver receiver) { + this.recordReceiver = receiver; + } + + public void setChannelInput(ChannelInput channelInput){ + this.channelInput = channelInput; + } + + public void setProcessor(String processor){ + this.processor = processor; + } + + public WriterRunner(AbstractTaskPlugin abstractTaskPlugin) { + super(abstractTaskPlugin); + } + + @Override + public void run() { + Writer.Task taskWriter = (Writer.Task) this.getPlugin(); + //统计waitReadTime,并且在finally end + PerfRecord channelWaitRead = new PerfRecord(getTaskGroupId(), getTaskId(), PerfRecord.PHASE.WAIT_READ_TIME); + try { + channelWaitRead.start(); + LOG.debug("task writer starts to do init ..."); + PerfRecord initPerfRecord = new PerfRecord(getTaskGroupId(), getTaskId(), PerfRecord.PHASE.WRITE_TASK_INIT); + initPerfRecord.start(); + taskWriter.init(); + initPerfRecord.end(); + LOG.debug("task writer starts to do prepare ..."); + PerfRecord preparePerfRecord = new PerfRecord(getTaskGroupId(), getTaskId(), PerfRecord.PHASE.WRITE_TASK_PREPARE); + preparePerfRecord.start(); + taskWriter.prepare(); + preparePerfRecord.end(); + LOG.debug("task writer starts to write ..."); + PerfRecord dataPerfRecord = new PerfRecord(getTaskGroupId(), getTaskId(), PerfRecord.PHASE.WRITE_TASK_DATA); + dataPerfRecord.start(); + if(null != recordReceiver){ + if(StringUtils.isNotBlank(processor)){ + ClassLoader currentClassLoader = Thread.currentThread().getContextClassLoader(); + Processor processIns = ClassUtil.instantiate(processor, Processor.class, + currentClassLoader); + startToWrite(taskWriter, recordReceiver, processIns, currentClassLoader); + }else { + taskWriter.startWrite(recordReceiver); + } + dataPerfRecord.addCount(CommunicationTool.getTotalReadRecords(super.getRunnerCommunication())); + }else if(null != channelInput){ + taskWriter.startWrite(channelInput); + } + dataPerfRecord.addSize(CommunicationTool.getTotalReadBytes(super.getRunnerCommunication())); + dataPerfRecord.end(); + + LOG.debug("task writer starts to do post ..."); + PerfRecord postPerfRecord = new PerfRecord(getTaskGroupId(), getTaskId(), PerfRecord.PHASE.WRITE_TASK_POST); + postPerfRecord.start(); + taskWriter.post(); + postPerfRecord.end(); + super.markSuccess(); + } catch(Throwable e) { + if(shutdown){ + //have been shutdown by task group container + return; + } + Throwable cause = e; + while(null != cause){ + if(cause instanceof InterruptedException || cause instanceof InterruptedIOException){ + this.getRunnerCommunication().setState(State.KILLED); + return; + } + cause = cause.getCause(); + } + LOG.error("Writer Runner Received Exceptions:", e); + super.markFail(e); + } finally { + LOG.debug("task writer starts to do destroy ..."); + PerfRecord desPerfRecord = new PerfRecord(getTaskGroupId(), getTaskId(), PerfRecord.PHASE.WRITE_TASK_DESTROY); + desPerfRecord.start(); + super.destroy(); + desPerfRecord.end(); + channelWaitRead.end(super.getRunnerCommunication().getLongCounter(CommunicationTool.WAIT_READER_TIME)); + } + } + + public boolean supportFailOver() { + Writer.Task taskWriter = (Writer.Task) this.getPlugin(); + return taskWriter.supportFailOver(); + } + + @Override + public void shutdown() { + shutdown = 
true; + if(null != recordReceiver){ + recordReceiver.shutdown(); + } + if(null != channelInput){ + channelInput.shutdown(); + } + } + + private void startToWrite(Writer.Task taskWriter, RecordReceiver recordReceiver, Processor processor + , ClassLoader runtimeClassLoader){ + Class clazz = Object.class; + Type[] types = processor.getClass().getGenericInterfaces(); + for(Type type : types){ + if(type instanceof ParameterizedType){ + ParameterizedType parameterizedType = (ParameterizedType)type; + if(parameterizedType.getRawType().getTypeName().equals( + Processor.class.getTypeName() + )){ + clazz = (Class)parameterizedType.getActualTypeArguments()[0]; + break; + } + } + } + ThreadLocalSecurityManager rootSecurityManager = null; + if(System.getSecurityManager() instanceof ThreadLocalSecurityManager){ + rootSecurityManager = (ThreadLocalSecurityManager)System.getSecurityManager(); + }else{ + rootSecurityManager = new ThreadLocalSecurityManager(); + System.setSecurityManager(rootSecurityManager); + } + ThreadLocalSecurityManager finalRootSecurityManager = rootSecurityManager; + ProcessorSecurityManager processorSecurityManager = new ProcessorSecurityManager(System.getProperty("user.dir")); + if(clazz.equals(Record.class)){ + taskWriter.startWrite(new RecordReceiver() { + @Override + public Record getFromReader() { + return doInSecurity(finalRootSecurityManager, processorSecurityManager, ()-> + { + try { + Record record ; + if(!Thread.currentThread().getContextClassLoader().equals(runtimeClassLoader)){ + Thread.currentThread().setContextClassLoader(runtimeClassLoader); + } + while(null != (record = recordReceiver.getFromReader())){ + Record result = (Record) processor.process(transformColumns(record.getColumns())); + if (null != result){ + return result; + } + } + return null; + } catch (Exception e) { + throw DataXException.asDataXException(FrameworkErrorCode.PROCESSOR_RUN_ERROR, e); + } + }); + } + + @Override + public void shutdown() { + recordReceiver.shutdown(); + } + }); + }else{ + taskWriter.startWrite(new BasicDataReceiver() { + @Override + public Object getFromReader() { + return doInSecurity(finalRootSecurityManager, processorSecurityManager, ()-> + { + try { + Record record; + while(null != (record = recordReceiver.getFromReader())){ + Object result = processor.process(transformColumns(record.getColumns())); + if (null != result){ + return result; + } + } + return null; + } catch (Exception e) { + throw DataXException.asDataXException(FrameworkErrorCode.PROCESSOR_RUN_ERROR, e); + } + }); + } + + @Override + public void shutdown() { + recordReceiver.shutdown(); + } + }, clazz); + } + } + + private T doInSecurity(ThreadLocalSecurityManager rootSecurityManager, + ProcessorSecurityManager processorSecurityManager, Supplier supplier){ + rootSecurityManager.setThreadSecurityManager(this, processorSecurityManager); + try { + return supplier.get(); + }finally { + rootSecurityManager.removeThreadSecurityManager(this); + } + } + + private List transformColumns(List columns){ + List columnData = new ArrayList<>(); + columns.forEach(column -> { + if(column instanceof StringColumn){ + columnData.add(column.asString()); + }else if(column instanceof BytesColumn){ + columnData.add(column.asBytes()); + }else if(column instanceof BoolColumn){ + columnData.add(column.asBoolean()); + }else if(column instanceof DateColumn){ + columnData.add(column.asDate()); + }else if(column instanceof DoubleColumn){ + columnData.add(column.asDouble()); + }else if(column instanceof LongColumn){ + 
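+                // unwrap to the raw Long; unrecognized column kinds fall back to asString() below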
columnData.add(column.asLong()); + }else{ + columnData.add(column.asString()); + } + }); + return columnData; + } +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/channel/AbstractChannel.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/channel/AbstractChannel.java new file mode 100644 index 000000000..9107c23fa --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/channel/AbstractChannel.java @@ -0,0 +1,358 @@ +package com.alibaba.datax.core.transport.channel; + +import com.alibaba.datax.common.util.Configuration; +import com.alibaba.datax.core.statistics.communication.Communication; +import com.alibaba.datax.core.statistics.communication.CommunicationTool; +import com.alibaba.datax.core.util.container.CoreConstant; +import com.google.common.util.concurrent.RateLimiter; +import com.webank.wedatasphere.exchangis.datax.core.transport.channel.ChannelElement; +import org.apache.commons.lang3.Validate; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicLong; +import java.util.concurrent.locks.Condition; +import java.util.concurrent.locks.ReentrantLock; +import java.util.function.Consumer; + +/** + * Created by jingxing on 14-8-25. + * Abstracted and enhanced by davidhua@webank.com + */ +public abstract class AbstractChannel { + private static final Logger LOG = LoggerFactory.getLogger(AbstractChannel.class); + + protected int taskGroupId; + + private int capacity; + + protected int byteCapacity; + + /** + * bps: bytes/s + */ + private long byteSpeed; + private RateLimiter rateLimiterByte; + + /** + * tps: domain/s + */ + protected long dataSpeed; + + RateLimiter rateLimiterData; + + private long flowControlInterval; + + private volatile boolean isClosed = false; + + protected Configuration configuration = null; + + protected AtomicLong waitReaderTime = new AtomicLong(0); + + protected AtomicLong waitWriterTime = new AtomicLong(0); + + private Boolean isFirstPrint = true; + + private Communication currentCommunication; + + private Communication lastCommunication = new Communication(); + + /** + * Consume lock + */ + private ReentrantLock consumeLock = new ReentrantLock(); + + /** + * Number of consumers + */ + protected int consumers = 0; + + /** + * Cache from pulling + */ + private List cachePulled = new ArrayList<>(); + /** + * If set consumeIsolated = true, channel will use consume cache to distinct different consuming + */ + protected boolean consumeIsolated = false; + /** + * Semaphore for consumers + */ + private AtomicInteger consumeSem = new AtomicInteger(consumers - 1); + /** + * Counters of consumers for consuming from cache + */ + private ConcurrentHashMap consumeCache = new ConcurrentHashMap<>(); + + private Condition notConsumed = consumeLock.newCondition(); + + /** + * + * @param configuration task configuration + */ + public AbstractChannel(final Configuration configuration){ + this.configuration = configuration; + int capacity = configuration.getInt( + CoreConstant.DATAX_CORE_TRANSPORT_CHANNEL_CAPACITY, 2048); + long byteSpeed = configuration.getLong( + CoreConstant.DATAX_CORE_TRANSPORT_CHANNEL_SPEED_BYTE, 1024L * 1024L); + + if(capacity <= 0){ + throw new 
IllegalArgumentException(String.format( + "通道容量[%d]必须大于0.", capacity)); + } + if(isFirstPrint){ + firstPrint(); + isFirstPrint = false; + } + this.taskGroupId = configuration.getInt( + CoreConstant.DATAX_CORE_CONTAINER_TASKGROUP_ID); + this.capacity = capacity; + this.byteSpeed = byteSpeed; + this.rateLimiterByte = RateLimiter.create(this.byteSpeed); + this.flowControlInterval = configuration.getLong( + CoreConstant.DATAX_CORE_TRANSPORT_CHANNEL_FLOWCONTROLINTERVAL, 1000); + this.byteCapacity = configuration.getInt( + CoreConstant.DATAX_CORE_TRANSPORT_CHANNEL_CAPACITY_BYTE, 8 * 1024 * 1024); + } + + public void close(){ + this.isClosed = true; + } + + public void open(){ + this.isClosed = false; + } + + public boolean isClosed(){ + return isClosed; + } + + public int getTaskGroupId(){ + return this.taskGroupId; + } + + public int getCapacity(){ + return capacity; + } + + public long getByteSpeed(){ + return byteSpeed; + } + + public Configuration getConfiguration(){ + return this.configuration; + } + + public void setCommunication(final Communication communication){ + this.currentCommunication = communication; + this.lastCommunication.reset(); + } + + public void push(final T t){ + Validate.notNull(t, "push domain cannot be empty in channel"); + this.doPush(t); + this.statPush(1L, t.getByteSize()); + } + + public void pushTerminate(final T t){ + Validate.notNull(t, "push domain cannot be empty in channel"); + this.doPush(t); + } + + public void pushAll(final Collection collection){ + Validate.notNull(collection); + Validate.noNullElements(collection); + for(T t : collection){ + push(t); + } + } + + public T pull(){ + List pulled = (List) doPullInSync(1, new ArrayList<>(), collection -> collection.add(this.doPull())); + T data = pulled.get(0); + this.statPull(1L, data.getByteSize()); + return data; + } + + public void pullAll(final Collection collection){ + Validate.notNull(collection); + doPullInSync(Integer.MAX_VALUE, collection, this::doPullAll); + this.statPull(collection.size(), this.getByteSize(collection)); + } + + private Collection doPullInSync(int maxPullSize, Collection pulled, Consumer> pullFunction){ + String hashCode = String.valueOf(Thread.currentThread().hashCode()); + pulled.clear(); + consumeLock.lock(); + try { + while(!cachePulled.isEmpty()){ + AtomicInteger counter = consumeCache.computeIfAbsent(hashCode, key -> new AtomicInteger(0)); + //Only the different consuming threads can consume the cache + int pos = counter.get(); + if(consumeSem.get() > 0 && pos >= cachePulled.size()){ + try { + //Await other consumers + notConsumed.await(); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + throw new IllegalStateException(e); + } + }else { + int count = 0; + for(int i = pos; count < maxPullSize && i < cachePulled.size(); i++){ + //Consume cache + pulled.add((T) cachePulled.get(i).copyElement()); + count ++; + } + if(counter.addAndGet(count) >= cachePulled.size() && consumeSem.decrementAndGet() <= 0){ + //Empty the cache and reset the semaphore + cachePulled.clear(); + consumeSem.set(consumers - 1); + consumeCache.forEach((key, value) -> value.set(0)); + notConsumed.signalAll(); + } + return pulled; + } + } + //Fill pulled collection + pullFunction.accept(pulled); + if(consumers > 1 && consumeIsolated){ + //In the situation of multiply consumers, use the cache + cachePulled.addAll(pulled); + AtomicInteger counter = consumeCache.computeIfAbsent(hashCode, key -> new AtomicInteger(0)); + //Mark the current thread that has consumed the cache + 
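+                // (on its next pull it waits on notConsumed until the other consumers catch up)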
counter.set(pulled.size()); + } + return pulled; + }finally{ + consumeLock.unlock(); + } + } + private void statPull(long dataSize, long byteSize){ + + statPull(currentCommunication, dataSize); + currentCommunication.increaseCounter(CommunicationTool.WRITE_RECEIVED_BYTES, byteSize); + } + + public void statPush(long dataSize,long byteSize){ + boolean isChannelByteSpeedLimit = (this.byteSpeed > 0); + boolean isChannelDataSpeedLimit = (this.dataSpeed > 0); + + if(!isChannelByteSpeedLimit && !isChannelDataSpeedLimit){ + return; + } + if(byteSize > 0){ + rateLimiterByte.acquire((int)byteSize); + } + statPush(currentCommunication, dataSize); + if(rateLimiterData != null && dataSize > 0){ + rateLimiterData.acquire((int)dataSize); + } + currentCommunication.increaseCounter(CommunicationTool.READ_SUCCEED_BYTES, byteSize); + + currentCommunication.setLongCounter(CommunicationTool.WAIT_READER_TIME, waitReaderTime.get()); + currentCommunication.setLongCounter(CommunicationTool.WAIT_WRITER_TIME, waitWriterTime.get()); + } + + public synchronized void adjustRateLimit(long byteSpeed, long dataSpeed){ + if(byteSpeed > 0 && null != this.rateLimiterByte && + byteSpeed != this.rateLimiterByte.getRate()) { + this.rateLimiterByte.setRate(byteSpeed); + } + if(dataSpeed > 0 && null != this.rateLimiterData && + dataSpeed != this.rateLimiterData.getRate()) { + this.rateLimiterData.setRate(dataSpeed); + } + } + + private long getByteSize(final Collection rs){ + final long[] size = {0}; + rs.forEach(t -> size[0] += t.getByteSize()); + return size[0]; + } + + /** + * do push + * @param t + */ + protected abstract void doPush(T t); + + /** + * do push all + * @param collection + */ + protected void doPushAll(Collection collection){ + //default not support + } + + /** + * do pull + */ + protected abstract T doPull(); + + /** + * do pull all + * @param collection + */ + protected void doPullAll(Collection collection){ + //default not support + } + + /** + * Increase consumer + */ + public void incConsumer(){ + consumeSem.compareAndSet(consumers - 1, consumers); + consumers ++; + } + + /** + * Do checkpoint + * @param checkPointId + */ + public void doCheckPoint(String checkPointId){ + } + + public abstract int size(); + + public abstract boolean isEmpty(); + + public abstract void clear(); + + /** + * stat push + * @param dataSize + */ + protected abstract void statPush(Communication currentCommunication, long dataSize); + + /** + * stat pull + * @param currentCommunication + * @param dataSize + */ + protected abstract void statPull(Communication currentCommunication, long dataSize); + /** + * current domain speed + * @return + */ + protected abstract long currentDataSpeed(Communication currentCommunication, Communication lastCommunication, long interval); + + /** + * update counter + * @param currentCommunication + * @param lastCommunication + */ + protected abstract void updateCounter(Communication currentCommunication, Communication lastCommunication); + /** + * the log printed in the first time + */ + protected abstract void firstPrint(); + + +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/channel/RecordChannel.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/channel/RecordChannel.java new file mode 100644 index 000000000..9a20a18b3 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/channel/RecordChannel.java @@ -0,0 +1,61 @@ +package 
com.alibaba.datax.core.transport.channel; + +import com.alibaba.datax.common.element.Record; +import com.alibaba.datax.common.util.Configuration; +import com.alibaba.datax.core.statistics.communication.Communication; +import com.alibaba.datax.core.statistics.communication.CommunicationTool; +import com.alibaba.datax.core.util.container.CoreConstant; +import com.google.common.util.concurrent.RateLimiter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * @author davidhua + */ +public abstract class RecordChannel extends AbstractChannel<Record> { + + private static final Logger LOG = LoggerFactory.getLogger(RecordChannel.class); + + + public RecordChannel(Configuration configuration) { + super(configuration); + this.dataSpeed = configuration.getLong( + CoreConstant.DATAX_CORE_TRANSPORT_CHANNEL_SPEED_RECORD, 10000); + this.rateLimiterData = RateLimiter.create(this.dataSpeed); + } + + @Override + protected void statPush(Communication currentCommunication, long dataSize) { + currentCommunication.increaseCounter(CommunicationTool.READ_SUCCEED_RECORDS, dataSize); + } + + @Override + protected void statPull(Communication currentCommunication, long dataSize) { + currentCommunication.increaseCounter(CommunicationTool.WRITE_RECEIVED_RECORDS, dataSize); + } + + @Override + protected long currentDataSpeed(Communication currentCommunication, Communication lastCommunication + , long interval) { + return (CommunicationTool.getTotalReadRecords(currentCommunication) - + CommunicationTool.getTotalReadRecords(lastCommunication)) * 1000 / interval; + } + + @Override + protected void updateCounter(Communication currentCommunication, Communication lastCommunication) { + lastCommunication.setLongCounter(CommunicationTool.READ_SUCCEED_RECORDS, + currentCommunication.getLongCounter(CommunicationTool.READ_SUCCEED_RECORDS)); + lastCommunication.setLongCounter(CommunicationTool.READ_FAILED_RECORDS, + currentCommunication.getLongCounter(CommunicationTool.READ_FAILED_RECORDS)); + } + + @Override + protected void firstPrint() { + long dataSpeed = configuration.getLong( + CoreConstant.DATAX_CORE_TRANSPORT_CHANNEL_SPEED_RECORD, 10000); + LOG.info("RecordChannel set byte_speed_limit to " + getByteSpeed() + + (getByteSpeed() <= 0 ? ", No bps activated." : ".")); + LOG.info("RecordChannel set record_speed_limit to " + dataSpeed + + (dataSpeed <= 0 ? ", No tps activated."
: ".")); + } +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/channel/memory/MemoryRecordChannel.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/channel/memory/MemoryRecordChannel.java new file mode 100644 index 000000000..b704b67d2 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/channel/memory/MemoryRecordChannel.java @@ -0,0 +1,148 @@ +package com.alibaba.datax.core.transport.channel.memory; + +import com.alibaba.datax.common.element.Record; +import com.alibaba.datax.common.exception.DataXException; +import com.alibaba.datax.common.util.Configuration; +import com.alibaba.datax.core.transport.channel.RecordChannel; +import com.alibaba.datax.core.transport.record.TerminateRecord; +import com.alibaba.datax.core.util.FrameworkErrorCode; +import com.alibaba.datax.core.util.container.CoreConstant; +import org.slf4j.LoggerFactory; + +import java.util.Collection; +import java.util.concurrent.ArrayBlockingQueue; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.locks.Condition; +import java.util.concurrent.locks.ReentrantLock; +/** + * 内存Channel的具体实现,底层其实是一个ArrayBlockingQueue + */ +public class MemoryRecordChannel extends RecordChannel { + private static final org.slf4j.Logger LOG = LoggerFactory + .getLogger(MemoryRecordChannel.class); + private int bufferSize = 0; + + private AtomicInteger memoryBytes = new AtomicInteger(0); + + private ArrayBlockingQueue queue = null; + + private ReentrantLock lock; + + private Condition notInsufficient, notEmpty; + + public MemoryRecordChannel(final Configuration configuration) { + super(configuration); + super.consumeIsolated = true; + this.queue = new ArrayBlockingQueue<>(this.getCapacity()); + this.bufferSize = configuration.getInt(CoreConstant.DATAX_CORE_TRANSPORT_RECORD_EXCHANGER_BUFFERSIZE); + lock = new ReentrantLock(); + notInsufficient = lock.newCondition(); + notEmpty = lock.newCondition(); + } + + @Override + public void close() { + super.close(); + try { + this.queue.put(TerminateRecord.get()); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + } + } + + @Override + public void clear() { + this.queue.clear(); + } + + @Override + protected void doPush(Record r) { + try { + long startTime = System.nanoTime(); + this.queue.put(r); + waitWriterTime.addAndGet(System.nanoTime() - startTime); + memoryBytes.addAndGet(r.getMemorySize()); + } catch (InterruptedException ex) { + Thread.currentThread().interrupt(); + } + } + + @Override + protected void doPushAll(Collection rs) { + try { + long startTime = System.nanoTime(); + lock.lockInterruptibly(); + int bytes = getRecordBytes(rs); + while (memoryBytes.get() + bytes > this.byteCapacity || rs.size() > this.queue.remainingCapacity()) { + notInsufficient.await(200L, TimeUnit.MILLISECONDS); + } + this.queue.addAll(rs); + waitWriterTime.addAndGet(System.nanoTime() - startTime); + memoryBytes.addAndGet(bytes); + notEmpty.signalAll(); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + throw DataXException.asDataXException( + FrameworkErrorCode.RUNTIME_ERROR, e); + } finally { + lock.unlock(); + } + } + + @Override + protected Record doPull() { + try { + long startTime = System.nanoTime(); + Record r = this.queue.take(); + waitReaderTime.addAndGet(System.nanoTime() - startTime); + memoryBytes.addAndGet(-r.getMemorySize()); + 
return r; + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + throw new IllegalStateException(e); + } + } + + @Override + protected void doPullAll(Collection<Record> rs) { + assert rs != null; + rs.clear(); + try { + long startTime = System.nanoTime(); + lock.lockInterruptibly(); + while (this.queue.drainTo(rs, bufferSize) <= 0) { + notEmpty.await(200L, TimeUnit.MILLISECONDS); + } + waitReaderTime.addAndGet(System.nanoTime() - startTime); + int bytes = getRecordBytes(rs); + memoryBytes.addAndGet(-bytes); + notInsufficient.signalAll(); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + throw DataXException.asDataXException( + FrameworkErrorCode.RUNTIME_ERROR, e); + } finally { + lock.unlock(); + } + } + + private int getRecordBytes(Collection<Record> rs) { + int bytes = 0; + for (Record r : rs) { + bytes += r.getMemorySize(); + } + return bytes; + } + + @Override + public int size() { + return this.queue.size(); + } + + @Override + public boolean isEmpty() { + return this.queue.isEmpty(); + } + +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/exchanger/BufferedRecordExchanger.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/exchanger/BufferedRecordExchanger.java new file mode 100644 index 000000000..54c65c961 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/exchanger/BufferedRecordExchanger.java @@ -0,0 +1,152 @@ +package com.alibaba.datax.core.transport.exchanger; + +import com.alibaba.datax.common.element.Record; +import com.alibaba.datax.common.exception.CommonErrorCode; +import com.alibaba.datax.common.exception.DataXException; +import com.alibaba.datax.common.plugin.RecordReceiver; +import com.alibaba.datax.common.plugin.RecordSender; +import com.alibaba.datax.common.plugin.TaskPluginCollector; +import com.alibaba.datax.common.util.Configuration; +import com.alibaba.datax.core.transport.channel.RecordChannel; +import com.alibaba.datax.core.transport.record.TerminateRecord; +import com.alibaba.datax.core.util.FrameworkErrorCode; +import com.alibaba.datax.core.util.container.CoreConstant; +import org.apache.commons.lang.Validate; + +import java.util.List; +import java.util.concurrent.CopyOnWriteArrayList; +import java.util.concurrent.atomic.AtomicInteger; + +public class BufferedRecordExchanger implements RecordSender, RecordReceiver { + + private final RecordChannel recordChannel; + + private final Configuration configuration; + + private final List<Record> buffer; + + private int bufferSize; + + protected final int byteCapacity; + + private final AtomicInteger memoryBytes = new AtomicInteger(0); + + private int bufferIndex = 0; + + private static Class<? extends Record> RECORD_CLASS; + + private volatile boolean shutdown = false; + + private final TaskPluginCollector pluginCollector; + + @SuppressWarnings("unchecked") + public BufferedRecordExchanger(final RecordChannel recordChannel, final TaskPluginCollector pluginCollector) { + assert null != recordChannel; + assert null != recordChannel.getConfiguration(); + + this.recordChannel = recordChannel; + this.pluginCollector = pluginCollector; + this.configuration = recordChannel.getConfiguration(); + + this.bufferSize = configuration + .getInt(CoreConstant.DATAX_CORE_TRANSPORT_RECORD_EXCHANGER_BUFFERSIZE); + this.buffer = new CopyOnWriteArrayList<>(); + + //The channel queue defaults to 8MB (it used to be 64MB) + this.byteCapacity = configuration.getInt( +
CoreConstant.DATAX_CORE_TRANSPORT_CHANNEL_CAPACITY_BYTE, 8 * 1024 * 1024); + + try { + BufferedRecordExchanger.RECORD_CLASS = ((Class<? extends Record>) Class + .forName(configuration.getString( + CoreConstant.DATAX_CORE_TRANSPORT_RECORD_CLASS, + "com.alibaba.datax.core.transport.record.DefaultRecord"))); + } catch (Exception e) { + throw DataXException.asDataXException( + FrameworkErrorCode.CONFIG_ERROR, e); + } + } + + @Override + public Record createRecord() { + try { + return BufferedRecordExchanger.RECORD_CLASS.newInstance(); + } catch (Exception e) { + throw DataXException.asDataXException( + FrameworkErrorCode.CONFIG_ERROR, e); + } + } + + @Override + public void sendToWriter(Record record) { + if (shutdown) { + throw DataXException.asDataXException(CommonErrorCode.SHUT_DOWN_TASK, ""); + } + + Validate.notNull(record, "record cannot be null."); + + if (record.getMemorySize() > this.byteCapacity) { + this.pluginCollector.collectDirtyRecord(record, new Exception(String.format("A single record exceeds the size limit; the current limit is: %s", this.byteCapacity))); + return; + } + + boolean isFull = (this.bufferIndex >= this.bufferSize || this.memoryBytes.get() + record.getMemorySize() > this.byteCapacity); + if (isFull) { + flush(); + } + + this.buffer.add(record); + this.bufferIndex++; + memoryBytes.addAndGet(record.getMemorySize()); + } + + @Override + public void flush() { + if (shutdown) { + throw DataXException.asDataXException(CommonErrorCode.SHUT_DOWN_TASK, ""); + } + this.recordChannel.pushAll(this.buffer); + this.buffer.clear(); + this.bufferIndex = 0; + this.memoryBytes.set(0); + } + + @Override + public void terminate() { + if (shutdown) { + throw DataXException.asDataXException(CommonErrorCode.SHUT_DOWN_TASK, ""); + } + flush(); + this.recordChannel.pushTerminate(TerminateRecord.get()); + } + + @Override + public Record getFromReader() { + if (shutdown) { + throw DataXException.asDataXException(CommonErrorCode.SHUT_DOWN_TASK, ""); + } + boolean isEmpty = (this.bufferIndex >= this.buffer.size()); + if (isEmpty) { + receive(); + } + + Record record = this.buffer.get(this.bufferIndex++); + if (record instanceof TerminateRecord) { + record = null; + } + return record; + } + + @Override + public void shutdown() { + shutdown = true; + buffer.clear(); + recordChannel.clear(); + } + + private void receive() { + this.recordChannel.pullAll(this.buffer); + this.bufferIndex = 0; + this.bufferSize = this.buffer.size(); + } +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/exchanger/BufferedRecordTransformerExchanger.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/exchanger/BufferedRecordTransformerExchanger.java new file mode 100644 index 000000000..29defd341 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/exchanger/BufferedRecordTransformerExchanger.java @@ -0,0 +1,164 @@ +package com.alibaba.datax.core.transport.exchanger; + +import com.alibaba.datax.common.element.Record; +import com.alibaba.datax.common.exception.CommonErrorCode; +import com.alibaba.datax.common.exception.DataXException; +import com.alibaba.datax.common.plugin.RecordReceiver; +import com.alibaba.datax.common.plugin.RecordSender; +import com.alibaba.datax.common.plugin.TaskPluginCollector; +import com.alibaba.datax.common.util.Configuration; +import com.alibaba.datax.core.statistics.communication.Communication; +import com.alibaba.datax.core.transport.channel.RecordChannel; +import
com.alibaba.datax.core.transport.record.TerminateRecord; +import com.alibaba.datax.core.transport.transformer.TransformerExecution; +import com.alibaba.datax.core.util.FrameworkErrorCode; +import com.alibaba.datax.core.util.container.CoreConstant; +import org.apache.commons.lang.Validate; + +import java.util.List; +import java.util.concurrent.CopyOnWriteArrayList; +import java.util.concurrent.atomic.AtomicInteger; + +public class BufferedRecordTransformerExchanger extends TransformerExchanger implements RecordSender, RecordReceiver { + + private final RecordChannel recordChannel; + + private final Configuration configuration; + + private final List<Record> buffer; + + private int bufferSize; + + protected final int byteCapacity; + + private final AtomicInteger memoryBytes = new AtomicInteger(0); + + private int bufferIndex = 0; + + private static Class<? extends Record> RECORD_CLASS; + + private volatile boolean shutdown = false; + + + @SuppressWarnings("unchecked") + public BufferedRecordTransformerExchanger(final int taskGroupId, final int taskId, + final RecordChannel recordChannel, final Communication communication, + final TaskPluginCollector pluginCollector, + final List<TransformerExecution> tInfoExecs) { + super(taskGroupId, taskId, communication, tInfoExecs, pluginCollector); + assert null != recordChannel; + assert null != recordChannel.getConfiguration(); + + this.recordChannel = recordChannel; + this.configuration = recordChannel.getConfiguration(); + + this.bufferSize = configuration + .getInt(CoreConstant.DATAX_CORE_TRANSPORT_RECORD_EXCHANGER_BUFFERSIZE); + this.buffer = new CopyOnWriteArrayList<>(); + + //The channel queue defaults to 8MB (it used to be 64MB) + this.byteCapacity = configuration.getInt( + CoreConstant.DATAX_CORE_TRANSPORT_CHANNEL_CAPACITY_BYTE, 8 * 1024 * 1024); + + try { + BufferedRecordTransformerExchanger.RECORD_CLASS = ((Class<? extends Record>) Class + .forName(configuration.getString( + CoreConstant.DATAX_CORE_TRANSPORT_RECORD_CLASS, + "com.alibaba.datax.core.transport.record.DefaultRecord"))); + } catch (Exception e) { + throw DataXException.asDataXException( + FrameworkErrorCode.CONFIG_ERROR, e); + } + } + + @Override + public Record createRecord() { + try { + return BufferedRecordTransformerExchanger.RECORD_CLASS.newInstance(); + } catch (Exception e) { + throw DataXException.asDataXException( + FrameworkErrorCode.CONFIG_ERROR, e); + } + } + + @Override + public void sendToWriter(Record record) { + if (shutdown) { + throw DataXException.asDataXException(CommonErrorCode.SHUT_DOWN_TASK, ""); + } + + Validate.notNull(record, "record cannot be null."); + + record = doTransformer(record); + + if (record == null) { + return; + } + + if (record.getMemorySize() > this.byteCapacity) { + this.pluginCollector.collectDirtyRecord(record, new Exception(String.format("A single record exceeds the size limit; the current limit is: %s", this.byteCapacity))); + return; + } + + boolean isFull = (this.bufferIndex >= this.bufferSize || this.memoryBytes.get() + record.getMemorySize() > this.byteCapacity); + if (isFull) { + flush(); + } + + this.buffer.add(record); + this.bufferIndex++; + memoryBytes.addAndGet(record.getMemorySize()); + } + + @Override + public void flush() { + if (shutdown) { + throw DataXException.asDataXException(CommonErrorCode.SHUT_DOWN_TASK, ""); + } + this.recordChannel.pushAll(this.buffer); + //Keep the statistics in sync with the channel + doStat(); + this.buffer.clear(); + this.bufferIndex = 0; + this.memoryBytes.set(0); + } + + @Override + public void terminate() { + if (shutdown) { + throw DataXException.asDataXException(CommonErrorCode.SHUT_DOWN_TASK, ""); + } + flush(); +
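+ //All buffered records were flushed above, so the writer receives them before the terminate sentinel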
this.recordChannel.pushTerminate(TerminateRecord.get()); + } + + @Override + public Record getFromReader() { + if (shutdown) { + throw DataXException.asDataXException(CommonErrorCode.SHUT_DOWN_TASK, ""); + } + boolean isEmpty = (this.bufferIndex >= this.buffer.size()); + if (isEmpty) { + receive(); + } + + Record record = this.buffer.get(this.bufferIndex++); + if (record instanceof TerminateRecord) { + record = null; + } + return record; + } + + @Override + public void shutdown() { + shutdown = true; + buffer.clear(); + recordChannel.clear(); + } + + private void receive() { + this.recordChannel.pullAll(this.buffer); + this.bufferIndex = 0; + this.bufferSize = this.buffer.size(); + } +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/exchanger/RecordExchanger.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/exchanger/RecordExchanger.java new file mode 100644 index 000000000..276b2fa3f --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/exchanger/RecordExchanger.java @@ -0,0 +1,114 @@ +/** + * (C) 2010-2014 Alibaba Group Holding Limited. + *

+ * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + *

+ * http://www.apache.org/licenses/LICENSE-2.0 + *

+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.alibaba.datax.core.transport.exchanger; + +import com.alibaba.datax.common.element.Record; +import com.alibaba.datax.common.exception.CommonErrorCode; +import com.alibaba.datax.common.exception.DataXException; +import com.alibaba.datax.common.plugin.RecordReceiver; +import com.alibaba.datax.common.plugin.RecordSender; +import com.alibaba.datax.common.plugin.TaskPluginCollector; +import com.alibaba.datax.common.util.Configuration; +import com.alibaba.datax.core.statistics.communication.Communication; +import com.alibaba.datax.core.transport.channel.RecordChannel; +import com.alibaba.datax.core.transport.record.TerminateRecord; +import com.alibaba.datax.core.transport.transformer.TransformerExecution; +import com.alibaba.datax.core.util.FrameworkErrorCode; +import com.alibaba.datax.core.util.container.CoreConstant; + +import java.util.List; + +public class RecordExchanger extends TransformerExchanger implements RecordSender, RecordReceiver { + + private RecordChannel recordChannel; + + private Configuration configuration; + + private static Class<? extends Record> RECORD_CLASS; + + private volatile boolean shutdown = false; + + @SuppressWarnings("unchecked") + public RecordExchanger(final int taskGroupId, final int taskId, final RecordChannel recordChannel, final Communication communication, List<TransformerExecution> transformerExecs, final TaskPluginCollector pluginCollector) { + super(taskGroupId, taskId, communication, transformerExecs, pluginCollector); + assert recordChannel != null; + this.recordChannel = recordChannel; + this.recordChannel.incConsumer(); + this.configuration = recordChannel.getConfiguration(); + try { + RecordExchanger.RECORD_CLASS = (Class<? extends Record>) Class + .forName(configuration.getString( + CoreConstant.DATAX_CORE_TRANSPORT_RECORD_CLASS, + "com.alibaba.datax.core.transport.record.DefaultRecord")); + } catch (ClassNotFoundException e) { + throw DataXException.asDataXException( + FrameworkErrorCode.CONFIG_ERROR, e); + } + } + + @Override + public Record getFromReader() { + if (shutdown) { + throw DataXException.asDataXException(CommonErrorCode.SHUT_DOWN_TASK, ""); + } + Record record = this.recordChannel.pull(); + return (record instanceof TerminateRecord ?
null : record); + } + + @Override + public Record createRecord() { + try { + return RECORD_CLASS.newInstance(); + } catch (Exception e) { + throw DataXException.asDataXException( + FrameworkErrorCode.CONFIG_ERROR, e); + } + } + + @Override + public void sendToWriter(Record record) { + if (shutdown) { + throw DataXException.asDataXException(CommonErrorCode.SHUT_DOWN_TASK, ""); + } + record = doTransformer(record); + if (record == null) { + return; + } + this.recordChannel.push(record); + //Keep the statistics in sync with the channel + doStat(); + } + + @Override + public void flush() { + } + + @Override + public void terminate() { + if (shutdown) { + throw DataXException.asDataXException(CommonErrorCode.SHUT_DOWN_TASK, ""); + } + this.recordChannel.pushTerminate(TerminateRecord.get()); + //Keep the statistics in sync with the channel + doStat(); + } + + @Override + public void shutdown() { + shutdown = true; + } +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/exchanger/TransformerExchanger.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/exchanger/TransformerExchanger.java new file mode 100644 index 000000000..92a5b7320 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/exchanger/TransformerExchanger.java @@ -0,0 +1,147 @@ +package com.alibaba.datax.core.transport.exchanger; + +import com.alibaba.datax.common.element.Record; +import com.alibaba.datax.common.exception.DataXException; +import com.alibaba.datax.common.plugin.TaskPluginCollector; +import com.alibaba.datax.core.statistics.communication.Communication; +import com.alibaba.datax.core.statistics.communication.CommunicationTool; +import com.alibaba.datax.core.transport.transformer.TransformerErrorCode; +import com.alibaba.datax.core.transport.transformer.TransformerExecution; +import com.alibaba.datax.core.util.container.ClassLoaderSwapper; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.List; + +/** + * no comments. + * Created by liqiang on 16/3/9.
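+ * Base class for the record exchangers: runs the configured transformer chain over each record and accumulates success/failed/filtered counters for the channel statistics.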
+ */ +public abstract class TransformerExchanger { + + private static final Logger LOG = LoggerFactory.getLogger(TransformerExchanger.class); + protected final TaskPluginCollector pluginCollector; + + protected final int taskGroupId; + protected final int taskId; + protected final Communication currentCommunication; + + private long totalExaustedTime = 0; + private long totalFilterRecords = 0; + private long totalSuccessRecords = 0; + private long totalFailedRecords = 0; + + + private List<TransformerExecution> transformerExecs; + + private ClassLoaderSwapper classLoaderSwapper = ClassLoaderSwapper + .newCurrentThreadClassLoaderSwapper(); + + + public TransformerExchanger(int taskGroupId, int taskId, Communication communication, + List<TransformerExecution> transformerExecs, + final TaskPluginCollector pluginCollector) { + + this.transformerExecs = transformerExecs; + this.pluginCollector = pluginCollector; + this.taskGroupId = taskGroupId; + this.taskId = taskId; + this.currentCommunication = communication; + } + + + public Record doTransformer(Record record) { + if (transformerExecs == null || transformerExecs.isEmpty()) { + return record; + } + + Record result = record; + + long diffExaustedTime = 0; + String errorMsg = null; + boolean failed = false; + for (TransformerExecution transformerInfoExec : transformerExecs) { + long startTs = System.nanoTime(); + + if (transformerInfoExec.getClassLoader() != null) { + classLoaderSwapper.setCurrentThreadClassLoader(transformerInfoExec.getClassLoader()); + } + + /** + * Transformer parameter validity is checked lazily; failures throw immediately and are not treated as dirty data. + * Plugins need not re-validate here, but plugin-specific aspects (such as the number of parameters) are checked inside the plugin. + */ + if (!transformerInfoExec.isChecked()) { + + if (transformerInfoExec.getColumnIndex() != null && transformerInfoExec.getColumnIndex() >= record.getColumnNumber()) { + throw DataXException.asDataXException(TransformerErrorCode.TRANSFORMER_ILLEGAL_PARAMETER, + String.format("columnIndex[%s] out of bound[%s]. name=%s", + transformerInfoExec.getColumnIndex(), record.getColumnNumber(), + transformerInfoExec.getTransformerName())); + } + transformerInfoExec.setIsChecked(true); + } + + try { + result = transformerInfoExec.getTransformer().evaluate(result, transformerInfoExec.gettContext(), transformerInfoExec.getFinalParas()); + } catch (Exception e) { + errorMsg = String.format("transformer(%s) has Exception(%s)", transformerInfoExec.getTransformerName(), + e.getMessage()); + failed = true; + //LOG.error(errorMsg, e); + // transformerInfoExec.addFailedRecords(1); + //Dirty data skips the remaining transformers; the record is treated as dirty and filtered out. + break; + + } finally { + if (transformerInfoExec.getClassLoader() != null) { + classLoaderSwapper.restoreCurrentThreadClassLoader(); + } + } + + if (result == null) { + /** + * A null result must never reach the writer; it is consumed here as a filtered record. + */ + totalFilterRecords++; + //transformerInfoExec.addFilterRecords(1); + break; + } + + long diff = System.nanoTime() - startTs; + //transformerInfoExec.addExaustedTime(diff); + diffExaustedTime += diff; + //transformerInfoExec.addSuccessRecords(1); + } + + totalExaustedTime += diffExaustedTime; + + if (failed) { + totalFailedRecords++; + this.pluginCollector.collectDirtyRecord(record, errorMsg); + return null; + } else { + totalSuccessRecords++; + return result; + } + } + + public void doStat() { + + /** + * TODO When multiple transformers are configured, show per-transformer statistics separately and then aggregate the total transformer time consumption.
* Not collected for now. + */ +// if (transformers.size() > 1) { +// for (TransformerExecution transformerInfoExec : transformers) { +// currentCommunication.setLongCounter(CommunicationTool.TRANSFORMER_NAME_PREFIX + transformerInfoExec.getTransformerName(), transformerInfoExec.getExaustedTime()); +// } +// } + currentCommunication.setLongCounter(CommunicationTool.TRANSFORMER_SUCCEED_RECORDS, totalSuccessRecords); + currentCommunication.setLongCounter(CommunicationTool.TRANSFORMER_FAILED_RECORDS, totalFailedRecords); + currentCommunication.setLongCounter(CommunicationTool.TRANSFORMER_FILTER_RECORDS, totalFilterRecords); + currentCommunication.setLongCounter(CommunicationTool.TRANSFORMER_USED_TIME, totalExaustedTime); + } + + +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/record/DefaultRecord.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/record/DefaultRecord.java new file mode 100644 index 000000000..3d6446702 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/record/DefaultRecord.java @@ -0,0 +1,137 @@ +package com.alibaba.datax.core.transport.record; + +import com.alibaba.datax.common.element.Column; +import com.alibaba.datax.common.element.Record; +import com.alibaba.datax.common.exception.DataXException; +import com.alibaba.datax.core.util.ClassSize; +import com.alibaba.datax.core.util.FrameworkErrorCode; +import com.webank.wedatasphere.exchangis.datax.util.Json; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +/** + * Created by jingxing on 14-8-24. + */ + +public class DefaultRecord implements Record { + + private static final int RECORD_AVERAGE_COLUMN_NUMBER = 16; + + private List<Column> columns; + + private String uid = ""; + + private int byteSize; + + // Memory needed by the Record object itself + private int memorySize = ClassSize.DefaultRecordHead; + + public DefaultRecord() { + this.columns = new ArrayList<>(RECORD_AVERAGE_COLUMN_NUMBER); + } + + @Override + public void addColumn(Column column) { + columns.add(column); + incrByteSize(column); + } + + @Override + public Column getColumn(int i) { + if (i < 0 || i >= columns.size()) { + return null; + } + return columns.get(i); + } + + @Override + public void setColumn(int i, final Column column) { + if (i < 0) { + throw DataXException.asDataXException(FrameworkErrorCode.ARGUMENT_ERROR, + "Cannot set a column at an index less than 0"); + } + + if (i >= columns.size()) { + expandCapacity(i + 1); + } + + decrByteSize(getColumn(i)); + this.columns.set(i, column); + incrByteSize(getColumn(i)); + } + + @Override + public String toString() { + Map<String, Object> json = new HashMap<>(); + json.put("size", this.getColumnNumber()); + json.put("data", this.columns); + return Json.toJson(json, null); + } + + @Override + public int getColumnNumber() { + return this.columns.size(); + } + + @Override + public List<Column> getColumns() { + return columns; + } + + @Override + public String uid() { + return uid; + } + + @Override + public int getByteSize() { + return byteSize; + } + + @Override + public int getMemorySize() { + return memorySize; + } + + @Override + public Object copyElement() { + return this; + } + + private void decrByteSize(final Column column) { + if (null == column) { + return; + } + + byteSize -= column.getByteSize(); + + //Memory usage is the column object header plus the actual data size plus the uid + memorySize = memorySize - ClassSize.ColumnHead - column.getByteSize() - uid.getBytes().length; + } + + private void incrByteSize(final Column
column) { + if (null == column) { + return; + } + + byteSize += column.getByteSize(); + + //Memory usage is the column object header plus the actual data size plus the uid + memorySize = memorySize + ClassSize.ColumnHead + column.getByteSize() + uid.getBytes().length; + } + + private void expandCapacity(int totalSize) { + if (totalSize <= 0) { + return; + } + + int needToExpand = totalSize - columns.size(); + while (needToExpand-- > 0) { + this.columns.add(null); + } + } + +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/record/TerminateRecord.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/record/TerminateRecord.java new file mode 100644 index 000000000..0c24a2bff --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/record/TerminateRecord.java @@ -0,0 +1,66 @@ +package com.alibaba.datax.core.transport.record; + +import com.alibaba.datax.common.element.Column; +import com.alibaba.datax.common.element.Record; + +import java.util.ArrayList; +import java.util.List; + +/** + * Sentinel record marking that the producer has finished producing + */ +public class TerminateRecord implements Record { + private final static TerminateRecord SINGLE = new TerminateRecord(); + + private TerminateRecord() { + } + + public static TerminateRecord get() { + return SINGLE; + } + + @Override + public void addColumn(Column column) { + } + + @Override + public Column getColumn(int i) { + return null; + } + + @Override + public int getColumnNumber() { + return 0; + } + + @Override + public List<Column> getColumns() { + return new ArrayList<>(); + } + + @Override + public String uid() { + return ""; + } + + @Override + public int getByteSize() { + return 0; + } + + @Override + public int getMemorySize() { + return 0; + } + + @Override + public <T> T copyElement() { + return (T) this; + } + + @Override + public void setColumn(int i, Column column) { + return; + } + +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/ComplexTransformerProxy.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/ComplexTransformerProxy.java new file mode 100644 index 000000000..a160e61df --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/ComplexTransformerProxy.java @@ -0,0 +1,29 @@ +package com.alibaba.datax.core.transport.transformer; + +import com.alibaba.datax.common.element.Record; +import com.alibaba.datax.transformer.ComplexTransformer; +import com.alibaba.datax.transformer.Transformer; + +import java.util.Map; + +/** + * no comments. + * Created by liqiang on 16/3/8. + */ +public class ComplexTransformerProxy extends ComplexTransformer { + private Transformer realTransformer; + + public ComplexTransformerProxy(Transformer transformer) { + setTransformerName(transformer.getTransformerName()); + this.realTransformer = transformer; + } + + @Override + public Record evaluate(Record record, Map<String, Object> tContext, Object...
paras) { + return this.realTransformer.evaluate(record, paras); + } + + public Transformer getRealTransformer() { + return realTransformer; + } +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/FilterTransformer.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/FilterTransformer.java new file mode 100644 index 000000000..9e6ee4f71 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/FilterTransformer.java @@ -0,0 +1,311 @@ +package com.alibaba.datax.core.transport.transformer; + +import com.alibaba.datax.common.element.*; +import com.alibaba.datax.common.exception.DataXException; +import com.alibaba.datax.transformer.Transformer; +import org.apache.commons.lang3.StringUtils; + +import java.util.Arrays; + +/** + * no comments. + * Created by liqiang on 16/3/4. + */ +public class FilterTransformer extends Transformer { + public FilterTransformer() { + setTransformerName("dx_filter"); + } + + @Override + public Record evaluate(Record record, Object... paras) { + + int columnIndex; + String code; + String value; + + try { + if (paras.length != 3) { + throw new RuntimeException("dx_filter paras must be 3"); + } + + columnIndex = (Integer) paras[0]; + code = (String) paras[1]; + value = (String) paras[2]; + + if (StringUtils.isEmpty(value)) { + throw new RuntimeException("dx_filter para 2 can't be null"); + } + } catch (Exception e) { + throw DataXException.asDataXException(TransformerErrorCode.TRANSFORMER_ILLEGAL_PARAMETER, "paras:" + Arrays.asList(paras).toString() + " => " + e.getMessage()); + } + + + Column column = record.getColumn(columnIndex); + + try { + + if (code.equalsIgnoreCase("not like")) { + return doLike(record, value, column); + } else if (code.equalsIgnoreCase("like")) { + return doNotLike(record, value, column); + } else if (code.equalsIgnoreCase("<=")) { + return doGreat(record, value, column, false); + } else if (code.equalsIgnoreCase(">=")) { + return doLess(record, value, column, false); + } else if (code.equalsIgnoreCase("!=")) { + return doEqual(record, value, column); + } else if (code.equalsIgnoreCase("=") || code.equalsIgnoreCase("==")) { + return doNotEqual(record, value, column); + } else if (code.equalsIgnoreCase("<")) { + return doGreat(record, value, column, true); + } else if (code.equalsIgnoreCase(">")) { + return doLess(record, value, column, true); + } else { + throw new RuntimeException("dx_filter can't support code:" + code); + } + } catch (Exception e) { + throw DataXException.asDataXException(TransformerErrorCode.TRANSFORMER_RUN_EXCEPTION, e.getMessage(), e); + } + } + + + private Record doGreat(Record record, String value, Column column, boolean hasEqual) { + + //A null field never participates in the comparison; null behaves like negative infinity + if (column.getRawData() == null) { + return record; + } + if (column instanceof DoubleColumn) { + Double ori = column.asDouble(); + double val = Double.parseDouble(value); + + if (hasEqual) { + if (ori >= val) { + return null; + } else { + return record; + } + } else { + if (ori > val) { + return null; + } else { + return record; + } + } + } else if (column instanceof LongColumn || column instanceof DateColumn) { + Long ori = column.asLong(); + long val = Long.parseLong(value); + + if (hasEqual) { + if (ori >= val) { + return null; + } else { + return record; + } + } else { + if (ori > val) { + return null; + } else { + return record; + } + } + } else if (column instanceof
StringColumn || column instanceof BytesColumn || column instanceof BoolColumn) { + String ori = column.asString(); + if (hasEqual) { + if (ori.compareTo(value) >= 0) { + return null; + } else { + return record; + } + } else { + if (ori.compareTo(value) > 0) { + return null; + } else { + return record; + } + } + } else { + throw new RuntimeException("<,<= can't support this columnType:" + column.getClass().getSimpleName()); + } + } + + private Record doLess(Record record, String value, Column column, boolean hasEqual) { + + //A null field never participates in the comparison; null behaves like positive infinity + if (column.getRawData() == null) { + return record; + } + + if (column instanceof DoubleColumn) { + Double ori = column.asDouble(); + double val = Double.parseDouble(value); + + if (hasEqual) { + if (ori <= val) { + return null; + } else { + return record; + } + } else { + if (ori < val) { + return null; + } else { + return record; + } + } + } else if (column instanceof LongColumn || column instanceof DateColumn) { + Long ori = column.asLong(); + long val = Long.parseLong(value); + + if (hasEqual) { + if (ori <= val) { + return null; + } else { + return record; + } + } else { + if (ori < val) { + return null; + } else { + return record; + } + } + } else if (column instanceof StringColumn || column instanceof BytesColumn || column instanceof BoolColumn) { + String ori = column.asString(); + if (hasEqual) { + if (ori.compareTo(value) <= 0) { + return null; + } else { + return record; + } + } else { + if (ori.compareTo(value) < 0) { + return null; + } else { + return record; + } + } + } else { + throw new RuntimeException(">,>= can't support this columnType:" + column.getClass().getSimpleName()); + } + + } + + /** + * DateColumn is compared by its long value; StringColumn, BytesColumn and BoolColumn are compared by their String values + * + * @param record + * @param value + * @param column + * @return null (the record is filtered out) when the values are equal + */ + + private Record doEqual(Record record, String value, Column column) { + + //When the field is null it only matches the literal "null"; otherwise null fields are never filtered + if (column.getRawData() == null) { + if (value.equalsIgnoreCase("null")) { + return null; + } else { + return record; + } + } + + if (column instanceof DoubleColumn) { + Double ori = column.asDouble(); + double val = Double.parseDouble(value); + + if (ori == val) { + return null; + } else { + return record; + } + } else if (column instanceof LongColumn || column instanceof DateColumn) { + Long ori = column.asLong(); + long val = Long.parseLong(value); + + if (ori == val) { + return null; + } else { + return record; + } + } else if (column instanceof StringColumn || column instanceof BytesColumn || column instanceof BoolColumn) { + String ori = column.asString(); + if (ori.compareTo(value) == 0) { + return null; + } else { + return record; + } + } else { + throw new RuntimeException("!= can't support this columnType:" + column.getClass().getSimpleName()); + } + + } + + /** + * DateColumn is compared by its long value; StringColumn, BytesColumn and BoolColumn are compared by their String values + * + * @param record + * @param value + * @param column + * @return null (the record is filtered out) when the values are not equal + */ + private Record doNotEqual(Record record, String value, Column column) { + + //When the field is null it only matches the literal "null"; otherwise null fields are always filtered + if (column.getRawData() == null) { + if (value.equalsIgnoreCase("null")) { + return record; + } else { + return null; + } + } + + if (column instanceof DoubleColumn) { + Double ori = column.asDouble(); + double val = Double.parseDouble(value); + + if (ori != val) { + return null; + } else { + return record; + } + } else if (column instanceof LongColumn || column instanceof DateColumn) { + Long ori = column.asLong(); + long val =
Long.parseLong(value); + + if (ori != val) { + return null; + } else { + return record; + } + } else if (column instanceof StringColumn || column instanceof BytesColumn || column instanceof BoolColumn) { + String ori = column.asString(); + if (ori.compareTo(value) != 0) { + return null; + } else { + return record; + } + } else { + throw new RuntimeException("== can't support this columnType:" + column.getClass().getSimpleName()); + } + } + + private Record doLike(Record record, String value, Column column) { + String orivalue = column.asString(); + if (orivalue != null && orivalue.matches(value)) { + return null; + } else { + return record; + } + } + + private Record doNotLike(Record record, String value, Column column) { + String orivalue = column.asString(); + if (orivalue != null && orivalue.matches(value)) { + return record; + } else { + return null; + } + } +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/GroovyTransformer.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/GroovyTransformer.java new file mode 100644 index 000000000..53c4f1085 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/GroovyTransformer.java @@ -0,0 +1,91 @@ +package com.alibaba.datax.core.transport.transformer; + +import com.alibaba.datax.common.element.Record; +import com.alibaba.datax.common.exception.DataXException; +import com.alibaba.datax.transformer.Transformer; +import groovy.lang.GroovyClassLoader; +import org.apache.commons.lang3.StringUtils; +import org.codehaus.groovy.control.CompilationFailedException; + +import java.util.Arrays; +import java.util.List; + +/** + * no comments. + * Created by liqiang on 16/3/4. + */ +public class GroovyTransformer extends Transformer { + public GroovyTransformer() { + setTransformerName("dx_groovy"); + } + + private Transformer groovyTransformer; + + @Override + public Record evaluate(Record record, Object... paras) { + + if (groovyTransformer == null) { + //Globally unique; initialized only once + if (paras.length < 1 || paras.length > 2) { + throw DataXException.asDataXException(TransformerErrorCode.TRANSFORMER_ILLEGAL_PARAMETER, "dx_groovy paras must be 1 or 2. now paras is: " + Arrays.asList(paras).toString()); + } + synchronized (this) { + + if (groovyTransformer == null) { + String code = (String) paras[0]; + @SuppressWarnings("unchecked") List<String> extraPackage = paras.length == 2 ? (List<String>) paras[1] : null; + initGroovyTransformer(code, extraPackage); + } + } + } + + return this.groovyTransformer.evaluate(record); + } + + private void initGroovyTransformer(String code, List<String> extraPackage) { + GroovyClassLoader loader = new GroovyClassLoader(GroovyTransformer.class.getClassLoader()); + String groovyRule = getGroovyRule(code, extraPackage); + + Class groovyClass; + try { + groovyClass = loader.parseClass(groovyRule); + } catch (CompilationFailedException cfe) { + throw DataXException.asDataXException(TransformerErrorCode.TRANSFORMER_GROOVY_INIT_EXCEPTION, cfe); + } + + try { + Object t = groovyClass.newInstance(); + if (!(t instanceof Transformer)) { + throw DataXException.asDataXException(TransformerErrorCode.TRANSFORMER_GROOVY_INIT_EXCEPTION, "datax bug!
contact askdatax"); + } + this.groovyTransformer = (Transformer) t; + } catch (Throwable ex) { + throw DataXException.asDataXException(TransformerErrorCode.TRANSFORMER_GROOVY_INIT_EXCEPTION, ex); + } + } + + + private String getGroovyRule(String expression, List extraPackagesStrList) { + StringBuffer sb = new StringBuffer(); + if (extraPackagesStrList != null) { + for (String extraPackagesStr : extraPackagesStrList) { + if (StringUtils.isNotEmpty(extraPackagesStr)) { + sb.append(extraPackagesStr); + } + } + } + sb.append("import static com.alibaba.datax.core.transport.transformer.GroovyTransformerStaticUtil.*;"); + sb.append("import com.alibaba.datax.common.element.*;"); + sb.append("import DataXException;"); + sb.append("import Transformer;"); + sb.append("import java.util.*;"); + sb.append("static class RULE extends Transformer").append("{"); + sb.append("static Record evaluate(Record record, Object... paras) {"); + sb.append(expression); + sb.append("}}"); + + return sb.toString(); + } + + +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/GroovyTransformerStaticUtil.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/GroovyTransformerStaticUtil.java new file mode 100644 index 000000000..dff33f1e5 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/GroovyTransformerStaticUtil.java @@ -0,0 +1,10 @@ +package com.alibaba.datax.core.transport.transformer; + +/** + * GroovyTransformer的帮助类,供groovy代码使用,必须全是static的方法 + * Created by liqiang on 16/3/4. + */ +public class GroovyTransformerStaticUtil { + + +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/PadTransformer.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/PadTransformer.java new file mode 100644 index 000000000..eb02947da --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/PadTransformer.java @@ -0,0 +1,91 @@ +package com.alibaba.datax.core.transport.transformer; + +import com.alibaba.datax.common.element.Column; +import com.alibaba.datax.common.element.Record; +import com.alibaba.datax.common.element.StringColumn; +import com.alibaba.datax.common.exception.DataXException; +import com.alibaba.datax.transformer.Transformer; + +import java.util.Arrays; + +/** + * no comments. + * Created by liqiang on 16/3/4. + */ +public class PadTransformer extends Transformer { + public PadTransformer() { + setTransformerName("dx_pad"); + } + + @Override + public Record evaluate(Record record, Object... 
paras) { + + int columnIndex; + String padType; + int length; + String padString; + + try { + if (paras.length != 4) { + throw new RuntimeException("dx_pad paras must be 4"); + } + + columnIndex = (Integer) paras[0]; + padType = (String) paras[1]; + length = Integer.valueOf((String) paras[2]); + padString = (String) paras[3]; + } catch (Exception e) { + throw DataXException.asDataXException(TransformerErrorCode.TRANSFORMER_ILLEGAL_PARAMETER, "paras:" + Arrays.asList(paras).toString() + " => " + e.getMessage()); + } + + Column column = record.getColumn(columnIndex); + + try { + String oriValue = column.asString(); + + //Treat a null field as an empty string + if (oriValue == null) { + oriValue = ""; + } + String newValue; + if (!padType.equalsIgnoreCase("r") && !padType.equalsIgnoreCase("l")) { + throw new RuntimeException(String.format("dx_pad first para(%s) support l or r", padType)); + } + if (length <= oriValue.length()) { + newValue = oriValue.substring(0, length); + } else { + + newValue = doPad(padType, oriValue, length, padString); + } + + record.setColumn(columnIndex, new StringColumn(newValue)); + + } catch (Exception e) { + throw DataXException.asDataXException(TransformerErrorCode.TRANSFORMER_RUN_EXCEPTION, e.getMessage(), e); + } + return record; + } + + private String doPad(String padType, String oriValue, int length, String padString) { + + String finalPad = ""; + int needLength = length - oriValue.length(); + while (needLength > 0) { + + if (needLength >= padString.length()) { + finalPad += padString; + needLength -= padString.length(); + } else { + finalPad += padString.substring(0, needLength); + needLength = 0; + } + } + + if (padType.equalsIgnoreCase("l")) { + return finalPad + oriValue; + } else { + return oriValue + finalPad; + } + } + +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/PrecisionTransformer.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/PrecisionTransformer.java new file mode 100644 index 000000000..0ab4fda6d --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/PrecisionTransformer.java @@ -0,0 +1,56 @@ +package com.alibaba.datax.core.transport.transformer; + +import com.alibaba.datax.common.element.Column; +import com.alibaba.datax.common.element.Record; +import com.alibaba.datax.common.element.StringColumn; +import com.alibaba.datax.common.exception.DataXException; +import com.alibaba.datax.transformer.Transformer; + +import java.math.BigDecimal; +import java.util.Arrays; + +public class PrecisionTransformer extends Transformer { + public PrecisionTransformer() { + setTransformerName("dx_precision"); + } + + @Override + public Record evaluate(Record record, Object...
paras) { + + int columnIndex; + int precision; + try { + if (paras.length != 2) { + throw new RuntimeException("dx_precision paras must be 2"); + } + + columnIndex = (Integer) paras[0]; + precision = Integer.valueOf((String) paras[1]); + } catch (Exception e) { + throw DataXException.asDataXException(TransformerErrorCode.TRANSFORMER_ILLEGAL_PARAMETER, "paras:" + Arrays.asList(paras).toString() + " => " + e.getMessage()); + } + + Column column = record.getColumn(columnIndex); + + try { + String oriValue = column.asString(); + + //Skip precision handling when the field is null + if (oriValue == null) { + return record; + } + BigDecimal oriNum = new BigDecimal(oriValue); + BigDecimal zeroNum = new BigDecimal("0"); + if(oriNum.doubleValue() == zeroNum.doubleValue()){ + record.setColumn(columnIndex, new StringColumn("0")); + } + else { + BigDecimal newValue = new BigDecimal(oriValue).setScale(precision, BigDecimal.ROUND_DOWN); + record.setColumn(columnIndex, new StringColumn(newValue.toString())); + } + } catch (Exception e) { + throw DataXException.asDataXException(TransformerErrorCode.TRANSFORMER_RUN_EXCEPTION, e.getMessage(), e); + } + return record; + } +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/ReplaceTransformer.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/ReplaceTransformer.java new file mode 100644 index 000000000..40bcb1b1a --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/ReplaceTransformer.java @@ -0,0 +1,66 @@ +package com.alibaba.datax.core.transport.transformer; + +import com.alibaba.datax.common.element.Column; +import com.alibaba.datax.common.element.Record; +import com.alibaba.datax.common.element.StringColumn; +import com.alibaba.datax.common.exception.DataXException; +import com.alibaba.datax.transformer.Transformer; + +import java.util.Arrays; + +/** + * no comments. + * Created by liqiang on 16/3/4. + */ +public class ReplaceTransformer extends Transformer { + public ReplaceTransformer() { + setTransformerName("dx_replace"); + } + + @Override + public Record evaluate(Record record, Object...
paras) { + + int columnIndex; + int startIndex; + int length; + String replaceString; + try { + if (paras.length != 4) { + throw new RuntimeException("dx_replace paras must be 4"); + } + + columnIndex = (Integer) paras[0]; + startIndex = Integer.valueOf((String) paras[1]); + length = Integer.valueOf((String) paras[2]); + replaceString = (String) paras[3]; + } catch (Exception e) { + throw DataXException.asDataXException(TransformerErrorCode.TRANSFORMER_ILLEGAL_PARAMETER, "paras:" + Arrays.asList(paras).toString() + " => " + e.getMessage()); + } + + Column column = record.getColumn(columnIndex); + + try { + String oriValue = column.asString(); + + //Skip replace handling when the field is null + if (oriValue == null) { + return record; + } + String newValue; + if (startIndex > oriValue.length()) { + throw new RuntimeException(String.format("dx_replace startIndex(%s) out of range(%s)", startIndex, oriValue.length())); + } + if (startIndex + length >= oriValue.length()) { + newValue = oriValue.substring(0, startIndex) + replaceString; + } else { + newValue = oriValue.substring(0, startIndex) + replaceString + oriValue.substring(startIndex + length, oriValue.length()); + } + + record.setColumn(columnIndex, new StringColumn(newValue)); + + } catch (Exception e) { + throw DataXException.asDataXException(TransformerErrorCode.TRANSFORMER_RUN_EXCEPTION, e.getMessage(), e); + } + return record; + } +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/SubstrTransformer.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/SubstrTransformer.java new file mode 100644 index 000000000..b2441cc1e --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/SubstrTransformer.java @@ -0,0 +1,65 @@ +package com.alibaba.datax.core.transport.transformer; + +import com.alibaba.datax.common.element.Column; +import com.alibaba.datax.common.element.Record; +import com.alibaba.datax.common.element.StringColumn; +import com.alibaba.datax.common.exception.DataXException; +import com.alibaba.datax.transformer.Transformer; + +import java.util.Arrays; + +/** + * no comments. + * Created by liqiang on 16/3/4. + */ +public class SubstrTransformer extends Transformer { + public SubstrTransformer() { + setTransformerName("dx_substr"); + } + + @Override + public Record evaluate(Record record, Object...
paras) { + + int columnIndex; + int startIndex; + int length; + + try { + if (paras.length != 3) { + throw new RuntimeException("dx_substr paras must be 3"); + } + + columnIndex = (Integer) paras[0]; + startIndex = Integer.valueOf((String) paras[1]); + length = Integer.valueOf((String) paras[2]); + + } catch (Exception e) { + throw DataXException.asDataXException(TransformerErrorCode.TRANSFORMER_ILLEGAL_PARAMETER, "paras:" + Arrays.asList(paras).toString() + " => " + e.getMessage()); + } + + Column column = record.getColumn(columnIndex); + + try { + String oriValue = column.asString(); + //Skip substr handling when the field is null + if (oriValue == null) { + return record; + } + String newValue; + if (startIndex > oriValue.length()) { + throw new RuntimeException(String.format("dx_substr startIndex(%s) out of range(%s)", startIndex, oriValue.length())); + } + if (startIndex + length >= oriValue.length()) { + newValue = oriValue.substring(startIndex, oriValue.length()); + } else { + newValue = oriValue.substring(startIndex, startIndex + length); + } + + record.setColumn(columnIndex, new StringColumn(newValue)); + + } catch (Exception e) { + throw DataXException.asDataXException(TransformerErrorCode.TRANSFORMER_RUN_EXCEPTION, e.getMessage(), e); + } + return record; + } +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/TransformerErrorCode.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/TransformerErrorCode.java new file mode 100644 index 000000000..20cd2b134 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/TransformerErrorCode.java @@ -0,0 +1,39 @@ +package com.alibaba.datax.core.transport.transformer; + +import com.alibaba.datax.common.spi.ErrorCode; + +public enum TransformerErrorCode implements ErrorCode { + //Duplicate naming + TRANSFORMER_NAME_ERROR("TransformerErrorCode-01", "Transformer name illegal"), + TRANSFORMER_DUPLICATE_ERROR("TransformerErrorCode-02", "Transformer name has existed"), + TRANSFORMER_NOTFOUND_ERROR("TransformerErrorCode-03", "Transformer name not found"), + TRANSFORMER_CONFIGURATION_ERROR("TransformerErrorCode-04", "Transformer configuration error"), + TRANSFORMER_ILLEGAL_PARAMETER("TransformerErrorCode-05", "Transformer parameter illegal"), + TRANSFORMER_RUN_EXCEPTION("TransformerErrorCode-06", "Transformer run exception"), + TRANSFORMER_GROOVY_INIT_EXCEPTION("TransformerErrorCode-07", "Transformer Groovy init exception"); + + private final String code; + + private final String description; + + private TransformerErrorCode(String code, String description) { + this.code = code; + this.description = description; + } + + @Override + public String getCode() { + return this.code; + } + + @Override + public String getDescription() { + return this.description; + } + + @Override + public String toString() { + return String.format("Code:[%s], Description:[%s]. 
", this.code, + this.description); + } +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/TransformerExecution.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/TransformerExecution.java new file mode 100644 index 000000000..8921baaf7 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/TransformerExecution.java @@ -0,0 +1,122 @@ +package com.alibaba.datax.core.transport.transformer; + +import com.alibaba.datax.transformer.ComplexTransformer; + +import java.util.Map; + +/** + * 每个func对应一个实例. + * Created by liqiang on 16/3/16. + */ +public class TransformerExecution { + + private Object[] finalParas; + + private final TransformerExecutionParas transformerExecutionParas; + private final TransformerInfo transformerInfo; + + + public TransformerExecution(TransformerInfo transformerInfo, TransformerExecutionParas transformerExecutionParas) { + this.transformerExecutionParas = transformerExecutionParas; + this.transformerInfo = transformerInfo; + } + + /** + * 以下是动态统计信息,暂时未用 + */ + private long exaustedTime = 0; + private long successRecords = 0; + private long failedRecords = 0; + private long filterRecords = 0; + + /** + * 参数采取延迟检查 + */ + + private boolean isChecked = false; + + public void genFinalParas() { + + /** + * groovy不支持传参 + */ + if (transformerInfo.getTransformer().getTransformerName().equals("dx_groovy")) { + finalParas = new Object[2]; + finalParas[0] = transformerExecutionParas.getCode(); + finalParas[1] = transformerExecutionParas.getExtraPackage(); + return; + } + /** + * 其他function,按照columnIndex和para的顺序,如果columnIndex为空,跳过conlumnIndex + */ + if (transformerExecutionParas.getColumnIndex() != null) { + if (transformerExecutionParas.getParas() != null) { + finalParas = new Object[transformerExecutionParas.getParas().length + 1]; + System.arraycopy(transformerExecutionParas.getParas(), 0, finalParas, 1, transformerExecutionParas.getParas().length); + } else { + finalParas = new Object[1]; + } + finalParas[0] = transformerExecutionParas.getColumnIndex(); + + } else { + if (transformerExecutionParas.getParas() != null) { + finalParas = transformerExecutionParas.getParas(); + } else { + finalParas = null; + } + + } + } + + + public Object[] getFinalParas() { + return finalParas; + } + + public long getExaustedTime() { + return exaustedTime; + } + + public long getSuccessRecords() { + return successRecords; + } + + public long getFailedRecords() { + return failedRecords; + } + + public long getFilterRecords() { + return filterRecords; + } + + public void setIsChecked(boolean isChecked) { + this.isChecked = isChecked; + } + + public boolean isChecked() { + return isChecked; + } + + /** + * 一些代理方法 + */ + public ClassLoader getClassLoader() { + return transformerInfo.getClassLoader(); + } + + public Integer getColumnIndex() { + return transformerExecutionParas.getColumnIndex(); + } + + public String getTransformerName() { + return transformerInfo.getTransformer().getTransformerName(); + } + + public ComplexTransformer getTransformer() { + return transformerInfo.getTransformer(); + } + + public Map gettContext() { + return transformerExecutionParas.gettContext(); + } +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/TransformerExecutionParas.java 
b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/TransformerExecutionParas.java new file mode 100644 index 000000000..7645c2544 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/TransformerExecutionParas.java @@ -0,0 +1,62 @@ +package com.alibaba.datax.core.transport.transformer; + +import java.util.List; +import java.util.Map; + +/** + * no comments. + * Created by liqiang on 16/3/16. + */ +public class TransformerExecutionParas { + + /** + * 以下是function参数 + */ + + private Integer columnIndex; + private String[] paras; + private Map tContext; + private String code; + private List extraPackage; + + + public Integer getColumnIndex() { + return columnIndex; + } + + public String[] getParas() { + return paras; + } + + public Map gettContext() { + return tContext; + } + + public String getCode() { + return code; + } + + public List getExtraPackage() { + return extraPackage; + } + + public void setColumnIndex(Integer columnIndex) { + this.columnIndex = columnIndex; + } + + public void setParas(String[] paras) { + this.paras = paras; + } + + public void settContext(Map tContext) { + this.tContext = tContext; + } + + public void setCode(String code) { + this.code = code; + } + + public void setExtraPackage(List extraPackage) { + this.extraPackage = extraPackage; + } +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/TransformerInfo.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/TransformerInfo.java new file mode 100644 index 000000000..7b2b3d74b --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/TransformerInfo.java @@ -0,0 +1,42 @@ +package com.alibaba.datax.core.transport.transformer; + +import com.alibaba.datax.transformer.ComplexTransformer; + +/** + * 单实例. + * Created by liqiang on 16/3/9. 
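+ * One instance per registered function: it carries the ComplexTransformer, the
+ * ClassLoader it was loaded with, and an isNative flag marking built-in
+ * ("dx_"-prefixed) functions (see TransformerRegistry#checkName).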
+ */ +public class TransformerInfo { + + /** + * function基本信息 + */ + private ComplexTransformer transformer; + private ClassLoader classLoader; + private boolean isNative; + + + public ComplexTransformer getTransformer() { + return transformer; + } + + public ClassLoader getClassLoader() { + return classLoader; + } + + public boolean isNative() { + return isNative; + } + + public void setTransformer(ComplexTransformer transformer) { + this.transformer = transformer; + } + + public void setClassLoader(ClassLoader classLoader) { + this.classLoader = classLoader; + } + + public void setIsNative(boolean isNative) { + this.isNative = isNative; + } +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/TransformerRegistry.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/TransformerRegistry.java new file mode 100644 index 000000000..9a657b691 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/transport/transformer/TransformerRegistry.java @@ -0,0 +1,177 @@ +package com.alibaba.datax.core.transport.transformer; + +import com.alibaba.datax.common.exception.DataXException; +import com.alibaba.datax.common.util.Configuration; +import com.alibaba.datax.core.util.container.CoreConstant; +import com.alibaba.datax.core.util.container.JarLoader; +import com.alibaba.datax.transformer.ComplexTransformer; +import com.alibaba.datax.transformer.Transformer; +import org.apache.commons.lang3.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +/** + * no comments. + * Created by liqiang on 16/3/3. + */ +public class TransformerRegistry { + + private static final Logger LOG = LoggerFactory.getLogger(TransformerRegistry.class); + private static Map registedTransformer = new HashMap(); + + static { + /** + * add native transformer + * local storage and from server will be delay load. 
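+ * A third-party function is laid out on disk as one directory per transformer
+ * (layout inferred from loadTransformer below; names here are illustrative):
+ *   <DATAX_STORAGE_TRANSFORMER_HOME>/my_trans/transformer.json  -> {"name": "my_trans", "class": "com.example.MyTrans"}
+ *   <DATAX_STORAGE_TRANSFORMER_HOME>/my_trans/*.jar             -> loaded through an isolated JarLoader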
+ */ + + registTransformer(new SubstrTransformer()); + registTransformer(new PadTransformer()); + registTransformer(new ReplaceTransformer()); + registTransformer(new FilterTransformer()); + registTransformer(new GroovyTransformer()); + registTransformer(new PrecisionTransformer()); + } + + public static void loadTransformerFromLocalStorage() { + //add local_storage transformer + loadTransformerFromLocalStorage(null); + } + + + public static void loadTransformerFromLocalStorage(List transformers) { + + String[] paths = new File(CoreConstant.DATAX_STORAGE_TRANSFORMER_HOME).list(); + if (null == paths) { + return; + } + + for (final String each : paths) { + try { + if (transformers == null || transformers.contains(each)) { + loadTransformer(each); + } + } catch (Exception e) { + LOG.error(String.format("skip transformer(%s) loadTransformer has Exception(%s)", each, e.getMessage()), e); + } + + } + } + + public static void loadTransformer(String each) { + String transformerPath = CoreConstant.DATAX_STORAGE_TRANSFORMER_HOME + File.separator + each; + Configuration transformerConfiguration; + try { + transformerConfiguration = loadTransFormerConfig(transformerPath); + } catch (Exception e) { + LOG.error(String.format("skip transformer(%s),load transformer.json error, path = %s, ", each, transformerPath), e); + return; + } + + String className = transformerConfiguration.getString("class"); + if (StringUtils.isEmpty(className)) { + LOG.error(String.format("skip transformer(%s),class not config, path = %s, config = %s", each, transformerPath, transformerConfiguration.beautify())); + return; + } + + String funName = transformerConfiguration.getString("name"); + if (!each.equals(funName)) { + LOG.warn(String.format("transformer(%s) name not match transformer.json config name[%s], will ignore json's name, path = %s, config = %s", each, funName, transformerPath, transformerConfiguration.beautify())); + } + JarLoader jarLoader = new JarLoader(new String[]{transformerPath}); + try { + Class transformerClass = jarLoader.loadClass(className); + Object transformer = transformerClass.newInstance(); + if (ComplexTransformer.class.isAssignableFrom(transformer.getClass())) { + ((ComplexTransformer) transformer).setTransformerName(each); + registComplexTransformer((ComplexTransformer) transformer, jarLoader, false); + } else if (Transformer.class.isAssignableFrom(transformer.getClass())) { + ((Transformer) transformer).setTransformerName(each); + registTransformer((Transformer) transformer, jarLoader, false); + } else { + LOG.error(String.format("load Transformer class(%s) error, path = %s", className, transformerPath)); + } + } catch (Exception e) { + //错误funciton跳过 + LOG.error(String.format("skip transformer(%s),load Transformer class error, path = %s ", each, transformerPath), e); + } + } + + private static Configuration loadTransFormerConfig(String transformerPath) { + return Configuration.from(new File(transformerPath + File.separator + "transformer.json")); + } + + public static TransformerInfo getTransformer(String transformerName) { + + TransformerInfo result = registedTransformer.get(transformerName); + + //if (result == null) { + //todo 再尝试从disk读取 + //} + + return result; + } + + public static synchronized void registTransformer(Transformer transformer) { + registTransformer(transformer, null, true); + } + + public static synchronized void registTransformer(Transformer transformer, ClassLoader classLoader, boolean isNative) { + + checkName(transformer.getTransformerName(), isNative); + + if 
(registedTransformer.containsKey(transformer.getTransformerName())) { + throw DataXException.asDataXException(TransformerErrorCode.TRANSFORMER_DUPLICATE_ERROR, " name=" + transformer.getTransformerName()); + } + + registedTransformer.put(transformer.getTransformerName(), buildTransformerInfo(new ComplexTransformerProxy(transformer), isNative, classLoader)); + + } + + public static synchronized void registComplexTransformer(ComplexTransformer complexTransformer, ClassLoader classLoader, boolean isNative) { + + checkName(complexTransformer.getTransformerName(), isNative); + + if (registedTransformer.containsKey(complexTransformer.getTransformerName())) { + throw DataXException.asDataXException(TransformerErrorCode.TRANSFORMER_DUPLICATE_ERROR, " name=" + complexTransformer.getTransformerName()); + } + + registedTransformer.put(complexTransformer.getTransformerName(), buildTransformerInfo(complexTransformer, isNative, classLoader)); + } + + private static void checkName(String functionName, boolean isNative) { + boolean checkResult = true; + if (isNative) { + if (!functionName.startsWith("dx_")) { + checkResult = false; + } + } else { + if (functionName.startsWith("dx_")) { + checkResult = false; + } + } + + if (!checkResult) { + throw DataXException.asDataXException(TransformerErrorCode.TRANSFORMER_NAME_ERROR, " name=" + functionName + ": isNative=" + isNative); + } + + } + + private static TransformerInfo buildTransformerInfo(ComplexTransformer complexTransformer, boolean isNative, ClassLoader classLoader) { + TransformerInfo transformerInfo = new TransformerInfo(); + transformerInfo.setClassLoader(classLoader); + transformerInfo.setIsNative(isNative); + transformerInfo.setTransformer(complexTransformer); + return transformerInfo; + } + + public static List getAllSuportTransformer() { + return new ArrayList(registedTransformer.keySet()); + } +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/util/ClassSize.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/util/ClassSize.java new file mode 100644 index 000000000..fadefa246 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/util/ClassSize.java @@ -0,0 +1,46 @@ +package com.alibaba.datax.core.util; + +/** + * Created by liqiang on 15/12/12. 
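+ * Rough per-object heap estimates (a 64-bit JVM is assumed, see REFERENCE below);
+ * align(num) rounds a size up to the 8-byte boundary, e.g. align(13) == 16.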
+ */ +public class ClassSize { + + public static final int DefaultRecordHead; + public static final int ByteBufferHead; + public static final int ColumnHead; + + //objectHead的大小 + public static final int REFERENCE; + public static final int OBJECT; + public static final int ARRAY; + public static final int ARRAYLIST; + + static { + //only 64位 + REFERENCE = 8; + + OBJECT = 2 * REFERENCE; + + ARRAY = align(3 * REFERENCE); + + // 16+8+24+16 + ARRAYLIST = align(OBJECT + align(REFERENCE) + align(ARRAY) + + (2 * Long.SIZE / Byte.SIZE)); + // 8+64+8 + DefaultRecordHead = align(align(REFERENCE) + ClassSize.ARRAYLIST + 2 * Integer.SIZE / Byte.SIZE); + //16+4 + ColumnHead = align(2 * REFERENCE + Integer.SIZE / Byte.SIZE); + // 8 + 16 + 1 + ByteBufferHead = align(align(REFERENCE) + 4 * Integer.SIZE /Byte.SIZE + 1); + } + + public static int align(int num) { + return (int) (align((long) num)); + } + + public static long align(long num) { + //The 7 comes from that the alignSize is 8 which is the number of bytes + //stored and sent together + return ((num + 7) >> 3) << 3; + } +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/util/ClassUtil.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/util/ClassUtil.java new file mode 100644 index 000000000..9707406e6 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/util/ClassUtil.java @@ -0,0 +1,47 @@ +package com.alibaba.datax.core.util; + +import java.lang.reflect.Constructor; + +public final class ClassUtil { + + /** + * 通过反射构造类对象 + * + * @param className 反射的类名称 + * @param t 反射类的类型Class对象 + * @param args 构造参数 + */ + @SuppressWarnings({"rawtypes", "unchecked"}) + public static T instantiate(String className, Class t, + Object... args) { + try { + Constructor constructor = (Constructor) Class.forName(className) + .getConstructor(ClassUtil.toClassType(args)); + return (T) constructor.newInstance(args); + } catch (Exception e) { + throw new IllegalArgumentException(e); + } + } + + public static T instantiate(String className, Class t, ClassLoader classLoader, + Object... 
args) { + try { + Constructor constructor = (Constructor) Class.forName(className, true, classLoader) + .getConstructor(ClassUtil.toClassType(args)); + return (T) constructor.newInstance(args); + } catch (Exception e) { + throw new IllegalArgumentException(e); + } + } + + private static Class[] toClassType(Object[] args) { + Class[] clazzs = new Class[args.length]; + + for (int i = 0, length = args.length; i < length; i++) { + clazzs[i] = args[i].getClass(); + } + + return clazzs; + } + +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/util/CompressSuffixName.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/util/CompressSuffixName.java new file mode 100644 index 000000000..347ebaed3 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/util/CompressSuffixName.java @@ -0,0 +1,46 @@ +package com.alibaba.datax.core.util; + + +import org.apache.commons.lang3.StringUtils; + +/** + * @author davidhua + * 2019/5/30 + */ +public enum CompressSuffixName { + //Compress list + LZO_DEFLAT("lzo_deflat", ".lzo_deflat"), + LZO("lzo", ".lzo"), + GZIP("gzip", ".gz"), + BZIP2("bzip2", ".bz2"), + HADOOP_SNAPPY("hadoop-snappy", ".snappy"), + SNAPPY("snappy", ".snappy"), + FRAMING_SNAPPY("framing-snappy", ".snappy"), + ZIP("zip", ".zip"), + NONE("none", ""); + + private String compressName; + + private String suffix; + + CompressSuffixName(String compressName, String suffix){ + this.compressName = compressName; + this.suffix = suffix; + } + + public static String chooseSuffix(String compressName){ + if(StringUtils.isBlank(compressName)){ + return null; + } + CompressSuffixName compressSuffixName = + CompressSuffixName.valueOf(compressName.toUpperCase()); + return compressSuffixName.getSuffix(); + } + public String getSuffix(){ + return suffix; + } + + public String getCompressName(){ + return compressName; + } +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/util/ConfigParser.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/util/ConfigParser.java new file mode 100644 index 000000000..0581939eb --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/util/ConfigParser.java @@ -0,0 +1,230 @@ +package com.alibaba.datax.core.util; + +import com.alibaba.datax.common.exception.CommonErrorCode; +import com.alibaba.datax.common.exception.DataXException; +import com.alibaba.datax.common.util.Configuration; +import com.alibaba.datax.core.util.container.CoreConstant; +import com.webank.wedatasphere.exchangis.datax.common.PatternInjectUtils; +import org.apache.commons.io.FileUtils; +import org.apache.commons.io.IOUtils; +import org.apache.commons.lang.StringUtils; +import org.apache.http.client.methods.HttpGet; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileInputStream; +import java.io.FileNotFoundException; +import java.io.IOException; +import java.net.URL; +import java.util.*; + +public final class ConfigParser { + private static final Logger LOG = LoggerFactory.getLogger(ConfigParser.class); + + /** + * 指定Job配置路径,ConfigParser会解析Job、Plugin、Core全部信息,并以Configuration返回 + */ + public static Configuration parse(final String jobPath) { + Configuration configuration = ConfigParser.parseJobConfig(jobPath); + + mergeConfiguration(configuration); + + return configuration; + } + + private static void 
mergeConfiguration(Configuration configuration) { + //Inject System properties into 'core.json' + try { + String coreConfig = IOUtils.toString(new FileInputStream(CoreConstant.DATAX_CONF_PATH)); + Map systemPropsMap = new HashMap((Map) System.getProperties()); + coreConfig = PatternInjectUtils.inject(coreConfig, systemPropsMap); + configuration.merge(Configuration.from(coreConfig), false); + } catch (FileNotFoundException e) { + throw DataXException.asDataXException(CommonErrorCode.CONFIG_ERROR, "Cannot find configuration file: " + + new File(CoreConstant.DATAX_CONF_PATH).getAbsolutePath()); + } catch (IOException e) { + throw DataXException.asDataXException(CommonErrorCode.CONFIG_ERROR, e); + } + Set pluginList = new HashSet(); + + // todo config优化,只捕获需要的plugin + String readerPluginName = configuration.getString( + CoreConstant.DATAX_JOB_CONTENT_READER_NAME); + pluginList.add(readerPluginName); + try{ + //Support multiple writers + List writerList = configuration.getList(CoreConstant.DATAX_JOB_CONTENT_WRITER); + if(null != writerList){ + for(int i = 0; i < writerList.size(); i++){ + pluginList.add(configuration.getString( + String.format(CoreConstant.DATAX_JOB_CONTENT_WRITER_ARRAY_NAME, i))); + } + } + }catch(Exception e){ + pluginList.add(configuration.getString(CoreConstant.DATAX_JOB_CONTENT_WRITER_NAME)); + } + String preHandlerName = configuration.getString( + CoreConstant.DATAX_JOB_PREHANDLER_PLUGINNAME); + + String postHandlerName = configuration.getString( + CoreConstant.DATAX_JOB_POSTHANDLER_PLUGINNAME); + if (StringUtils.isNotEmpty(preHandlerName)) { + pluginList.add(preHandlerName); + } + if (StringUtils.isNotEmpty(postHandlerName)) { + pluginList.add(postHandlerName); + } + try { + configuration.merge(parsePluginConfig(new ArrayList(pluginList)), false); + } catch (Exception e) { + //吞掉异常,保持log干净。这里message足够。 + LOG.warn(String.format("插件[%s]加载失败,1s后重试... 
Exception:%s ", StringUtils.join(pluginList, ","), e.getMessage())); + try { + Thread.sleep(1000); + } catch (InterruptedException e1) { + // + } + configuration.merge(parsePluginConfig(new ArrayList(pluginList)), false); + } + } + + public static Configuration parseByString(String jobContent) { + Configuration configuration = Configuration.from(jobContent); + + configuration = SecretUtil.decryptSecretKey(configuration); + + mergeConfiguration(configuration); + + return configuration; + } + + private static Configuration parseCoreConfig(final String path) { + return Configuration.from(new File(path)); + } + + public static Configuration parseJobConfig(final String path) { + String jobContent = getJobContent(path); + Configuration config = Configuration.from(jobContent); + + return SecretUtil.decryptSecretKey(config); + } + + private static String getJobContent(String jobResource) { + String jobContent; + + boolean isJobResourceFromHttp = jobResource.trim().toLowerCase().startsWith("http"); + + + if (isJobResourceFromHttp) { + //设置httpclient的 HTTP_TIMEOUT_INMILLIONSECONDS + Configuration coreConfig = ConfigParser.parseCoreConfig(CoreConstant.DATAX_CONF_PATH); + int httpTimeOutInMillionSeconds = coreConfig.getInt( + CoreConstant.DATAX_CORE_DATAXSERVER_TIMEOUT, 5000); + HttpClientUtil.setHttpTimeoutInMillionSeconds(httpTimeOutInMillionSeconds); + + HttpClientUtil httpClientUtil = new HttpClientUtil(); + try { + URL url = new URL(jobResource); + HttpGet httpGet = HttpClientUtil.getGetRequest(); + httpGet.setURI(url.toURI()); + + jobContent = httpClientUtil.executeAndGetWithRetry(httpGet, String.class, 1, 1000L); + } catch (Exception e) { + throw DataXException.asDataXException(FrameworkErrorCode.CONFIG_ERROR, "获取作业配置信息失败:" + jobResource, e); + } + } else { + // jobResource 是本地文件绝对路径 + try { + jobContent = FileUtils.readFileToString(new File(jobResource)); + } catch (IOException e) { + throw DataXException.asDataXException(FrameworkErrorCode.CONFIG_ERROR, "获取作业配置信息失败:" + jobResource, e); + } + } + + if (jobContent == null) { + throw DataXException.asDataXException(FrameworkErrorCode.CONFIG_ERROR, "获取作业配置信息失败:" + jobResource); + } + return jobContent; + } + + public static Configuration parsePluginConfig(List wantPluginNames) { + Configuration configuration = Configuration.newDefault(); + + Set replicaCheckPluginSet = new HashSet(); + int complete = 0; + for (final String each : ConfigParser + .getDirAsList(CoreConstant.DATAX_PLUGIN_READER_HOME)) { + Configuration eachReaderConfig = ConfigParser.parseOnePluginConfig(each, "reader", replicaCheckPluginSet, wantPluginNames); + if (eachReaderConfig != null) { + configuration.merge(eachReaderConfig, true); + complete += 1; + } + } + + for (final String each : ConfigParser + .getDirAsList(CoreConstant.DATAX_PLUGIN_WRITER_HOME)) { + Configuration eachWriterConfig = ConfigParser.parseOnePluginConfig(each, "writer", replicaCheckPluginSet, wantPluginNames); + if (eachWriterConfig != null) { + configuration.merge(eachWriterConfig, true); + complete += 1; + } + } + + if (wantPluginNames != null && wantPluginNames.size() > 0 && wantPluginNames.size() != complete) { + throw DataXException.asDataXException(FrameworkErrorCode.PLUGIN_INIT_ERROR, "插件加载失败,未完成指定插件加载:" + wantPluginNames); + } + + return configuration; + } + + + public static Configuration parseOnePluginConfig(final String path, + final String type, + Set pluginSet, List wantPluginNames) { + String filePath = path + File.separator + "plugin.json"; + Configuration configuration = 
Configuration.from(new File(filePath)); + + String pluginPath = configuration.getString("path"); + String pluginName = configuration.getString("name"); + if (!pluginSet.contains(pluginName)) { + pluginSet.add(pluginName); + } else { + throw DataXException.asDataXException(FrameworkErrorCode.PLUGIN_INIT_ERROR, "插件加载失败,存在重复插件:" + filePath); + } + + //不是想要的插件,返回null + if (wantPluginNames != null && wantPluginNames.size() > 0 && !wantPluginNames.contains(pluginName)) { + return null; + } + + boolean isDefaultPath = StringUtils.isBlank(pluginPath); + if (isDefaultPath) { + configuration.set("path", path); + } + + Configuration result = Configuration.newDefault(); + + result.set( + String.format("plugin.%s.%s", type, pluginName), + configuration.getInternal()); + + return result; + } + + private static List getDirAsList(String path) { + List result = new ArrayList(); + + String[] paths = new File(path).list(); + if (null == paths) { + return result; + } + + for (final String each : paths) { + result.add(path + File.separator + each); + } + + return result; + } + +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/util/ConfigurationValidate.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/util/ConfigurationValidate.java new file mode 100644 index 000000000..1c549b548 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/util/ConfigurationValidate.java @@ -0,0 +1,33 @@ +package com.alibaba.datax.core.util; + +import com.alibaba.datax.common.util.Configuration; +import org.apache.commons.lang.Validate; + +/** + * Created by jingxing on 14-9-16. + *
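+ * (coreValidate / pluginValidate / jobValidate below are placeholders that
+ * currently accept any configuration.)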
+ * 对配置文件做整体检查 + */ +public class ConfigurationValidate { + public static void doValidate(Configuration allConfig) { + Validate.isTrue(allConfig != null, ""); + + coreValidate(allConfig); + + pluginValidate(allConfig); + + jobValidate(allConfig); + } + + private static void coreValidate(Configuration allconfig) { + return; + } + + private static void pluginValidate(Configuration allConfig) { + return; + } + + private static void jobValidate(Configuration allConfig) { + return; + } +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/util/ErrorRecordChecker.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/util/ErrorRecordChecker.java new file mode 100644 index 000000000..ad7f80f61 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/util/ErrorRecordChecker.java @@ -0,0 +1,82 @@ +package com.alibaba.datax.core.util; + +import com.alibaba.datax.common.exception.DataXException; +import com.alibaba.datax.common.util.Configuration; +import com.alibaba.datax.core.statistics.communication.Communication; +import com.alibaba.datax.core.statistics.communication.CommunicationTool; +import com.alibaba.datax.core.util.container.CoreConstant; +import org.apache.commons.lang3.Validate; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * 检查任务是否到达错误记录限制。有检查条数(recordLimit)和百分比(percentageLimit)两种方式。 + * 1. errorRecord表示出错条数不能大于限制数,当超过时任务失败。比如errorRecord为0表示不容许任何脏数据。 + * 2. errorPercentage表示出错比例,在任务结束时校验。 + * 3. errorRecord优先级高于errorPercentage。 + */ +public final class ErrorRecordChecker { + private static final Logger LOG = LoggerFactory + .getLogger(ErrorRecordChecker.class); + + private Long recordLimit; + private Double percentageLimit; + + public ErrorRecordChecker(Configuration configuration) { + this(configuration.getLong(CoreConstant.DATAX_JOB_SETTING_ERRORLIMIT_RECORD), + configuration.getDouble(CoreConstant.DATAX_JOB_SETTING_ERRORLIMIT_PERCENT)); + } + + public ErrorRecordChecker(Long rec, Double percentage) { + recordLimit = rec; + percentageLimit = percentage; + + if (percentageLimit != null) { + Validate.isTrue(0.0 <= percentageLimit && percentageLimit <= 1.0, + "脏数据百分比限制应该在[0.0, 1.0]之间"); + } + + if (recordLimit != null) { + Validate.isTrue(recordLimit >= 0, + "脏数据条数现在应该为非负整数"); + + // errorRecord优先级高于errorPercentage. 
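+            // e.g. configuring errorLimit.record=0 together with errorLimit.percentage=0.5
+            // drops the percentage check, and any single dirty record then fails the job.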
+ percentageLimit = null; + } + } + + public void checkRecordLimit(Communication communication) { + if (recordLimit == null) { + return; + } + + long errorNumber = CommunicationTool.getTotalErrorRecords(communication); + if (recordLimit < errorNumber) { + LOG.debug( + String.format("Error-limit set to %d, error count check.", + recordLimit)); + throw DataXException.asDataXException( + FrameworkErrorCode.PLUGIN_DIRTY_DATA_LIMIT_EXCEED, + String.format("脏数据条数检查不通过,限制是[%d]条,但实际上捕获了[%d]条.", + recordLimit, errorNumber)); + } + } + + public void checkPercentageLimit(Communication communication) { + if (percentageLimit == null) { + return; + } + LOG.debug(String.format( + "Error-limit set to %f, error percent check.", percentageLimit)); + + long total = CommunicationTool.getTotalReadRecords(communication); + long error = CommunicationTool.getTotalErrorRecords(communication); + + if (total > 0 && ((double) error / (double) total) > percentageLimit) { + throw DataXException.asDataXException( + FrameworkErrorCode.PLUGIN_DIRTY_DATA_LIMIT_EXCEED, + String.format("脏数据百分比检查不通过,限制是[%f],但实际上捕获到[%f].", + percentageLimit, ((double) error / (double) total))); + } + } +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/util/ExceptionTracker.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/util/ExceptionTracker.java new file mode 100644 index 000000000..5d83c7d5f --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/util/ExceptionTracker.java @@ -0,0 +1,15 @@ +package com.alibaba.datax.core.util; + +import java.io.PrintWriter; +import java.io.StringWriter; + +public class ExceptionTracker { + public static final int STRING_BUFFER = 4096; + + public static String trace(Throwable ex) { + StringWriter sw = new StringWriter(STRING_BUFFER); + PrintWriter pw = new PrintWriter(sw); + ex.printStackTrace(pw); + return sw.toString(); + } +} \ No newline at end of file diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/util/FrameworkErrorCode.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/util/FrameworkErrorCode.java new file mode 100644 index 000000000..b88796081 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/util/FrameworkErrorCode.java @@ -0,0 +1,72 @@ +package com.alibaba.datax.core.util; + +import com.alibaba.datax.common.spi.ErrorCode; + +/** + * TODO: 根据现有日志数据分析各类错误,进行细化。 + *
+ * Please do not reformat the code in this class.
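+ * e.g. CONFIG_ERROR renders as "Code:[Framework-03], Description:[...]" via toString below.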
+ */ +public enum FrameworkErrorCode implements ErrorCode { + // + INSTALL_ERROR("Framework-00", "DataX引擎安装错误, 请联系您的运维解决 ."), + ARGUMENT_ERROR("Framework-01", "DataX引擎运行错误,该问题通常是由于内部编程错误引起,请联系DataX开发团队解决 ."), + RUNTIME_ERROR("Framework-02", "DataX引擎运行过程出错,具体原因请参看DataX运行结束时的错误诊断信息 ."), + CONFIG_ERROR("Framework-03", "DataX引擎配置错误,该问题通常是由于DataX安装错误引起,请联系您的运维解决 ."), + SECRET_ERROR("Framework-04", "DataX引擎加解密出错,该问题通常是由于DataX密钥配置错误引起,请联系您的运维解决 ."), + HOOK_LOAD_ERROR("Framework-05", "加载外部Hook出现错误,通常是由于DataX安装引起的"), + HOOK_FAIL_ERROR("Framework-06", "执行外部Hook出现错误"), + + PLUGIN_INSTALL_ERROR("Framework-10", "DataX插件安装错误, 该问题通常是由于DataX安装错误引起,请联系您的运维解决 ."), + PLUGIN_NOT_FOUND("Framework-11", "DataX插件配置错误, 该问题通常是由于DataX安装错误引起,请联系您的运维解决 ."), + PLUGIN_INIT_ERROR("Framework-12", "DataX插件初始化错误, 该问题通常是由于DataX安装错误引起,请联系您的运维解决 ."), + PLUGIN_RUNTIME_ERROR("Framework-13", "DataX插件运行时出错, 具体原因请参看DataX运行结束时的错误诊断信息 ."), + PLUGIN_DIRTY_DATA_LIMIT_EXCEED("Framework-14", "DataX传输脏数据超过用户预期,该错误通常是由于源端数据存在较多业务脏数据导致,请仔细检查DataX汇报的脏数据日志信息, 或者您可以适当调大脏数据阈值 ."), + PLUGIN_SPLIT_ERROR("Framework-15", "DataX插件切分出错, 该问题通常是由于DataX各个插件编程错误引起,请联系DataX开发团队解决"), + KILL_JOB_TIMEOUT_ERROR("Framework-16", "kill 任务超时,请联系PE解决"), + START_TASKGROUP_ERROR("Framework-17", "taskGroup启动失败,请联系DataX开发团队解决"), + CALL_DATAX_SERVICE_FAILED("Framework-18", "请求 DataX Service 出错."), + CALL_REMOTE_FAILED("Framework-19", "远程调用失败"), + KILLED_EXIT_VALUE("Framework-143", "Job 收到了 Kill 命令."), + LDAP_ERROR("Framework-145", "DataX LDAP配置错误,请联系您的运维解决 ."), + UM_ERROR("Framework-146", "DataX UM配置错误,请联系您的运维解决 ."), + CHANNEL_STREAM_ERROR("Framework-200", "DataX的Stream传输通道出错,请联系您的运维解决"), + PROCESSOR_LOAD_ERROR("Framework-201", "处理器动态加载出错,请检查处理器代码"), + PROCESSOR_RUN_ERROR("Framework-202", "处理器执行错误"); + private final String code; + + private final String description; + + private FrameworkErrorCode(String code, String description) { + this.code = code; + this.description = description; + } + + @Override + public String getCode() { + return this.code; + } + + @Override + public String getDescription() { + return this.description; + } + + @Override + public String toString() { + return String.format("Code:[%s], Description:[%s]. 
", this.code, + this.description); + } + + /** + * 通过 "Framework-143" 来标示 任务是 Killed 状态 + */ + public int toExitValue() { + if (this == FrameworkErrorCode.KILLED_EXIT_VALUE) { + return 143; + } else { + return 1; + } + } + +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/util/HttpClientUtil.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/util/HttpClientUtil.java new file mode 100644 index 000000000..04fea1c40 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/util/HttpClientUtil.java @@ -0,0 +1,155 @@ +package com.alibaba.datax.core.util; + +import com.alibaba.datax.common.exception.DataXException; +import com.webank.wedatasphere.exchangis.datax.common.GsonUtil; +import com.alibaba.datax.common.util.RetryUtil; +import org.apache.http.Consts; +import org.apache.http.HttpEntity; +import org.apache.http.HttpStatus; +import org.apache.http.auth.AuthScope; +import org.apache.http.auth.UsernamePasswordCredentials; +import org.apache.http.client.CredentialsProvider; +import org.apache.http.client.config.RequestConfig; +import org.apache.http.client.methods.*; +import org.apache.http.impl.client.BasicCredentialsProvider; +import org.apache.http.impl.client.CloseableHttpClient; +import org.apache.http.impl.client.HttpClientBuilder; +import org.apache.http.util.EntityUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.*; +import java.util.Properties; +import java.util.concurrent.ThreadPoolExecutor; + + +/** + * @author davidhua + */ +public class HttpClientUtil { + + private static Logger logger = LoggerFactory.getLogger(HttpClientUtil.class); + + private static CredentialsProvider provider; + + private CloseableHttpClient httpClient; + + private volatile static HttpClientUtil clientUtil; + + private static int HTTP_TIMEOUT_IN_MILLISECONDS = 5000; + + private static final int POOL_SIZE = 20; + + private static ThreadPoolExecutor asyncExecutor = RetryUtil.createThreadPoolExecutor(); + + public static void setHttpTimeoutInMillionSeconds(int httpTimeoutInMillionSeconds) { + HTTP_TIMEOUT_IN_MILLISECONDS = httpTimeoutInMillionSeconds; + } + + public static synchronized HttpClientUtil getHttpClientUtil() { + if (null == clientUtil) { + synchronized (HttpClientUtil.class) { + if (null == clientUtil) { + clientUtil = new HttpClientUtil(); + } + } + } + return clientUtil; + } + + public HttpClientUtil() { + initApacheHttpClient(); + } + + public void destroy() { + destroyApacheHttpClient(); + } + + public static void setBasicAuth(String username, String password) { + Properties prob = SecretUtil.getSecurityProperties(); + provider = new BasicCredentialsProvider(); + provider.setCredentials(AuthScope.ANY, + new UsernamePasswordCredentials(prob.getProperty("auth.user"), prob.getProperty("auth.pass"))); + } + + private void initApacheHttpClient() { + RequestConfig requestConfig = RequestConfig.custom().setSocketTimeout(HTTP_TIMEOUT_IN_MILLISECONDS) + .setConnectTimeout(HTTP_TIMEOUT_IN_MILLISECONDS).setConnectionRequestTimeout(HTTP_TIMEOUT_IN_MILLISECONDS) + .setStaleConnectionCheckEnabled(true).build(); + + if (null == provider) { + httpClient = HttpClientBuilder.create().setMaxConnTotal(POOL_SIZE).setMaxConnPerRoute(POOL_SIZE) + .setDefaultRequestConfig(requestConfig).build(); + } else { + httpClient = HttpClientBuilder.create().setMaxConnTotal(POOL_SIZE).setMaxConnPerRoute(POOL_SIZE) + 
.setDefaultRequestConfig(requestConfig).setDefaultCredentialsProvider(provider).build(); + } + } + + private void destroyApacheHttpClient() { + try { + if (httpClient != null) { + httpClient.close(); + httpClient = null; + } + } catch (IOException e) { + logger.error(e.getMessage(), e); + } + } + + public static HttpGet getGetRequest() { + return new HttpGet(); + } + + public static HttpPost getPostRequest(String uri, HttpEntity entity, String... headers) { + HttpPost httpPost = new HttpPost(uri); + httpPost.setEntity(entity); + if(headers.length % 2 == 0){ + for(int i = 0; i < headers.length; i++){ + httpPost.addHeader(headers[i], headers[++i]); + } + } + return httpPost; + } + + public static HttpPut getPutRequest() { + return new HttpPut(); + } + + public static HttpDelete getDeleteRequest() { + return new HttpDelete(); + } + + public T executeAndGet(HttpRequestBase httpRequestBase, Class type) throws Exception { + return httpClient.execute(httpRequestBase, httpResponse -> { + if (httpResponse.getStatusLine().getStatusCode() != HttpStatus.SC_OK) { + logger.info("Request path: " + httpRequestBase.getURI() + ", method:" + httpRequestBase.getMethod() + + ",STATUS CODE = " + httpResponse.getStatusLine().getStatusCode()); + httpRequestBase.abort(); + throw new RuntimeException("Response Status Code : " + httpResponse.getStatusLine().getStatusCode()); + } else { + HttpEntity entity = httpResponse.getEntity(); + if (entity != null) { + String entityString = EntityUtils.toString(entity, Consts.UTF_8); + if(type.equals(String.class)){ + return (T)entityString; + } + return GsonUtil.fromJson(entityString, type); + } else { + throw new RuntimeException("Response Entity Is Null"); + } + } + }); + } + + public T executeAndGetWithRetry(final HttpRequestBase httpRequestBase, + Class type, final int retryTimes, final long retryInterval) { + try { + return RetryUtil.asyncExecuteWithRetry(() -> executeAndGet(httpRequestBase, type), + retryTimes, retryInterval, true, HTTP_TIMEOUT_IN_MILLISECONDS + 1000L, asyncExecutor); + } catch (Exception e) { + throw DataXException.asDataXException(FrameworkErrorCode.RUNTIME_ERROR, e); + } + } + +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/util/LdapUtil.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/util/LdapUtil.java new file mode 100644 index 000000000..69e81e06a --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/util/LdapUtil.java @@ -0,0 +1,40 @@ +package com.alibaba.datax.core.util; + +import com.alibaba.datax.common.exception.DataXException; +import com.alibaba.datax.core.util.container.CoreConstant; + +import java.io.*; +import java.util.Properties; + +/** + * Created by jingxing on 14/12/15. 
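+ * Lazily loads LDAP settings from CoreConstant.DATAX_LDAP_PATH; note that
+ * getLdapProperties returns null when that file does not exist.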
+ */ +public class LdapUtil { + private static Properties properties; + + + public static synchronized Properties getLdapProperties() { + if (properties == null && new File(CoreConstant.DATAX_LDAP_PATH).exists()) { + InputStream secretStream = null; + try { + secretStream = new FileInputStream( + CoreConstant.DATAX_LDAP_PATH); + } catch (FileNotFoundException e) { + throw DataXException.asDataXException( + FrameworkErrorCode.LDAP_ERROR, + "DataX LDAP配置错误"); + } + + properties = new Properties(); + try { + properties.load(secretStream); + secretStream.close(); + } catch (IOException e) { + throw DataXException.asDataXException( + FrameworkErrorCode.SECRET_ERROR, "读取LDAP置文件出错", e); + } + } + + return properties; + } +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/util/SecretUtil.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/util/SecretUtil.java new file mode 100644 index 000000000..fc6d5356f --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/util/SecretUtil.java @@ -0,0 +1,439 @@ +package com.alibaba.datax.core.util; + +import com.alibaba.datax.common.exception.DataXException; +import com.alibaba.datax.common.util.Configuration; +import com.alibaba.datax.core.util.container.CoreConstant; +import org.apache.commons.codec.binary.Base64; +import org.apache.commons.lang.StringUtils; +import org.apache.commons.lang3.tuple.ImmutableTriple; +import org.apache.commons.lang3.tuple.Triple; + +import javax.crypto.Cipher; +import javax.crypto.SecretKey; +import javax.crypto.spec.SecretKeySpec; +import java.io.FileInputStream; +import java.io.FileNotFoundException; +import java.io.IOException; +import java.io.InputStream; +import java.security.Key; +import java.security.KeyFactory; +import java.security.KeyPair; +import java.security.KeyPairGenerator; +import java.security.interfaces.RSAPrivateKey; +import java.security.interfaces.RSAPublicKey; +import java.security.spec.PKCS8EncodedKeySpec; +import java.security.spec.X509EncodedKeySpec; +import java.util.HashMap; +import java.util.Map; +import java.util.Properties; + +/** + * Created by jingxing on 14/12/15. + */ +public class SecretUtil { + private static Properties properties; + + //RSA Key:keyVersion value:left:privateKey, right:publicKey, middle: type + //DESede Key: keyVersion value:left:keyContent, right:keyContent, middle: type + private static Map> versionKeyMap; + + private static final String ENCODING = "UTF-8"; + + public static final String KEY_ALGORITHM_RSA = "RSA"; + + public static final String KEY_ALGORITHM_3DES = "DESede"; + + private static final String CIPHER_ALGORITHM_3DES = "DESede/ECB/PKCS5Padding"; + + private static final Base64 base64 = new Base64(); + + /** + * BASE64加密 + * + * @param plaintextBytes + * @return + * @throws Exception + */ + public static String encryptBASE64(byte[] plaintextBytes) throws Exception { + return new String(base64.encode(plaintextBytes), ENCODING); + } + + /** + * BASE64解密 + * + * @param cipherText + * @return + * @throws Exception + */ + public static byte[] decryptBASE64(String cipherText) { + return base64.decode(cipherText); + } + + /** + * 加密
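+ * Round-trip sketch (the RSA pair comes from initKey() below; "secret" is illustrative):
+ *   String[] keys = SecretUtil.initKey();  // {base64 publicKey, base64 privateKey}
+ *   String cipherText = SecretUtil.encrypt("secret", keys[0], SecretUtil.KEY_ALGORITHM_RSA);
+ *   String plainText  = SecretUtil.decrypt(cipherText, keys[1], SecretUtil.KEY_ALGORITHM_RSA);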
+ * + * @param data 裸的原始数据 + * @param key 经过base64加密的公钥(RSA)或者裸密钥(3DES) + */ + public static String encrypt(String data, String key, String method) { + if (SecretUtil.KEY_ALGORITHM_RSA.equals(method)) { + return SecretUtil.encryptRSA(data, key); + } else if (SecretUtil.KEY_ALGORITHM_3DES.equals(method)) { + return SecretUtil.encrypt3DES(data, key); + } else { + throw DataXException.asDataXException( + FrameworkErrorCode.SECRET_ERROR, + String.format("系统编程错误,不支持的加密类型", method)); + } + } + + /** + * 解密
+ * + * @param data 已经经过base64加密的密文 + * @param key 已经经过base64加密私钥(RSA)或者裸密钥(3DES) + */ + public static String decrypt(String data, String key, String method) { + if (SecretUtil.KEY_ALGORITHM_RSA.equals(method)) { + return SecretUtil.decryptRSA(data, key); + } else if (SecretUtil.KEY_ALGORITHM_3DES.equals(method)) { + return SecretUtil.decrypt3DES(data, key); + } else { + throw DataXException.asDataXException( + FrameworkErrorCode.SECRET_ERROR, + String.format("系统编程错误,不支持的加密类型", method)); + } + } + + /** + * 加密
+ * 用公钥加密 encryptByPublicKey + * + * @param data 裸的原始数据 + * @param key 经过base64加密的公钥 + * @return 结果也采用base64加密 + * @throws Exception + */ + public static String encryptRSA(String data, String key) { + try { + // 对公钥解密,公钥被base64加密过 + byte[] keyBytes = decryptBASE64(key); + + // 取得公钥 + X509EncodedKeySpec x509KeySpec = new X509EncodedKeySpec(keyBytes); + KeyFactory keyFactory = KeyFactory.getInstance(KEY_ALGORITHM_RSA); + Key publicKey = keyFactory.generatePublic(x509KeySpec); + + // 对数据加密 + Cipher cipher = Cipher.getInstance(keyFactory.getAlgorithm()); + cipher.init(Cipher.ENCRYPT_MODE, publicKey); + + return encryptBASE64(cipher.doFinal(data.getBytes(ENCODING))); + } catch (Exception e) { + throw DataXException.asDataXException( + FrameworkErrorCode.SECRET_ERROR, "rsa加密出错", e); + } + } + + /** + * 解密
+ * 用私钥解密 + * + * @param data 已经经过base64加密的密文 + * @param key 已经经过base64加密私钥 + * @return + * @throws Exception + */ + public static String decryptRSA(String data, String key) { + try { + // 对密钥解密 + byte[] keyBytes = decryptBASE64(key); + + // 取得私钥 + PKCS8EncodedKeySpec pkcs8KeySpec = new PKCS8EncodedKeySpec(keyBytes); + KeyFactory keyFactory = KeyFactory.getInstance(KEY_ALGORITHM_RSA); + Key privateKey = keyFactory.generatePrivate(pkcs8KeySpec); + + // 对数据解密 + Cipher cipher = Cipher.getInstance(keyFactory.getAlgorithm()); + cipher.init(Cipher.DECRYPT_MODE, privateKey); + + return new String(cipher.doFinal(decryptBASE64(data)), ENCODING); + } catch (Exception e) { + throw DataXException.asDataXException( + FrameworkErrorCode.SECRET_ERROR, "rsa解密出错", e); + } + } + + /** + * 初始化密钥 for RSA ALGORITHM + * + * @return + * @throws Exception + */ + public static String[] initKey() throws Exception { + KeyPairGenerator keyPairGen = KeyPairGenerator + .getInstance(KEY_ALGORITHM_RSA); + keyPairGen.initialize(1024); + + KeyPair keyPair = keyPairGen.generateKeyPair(); + + // 公钥 + RSAPublicKey publicKey = (RSAPublicKey) keyPair.getPublic(); + + // 私钥 + RSAPrivateKey privateKey = (RSAPrivateKey) keyPair.getPrivate(); + + String[] publicAndPrivateKey = { + encryptBASE64(publicKey.getEncoded()), + encryptBASE64(privateKey.getEncoded())}; + + return publicAndPrivateKey; + } + + /** + * 加密 DESede
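+ * (Any non-empty string works as the key: build3DesKey below pads it with
+ * ASCII '0', or truncates it, to the 24 bytes DESede requires.)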
+ * 用密钥加密 + * + * @param data 裸的原始数据 + * @param key 加密的密钥 + * @return 结果也采用base64加密 + * @throws Exception + */ + public static String encrypt3DES(String data, String key) { + try { + // 生成密钥 + SecretKey desKey = new SecretKeySpec(build3DesKey(key), + KEY_ALGORITHM_3DES); + // 对数据加密 + Cipher cipher = Cipher.getInstance(CIPHER_ALGORITHM_3DES); + cipher.init(Cipher.ENCRYPT_MODE, desKey); + return encryptBASE64(cipher.doFinal(data.getBytes(ENCODING))); + } catch (Exception e) { + throw DataXException.asDataXException( + FrameworkErrorCode.SECRET_ERROR, "3重DES加密出错", e); + } + } + + /** + * 解密
+ * 用密钥解密 + * + * @param data 已经经过base64加密的密文 + * @param key 解密的密钥 + * @return + * @throws Exception + */ + public static String decrypt3DES(String data, String key) { + try { + // 生成密钥 + SecretKey desKey = new SecretKeySpec(build3DesKey(key), + KEY_ALGORITHM_3DES); + // 对数据解密 + Cipher cipher = Cipher.getInstance(CIPHER_ALGORITHM_3DES); + cipher.init(Cipher.DECRYPT_MODE, desKey); + return new String(cipher.doFinal(decryptBASE64(data)), ENCODING); + } catch (Exception e) { + throw DataXException.asDataXException( + FrameworkErrorCode.SECRET_ERROR, "rsa解密出错", e); + } + } + + /** + * 根据字符串生成密钥字节数组 + * + * @param keyStr 密钥字符串 + * @return key 符合DESede标准的24byte数组 + */ + private static byte[] build3DesKey(String keyStr) { + try { + // 声明一个24位的字节数组,默认里面都是0,warn: 字符串0(48)和数组默认值0不一样,统一字符串0(48) + byte[] key = "000000000000000000000000".getBytes(ENCODING); + byte[] temp = keyStr.getBytes(ENCODING); + if (key.length > temp.length) { + // 如果temp不够24位,则拷贝temp数组整个长度的内容到key数组中 + System.arraycopy(temp, 0, key, 0, temp.length); + } else { + // 如果temp大于24位,则拷贝temp数组24个长度的内容到key数组中 + System.arraycopy(temp, 0, key, 0, key.length); + } + return key; + } catch (Exception e) { + throw DataXException.asDataXException( + FrameworkErrorCode.SECRET_ERROR, "构建三重DES密匙出错", e); + } + } + + public static synchronized Properties getSecurityProperties() { + if (properties == null) { + InputStream secretStream = null; + try { + secretStream = new FileInputStream( + CoreConstant.DATAX_SECRET_PATH); + } catch (FileNotFoundException e) { + throw DataXException.asDataXException( + FrameworkErrorCode.SECRET_ERROR, + "DataX配置要求加解密,但无法找到密钥的配置文件"); + } + + properties = new Properties(); + try { + properties.load(secretStream); + secretStream.close(); + } catch (IOException e) { + throw DataXException.asDataXException( + FrameworkErrorCode.SECRET_ERROR, "读取加解密配置文件出错", e); + } + } + + return properties; + } + + + public static Configuration encryptSecretKey(Configuration configuration) { + String keyVersion = configuration + .getString(CoreConstant.DATAX_JOB_SETTING_KEYVERSION); + // 没有设置keyVersion,表示不用解密 + if (StringUtils.isBlank(keyVersion)) { + return configuration; + } + + Map> versionKeyMap = getPrivateKeyMap(); + + if (null == versionKeyMap.get(keyVersion)) { + throw DataXException.asDataXException( + FrameworkErrorCode.SECRET_ERROR, + String.format("DataX配置的密钥版本为[%s],但在系统中没有配置,任务密钥配置错误,不存在您配置的密钥版本", keyVersion)); + } + + String key = versionKeyMap.get(keyVersion).getRight(); + String method = versionKeyMap.get(keyVersion).getMiddle(); + // keyVersion要求的私钥没有配置 + if (StringUtils.isBlank(key)) { + throw DataXException.asDataXException( + FrameworkErrorCode.SECRET_ERROR, + String.format("DataX配置的密钥版本为[%s],但在系统中没有配置,可能是任务密钥配置错误,也可能是系统维护问题", keyVersion)); + } + + String tempEncrptedData = null; + for (String path : configuration.getSecretKeyPathSet()) { + tempEncrptedData = SecretUtil.encrypt(configuration.getString(path), key, method); + int lastPathIndex = path.lastIndexOf(".") + 1; + String lastPathKey = path.substring(lastPathIndex); + + String newPath = path.substring(0, lastPathIndex) + "*" + + lastPathKey; + configuration.set(newPath, tempEncrptedData); + configuration.remove(path); + } + + return configuration; + } + + public static Configuration decryptSecretKey(Configuration config) { + String keyVersion = config + .getString(CoreConstant.DATAX_JOB_SETTING_KEYVERSION); + // 没有设置keyVersion,表示不用解密 + if (StringUtils.isBlank(keyVersion)) { + return config; + } + + Map> versionKeyMap = getPrivateKeyMap(); + if (null == 
versionKeyMap.get(keyVersion)) { + throw DataXException.asDataXException( + FrameworkErrorCode.SECRET_ERROR, + String.format("DataX配置的密钥版本为[%s],但在系统中没有配置,任务密钥配置错误,不存在您配置的密钥版本", keyVersion)); + } + String decryptKey = versionKeyMap.get(keyVersion).getLeft(); + String method = versionKeyMap.get(keyVersion).getMiddle(); + // keyVersion要求的私钥没有配置 + if (StringUtils.isBlank(decryptKey)) { + throw DataXException.asDataXException( + FrameworkErrorCode.SECRET_ERROR, + String.format("DataX配置的密钥版本为[%s],但在系统中没有配置,可能是任务密钥配置错误,也可能是系统维护问题", keyVersion)); + } + + // 对包含*号key解密处理 + for (String key : config.getKeys()) { + int lastPathIndex = key.lastIndexOf(".") + 1; + String lastPathKey = key.substring(lastPathIndex); + if (lastPathKey.length() > 1 && lastPathKey.charAt(0) == '*' + && lastPathKey.charAt(1) != '*') { + Object value = config.get(key); + if (value instanceof String) { + String newKey = key.substring(0, lastPathIndex) + + lastPathKey.substring(1); + config.set(newKey, + SecretUtil.decrypt((String) value, decryptKey, method)); + config.addSecretKeyPath(newKey); + config.remove(key); + } + } + } + + return config; + } + + private static synchronized Map> getPrivateKeyMap() { + if (versionKeyMap == null) { + versionKeyMap = new HashMap>(); + Properties properties = SecretUtil.getSecurityProperties(); + + String[] serviceUsernames = new String[]{ + CoreConstant.LAST_SERVICE_USERNAME, + CoreConstant.CURRENT_SERVICE_USERNAME}; + String[] servicePasswords = new String[]{ + CoreConstant.LAST_SERVICE_PASSWORD, + CoreConstant.CURRENT_SERVICE_PASSWORD}; + + for (int i = 0; i < serviceUsernames.length; i++) { + String serviceUsername = properties + .getProperty(serviceUsernames[i]); + if (StringUtils.isNotBlank(serviceUsername)) { + String servicePassword = properties + .getProperty(servicePasswords[i]); + if (StringUtils.isNotBlank(servicePassword)) { + versionKeyMap.put(serviceUsername, ImmutableTriple.of( + servicePassword, SecretUtil.KEY_ALGORITHM_3DES, + servicePassword)); + } else { + throw DataXException.asDataXException( + FrameworkErrorCode.SECRET_ERROR, String.format( + "DataX配置要求加解密,但配置的密钥版本[%s]存在密钥为空的情况", + serviceUsername)); + } + } + } + + String[] keyVersions = new String[]{CoreConstant.LAST_KEYVERSION, + CoreConstant.CURRENT_KEYVERSION}; + String[] privateKeys = new String[]{CoreConstant.LAST_PRIVATEKEY, + CoreConstant.CURRENT_PRIVATEKEY}; + String[] publicKeys = new String[]{CoreConstant.LAST_PUBLICKEY, + CoreConstant.CURRENT_PUBLICKEY}; + for (int i = 0; i < keyVersions.length; i++) { + String keyVersion = properties.getProperty(keyVersions[i]); + if (StringUtils.isNotBlank(keyVersion)) { + String privateKey = properties.getProperty(privateKeys[i]); + String publicKey = properties.getProperty(publicKeys[i]); + if (StringUtils.isNotBlank(privateKey) + && StringUtils.isNotBlank(publicKey)) { + versionKeyMap.put(keyVersion, ImmutableTriple.of( + privateKey, SecretUtil.KEY_ALGORITHM_RSA, + publicKey)); + } else { + throw DataXException.asDataXException( + FrameworkErrorCode.SECRET_ERROR, String.format( + "DataX配置要求加解密,但配置的公私钥对存在为空的情况,版本[%s]", + keyVersion)); + } + } + } + } + if (versionKeyMap.size() <= 0) { + throw DataXException.asDataXException( + FrameworkErrorCode.SECRET_ERROR, "DataX配置要求加解密,但无法找到加解密配置"); + } + return versionKeyMap; + } +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/util/TransformerUtil.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/util/TransformerUtil.java new file mode 100644 
index 000000000..1b4696234 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/util/TransformerUtil.java @@ -0,0 +1,107 @@ +package com.alibaba.datax.core.util; + +import com.alibaba.datax.common.exception.DataXException; +import com.alibaba.datax.common.util.Configuration; +import com.alibaba.datax.core.transport.transformer.*; +import com.alibaba.datax.core.util.container.CoreConstant; +import org.apache.commons.lang3.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.ArrayList; +import java.util.List; + +/** + * no comments. + * Created by liqiang on 16/3/9. + */ +public class TransformerUtil { + + private static final Logger LOG = LoggerFactory.getLogger(TransformerUtil.class); + + public static List buildTransformerInfo(Configuration taskConfig) { + List tfConfigs = taskConfig.getListConfiguration(CoreConstant.JOB_TRANSFORMER); + if (tfConfigs == null || tfConfigs.size() == 0) { + return null; + } + + List result = new ArrayList(); + + + List functionNames = new ArrayList(); + + + for (Configuration configuration : tfConfigs) { + String functionName = configuration.getString("name"); + if (StringUtils.isEmpty(functionName)) { + throw DataXException.asDataXException(TransformerErrorCode.TRANSFORMER_CONFIGURATION_ERROR, "config=" + configuration.toJSON()); + } + + if (functionName.equals("dx_groovy") && functionNames.contains("dx_groovy")) { + throw DataXException.asDataXException(TransformerErrorCode.TRANSFORMER_CONFIGURATION_ERROR, "dx_groovy can be invoke once only."); + } + functionNames.add(functionName); + } + + /** + * 延迟load 第三方插件的function,并按需load + */ + LOG.info(String.format(" user config tranformers [%s], loading...", functionNames)); + TransformerRegistry.loadTransformerFromLocalStorage(functionNames); + + int i = 0; + + for (Configuration configuration : tfConfigs) { + String functionName = configuration.getString("name"); + TransformerInfo transformerInfo = TransformerRegistry.getTransformer(functionName); + if (transformerInfo == null) { + throw DataXException.asDataXException(TransformerErrorCode.TRANSFORMER_NOTFOUND_ERROR, "name=" + functionName); + } + + /** + * 具体的UDF对应一个paras + */ + TransformerExecutionParas transformerExecutionParas = new TransformerExecutionParas(); + /** + * groovy function仅仅只有code + */ + if (!functionName.equals("dx_groovy") && !functionName.equals("dx_fackGroovy")) { + Integer columnIndex = configuration.getInt(CoreConstant.TRANSFORMER_PARAMETER_COLUMNINDEX); + + if (columnIndex == null) { + throw DataXException.asDataXException(TransformerErrorCode.TRANSFORMER_ILLEGAL_PARAMETER, "columnIndex must be set by UDF:name=" + functionName); + } + + transformerExecutionParas.setColumnIndex(columnIndex); + List paras = configuration.getList(CoreConstant.TRANSFORMER_PARAMETER_PARAS, String.class); + if (paras != null && paras.size() > 0) { + transformerExecutionParas.setParas(paras.toArray(new String[0])); + } + } else { + String code = configuration.getString(CoreConstant.TRANSFORMER_PARAMETER_CODE); + if (StringUtils.isEmpty(code)) { + throw DataXException.asDataXException(TransformerErrorCode.TRANSFORMER_ILLEGAL_PARAMETER, "groovy code must be set by UDF:name=" + functionName); + } + transformerExecutionParas.setCode(code); + + List extraPackage = configuration.getList(CoreConstant.TRANSFORMER_PARAMETER_EXTRAPACKAGE, String.class); + if (extraPackage != null && extraPackage.size() > 0) { + transformerExecutionParas.setExtraPackage(extraPackage); + } + } + 
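+ // A typical job.content[0].transformer entry that reaches this point (shape
+ // follows the keys read above; the values are illustrative):
+ //   {"name": "dx_substr", "parameter": {"columnIndex": 1, "paras": ["0", "5"]}}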
transformerExecutionParas.settContext(configuration.getMap(CoreConstant.TRANSFORMER_PARAMETER_CONTEXT)); + + TransformerExecution transformerExecution = new TransformerExecution(transformerInfo, transformerExecutionParas); + + transformerExecution.genFinalParas(); + result.add(transformerExecution); + i++; + LOG.info(String.format(" %s of transformer init success. name=%s, isNative=%s parameter = %s" + , i, transformerInfo.getTransformer().getTransformerName() + , transformerInfo.isNative(), configuration.getConfiguration("parameter"))); + } + + return result; + + } +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/util/container/ClassLoaderSwapper.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/util/container/ClassLoaderSwapper.java new file mode 100644 index 000000000..25d90a749 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/util/container/ClassLoaderSwapper.java @@ -0,0 +1,42 @@ +package com.alibaba.datax.core.util.container; + +/** + * Created by jingxing on 14-8-29. + *
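+ * Usage sketch (pluginClassLoader is illustrative):
+ *   ClassLoaderSwapper swapper = ClassLoaderSwapper.newCurrentThreadClassLoaderSwapper();
+ *   swapper.setCurrentThreadClassLoader(pluginClassLoader);
+ *   try { ... } finally { swapper.restoreCurrentThreadClassLoader(); }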
+ * 为避免jar冲突,比如hbase可能有多个版本的读写依赖jar包,JobContainer和TaskGroupContainer + * 就需要脱离当前classLoader去加载这些jar包,执行完成后,又退回到原来classLoader上继续执行接下来的代码 + */ +public final class ClassLoaderSwapper { + private ClassLoader storeClassLoader = null; + + private ClassLoaderSwapper() { + } + + public static ClassLoaderSwapper newCurrentThreadClassLoaderSwapper() { + return new ClassLoaderSwapper(); + } + + /** + * 保存当前classLoader,并将当前线程的classLoader设置为所给classLoader + * + * @param + * @return + */ + public ClassLoader setCurrentThreadClassLoader(ClassLoader classLoader) { + this.storeClassLoader = Thread.currentThread().getContextClassLoader(); + Thread.currentThread().setContextClassLoader(classLoader); + return this.storeClassLoader; + } + + /** + * 将当前线程的类加载器设置为保存的类加载 + * + * @return + */ + public ClassLoader restoreCurrentThreadClassLoader() { + ClassLoader classLoader = Thread.currentThread() + .getContextClassLoader(); + Thread.currentThread().setContextClassLoader(this.storeClassLoader); + return classLoader; + } +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/util/container/CoreConstant.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/util/container/CoreConstant.java new file mode 100644 index 000000000..1ce02b4d1 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/util/container/CoreConstant.java @@ -0,0 +1,236 @@ +package com.alibaba.datax.core.util.container; + +import org.apache.commons.lang.StringUtils; + +import java.io.File; + +/** + * Created by jingxing on 14-8-25. + */ +public class CoreConstant { + // --------------------------- 全局使用的变量(最好按照逻辑顺序,调整下成员变量顺序) + // -------------------------------- + + public static final String DATAX_CORE_CONTAINER_TASKGROUP_CHANNEL = "core.container.taskGroup.channel"; + + public static final String DATAX_CORE_CONTAINER_MODEL = "core.container.model"; + + public static final String DATAX_CORE_CONTAINER_JOB_ID = "core.container.job.id"; + + public static final String DATAX_CORE_CONTAINER_TRACE_ENABLE = "core.container.trace.enable"; + + public static final String DATAX_CORE_CONTAINER_JOB_MODE = "core.container.job.mode"; + + public static final String DATAX_CORE_CONTAINER_JOB_REPORTINTERVAL = "core.container.job.reportInterval"; + + public static final String DATAX_CORE_CONTAINER_JOB_SLEEPINTERVAL = "core.container.job.sleepInterval"; + + public static final String DATAX_CORE_CONTAINER_TASKGROUP_ID = "core.container.taskGroup.id"; + + public static final String DATAX_CORE_CONTAINER_TASKGROUP_SLEEPINTERVAL = "core.container.taskGroup.sleepInterval"; + + public static final String DATAX_CORE_CONTAINER_TASKGROUP_REPORTINTERVAL = "core.container.taskGroup.reportInterval"; + + public static final String DATAX_CORE_CONTAINER_TASK_FAILOVER_MAXRETRYTIMES = "core.container.task.failOver.maxRetryTimes"; + + public static final String DATAX_CORE_CONTAINER_TASK_FAILOVER_RETRYINTERVALINMSEC = "core.container.task.failOver.retryIntervalInMsec"; + + public static final String DATAX_CORE_CONTAINER_TASK_FAILOVER_MAXWAITINMSEC = "core.container.task.failOver.maxWaitInMsec"; + + public static final String DATAX_CORE_DATAXSERVER_ADDRESS = "core.dataXServer.address"; + + public static final String DATAX_CORE_DATAXSERVER_PROTOCOL = "core.dataXServer.protocol"; + + public static final String DATAX_CORE_DATAXSERVER_ENDPOINT_REPORT = "core.dataXServer.endpoint.report"; + + public static final String DATAX_CORE_DATAXSERVER_ENDPOINT_REPORT_STATE = 
"core.dataXServer.endpoint.reportState"; + + public static final String DATAX_CORE_DSC_ADDRESS = "core.dsc.address"; + + public static final String DATAX_CORE_DATAXSERVER_TIMEOUT = "core.dataXServer.timeout"; + + public static final String DATAX_CORE_REPORT_DATAX_LOG = "core.dataXServer.reportDataxLog"; + + public static final String DATAX_CORE_REPORT_DATAX_PERFLOG = "core.dataXServer.reportPerfLog"; + + + public static final String DATAX_CORE_TRANSPORT_CHANNEL_CAPACITY = "core.transport.channel.capacity"; + + public static final String DATAX_CORE_TRANSPORT_CHANNEL_CAPACITY_BYTE = "core.transport.channel.byteCapacity"; + + public static final String DATAX_CORE_TRANSPORT_CHANNEL_SPEED_BYTE = "core.transport.channel.speed.byte"; + + public static final String DATAX_CORE_TRANSPORT_CHANNEL_SPEED_RECORD = "core.transport.channel.speed.record"; + + public static final String DATAX_CORE_TRANSPORT_CHANNEL_FLOWCONTROLINTERVAL = "core.transport.channel.flowControlInterval"; + + public static final String DATAX_CORE_TRANSPORT_RECORD_CHANNEL_CLASS = "core.transport.record.channel.class"; + + public static final String DATAX_CORE_TRANSPORT_STREAM_CHANNEL_CLASS = "core.transport.stream.channel.class"; + + public static final String DATAX_CORE_TRANSPORT_STREAM_CHANNEL_BLOCKSIZE = "core.transport.stream.channel.blockSize"; + + public static final String DATAX_CORE_TRANSPORT_RECORD_EXCHANGER_BUFFERSIZE = "core.transport.record.exchanger.bufferSize"; + + public static final String DATAX_CORE_TRANSPORT_RECORD_CLASS = "core.transport.record.class"; + + public static final String DATAX_CORE_TRANSPORT_TYPE = "core.transport.type"; + + public static final String DATAX_CORE_STATISTICS_COLLECTOR_PLUGIN_TASKCLASS = "core.statistics.collector.plugin.taskClass"; + + public static final String DATAX_CORE_STATISTICS_COLLECTOR_PLUGIN_MAXDIRTYNUM = "core.statistics.collector.plugin.maxDirtyNumber"; + + public static final String DATAX_CORE_STATISTICS_REPORTER_PLUGIN_CLASS = "core.statistics.reporter.plugin.class"; + + public static final String DATAX_CORE_PROCESSOR_LOADER_PLUGIN_CLASS = "core.processor.loader.plugin.class"; + + public static final String DATAX_CORE_PROCESSOR_LODAER_PLUGIN_SOURCEPATH = "core.processor.loader.plugin.sourcePath"; + + public static final String DATAX_CORE_PROCESSOR_LOADER_PLUGIN_PACKAGE = "core.processor.loader.plugin.package"; + + + public static final String DATAX_JOB_CONTENT_READER_NAME = "job.content[0].reader.name"; + + public static final String DATAX_JOB_CONTENT_READER_PARAMETER = "job.content[0].reader.parameter"; + + public static final String DATAX_JOB_CONTENT_WRITER_ARRAY_NAME= "job.content[0].writer[%d].name"; + + public static final String DATAX_JOB_CONTENT_WRITER_NAME = "job.content[0].writer.name"; + + public static final String DATAX_JOB_CONTENT_WRITER_PARAMETER = "job.content[0].writer.parameter"; + + public static final String DATAX_JOB_CONTENT_WRITER_ARRAY_PARAMETER = "job.content[0].writer[%d].parameter"; + + public static final String DATAX_JOB_JOBINFO = "job.jobInfo"; + + public static final String DATAX_JOB_CONTENT = "job.content"; + + public static final String DATAX_JOB_CONTENT_WRITER = "job.content[0].writer"; + + public static final String DATAX_JOB_CONTENT_READER = "job.content[0].reader"; + + public static final String DATAX_JOB_CONTENT_TRANSFORMER = "job.content[0].transformer"; + + public static final String DATAX_JOB_SETTING_SYNCMETA = "job.setting.syncMeta"; + + public static final String DATAX_JOB_SETTING_TRANSPORT_TYPE = "job.setting.transport.type"; + + 
public static final String DATAX_JOB_SETTING_KEYVERSION = "job.setting.keyVersion";
+
+    public static final String DATAX_JOB_SETTING_SPEED_BYTE = "job.setting.speed.byte";
+
+    public static final String DATAX_JOB_SETTING_SPEED_RECORD = "job.setting.speed.record";
+
+    public static final String DATAX_JOB_SETTING_SPEED_CHANNEL = "job.setting.speed.channel";
+
+    public static final String DATAX_JOB_SETTING_ERRORLIMIT = "job.setting.errorLimit";
+
+    public static final String DATAX_JOB_SETTING_ERRORLIMIT_RECORD = "job.setting.errorLimit.record";
+
+    public static final String DATAX_JOB_SETTING_ERRORLIMIT_PERCENT = "job.setting.errorLimit.percentage";
+
+    public static final String DATAX_JOB_SETTING_DRYRUN = "job.setting.dryRun";
+
+    public static final String DATAX_JOB_PREHANDLER_PLUGINTYPE = "job.preHandler.pluginType";
+
+    public static final String DATAX_JOB_PREHANDLER_PLUGINNAME = "job.preHandler.pluginName";
+
+    public static final String DATAX_JOB_POSTHANDLER_PLUGINTYPE = "job.postHandler.pluginType";
+
+    public static final String DATAX_JOB_POSTHANDLER_PLUGINNAME = "job.postHandler.pluginName";
+    // ----------------------------- Locally used variables
+    public static final String JOB_WRITER = "writer";
+
+    public static final String JOB_READER = "reader";
+
+    public static final String JOB_TRANSFORMER = "transformer";
+
+    public static final String JOB_READER_NAME = "reader.name";
+
+    public static final String JOB_READER_PARAMETER = "reader.parameter";
+
+    public static final String JOB_WRITER_PARAMETER = "writer[%d].parameter";
+
+    public static final String TRANSFORMER_PARAMETER_COLUMNINDEX = "parameter.columnIndex";
+    public static final String TRANSFORMER_PARAMETER_PARAS = "parameter.paras";
+    public static final String TRANSFORMER_PARAMETER_CONTEXT = "parameter.context";
+    public static final String TRANSFORMER_PARAMETER_CODE = "parameter.code";
+    public static final String TRANSFORMER_PARAMETER_EXTRAPACKAGE = "parameter.extraPackage";
+
+    public static final String TASK_ID = "taskId";
+
+    public static final String TASK_NAME = "name";
+
+    public static final String TASK_PARAMETER = "parameter";
+
+    public static final String TASK_PROCESSOR = "processor";
+
+    // ----------------------------- Security module variables ------------------
+
+    public static final String LAST_KEYVERSION = "last.keyVersion";
+
+    public static final String LAST_PUBLICKEY = "last.publicKey";
+
+    public static final String LAST_PRIVATEKEY = "last.privateKey";
+
+    public static final String LAST_SERVICE_USERNAME = "last.service.username";
+
+    public static final String LAST_SERVICE_PASSWORD = "last.service.password";
+
+    public static final String CURRENT_KEYVERSION = "current.keyVersion";
+
+    public static final String CURRENT_PUBLICKEY = "current.publicKey";
+
+    public static final String CURRENT_PRIVATEKEY = "current.privateKey";
+
+    public static final String CURRENT_SERVICE_USERNAME = "current.service.username";
+
+    public static final String CURRENT_SERVICE_PASSWORD = "current.service.password";
+
+    // ----------------------------- Environment variables ---------------------------------
+
+    public static final String DATAX_HOME = System.getProperty("datax.home");
+
+    public static final String DATAX_CONF_PATH = StringUtils.join(new String[] {
+            DATAX_HOME, "conf", "core.json"}, File.separator);
+
+    public static final String DATAX_CONF_LOG_PATH = StringUtils.join(new String[] {
+            DATAX_HOME, "conf", "logback-server.xml"}, File.separator);
+
+    public static final String DATAX_SECRET_PATH = StringUtils.join(new String[] {
+            DATAX_HOME, "conf",
".secret.properties" }, File.separator); + + public static final String DATAX_LDAP_PATH = StringUtils.join(new String[] { + DATAX_HOME, "conf", "ldap.properties" }, File.separator); + + public static final String DATAX_KERBEROS_PATH = StringUtils.join(new String[] { + DATAX_HOME, "conf", "kerberos.properties"}, File.separator); + + public static final String DATAX_UM_PATH = StringUtils.join(new String[] { + DATAX_HOME, "conf", "um-login.properties"}, File.separator); + + public static final String DATAX_PLUGIN_HOME = StringUtils.join(new String[] { + DATAX_HOME, "plugin" }, File.separator); + + public static final String DATAX_PLUGIN_READER_HOME = StringUtils.join( + new String[] { DATAX_HOME, "plugin", "reader" }, File.separator); + + public static final String DATAX_PLUGIN_WRITER_HOME = StringUtils.join( + new String[] { DATAX_HOME, "plugin", "writer" }, File.separator); + + public static final String DATAX_BIN_HOME = StringUtils.join(new String[] { + DATAX_HOME, "bin" }, File.separator); + + public static final String DATAX_JOB_HOME = StringUtils.join(new String[] { + DATAX_HOME, "job" }, File.separator); + + public static final String DATAX_STORAGE_TRANSFORMER_HOME = StringUtils.join( + new String[] { DATAX_HOME, "local_storage", "transformer" }, File.separator); + + public static final String DATAX_STORAGE_PLUGIN_READ_HOME = StringUtils.join( + new String[] { DATAX_HOME, "local_storage", "plugin","reader" }, File.separator); + + public static final String DATAX_STORAGE_PLUGIN_WRITER_HOME = StringUtils.join( + new String[] { DATAX_HOME, "local_storage", "plugin","writer" }, File.separator); + +} \ No newline at end of file diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/util/container/JarLoader.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/util/container/JarLoader.java new file mode 100644 index 000000000..a23af0d49 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/util/container/JarLoader.java @@ -0,0 +1,142 @@ +package com.alibaba.datax.core.util.container; + +import com.alibaba.datax.common.exception.DataXException; +import com.alibaba.datax.core.util.FrameworkErrorCode; +import org.apache.commons.lang.StringUtils; +import org.apache.commons.lang.Validate; + +import java.io.File; +import java.io.FileFilter; +import java.net.URL; +import java.net.URLClassLoader; +import java.util.ArrayList; +import java.util.List; + +/** + * 提供Jar隔离的加载机制,会把传入的路径、及其子路径、以及路径中的jar文件加入到class path。 + */ +public class JarLoader extends URLClassLoader { + public JarLoader(String[] paths) { + this(paths, JarLoader.class.getClassLoader()); + } + + public JarLoader(String[] paths, ClassLoader parent) { + super(getURLs(paths), parent); + } + + private static URL[] getURLs(String[] paths) { + Validate.isTrue(null != paths && 0 != paths.length, + "jar包路径不能为空."); + + List dirs = new ArrayList(); + for (String path : paths) { + dirs.add(path); + JarLoader.collectDirs(path, dirs); + } + + List urls = new ArrayList(); + for (String path : dirs) { + urls.addAll(doGetURLs(path)); + } + return urls.toArray(new URL[0]); + } + + private static void collectDirs(String path, List collector) { + if (null == path || StringUtils.isBlank(path)) { + return; + } + + File current = new File(path); + if (!current.exists() || !current.isDirectory()) { + return; + } + + if(null != current.listFiles()) { + for (File child : current.listFiles()) { + if (!child.isDirectory()) { + continue; + } + + 
+                collector.add(child.getAbsolutePath());
+                collectDirs(child.getAbsolutePath(), collector);
+            }
+        }
+    }
+
+    private static List<URL> doGetURLs(final String path) {
+        Validate.isTrue(!StringUtils.isBlank(path), "The jar path must not be empty.");
+
+        File jarPath = new File(path);
+
+        Validate.isTrue(jarPath.exists() && jarPath.isDirectory(),
+                "The jar path must exist and be a directory.");
+
+        /* set filter */
+        FileFilter jarFilter = new FileFilter() {
+            @Override
+            public boolean accept(File pathname) {
+                return pathname.getName().endsWith(".jar");
+            }
+        };
+
+        /* iterate all jar */
+        File[] allJars = new File(path).listFiles(jarFilter);
+        List<URL> jarURLs = new ArrayList<>(allJars.length);
+
+        for (int i = 0; i < allJars.length; i++) {
+            try {
+                jarURLs.add(allJars[i].toURI().toURL());
+            } catch (Exception e) {
+                throw DataXException.asDataXException(
+                        FrameworkErrorCode.PLUGIN_INIT_ERROR,
+                        "Failed to load jar files.", e);
+            }
+        }
+
+        return jarURLs;
+    }
+
+//    /**
+//     * change the order to load class
+//     * @param name
+//     * @param resolve
+//     * @return
+//     * @throws ClassNotFoundException
+//     */
+//    @Override
+//    public Class<?> loadClass(String name, boolean resolve) throws ClassNotFoundException {
+//        synchronized (getClassLoadingLock(name)){
+//            //First, check if the class has already been loaded
+//            Class<?> c = findLoadedClass(name);
+//            if(c == null){
+//                long t0 = System.nanoTime();
+//                try {
+//                    //invoke findClass in this class
+//                    c = findClass(name);
+//                }catch(ClassNotFoundException e){
+//                    // ClassNotFoundException thrown if class not found
+//                }
+//                if(c == null){
+//                    return super.loadClass(name, resolve);
+//                }
+//                //For compatibility with higher versions > java 1.8.0_141
+////                sun.misc.PerfCounter.getFindClasses().addElapsedTimeFrom(t0);
+////                sun.misc.PerfCounter.getFindClasses().increment();
+//            }
+//            if(resolve){
+//                resolveClass(c);
+//            }
+//            return c;
+//        }
+//    }
+
+    /**
+     * Define a class directly from its bytecode.
+     * @param name expected binary name of the class
+     * @param bytes the class file bytes
+     * @return the defined Class
+     */
+    public Class<?> loadClass(String name, byte[] bytes){
+        return this.defineClass(name, bytes, 0, bytes.length);
+    }
+}
diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/util/container/LoadUtil.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/util/container/LoadUtil.java
new file mode 100644
index 000000000..30e926c38
--- /dev/null
+++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/util/container/LoadUtil.java
@@ -0,0 +1,202 @@
+package com.alibaba.datax.core.util.container;
+
+import com.alibaba.datax.common.constant.PluginType;
+import com.alibaba.datax.common.exception.DataXException;
+import com.alibaba.datax.common.plugin.AbstractJobPlugin;
+import com.alibaba.datax.common.plugin.AbstractPlugin;
+import com.alibaba.datax.common.plugin.AbstractTaskPlugin;
+import com.alibaba.datax.common.util.Configuration;
+import com.alibaba.datax.core.taskgroup.runner.AbstractRunner;
+import com.alibaba.datax.core.taskgroup.runner.ReaderRunner;
+import com.alibaba.datax.core.taskgroup.runner.WriterRunner;
+import com.alibaba.datax.core.util.FrameworkErrorCode;
+import org.apache.commons.lang3.StringUtils;
+
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * Created by jingxing on 14-8-24.
+ *

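+ * A typical lookup sequence (illustrative sketch; the plugin name "mysqlreader"
+ * and the pre-built pluginConfigs are assumed):
+ * <pre>
+ *   LoadUtil.bind(pluginConfigs);
+ *   AbstractJobPlugin jobReader = LoadUtil.loadJobPlugin(PluginType.READER, "mysqlreader");
+ *   AbstractTaskPlugin taskReader = LoadUtil.loadTaskPlugin(PluginType.READER, "mysqlreader");
+ * </pre>
+ * Classes are resolved through per-plugin JarLoaders cached in jarLoaderCenter.
+ *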
+ * Plugin loader. There are roughly three plugin types: reader, transformer
+ * (not implemented yet) and writer; at runtime, readers and writers may come in
+ * either a Job or a Task flavor (a different class is loaded for each).
+ */
+public class LoadUtil {
+    private static final String pluginTypeNameFormat = "plugin.%s.%s";
+
+    private LoadUtil() {
+    }
+
+    private enum ContainerType {
+        Job("Job"), Task("Task");
+        private String type;
+
+        private ContainerType(String type) {
+            this.type = type;
+        }
+
+        public String value() {
+            return type;
+        }
+    }
+
+    /**
+     * All plugin configurations are kept in pluginRegisterCenter; to tell
+     * readers, transformers and writers apart, as well as the concrete
+     * pluginName, entries are keyed by pluginType.pluginName in this map.
+     */
+    private static Configuration pluginRegisterCenter;
+
+    /**
+     * Cache of jarLoaders.
+     */
+    private static Map<String, JarLoader> jarLoaderCenter = new HashMap<>();
+
+    /**
+     * Bind pluginConfigs so that plugins can fetch them later.
+     *
+     * @param pluginConfigs the plugin configurations to bind
+     */
+    public static void bind(Configuration pluginConfigs) {
+        pluginRegisterCenter = pluginConfigs;
+    }
+
+    private static String generatePluginKey(PluginType pluginType,
+                                            String pluginName) {
+        return String.format(pluginTypeNameFormat, pluginType.toString(),
+                pluginName);
+    }
+
+    private static Configuration getPluginConf(PluginType pluginType,
+                                               String pluginName) {
+        Configuration pluginConf = pluginRegisterCenter
+                .getConfiguration(generatePluginKey(pluginType, pluginName));
+
+        if (null == pluginConf) {
+            throw DataXException.asDataXException(
+                    FrameworkErrorCode.PLUGIN_INSTALL_ERROR,
+                    String.format("DataX cannot find the configuration of plugin [%s].",
+                            pluginName));
+        }
+
+        return pluginConf;
+    }
+
+    /**
+     * Load a Job-side plugin; both readers and writers may be loaded this way.
+     *
+     * @param pluginType plugin type (reader/writer)
+     * @param pluginName plugin name
+     * @return the instantiated Job plugin
+     */
+    public static AbstractJobPlugin loadJobPlugin(PluginType pluginType,
+                                                  String pluginName) {
+        Class<? extends AbstractPlugin> clazz = LoadUtil.loadPluginClass(
+                pluginType, pluginName, ContainerType.Job);
+
+        try {
+            AbstractJobPlugin jobPlugin = (AbstractJobPlugin) clazz
+                    .newInstance();
+            jobPlugin.setPluginConf(getPluginConf(pluginType, pluginName));
+            return jobPlugin;
+        } catch (Exception e) {
+            throw DataXException.asDataXException(
+                    FrameworkErrorCode.RUNTIME_ERROR,
+                    String.format("DataX failed to instantiate the Job side of plugin [%s].",
+                            pluginName), e);
+        }
+    }
+
+    /**
+     * Load a Task-side plugin; both readers and writers may be loaded this way.
+     *
+     * @param pluginType plugin type (reader/writer)
+     * @param pluginName plugin name
+     * @return the instantiated Task plugin
+     */
+    public static AbstractTaskPlugin loadTaskPlugin(PluginType pluginType,
+                                                    String pluginName) {
+        Class<? extends AbstractPlugin> clazz = LoadUtil.loadPluginClass(
+                pluginType, pluginName, ContainerType.Task);
+
+        try {
+            AbstractTaskPlugin taskPlugin = (AbstractTaskPlugin) clazz
+                    .newInstance();
+            taskPlugin.setPluginConf(getPluginConf(pluginType, pluginName));
+            return taskPlugin;
+        } catch (Exception e) {
+            throw DataXException.asDataXException(FrameworkErrorCode.RUNTIME_ERROR,
+                    String.format("DataX failed to instantiate the Task side of plugin [%s].",
+                            pluginName), e);
+        }
+    }
+
+    /**
+     * Load the matching runner for the given plugin type and name.
+     *
+     * @param pluginType plugin type (reader/writer)
+     * @param pluginName plugin name
+     * @return a ReaderRunner or WriterRunner wrapping the Task plugin
+     */
+    public static AbstractRunner loadPluginRunner(PluginType pluginType, String pluginName) {
+        AbstractTaskPlugin taskPlugin = LoadUtil.loadTaskPlugin(pluginType,
+                pluginName);
+
+        switch (pluginType) {
+            case READER:
+                return new ReaderRunner(taskPlugin);
+            case WRITER:
+                return new WriterRunner(taskPlugin);
+            default:
+                throw DataXException.asDataXException(
+                        FrameworkErrorCode.RUNTIME_ERROR,
+                        String.format("The type of plugin [%s] must be [reader] or [writer]!",
+                                pluginName));
+        }
+    }
+
+    /**
+     * Reflectively resolve the concrete plugin class.
+     *
+     * @param pluginType plugin type (reader/writer)
+     * @param pluginName plugin name
+     * @param pluginRunType Job or Task flavor
+     * @return the plugin class loaded through the plugin's JarLoader
+     */
+    @SuppressWarnings("unchecked")
+    private static synchronized Class<? extends AbstractPlugin> loadPluginClass(
+            PluginType pluginType,
String pluginName,
+            ContainerType pluginRunType) {
+        Configuration pluginConf = getPluginConf(pluginType, pluginName);
+        JarLoader jarLoader = LoadUtil.getJarLoader(pluginType, pluginName);
+        try {
+            return (Class<? extends AbstractPlugin>) jarLoader
+                    .loadClass(pluginConf.getString("class") + "$"
+                            + pluginRunType.value());
+        } catch (Exception e) {
+            throw DataXException.asDataXException(FrameworkErrorCode.RUNTIME_ERROR, e);
+        }
+    }
+
+    public static synchronized JarLoader getJarLoader(PluginType pluginType,
+                                                      String pluginName) {
+        Configuration pluginConf = getPluginConf(pluginType, pluginName);
+
+        JarLoader jarLoader = jarLoaderCenter.get(generatePluginKey(pluginType,
+                pluginName));
+        if (null == jarLoader) {
+            String pluginPath = pluginConf.getString("path");
+            if (StringUtils.isBlank(pluginPath)) {
+                throw DataXException.asDataXException(
+                        FrameworkErrorCode.RUNTIME_ERROR,
+                        String.format(
+                                "Illegal path for %s plugin [%s]!",
+                                pluginType, pluginName));
+            }
+            jarLoader = new JarLoader(new String[]{pluginPath});
+            jarLoaderCenter.put(generatePluginKey(pluginType, pluginName),
+                    jarLoader);
+        }
+
+        return jarLoader;
+    }
+}
diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/dataxservice/face/domain/enums/EnumStrVal.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/dataxservice/face/domain/enums/EnumStrVal.java
new file mode 100644
index 000000000..d23b7ec26
--- /dev/null
+++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/dataxservice/face/domain/enums/EnumStrVal.java
@@ -0,0 +1,5 @@
+package com.alibaba.datax.dataxservice.face.domain.enums;
+
+public interface EnumStrVal {
+    public String value();
+}
diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/dataxservice/face/domain/enums/EnumVal.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/dataxservice/face/domain/enums/EnumVal.java
new file mode 100644
index 000000000..ad4af0bc0
--- /dev/null
+++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/dataxservice/face/domain/enums/EnumVal.java
@@ -0,0 +1,5 @@
+package com.alibaba.datax.dataxservice.face.domain.enums;
+
+public interface EnumVal {
+    public int value();
+}
\ No newline at end of file
diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/dataxservice/face/domain/enums/ExecuteMode.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/dataxservice/face/domain/enums/ExecuteMode.java
new file mode 100644
index 000000000..924379646
--- /dev/null
+++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/dataxservice/face/domain/enums/ExecuteMode.java
@@ -0,0 +1,49 @@
+package com.alibaba.datax.dataxservice.face.domain.enums;
+
+public enum ExecuteMode implements EnumStrVal {
+
+    STANDALONE("standalone"),
+    LOCAL("local"),
+    DISTRIBUTE("distribute");
+
+    String value;
+
+    ExecuteMode(String value) {
+        this.value = value;
+    }
+
+    @Override
+    public String value() {
+        return value;
+    }
+
+    public String getValue() {
+        return this.value;
+    }
+
+    public static boolean isLocal(String mode) {
+        return equalsIgnoreCase(LOCAL.getValue(), mode);
+    }
+
+    public static boolean isDistribute(String mode) {
+        return equalsIgnoreCase(DISTRIBUTE.getValue(), mode);
+    }
+
+    public static ExecuteMode toExecuteMode(String modeName) {
+        for (ExecuteMode mode : ExecuteMode.values()) {
+            if (mode.value().equals(modeName)) {
+                return mode;
+            }
+        }
+        throw new
RuntimeException("no such mode :" + modeName); + } + + private static boolean equalsIgnoreCase(String str1, String str2) { + return str1 == null ? str2 == null : str1.equalsIgnoreCase(str2); + } + + @Override + public String toString() { + return this.value; + } +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/dataxservice/face/domain/enums/State.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/dataxservice/face/domain/enums/State.java new file mode 100644 index 000000000..657fe5fc3 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/dataxservice/face/domain/enums/State.java @@ -0,0 +1,35 @@ +package com.alibaba.datax.dataxservice.face.domain.enums; + +public enum State implements EnumVal { + + SUBMITTING(10), + WAITING(20), + RUNNING(30), + KILLING(40), + KILLED(50), + FAILED(60), + SUCCEEDED(70); + + + /* 一定会被初始化的 */ + int value; + + State(int value) { + this.value = value; + } + + @Override + public int value() { + return value; + } + + + public boolean isFinished() { + return this == KILLED || this == FAILED || this == SUCCEEDED; + } + + public boolean isRunning() { + return !isFinished(); + } + +} \ No newline at end of file diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/reader/CommonRdbmsReader.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/reader/CommonRdbmsReader.java new file mode 100644 index 000000000..d6d037507 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/reader/CommonRdbmsReader.java @@ -0,0 +1,359 @@ +package com.alibaba.datax.plugin.rdbms.reader; + +import com.alibaba.datax.common.element.*; +import com.alibaba.datax.common.exception.DataXException; +import com.alibaba.datax.common.plugin.RecordSender; +import com.alibaba.datax.common.plugin.TaskPluginCollector; +import com.alibaba.datax.common.statistics.PerfRecord; +import com.alibaba.datax.common.statistics.PerfTrace; +import com.alibaba.datax.common.util.Configuration; +import com.alibaba.datax.plugin.rdbms.reader.util.OriginalConfPretreatmentUtil; +import com.alibaba.datax.plugin.rdbms.reader.util.PreCheckTask; +import com.alibaba.datax.plugin.rdbms.reader.util.ReaderSplitUtil; +import com.alibaba.datax.plugin.rdbms.reader.util.SingleTableSplitUtil; +import com.alibaba.datax.plugin.rdbms.util.*; +import com.google.common.collect.Lists; +import com.webank.wedatasphere.exchangis.datax.common.CryptoUtils; +import com.webank.wedatasphere.exchangis.datax.util.Json; +import org.apache.commons.lang3.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.sql.*; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.Future; + +import static com.alibaba.datax.plugin.rdbms.util.Constant.DEFAULT_PROXY_SOCKS_HOST; +import static com.alibaba.datax.plugin.rdbms.util.Constant.DEFAULT_PROXY_SOCKS_PORT; + +public class CommonRdbmsReader { + + public static class Job { + private static final Logger LOG = LoggerFactory + .getLogger(Job.class); + + public Job(DataBaseType dataBaseType) { + OriginalConfPretreatmentUtil.DATABASE_TYPE = dataBaseType; + SingleTableSplitUtil.DATABASE_TYPE = dataBaseType; + } + + public void init(Configuration 
originalConfig) {
+
+            OriginalConfPretreatmentUtil.doPretreatment(originalConfig);
+
+            LOG.debug("After job init(), job config now is:[\n{}\n]",
+                    originalConfig.toJSON());
+        }
+
+        public void preCheck(Configuration originalConfig, DataBaseType dataBaseType) {
+            /* Check that every table is readable, and that the querySql and split key settings are correct */
+            Configuration queryConf = ReaderSplitUtil.doPreCheckSplit(originalConfig);
+            String splitPK = queryConf.getString(Key.SPLIT_PK);
+            List<Object> connList = queryConf.getList(Constant.CONN_MARK, Object.class);
+            String username = queryConf.getString(Key.USERNAME);
+            String password = queryConf.getString(Key.PASSWORD);
+            String proxyHost = queryConf.getString(Key.PROXY_HOST, DEFAULT_PROXY_SOCKS_HOST);
+            int proxyPort = queryConf.getInt(Key.PROXY_PORT, DEFAULT_PROXY_SOCKS_PORT);
+            ExecutorService exec;
+            if (connList.size() < 10) {
+                exec = Executors.newFixedThreadPool(connList.size());
+            } else {
+                exec = Executors.newFixedThreadPool(10);
+            }
+            Collection<PreCheckTask> taskList = new ArrayList<>();
+            for (int i = 0, len = connList.size(); i < len; i++) {
+                Configuration connConf = Configuration.from(Json.toJson(connList.get(i), null));
+                PreCheckTask t = new PreCheckTask(username, password, proxyHost, proxyPort, connConf, dataBaseType, splitPK);
+                taskList.add(t);
+            }
+            List<Future<Boolean>> results = Lists.newArrayList();
+            try {
+                results = exec.invokeAll(taskList);
+            } catch (InterruptedException e) {
+                Thread.currentThread().interrupt();
+            }
+
+            for (Future<Boolean> result : results) {
+                try {
+                    result.get();
+                } catch (ExecutionException e) {
+                    DataXException de = (DataXException) e.getCause();
+                    throw de;
+                } catch (InterruptedException e) {
+                    Thread.currentThread().interrupt();
+                }
+            }
+            exec.shutdownNow();
+        }
+
+
+        public List<Configuration> split(Configuration originalConfig,
+                                         int adviceNumber) {
+            return ReaderSplitUtil.doSplit(originalConfig, adviceNumber);
+        }
+
+        public void post(Configuration originalConfig) {
+            // do nothing
+        }
+
+        public void destroy(Configuration originalConfig) {
+            // do nothing
+        }
+
+    }
+
+    public static class Task {
+        private static final Logger LOG = LoggerFactory
+                .getLogger(Task.class);
+        private static final boolean IS_DEBUG = LOG.isDebugEnabled();
+        protected final byte[] EMPTY_CHAR_ARRAY = new byte[0];
+
+        private DataBaseType dataBaseType;
+        private int taskGroupId = -1;
+        private int taskId = -1;
+
+        private String username;
+        private String password;
+        private String proxyHost;
+        private int proxyPort;
+        private String jdbcUrl;
+        private String mandatoryEncoding;
+
+        // Common context attached to log messages, e.g. which database connection
+        // is used and which table the operation targets.
+        private String basicMsg;
+
+        public Task(DataBaseType dataBaseType) {
+            this(dataBaseType, -1, -1);
+        }
+
+        public Task(DataBaseType dataBaseType, int taskGroupId, int taskId) {
+            this.dataBaseType = dataBaseType;
+            this.taskGroupId = taskGroupId;
+            this.taskId = taskId;
+        }
+
+        public void init(Configuration readerSliceConfig) {
+
+            /* for database connection */
+
+            this.username = readerSliceConfig.getString(Key.USERNAME);
+            this.password = readerSliceConfig.getString(Key.PASSWORD);
+            this.proxyHost = readerSliceConfig.getString(Key.PROXY_HOST, DEFAULT_PROXY_SOCKS_HOST);
+            this.proxyPort = readerSliceConfig.getInt(Key.PROXY_PORT, DEFAULT_PROXY_SOCKS_PORT);
+            if(StringUtils.isNotBlank(password)){
+                try {
+                    password = (String) CryptoUtils.string2Object(password);
+                } catch (Exception e) {
+                    throw DataXException.asDataXException(DBUtilErrorCode.CONF_ERROR, "decrypt password failed");
+                }
+            }
+            this.jdbcUrl = readerSliceConfig.getString(Key.JDBC_URL);
+
+            // Special handling for OceanBase 1.0 (ob10) style jdbcUrl
+            if
(this.jdbcUrl.startsWith(com.alibaba.datax.plugin.rdbms.writer.Constant.OB10_SPLIT_STRING) && this.dataBaseType == DataBaseType.MySql) { + String[] ss = this.jdbcUrl.split(com.alibaba.datax.plugin.rdbms.writer.Constant.OB10_SPLIT_STRING_PATTERN); + if (ss.length != 3) { + throw DataXException + .asDataXException( + DBUtilErrorCode.JDBC_OB10_ADDRESS_ERROR, "JDBC OB10格式错误,请联系askdatax"); + } + LOG.info("this is ob1_0 jdbc url."); + this.username = ss[1].trim() + ":" + this.username; + this.jdbcUrl = ss[2]; + LOG.info("this is ob1_0 jdbc url. user=" + this.username + " :url=" + this.jdbcUrl); + } + + this.mandatoryEncoding = readerSliceConfig.getString(Key.MANDATORY_ENCODING, ""); + + basicMsg = String.format("jdbcUrl:[%s]", this.jdbcUrl); + + } + + public void startRead(Configuration readerSliceConfig, + RecordSender recordSender, + TaskPluginCollector taskPluginCollector, int fetchSize) { + String querySql = readerSliceConfig.getString(Key.QUERY_SQL); + String table = readerSliceConfig.getString(Key.TABLE); + + PerfTrace.getInstance().addTaskDetails(taskId, table + "," + basicMsg); + + LOG.info("Begin to read record by Sql: [{}\n] {}.", + querySql, basicMsg); + PerfRecord queryPerfRecord = new PerfRecord(taskGroupId, taskId, PerfRecord.PHASE.SQL_QUERY); + queryPerfRecord.start(); + + Connection conn = DBUtil.getConnection(this.dataBaseType, jdbcUrl, + username, password, proxyHost, proxyPort); + + // session config .etc related + DBUtil.dealWithSessionConfig(conn, readerSliceConfig, + this.dataBaseType, basicMsg); + + int columnNumber = 0; + ResultSet rs = null; + Statement stmt = null; + try { + rs = DBUtil.query(conn, querySql, fetchSize); + stmt = rs.getStatement(); + queryPerfRecord.end(); + ResultSetMetaData metaData = rs.getMetaData(); + columnNumber = metaData.getColumnCount(); + //这个统计干净的result_Next时间 + PerfRecord allResultPerfRecord = new PerfRecord(taskGroupId, taskId, PerfRecord.PHASE.RESULT_NEXT_ALL); + allResultPerfRecord.start(); + + long rsNextUsedTime = 0; + long lastTime = System.nanoTime(); + while (rs.next()) { + rsNextUsedTime += (System.nanoTime() - lastTime); + this.transportOneRecord(recordSender, rs, + metaData, columnNumber, mandatoryEncoding, taskPluginCollector); + lastTime = System.nanoTime(); + } + + allResultPerfRecord.end(rsNextUsedTime); + //目前大盘是依赖这个打印,而之前这个Finish read record是包含了sql查询和result next的全部时间 + LOG.info("Finished read record by Sql: [{}\n] {}.", + querySql, basicMsg); + + } catch (Exception e) { + throw RdbmsException.asQueryException(this.dataBaseType, e, querySql, table, username); + } finally { + DBUtil.closeDBResources(stmt, conn); + } + } + + public void post(Configuration originalConfig) { + // do nothing + } + + public void destroy(Configuration originalConfig) { + // do nothing + } + + protected Record transportOneRecord(RecordSender recordSender, ResultSet rs, + ResultSetMetaData metaData, int columnNumber, String mandatoryEncoding, + TaskPluginCollector taskPluginCollector) { + Record record = buildRecord(recordSender, rs, metaData, columnNumber, mandatoryEncoding, taskPluginCollector); + recordSender.sendToWriter(record); + return record; + } + + protected Record buildRecord(RecordSender recordSender, ResultSet rs, ResultSetMetaData metaData, int columnNumber, String mandatoryEncoding, + TaskPluginCollector taskPluginCollector) { + Record record = recordSender.createRecord(); + + try { + for (int i = 1; i <= columnNumber; i++) { + switch (metaData.getColumnType(i)) { + + case Types.CHAR: + case Types.NCHAR: + case Types.VARCHAR: + case 
Types.LONGVARCHAR: + case Types.NVARCHAR: + case Types.LONGNVARCHAR: + String rawData; + if (StringUtils.isBlank(mandatoryEncoding)) { + rawData = rs.getString(i); + } else { + rawData = new String((rs.getBytes(i) == null ? EMPTY_CHAR_ARRAY : + rs.getBytes(i)), mandatoryEncoding); + } + record.addColumn(new StringColumn(rawData)); + break; + + case Types.CLOB: + case Types.NCLOB: + record.addColumn(new StringColumn(rs.getString(i))); + break; + + case Types.SMALLINT: + case Types.TINYINT: + case Types.INTEGER: + case Types.BIGINT: + record.addColumn(new LongColumn(rs.getString(i))); + break; + + case Types.NUMERIC: + case Types.DECIMAL: + record.addColumn(new DoubleColumn(rs.getString(i))); + break; + + case Types.FLOAT: + case Types.REAL: + case Types.DOUBLE: + record.addColumn(new DoubleColumn(rs.getString(i))); + break; + + case Types.TIME: + record.addColumn(new DateColumn(rs.getTime(i))); + break; + + // for mysql bug, see http://bugs.mysql.com/bug.php?id=35115 + case Types.DATE: + if (metaData.getColumnTypeName(i).equalsIgnoreCase("year")) { + record.addColumn(new LongColumn(rs.getInt(i))); + } else { + record.addColumn(new DateColumn(rs.getDate(i))); + } + break; + + case Types.TIMESTAMP: + record.addColumn(new DateColumn(rs.getTimestamp(i))); + break; + + case Types.BINARY: + case Types.VARBINARY: + case Types.BLOB: + case Types.LONGVARBINARY: + record.addColumn(new BytesColumn(rs.getBytes(i))); + break; + + // warn: bit(1) -> Types.BIT 可使用BoolColumn + // warn: bit(>1) -> Types.VARBINARY 可使用BytesColumn + case Types.BOOLEAN: + case Types.BIT: + record.addColumn(new BoolColumn(rs.getBoolean(i))); + break; + + case Types.NULL: + String stringData = null; + if (rs.getObject(i) != null) { + stringData = rs.getObject(i).toString(); + } + record.addColumn(new StringColumn(stringData)); + break; + + default: + throw DataXException + .asDataXException( + DBUtilErrorCode.UNSUPPORTED_TYPE, + String.format( + "您的配置文件中的列配置信息有误. 因为DataX 不支持数据库读取这种字段类型. 字段名:[%s], 字段名称:[%s], 字段Java类型:[%s]. 请尝试使用数据库函数将其转换datax支持的类型 或者不同步该字段 .", + metaData.getColumnName(i), + metaData.getColumnType(i), + metaData.getColumnClassName(i))); + } + } + } catch (Exception e) { + if (IS_DEBUG) { + LOG.debug("read domain " + record.toString() + + " occur exception:", e); + } + //TODO 这里识别为脏数据靠谱吗? 
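+                // The failed record is still handed to the dirty-record collector so that
+                // dirty-data accounting (e.g. job.setting.errorLimit) can see it; a
+                // DataXException is rethrown to fail the task immediately.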
+                taskPluginCollector.collectDirtyRecord(record, e);
+                if (e instanceof DataXException) {
+                    throw (DataXException) e;
+                }
+            }
+            return record;
+        }
+    }
+
+}
diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/reader/Constant.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/reader/Constant.java
new file mode 100644
index 000000000..3c3e14394
--- /dev/null
+++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/reader/Constant.java
@@ -0,0 +1,28 @@
+package com.alibaba.datax.plugin.rdbms.reader;
+
+public final class Constant {
+    public static final String PK_TYPE = "pkType";
+
+    public static final Object PK_TYPE_STRING = "pkTypeString";
+
+    public static final Object PK_TYPE_LONG = "pkTypeLong";
+
+    public static final Object PK_TYPE_MONTECARLO = "pkTypeMonteCarlo";
+
+    public static final String SPLIT_MODE_RANDOMSAMPLE = "randomSampling";
+
+    public static final String CONN_MARK = "connection";
+
+    public static final String TABLE_NUMBER_MARK = "tableNumber";
+
+    public static final String IS_TABLE_MODE = "isTableMode";
+
+    public final static String FETCH_SIZE = "fetchSize";
+
+    public static final String QUERY_SQL_TEMPLATE_WITHOUT_WHERE = "select %s from %s ";
+
+    public static final String QUERY_SQL_TEMPLATE = "select %s from %s where (%s)";
+
+    public static final String TABLE_NAME_PLACEHOLDER = "@table";
+
+}
diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/reader/Key.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/reader/Key.java
new file mode 100644
index 000000000..5e6251491
--- /dev/null
+++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/reader/Key.java
@@ -0,0 +1,77 @@
+package com.alibaba.datax.plugin.rdbms.reader;
+
+/**
+ * Encoding, timezone and similar configs; not finalized yet.
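+ *
+ * A sample reader "parameter" fragment using these keys (illustrative values,
+ * not a complete job config):
+ * <pre>
+ *   "parameter": {
+ *       "username": "test",
+ *       "password": "******",
+ *       "column": ["id", "name"],
+ *       "splitPk": "id",
+ *       "connection": [{"table": ["tb_user"], "jdbcUrl": ["jdbc:mysql://127.0.0.1:3306/db"]}]
+ *   }
+ * </pre>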
+ */ +public final class Key { + public final static String JDBC_URL = "jdbcUrl"; + + public final static String USERNAME = "username"; + + public final static String PASSWORD = "password"; + + public final static String TABLE = "table"; + + public final static String MANDATORY_ENCODING = "mandatoryEncoding"; + + // 是数组配置 + public final static String COLUMN = "column"; + + public final static String COLUMN_LIST = "columnList"; + + public final static String WHERE = "where"; + + public final static String HINT = "hint"; + + public final static String SPLIT_PK = "splitPk"; + + public final static String SPLIT_MODE = "splitMode"; + + public final static String SAMPLE_PERCENTAGE = "samplePercentage"; + + public final static String QUERY_SQL = "querySql"; + + public final static String SPLIT_PK_SQL = "splitPkSql"; + + + public final static String PRE_SQL = "preSql"; + + public final static String POST_SQL = "postSql"; + + public final static String CHECK_SLAVE = "checkSlave"; + + public final static String SESSION = "session"; + + public final static String DBNAME = "dbName"; + + public final static String DRYRUN = "dryRun"; + + public final static String CONNPARM = "connParams"; + + public final static String HOST = "host"; + + public final static String PROXY_HOST = "proxyHost"; + + public final static String PORT = "port"; + + public final static String PROXY_PORT = "proxyPort"; + + public final static String DATABASE = "database"; + + public final static String JDBCTEM = "jdbc:mysql://"; + + public final static String JDBCORCL = "jdbc:oracle:thin:@"; + + public final static String SID = "sid"; + + public final static String SERVICENAME = "serviceName"; + + public final static String GTID = "gtid"; + + public final static String HEARTBEATINTERVAL = "heartBeatInterval"; + + public final static String SCHEMA = "schema"; + + public final static String SERVERID = "serverId"; + +} \ No newline at end of file diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/reader/ResultSetReadProxy.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/reader/ResultSetReadProxy.java new file mode 100644 index 000000000..3a56ec1d9 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/reader/ResultSetReadProxy.java @@ -0,0 +1,138 @@ +package com.alibaba.datax.plugin.rdbms.reader; + +import com.alibaba.datax.common.element.*; +import com.alibaba.datax.common.exception.DataXException; +import com.alibaba.datax.common.plugin.RecordSender; +import com.alibaba.datax.common.plugin.TaskPluginCollector; +import com.alibaba.datax.plugin.rdbms.util.DBUtilErrorCode; +import org.apache.commons.lang3.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.sql.ResultSet; +import java.sql.ResultSetMetaData; +import java.sql.Types; + +public class ResultSetReadProxy { + private static final Logger LOG = LoggerFactory + .getLogger(ResultSetReadProxy.class); + + private static final boolean IS_DEBUG = LOG.isDebugEnabled(); + private static final byte[] EMPTY_CHAR_ARRAY = new byte[0]; + + //TODO + public static void transportOneRecord(RecordSender recordSender, ResultSet rs, + ResultSetMetaData metaData, int columnNumber, String mandatoryEncoding, + TaskPluginCollector taskPluginCollector) { + Record record = recordSender.createRecord(); + + try { + for (int i = 1; i <= columnNumber; i++) { + switch (metaData.getColumnType(i)) { + + case Types.CHAR: + case 
Types.NCHAR: + case Types.VARCHAR: + case Types.LONGVARCHAR: + case Types.NVARCHAR: + case Types.LONGNVARCHAR: + String rawData; + if (StringUtils.isBlank(mandatoryEncoding)) { + rawData = rs.getString(i); + } else { + rawData = new String((rs.getBytes(i) == null ? EMPTY_CHAR_ARRAY : + rs.getBytes(i)), mandatoryEncoding); + } + record.addColumn(new StringColumn(rawData)); + break; + + case Types.CLOB: + case Types.NCLOB: + record.addColumn(new StringColumn(rs.getString(i))); + break; + + case Types.SMALLINT: + case Types.TINYINT: + case Types.INTEGER: + case Types.BIGINT: + record.addColumn(new LongColumn(rs.getString(i))); + break; + + case Types.NUMERIC: + case Types.DECIMAL: + record.addColumn(new DoubleColumn(rs.getString(i))); + break; + + case Types.FLOAT: + case Types.REAL: + case Types.DOUBLE: + record.addColumn(new DoubleColumn(rs.getString(i))); + break; + + case Types.TIME: + record.addColumn(new DateColumn(rs.getTime(i))); + break; + + // for mysql bug, see http://bugs.mysql.com/bug.php?id=35115 + case Types.DATE: + if (metaData.getColumnTypeName(i).equalsIgnoreCase("year")) { + record.addColumn(new LongColumn(rs.getInt(i))); + } else { + record.addColumn(new DateColumn(rs.getDate(i))); + } + break; + + case Types.TIMESTAMP: + record.addColumn(new DateColumn(rs.getTimestamp(i))); + break; + + case Types.BINARY: + case Types.VARBINARY: + case Types.BLOB: + case Types.LONGVARBINARY: + record.addColumn(new BytesColumn(rs.getBytes(i))); + break; + + // warn: bit(1) -> Types.BIT 可使用BoolColumn + // warn: bit(>1) -> Types.VARBINARY 可使用BytesColumn + case Types.BOOLEAN: + case Types.BIT: + record.addColumn(new BoolColumn(rs.getBoolean(i))); + break; + + case Types.NULL: + String stringData = null; + if (rs.getObject(i) != null) { + stringData = rs.getObject(i).toString(); + } + record.addColumn(new StringColumn(stringData)); + break; + + // TODO 添加BASIC_MESSAGE + default: + throw DataXException + .asDataXException( + DBUtilErrorCode.UNSUPPORTED_TYPE, + String.format( + "您的配置文件中的列配置信息有误. 因为DataX 不支持数据库读取这种字段类型. 字段名:[%s], 字段名称:[%s], 字段Java类型:[%s]. 请尝试使用数据库函数将其转换datax支持的类型 或者不同步该字段 .", + metaData.getColumnName(i), + metaData.getColumnType(i), + metaData.getColumnClassName(i))); + } + } + } catch (Exception e) { + if (IS_DEBUG) { + LOG.debug("read domain " + record.toString() + + " occur exception:", e); + } + + //TODO 这里识别为脏数据靠谱吗? + taskPluginCollector.collectDirtyRecord(record, e); + if (e instanceof DataXException) { + throw (DataXException) e; + } + } + + recordSender.sendToWriter(record); + } +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/reader/util/HintUtil.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/reader/util/HintUtil.java new file mode 100644 index 000000000..59e3044d7 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/reader/util/HintUtil.java @@ -0,0 +1,67 @@ +package com.alibaba.datax.plugin.rdbms.reader.util; + +import com.alibaba.datax.common.util.Configuration; +import com.alibaba.datax.plugin.rdbms.reader.Constant; +import com.alibaba.datax.plugin.rdbms.reader.Key; +import com.alibaba.datax.plugin.rdbms.util.DBUtil; +import com.alibaba.datax.plugin.rdbms.util.DataBaseType; +import org.apache.commons.lang3.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +/** + * Created by liuyi on 15/9/18. 
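+ *
+ * The hint config is expected as "tablePattern#hintExpression" (a single value
+ * means the pattern defaults to ".*"). In the expression, the "@table"
+ * placeholder is replaced with the matched table name (schema stripped) and the
+ * result is prepended to the selected column list; for Oracle, a hint containing
+ * "parallel" is skipped when the instance is detected as a master.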
+ */ +public class HintUtil { + private static final Logger LOG = LoggerFactory.getLogger(ReaderSplitUtil.class); + + private static DataBaseType dataBaseType; + private static String username; + private static String password; + private static Pattern tablePattern; + private static String hintExpression; + + public static void initHintConf(DataBaseType type, Configuration configuration) { + dataBaseType = type; + username = configuration.getString(Key.USERNAME); + password = configuration.getString(Key.PASSWORD); + String hint = configuration.getString(Key.HINT); + if (StringUtils.isNotBlank(hint)) { + String[] tablePatternAndHint = hint.split("#"); + if (tablePatternAndHint.length == 1) { + tablePattern = Pattern.compile(".*"); + hintExpression = tablePatternAndHint[0]; + } else { + tablePattern = Pattern.compile(tablePatternAndHint[0]); + hintExpression = tablePatternAndHint[1]; + } + } + } + + public static String buildQueryColumn(String jdbcUrl, String table, String column) { + try { + if (tablePattern != null && DataBaseType.Oracle.equals(dataBaseType)) { + Matcher m = tablePattern.matcher(table); + if (m.find()) { + String[] tableStr = table.split("\\."); + String tableWithoutSchema = tableStr[tableStr.length - 1]; + String finalHint = hintExpression.replaceAll(Constant.TABLE_NAME_PLACEHOLDER, tableWithoutSchema); + //主库不并发读取 + if (finalHint.indexOf("parallel") > 0 && DBUtil.isOracleMaster(jdbcUrl, username, password)) { + LOG.info("master:{} will not use hint:{}", jdbcUrl, finalHint); + } else { + LOG.info("table:{} use hint:{}.", table, finalHint); + return finalHint + column; + } + } + } + } catch (Exception e) { + LOG.warn("match hint exception, will not use hint", e); + } + return column; + } + +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/reader/util/OriginalConfPretreatmentUtil.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/reader/util/OriginalConfPretreatmentUtil.java new file mode 100644 index 000000000..037c8ce1c --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/reader/util/OriginalConfPretreatmentUtil.java @@ -0,0 +1,299 @@ +package com.alibaba.datax.plugin.rdbms.reader.util; + +import com.alibaba.datax.common.exception.DataXException; +import com.alibaba.datax.common.util.Configuration; +import com.alibaba.datax.common.util.ListUtil; +import com.alibaba.datax.plugin.rdbms.reader.Constant; +import com.alibaba.datax.plugin.rdbms.reader.Key; +import com.alibaba.datax.plugin.rdbms.util.*; +import com.webank.wedatasphere.exchangis.datax.common.CryptoUtils; +import com.webank.wedatasphere.exchangis.datax.util.Json; +import org.apache.commons.lang3.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; + +import static com.alibaba.datax.plugin.rdbms.util.Constant.DEFAULT_PROXY_SOCKS_HOST; +import static com.alibaba.datax.plugin.rdbms.util.Constant.DEFAULT_PROXY_SOCKS_PORT; + +public final class OriginalConfPretreatmentUtil { + private static final Logger LOG = LoggerFactory + .getLogger(OriginalConfPretreatmentUtil.class); + + public static DataBaseType DATABASE_TYPE; + + public static void doPretreatment(Configuration originalConfig) { + // 检查 username/password 配置(必填) + originalConfig.getNecessaryValue(Key.USERNAME, + DBUtilErrorCode.REQUIRED_VALUE); + 
originalConfig.getNecessaryValue(Key.PASSWORD,
+                DBUtilErrorCode.REQUIRED_VALUE);
+        dealWhere(originalConfig);
+
+        simplifyConf(originalConfig);
+    }
+
+    public static void dealWhere(Configuration originalConfig) {
+        String where = originalConfig.getString(Key.WHERE, null);
+        if (StringUtils.isNotBlank(where)) {
+            String whereImprove = where.trim();
+            if (whereImprove.endsWith(";") || whereImprove.endsWith("；")) {
+                whereImprove = whereImprove.substring(0, whereImprove.length() - 1);
+            }
+            originalConfig.set(Key.WHERE, whereImprove);
+        }
+    }
+
+    /**
+     * Preliminary processing of the configuration:
+     *
    + *
+     * <ol>
+     * <li>handle the case where one database is configured with multiple jdbcUrls;</li>
+     * <li>recognize and mark whether table mode or querySql mode is used;</li>
+     * <li>for table mode, determine the number of tables and handle expanding column to *.</li>
+     * </ol>
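+     * A minimal "connection" fragment for the two modes (illustrative values):
+     * <pre>
+     *   "connection": [{
+     *       "jdbcUrl": ["jdbc:mysql://127.0.0.1:3306/db"],
+     *       "table": ["tb_case"]
+     *   }]
+     * </pre>
+     * Exactly one of "table" and "querySql" must be configured per connection.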
+ */ + private static void simplifyConf(Configuration originalConfig) { + boolean isTableMode = recognizeTableOrQuerySqlMode(originalConfig); + originalConfig.set(Constant.IS_TABLE_MODE, isTableMode); + + dealJdbcAndTable(originalConfig); + + dealColumnConf(originalConfig); + } + + @SuppressWarnings("unchecked") + private static void dealJdbcAndTable(Configuration originalConfig) { + String username = originalConfig.getString(Key.USERNAME); + String password = originalConfig.getString(Key.PASSWORD); + String proxyHost = originalConfig.getString(Key.PROXY_HOST, DEFAULT_PROXY_SOCKS_HOST); + int proxyPort = originalConfig.getInt(Key.PROXY_PORT, DEFAULT_PROXY_SOCKS_PORT); + if(StringUtils.isNotBlank(password)){ + try { + password = (String) CryptoUtils.string2Object(password); + } catch (Exception e) { + throw DataXException.asDataXException(DBUtilErrorCode.CONF_ERROR, "decrypt password failed"); + } + } + boolean checkSlave = originalConfig.getBool(Key.CHECK_SLAVE, false); + boolean isTableMode = originalConfig.getBool(Constant.IS_TABLE_MODE); + boolean isPreCheck = originalConfig.getBool(Key.DRYRUN, false); + + List conns = originalConfig.getList(Constant.CONN_MARK, + Object.class); + List preSql = originalConfig.getList(Key.PRE_SQL, String.class); + + int tableNum = 0; + + for (int i = 0, len = conns.size(); i < len; i++) { + Configuration connConf = Configuration + .from(Json.toJson(conns.get(i), null)); + + connConf.getNecessaryValue(Key.JDBC_URL, + DBUtilErrorCode.REQUIRED_VALUE); + + List jdbcUrls = new ArrayList<>(); + if(DATABASE_TYPE.equals(DataBaseType.MySql)){ + List jdbcUrlObjects = connConf.getList(Key.JDBC_URL); + for(Object obj : jdbcUrlObjects){ + Map map = (Map) obj; + String parameter = ""; + Map parameterMap = originalConfig.getMap(Key.CONNPARM, new HashMap<>()); + for(String key : map.keySet()){ + if (key.equals(Key.CONNPARM)){ + parameterMap.putAll((Map) map.get(key)); + } + } + parameter = parameterMap.entrySet().stream().map( + e->String.join("=", e.getKey(), String.valueOf(e.getValue())) + ).collect(Collectors.joining("&")); + String jcUrl = Key.JDBCTEM + map.get(Key.HOST).toString() + ":" + map.get(Key.PORT).toString() + "/" + map.get(Key.DATABASE).toString(); + if(parameter.length() != 0){ + jcUrl = Key.JDBCTEM + map.get(Key.HOST).toString() + ":" + map.get(Key.PORT).toString() + "/" + map.get(Key.DATABASE).toString() + "?" 
+ parameter; + } + jdbcUrls.add(jcUrl); + } + } else if (DATABASE_TYPE.equals(DataBaseType.Oracle)){ + List jdbcUrlObjects = connConf.getList(Key.JDBC_URL); + for(Object obj : jdbcUrlObjects){ + Map map = (Map) obj; + String jcUrl = Key.JDBCORCL + "//" + map.get(Key.HOST).toString() + ":" + map.get(Key.PORT).toString() + "/" + map.get(Key.SERVICENAME).toString(); + if(StringUtils.isEmpty(map.get(Key.SERVICENAME).toString())){ + jcUrl = Key.JDBCORCL + map.get(Key.HOST).toString() + ":" + map.get(Key.PORT).toString() + ":" + map.get(Key.SID).toString(); + } + jdbcUrls.add(jcUrl); + } + }else{ + jdbcUrls = connConf + .getList(Key.JDBC_URL, String.class); + } + + String jdbcUrl; + if (isPreCheck) { + jdbcUrl = DBUtil.chooseJdbcUrlWithoutRetry(DATABASE_TYPE, jdbcUrls, + username, password, proxyHost, proxyPort, preSql, checkSlave); + } else { + jdbcUrl = DBUtil.chooseJdbcUrl(DATABASE_TYPE, jdbcUrls, + username, password, proxyHost, proxyPort, preSql, checkSlave); + } + + jdbcUrl = DATABASE_TYPE.appendJDBCSuffixForReader(jdbcUrl); + + // 回写到connection[i].jdbcUrl + originalConfig.set(String.format("%s[%d].%s", Constant.CONN_MARK, + i, Key.JDBC_URL), jdbcUrl); + + LOG.info("Available jdbcUrl:{}.", jdbcUrl); + + if (isTableMode) { + // table 方式 + // 对每一个connection 上配置的table 项进行解析(已对表名称进行了 ` 处理的) + List tables = connConf.getList(Key.TABLE, String.class); + + List expandedTables = TableExpandUtil.expandTableConf( + DATABASE_TYPE, tables); + + if (null == expandedTables || expandedTables.isEmpty()) { + throw DataXException.asDataXException( + DBUtilErrorCode.ILLEGAL_VALUE, String.format("您所配置的读取数据库表:%s 不正确. 因为DataX根据您的配置找不到这张表. 请检查您的配置并作出修改." + + "请先了解 DataX 配置.", StringUtils.join(tables, ","))); + } + + tableNum += expandedTables.size(); + + originalConfig.set(String.format("%s[%d].%s", + Constant.CONN_MARK, i, Key.TABLE), expandedTables); + } else { + // 说明是配置的 querySql 方式,不做处理. + } + } + + originalConfig.set(Constant.TABLE_NUMBER_MARK, tableNum); + } + + private static void dealColumnConf(Configuration originalConfig) { + boolean isTableMode = originalConfig.getBool(Constant.IS_TABLE_MODE); + + List userConfiguredColumns = originalConfig.getList(Key.COLUMN, + String.class); + + if (isTableMode) { + if (null == userConfiguredColumns + || userConfiguredColumns.isEmpty()) { + throw DataXException.asDataXException(DBUtilErrorCode.REQUIRED_VALUE, "您未配置读取数据库表的列信息. " + + "正确的配置方式是给 column 配置上您需要读取的列名称,用英文逗号分隔. 例如: \"column\": [\"id\", \"name\"],请参考上述配置并作出修改."); + } else { + String splitPk = originalConfig.getString(Key.SPLIT_PK, null); + + if (1 == userConfiguredColumns.size() + && "*".equals(userConfiguredColumns.get(0))) { + LOG.warn("您的配置文件中的列配置存在一定的风险. 
因为您未配置读取数据库表的列,当您的表字段个数、类型有变动时,可能影响任务正确性甚至会运行出错。请检查您的配置并作出修改."); + // 回填其值,需要以 String 的方式转交后续处理 + originalConfig.set(Key.COLUMN, "*"); + } else { + String jdbcUrl = originalConfig.getString(String.format( + "%s[0].%s", Constant.CONN_MARK, Key.JDBC_URL)); + + String username = originalConfig.getString(Key.USERNAME); + String password = originalConfig.getString(Key.PASSWORD); + String proxyHost = originalConfig.getString(Key.PROXY_HOST, DEFAULT_PROXY_SOCKS_HOST); + int proxyPort = originalConfig.getInt(Key.PROXY_PORT, DEFAULT_PROXY_SOCKS_PORT); + String tableName = originalConfig.getString(String.format( + "%s[0].%s[0]", Constant.CONN_MARK, Key.TABLE)); + + List allColumns = DBUtil.getTableColumns( + DATABASE_TYPE, jdbcUrl, username, password, proxyHost, proxyPort, + tableName); + LOG.info("table:[{}] has columns:[{}].", + tableName, StringUtils.join(allColumns, ",")); + // warn:注意mysql表名区分大小写 + allColumns = ListUtil.valueToLowerCase(allColumns); + List quotedColumns = new ArrayList(); + + for (String column : userConfiguredColumns) { + if ("*".equals(column)) { + throw DataXException.asDataXException( + DBUtilErrorCode.ILLEGAL_VALUE, + "您的配置文件中的列配置信息有误. 因为根据您的配置,数据库表的列中存在多个*. 请检查您的配置并作出修改. "); + } + + quotedColumns.add(column); + //以下判断没有任何意义 +// if (null == column) { +// quotedColumns.add(null); +// } else { +// if (allColumns.contains(column.toLowerCase())) { +// quotedColumns.add(column); +// } else { +// // 可能是由于用户填写为函数,或者自己对字段进行了`处理或者常量 +// quotedColumns.add(column); +// } +// } + } + + originalConfig.set(Key.COLUMN_LIST, quotedColumns); + originalConfig.set(Key.COLUMN, + StringUtils.join(quotedColumns, ",")); + if (StringUtils.isNotBlank(splitPk)) { + if (!allColumns.contains(splitPk.toLowerCase())) { + throw DataXException.asDataXException(DBUtilErrorCode.ILLEGAL_SPLIT_PK, + String.format("您的配置文件中的列配置信息有误. 因为根据您的配置,您读取的数据库表:%s 中没有主键名为:%s. 请检查您的配置并作出修改.", tableName, splitPk)); + } + } + + } + } + } + + } + + private static boolean recognizeTableOrQuerySqlMode( + Configuration originalConfig) { + List conns = originalConfig.getList(Constant.CONN_MARK, + Object.class); + + List tableModeFlags = new ArrayList(); + List querySqlModeFlags = new ArrayList(); + + String table = null; + String querySql = null; + + boolean isTableMode = false; + boolean isQuerySqlMode = false; + for (int i = 0, len = conns.size(); i < len; i++) { + Configuration connConf = Configuration + .from(Json.toJson(conns.get(i), null)); + table = connConf.getString(Key.TABLE, null); + querySql = connConf.getString(Key.QUERY_SQL, null); + + isTableMode = StringUtils.isNotBlank(table); + tableModeFlags.add(isTableMode); + + isQuerySqlMode = StringUtils.isNotBlank(querySql); + querySqlModeFlags.add(isQuerySqlMode); + + if (!isTableMode && !isQuerySqlMode) { + // table 和 querySql 二者均未配制 + throw DataXException.asDataXException( + DBUtilErrorCode.TABLE_QUERYSQL_MISSING, "您的配置有误. 因为table和querySql应该配置并且只能配置一个. 请检查您的配置并作出修改."); + } else if (isTableMode && isQuerySqlMode) { + // table 和 querySql 二者均配置 + throw DataXException.asDataXException(DBUtilErrorCode.TABLE_QUERYSQL_MIXED, + "您的配置凌乱了. 因为datax不能同时既配置table又配置querySql.请检查您的配置并作出修改."); + } + } + + // 混合配制 table 和 querySql + if (!ListUtil.checkIfValueSame(tableModeFlags)) { + throw DataXException.asDataXException(DBUtilErrorCode.TABLE_QUERYSQL_MIXED, + "您配置凌乱了. 不能同时既配置table又配置querySql. 
请检查您的配置并作出修改."); + } + + return tableModeFlags.get(0); + } + +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/reader/util/PreCheckTask.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/reader/util/PreCheckTask.java new file mode 100644 index 000000000..8c7b1df00 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/reader/util/PreCheckTask.java @@ -0,0 +1,107 @@ +package com.alibaba.datax.plugin.rdbms.reader.util; + +import com.alibaba.datax.common.exception.DataXException; +import com.alibaba.datax.common.util.Configuration; +import com.alibaba.datax.plugin.rdbms.reader.Key; +import com.alibaba.datax.plugin.rdbms.util.DBUtil; +import com.alibaba.datax.plugin.rdbms.util.DataBaseType; +import com.alibaba.datax.plugin.rdbms.util.RdbmsException; +import com.alibaba.druid.sql.parser.ParserException; +import com.webank.wedatasphere.exchangis.datax.util.Json; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.sql.Connection; +import java.sql.ResultSet; +import java.util.List; +import java.util.concurrent.Callable; + +/** + * Created by judy.lt on 2015/6/4. + */ +public class PreCheckTask implements Callable { + private static final Logger LOG = LoggerFactory.getLogger(PreCheckTask.class); + private String userName; + private String password; + private String proxyHost; + private int proxyPort; + private String splitPkId; + private Configuration connection; + private DataBaseType dataBaseType; + + public PreCheckTask(String userName, + String password, + String proxyHost, + int proxyPort, + Configuration connection, + DataBaseType dataBaseType, + String splitPkId) { + this.connection = connection; + this.userName = userName; + this.password = password; + this.proxyHost = proxyHost; + this.proxyPort = proxyPort; + this.dataBaseType = dataBaseType; + this.splitPkId = splitPkId; + } + + @Override + public Boolean call() throws DataXException { + String jdbcUrl = this.connection.getString(Key.JDBC_URL); + List querySqls = this.connection.getList(Key.QUERY_SQL, Object.class); + List splitPkSqls = this.connection.getList(Key.SPLIT_PK_SQL, Object.class); + List tables = this.connection.getList(Key.TABLE, Object.class); + Connection conn = DBUtil.getConnectionWithoutRetry(this.dataBaseType, jdbcUrl, + this.userName, password, proxyHost, proxyPort); + int fetchSize = 1; + if (DataBaseType.MySql.equals(dataBaseType) || DataBaseType.DRDS.equals(dataBaseType)) { + fetchSize = Integer.MIN_VALUE; + } + try { + for (int i = 0; i < querySqls.size(); i++) { + + String splitPkSql = null; + String querySql = Json.toJson(querySqls.get(i), null); + + String table = null; + if (tables != null && !tables.isEmpty()) { + table = Json.toJson(tables.get(i), null); + } + + /*verify query*/ + ResultSet rs = null; + try { + DBUtil.sqlValid(querySql, dataBaseType); + if (i == 0) { + rs = DBUtil.query(conn, querySql, fetchSize); + } + } catch (ParserException e) { + throw RdbmsException.asSqlParserException(this.dataBaseType, e, querySql); + } catch (Exception e) { + throw RdbmsException.asQueryException(this.dataBaseType, e, querySql, table, userName); + } finally { + DBUtil.closeDBResources(rs, null, null); + } + /*verify splitPK*/ + try { + if (splitPkSqls != null && !splitPkSqls.isEmpty()) { + splitPkSql = Json.toJson(splitPkSqls.get(i), null); + DBUtil.sqlValid(splitPkSql, dataBaseType); + if (i == 0) { + SingleTableSplitUtil.precheckSplitPk(conn, 
splitPkSql, fetchSize, table, userName); + } + } + } catch (ParserException e) { + throw RdbmsException.asSqlParserException(this.dataBaseType, e, splitPkSql); + } catch (DataXException e) { + throw e; + } catch (Exception e) { + throw RdbmsException.asSplitPKException(this.dataBaseType, e, splitPkSql, this.splitPkId.trim()); + } + } + } finally { + DBUtil.closeDBResources(null, conn); + } + return true; + } +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/reader/util/ReaderSplitUtil.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/reader/util/ReaderSplitUtil.java new file mode 100644 index 000000000..c72f0e61a --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/reader/util/ReaderSplitUtil.java @@ -0,0 +1,164 @@ +package com.alibaba.datax.plugin.rdbms.reader.util; + +import com.alibaba.datax.common.constant.CommonConstant; +import com.alibaba.datax.common.util.Configuration; +import com.alibaba.datax.plugin.rdbms.reader.Constant; +import com.alibaba.datax.plugin.rdbms.reader.Key; +import com.alibaba.datax.plugin.rdbms.util.DataBaseType; +import com.webank.wedatasphere.exchangis.datax.util.Json; +import org.apache.commons.lang3.StringUtils; +import org.apache.commons.lang3.Validate; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.ArrayList; +import java.util.List; + +public final class ReaderSplitUtil { + private static final Logger LOG = LoggerFactory + .getLogger(ReaderSplitUtil.class); + + public static List doSplit( + Configuration originalSliceConfig, int adviceNumber) { + boolean isTableMode = originalSliceConfig.getBool(Constant.IS_TABLE_MODE).booleanValue(); + int eachTableShouldSplittedNumber = -1; + if (isTableMode) { + // adviceNumber这里是channel数量大小, 即datax并发task数量 + // eachTableShouldSplittedNumber是单表应该切分的份数, 向上取整可能和adviceNumber没有比例关系了已经 + eachTableShouldSplittedNumber = calculateEachTableShouldSplittedNumber( + adviceNumber, originalSliceConfig.getInt(Constant.TABLE_NUMBER_MARK)); + } + + String column = originalSliceConfig.getString(Key.COLUMN); + String where = originalSliceConfig.getString(Key.WHERE, null); + + List conns = originalSliceConfig.getList(Constant.CONN_MARK, Object.class); + + List splittedConfigs = new ArrayList(); + + for (int i = 0, len = conns.size(); i < len; i++) { + Configuration sliceConfig = originalSliceConfig.clone(); + + Configuration connConf = Configuration.from(Json.toJson(conns.get(i), null)); + String jdbcUrl = connConf.getString(Key.JDBC_URL); + sliceConfig.set(Key.JDBC_URL, jdbcUrl); + + // 抽取 jdbcUrl 中的 ip/port 进行资源使用的打标,以提供给 core 做有意义的 shuffle 操作 + sliceConfig.set(CommonConstant.LOAD_BALANCE_RESOURCE_MARK, DataBaseType.parseIpFromJdbcUrl(jdbcUrl)); + + sliceConfig.remove(Constant.CONN_MARK); + + Configuration tempSlice; + + // 说明是配置的 table 方式 + if (isTableMode) { + // 已在之前进行了扩展和`处理,可以直接使用 + List tables = connConf.getList(Key.TABLE, String.class); + if(null == tables){ + tables = new ArrayList<>(); + } + Validate.isTrue(null != tables && !tables.isEmpty(), "您读取数据库表配置错误."); + + String splitPk = originalSliceConfig.getString(Key.SPLIT_PK, null); + + //最终切分份数不一定等于 eachTableShouldSplittedNumber + boolean needSplitTable = eachTableShouldSplittedNumber > 1 + && StringUtils.isNotBlank(splitPk); + if (needSplitTable) { + if (tables.size() == 1) { + //原来:如果是单表的,主键切分num=num*2+1 + // splitPk is null这类的情况的数据量本身就比真实数据量少很多, 和channel大小比率关系时,不建议考虑 + 
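A minimal sketch of the slice-count arithmetic applied in this branch (names mirror the patch; the concrete numbers are hypothetical):

```java
// adviceNumber is the channel count, i.e. the number of concurrent DataX tasks.
int adviceNumber = 10;                    // hypothetical
int tableNumber = 3;                      // hypothetical
// Each table is asked for ceil(adviceNumber / tableNumber) slices.
int eachTableShouldSplittedNumber =
        (int) Math.ceil(1.0 * adviceNumber / tableNumber);   // -> 4
// A single table with a splitPk gets its slice count amplified (x5 below);
// the older formula (n * 2 + 1) was dropped because the extra "+1" slice
// tended to become a long-tail straggler.
if (tableNumber == 1) {
    eachTableShouldSplittedNumber *= 5;
}
```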
//eachTableShouldSplittedNumber = eachTableShouldSplittedNumber * 2 + 1;// 不应该加1导致长尾 + + //考虑其他比率数字?(splitPk is null, 忽略此长尾) + eachTableShouldSplittedNumber = eachTableShouldSplittedNumber * 5; + } + // 尝试对每个表,切分为eachTableShouldSplittedNumber 份 + for (String table : tables) { + tempSlice = sliceConfig.clone(); + tempSlice.set(Key.TABLE, table); + + List splittedSlices = SingleTableSplitUtil + .splitSingleTable(tempSlice, eachTableShouldSplittedNumber); + + splittedConfigs.addAll(splittedSlices); + } + } else { + for (String table : tables) { + tempSlice = sliceConfig.clone(); + tempSlice.set(Key.TABLE, table); + String queryColumn = HintUtil.buildQueryColumn(jdbcUrl, table, column); + tempSlice.set(Key.QUERY_SQL, SingleTableSplitUtil.buildQuerySql(queryColumn, table, where)); + splittedConfigs.add(tempSlice); + } + } + } else { + // 说明是配置的 querySql 方式 + List sqls = connConf.getList(Key.QUERY_SQL, String.class); + + // TODO 是否check 配置为多条语句?? + for (String querySql : sqls) { + tempSlice = sliceConfig.clone(); + tempSlice.set(Key.QUERY_SQL, querySql); + splittedConfigs.add(tempSlice); + } + } + + } + + return splittedConfigs; + } + + public static Configuration doPreCheckSplit(Configuration originalSliceConfig) { + Configuration queryConfig = originalSliceConfig.clone(); + boolean isTableMode = originalSliceConfig.getBool(Constant.IS_TABLE_MODE).booleanValue(); + + String splitPK = originalSliceConfig.getString(Key.SPLIT_PK); + String column = originalSliceConfig.getString(Key.COLUMN); + String where = originalSliceConfig.getString(Key.WHERE, null); + + List conns = queryConfig.getList(Constant.CONN_MARK, Object.class); + + for (int i = 0, len = conns.size(); i < len; i++) { + Configuration connConf = Configuration.from(Json.toJson(conns.get(i), null)); + List querys = new ArrayList(); + List splitPkQuerys = new ArrayList(); + String connPath = String.format("connection[%d]", i); + // 说明是配置的 table 方式 + if (isTableMode) { + // 已在之前进行了扩展和`处理,可以直接使用 + List tables = connConf.getList(Key.TABLE, String.class); + Validate.isTrue(null != tables && !tables.isEmpty(), "您读取数据库表配置错误."); + for (String table : tables) { + querys.add(SingleTableSplitUtil.buildQuerySql(column, table, where)); + if (splitPK != null && !splitPK.isEmpty()) { + splitPkQuerys.add(SingleTableSplitUtil.genPKSql(splitPK.trim(), table, where)); + } + } + if (!splitPkQuerys.isEmpty()) { + connConf.set(Key.SPLIT_PK_SQL, splitPkQuerys); + } + connConf.set(Key.QUERY_SQL, querys); + queryConfig.set(connPath, connConf); + } else { + // 说明是配置的 querySql 方式 + List sqls = connConf.getList(Key.QUERY_SQL, + String.class); + for (String querySql : sqls) { + querys.add(querySql); + } + connConf.set(Key.QUERY_SQL, querys); + queryConfig.set(connPath, connConf); + } + } + return queryConfig; + } + + private static int calculateEachTableShouldSplittedNumber(int adviceNumber, + int tableNumber) { + double tempNum = 1.0 * adviceNumber / tableNumber; + + return (int) Math.ceil(tempNum); + } + +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/reader/util/SingleTableSplitUtil.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/reader/util/SingleTableSplitUtil.java new file mode 100644 index 000000000..9f153ff88 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/reader/util/SingleTableSplitUtil.java @@ -0,0 +1,388 @@ +package com.alibaba.datax.plugin.rdbms.reader.util; + +import 
com.alibaba.datax.common.exception.DataXException; +import com.alibaba.datax.common.util.Configuration; +import com.alibaba.datax.plugin.rdbms.reader.Constant; +import com.alibaba.datax.plugin.rdbms.reader.Key; +import com.alibaba.datax.plugin.rdbms.util.*; +import com.webank.wedatasphere.exchangis.datax.util.Json; +import org.apache.commons.lang3.StringUtils; +import org.apache.commons.lang3.tuple.ImmutablePair; +import org.apache.commons.lang3.tuple.Pair; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.math.BigInteger; +import java.sql.Connection; +import java.sql.ResultSet; +import java.sql.ResultSetMetaData; +import java.sql.Types; +import java.util.ArrayList; +import java.util.List; + +public class SingleTableSplitUtil { + private static final Logger LOG = LoggerFactory + .getLogger(SingleTableSplitUtil.class); + + public static DataBaseType DATABASE_TYPE; + + private SingleTableSplitUtil() { + } + + public static List splitSingleTable( + Configuration configuration, int adviceNum) { + List pluginParams = new ArrayList(); + List rangeList; + String splitPkName = configuration.getString(Key.SPLIT_PK); + String column = configuration.getString(Key.COLUMN); + String table = configuration.getString(Key.TABLE); + String where = configuration.getString(Key.WHERE, null); + boolean hasWhere = StringUtils.isNotBlank(where); + + //String splitMode = configuration.getString(Key.SPLIT_MODE, ""); + //if (Constant.SPLIT_MODE_RANDOMSAMPLE.equals(splitMode) && DATABASE_TYPE == DataBaseType.Oracle) { + if (DATABASE_TYPE == DataBaseType.Oracle) { + rangeList = genSplitSqlForOracle(splitPkName, table, where, + configuration, adviceNum); + // warn: mysql etc to be added... + } else { + Pair minMaxPK = getPkRange(configuration); + if (null == minMaxPK) { + throw DataXException.asDataXException(DBUtilErrorCode.ILLEGAL_SPLIT_PK, + "根据切分主键切分表失败. DataX 仅支持切分主键为一个,并且类型为整数或者字符串类型. 请尝试使用其他的切分主键或者联系 DBA 进行处理."); + } + + configuration.set(Key.QUERY_SQL, buildQuerySql(column, table, where)); + if (null == minMaxPK.getLeft() || null == minMaxPK.getRight()) { + // 切分后获取到的start/end 有 Null 的情况 + pluginParams.add(configuration); + return pluginParams; + } + + boolean isStringType = com.alibaba.datax.plugin.rdbms.reader.Constant.PK_TYPE_STRING.equals(configuration + .getString(com.alibaba.datax.plugin.rdbms.reader.Constant.PK_TYPE)); + boolean isLongType = com.alibaba.datax.plugin.rdbms.reader.Constant.PK_TYPE_LONG.equals(configuration + .getString(com.alibaba.datax.plugin.rdbms.reader.Constant.PK_TYPE)); + + + if (isStringType) { + rangeList = RdbmsRangeSplitWrap.splitAndWrap( + String.valueOf(minMaxPK.getLeft()), + String.valueOf(minMaxPK.getRight()), adviceNum, + splitPkName, "'", DATABASE_TYPE); + } else if (isLongType) { + rangeList = RdbmsRangeSplitWrap.splitAndWrap( + new BigInteger(minMaxPK.getLeft().toString()), + new BigInteger(minMaxPK.getRight().toString()), + adviceNum, splitPkName); + } else { + throw DataXException.asDataXException(DBUtilErrorCode.ILLEGAL_SPLIT_PK, + "您配置的切分主键(splitPk) 类型 DataX 不支持. DataX 仅支持切分主键为一个,并且类型为整数或者字符串类型. 请尝试使用其他的切分主键或者联系 DBA 进行处理."); + } + } + String tempQuerySql; + List allQuerySql = new ArrayList(); + + if (null != rangeList && !rangeList.isEmpty()) { + for (String range : rangeList) { + Configuration tempConfig = configuration.clone(); + + tempQuerySql = buildQuerySql(column, table, where) + + (hasWhere ? 
" and " : " where ") + range; + + allQuerySql.add(tempQuerySql); + tempConfig.set(Key.QUERY_SQL, tempQuerySql); + pluginParams.add(tempConfig); + } + } else { + //pluginParams.add(configuration); // this is wrong for new & old split + Configuration tempConfig = configuration.clone(); + tempQuerySql = buildQuerySql(column, table, where) + + (hasWhere ? " and " : " where ") + + String.format(" %s IS NOT NULL", splitPkName); + allQuerySql.add(tempQuerySql); + tempConfig.set(Key.QUERY_SQL, tempQuerySql); + pluginParams.add(tempConfig); + } + + // deal pk is null + Configuration tempConfig = configuration.clone(); + tempQuerySql = buildQuerySql(column, table, where) + + (hasWhere ? " and " : " where ") + + String.format(" %s IS NULL", splitPkName); + + allQuerySql.add(tempQuerySql); + + LOG.info("After split(), allQuerySql=[\n{}\n].", + StringUtils.join(allQuerySql, "\n")); + + tempConfig.set(Key.QUERY_SQL, tempQuerySql); + pluginParams.add(tempConfig); + + return pluginParams; + } + + public static String buildQuerySql(String column, String table, + String where) { + String querySql; + + if (StringUtils.isBlank(where)) { + querySql = String.format(com.alibaba.datax.plugin.rdbms.reader.Constant.QUERY_SQL_TEMPLATE_WITHOUT_WHERE, + column, table); + } else { + querySql = String.format(com.alibaba.datax.plugin.rdbms.reader.Constant.QUERY_SQL_TEMPLATE, column, + table, where); + } + + return querySql; + } + + @SuppressWarnings("resource") + private static Pair getPkRange(Configuration configuration) { + String pkRangeSQL = genPKRangeSQL(configuration); + + int fetchSize = configuration.getInt(com.alibaba.datax.plugin.rdbms.reader.Constant.FETCH_SIZE); + String jdbcURL = configuration.getString(Key.JDBC_URL); + String username = configuration.getString(Key.USERNAME); + String password = configuration.getString(Key.PASSWORD); + String table = configuration.getString(Key.TABLE); + + Connection conn = DBUtil.getConnection(DATABASE_TYPE, jdbcURL, username, password); + Pair minMaxPK = checkSplitPk(conn, pkRangeSQL, fetchSize, table, username, configuration); + DBUtil.closeDBResources(null, null, conn); + return minMaxPK; + } + + public static void precheckSplitPk(Connection conn, String pkRangeSQL, int fetchSize, + String table, String username) { + Pair minMaxPK = checkSplitPk(conn, pkRangeSQL, fetchSize, table, username, null); + if (null == minMaxPK) { + throw DataXException.asDataXException(DBUtilErrorCode.ILLEGAL_SPLIT_PK, + "根据切分主键切分表失败. DataX 仅支持切分主键为一个,并且类型为整数或者字符串类型. 请尝试使用其他的切分主键或者联系 DBA 进行处理."); + } + } + + /** + * 检测splitPk的配置是否正确。 + * configuration为null, 是precheck的逻辑,不需要回写PK_TYPE到configuration中 + */ + private static Pair checkSplitPk(Connection conn, String pkRangeSQL, int fetchSize, String table, + String username, Configuration configuration) { + LOG.info("split pk [sql={}] is running... 
", pkRangeSQL); + ResultSet rs = null; + Pair minMaxPK = null; + try { + try { + rs = DBUtil.query(conn, pkRangeSQL, fetchSize); + } catch (Exception e) { + throw RdbmsException.asQueryException(DATABASE_TYPE, e, pkRangeSQL, table, username); + } + ResultSetMetaData rsMetaData = rs.getMetaData(); + if (isPKTypeValid(rsMetaData)) { + if (isStringType(rsMetaData.getColumnType(1))) { + if (configuration != null) { + configuration + .set(com.alibaba.datax.plugin.rdbms.reader.Constant.PK_TYPE, com.alibaba.datax.plugin.rdbms.reader.Constant.PK_TYPE_STRING); + } + while (DBUtil.asyncResultSetNext(rs)) { + minMaxPK = new ImmutablePair( + rs.getString(1), rs.getString(2)); + } + } else if (isLongType(rsMetaData.getColumnType(1))) { + if (configuration != null) { + configuration.set(com.alibaba.datax.plugin.rdbms.reader.Constant.PK_TYPE, com.alibaba.datax.plugin.rdbms.reader.Constant.PK_TYPE_LONG); + } + + while (DBUtil.asyncResultSetNext(rs)) { + minMaxPK = new ImmutablePair( + rs.getString(1), rs.getString(2)); + + // check: string shouldn't contain '.', for oracle + String minMax = rs.getString(1) + rs.getString(2); + if (StringUtils.contains(minMax, '.')) { + throw DataXException.asDataXException(DBUtilErrorCode.ILLEGAL_SPLIT_PK, + "您配置的DataX切分主键(splitPk)有误. 因为您配置的切分主键(splitPk) 类型 DataX 不支持. DataX 仅支持切分主键为一个,并且类型为整数或者字符串类型. 请尝试使用其他的切分主键或者联系 DBA 进行处理."); + } + } + } else { + throw DataXException.asDataXException(DBUtilErrorCode.ILLEGAL_SPLIT_PK, + "您配置的DataX切分主键(splitPk)有误. 因为您配置的切分主键(splitPk) 类型 DataX 不支持. DataX 仅支持切分主键为一个,并且类型为整数或者字符串类型. 请尝试使用其他的切分主键或者联系 DBA 进行处理."); + } + } else { + throw DataXException.asDataXException(DBUtilErrorCode.ILLEGAL_SPLIT_PK, + "您配置的DataX切分主键(splitPk)有误. 因为您配置的切分主键(splitPk) 类型 DataX 不支持. DataX 仅支持切分主键为一个,并且类型为整数或者字符串类型. 请尝试使用其他的切分主键或者联系 DBA 进行处理."); + } + } catch (DataXException e) { + throw e; + } catch (Exception e) { + throw DataXException.asDataXException(DBUtilErrorCode.ILLEGAL_SPLIT_PK, "DataX尝试切分表发生错误. 请检查您的配置并作出修改.", e); + } finally { + DBUtil.closeDBResources(rs, null, null); + } + + return minMaxPK; + } + + private static boolean isPKTypeValid(ResultSetMetaData rsMetaData) { + boolean ret = false; + try { + int minType = rsMetaData.getColumnType(1); + int maxType = rsMetaData.getColumnType(2); + + boolean isNumberType = isLongType(minType); + + boolean isStringType = isStringType(minType); + + if (minType == maxType && (isNumberType || isStringType)) { + ret = true; + } + } catch (Exception e) { + throw DataXException.asDataXException(DBUtilErrorCode.ILLEGAL_SPLIT_PK, + "DataX获取切分主键(splitPk)字段类型失败. 该错误通常是系统底层异常导致. 请联系旺旺:askdatax或者DBA处理."); + } + return ret; + } + + // warn: Types.NUMERIC is used for oracle! 
because oracle use NUMBER to + // store INT, SMALLINT, INTEGER etc, and only oracle need to concern + // Types.NUMERIC + private static boolean isLongType(int type) { + boolean isValidLongType = type == Types.BIGINT || type == Types.INTEGER + || type == Types.SMALLINT || type == Types.TINYINT; + + switch (SingleTableSplitUtil.DATABASE_TYPE) { + case Oracle: + isValidLongType |= type == Types.NUMERIC; + break; + default: + break; + } + return isValidLongType; + } + + private static boolean isStringType(int type) { + return type == Types.CHAR || type == Types.NCHAR + || type == Types.VARCHAR || type == Types.LONGVARCHAR + || type == Types.NVARCHAR; + } + + private static String genPKRangeSQL(Configuration configuration) { + + String splitPK = configuration.getString(Key.SPLIT_PK).trim(); + String table = configuration.getString(Key.TABLE).trim(); + String where = configuration.getString(Key.WHERE, null); + return genPKSql(splitPK, table, where); + } + + public static String genPKSql(String splitPK, String table, String where) { + + String minMaxTemplate = "SELECT MIN(%s),MAX(%s) FROM %s"; + String pkRangeSQL = String.format(minMaxTemplate, splitPK, splitPK, + table); + if (StringUtils.isNotBlank(where)) { + pkRangeSQL = String.format("%s WHERE (%s AND %s IS NOT NULL)", + pkRangeSQL, where, splitPK); + } + return pkRangeSQL; + } + + /** + * support Number and String split + */ + public static List genSplitSqlForOracle(String splitPK, + String table, String where, Configuration configuration, + int adviceNum) { + if (adviceNum < 1) { + throw new IllegalArgumentException(String.format( + "切分份数不能小于1. 此处:adviceNum=[%s].", adviceNum)); + } else if (adviceNum == 1) { + return null; + } + String whereSql = String.format("%s IS NOT NULL", splitPK); + if (StringUtils.isNotBlank(where)) { + whereSql = String.format(" WHERE (%s) AND (%s) ", whereSql, where); + } else { + whereSql = String.format(" WHERE (%s) ", whereSql); + } + Double percentage = configuration.getDouble(Key.SAMPLE_PERCENTAGE, 0.1); + String sampleSqlTemplate = "SELECT * FROM ( SELECT %s FROM %s SAMPLE (%s) %s ORDER BY DBMS_RANDOM.VALUE) WHERE ROWNUM <= %s ORDER by %s ASC"; + String splitSql = String.format(sampleSqlTemplate, splitPK, table, + percentage, whereSql, adviceNum, splitPK); + + int fetchSize = configuration.getInt(com.alibaba.datax.plugin.rdbms.reader.Constant.FETCH_SIZE, 32); + String jdbcURL = configuration.getString(Key.JDBC_URL); + String username = configuration.getString(Key.USERNAME); + String password = configuration.getString(Key.PASSWORD); + Connection conn = DBUtil.getConnection(DATABASE_TYPE, jdbcURL, + username, password); + LOG.info("split pk [sql={}] is running... ", splitSql); + ResultSet rs = null; + List> splitedRange = new ArrayList>(); + try { + try { + rs = DBUtil.query(conn, splitSql, fetchSize); + } catch (Exception e) { + throw RdbmsException.asQueryException(DATABASE_TYPE, e, + splitSql, table, username); + } + if (configuration != null) { + configuration + .set(com.alibaba.datax.plugin.rdbms.reader.Constant.PK_TYPE, Constant.PK_TYPE_MONTECARLO); + } + ResultSetMetaData rsMetaData = rs.getMetaData(); + while (DBUtil.asyncResultSetNext(rs)) { + ImmutablePair eachPoint = new ImmutablePair( + rs.getObject(1), rsMetaData.getColumnType(1)); + splitedRange.add(eachPoint); + } + } catch (DataXException e) { + throw e; + } catch (Exception e) { + throw DataXException.asDataXException( + DBUtilErrorCode.ILLEGAL_SPLIT_PK, + "DataX尝试切分表发生错误. 
请检查您的配置并作出修改.", e); + } finally { + DBUtil.closeDBResources(rs, null, null); + } + LOG.debug(Json.toJson(splitedRange, null)); + List rangeSql = new ArrayList(); + int splitedRangeSize = splitedRange.size(); + // warn: splitedRangeSize may be 0 or 1,切分规则为IS NULL以及 IS NOT NULL + // demo: Parameter rangeResult can not be null and its length can not <2. detail:rangeResult=[24999930]. + if (splitedRangeSize >= 2) { + // warn: oracle Number is long type here + if (isLongType(splitedRange.get(0).getRight())) { + BigInteger[] integerPoints = new BigInteger[splitedRange.size()]; + for (int i = 0; i < splitedRangeSize; i++) { + integerPoints[i] = new BigInteger(splitedRange.get(i) + .getLeft().toString()); + } + rangeSql.addAll(RdbmsRangeSplitWrap.wrapRange(integerPoints, + splitPK)); + // its ok if splitedRangeSize is 1 + rangeSql.add(RdbmsRangeSplitWrap.wrapFirstLastPoint( + integerPoints[0], integerPoints[splitedRangeSize - 1], + splitPK)); + } else if (isStringType(splitedRange.get(0).getRight())) { + // warn: treated as string type + String[] stringPoints = new String[splitedRange.size()]; + for (int i = 0; i < splitedRangeSize; i++) { + stringPoints[i] = new String(splitedRange.get(i).getLeft() + .toString()); + } + rangeSql.addAll(RdbmsRangeSplitWrap.wrapRange(stringPoints, + splitPK, "'", DATABASE_TYPE)); + // its ok if splitedRangeSize is 1 + rangeSql.add(RdbmsRangeSplitWrap.wrapFirstLastPoint( + stringPoints[0], stringPoints[splitedRangeSize - 1], + splitPK, "'", DATABASE_TYPE)); + } else { + throw DataXException + .asDataXException( + DBUtilErrorCode.ILLEGAL_SPLIT_PK, + "您配置的DataX切分主键(splitPk)有误. 因为您配置的切分主键(splitPk) 类型 DataX 不支持. DataX 仅支持切分主键为一个,并且类型为整数或者字符串类型. 请尝试使用其他的切分主键或者联系 DBA 进行处理."); + } + } + return rangeSql; + } +} \ No newline at end of file diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/util/ConnectionFactory.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/util/ConnectionFactory.java new file mode 100644 index 000000000..3aef46b35 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/util/ConnectionFactory.java @@ -0,0 +1,16 @@ +package com.alibaba.datax.plugin.rdbms.util; + +import java.sql.Connection; + +/** + * Date: 15/3/16 下午2:17 + */ +public interface ConnectionFactory { + + public Connection getConnecttion(); + + public Connection getConnecttionWithoutRetry(); + + public String getConnectionInfo(); + +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/util/Constant.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/util/Constant.java new file mode 100644 index 000000000..3655c4c50 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/util/Constant.java @@ -0,0 +1,28 @@ +package com.alibaba.datax.plugin.rdbms.util; + +public final class Constant { + static final int TIMEOUT_SECONDS = 15; + static final int MAX_TRY_TIMES = 4; + static final int SOCKET_TIMEOUT_INSECOND = 172800; + + public static final String MYSQL_DATABASE = "Unknown database"; + public static final String MYSQL_CONNEXP = "Communications link failure"; + public static final String MYSQL_ACCDENIED = "Access denied"; + public static final String MYSQL_TABLE_NAME_ERR1 = "Table"; + public static final String MYSQL_TABLE_NAME_ERR2 = "doesn't exist"; + public static final String MYSQL_SELECT_PRI = "SELECT 
command denied to user"; + public static final String MYSQL_COLUMN1 = "Unknown column"; + public static final String MYSQL_COLUMN2 = "field list"; + public static final String MYSQL_WHERE = "where clause"; + + public static final String ORACLE_DATABASE = "ORA-12505"; + public static final String ORACLE_CONNEXP = "The Network Adapter could not establish the connection"; + public static final String ORACLE_ACCDENIED = "ORA-01017"; + public static final String ORACLE_TABLE_NAME = "table or view does not exist"; + public static final String ORACLE_SELECT_PRI = "insufficient privileges"; + public static final String ORACLE_SQL = "invalid identifier"; + + public static final String DEFAULT_PROXY_SOCKS_HOST = ""; + + public static final Integer DEFAULT_PROXY_SOCKS_PORT = 0; +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/util/DBUtil.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/util/DBUtil.java new file mode 100644 index 000000000..397e70611 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/util/DBUtil.java @@ -0,0 +1,832 @@ +package com.alibaba.datax.plugin.rdbms.util; + +import com.alibaba.datax.common.exception.DataXException; +import com.alibaba.datax.common.util.Configuration; +import com.alibaba.datax.common.util.RetryUtil; +import com.alibaba.datax.plugin.rdbms.reader.Key; +import com.alibaba.druid.sql.parser.SQLParserUtils; +import com.alibaba.druid.sql.parser.SQLStatementParser; +import com.google.common.util.concurrent.ThreadFactoryBuilder; +import org.apache.commons.lang3.StringUtils; +import org.apache.commons.lang3.tuple.ImmutableTriple; +import org.apache.commons.lang3.tuple.Triple; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.sql.*; +import java.util.*; +import java.util.concurrent.*; + +public final class DBUtil { + private static final Logger LOG = LoggerFactory.getLogger(DBUtil.class); + + private static final ThreadLocal rsExecutors = new ThreadLocal() { + @Override + protected ExecutorService initialValue() { + return Executors.newFixedThreadPool(1, new ThreadFactoryBuilder() + .setNameFormat("rsExecutors-%d") + .setDaemon(true) + .build()); + } + }; + + private static Map driverContainer = new HashMap<>(); + + private DBUtil() { + } + + public static String chooseJdbcUrl(final DataBaseType dataBaseType, + final List jdbcUrls, final String username, + final String password, final String proxyHost, final int proxyPort, final List preSql, + final boolean checkSlave) { + + if (null == jdbcUrls || jdbcUrls.isEmpty()) { + throw DataXException.asDataXException( + DBUtilErrorCode.CONF_ERROR, + String.format("您的jdbcUrl的配置信息有错, 因为jdbcUrl[%s]不能为空. 
请检查您的配置并作出修改.", + StringUtils.join(jdbcUrls, ","))); + } + + try { + return RetryUtil.executeWithRetry(new Callable() { + + @Override + public String call() throws Exception { + boolean connOK = false; + for (String url : jdbcUrls) { + if (StringUtils.isNotBlank(url)) { + url = url.trim(); + if (null != preSql && !preSql.isEmpty()) { + connOK = testConnWithoutRetry(dataBaseType, + url, username, password, preSql, proxyHost, proxyPort); + } else { + connOK = testConnWithoutRetry(dataBaseType, + url, username, password, checkSlave, proxyHost, proxyPort); + } + if (connOK) { + return url; + } + } + } + throw new Exception("DataX无法连接对应的数据库,可能原因是:1) 配置的ip/port/database/jdbc错误,无法连接。2) 配置的username/password错误,鉴权失败。请和DBA确认该数据库的连接信息是否正确。"); +// throw new Exception(DBUtilErrorCode.JDBC_NULL.toString()); + } + }, 7, 1000L, true); + //warn: 7 means 2 minutes + } catch (Exception e) { + throw DataXException.asDataXException( + DBUtilErrorCode.CONN_DB_ERROR, + String.format("数据库连接失败. 因为根据您配置的连接信息,无法从:%s 中找到可连接的jdbcUrl. 请检查您的配置并作出修改.", + StringUtils.join(jdbcUrls, ",")), e); + } + } + + public static String chooseJdbcUrlWithoutRetry(final DataBaseType dataBaseType, + final List jdbcUrls, final String username, + final String password, final String proxyHost, final int proxyPort, + final List preSql, + final boolean checkSlave) throws DataXException { + + if (null == jdbcUrls || jdbcUrls.isEmpty()) { + throw DataXException.asDataXException( + DBUtilErrorCode.CONF_ERROR, + String.format("您的jdbcUrl的配置信息有错, 因为jdbcUrl[%s]不能为空. 请检查您的配置并作出修改.", + StringUtils.join(jdbcUrls, ","))); + } + + boolean connOK = false; + for (String url : jdbcUrls) { + if (StringUtils.isNotBlank(url)) { + url = url.trim(); + if (null != preSql && !preSql.isEmpty()) { + connOK = testConnWithoutRetry(dataBaseType, + url, username, password, preSql, proxyHost, proxyPort); + } else { + try { + connOK = testConnWithoutRetry(dataBaseType, + url, username, password, checkSlave, proxyHost, proxyPort); + } catch (Exception e) { + throw DataXException.asDataXException( + DBUtilErrorCode.CONN_DB_ERROR, + String.format("数据库连接失败. 因为根据您配置的连接信息,无法从:%s 中找到可连接的jdbcUrl. 请检查您的配置并作出修改.", + StringUtils.join(jdbcUrls, ",")), e); + } + } + if (connOK) { + return url; + } + } + } + throw DataXException.asDataXException( + DBUtilErrorCode.CONN_DB_ERROR, + String.format("数据库连接失败. 因为根据您配置的连接信息,无法从:%s 中找到可连接的jdbcUrl. 
请检查您的配置并作出修改.", + StringUtils.join(jdbcUrls, ","))); + } + + /** + * 检查slave的库中的数据是否已到凌晨00:00 + * 如果slave同步的数据还未到00:00返回false + * 否则范围true + * + * @author ZiChi + * @version 1.0 2014-12-01 + */ + private static boolean isSlaveBehind(Connection conn) { + try { + ResultSet rs = query(conn, "SHOW VARIABLES LIKE 'read_only'"); + if (DBUtil.asyncResultSetNext(rs)) { + String readOnly = rs.getString("Value"); + if ("ON".equalsIgnoreCase(readOnly)) { //备库 + ResultSet rs1 = query(conn, "SHOW SLAVE STATUS"); + if (DBUtil.asyncResultSetNext(rs1)) { + String ioRunning = rs1.getString("Slave_IO_Running"); + String sqlRunning = rs1.getString("Slave_SQL_Running"); + long secondsBehindMaster = rs1.getLong("Seconds_Behind_Master"); + if ("Yes".equalsIgnoreCase(ioRunning) && "Yes".equalsIgnoreCase(sqlRunning)) { + ResultSet rs2 = query(conn, "SELECT TIMESTAMPDIFF(SECOND, CURDATE(), NOW())"); + DBUtil.asyncResultSetNext(rs2); + long secondsOfDay = rs2.getLong(1); + return secondsBehindMaster > secondsOfDay; + } else { + return true; + } + } else { + LOG.warn("SHOW SLAVE STATUS has no result"); + } + } + } else { + LOG.warn("SHOW VARIABLES like 'read_only' has no result"); + } + } catch (Exception e) { + LOG.warn("checkSlave failed, errorMessage:[{}].", e.getMessage()); + } + return false; + } + + /** + * 检查表是否具有insert 权限 + * insert on *.* 或者 insert on database.* 时验证通过 + * 当insert on database.tableName时,确保tableList中的所有table有insert 权限,验证通过 + * 其它验证都不通过 + * + * @author ZiChi + * @version 1.0 2015-01-28 + */ + public static boolean hasInsertPrivilege(DataBaseType dataBaseType, String jdbcURL, String userName, String password, List tableList) { + /*准备参数*/ + + String[] urls = jdbcURL.split("/"); + String dbName; + if (urls != null && urls.length != 0) { + dbName = urls[3]; + } else { + return false; + } + + String dbPattern = "`" + dbName + "`.*"; + Collection tableNames = new HashSet(tableList.size()); + tableNames.addAll(tableList); + + Connection connection = connect(dataBaseType, jdbcURL, userName, password); + try { + ResultSet rs = query(connection, "SHOW GRANTS FOR " + userName); + while (DBUtil.asyncResultSetNext(rs)) { + String grantRecord = rs.getString("Grants for " + userName + "@%"); + String[] params = grantRecord.split("\\`"); + if (params != null && params.length >= 3) { + String tableName = params[3]; + if(params[0].contains("INSERT") && !tableName.equals("*") && tableNames.contains(tableName)) { + tableNames.remove(tableName); + } + } else { + if (grantRecord.contains("INSERT") || grantRecord.contains("ALL PRIVILEGES")) { + if (grantRecord.contains("*.*")) { + return true; + }else if (grantRecord.contains(dbPattern)) { + return true; + } + } + } + } + } catch (Exception e) { + LOG.warn("Check the database has the Insert Privilege failed, errorMessage:[{}]", e.getMessage()); + } + return tableNames.isEmpty(); + } + + public static boolean checkInsertPrivilege(DataBaseType dataBaseType, String jdbcURL, String userName, String password, + String proxyHost, int proxyPort, + List tableList) { + Connection connection = getConnectionWithoutRetry(dataBaseType, jdbcURL, userName, password, proxyHost, proxyPort); + String insertTemplate = "insert into %s(select * from %s where 1 = 2)"; + + boolean hasInsertPrivilege = true; + Statement insertStmt = null; + for (String tableName : tableList) { + String checkInsertPrivilegeSql = String.format(insertTemplate, tableName, tableName); + try { + insertStmt = connection.createStatement(); + executeSqlWithoutResultSet(insertStmt, checkInsertPrivilegeSql); + } 
catch (Exception e) { + if (DataBaseType.Oracle.equals(dataBaseType)) { + if (e.getMessage() != null && e.getMessage().contains("insufficient privileges")) { + hasInsertPrivilege = false; + LOG.warn("User [" + userName + "] has no 'insert' privilege on table[" + tableName + "], errorMessage:[{}]", e.getMessage()); + } + } else { + hasInsertPrivilege = false; + LOG.warn("User [" + userName + "] has no 'insert' privilege on table[" + tableName + "], errorMessage:[{}]", e.getMessage()); + } + } + } + try { + connection.close(); + } catch (SQLException e) { + LOG.warn("connection close failed, " + e.getMessage()); + } + return hasInsertPrivilege; + } + + public static boolean checkDeletePrivilege(DataBaseType dataBaseType, String jdbcURL, String userName, String password, + String proxyHost, int proxyPort, + List tableList) { + Connection connection = getConnectionWithoutRetry(dataBaseType, jdbcURL, userName, password, proxyHost, proxyPort); + String deleteTemplate = "delete from %s WHERE 1 = 2"; + + boolean hasInsertPrivilege = true; + Statement deleteStmt = null; + for (String tableName : tableList) { + String checkDeletePrivilegeSQL = String.format(deleteTemplate, tableName); + try { + deleteStmt = connection.createStatement(); + executeSqlWithoutResultSet(deleteStmt, checkDeletePrivilegeSQL); + } catch (Exception e) { + hasInsertPrivilege = false; + LOG.warn("User [" + userName + "] has no 'delete' privilege on table[" + tableName + "], errorMessage:[{}]", e.getMessage()); + } + } + try { + connection.close(); + } catch (SQLException e) { + LOG.warn("connection close failed, " + e.getMessage()); + } + return hasInsertPrivilege; + } + + public static boolean needCheckDeletePrivilege(Configuration originalConfig) { + List allSqls = new ArrayList(); + List preSQLs = originalConfig.getList(Key.PRE_SQL, String.class); + List postSQLs = originalConfig.getList(Key.POST_SQL, String.class); + if (preSQLs != null && !preSQLs.isEmpty()) { + allSqls.addAll(preSQLs); + } + if (postSQLs != null && !postSQLs.isEmpty()) { + allSqls.addAll(postSQLs); + } + for (String sql : allSqls) { + if (StringUtils.isNotBlank(sql)) { + if (sql.trim().toUpperCase().startsWith("DELETE")) { + return true; + } + } + } + return false; + } + + /** + * Get direct JDBC connection + *

+ * If connecting fails, retry up to MAX_TRY_TIMES times. + *
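For context, getConnection() below delegates to DataX's RetryUtil; a usage sketch of the same retry contract (URL and credentials are hypothetical):

```java
// Nine attempts, 1000 ms initial sleep, exponential backoff between attempts --
// the exact arguments getConnection() passes to RetryUtil.executeWithRetry.
static Connection connectWithRetry(final String jdbcUrl) throws Exception {
    return RetryUtil.executeWithRetry(new Callable<Connection>() {
        @Override
        public Connection call() {
            return DBUtil.getConnectionWithoutRetry(
                    DataBaseType.MySql, jdbcUrl, "reader", "secret"); // hypothetical credentials
        }
    }, 9, 1000L, true);
}
```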

+ * NOTE: In DataX, we don't need connection pool in fact + */ + public static Connection getConnection(final DataBaseType dataBaseType, + final String jdbcUrl, final String username, final String password) { + + return getConnection(dataBaseType, jdbcUrl, username, password, String.valueOf(Constant.SOCKET_TIMEOUT_INSECOND * 1000), null); + } + + public static Connection getConnection(final DataBaseType dataBaseType, + final String jdbcUrl, final String username, final String password, String proxyHost, int proxyPort) { + Properties props = new Properties(); + if(StringUtils.isNotBlank(proxyHost) && proxyPort > 0) { + props.put("socksProxyHost", proxyHost); + props.put("socksProxyPort", String.valueOf(proxyPort)); + } + return getConnection(dataBaseType, jdbcUrl, username, password, String.valueOf(Constant.SOCKET_TIMEOUT_INSECOND * 1000), props); + } + + /** + * @param dataBaseType + * @param jdbcUrl + * @param username + * @param password + * @param socketTimeout 设置socketTimeout,单位ms,String类型 + * @return + */ + public static Connection getConnection(final DataBaseType dataBaseType, + final String jdbcUrl, final String username, final String password, final String socketTimeout, Properties defaults) { + + try { + return RetryUtil.executeWithRetry(new Callable() { + @Override + public Connection call() throws Exception { + return DBUtil.connect(dataBaseType, jdbcUrl, username, + password, socketTimeout, defaults); + } + }, 9, 1000L, true); + } catch (Exception e) { + throw DataXException.asDataXException( + DBUtilErrorCode.CONN_DB_ERROR, + String.format("数据库连接失败. 因为根据您配置的连接信息:%s获取数据库连接失败. 请检查您的配置并作出修改.", jdbcUrl), e); + } + } + + /** + * Get direct JDBC connection + *

+ * Unlike getConnection(), this variant makes a single attempt and fails fast, without retrying. + *
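A usage sketch for the SOCKS-proxy overload declared below (all connection details hypothetical); a blank host or non-positive port simply skips the proxy properties:

```java
Connection conn = DBUtil.getConnectionWithoutRetry(
        DataBaseType.MySql,
        "jdbc:mysql://10.1.1.20:3306/demo",   // hypothetical source database
        "reader", "secret",
        "10.0.0.5", 1080);                    // socksProxyHost / socksProxyPort handed to the driver
try {
    // run fail-fast prechecks against the source ...
} finally {
    DBUtil.closeDBResources(null, conn);
}
```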

+ * NOTE: In DataX, we don't need connection pool in fact + */ + public static Connection getConnectionWithoutRetry(final DataBaseType dataBaseType, + final String jdbcUrl, final String username, final String password) { + return getConnectionWithoutRetry(dataBaseType, jdbcUrl, username, + password, String.valueOf(Constant.SOCKET_TIMEOUT_INSECOND * 1000)); + } + + public static Connection getConnectionWithoutRetry(final DataBaseType dataBaseType, + final String jdbcUrl, final String username, final String password, String proxyHost, int proxyPort){ + Properties props = new Properties(); + if(StringUtils.isNotBlank(proxyHost) && proxyPort > 0) { + props.put("socksProxyHost", proxyHost); + props.put("socksProxyPort", String.valueOf(proxyPort)); + } + return connect(dataBaseType, jdbcUrl, username, password, String.valueOf(Constant.SOCKET_TIMEOUT_INSECOND * 1000), props); + } + public static Connection getConnectionWithoutRetry(final DataBaseType dataBaseType, + final String jdbcUrl, final String username, final String password, String socketTimeout) { + return DBUtil.connect(dataBaseType, jdbcUrl, username, + password, socketTimeout, null); + } + + private static synchronized Connection connect(DataBaseType dataBaseType, + String url, String user, String pass) { + return connect(dataBaseType, url, user, pass, String.valueOf(Constant.SOCKET_TIMEOUT_INSECOND * 1000), null); + } + + private static synchronized Connection connect(DataBaseType dataBaseType, + String url, String user, String pass, String socketTimeout, Properties defaultProps) { + + //ob10的处理 + if (url.startsWith(com.alibaba.datax.plugin.rdbms.writer.Constant.OB10_SPLIT_STRING) && dataBaseType == DataBaseType.MySql) { + String[] ss = url.split(com.alibaba.datax.plugin.rdbms.writer.Constant.OB10_SPLIT_STRING_PATTERN); + if (ss.length != 3) { + throw DataXException + .asDataXException( + DBUtilErrorCode.JDBC_OB10_ADDRESS_ERROR, "JDBC OB10格式错误,请联系askdatax"); + } + LOG.info("this is ob1_0 jdbc url."); + user = ss[1].trim() + ":" + user; + url = ss[2]; + LOG.info("this is ob1_0 jdbc url. user=" + user + " :url=" + url); + } + Properties prop = null != defaultProps? new Properties(defaultProps) : new Properties(); + prop.put("user", user); + prop.put("password", pass); + + if (dataBaseType == DataBaseType.Oracle) { + //oracle.net.READ_TIMEOUT for jdbc versions < 10.1.0.5 oracle.jdbc.ReadTimeout for jdbc versions >=10.1.0.5 + // unit ms + prop.put("oracle.jdbc.ReadTimeout", socketTimeout); + } + return connect(dataBaseType, url, prop); + } + + private static synchronized Connection connect(DataBaseType dataBaseType, + String url, Properties prop) { + Driver driver = null; + try { + Class clazz = Class.forName(dataBaseType.getDriverClassName(), true, Thread.currentThread().getContextClassLoader()); + driver = new DriverWrapper((Driver)clazz.newInstance()); + DriverManager.registerDriver(driver); + DriverManager.setLoginTimeout(Constant.TIMEOUT_SECONDS); + return DriverManager.getConnection(url, prop); + } catch (Exception e) { + throw RdbmsException.asConnException(dataBaseType, e, prop.getProperty("user"), null); + } finally{ + try { + DriverManager.deregisterDriver(driver); + } catch (SQLException e) { + //ignore + } + } + } + + /** + * a wrapped method to execute select-like sql statement . + * + * @param conn Database connection . + * @param sql sql statement to be executed + * @return a {@link ResultSet} + * @throws SQLException if occurs SQLException. 
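A note on fetchSize, grounded in how PreCheckTask picks it: the MySQL driver streams rows one at a time only when fetchSize is Integer.MIN_VALUE, so this patch special-cases MySQL/DRDS. A sketch (not in the patch; table name hypothetical):

```java
// Pick a fetch size per database type, mirroring PreCheckTask. MySQL/DRDS
// drivers stream rows only when fetchSize == Integer.MIN_VALUE; other
// databases treat fetchSize as an ordinary batching hint.
static ResultSet openCursor(Connection conn, DataBaseType dataBaseType) throws SQLException {
    int fetchSize = (DataBaseType.MySql.equals(dataBaseType) || DataBaseType.DRDS.equals(dataBaseType))
            ? Integer.MIN_VALUE
            : 1024; // hypothetical hint
    return DBUtil.query(conn, "SELECT id FROM t_demo", fetchSize); // t_demo is hypothetical
}
```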
+ */ + public static ResultSet query(Connection conn, String sql, int fetchSize) + throws SQLException { + // 默认3600 s 的query Timeout + return query(conn, sql, fetchSize, Constant.SOCKET_TIMEOUT_INSECOND); + } + + /** + * a wrapped method to execute select-like sql statement . + * + * @param conn Database connection . + * @param sql sql statement to be executed + * @param fetchSize + * @param queryTimeout unit:second + * @return + * @throws SQLException + */ + public static ResultSet query(Connection conn, String sql, int fetchSize, int queryTimeout) + throws SQLException { + // make sure autocommit is off + conn.setAutoCommit(false); + Statement stmt = conn.createStatement(ResultSet.TYPE_FORWARD_ONLY, + ResultSet.CONCUR_READ_ONLY); + stmt.setFetchSize(fetchSize); + stmt.setQueryTimeout(queryTimeout); + return query(stmt, sql); + } + + /** + * a wrapped method to execute select-like sql statement . + * + * @param stmt {@link Statement} + * @param sql sql statement to be executed + * @return a {@link ResultSet} + * @throws SQLException if occurs SQLException. + */ + public static ResultSet query(Statement stmt, String sql) + throws SQLException { + return stmt.executeQuery(sql); + } + + public static void executeSqlWithoutResultSet(Statement stmt, String sql) + throws SQLException { + stmt.execute(sql); + } + + /** + * Close {@link ResultSet}, {@link Statement} referenced by this + * {@link ResultSet} + * + * @param rs {@link ResultSet} to be closed + * @throws IllegalArgumentException + */ + public static void closeResultSet(ResultSet rs) { + try { + if (null != rs) { + Statement stmt = rs.getStatement(); + if (null != stmt) { + stmt.close(); + stmt = null; + } + rs.close(); + } + rs = null; + } catch (SQLException e) { + throw new IllegalStateException(e); + } + } + + public static void closeDBResources(ResultSet rs, Statement stmt, + Connection conn) { + if (null != rs) { + try { + rs.close(); + } catch (SQLException unused) { + } + } + + if (null != stmt) { + try { + stmt.close(); + } catch (SQLException unused) { + } + } + + if (null != conn) { + try { + conn.close(); + } catch (SQLException unused) { + } + } + } + + public static void closeDBResources(Statement stmt, Connection conn) { + closeDBResources(null, stmt, conn); + } + + public static List getTableColumns(DataBaseType dataBaseType, + String jdbcUrl, String user, String pass, String proxyHost, int proxyPort, String tableName) { + Connection conn = getConnection(dataBaseType, jdbcUrl, user, pass, proxyHost, proxyPort); + return getTableColumnsByConn(dataBaseType, conn, tableName, "jdbcUrl:" + jdbcUrl); + } + + public static List getTableColumnsByConn(DataBaseType dataBaseType, Connection conn, String tableName, String basicMsg) { + List columns = new ArrayList(); + Statement statement = null; + ResultSet rs = null; + String queryColumnSql = null; + try { + statement = conn.createStatement(); + queryColumnSql = String.format("select * from %s where 1=2", + tableName); + rs = statement.executeQuery(queryColumnSql); + ResultSetMetaData rsMetaData = rs.getMetaData(); + for (int i = 0, len = rsMetaData.getColumnCount(); i < len; i++) { + columns.add(rsMetaData.getColumnName(i + 1)); + } + + } catch (SQLException e) { + throw RdbmsException.asQueryException(dataBaseType, e, queryColumnSql, tableName, null); + } finally { + DBUtil.closeDBResources(rs, statement, conn); + } + + return columns; + } + + /** + * @return Left:ColumnName Middle:ColumnType Right:ColumnTypeName + */ + public static Triple, List, List> getColumnMetaData( + 
DataBaseType dataBaseType, String jdbcUrl, String user, + String pass, String tableName, String column) { + Connection conn = null; + try { + conn = getConnection(dataBaseType, jdbcUrl, user, pass); + return getColumnMetaData(conn, tableName, column); + } finally { + DBUtil.closeDBResources(null, null, conn); + } + } + + /** + * @return Left:ColumnName Middle:ColumnType Right:ColumnTypeName + */ + public static Triple, List, List> getColumnMetaData( + Connection conn, String tableName, String column) { + Statement statement = null; + ResultSet rs = null; + + Triple, List, List> columnMetaData = new ImmutableTriple, List, List>( + new ArrayList(), new ArrayList(), + new ArrayList()); + try { + statement = conn.createStatement(); + String queryColumnSql = "select " + column + " from " + tableName + + " where 1=2"; + + rs = statement.executeQuery(queryColumnSql); + ResultSetMetaData rsMetaData = rs.getMetaData(); + for (int i = 0, len = rsMetaData.getColumnCount(); i < len; i++) { + + columnMetaData.getLeft().add(rsMetaData.getColumnName(i + 1)); + columnMetaData.getMiddle().add(rsMetaData.getColumnType(i + 1)); + columnMetaData.getRight().add( + rsMetaData.getColumnTypeName(i + 1)); + } + return columnMetaData; + + } catch (SQLException e) { + throw DataXException + .asDataXException(DBUtilErrorCode.GET_COLUMN_INFO_FAILED, + String.format("获取表:%s 的字段的元信息时失败. 请联系 DBA 核查该库、表信息.", tableName), e); + } finally { + DBUtil.closeDBResources(rs, statement, null); + } + } + + public static boolean testConnWithoutRetry(DataBaseType dataBaseType, + String url, String user, String pass, boolean checkSlave, String proxyHost, int proxyPort) { + Connection connection = null; + + try { + connection = getConnectionWithoutRetry(dataBaseType, url, user, pass, proxyHost, proxyPort); + if (connection != null) { + if (dataBaseType.equals(DataBaseType.MySql) && checkSlave) { + //dataBaseType.MySql + return !isSlaveBehind(connection); + } else { + return true; + } + } + } catch (Exception e) { + LOG.warn("test connection of [{}] failed, for {}.", url, + e.getMessage()); + } finally { + DBUtil.closeDBResources(null, connection); + } + return false; + } + + public static boolean testConnWithoutRetry(DataBaseType dataBaseType, + String url, String user, String pass, List preSql, String proxyHost, int proxyPort) { + Connection connection = null; + try { + connection = getConnectionWithoutRetry(dataBaseType, url, user, pass, proxyHost, proxyPort); + if (null != connection) { + for (String pre : preSql) { + if (doPreCheck(connection, pre) == false) { + LOG.warn("doPreCheck failed."); + return false; + } + } + return true; + } + } catch (Exception e) { + LOG.warn("test connection of [{}] failed, for {}.", url, + e.getMessage()); + } finally { + DBUtil.closeDBResources(null, connection); + } + + return false; + } + + public static boolean isOracleMaster(final String url, final String user, final String pass) { + try { + return RetryUtil.executeWithRetry(new Callable() { + @Override + public Boolean call() throws Exception { + Connection conn = null; + try { + conn = connect(DataBaseType.Oracle, url, user, pass); + ResultSet rs = query(conn, "select DATABASE_ROLE from V$DATABASE"); + if (DBUtil.asyncResultSetNext(rs, 5)) { + String role = rs.getString("DATABASE_ROLE"); + return "PRIMARY".equalsIgnoreCase(role); + } + throw DataXException.asDataXException(DBUtilErrorCode.RS_ASYNC_ERROR, + String.format("select DATABASE_ROLE from V$DATABASE failed,请检查您的jdbcUrl:%s.", url)); + } finally { + DBUtil.closeDBResources(null, conn); + 
} + } + }, 3, 1000L, true); + } catch (Exception e) { + throw DataXException.asDataXException(DBUtilErrorCode.CONN_DB_ERROR, + String.format("select DATABASE_ROLE from V$DATABASE failed, url: %s", url), e); + } + } + + public static ResultSet query(Connection conn, String sql) + throws SQLException { + Statement stmt = conn.createStatement(ResultSet.TYPE_FORWARD_ONLY, + ResultSet.CONCUR_READ_ONLY); + //默认3600 seconds + stmt.setQueryTimeout(Constant.SOCKET_TIMEOUT_INSECOND); + return query(stmt, sql); + } + + private static boolean doPreCheck(Connection conn, String pre) { + ResultSet rs = null; + try { + rs = query(conn, pre); + + int checkResult = -1; + if (DBUtil.asyncResultSetNext(rs)) { + checkResult = rs.getInt(1); + if (DBUtil.asyncResultSetNext(rs)) { + LOG.warn( + "pre check failed. It should return one result:0, pre:[{}].", + pre); + return false; + } + + } + + if (0 == checkResult) { + return true; + } + + LOG.warn( + "pre check failed. It should return one result:0, pre:[{}].", + pre); + } catch (Exception e) { + LOG.warn("pre check failed. pre:[{}], errorMessage:[{}].", pre, + e.getMessage()); + } finally { + DBUtil.closeResultSet(rs); + } + return false; + } + + // warn:until now, only oracle need to handle session config. + public static void dealWithSessionConfig(Connection conn, + Configuration config, DataBaseType databaseType, String message) { + List sessionConfig = null; + switch (databaseType) { + case Oracle: + sessionConfig = config.getList(Key.SESSION, + new ArrayList(), String.class); + DBUtil.doDealWithSessionConfig(conn, sessionConfig, message); + break; + case DRDS: + // 用于关闭 drds 的分布式事务开关 + sessionConfig = new ArrayList(); + sessionConfig.add("set transaction policy 4"); + DBUtil.doDealWithSessionConfig(conn, sessionConfig, message); + break; + case MySql: + sessionConfig = config.getList(Key.SESSION, + new ArrayList(), String.class); + DBUtil.doDealWithSessionConfig(conn, sessionConfig, message); + break; + default: + break; + } + } + + private static void doDealWithSessionConfig(Connection conn, + List sessions, String message) { + if (null == sessions || sessions.isEmpty()) { + return; + } + + Statement stmt; + try { + stmt = conn.createStatement(); + } catch (SQLException e) { + throw DataXException + .asDataXException(DBUtilErrorCode.SET_SESSION_ERROR, String + .format("session配置有误. 因为根据您的配置执行 session 设置失败. 上下文信息是:[%s]. 请检查您的配置并作出修改.", message), + e); + } + + for (String sessionSql : sessions) { + LOG.info("execute sql:[{}]", sessionSql); + try { + DBUtil.executeSqlWithoutResultSet(stmt, sessionSql); + } catch (SQLException e) { + throw DataXException.asDataXException( + DBUtilErrorCode.SET_SESSION_ERROR, String.format( + "session配置有误. 因为根据您的配置执行 session 设置失败. 上下文信息是:[%s]. 
请检查您的配置并作出修改.", message), e); + } + } + DBUtil.closeDBResources(stmt, null); + } + + public static void sqlValid(String sql, DataBaseType dataBaseType) { + SQLStatementParser statementParser = SQLParserUtils.createSQLStatementParser(sql, dataBaseType.getTypeName()); + statementParser.parseStatementList(); + } + + /** + * 异步获取resultSet的next(),注意,千万不能应用在数据的读取中。只能用在meta的获取 + * + * @param resultSet + * @return + */ + public static boolean asyncResultSetNext(final ResultSet resultSet) { + return asyncResultSetNext(resultSet, 3600); + } + + public static boolean asyncResultSetNext(final ResultSet resultSet, int timeout) { + Future future = rsExecutors.get().submit(new Callable() { + @Override + public Boolean call() throws Exception { + return resultSet.next(); + } + }); + try { + return future.get(timeout, TimeUnit.SECONDS); + } catch (Exception e) { + throw DataXException.asDataXException( + DBUtilErrorCode.RS_ASYNC_ERROR, "异步获取ResultSet失败", e); + } + } + + public static void loadDriverClass(String pluginType, String pluginName) { + try { + String pluginJsonPath = StringUtils.join( + new String[]{System.getProperty("datax.home"), "plugin", + pluginType, + String.format("%s%s", pluginName, pluginType), + "plugin.json"}, File.separator); + Configuration configuration = Configuration.from(new File( + pluginJsonPath)); + List drivers = configuration.getList("drivers", + String.class); + for (String driver : drivers) { + Class.forName(driver); + } + } catch (ClassNotFoundException e) { + throw DataXException.asDataXException(DBUtilErrorCode.CONF_ERROR, + "数据库驱动加载错误, 请确认libs目录有驱动jar包且plugin.json中drivers配置驱动类正确!", + e); + } + } +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/util/DBUtilErrorCode.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/util/DBUtilErrorCode.java new file mode 100644 index 000000000..aee262fa8 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/util/DBUtilErrorCode.java @@ -0,0 +1,95 @@ +package com.alibaba.datax.plugin.rdbms.util; + +import com.alibaba.datax.common.spi.ErrorCode; + +//TODO +public enum DBUtilErrorCode implements ErrorCode { + //连接错误 + MYSQL_CONN_USERPWD_ERROR("MYSQLErrCode-01", "数据库用户名或者密码错误,请检查填写的账号密码或者联系DBA确认账号和密码是否正确"), + MYSQL_CONN_IPPORT_ERROR("MYSQLErrCode-02", "数据库服务的IP地址或者Port错误,请检查填写的IP地址和Port或者联系DBA确认IP地址和Port是否正确。如果是同步中心用户请联系DBA确认idb上录入的IP和PORT信息和数据库的当前实际信息是一致的"), + MYSQL_CONN_DB_ERROR("MYSQLErrCode-03", "数据库名称错误,请检查数据库实例名称或者联系DBA确认该实例是否存在并且在正常服务"), + + ORACLE_CONN_USERPWD_ERROR("ORACLEErrCode-01", "数据库用户名或者密码错误,请检查填写的账号密码或者联系DBA确认账号和密码是否正确"), + ORACLE_CONN_IPPORT_ERROR("ORACLEErrCode-02", "数据库服务的IP地址或者Port错误,请检查填写的IP地址和Port或者联系DBA确认IP地址和Port是否正确。如果是同步中心用户请联系DBA确认idb上录入的IP和PORT信息和数据库的当前实际信息是一致的"), + ORACLE_CONN_DB_ERROR("ORACLEErrCode-03", "数据库名称错误,请检查数据库实例名称或者联系DBA确认该实例是否存在并且在正常服务"), + + //execute query错误 + MYSQL_QUERY_TABLE_NAME_ERROR("MYSQLErrCode-04", "表不存在,请检查表名或者联系DBA确认该表是否存在"), + MYSQL_QUERY_SQL_ERROR("MYSQLErrCode-05", "SQL语句执行出错,请检查Where条件是否存在拼写或语法错误"), + MYSQL_QUERY_COLUMN_ERROR("MYSQLErrCode-06", "Column信息错误,请检查该列是否存在,如果是常量或者变量,请使用英文单引号’包起来"), + MYSQL_QUERY_SELECT_PRI_ERROR("MYSQLErrCode-07", "读表数据出错,因为账号没有读表的权限,请联系DBA确认该账号的权限并授权"), + + ORACLE_QUERY_TABLE_NAME_ERROR("ORACLEErrCode-04", "表不存在,请检查表名或者联系DBA确认该表是否存在"), + ORACLE_QUERY_SQL_ERROR("ORACLEErrCode-05", "SQL语句执行出错,原因可能是你填写的列不存在或者where条件不符合要求,1,请检查该列是否存在,如果是常量或者变量,请使用英文单引号’包起来; 2,请检查Where条件是否存在拼写或语法错误"), + 
ORACLE_QUERY_SELECT_PRI_ERROR("ORACLEErrCode-06", "读表数据出错,因为账号没有读表的权限,请联系DBA确认该账号的权限并授权"), + ORACLE_QUERY_SQL_PARSER_ERROR("ORACLEErrCode-07", "SQL语法出错,请检查Where条件是否存在拼写或语法错误"), + + //PreSql,Post Sql错误 + MYSQL_PRE_SQL_ERROR("MYSQLErrCode-08", "PreSQL语法错误,请检查"), + MYSQL_POST_SQL_ERROR("MYSQLErrCode-09", "PostSql语法错误,请检查"), + MYSQL_QUERY_SQL_PARSER_ERROR("MYSQLErrCode-10", "SQL语法出错,请检查Where条件是否存在拼写或语法错误"), + + ORACLE_PRE_SQL_ERROR("ORACLEErrCode-08", "PreSQL语法错误,请检查"), + ORACLE_POST_SQL_ERROR("ORACLEErrCode-09", "PostSql语法错误,请检查"), + + //SplitPK 错误 + MYSQL_SPLIT_PK_ERROR("MYSQLErrCode-11", "SplitPK错误,请检查"), + ORACLE_SPLIT_PK_ERROR("ORACLEErrCode-10", "SplitPK错误,请检查"), + + //Insert,Delete 权限错误 + MYSQL_INSERT_ERROR("MYSQLErrCode-12", "数据库没有写权限,请联系DBA"), + MYSQL_DELETE_ERROR("MYSQLErrCode-13", "数据库没有Delete权限,请联系DBA"), + ORACLE_INSERT_ERROR("ORACLEErrCode-11", "数据库没有写权限,请联系DBA"), + ORACLE_DELETE_ERROR("ORACLEErrCode-12", "数据库没有Delete权限,请联系DBA"), + + JDBC_NULL("DBUtilErrorCode-20", "JDBC URL为空,请检查配置"), + JDBC_OB10_ADDRESS_ERROR("DBUtilErrorCode-OB10-01", "JDBC OB10格式错误,请联系askdatax"), + CONF_ERROR("DBUtilErrorCode-00", "您的配置错误."), + CONN_DB_ERROR("DBUtilErrorCode-10", "连接数据库失败. 请检查您的 账号、密码、数据库名称、IP、Port或者向 DBA 寻求帮助(注意网络环境)."), + GET_COLUMN_INFO_FAILED("DBUtilErrorCode-01", "获取表字段相关信息失败."), + UNSUPPORTED_TYPE("DBUtilErrorCode-12", "不支持的数据库类型. 请注意查看 DataX 已经支持的数据库类型以及数据库版本."), + COLUMN_SPLIT_ERROR("DBUtilErrorCode-13", "根据主键进行切分失败."), + SET_SESSION_ERROR("DBUtilErrorCode-14", "设置 session 失败."), + RS_ASYNC_ERROR("DBUtilErrorCode-15", "异步获取ResultSet next失败."), + + REQUIRED_VALUE("DBUtilErrorCode-03", "您缺失了必须填写的参数值."), + ILLEGAL_VALUE("DBUtilErrorCode-02", "您填写的参数值不合法."), + ILLEGAL_SPLIT_PK("DBUtilErrorCode-04", "您填写的主键列不合法, DataX 仅支持切分主键为一个,并且类型为整数或者字符串类型."), + SPLIT_FAILED_ILLEGAL_SQL("DBUtilErrorCode-15", "DataX尝试切分表时, 执行数据库 Sql 失败. 请检查您的配置 table/splitPk/where 并作出修改."), + SQL_EXECUTE_FAIL("DBUtilErrorCode-06", "执行数据库 Sql 失败, 请检查您的配置的 column/table/where/querySql或者向 DBA 寻求帮助."), + + // only for reader + READ_RECORD_FAIL("DBUtilErrorCode-07", "读取数据库数据失败. 请检查您的配置的 column/table/where/querySql或者向 DBA 寻求帮助."), + TABLE_QUERYSQL_MIXED("DBUtilErrorCode-08", "您配置凌乱了. 不能同时既配置table又配置querySql"), + TABLE_QUERYSQL_MISSING("DBUtilErrorCode-09", "您配置错误. table和querySql 应该并且只能配置一个."), + + // only for writer + WRITE_DATA_ERROR("DBUtilErrorCode-05", "往您配置的写入表中写入数据时失败."), + NO_INSERT_PRIVILEGE("DBUtilErrorCode-11", "数据库没有写权限,请联系DBA"), + NO_DELETE_PRIVILEGE("DBUtilErrorCode-16", "数据库没有DELETE权限,请联系DBA"),; + + private final String code; + + private final String description; + + private DBUtilErrorCode(String code, String description) { + this.code = code; + this.description = description; + } + + @Override + public String getCode() { + return this.code; + } + + @Override + public String getDescription() { + return this.description; + } + + @Override + public String toString() { + return String.format("Code:[%s], Description:[%s]. 
", this.code, + this.description); + } +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/util/DataBaseType.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/util/DataBaseType.java new file mode 100644 index 000000000..0ebfa5efb --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/util/DataBaseType.java @@ -0,0 +1,199 @@ +package com.alibaba.datax.plugin.rdbms.util; + +import com.alibaba.datax.common.exception.DataXException; + +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +/** + * refer:http://blog.csdn.net/ring0hx/article/details/6152528 + *

+ */ +public enum DataBaseType { + MySql("mysql", "com.mysql.jdbc.Driver"), + Tddl("mysql", "com.mysql.jdbc.Driver"), + DRDS("drds", "com.mysql.jdbc.Driver"), + Oracle("oracle", "oracle.jdbc.OracleDriver"), + SQLServer("sqlserver", "com.microsoft.sqlserver.jdbc.SQLServerDriver"), + PostgreSQL("postgresql", "org.postgresql.Driver"), + RDBMS("rdbms", "DataBaseType"), + DB2("db2", "com.ibm.db2.jcc.DB2Driver"), + ADS("ads", "com.mysql.jdbc.Driver"); + + + private String typeName; + private String driverClassName; + + DataBaseType(String typeName, String driverClassName) { + this.typeName = typeName; + this.driverClassName = driverClassName; + } + + public String getDriverClassName() { + return this.driverClassName; + } + + public String appendJDBCSuffixForReader(String jdbc) { + String result = jdbc; + String suffix = null; + switch (this) { + case MySql: + case DRDS: + suffix = "yearIsDateType=false&zeroDateTimeBehavior=convertToNull&tinyInt1isBit=false&rewriteBatchedStatements=true"; + if (jdbc.contains("?")) { + result = jdbc + "&" + suffix; + } else { + result = jdbc + "?" + suffix; + } + break; + case Oracle: + break; + case SQLServer: + break; + case DB2: + break; + case PostgreSQL: + break; + case RDBMS: + break; + default: + throw DataXException.asDataXException(DBUtilErrorCode.UNSUPPORTED_TYPE, "unsupported database type."); + } + + return result; + } + + public String appendJDBCSuffixForWriter(String jdbc) { + String result = jdbc; + String suffix = null; + switch (this) { + case MySql: + suffix = "yearIsDateType=false&zeroDateTimeBehavior=convertToNull&rewriteBatchedStatements=true&tinyInt1isBit=false"; + if (jdbc.contains("?")) { + result = jdbc + "&" + suffix; + } else { + result = jdbc + "?" + suffix; + } + break; + case DRDS: + suffix = "yearIsDateType=false&zeroDateTimeBehavior=convertToNull"; + if (jdbc.contains("?")) { + result = jdbc + "&" + suffix; + } else { + result = jdbc + "?" 
+ suffix; + } + break; + case Oracle: + break; + case SQLServer: + break; + case DB2: + break; + case PostgreSQL: + break; + case RDBMS: + break; + default: + throw DataXException.asDataXException(DBUtilErrorCode.UNSUPPORTED_TYPE, "unsupported database type."); + } + + return result; + } + + public String formatPk(String splitPk) { + String result = splitPk; + + switch (this) { + case MySql: + case Oracle: + if (splitPk.length() >= 2 && splitPk.startsWith("`") && splitPk.endsWith("`")) { + result = splitPk.substring(1, splitPk.length() - 1).toLowerCase(); + } + break; + case SQLServer: + if (splitPk.length() >= 2 && splitPk.startsWith("[") && splitPk.endsWith("]")) { + result = splitPk.substring(1, splitPk.length() - 1).toLowerCase(); + } + break; + case DB2: + case PostgreSQL: + break; + default: + throw DataXException.asDataXException(DBUtilErrorCode.UNSUPPORTED_TYPE, "unsupported database type."); + } + + return result; + } + + + public String quoteColumnName(String columnName) { + String result = columnName; + + switch (this) { + case MySql: + result = "`" + columnName.replace("`", "``") + "`"; + break; + case Oracle: + break; + case SQLServer: + result = "[" + columnName + "]"; + break; + case DB2: + case PostgreSQL: + break; + default: + throw DataXException.asDataXException(DBUtilErrorCode.UNSUPPORTED_TYPE, "unsupported database type"); + } + + return result; + } + + public String quoteTableName(String tableName) { + String result = tableName; + + switch (this) { + case MySql: + result = "`" + tableName.replace("`", "``") + "`"; + break; + case Oracle: + break; + case SQLServer: + break; + case DB2: + break; + case PostgreSQL: + break; + default: + throw DataXException.asDataXException(DBUtilErrorCode.UNSUPPORTED_TYPE, "unsupported database type"); + } + + return result; + } + + private static Pattern mysqlPattern = Pattern.compile("jdbc:mysql://(.+):\\d+/.+"); + private static Pattern oraclePattern = Pattern.compile("jdbc:oracle:thin:@(.+):\\d+:.+"); + + /** + * 注意:目前只实现了从 mysql/oracle 中识别出ip 信息.未识别到则返回 null. 
+ */ + public static String parseIpFromJdbcUrl(String jdbcUrl) { + Matcher mysql = mysqlPattern.matcher(jdbcUrl); + if (mysql.matches()) { + return mysql.group(1); + } + Matcher oracle = oraclePattern.matcher(jdbcUrl); + if (oracle.matches()) { + return oracle.group(1); + } + return null; + } + + public String getTypeName() { + return typeName; + } + + public void setTypeName(String typeName) { + this.typeName = typeName; + } + +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/util/DriverWrapper.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/util/DriverWrapper.java new file mode 100644 index 000000000..2459ecdc3 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/util/DriverWrapper.java @@ -0,0 +1,51 @@ +package com.alibaba.datax.plugin.rdbms.util; + +import java.sql.*; +import java.util.Properties; +import java.util.logging.Logger; + +/** + * @author davidhua + * 2019/8/28 + */ +public class DriverWrapper implements Driver { + + private Driver driver; + public DriverWrapper(Driver driver){ + this.driver = driver; + } + @Override + public Connection connect(String url, Properties info) throws SQLException { + return this.driver.connect(url, info); + } + + @Override + public boolean acceptsURL(String url) throws SQLException { + return this.driver.acceptsURL(url); + } + + @Override + public DriverPropertyInfo[] getPropertyInfo(String url, Properties info) throws SQLException { + return this.driver.getPropertyInfo(url, info); + } + + @Override + public int getMajorVersion() { + return this.driver.getMajorVersion(); + } + + @Override + public int getMinorVersion() { + return this.driver.getMinorVersion(); + } + + @Override + public boolean jdbcCompliant() { + return this.driver.jdbcCompliant(); + } + + @Override + public Logger getParentLogger() throws SQLFeatureNotSupportedException { + return this.driver.getParentLogger(); + } +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/util/JdbcConnectionFactory.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/util/JdbcConnectionFactory.java new file mode 100644 index 000000000..3305db8d4 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/util/JdbcConnectionFactory.java @@ -0,0 +1,46 @@ +package com.alibaba.datax.plugin.rdbms.util; + +import java.sql.Connection; + +/** + * Date: 15/3/16 下午3:12 + */ +public class JdbcConnectionFactory implements ConnectionFactory { + + private DataBaseType dataBaseType; + + private String jdbcUrl; + + private String userName; + + private String password; + + private String proxyHost; + + private int proxyPort; + + public JdbcConnectionFactory(DataBaseType dataBaseType, String jdbcUrl, String userName, String password, + String proxyHost, int proxyPort) { + this.dataBaseType = dataBaseType; + this.jdbcUrl = jdbcUrl; + this.userName = userName; + this.password = password; + this.proxyHost = proxyHost; + this.proxyPort = proxyPort; + } + + @Override + public Connection getConnecttion() { + return DBUtil.getConnection(dataBaseType, jdbcUrl, userName, password, proxyHost, proxyPort); + } + + @Override + public Connection getConnecttionWithoutRetry() { + return DBUtil.getConnectionWithoutRetry(dataBaseType, jdbcUrl, userName, password, proxyHost, proxyPort); + } + + @Override + public String getConnectionInfo() { + 
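+        // Only the jdbcUrl is exposed here (it serves as human-readable context in log
+        // messages); username and password are not included in the string.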
return "jdbcUrl:" + jdbcUrl; + } +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/util/RdbmsException.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/util/RdbmsException.java new file mode 100644 index 000000000..04a50b8ab --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/util/RdbmsException.java @@ -0,0 +1,190 @@ +package com.alibaba.datax.plugin.rdbms.util; + +import com.alibaba.datax.common.exception.DataXException; +import com.alibaba.datax.common.spi.ErrorCode; + +/** + * Created by judy.lt on 2015/6/5. + */ +public class RdbmsException extends DataXException { + public RdbmsException(ErrorCode errorCode, String message) { + super(errorCode, message); + } + + public static DataXException asConnException(DataBaseType dataBaseType, Exception e, String userName, String dbName) { + if (dataBaseType.equals(DataBaseType.MySql)) { + DBUtilErrorCode dbUtilErrorCode = mySqlConnectionErrorAna(e.getMessage()); + if (dbUtilErrorCode == DBUtilErrorCode.MYSQL_CONN_DB_ERROR && dbName != null) { + return asDataXException(dbUtilErrorCode, "该数据库名称为:" + dbName + " 具体错误信息为:" + e); + } + if (dbUtilErrorCode == DBUtilErrorCode.MYSQL_CONN_USERPWD_ERROR) { + return asDataXException(dbUtilErrorCode, "该数据库用户名为:" + userName + " 具体错误信息为:" + e); + } + return asDataXException(dbUtilErrorCode, " 具体错误信息为:" + e); + } + + if (dataBaseType.equals(DataBaseType.Oracle)) { + DBUtilErrorCode dbUtilErrorCode = oracleConnectionErrorAna(e.getMessage()); + if (dbUtilErrorCode == DBUtilErrorCode.ORACLE_CONN_DB_ERROR && dbName != null) { + return asDataXException(dbUtilErrorCode, "该数据库名称为:" + dbName + " 具体错误信息为:" + e); + } + if (dbUtilErrorCode == DBUtilErrorCode.ORACLE_CONN_USERPWD_ERROR) { + return asDataXException(dbUtilErrorCode, "该数据库用户名为:" + userName + " 具体错误信息为:" + e); + } + return asDataXException(dbUtilErrorCode, " 具体错误信息为:" + e); + } + return asDataXException(DBUtilErrorCode.CONN_DB_ERROR, " 具体错误信息为:" + e); + } + + public static DBUtilErrorCode mySqlConnectionErrorAna(String e) { + if (e.contains(Constant.MYSQL_DATABASE)) { + return DBUtilErrorCode.MYSQL_CONN_DB_ERROR; + } + + if (e.contains(Constant.MYSQL_CONNEXP)) { + return DBUtilErrorCode.MYSQL_CONN_IPPORT_ERROR; + } + + if (e.contains(Constant.MYSQL_ACCDENIED)) { + return DBUtilErrorCode.MYSQL_CONN_USERPWD_ERROR; + } + + return DBUtilErrorCode.CONN_DB_ERROR; + } + + public static DBUtilErrorCode oracleConnectionErrorAna(String e) { + if (e.contains(Constant.ORACLE_DATABASE)) { + return DBUtilErrorCode.ORACLE_CONN_DB_ERROR; + } + + if (e.contains(Constant.ORACLE_CONNEXP)) { + return DBUtilErrorCode.ORACLE_CONN_IPPORT_ERROR; + } + + if (e.contains(Constant.ORACLE_ACCDENIED)) { + return DBUtilErrorCode.ORACLE_CONN_USERPWD_ERROR; + } + + return DBUtilErrorCode.CONN_DB_ERROR; + } + + public static DataXException asQueryException(DataBaseType dataBaseType, Exception e, String querySql, String table, String userName) { + if (dataBaseType.equals(DataBaseType.MySql)) { + DBUtilErrorCode dbUtilErrorCode = mySqlQueryErrorAna(e.getMessage()); + if (dbUtilErrorCode == DBUtilErrorCode.MYSQL_QUERY_TABLE_NAME_ERROR && table != null) { + return asDataXException(dbUtilErrorCode, "表名为:" + table + " 执行的SQL为:" + querySql + " 具体错误信息为:" + e); + } + if (dbUtilErrorCode == DBUtilErrorCode.MYSQL_QUERY_SELECT_PRI_ERROR && userName != null) { + return asDataXException(dbUtilErrorCode, "用户名为:" + userName + " 具体错误信息为:" + e); + } + + 
return asDataXException(dbUtilErrorCode, "执行的SQL为: " + querySql + " 具体错误信息为:" + e); + } + + if (dataBaseType.equals(DataBaseType.Oracle)) { + DBUtilErrorCode dbUtilErrorCode = oracleQueryErrorAna(e.getMessage()); + if (dbUtilErrorCode == DBUtilErrorCode.ORACLE_QUERY_TABLE_NAME_ERROR && table != null) { + return asDataXException(dbUtilErrorCode, "表名为:" + table + " 执行的SQL为:" + querySql + " 具体错误信息为:" + e); + } + if (dbUtilErrorCode == DBUtilErrorCode.ORACLE_QUERY_SELECT_PRI_ERROR) { + return asDataXException(dbUtilErrorCode, "用户名为:" + userName + " 具体错误信息为:" + e); + } + + return asDataXException(dbUtilErrorCode, "执行的SQL为: " + querySql + " 具体错误信息为:" + e); + + } + + return asDataXException(DBUtilErrorCode.SQL_EXECUTE_FAIL, "执行的SQL为: " + querySql + " 具体错误信息为:" + e); + } + + public static DBUtilErrorCode mySqlQueryErrorAna(String e) { + if (e.contains(Constant.MYSQL_TABLE_NAME_ERR1) && e.contains(Constant.MYSQL_TABLE_NAME_ERR2)) { + return DBUtilErrorCode.MYSQL_QUERY_TABLE_NAME_ERROR; + } else if (e.contains(Constant.MYSQL_SELECT_PRI)) { + return DBUtilErrorCode.MYSQL_QUERY_SELECT_PRI_ERROR; + } else if (e.contains(Constant.MYSQL_COLUMN1) && e.contains(Constant.MYSQL_COLUMN2)) { + return DBUtilErrorCode.MYSQL_QUERY_COLUMN_ERROR; + } else if (e.contains(Constant.MYSQL_WHERE)) { + return DBUtilErrorCode.MYSQL_QUERY_SQL_ERROR; + } + return DBUtilErrorCode.READ_RECORD_FAIL; + } + + public static DBUtilErrorCode oracleQueryErrorAna(String e) { + if (e.contains(Constant.ORACLE_TABLE_NAME)) { + return DBUtilErrorCode.ORACLE_QUERY_TABLE_NAME_ERROR; + } else if (e.contains(Constant.ORACLE_SQL)) { + return DBUtilErrorCode.ORACLE_QUERY_SQL_ERROR; + } else if (e.contains(Constant.ORACLE_SELECT_PRI)) { + return DBUtilErrorCode.ORACLE_QUERY_SELECT_PRI_ERROR; + } + return DBUtilErrorCode.READ_RECORD_FAIL; + } + + public static DataXException asSqlParserException(DataBaseType dataBaseType, Exception e, String querySql) { + if (dataBaseType.equals(DataBaseType.MySql)) { + throw asDataXException(DBUtilErrorCode.MYSQL_QUERY_SQL_PARSER_ERROR, "执行的SQL为:" + querySql + " 具体错误信息为:" + e); + } + if (dataBaseType.equals(DataBaseType.Oracle)) { + throw asDataXException(DBUtilErrorCode.ORACLE_QUERY_SQL_PARSER_ERROR, "执行的SQL为:" + querySql + " 具体错误信息为:" + e); + } + throw asDataXException(DBUtilErrorCode.READ_RECORD_FAIL, "执行的SQL为:" + querySql + " 具体错误信息为:" + e); + } + + public static DataXException asPreSQLParserException(DataBaseType dataBaseType, Exception e, String querySql) { + if (dataBaseType.equals(DataBaseType.MySql)) { + throw asDataXException(DBUtilErrorCode.MYSQL_PRE_SQL_ERROR, "执行的SQL为:" + querySql + " 具体错误信息为:" + e); + } + + if (dataBaseType.equals(DataBaseType.Oracle)) { + throw asDataXException(DBUtilErrorCode.ORACLE_PRE_SQL_ERROR, "执行的SQL为:" + querySql + " 具体错误信息为:" + e); + } + throw asDataXException(DBUtilErrorCode.READ_RECORD_FAIL, "执行的SQL为:" + querySql + " 具体错误信息为:" + e); + } + + public static DataXException asPostSQLParserException(DataBaseType dataBaseType, Exception e, String querySql) { + if (dataBaseType.equals(DataBaseType.MySql)) { + throw asDataXException(DBUtilErrorCode.MYSQL_POST_SQL_ERROR, "执行的SQL为:" + querySql + " 具体错误信息为:" + e); + } + + if (dataBaseType.equals(DataBaseType.Oracle)) { + throw asDataXException(DBUtilErrorCode.ORACLE_POST_SQL_ERROR, "执行的SQL为:" + querySql + " 具体错误信息为:" + e); + } + throw asDataXException(DBUtilErrorCode.READ_RECORD_FAIL, "执行的SQL为:" + querySql + " 具体错误信息为:" + e); + } + + public static DataXException asInsertPriException(DataBaseType dataBaseType, String userName, 
String jdbcUrl) { + if (dataBaseType.equals(DataBaseType.MySql)) { + throw asDataXException(DBUtilErrorCode.MYSQL_INSERT_ERROR, "用户名为:" + userName + " jdbcURL为:" + jdbcUrl); + } + + if (dataBaseType.equals(DataBaseType.Oracle)) { + throw asDataXException(DBUtilErrorCode.ORACLE_INSERT_ERROR, "用户名为:" + userName + " jdbcURL为:" + jdbcUrl); + } + throw asDataXException(DBUtilErrorCode.NO_INSERT_PRIVILEGE, "用户名为:" + userName + " jdbcURL为:" + jdbcUrl); + } + + public static DataXException asDeletePriException(DataBaseType dataBaseType, String userName, String jdbcUrl) { + if (dataBaseType.equals(DataBaseType.MySql)) { + throw asDataXException(DBUtilErrorCode.MYSQL_DELETE_ERROR, "用户名为:" + userName + " jdbcURL为:" + jdbcUrl); + } + + if (dataBaseType.equals(DataBaseType.Oracle)) { + throw asDataXException(DBUtilErrorCode.ORACLE_DELETE_ERROR, "用户名为:" + userName + " jdbcURL为:" + jdbcUrl); + } + throw asDataXException(DBUtilErrorCode.NO_DELETE_PRIVILEGE, "用户名为:" + userName + " jdbcURL为:" + jdbcUrl); + } + + public static DataXException asSplitPKException(DataBaseType dataBaseType, Exception e, String splitSql, String splitPkID) { + if (dataBaseType.equals(DataBaseType.MySql)) { + + return asDataXException(DBUtilErrorCode.MYSQL_SPLIT_PK_ERROR, "配置的SplitPK为: " + splitPkID + ", 执行的SQL为: " + splitSql + " 具体错误信息为:" + e); + } + + if (dataBaseType.equals(DataBaseType.Oracle)) { + return asDataXException(DBUtilErrorCode.ORACLE_SPLIT_PK_ERROR, "配置的SplitPK为: " + splitPkID + ", 执行的SQL为: " + splitSql + " 具体错误信息为:" + e); + } + + return asDataXException(DBUtilErrorCode.READ_RECORD_FAIL, splitSql + e); + } +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/util/RdbmsRangeSplitWrap.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/util/RdbmsRangeSplitWrap.java new file mode 100644 index 000000000..da55379ce --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/util/RdbmsRangeSplitWrap.java @@ -0,0 +1,101 @@ +package com.alibaba.datax.plugin.rdbms.util; + +import com.alibaba.datax.common.util.RangeSplitUtil; +import org.apache.commons.lang3.StringUtils; + +import java.math.BigInteger; +import java.util.ArrayList; +import java.util.List; + +public final class RdbmsRangeSplitWrap { + + public static List splitAndWrap(String left, String right, int expectSliceNumber, + String columnName, String quote, DataBaseType dataBaseType) { + String[] tempResult = RangeSplitUtil.doAsciiStringSplit(left, right, expectSliceNumber); + return RdbmsRangeSplitWrap.wrapRange(tempResult, columnName, quote, dataBaseType); + } + + // warn: do not use this method long->BigInteger + public static List splitAndWrap(long left, long right, int expectSliceNumber, String columnName) { + long[] tempResult = RangeSplitUtil.doLongSplit(left, right, expectSliceNumber); + return RdbmsRangeSplitWrap.wrapRange(tempResult, columnName); + } + + public static List splitAndWrap(BigInteger left, BigInteger right, int expectSliceNumber, String columnName) { + BigInteger[] tempResult = RangeSplitUtil.doBigIntegerSplit(left, right, expectSliceNumber); + return RdbmsRangeSplitWrap.wrapRange(tempResult, columnName); + } + + public static List wrapRange(long[] rangeResult, String columnName) { + String[] rangeStr = new String[rangeResult.length]; + for (int i = 0, len = rangeResult.length; i < len; i++) { + rangeStr[i] = String.valueOf(rangeResult[i]); + } + return wrapRange(rangeStr, columnName, "", null); + } + + 
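+    // A usage sketch with assumed values: wrapRange(new long[]{0, 50, 100}, "id") produces
+    // " (0 <= id AND id < 50) " and " (50 <= id AND id <= 100) ". Only the last range is
+    // closed on both ends, so adjacent slices cover the key space without overlapping.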
public static List wrapRange(BigInteger[] rangeResult, String columnName) { + String[] rangeStr = new String[rangeResult.length]; + for (int i = 0, len = rangeResult.length; i < len; i++) { + rangeStr[i] = rangeResult[i].toString(); + } + return wrapRange(rangeStr, columnName, "", null); + } + + public static List wrapRange(String[] rangeResult, String columnName, + String quote, DataBaseType dataBaseType) { + if (null == rangeResult || rangeResult.length < 2) { + throw new IllegalArgumentException(String.format( + "Parameter rangeResult can not be null and its length can not <2. detail:rangeResult=[%s].", + StringUtils.join(rangeResult, ","))); + } + + List result = new ArrayList(); + + //TODO change to stringbuilder.append(..) + if (2 == rangeResult.length) { + result.add(String.format(" (%s%s%s <= %s AND %s <= %s%s%s) ", quote, quoteConstantValue(rangeResult[0], dataBaseType), + quote, columnName, columnName, quote, quoteConstantValue(rangeResult[1], dataBaseType), quote)); + return result; + } else { + for (int i = 0, len = rangeResult.length - 2; i < len; i++) { + result.add(String.format(" (%s%s%s <= %s AND %s < %s%s%s) ", quote, quoteConstantValue(rangeResult[i], dataBaseType), + quote, columnName, columnName, quote, quoteConstantValue(rangeResult[i + 1], dataBaseType), quote)); + } + + result.add(String.format(" (%s%s%s <= %s AND %s <= %s%s%s) ", quote, quoteConstantValue(rangeResult[rangeResult.length - 2], dataBaseType), + quote, columnName, columnName, quote, quoteConstantValue(rangeResult[rangeResult.length - 1], dataBaseType), quote)); + return result; + } + } + + public static String wrapFirstLastPoint(String firstPoint, String lastPoint, String columnName, + String quote, DataBaseType dataBaseType) { + return String.format(" ((%s < %s%s%s) OR (%s%s%s < %s)) ", columnName, quote, quoteConstantValue(firstPoint, dataBaseType), + quote, quote, quoteConstantValue(lastPoint, dataBaseType), quote, columnName); + } + + public static String wrapFirstLastPoint(Long firstPoint, Long lastPoint, String columnName) { + return wrapFirstLastPoint(firstPoint.toString(), lastPoint.toString(), columnName, "", null); + } + + public static String wrapFirstLastPoint(BigInteger firstPoint, BigInteger lastPoint, String columnName) { + return wrapFirstLastPoint(firstPoint.toString(), lastPoint.toString(), columnName, "", null); + } + + + private static String quoteConstantValue(String aString, DataBaseType dataBaseType) { + if (null == dataBaseType) { + return aString; + } + + if (dataBaseType.equals(DataBaseType.MySql)) { + return aString.replace("'", "''").replace("\\", "\\\\"); + } else if (dataBaseType.equals(DataBaseType.Oracle) || dataBaseType.equals(DataBaseType.SQLServer)) { + return aString.replace("'", "''"); + } else { + //TODO other type supported + return aString; + } + } +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/util/SqlFormatUtil.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/util/SqlFormatUtil.java new file mode 100644 index 000000000..e14914a41 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/util/SqlFormatUtil.java @@ -0,0 +1,337 @@ +package com.alibaba.datax.plugin.rdbms.util; + +import java.util.HashSet; +import java.util.LinkedList; +import java.util.Set; +import java.util.StringTokenizer; + +// TODO delete it +public class SqlFormatUtil { + + private static final Set BEGIN_CLAUSES = new HashSet(); + private 
static final Set END_CLAUSES = new HashSet(); + private static final Set LOGICAL = new HashSet(); + private static final Set QUANTIFIERS = new HashSet(); + private static final Set DML = new HashSet(); + private static final Set MISC = new HashSet(); + + private static final String WHITESPACE = " \n\r\f\t"; + + static { + BEGIN_CLAUSES.add("left"); + BEGIN_CLAUSES.add("right"); + BEGIN_CLAUSES.add("inner"); + BEGIN_CLAUSES.add("outer"); + BEGIN_CLAUSES.add("group"); + BEGIN_CLAUSES.add("order"); + + END_CLAUSES.add("where"); + END_CLAUSES.add("set"); + END_CLAUSES.add("having"); + END_CLAUSES.add("join"); + END_CLAUSES.add("from"); + END_CLAUSES.add("by"); + END_CLAUSES.add("join"); + END_CLAUSES.add("into"); + END_CLAUSES.add("union"); + + LOGICAL.add("and"); + LOGICAL.add("or"); + LOGICAL.add("when"); + LOGICAL.add("else"); + LOGICAL.add("end"); + + QUANTIFIERS.add("in"); + QUANTIFIERS.add("all"); + QUANTIFIERS.add("exists"); + QUANTIFIERS.add("some"); + QUANTIFIERS.add("any"); + + DML.add("insert"); + DML.add("update"); + DML.add("delete"); + + MISC.add("select"); + MISC.add("on"); + } + + static final String indentString = " "; + static final String initial = "\n "; + + public static String format(String source) { + return new FormatProcess(source).perform(); + } + + private static class FormatProcess { + boolean beginLine = true; + boolean afterBeginBeforeEnd = false; + boolean afterByOrSetOrFromOrSelect = false; + boolean afterValues = false; + boolean afterOn = false; + boolean afterBetween = false; + boolean afterInsert = false; + int inFunction = 0; + int parensSinceSelect = 0; + private LinkedList parenCounts = new LinkedList(); + private LinkedList afterByOrFromOrSelects = new LinkedList(); + + int indent = 1; + + StringBuilder result = new StringBuilder(); + StringTokenizer tokens; + String lastToken; + String token; + String lcToken; + + public FormatProcess(String sql) { + tokens = new StringTokenizer(sql, "()+*/-=<>'`\"[]," + WHITESPACE, + true); + } + + public String perform() { + + result.append(initial); + + while (tokens.hasMoreTokens()) { + token = tokens.nextToken(); + lcToken = token.toLowerCase(); + + if ("'".equals(token)) { + String t; + do { + t = tokens.nextToken(); + token += t; + } while (!"'".equals(t) && tokens.hasMoreTokens()); // cannot + // handle + // single + // quotes + } else if ("\"".equals(token)) { + String t; + do { + t = tokens.nextToken(); + token += t; + } while (!"\"".equals(t)); + } + + if (afterByOrSetOrFromOrSelect && ",".equals(token)) { + commaAfterByOrFromOrSelect(); + } else if (afterOn && ",".equals(token)) { + commaAfterOn(); + } else if ("(".equals(token)) { + openParen(); + } else if (")".equals(token)) { + closeParen(); + } else if (BEGIN_CLAUSES.contains(lcToken)) { + beginNewClause(); + } else if (END_CLAUSES.contains(lcToken)) { + endNewClause(); + } else if ("select".equals(lcToken)) { + select(); + } else if (DML.contains(lcToken)) { + updateOrInsertOrDelete(); + } else if ("values".equals(lcToken)) { + values(); + } else if ("on".equals(lcToken)) { + on(); + } else if (afterBetween && lcToken.equals("and")) { + misc(); + afterBetween = false; + } else if (LOGICAL.contains(lcToken)) { + logical(); + } else if (isWhitespace(token)) { + white(); + } else { + misc(); + } + + if (!isWhitespace(token)) { + lastToken = lcToken; + } + + } + return result.toString(); + } + + private void commaAfterOn() { + out(); + indent--; + newline(); + afterOn = false; + afterByOrSetOrFromOrSelect = true; + } + + private void 
commaAfterByOrFromOrSelect() { + out(); + newline(); + } + + private void logical() { + if ("end".equals(lcToken)) { + indent--; + } + newline(); + out(); + beginLine = false; + } + + private void on() { + indent++; + afterOn = true; + newline(); + out(); + beginLine = false; + } + + private void misc() { + out(); + if ("between".equals(lcToken)) { + afterBetween = true; + } + if (afterInsert) { + newline(); + afterInsert = false; + } else { + beginLine = false; + if ("case".equals(lcToken)) { + indent++; + } + } + } + + private void white() { + if (!beginLine) { + result.append(" "); + } + } + + private void updateOrInsertOrDelete() { + out(); + indent++; + beginLine = false; + if ("update".equals(lcToken)) { + newline(); + } + if ("insert".equals(lcToken)) { + afterInsert = true; + } + } + + private void select() { + out(); + indent++; + newline(); + parenCounts.addLast(Integer.valueOf(parensSinceSelect)); + afterByOrFromOrSelects.addLast(Boolean + .valueOf(afterByOrSetOrFromOrSelect)); + parensSinceSelect = 0; + afterByOrSetOrFromOrSelect = true; + } + + private void out() { + result.append(token); + } + + private void endNewClause() { + if (!afterBeginBeforeEnd) { + indent--; + if (afterOn) { + indent--; + afterOn = false; + } + newline(); + } + out(); + if (!"union".equals(lcToken)) { + indent++; + } + newline(); + afterBeginBeforeEnd = false; + afterByOrSetOrFromOrSelect = "by".equals(lcToken) + || "set".equals(lcToken) || "from".equals(lcToken); + } + + private void beginNewClause() { + if (!afterBeginBeforeEnd) { + if (afterOn) { + indent--; + afterOn = false; + } + indent--; + newline(); + } + out(); + beginLine = false; + afterBeginBeforeEnd = true; + } + + private void values() { + indent--; + newline(); + out(); + indent++; + newline(); + afterValues = true; + } + + private void closeParen() { + parensSinceSelect--; + if (parensSinceSelect < 0) { + indent--; + parensSinceSelect = parenCounts.removeLast().intValue(); + afterByOrSetOrFromOrSelect = afterByOrFromOrSelects + .removeLast().booleanValue(); + } + if (inFunction > 0) { + inFunction--; + out(); + } else { + if (!afterByOrSetOrFromOrSelect) { + indent--; + newline(); + } + out(); + } + beginLine = false; + } + + private void openParen() { + if (isFunctionName(lastToken) || inFunction > 0) { + inFunction++; + } + beginLine = false; + if (inFunction > 0) { + out(); + } else { + out(); + if (!afterByOrSetOrFromOrSelect) { + indent++; + newline(); + beginLine = true; + } + } + parensSinceSelect++; + } + + private static boolean isFunctionName(String tok) { + final char begin = tok.charAt(0); + final boolean isIdentifier = Character.isJavaIdentifierStart(begin) + || '"' == begin; + return isIdentifier && !LOGICAL.contains(tok) + && !END_CLAUSES.contains(tok) && !QUANTIFIERS.contains(tok) + && !DML.contains(tok) && !MISC.contains(tok); + } + + private static boolean isWhitespace(String token) { + return WHITESPACE.indexOf(token) >= 0; + } + + private void newline() { + result.append("\n"); + for (int i = 0; i < indent; i++) { + result.append(indentString); + } + beginLine = true; + } + } + +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/util/TableExpandUtil.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/util/TableExpandUtil.java new file mode 100644 index 000000000..d75440a19 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/util/TableExpandUtil.java @@ -0,0 +1,82 @@ 
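+// Expands table-range shorthand such as "schema.table[0-32]suffix" into concrete table names.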
+package com.alibaba.datax.plugin.rdbms.util;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+public final class TableExpandUtil {
+
+    // schema.table[0-2]more
+    //   1      2    3 4  5
+    public static final Pattern pattern = Pattern
+            .compile("(\\w+\\.)?(\\w+)\\[(\\d+)-(\\d+)\\](.*)");
+
+    private TableExpandUtil() {
+    }
+
+    /**
+     * Splits a table string (usually containing one or more table names) into a
+     * formatted List. Example: table[0-32] will be split into `table0`,
+     * `table1`, `table2`, ... , `table32` in a {@link List}.
+     *
+     * @param tables a string containing one or more table names.
+     * @return the split table names.
+     *
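+     * A concrete illustration (assumed input): splitTables(DataBaseType.MySql, "db.tb[01-03]_x")
+     * yields `db.tb01_x`, `db.tb02_x`, `db.tb03_x`; a leading zero in the range start keeps
+     * the zero-padding via String.format("%0" + len + "d", k).
+     *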

+ * TODO 删除参数 DataBaseType + */ + public static List splitTables(DataBaseType dataBaseType, + String tables) { + List splittedTables = new ArrayList(); + + String[] tableArrays = tables.split(","); + + String tableName = null; + for (String tableArray : tableArrays) { + Matcher matcher = pattern.matcher(tableArray.trim()); + if (!matcher.matches()) { + tableName = tableArray.trim(); + splittedTables.add(tableName); + } else { + String start = matcher.group(3).trim(); + String end = matcher.group(4).trim(); + String tmp = ""; + if (Integer.valueOf(start) > Integer.valueOf(end)) { + tmp = start; + start = end; + end = tmp; + } + int len = start.length(); + String schema = null; + for (int k = Integer.valueOf(start); k <= Integer.valueOf(end); k++) { + schema = (null == matcher.group(1)) ? "" : matcher.group(1) + .trim(); + if (start.startsWith("0")) { + tableName = schema + matcher.group(2).trim() + + String.format("%0" + len + "d", k) + + matcher.group(5).trim(); + splittedTables.add(tableName); + } else { + tableName = schema + matcher.group(2).trim() + + String.format("%d", k) + + matcher.group(5).trim(); + splittedTables.add(tableName); + } + } + } + } + return splittedTables; + } + + public static List expandTableConf(DataBaseType dataBaseType, + List tables) { + List parsedTables = new ArrayList(); + for (String table : tables) { + List splittedTables = splitTables(dataBaseType, table); + parsedTables.addAll(splittedTables); + } + + return parsedTables; + } + +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/writer/CommonRdbmsWriter.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/writer/CommonRdbmsWriter.java new file mode 100644 index 000000000..0d4e71cfa --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/writer/CommonRdbmsWriter.java @@ -0,0 +1,587 @@ +package com.alibaba.datax.plugin.rdbms.writer; + +import com.alibaba.datax.common.element.Column; +import com.alibaba.datax.common.element.Record; +import com.alibaba.datax.common.exception.DataXException; +import com.alibaba.datax.common.plugin.RecordReceiver; +import com.alibaba.datax.common.plugin.TaskPluginCollector; +import com.alibaba.datax.common.util.Configuration; +import com.alibaba.datax.plugin.rdbms.util.*; +import com.alibaba.datax.plugin.rdbms.writer.util.OriginalConfPretreatmentUtil; +import com.alibaba.datax.plugin.rdbms.writer.util.WriterUtil; +import com.webank.wedatasphere.exchangis.datax.common.CryptoUtils; +import com.webank.wedatasphere.exchangis.datax.util.Json; +import org.apache.commons.lang3.StringUtils; +import org.apache.commons.lang3.tuple.Triple; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.sql.Connection; +import java.sql.PreparedStatement; +import java.sql.SQLException; +import java.sql.Types; +import java.util.ArrayList; +import java.util.List; + +import static com.alibaba.datax.plugin.rdbms.util.Constant.DEFAULT_PROXY_SOCKS_HOST; +import static com.alibaba.datax.plugin.rdbms.util.Constant.DEFAULT_PROXY_SOCKS_PORT; + +public class CommonRdbmsWriter { + + public static class Job { + private DataBaseType dataBaseType; + + private static final Logger LOG = LoggerFactory + .getLogger(Job.class); + + public Job(DataBaseType dataBaseType) { + this.dataBaseType = dataBaseType; + OriginalConfPretreatmentUtil.DATABASE_TYPE = this.dataBaseType; + } + + public void init(Configuration originalConfig) { + 
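+            // doPretreatment() validates username/password, normalizes batchSize, expands the
+            // connection/table lists, resolves the column list, and pre-renders the write template.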
OriginalConfPretreatmentUtil.doPretreatment(originalConfig, this.dataBaseType); + + LOG.debug("After job init(), originalConfig now is:[\n{}\n]", + originalConfig.toJSON()); + } + + /*目前只支持MySQL Writer跟Oracle Writer;检查PreSQL跟PostSQL语法以及insert,delete权限*/ + public void writerPreCheck(Configuration originalConfig, DataBaseType dataBaseType) { + /*检查PreSql跟PostSql语句*/ + prePostSqlValid(originalConfig, dataBaseType); + /*检查insert 跟delete权限*/ + privilegeValid(originalConfig, dataBaseType); + } + + public void prePostSqlValid(Configuration originalConfig, DataBaseType dataBaseType) { + /*检查PreSql跟PostSql语句*/ + WriterUtil.preCheckPrePareSQL(originalConfig, dataBaseType); + WriterUtil.preCheckPostSQL(originalConfig, dataBaseType); + } + + public void privilegeValid(Configuration originalConfig, DataBaseType dataBaseType) { + /*检查insert 跟delete权限*/ + String username = originalConfig.getString(Key.USERNAME); + String password = originalConfig.getString(Key.PASSWORD); + String proxyHost = originalConfig.getString(Key.PROXY_HOST, DEFAULT_PROXY_SOCKS_HOST); + int proxyPort = originalConfig.getInt(Key.PROXY_PORT, DEFAULT_PROXY_SOCKS_PORT); + List connections = originalConfig.getList(Constant.CONN_MARK, + Object.class); + + for (int i = 0, len = connections.size(); i < len; i++) { + Configuration connConf = Configuration.from(Json.toJson(connections.get(i), null)); + String jdbcUrl = connConf.getString(Key.JDBC_URL); + List expandedTables = connConf.getList(Key.TABLE, String.class); + boolean hasInsertPri = DBUtil.checkInsertPrivilege(dataBaseType, jdbcUrl, username, password, + proxyHost, proxyPort, expandedTables); + + if (!hasInsertPri) { + throw RdbmsException.asInsertPriException(dataBaseType, originalConfig.getString(Key.USERNAME), jdbcUrl); + } + + if (DBUtil.needCheckDeletePrivilege(originalConfig)) { + boolean hasDeletePri = DBUtil.checkDeletePrivilege(dataBaseType, jdbcUrl, username, password, + proxyHost, proxyPort, expandedTables); + if (!hasDeletePri) { + throw RdbmsException.asDeletePriException(dataBaseType, originalConfig.getString(Key.USERNAME), jdbcUrl); + } + } + } + } + + // 一般来说,是需要推迟到 task 中进行pre 的执行(单表情况例外) + public void prepare(Configuration originalConfig) { + int tableNumber = originalConfig.getInt(Constant.TABLE_NUMBER_MARK); + if (tableNumber == 1) { + String username = originalConfig.getString(Key.USERNAME); + String password = originalConfig.getString(Key.PASSWORD); + String proxyHost = originalConfig.getString(Key.PROXY_HOST, DEFAULT_PROXY_SOCKS_HOST); + int proxyPort = originalConfig.getInt(Key.PROXY_PORT, DEFAULT_PROXY_SOCKS_PORT); + List conns = originalConfig.getList(Constant.CONN_MARK, + Object.class); + Configuration connConf = Configuration.from(Json.toJson(conns.get(0), null)); + + // 这里的 jdbcUrl 已经 append 了合适后缀参数 + String jdbcUrl = connConf.getString(Key.JDBC_URL); + originalConfig.set(Key.JDBC_URL, jdbcUrl); + + String table = connConf.getList(Key.TABLE, String.class).get(0); + originalConfig.set(Key.TABLE, table); + + List preSqls = originalConfig.getList(Key.PRE_SQL, + String.class); + List renderedPreSqls = WriterUtil.renderPreOrPostSqls( + preSqls, table); + + originalConfig.remove(Constant.CONN_MARK); + if (null != renderedPreSqls && !renderedPreSqls.isEmpty()) { + // 说明有 preSql 配置,则此处删除掉 + originalConfig.remove(Key.PRE_SQL); + + Connection conn = DBUtil.getConnection(dataBaseType, + jdbcUrl, username, password, proxyHost, proxyPort); + LOG.info("Begin to execute preSqls:[{}]. 
context info:{}.", + StringUtils.join(renderedPreSqls, ";"), jdbcUrl); + + WriterUtil.executeSqls(conn, renderedPreSqls, jdbcUrl, dataBaseType); + DBUtil.closeDBResources(null, null, conn); + } + } + + LOG.debug("After job prepare(), originalConfig now is:[\n{}\n]", + originalConfig.toJSON()); + } + + public List split(Configuration originalConfig, + int mandatoryNumber) { + return WriterUtil.doSplit(originalConfig, mandatoryNumber); + } + + // 一般来说,是需要推迟到 task 中进行post 的执行(单表情况例外) + public void post(Configuration originalConfig) { + int tableNumber = originalConfig.getInt(Constant.TABLE_NUMBER_MARK); + if (tableNumber == 1) { + String username = originalConfig.getString(Key.USERNAME); + String password = originalConfig.getString(Key.PASSWORD); + String proxyHost = originalConfig.getString(Key.PROXY_HOST, DEFAULT_PROXY_SOCKS_HOST); + int proxyPort = originalConfig.getInt(Key.PROXY_PORT, DEFAULT_PROXY_SOCKS_PORT); + // 已经由 prepare 进行了appendJDBCSuffix处理 + String jdbcUrl = originalConfig.getString(Key.JDBC_URL); + + String table = originalConfig.getString(Key.TABLE); + + List postSqls = originalConfig.getList(Key.POST_SQL, + String.class); + List renderedPostSqls = WriterUtil.renderPreOrPostSqls( + postSqls, table); + + if (null != renderedPostSqls && !renderedPostSqls.isEmpty()) { + // 说明有 postSql 配置,则此处删除掉 + originalConfig.remove(Key.POST_SQL); + + Connection conn = DBUtil.getConnection(this.dataBaseType, + jdbcUrl, username, password, proxyHost, proxyPort); + + LOG.info( + "Begin to execute postSqls:[{}]. context info:{}.", + StringUtils.join(renderedPostSqls, ";"), jdbcUrl); + WriterUtil.executeSqls(conn, renderedPostSqls, jdbcUrl, dataBaseType); + DBUtil.closeDBResources(null, null, conn); + } + } + } + + public void destroy(Configuration originalConfig) { + } + + } + + public static class Task { + protected static final Logger LOG = LoggerFactory + .getLogger(Task.class); + + protected DataBaseType dataBaseType; + private static final String VALUE_HOLDER = "?"; + + protected String username; + protected String password; + protected String proxyHost; + protected int proxyPort; + protected String jdbcUrl; + protected String table; + protected List columns; + protected List preSqls; + protected List postSqls; + protected int batchSize; + protected int batchByteSize; + protected int columnNumber = 0; + protected List primarykeys; + protected TaskPluginCollector taskPluginCollector; + + // 作为日志显示信息时,需要附带的通用信息。比如信息所对应的数据库连接等信息,针对哪个表做的操作 + protected static String BASIC_MESSAGE; + + protected static String INSERT_OR_REPLACE_TEMPLATE; + + protected String writeRecordSql; + protected String writeMode; + protected boolean emptyAsNull; + protected Triple, List, List> resultSetMetaData; + + public Task(DataBaseType dataBaseType) { + this.dataBaseType = dataBaseType; + } + + public void init(Configuration writerSliceConfig) { + this.username = writerSliceConfig.getString(Key.USERNAME); + this.password = writerSliceConfig.getString(Key.PASSWORD); + this.proxyHost = writerSliceConfig.getString(Key.PROXY_HOST, DEFAULT_PROXY_SOCKS_HOST); + this.proxyPort = writerSliceConfig.getInt(Key.PROXY_PORT, DEFAULT_PROXY_SOCKS_PORT); + if(StringUtils.isNotBlank(password)){ + try { + password = (String) CryptoUtils.string2Object(password); + } catch (Exception e) { + throw DataXException.asDataXException(DBUtilErrorCode.CONF_ERROR, "decrypt password failed"); + } + } + this.jdbcUrl = writerSliceConfig.getString(Key.JDBC_URL); + //ob10的处理 + if (this.jdbcUrl.startsWith(Constant.OB10_SPLIT_STRING) && this.dataBaseType 
== DataBaseType.MySql) { + String[] ss = this.jdbcUrl.split(Constant.OB10_SPLIT_STRING_PATTERN); + if (ss.length != 3) { + throw DataXException + .asDataXException( + DBUtilErrorCode.JDBC_OB10_ADDRESS_ERROR, "JDBC OB10格式错误,请联系askdatax"); + } + LOG.info("this is ob1_0 jdbc url."); + this.username = ss[1].trim() + ":" + this.username; + this.jdbcUrl = ss[2]; + LOG.info("this is ob1_0 jdbc url. user=" + this.username + " :url=" + this.jdbcUrl); + } + + this.table = writerSliceConfig.getString(Key.TABLE); + this.primarykeys = writerSliceConfig.getList(Key.PRIMARYKEY, String.class); + + this.columns = writerSliceConfig.getList(Key.COLUMN, String.class); + this.columnNumber = this.columns.size(); + + this.preSqls = writerSliceConfig.getList(Key.PRE_SQL, String.class); + this.postSqls = writerSliceConfig.getList(Key.POST_SQL, String.class); + this.batchSize = writerSliceConfig.getInt(Key.BATCH_SIZE, Constant.DEFAULT_BATCH_SIZE); + this.batchByteSize = writerSliceConfig.getInt(Key.BATCH_BYTE_SIZE, Constant.DEFAULT_BATCH_BYTE_SIZE); + + writeMode = writerSliceConfig.getString(Key.WRITE_MODE, "INSERT"); + emptyAsNull = writerSliceConfig.getBool(Key.EMPTY_AS_NULL, true); + INSERT_OR_REPLACE_TEMPLATE = writerSliceConfig.getString(Constant.INSERT_OR_REPLACE_TEMPLATE_MARK); + this.writeRecordSql = String.format(INSERT_OR_REPLACE_TEMPLATE, this.table); + + BASIC_MESSAGE = String.format("jdbcUrl:[%s], table:[%s]", + this.jdbcUrl, this.table); + } + + public void prepare(Configuration writerSliceConfig) { + Connection connection = DBUtil.getConnection(this.dataBaseType, + this.jdbcUrl, username, password, proxyHost, proxyPort); + + DBUtil.dealWithSessionConfig(connection, writerSliceConfig, + this.dataBaseType, BASIC_MESSAGE); + + int tableNumber = writerSliceConfig.getInt( + Constant.TABLE_NUMBER_MARK); + if (tableNumber != 1) { + LOG.info("Begin to execute preSqls:[{}]. context info:{}.", + StringUtils.join(this.preSqls, ";"), BASIC_MESSAGE); + WriterUtil.executeSqls(connection, this.preSqls, BASIC_MESSAGE, dataBaseType); + } + + DBUtil.closeDBResources(null, null, connection); + } + + public void startWriteWithConnection(RecordReceiver recordReceiver, TaskPluginCollector taskPluginCollector, Connection connection) { + this.taskPluginCollector = taskPluginCollector; + + // 用于写入数据的时候的类型根据目的表字段类型转换 + this.resultSetMetaData = DBUtil.getColumnMetaData(connection, + this.table, StringUtils.join(this.columns, ",")); + // 写数据库的SQL语句 + calcWriteRecordSql(); + + List writeBuffer = new ArrayList(this.batchSize); + int bufferBytes = 0; + try { + Record record; + while ((record = recordReceiver.getFromReader()) != null) { + if (record.getColumnNumber() != this.columnNumber) { + // 源头读取字段列数与目的表字段写入列数不相等,直接报错 + throw DataXException + .asDataXException( + DBUtilErrorCode.CONF_ERROR, + String.format( + "列配置信息有错误. 因为您配置的任务中,源头读取字段数:%s 与 目的表要写入的字段数:%s 不相等. 
请检查您的配置并作出修改.", + record.getColumnNumber(), + this.columnNumber)); + } + + writeBuffer.add(record); + bufferBytes += record.getMemorySize(); + + if (writeBuffer.size() >= batchSize || bufferBytes >= batchByteSize) { + doBatchInsert(connection, writeBuffer); + writeBuffer.clear(); + bufferBytes = 0; + } + } + if (!writeBuffer.isEmpty()) { + doBatchInsert(connection, writeBuffer); + writeBuffer.clear(); + bufferBytes = 0; + } + } catch (Exception e) { + throw DataXException.asDataXException( + DBUtilErrorCode.WRITE_DATA_ERROR, e); + } finally { + writeBuffer.clear(); + bufferBytes = 0; + DBUtil.closeDBResources(null, null, connection); + } + } + + // TODO 改用连接池,确保每次获取的连接都是可用的(注意:连接可能需要每次都初始化其 session) + public void startWrite(RecordReceiver recordReceiver, + Configuration writerSliceConfig, + TaskPluginCollector taskPluginCollector) { + Connection connection = DBUtil.getConnection(this.dataBaseType, + this.jdbcUrl, username, password, proxyHost, proxyPort); + DBUtil.dealWithSessionConfig(connection, writerSliceConfig, + this.dataBaseType, BASIC_MESSAGE); + startWriteWithConnection(recordReceiver, taskPluginCollector, connection); + } + + + public void post(Configuration writerSliceConfig) { + int tableNumber = writerSliceConfig.getInt( + Constant.TABLE_NUMBER_MARK); + + boolean hasPostSql = (this.postSqls != null && this.postSqls.size() > 0); + if (tableNumber == 1 || !hasPostSql) { + return; + } + + Connection connection = DBUtil.getConnection(this.dataBaseType, + this.jdbcUrl, username, password, proxyHost, proxyPort); + + LOG.info("Begin to execute postSqls:[{}]. context info:{}.", + StringUtils.join(this.postSqls, ";"), BASIC_MESSAGE); + WriterUtil.executeSqls(connection, this.postSqls, BASIC_MESSAGE, dataBaseType); + DBUtil.closeDBResources(null, null, connection); + } + + public void destroy(Configuration writerSliceConfig) { + } + + protected void doBatchInsert(Connection connection, List buffer) + throws SQLException { + PreparedStatement preparedStatement = null; + try { + connection.setAutoCommit(false); + preparedStatement = connection + .prepareStatement(this.writeRecordSql); + + for (Record record : buffer) { + preparedStatement = fillPreparedStatement( + preparedStatement, record); + preparedStatement.addBatch(); + } + preparedStatement.executeBatch(); + connection.commit(); + } catch (SQLException e) { + LOG.warn("回滚此次写入, 采用每次写入一行方式提交. 
因为:" + e.getMessage()); + connection.rollback(); + doOneInsert(connection, buffer); + } catch (Exception e) { + throw DataXException.asDataXException( + DBUtilErrorCode.WRITE_DATA_ERROR, e); + } finally { + DBUtil.closeDBResources(preparedStatement, null); + } + } + + protected void doOneInsert(Connection connection, List buffer) { + PreparedStatement preparedStatement = null; + try { + connection.setAutoCommit(true); + preparedStatement = connection + .prepareStatement(this.writeRecordSql); + + for (Record record : buffer) { + try { + preparedStatement = fillPreparedStatement( + preparedStatement, record); + preparedStatement.execute(); + } catch (SQLException e) { + LOG.debug(e.toString()); + + this.taskPluginCollector.collectDirtyRecord(record, e); + } finally { + // 最后不要忘了关闭 preparedStatement + preparedStatement.clearParameters(); + } + } + } catch (Exception e) { + throw DataXException.asDataXException( + DBUtilErrorCode.WRITE_DATA_ERROR, e); + } finally { + DBUtil.closeDBResources(preparedStatement, null); + } + } + + // 直接使用了两个类变量:columnNumber,resultSetMetaData + protected PreparedStatement fillPreparedStatement(PreparedStatement preparedStatement, Record record) + throws SQLException { + for (int i = 0; i < this.columnNumber; i++) { + int columnSqltype = this.resultSetMetaData.getMiddle().get(i); + preparedStatement = fillPreparedStatementColumnType(preparedStatement, i, columnSqltype, record.getColumn(i)); + } + + return preparedStatement; + } + + protected PreparedStatement fillPreparedStatementColumnType(PreparedStatement preparedStatement, int columnIndex, int columnSqltype, Column column) throws SQLException { + java.util.Date utilDate; + switch (columnSqltype) { + case Types.CHAR: + case Types.NCHAR: + case Types.CLOB: + case Types.NCLOB: + case Types.VARCHAR: + case Types.LONGVARCHAR: + case Types.NVARCHAR: + case Types.LONGNVARCHAR: + preparedStatement.setString(columnIndex + 1, column + .asString()); + break; + + case Types.SMALLINT: + case Types.INTEGER: + case Types.BIGINT: + case Types.NUMERIC: + case Types.DECIMAL: + case Types.FLOAT: + case Types.REAL: + case Types.DOUBLE: + String strValue = column.asString(); + if (emptyAsNull && "".equals(strValue)) { + preparedStatement.setString(columnIndex + 1, null); + } else { + preparedStatement.setString(columnIndex + 1, strValue); + } + break; + + //tinyint is a little special in some database like mysql {boolean->tinyint(1)} + case Types.TINYINT: + Long longValue = column.asLong(); + if (null == longValue) { + preparedStatement.setString(columnIndex + 1, null); + } else { + preparedStatement.setString(columnIndex + 1, longValue.toString()); + } + break; + + // for mysql bug, see http://bugs.mysql.com/bug.php?id=35115 + case Types.DATE: + if (this.resultSetMetaData.getRight().get(columnIndex) + .equalsIgnoreCase("year")) { + if (column.asBigInteger() == null) { + preparedStatement.setString(columnIndex + 1, null); + } else { + preparedStatement.setInt(columnIndex + 1, column.asBigInteger().intValue()); + } + } else { + java.sql.Date sqlDate = null; + try { + utilDate = column.asDate(); + } catch (DataXException e) { + throw new SQLException(String.format( + "Date 类型转换错误:[%s]", column)); + } + + if (null != utilDate) { + sqlDate = new java.sql.Date(utilDate.getTime()); + } + preparedStatement.setDate(columnIndex + 1, sqlDate); + } + break; + + case Types.TIME: + java.sql.Time sqlTime = null; + try { + utilDate = column.asDate(); + } catch (DataXException e) { + throw new SQLException(String.format( + "TIME 类型转换错误:[%s]", 
column)); + } + + if (null != utilDate) { + sqlTime = new java.sql.Time(utilDate.getTime()); + } + preparedStatement.setTime(columnIndex + 1, sqlTime); + break; + + case Types.TIMESTAMP: + java.sql.Timestamp sqlTimestamp = null; + try { + utilDate = column.asDate(); + } catch (DataXException e) { + throw new SQLException(String.format( + "TIMESTAMP 类型转换错误:[%s]", column)); + } + + if (null != utilDate) { + sqlTimestamp = new java.sql.Timestamp( + utilDate.getTime()); + } + preparedStatement.setTimestamp(columnIndex + 1, sqlTimestamp); + break; + + case Types.BINARY: + case Types.VARBINARY: + case Types.BLOB: + case Types.LONGVARBINARY: + preparedStatement.setBytes(columnIndex + 1, column + .asBytes()); + break; + + case Types.BOOLEAN: + preparedStatement.setString(columnIndex + 1, column.asString()); + break; + + // warn: bit(1) -> Types.BIT 可使用setBoolean + // warn: bit(>1) -> Types.VARBINARY 可使用setBytes + case Types.BIT: + if (this.dataBaseType == DataBaseType.MySql) { + preparedStatement.setBoolean(columnIndex + 1, column.asBoolean()); + } else { + preparedStatement.setString(columnIndex + 1, column.asString()); + } + break; + default: + throw DataXException + .asDataXException( + DBUtilErrorCode.UNSUPPORTED_TYPE, + String.format( + "您的配置文件中的列配置信息有误. 因为DataX 不支持数据库写入这种字段类型. 字段名:[%s], 字段类型:[%d], 字段Java类型:[%s]. 请修改表中该字段的类型或者不同步该字段.", + this.resultSetMetaData.getLeft() + .get(columnIndex), + this.resultSetMetaData.getMiddle() + .get(columnIndex), + this.resultSetMetaData.getRight() + .get(columnIndex))); + } + return preparedStatement; + } + + private void calcWriteRecordSql() { + if (!VALUE_HOLDER.equals(calcValueHolder(""))) { + List valueHolders = new ArrayList(columnNumber); + for (int i = 0; i < columns.size(); i++) { + String type = resultSetMetaData.getRight().get(i); + valueHolders.add(calcValueHolder(type)); + } + + boolean forceUseUpdate = false; + //ob10的处理 + if (dataBaseType != null && dataBaseType == DataBaseType.MySql && OriginalConfPretreatmentUtil.isOB10(jdbcUrl)) { + forceUseUpdate = true; + } + + INSERT_OR_REPLACE_TEMPLATE = WriterUtil.getWriteTemplate(columns, valueHolders, writeMode, dataBaseType, forceUseUpdate, primarykeys); + writeRecordSql = String.format(INSERT_OR_REPLACE_TEMPLATE, this.table); + } + } + + protected String calcValueHolder(String columnType) { + return VALUE_HOLDER; + } + } +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/writer/Constant.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/writer/Constant.java new file mode 100644 index 000000000..7331db510 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/writer/Constant.java @@ -0,0 +1,22 @@ +package com.alibaba.datax.plugin.rdbms.writer; + +/** + * 用于插件解析用户配置时,需要进行标识(MARK)的常量的声明. 
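+ * (In English: constants used as marks when the plugin parses user configuration.)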
+ */ +public final class Constant { + public static final int DEFAULT_BATCH_SIZE = 2048; + + public static final int DEFAULT_BATCH_BYTE_SIZE = 32 * 1024 * 1024; + + public static final String TABLE_NAME_PLACEHOLDER = "@table"; + + public static final String CONN_MARK = "connection"; + + public static final String TABLE_NUMBER_MARK = "tableNumber"; + + public static final String INSERT_OR_REPLACE_TEMPLATE_MARK = "insertOrReplaceTemplate"; + + public static final String OB10_SPLIT_STRING = "||_dsc_ob10_dsc_||"; + public static final String OB10_SPLIT_STRING_PATTERN = "\\|\\|_dsc_ob10_dsc_\\|\\|"; + +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/writer/Key.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/writer/Key.java new file mode 100644 index 000000000..e35e41101 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/writer/Key.java @@ -0,0 +1,56 @@ +package com.alibaba.datax.plugin.rdbms.writer; + +public final class Key { + public final static String JDBC_URL = "jdbcUrl"; + + public final static String USERNAME = "username"; + + public final static String PASSWORD = "password"; + + public final static String TABLE = "table"; + + public final static String COLUMN = "column"; + + //可选值为:insert,replace,默认为 insert (mysql 支持,oracle 没用 replace 机制,只能 insert,oracle 可以不暴露这个参数) + public final static String WRITE_MODE = "writeMode"; + + public final static String PRE_SQL = "preSql"; + + public final static String POST_SQL = "postSql"; + + public final static String TDDL_APP_NAME = "appName"; + + //默认值:256 + public final static String BATCH_SIZE = "batchSize"; + + //默认值:32m + public final static String BATCH_BYTE_SIZE = "batchByteSize"; + + public final static String EMPTY_AS_NULL = "emptyAsNull"; + + public final static String DB_NAME_PATTERN = "dbNamePattern"; + + public final static String DB_RULE = "dbRule"; + + public final static String TABLE_NAME_PATTERN = "tableNamePattern"; + + public final static String TABLE_RULE = "tableRule"; + + public final static String DRYRUN = "dryRun"; + + public final static String CONNPARM = "connParams"; + + public final static String HOST = "host"; + + public final static String PROXY_HOST = "proxyHost"; + + public final static String PORT = "port"; + + public final static String PROXY_PORT = "proxyPort"; + + public final static String DATABASE = "database"; + + public final static String JDBCTEM = "jdbc:mysql://"; + + public final static String PRIMARYKEY = "primaryKeys"; +} \ No newline at end of file diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/writer/MysqlWriterErrorCode.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/writer/MysqlWriterErrorCode.java new file mode 100644 index 000000000..523292ad0 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/writer/MysqlWriterErrorCode.java @@ -0,0 +1,32 @@ +package com.alibaba.datax.plugin.rdbms.writer; + +import com.alibaba.datax.common.spi.ErrorCode; + +//TODO 后续考虑与 util 包种的 DBUTilErrorCode 做合并.(区分读和写的错误码) +public enum MysqlWriterErrorCode implements ErrorCode { + ; + + private final String code; + private final String describe; + + private MysqlWriterErrorCode(String code, String describe) { + this.code = code; + this.describe = describe; + } + + @Override + public String getCode() { + return 
this.code; + } + + @Override + public String getDescription() { + return this.describe; + } + + @Override + public String toString() { + return String.format("Code:[%s], Describe:[%s]. ", this.code, + this.describe); + } +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/writer/util/OriginalConfPretreatmentUtil.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/writer/util/OriginalConfPretreatmentUtil.java new file mode 100644 index 000000000..7caa788ef --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/writer/util/OriginalConfPretreatmentUtil.java @@ -0,0 +1,228 @@ +package com.alibaba.datax.plugin.rdbms.writer.util; + +import com.alibaba.datax.common.exception.DataXException; +import com.alibaba.datax.common.util.Configuration; +import com.alibaba.datax.common.util.ListUtil; +import com.alibaba.datax.plugin.rdbms.util.*; +import com.alibaba.datax.plugin.rdbms.writer.Key; +import com.alibaba.datax.plugin.rdbms.writer.Constant; +import com.webank.wedatasphere.exchangis.datax.common.CryptoUtils; +import com.webank.wedatasphere.exchangis.datax.util.Json; +import org.apache.commons.lang3.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; + +import static com.alibaba.datax.plugin.rdbms.util.Constant.DEFAULT_PROXY_SOCKS_HOST; +import static com.alibaba.datax.plugin.rdbms.util.Constant.DEFAULT_PROXY_SOCKS_PORT; + +public final class OriginalConfPretreatmentUtil { + private static final Logger LOG = LoggerFactory + .getLogger(OriginalConfPretreatmentUtil.class); + + public static DataBaseType DATABASE_TYPE; + +// static static void doPretreatment(Configuration originalConfig) { +// doPretreatment(originalConfig,null); +// } + + public static void doPretreatment(Configuration originalConfig, DataBaseType dataBaseType) { + // 检查 username/password 配置(必填) + originalConfig.getNecessaryValue(Key.USERNAME, DBUtilErrorCode.REQUIRED_VALUE); + originalConfig.getNecessaryValue(Key.PASSWORD, DBUtilErrorCode.REQUIRED_VALUE); + + doCheckBatchSize(originalConfig); + + simplifyConf(originalConfig); + + dealColumnConf(originalConfig); + dealWriteMode(originalConfig, dataBaseType); + } + + public static void doCheckBatchSize(Configuration originalConfig) { + // 检查batchSize 配置(选填,如果未填写,则设置为默认值) + int batchSize = originalConfig.getInt(Key.BATCH_SIZE, Constant.DEFAULT_BATCH_SIZE); + if (batchSize < 1) { + throw DataXException.asDataXException(DBUtilErrorCode.ILLEGAL_VALUE, String.format( + "您的batchSize配置有误. 您所配置的写入数据库表的 batchSize:%s 不能小于1. 推荐配置范围为:[100-1000], 该值越大, 内存溢出可能性越大. 
请检查您的配置并作出修改.", + batchSize)); + } + + originalConfig.set(Key.BATCH_SIZE, batchSize); + } + + public static void simplifyConf(Configuration originalConfig) { + List connections = originalConfig.getList(Constant.CONN_MARK, + Object.class); + + int tableNum = 0; + + for (int i = 0, len = connections.size(); i < len; i++) { + Configuration connConf = Configuration.from(Json.toJson(connections.get(i), null)); + + String jdbcUrl = ""; + if(DATABASE_TYPE.equals(DataBaseType.MySql)){ + Map map = connConf.getMap(Key.JDBC_URL); + String parameter = ""; + Map parameterMap = originalConfig.getMap(Key.CONNPARM, new HashMap<>()); + for(String key : map.keySet()){ + if (key.equals(Key.CONNPARM)){ + parameterMap.putAll((Map) map.get(key)); + } + } + parameter = parameterMap.entrySet().stream().map( + e->String.join("=", e.getKey(), String.valueOf(e.getValue())) + ).collect(Collectors.joining("&")); + jdbcUrl = Key.JDBCTEM + map.get(Key.HOST).toString() + ":" + map.get(Key.PORT).toString() + "/" + map.get(Key.DATABASE).toString(); + if(parameter != null && parameter.length() != 0){ + jdbcUrl = Key.JDBCTEM + map.get(Key.HOST).toString() + ":" + map.get(Key.PORT).toString() + "/" + map.get(Key.DATABASE).toString() + "?" + parameter; + } + } else if (DATABASE_TYPE.equals(DataBaseType.Oracle)){ + Map map = connConf.getMap(com.alibaba.datax.plugin.rdbms.reader.Key.JDBC_URL); + + jdbcUrl = com.alibaba.datax.plugin.rdbms.reader.Key.JDBCORCL + "//" + map.get(com.alibaba.datax.plugin.rdbms.reader.Key.HOST).toString() + ":" + map.get(com.alibaba.datax.plugin.rdbms.reader.Key.PORT).toString() + "/" + map.get(com.alibaba.datax.plugin.rdbms.reader.Key.SERVICENAME).toString(); + if(StringUtils.isEmpty(map.get(com.alibaba.datax.plugin.rdbms.reader.Key.SERVICENAME).toString())){ + jdbcUrl = com.alibaba.datax.plugin.rdbms.reader.Key.JDBCORCL + map.get(com.alibaba.datax.plugin.rdbms.reader.Key.HOST).toString() + ":" + map.get(com.alibaba.datax.plugin.rdbms.reader.Key.PORT).toString() + ":" + map.get(com.alibaba.datax.plugin.rdbms.reader.Key.SID).toString(); + + } + }else { + jdbcUrl = connConf.getString(Key.JDBC_URL); + } + if (StringUtils.isBlank(jdbcUrl)) { + throw DataXException.asDataXException(DBUtilErrorCode.REQUIRED_VALUE, "您未配置的写入数据库表的 jdbcUrl."); + } + + jdbcUrl = DATABASE_TYPE.appendJDBCSuffixForReader(jdbcUrl); + originalConfig.set(String.format("%s[%d].%s", Constant.CONN_MARK, i, Key.JDBC_URL), + jdbcUrl); + + List tables = connConf.getList(Key.TABLE, String.class); + + if (null == tables || tables.isEmpty()) { + throw DataXException.asDataXException(DBUtilErrorCode.REQUIRED_VALUE, + "您未配置写入数据库表的表名称. 根据配置DataX找不到您配置的表. 请检查您的配置并作出修改."); + } + + // 对每一个connection 上配置的table 项进行解析 + List expandedTables = TableExpandUtil + .expandTableConf(DATABASE_TYPE, tables); + + if (null == expandedTables || expandedTables.isEmpty()) { + throw DataXException.asDataXException(DBUtilErrorCode.CONF_ERROR, + "您配置的写入数据库表名称错误. DataX找不到您配置的表,请检查您的配置并作出修改."); + } + + tableNum += expandedTables.size(); + + originalConfig.set(String.format("%s[%d].%s", Constant.CONN_MARK, + i, Key.TABLE), expandedTables); + } + + originalConfig.set(Constant.TABLE_NUMBER_MARK, tableNum); + } + + public static void dealColumnConf(Configuration originalConfig, ConnectionFactory connectionFactory, String oneTable) { + List userConfiguredColumns = originalConfig.getList(Key.COLUMN, String.class); + if (null == userConfiguredColumns || userConfiguredColumns.isEmpty()) { + throw DataXException.asDataXException(DBUtilErrorCode.ILLEGAL_VALUE, + "您的配置文件中的列配置信息有误. 
+        } else {
+            boolean isPreCheck = originalConfig.getBool(Key.DRYRUN, false);
+            List<String> allColumns;
+            if (isPreCheck) {
+                allColumns = DBUtil.getTableColumnsByConn(DATABASE_TYPE, connectionFactory.getConnecttionWithoutRetry(), oneTable, connectionFactory.getConnectionInfo());
+            } else {
+                allColumns = DBUtil.getTableColumnsByConn(DATABASE_TYPE, connectionFactory.getConnecttion(), oneTable, connectionFactory.getConnectionInfo());
+            }
+
+            LOG.info("table:[{}] all columns:[\n{}\n].", oneTable,
+                    StringUtils.join(allColumns, ","));
+
+            if (1 == userConfiguredColumns.size() && "*".equals(userConfiguredColumns.get(0))) {
+                LOG.warn("The column configuration in your job is risky: the target table columns are configured as *, so any change to the number or types of the table fields may affect the correctness of the task or even make it fail. Please check your configuration and modify it.");
+
+                // Backfill the value; it has to be handed over to subsequent processing as String
+                originalConfig.set(Key.COLUMN, allColumns);
+            } else if (userConfiguredColumns.size() > allColumns.size()) {
+                throw DataXException.asDataXException(DBUtilErrorCode.ILLEGAL_VALUE,
+                        String.format("The column configuration in your job is invalid: the number of configured target table fields (%s) is greater than the total number of fields in the target table (%s). Please check your configuration and modify it.",
+                                userConfiguredColumns.size(), allColumns.size()));
+            } else {
+                // Make sure the user-configured columns contain no duplicates
+                ListUtil.makeSureNoValueDuplicate(userConfiguredColumns, false);
+
+                // Check that every column is a valid column of the table (verified by executing a "select column from table" once)
+                DBUtil.getColumnMetaData(connectionFactory.getConnecttion(), oneTable, StringUtils.join(userConfiguredColumns, ","));
+            }
+        }
+    }
+
+    public static void dealColumnConf(Configuration originalConfig) {
+        String jdbcUrl = originalConfig.getString(String.format("%s[0].%s",
+                Constant.CONN_MARK, Key.JDBC_URL));
+
+        String username = originalConfig.getString(Key.USERNAME);
+        String password = originalConfig.getString(Key.PASSWORD);
+        String proxyHost = originalConfig.getString(Key.PROXY_HOST, DEFAULT_PROXY_SOCKS_HOST);
+        int proxyPort = originalConfig.getInt(Key.PROXY_PORT, DEFAULT_PROXY_SOCKS_PORT);
+        if (StringUtils.isNotBlank(password)) {
+            try {
+                password = (String) CryptoUtils.string2Object(password);
+            } catch (Exception e) {
+                throw DataXException.asDataXException(DBUtilErrorCode.CONF_ERROR, "decrypt password failed");
+            }
+        }
+        String oneTable = originalConfig.getString(String.format(
+                "%s[0].%s[0]", Constant.CONN_MARK, Key.TABLE));
+
+        JdbcConnectionFactory jdbcConnectionFactory = new JdbcConnectionFactory(DATABASE_TYPE, jdbcUrl, username, password, proxyHost, proxyPort);
+        dealColumnConf(originalConfig, jdbcConnectionFactory, oneTable);
+    }
+
+    public static void dealWriteMode(Configuration originalConfig, DataBaseType dataBaseType) {
+        List<String> columns = originalConfig.getList(Key.COLUMN, String.class);
+
+        String jdbcUrl = originalConfig.getString(String.format("%s[0].%s",
+                Constant.CONN_MARK, Key.JDBC_URL));
+
+        // Defaults to insert mode
+        String writeMode = originalConfig.getString(Key.WRITE_MODE, "INSERT");
+        List<String> primaryKeys = originalConfig.getList(Key.PRIMARYKEY, String.class);
+
+        List<String> valueHolders = new ArrayList<>(columns.size());
+        for (int i = 0; i < columns.size(); i++) {
+            valueHolders.add("?");
+        }
+
+        boolean forceUseUpdate = false;
+        // Special handling for OceanBase 1.0
+        if (dataBaseType == DataBaseType.MySql && isOB10(jdbcUrl)) {
+            forceUseUpdate = true;
+        }
+
+        String writeDataSqlTemplate = WriterUtil.getWriteTemplate(columns, valueHolders, writeMode, dataBaseType, forceUseUpdate, primaryKeys);
+
+        LOG.info("Write data SQL template [\n{}\n], for jdbcUrl like: [{}]", writeDataSqlTemplate, jdbcUrl);
+
+        originalConfig.set(Constant.INSERT_OR_REPLACE_TEMPLATE_MARK, writeDataSqlTemplate);
+    }
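+
+    // Editor's note (illustrative, not part of the original patch): assuming a MySQL target
+    // with columns [id, name] and writeMode "update", the template produced above would be:
+    //   INSERT INTO %s (id,name) VALUES(?,?) ON DUPLICATE KEY UPDATE id=VALUES(id),name=VALUES(name)
+    // where %s is later substituted with the actual table name.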
+    public static boolean isOB10(String jdbcUrl) {
+        // Handle OceanBase 1.0 style jdbcUrls
+        if (jdbcUrl.startsWith(Constant.OB10_SPLIT_STRING)) {
+            String[] ss = jdbcUrl.split(Constant.OB10_SPLIT_STRING_PATTERN);
+            if (ss.length != 3) {
+                throw DataXException
+                        .asDataXException(
+                                DBUtilErrorCode.JDBC_OB10_ADDRESS_ERROR, "Malformed OB10 JDBC url, please contact askdatax");
+            }
+            return true;
+        }
+        return false;
+    }
+}
diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/writer/util/WriterUtil.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/writer/util/WriterUtil.java
new file mode 100644
index 000000000..3913a4088
--- /dev/null
+++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/rdbms/writer/util/WriterUtil.java
@@ -0,0 +1,258 @@
+package com.alibaba.datax.plugin.rdbms.writer.util;
+
+import com.alibaba.datax.common.exception.DataXException;
+import com.alibaba.datax.common.util.Configuration;
+import com.alibaba.datax.plugin.rdbms.util.DBUtil;
+import com.alibaba.datax.plugin.rdbms.util.DBUtilErrorCode;
+import com.alibaba.datax.plugin.rdbms.util.DataBaseType;
+import com.alibaba.datax.plugin.rdbms.util.RdbmsException;
+import com.alibaba.datax.plugin.rdbms.writer.Constant;
+import com.alibaba.datax.plugin.rdbms.writer.Key;
+import com.alibaba.druid.sql.parser.ParserException;
+import com.webank.wedatasphere.exchangis.datax.util.Json;
+import org.apache.commons.lang3.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.sql.Connection;
+import java.sql.Statement;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
+public final class WriterUtil {
+    private static final Logger LOG = LoggerFactory.getLogger(WriterUtil.class);
+
+    // TODO: report split errors properly
+    public static List<Configuration> doSplit(Configuration simplifiedConf,
+                                              int adviceNumber) {
+
+        List<Configuration> splitResultConfigs = new ArrayList<>();
+
+        int tableNumber = simplifiedConf.getInt(Constant.TABLE_NUMBER_MARK);
+
+        // Handle the single-table case
+        if (tableNumber == 1) {
+            // table and jdbcUrl were already extracted during the earlier master prepare phase,
+            // so the handling here is very simple
+            for (int j = 0; j < adviceNumber; j++) {
+                splitResultConfigs.add(simplifiedConf.clone());
+            }
+
+            return splitResultConfigs;
+        }
+
+        if (tableNumber != adviceNumber) {
+            throw DataXException.asDataXException(DBUtilErrorCode.CONF_ERROR,
+                    String.format("Your job configuration is invalid: the number of target tables to write is %s, but the advised number of splits is %s. Please check your configuration and modify it.",
+                            tableNumber, adviceNumber));
+        }
+
+        String jdbcUrl;
+        List<String> preSqls = simplifiedConf.getList(Key.PRE_SQL, String.class);
+        List<String> postSqls = simplifiedConf.getList(Key.POST_SQL, String.class);
+
+        List<Object> conns = simplifiedConf.getList(Constant.CONN_MARK,
+                Object.class);
+
+        for (Object conn : conns) {
+            Configuration sliceConfig = simplifiedConf.clone();
+
+            Configuration connConf = Configuration.from(Json.toJson(conn, null));
+            jdbcUrl = connConf.getString(Key.JDBC_URL);
+            sliceConfig.set(Key.JDBC_URL, jdbcUrl);
+
+            sliceConfig.remove(Constant.CONN_MARK);
+
+            List<String> tables = connConf.getList(Key.TABLE, String.class);
+
+            for (String table : tables) {
+                Configuration tempSlice = sliceConfig.clone();
+                tempSlice.set(Key.TABLE, table);
+                tempSlice.set(Key.PRE_SQL, renderPreOrPostSqls(preSqls, table));
+                tempSlice.set(Key.POST_SQL, renderPreOrPostSqls(postSqls, table));
+
+                splitResultConfigs.add(tempSlice);
+            }
+
+        }
+
+        return splitResultConfigs;
+    }
+
+    public static List<String> renderPreOrPostSqls(List<String> preOrPostSqls, String tableName) {
+        if (null == preOrPostSqls) {
+            return Collections.emptyList();
+        }
+
+        List<String> renderedSqls = new ArrayList<>();
+        for (String sql : preOrPostSqls) {
+            // Skip blank preSql entries; they are not added to the execution queue
+            if (StringUtils.isNotBlank(sql)) {
+                renderedSqls.add(sql.replace(Constant.TABLE_NAME_PLACEHOLDER, tableName));
+            }
+        }
+
+        return renderedSqls;
+    }
+
+    public static void executeSqls(Connection conn, List<String> sqls, String basicMessage, DataBaseType dataBaseType) {
+        Statement stmt = null;
+        String currentSql = null;
+        try {
+            stmt = conn.createStatement();
+            for (String sql : sqls) {
+                currentSql = sql;
+                DBUtil.executeSqlWithoutResultSet(stmt, sql);
+            }
+        } catch (Exception e) {
+            throw RdbmsException.asQueryException(dataBaseType, e, currentSql, null, null);
+        } finally {
+            DBUtil.closeDBResources(null, stmt, null);
+        }
+    }
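+
+    // Editor's note (illustrative, not part of the original patch): assuming
+    // Constant.TABLE_NAME_PLACEHOLDER is the "@table" marker, a configured preSql such as
+    //   "delete from @table where pt = '2019'"
+    // is rendered by renderPreOrPostSqls for table "t_user_0" as
+    //   "delete from t_user_0 where pt = '2019'"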
+
+    public static String getWriteTemplate(List<String> columnHolders, List<String> valueHolders, String writeMode, DataBaseType dataBaseType, boolean forceUseUpdate, List<String> primarykeys) {
+        boolean isWriteModeLegal = writeMode.trim().toLowerCase().startsWith("insert")
+                || writeMode.trim().toLowerCase().startsWith("replace")
+                || writeMode.trim().toLowerCase().startsWith("update");
+
+        if (!isWriteModeLegal) {
+            throw DataXException.asDataXException(DBUtilErrorCode.ILLEGAL_VALUE,
+                    String.format("The configured writeMode %s is invalid: DataX currently only supports replace, update or insert. Please check your configuration and modify it.", writeMode));
+        }
+        String writeDataSqlTemplate;
+        if (forceUseUpdate ||
+                ((dataBaseType == DataBaseType.MySql || dataBaseType == DataBaseType.Tddl) && writeMode.trim().toLowerCase().startsWith("update"))
+                ) {
+            // update mode is only used for MySQL
+            writeDataSqlTemplate = new StringBuilder()
+                    .append("INSERT INTO %s (").append(StringUtils.join(columnHolders, ","))
+                    .append(") VALUES(").append(StringUtils.join(valueHolders, ","))
+                    .append(")")
+                    .append(onDuplicateKeyUpdateString(columnHolders))
+                    .toString();
+        } else if (dataBaseType == DataBaseType.Oracle && primarykeys != null && !primarykeys.isEmpty()) {
+            writeDataSqlTemplate = onMergeIntoDoString(primarykeys, columnHolders);
+        } else {
+            // Safety net: if update was requested for an unsupported database, fall back to replace
+            if (writeMode.trim().toLowerCase().startsWith("update")) {
+                writeMode = "replace";
+            }
+            writeDataSqlTemplate = new StringBuilder().append(writeMode)
+                    .append(" INTO %s (").append(StringUtils.join(columnHolders, ","))
+                    .append(") VALUES(").append(StringUtils.join(valueHolders, ","))
+                    .append(")").toString();
+        }
+
+        return writeDataSqlTemplate;
+    }
+
+    public static String onDuplicateKeyUpdateString(List<String> columnHolders) {
+        if (columnHolders == null || columnHolders.size() < 1) {
+            return "";
+        }
+        StringBuilder sb = new StringBuilder();
+        sb.append(" ON DUPLICATE KEY UPDATE ");
+        boolean first = true;
+        for (String column : columnHolders) {
+            if (!first) {
+                sb.append(",");
+            } else {
+                first = false;
+            }
+            sb.append(column);
+            sb.append("=VALUES(");
+            sb.append(column);
+            sb.append(")");
+        }
+
+        return sb.toString();
+    }
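+
+    // Editor's note (illustrative, not part of the original patch): for primaryKeys [id]
+    // and columnHolders [id, name], onMergeIntoDoString below is expected to produce an
+    // Oracle upsert template of the form:
+    //   MERGE INTO %s A USING ( SELECT ? AS id,? AS name FROM DUAL ) TMP ON (TMP.id = A.id )
+    //   WHEN MATCHED THEN UPDATE SET name = TMP.name
+    //   WHEN NOT MATCHED THEN INSERT (id,name) VALUES(TMP.id,TMP.name)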
AS ").append(columnHolder).append(","); + insert.append("TMP.").append(columnHolder).append(","); + }); + + columnHolders.stream().filter(columnHolder -> !primaryKeys.contains(columnHolder)) + .forEach(columnHolder -> { + update.append(columnHolder).append(" = TMP.") + .append(columnHolder).append(","); + }); + update.deleteCharAt(update.length()-1); + sb.deleteCharAt(sb.length()-1); + insert.deleteCharAt(insert.length()-1); + str.replace(update.length()-3,update.length(),""); + sb.append(" FROM DUAL ) TMP ON (").append(str).append(" ) WHEN MATCHED THEN UPDATE SET ") + .append(update).append(" WHEN NOT MATCHED THEN ").append("INSERT (") + .append(StringUtils.join(columnHolders,",")) + .append(") VALUES(").append(insert).append(")"); + return sb.toString(); + } + + public static void preCheckPrePareSQL(Configuration originalConfig, DataBaseType type) { + List conns = originalConfig.getList(Constant.CONN_MARK, Object.class); + Configuration connConf = Configuration.from(Json.toJson(conns.get(0), null)); + String table = connConf.getList(Key.TABLE, String.class).get(0); + + List preSqls = originalConfig.getList(Key.PRE_SQL, + String.class); + List renderedPreSqls = WriterUtil.renderPreOrPostSqls( + preSqls, table); + + if (null != renderedPreSqls && !renderedPreSqls.isEmpty()) { + LOG.info("Begin to preCheck preSqls:[{}].", + StringUtils.join(renderedPreSqls, ";")); + for (String sql : renderedPreSqls) { + try { + DBUtil.sqlValid(sql, type); + } catch (ParserException e) { + throw RdbmsException.asPreSQLParserException(type, e, sql); + } + } + } + } + + public static void preCheckPostSQL(Configuration originalConfig, DataBaseType type) { + List conns = originalConfig.getList(Constant.CONN_MARK, Object.class); + Configuration connConf = Configuration.from(Json.toJson(conns.get(0), null)); + String table = connConf.getList(Key.TABLE, String.class).get(0); + + List postSqls = originalConfig.getList(Key.POST_SQL, + String.class); + List renderedPostSqls = WriterUtil.renderPreOrPostSqls( + postSqls, table); + if (null != renderedPostSqls && !renderedPostSqls.isEmpty()) { + + LOG.info("Begin to preCheck postSqls:[{}].", + StringUtils.join(renderedPostSqls, ";")); + for (String sql : renderedPostSqls) { + try { + DBUtil.sqlValid(sql, type); + } catch (ParserException e) { + throw RdbmsException.asPostSQLParserException(type, e, sql); + } + + } + } + } + + +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/PathMeta.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/PathMeta.java new file mode 100644 index 000000000..a0dd8b989 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/PathMeta.java @@ -0,0 +1,56 @@ +package com.alibaba.datax.plugin.unstructuredstorage; + +import java.util.Objects; + +/** + * @author davidhua + * 2019/6/13 + */ +public class PathMeta { + private String absolute; + + private String relative; + + public PathMeta(){ + + } + + public PathMeta(String absolute, String relative){ + this.absolute = absolute; + this.relative = relative; + } + + public String getAbsolute(){ + return this.absolute; + } + + public String getRelative(){ + return this.relative; + } + + public void setAbsolute(String absolute) { + this.absolute = absolute; + } + + public void setRelative(String relative) { + this.relative = relative; + } + + @Override + public boolean equals(Object o) { + if (this == o){ + return 
+        if (o == null || getClass() != o.getClass()){
+            return false;
+        }
+        PathMeta pathMeta = (PathMeta) o;
+        return Objects.equals(absolute, pathMeta.absolute);
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(absolute);
+    }
+}
diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/reader/ColumnEntry.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/reader/ColumnEntry.java
new file mode 100644
index 000000000..eb69de903
--- /dev/null
+++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/reader/ColumnEntry.java
@@ -0,0 +1,62 @@
+package com.alibaba.datax.plugin.unstructuredstorage.reader;
+
+import com.webank.wedatasphere.exchangis.datax.util.Json;
+import org.apache.commons.lang3.StringUtils;
+
+import java.text.DateFormat;
+import java.text.SimpleDateFormat;
+
+public class ColumnEntry {
+    private Integer index;
+    private String type;
+    private String value;
+    private String format;
+    private DateFormat dateParse;
+
+    public Integer getIndex() {
+        return index;
+    }
+
+    public void setIndex(Integer index) {
+        this.index = index;
+    }
+
+    public String getType() {
+        return type;
+    }
+
+    public void setType(String type) {
+        this.type = type;
+    }
+
+    public String getValue() {
+        return value;
+    }
+
+    public void setValue(String value) {
+        this.value = value;
+    }
+
+    public String getFormat() {
+        return format;
+    }
+
+    public void setFormat(String format) {
+        this.format = format;
+        if (StringUtils.isNotBlank(this.format)) {
+            this.dateParse = new SimpleDateFormat(this.format);
+        }
+    }
+
+    public DateFormat getDateFormat() {
+        return this.dateParse;
+    }
+
+    public String toJSONString() {
+        return ColumnEntry.toJSONString(this);
+    }
+
+    public static String toJSONString(ColumnEntry columnEntry) {
+        return Json.toJson(columnEntry, null);
+    }
+}
diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/reader/Constant.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/reader/Constant.java
new file mode 100644
index 000000000..a55694ac3
--- /dev/null
+++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/reader/Constant.java
@@ -0,0 +1,13 @@
+package com.alibaba.datax.plugin.unstructuredstorage.reader;
+
+public class Constant {
+    public static final String DEFAULT_ENCODING = "UTF-8";
+
+    public static final char DEFAULT_FIELD_DELIMITER = ',';
+
+    public static final boolean DEFAULT_SKIP_HEADER = false;
+
+    public static final String DEFAULT_NULL_FORMAT = "\\N";
+
+    public static final Integer DEFAULT_BUFFER_SIZE = 8192;
+}
diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/reader/ExpandLzopInputStream.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/reader/ExpandLzopInputStream.java
new file mode 100644
index 000000000..267da1613
--- /dev/null
+++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/reader/ExpandLzopInputStream.java
@@ -0,0 +1,154 @@
+/*
+ * Description:
+ *
+ * Uses shevek's open-source LZO decompression code from GitHub (https://github.com/shevek/lzo-java).
+ *
+ * This class extends LzopInputStream because the open-source version defines LZO_LIBRARY_VERSION as:
+ *     static final short LZO_LIBRARY_VERSION = 0x2050;
+ * while many lzo files carry an LZO_LIBRARY_VERSION of 0x2060. To decompress lzo files of that
+ * version, the value of LZO_LIBRARY_VERSION has to be changed so that no exception is thrown;
+ * but since LZO_LIBRARY_VERSION is final, its value cannot be modified, so this subclass of
+ * LzopInputStream redefines the value of LZO_LIBRARY_VERSION.
+ *
+ */
+package com.alibaba.datax.plugin.unstructuredstorage.reader;
+
+import org.anarres.lzo.LzoVersion;
+import org.anarres.lzo.LzopConstants;
+import org.anarres.lzo.LzopInputStream;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+import javax.annotation.Nonnegative;
+import javax.annotation.Nonnull;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.Arrays;
+import java.util.zip.Adler32;
+import java.util.zip.CRC32;
+
+/**
+ * Created by mingya.wmy on 16/8/26.
+ */
+public class ExpandLzopInputStream extends LzopInputStream {
+
+
+    public ExpandLzopInputStream(@Nonnull InputStream in) throws IOException {
+        super(in);
+    }
+
+    /**
+     * Read and verify an lzo header, setting relevant block checksum options
+     * and ignoring most everything else.
+     */
+    @Override
+    protected int readHeader() throws IOException {
+        short LZO_LIBRARY_VERSION = 0x2060;
+        Log LOG = LogFactory.getLog(LzopInputStream.class);
+        byte[] LZOP_MAGIC = new byte[]{
+                -119, 'L', 'Z', 'O', 0, '\r', '\n', '\032', '\n'};
+        byte[] buf = new byte[9];
+        readBytes(buf, 0, 9);
+        if (!Arrays.equals(buf, LZOP_MAGIC)) {
+            throw new IOException("Invalid LZO header");
+        }
+        Arrays.fill(buf, (byte) 0);
+        Adler32 adler = new Adler32();
+        CRC32 crc32 = new CRC32();
+        int hitem = readHeaderItem(buf, 2, adler, crc32); // lzop version
+        if (hitem > LzopConstants.LZOP_VERSION) {
+            LOG.debug("Compressed with later version of lzop: "
+                    + Integer.toHexString(hitem) + " (expected 0x"
+                    + Integer.toHexString(LzopConstants.LZOP_VERSION) + ")");
+        }
+        hitem = readHeaderItem(buf, 2, adler, crc32); // lzo library version
+        if (hitem > LZO_LIBRARY_VERSION) {
+            throw new IOException("Compressed with incompatible lzo version: 0x"
+                    + Integer.toHexString(hitem) + " (expected 0x"
+                    + Integer.toHexString(LzoVersion.LZO_LIBRARY_VERSION) + ")");
+        }
+        hitem = readHeaderItem(buf, 2, adler, crc32); // lzop extract version
+        if (hitem > LzopConstants.LZOP_VERSION) {
+            throw new IOException("Compressed with incompatible lzop version: 0x"
+                    + Integer.toHexString(hitem) + " (expected 0x"
+                    + Integer.toHexString(LzopConstants.LZOP_VERSION) + ")");
+        }
+        hitem = readHeaderItem(buf, 1, adler, crc32); // method
+        switch (hitem) {
+            case LzopConstants.M_LZO1X_1:
+            case LzopConstants.M_LZO1X_1_15:
+            case LzopConstants.M_LZO1X_999:
+                break;
+            default:
+                throw new IOException("Invalid strategy " + Integer.toHexString(hitem));
+        }
+        readHeaderItem(buf, 1, adler, crc32); // ignore level
+
+        // flags
+        int flags = readHeaderItem(buf, 4, adler, crc32);
+        boolean useCRC32 = (flags & LzopConstants.F_H_CRC32) != 0;
+        boolean extraField = (flags & LzopConstants.F_H_EXTRA_FIELD) != 0;
+        if ((flags & LzopConstants.F_MULTIPART) != 0) {
+            throw new IOException("Multipart lzop not supported");
+        }
+        if ((flags & LzopConstants.F_H_FILTER) != 0) {
+            throw new IOException("lzop filter not supported");
+        }
+        if ((flags & LzopConstants.F_RESERVED) != 0) {
+            throw new IOException("Unknown flags in header");
+        }
+        // known !F_H_FILTER, so no optional block
+
+        readHeaderItem(buf, 4, adler, crc32); // ignore mode
+        readHeaderItem(buf, 4, adler, crc32); // ignore mtime
+        readHeaderItem(buf, 4, adler, crc32); // ignore gmtdiff
+        hitem = readHeaderItem(buf, 1, adler, crc32); // fn len
+        if (hitem > 0) {
+            byte[] tmp = (hitem > buf.length) ? new byte[hitem] : buf;
+            readHeaderItem(tmp, hitem, adler, crc32); // skip filename
+        }
+        int checksum = (int) (useCRC32 ? crc32.getValue() : adler.getValue());
+        hitem = readHeaderItem(buf, 4, adler, crc32); // read checksum
+        if (hitem != checksum) {
+            throw new IOException("Invalid header checksum: "
+                    + Long.toHexString(checksum) + " (expected 0x"
+                    + Integer.toHexString(hitem) + ")");
+        }
+        if (extraField) { // lzop 1.08 ultimately ignores this
+            LOG.debug("Extra header field not processed");
+            adler.reset();
+            crc32.reset();
+            hitem = readHeaderItem(buf, 4, adler, crc32);
+            readHeaderItem(new byte[hitem], hitem, adler, crc32);
+            checksum = (int) (useCRC32 ? crc32.getValue() : adler.getValue());
+            if (checksum != readHeaderItem(buf, 4, adler, crc32)) {
+                throw new IOException("Invalid checksum for extra header field");
+            }
+        }
+
+        return flags;
+    }
+
+    private int readHeaderItem(@Nonnull byte[] buf, @Nonnegative int len, @Nonnull Adler32 adler, @Nonnull CRC32 crc32) throws IOException {
+        int ret = readInt(buf, len);
+        adler.update(buf, 0, len);
+        crc32.update(buf, 0, len);
+        Arrays.fill(buf, (byte) 0);
+        return ret;
+    }
+
+    /**
+     * Read len bytes into buf, such that the LSB of the int returned is the
+     * last byte of the first word read.
+     */
+    private int readInt(@Nonnull byte[] buf, @Nonnegative int len)
+            throws IOException {
+        readBytes(buf, 0, len);
+        int ret = (0xFF & buf[0]) << 24;
+        ret |= (0xFF & buf[1]) << 16;
+        ret |= (0xFF & buf[2]) << 8;
+        ret |= (0xFF & buf[3]);
+        return (len > 3) ? ret : (ret >>> (8 * (4 - len)));
+    }
+
+}
diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/reader/Key.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/reader/Key.java
new file mode 100644
index 000000000..f76cfaf79
--- /dev/null
+++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/reader/Key.java
@@ -0,0 +1,36 @@
+package com.alibaba.datax.plugin.unstructuredstorage.reader;
+
+/**
+ * Created by haiwei.luo on 14-12-5.
+ */
+public class Key {
+    public static final String COLUMN = "column";
+
+    public static final String ENCODING = "encoding";
+
+    public static final String FIELD_DELIMITER = "fieldDelimiter";
+
+    public static final String SKIP_HEADER = "skipHeader";
+
+    public static final String TYPE = "type";
+
+    public static final String FORMAT = "format";
+
+    public static final String INDEX = "index";
+
+    public static final String VALUE = "value";
+
+    public static final String COMPRESS = "compress";
+
+    public static final String NULL_FORMAT = "nullFormat";
+
+    public static final String FILE_FORMAT = "fileFormat";
+
+    public static final String BUFFER_SIZE = "bufferSize";
+
+    public static final String CSV_READER_CONFIG = "csvReaderConfig";
+
+    public static final String INCR_BEGIN_TIME = "incrBeginTime";
+
+    public static final String INCR_END_TIME = "incrEndTime";
+}
diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/reader/UnstructuredStorageReaderErrorCode.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/reader/UnstructuredStorageReaderErrorCode.java
new file mode 100644
index 000000000..58165add4
--- /dev/null
+++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/reader/UnstructuredStorageReaderErrorCode.java
@@ -0,0 +1,46 @@
+package com.alibaba.datax.plugin.unstructuredstorage.reader;
+
+import com.alibaba.datax.common.spi.ErrorCode;
+
+/**
+ * Created by haiwei.luo on 14-9-20.
+ */
+public enum UnstructuredStorageReaderErrorCode implements ErrorCode {
+    CONFIG_INVALID_EXCEPTION("UnstructuredStorageReader-00", "Invalid parameter configuration."),
+    NOT_SUPPORT_TYPE("UnstructuredStorageReader-01", "The configured column type is not supported yet."),
+    REQUIRED_VALUE("UnstructuredStorageReader-02", "A required parameter value is missing."),
+    ILLEGAL_VALUE("UnstructuredStorageReader-03", "An illegal parameter value was configured."),
+    MIXED_INDEX_VALUE("UnstructuredStorageReader-04", "A column configuration contains both index and value."),
+    NO_INDEX_VALUE("UnstructuredStorageReader-05", "Column information is configured explicitly, but neither index nor value is provided."),
+    FILE_NOT_EXISTS("UnstructuredStorageReader-06", "The configured source path does not exist."),
+    OPEN_FILE_WITH_CHARSET_ERROR("UnstructuredStorageReader-07", "The configured encoding does not match the actual encoding of the stored data."),
+    OPEN_FILE_ERROR("UnstructuredStorageReader-08", "An exception occurred while opening the configured source; please check whether it contains hidden entities, pipe files or other special files."),
+    READ_FILE_IO_ERROR("UnstructuredStorageReader-09", "An IO exception occurred while reading the configured file."),
+    SECURITY_NOT_ENOUGH("UnstructuredStorageReader-10", "You lack the permissions required for the file read operation."),
+    RUNTIME_EXCEPTION("UnstructuredStorageReader-11", "A runtime exception occurred, please contact us");
+
+    private final String code;
+    private final String description;
+
+    private UnstructuredStorageReaderErrorCode(String code, String description) {
+        this.code = code;
+        this.description = description;
+    }
+
+    @Override
+    public String getCode() {
+        return this.code;
+    }
+
+    @Override
+    public String getDescription() {
+        return this.description;
+    }
+
+    @Override
+    public String toString() {
+        return String.format("Code:[%s], Description:[%s].", this.code,
+                this.description);
+    }
+
+}
diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/reader/UnstructuredStorageReaderUtil.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/reader/UnstructuredStorageReaderUtil.java
new file mode 100644
index 000000000..35e1b993c
--- /dev/null
+++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/reader/UnstructuredStorageReaderUtil.java
@@ -0,0 +1,668 @@
+package com.alibaba.datax.plugin.unstructuredstorage.reader;
+
+import com.alibaba.datax.common.element.*;
+import com.alibaba.datax.common.exception.DataXException;
+import com.alibaba.datax.common.plugin.RecordSender;
+import com.alibaba.datax.common.plugin.TaskPluginCollector;
+import com.alibaba.datax.common.util.Configuration;
+import com.csvreader.CsvReader;
+import com.webank.wedatasphere.exchangis.datax.util.Json;
+import io.airlift.compress.snappy.SnappyCodec;
+import io.airlift.compress.snappy.SnappyFramedInputStream;
+import org.anarres.lzo.LzoDecompressor1x_safe;
+import org.anarres.lzo.LzoInputStream;
+import org.apache.commons.beanutils.BeanUtils;
+import org.apache.commons.compress.compressors.bzip2.BZip2CompressorInputStream;
+import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream;
+import org.apache.commons.io.Charsets;
+import org.apache.commons.io.IOUtils;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.hadoop.io.compress.CompressionCodec;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.*;
+import java.nio.charset.UnsupportedCharsetException;
+import java.text.DateFormat;
+import java.util.*;
+import java.util.function.Supplier;
+
+public class UnstructuredStorageReaderUtil {
+    private static final Logger LOG = LoggerFactory
+            .getLogger(UnstructuredStorageReaderUtil.class);
+    private static HashMap<String, Object> csvReaderConfigMap;
+
+    private UnstructuredStorageReaderUtil() {
+
+    }
+
+    /**
+     * @param inputLine the input string to be split
+     * @param delimiter the field delimiter
+     * @return the fields split by the delimiter, or null if an exception occurs.
+     *         Escaping is supported, i.e. the data itself may contain the delimiter.
+     */
+    public static String[] splitOneLine(String inputLine, char delimiter) {
+        String[] splitedResult = null;
+        if (null != inputLine) {
+            try {
+                CsvReader csvReader = new CsvReader(new StringReader(inputLine));
+                csvReader.setDelimiter(delimiter);
+
+                setCsvReaderConfig(csvReader);
+
+                if (csvReader.readRecord()) {
+                    splitedResult = csvReader.getValues();
+                }
+            } catch (IOException e) {
+                // nothing to do
+            }
+        }
+        return splitedResult;
+    }
+
+    public static String[] splitBufferedReader(CsvReader csvReader)
+            throws IOException {
+        String[] splitedResult = null;
+        if (csvReader.readRecord()) {
+            splitedResult = csvReader.getValues();
+        }
+        return splitedResult;
+    }
+
+    /**
+     * Escaping is not supported.
+     *
+     * @return the fields split by the delimiter
+     */
+    public static String[] splitOneLine(String inputLine, String delimiter) {
+        // preserveAllTokens
+        return StringUtils.splitPreserveAllTokens(inputLine, delimiter);
+    }
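+
+    // Editor's note (illustrative, not part of the original patch): the two overloads above
+    // differ in escaping. With default CsvReader settings and delimiter ',':
+    //   splitOneLine("a,\"b,c\",d", ',')   -> ["a", "b,c", "d"]        (CSV-aware, quotes honored)
+    //   splitOneLine("a,\"b,c\",d", ",")   -> ["a", "\"b", "c\"", "d"] (plain split, no escaping)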
+
+    public static void readFromStream(InputStream inputStream, OutputStream outputStream,
+                                      Configuration readerSliceConfig) throws IOException {
+        String compress = readerSliceConfig.getString(Key.COMPRESS, null);
+        compress = StringUtils.isBlank(compress) ? null : compress;
+        try {
+            InputStream compressedInput = decorateWithCompress(inputStream, compress);
+            int bufferSize = readerSliceConfig.getInt(Key.BUFFER_SIZE,
+                    Constant.DEFAULT_BUFFER_SIZE);
+            byte[] buffer = new byte[bufferSize];
+            int len = -1;
+            while ((len = compressedInput.read(buffer, 0, bufferSize)) >= 0) {
+                outputStream.write(buffer, 0, len);
+            }
+        } catch (IOException e) {
+            throw DataXException.asDataXException(
+                    UnstructuredStorageReaderErrorCode.READ_FILE_IO_ERROR,
+                    "Stream read error", e);
+        } finally {
+            inputStream.close();
+        }
+    }
+
+    public static void readFromStream(InputStream inputStream, String context,
+                                      Configuration readerSliceConfig, RecordSender recordSender,
+                                      TaskPluginCollector taskPluginCollector) {
+        String compress = readerSliceConfig.getString(Key.COMPRESS, null);
+        if (StringUtils.isBlank(compress)) {
+            compress = null;
+        }
+        String encoding = readerSliceConfig.getString(Key.ENCODING,
+                Constant.DEFAULT_ENCODING);
+        // handle blank encoding
+        if (StringUtils.isBlank(encoding)) {
+            encoding = Constant.DEFAULT_ENCODING;
+            LOG.warn(String.format("The configured encoding is blank, using the default value [%s]",
+                    Constant.DEFAULT_ENCODING));
+        }
+
+        List<Configuration> column = readerSliceConfig
+                .getListConfiguration(Key.COLUMN);
+        // handle ["*"] -> [], null
+        if (null != column && 1 == column.size()
+                && "\"*\"".equals(column.get(0).toString())) {
+            readerSliceConfig.set(Key.COLUMN, null);
+            column = null;
+        }
+
+        BufferedReader reader = null;
+        int bufferSize = readerSliceConfig.getInt(Key.BUFFER_SIZE,
+                Constant.DEFAULT_BUFFER_SIZE);
+
+        // compress logic
+        try {
+
+            reader = new BufferedReader(new InputStreamReader(
+                    decorateWithCompress(inputStream, compress), encoding
+            ), bufferSize);
+            UnstructuredStorageReaderUtil.doReadFromStream(reader, context,
+                    readerSliceConfig, recordSender, taskPluginCollector);
+        } catch (UnsupportedEncodingException uee) {
+            throw DataXException
+                    .asDataXException(
+                            UnstructuredStorageReaderErrorCode.OPEN_FILE_WITH_CHARSET_ERROR,
+                            String.format("Unsupported encoding: [%s]", encoding), uee);
+        } catch (NullPointerException e) {
+            throw DataXException.asDataXException(
+                    UnstructuredStorageReaderErrorCode.RUNTIME_EXCEPTION,
+                    "Runtime error, please contact us", e);
+        }/* catch (ArchiveException e) {
+            throw DataXException.asDataXException(
+                    UnstructuredStorageReaderErrorCode.READ_FILE_IO_ERROR,
+                    String.format("Compressed stream read error: [%s]", context), e);
+        } */ catch (IOException e) {
+            throw DataXException.asDataXException(
+                    UnstructuredStorageReaderErrorCode.READ_FILE_IO_ERROR,
+                    String.format("Stream read error: [%s]", context), e);
+        } finally {
+            IOUtils.closeQuietly(reader);
+        }
+
+    }
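+
+    // Illustrative column configuration (editor's note, not part of the original patch):
+    // the column list parsed in doReadFromStream below typically comes from JSON such as
+    //   [ {"index": 0, "type": "long"},
+    //     {"index": 1, "type": "date", "format": "yyyy-MM-dd"},
+    //     {"value": "someConstant", "type": "string"} ]
+    // where index reads a field from the split line and value injects a constant.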
+
+    public static void doReadFromStream(BufferedReader reader, String context,
+                                        Configuration readerSliceConfig, RecordSender recordSender,
+                                        TaskPluginCollector taskPluginCollector) {
+        String encoding = readerSliceConfig.getString(Key.ENCODING,
+                Constant.DEFAULT_ENCODING);
+        Character fieldDelimiter = null;
+        String delimiterInStr = readerSliceConfig
+                .getString(Key.FIELD_DELIMITER);
+        if (null != delimiterInStr && 1 != delimiterInStr.length()) {
+            throw DataXException.asDataXException(
+                    UnstructuredStorageReaderErrorCode.ILLEGAL_VALUE,
+                    String.format("Only a single-character field delimiter is supported, but the configured delimiter is: [%s]", delimiterInStr));
+        }
+        if (null == delimiterInStr) {
+            LOG.warn(String.format("No field delimiter configured, using the default value [%s]",
+                    Constant.DEFAULT_FIELD_DELIMITER));
+        }
+
+        // warn: default value ',', fieldDelimiter could be \n(lineDelimiter)
+        // for no fieldDelimiter
+        fieldDelimiter = readerSliceConfig.getChar(Key.FIELD_DELIMITER,
+                Constant.DEFAULT_FIELD_DELIMITER);
+        Boolean skipHeader = readerSliceConfig.getBool(Key.SKIP_HEADER,
+                Constant.DEFAULT_SKIP_HEADER);
+        // warn: no default value '\N'
+        String nullFormat = readerSliceConfig.getString(Key.NULL_FORMAT, "");
+
+        // warn: Configuration -> List<ColumnEntry> for performance
+        List<ColumnEntry> column = UnstructuredStorageReaderUtil
+                .getListColumnEntry(readerSliceConfig, Key.COLUMN);
+        CsvReader csvReader = null;
+        // every line logic
+        try {
+            // TODO lineDelimiter
+            if (skipHeader) {
+                String fetchLine = reader.readLine();
+                LOG.info(String.format("Header line %s has been skipped.",
+                        fetchLine));
+            }
+            SplitRecordSupplier<String[]> splitSupplier = null;
+            if (readerSliceConfig.getString(Key.FILE_FORMAT, "csv").equalsIgnoreCase("text")) {
+                String finalFieldDelimiter = readerSliceConfig.getString(Key.FIELD_DELIMITER,
+                        String.valueOf(Constant.DEFAULT_FIELD_DELIMITER));
+                splitSupplier = () -> UnstructuredStorageReaderUtil.splitOneLine(reader.readLine(), finalFieldDelimiter);
+            } else {
+                csvReader = new CsvReader(reader);
+                csvReader.setDelimiter(fieldDelimiter);
+                setCsvReaderConfig(csvReader);
+                CsvReader finalCsvReader = csvReader;
+                splitSupplier = () -> UnstructuredStorageReaderUtil.splitBufferedReader(finalCsvReader);
+            }
+
+            String[] parseRows;
+            while ((parseRows = splitSupplier.get()) != null) {
+                UnstructuredStorageReaderUtil.transportOneRecord(recordSender,
+                        column, parseRows, nullFormat, taskPluginCollector);
+            }
+        } catch (UnsupportedEncodingException uee) {
+            throw DataXException
+                    .asDataXException(
+                            UnstructuredStorageReaderErrorCode.OPEN_FILE_WITH_CHARSET_ERROR,
+                            String.format("Unsupported encoding: [%s]", encoding), uee);
+        } catch (FileNotFoundException fnfe) {
+            throw DataXException.asDataXException(
+                    UnstructuredStorageReaderErrorCode.FILE_NOT_EXISTS,
+                    String.format("File not found: [%s]", context), fnfe);
+        } catch (IOException ioe) {
+            throw DataXException.asDataXException(
+                    UnstructuredStorageReaderErrorCode.READ_FILE_IO_ERROR,
+                    String.format("Error reading file: [%s]", context), ioe);
+        } catch (Exception e) {
+            throw DataXException.asDataXException(
+                    UnstructuredStorageReaderErrorCode.RUNTIME_EXCEPTION,
+                    String.format("Runtime exception: %s", e.getMessage()), e);
+        } finally {
+            if (null != csvReader) {
+                csvReader.close();
+            }
+            IOUtils.closeQuietly(reader);
+        }
+    }
+
+    public static Record transportOneRecord(RecordSender recordSender,
+                                            Configuration configuration,
+                                            TaskPluginCollector taskPluginCollector,
+                                            String line) {
+        List<ColumnEntry> column = UnstructuredStorageReaderUtil
+                .getListColumnEntry(configuration, Key.COLUMN);
+        // Note: nullFormat has no default value
+        String nullFormat = configuration.getString(Key.NULL_FORMAT);
+        String delimiterInStr = configuration.getString(Key.FIELD_DELIMITER);
+        if (null != delimiterInStr && 1 != delimiterInStr.length()) {
+            throw DataXException.asDataXException(
+                    UnstructuredStorageReaderErrorCode.ILLEGAL_VALUE,
+                    String.format("Only a single-character field delimiter is supported, but the configured delimiter is: [%s]", delimiterInStr));
+        }
+        if (null == delimiterInStr) {
+            LOG.warn(String.format("No field delimiter configured, using the default value [%s]",
+                    Constant.DEFAULT_FIELD_DELIMITER));
+        }
+        // warn: default value ',', fieldDelimiter could be \n(lineDelimiter)
+        // for no fieldDelimiter
+        Character fieldDelimiter = configuration.getChar(Key.FIELD_DELIMITER,
+                Constant.DEFAULT_FIELD_DELIMITER);
+
+        String[] sourceLine = StringUtils.split(line, fieldDelimiter);
+
+        return transportOneRecord(recordSender, column, sourceLine, nullFormat, taskPluginCollector);
+    }
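+
+    // Editor's note (descriptive, not part of the original patch): the overload below performs
+    // the per-column type conversion. Supported type values are string, long, double, boolean
+    // and date; a conversion failure is reported as a dirty record rather than failing the
+    // whole task, except for DataXException, which is rethrown.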
+
+    public static Record transportOneRecord(RecordSender recordSender,
+                                            List<ColumnEntry> columnConfigs, String[] sourceLine,
+                                            String nullFormat, TaskPluginCollector taskPluginCollector) {
+        Record record = recordSender.createRecord();
+        Column columnGenerated = null;
+
+        // Create a record whose columns are all of String type
+        if (null == columnConfigs || columnConfigs.size() == 0) {
+            for (String columnValue : sourceLine) {
+                // not equalsIgnoreCase, it's all ok if nullFormat is null
+                if (null != columnValue && columnValue.equals(nullFormat)) {
+                    columnGenerated = new StringColumn(null);
+                } else {
+                    columnGenerated = new StringColumn(columnValue);
+                }
+                record.addColumn(columnGenerated);
+            }
+            recordSender.sendToWriter(record);
+        } else {
+            try {
+                for (ColumnEntry columnConfig : columnConfigs) {
+                    String columnType = columnConfig.getType();
+                    Integer columnIndex = columnConfig.getIndex();
+                    String columnConst = columnConfig.getValue();
+
+                    String columnValue = null;
+
+                    if (null == columnIndex && null == columnConst) {
+                        throw DataXException
+                                .asDataXException(
+                                        UnstructuredStorageReaderErrorCode.NO_INDEX_VALUE,
+                                        "Since type is configured, at least one of index or value must be configured as well");
+                    }
+
+                    if (null != columnIndex && null != columnConst) {
+                        throw DataXException
+                                .asDataXException(
+                                        UnstructuredStorageReaderErrorCode.MIXED_INDEX_VALUE,
+                                        "Both index and value are configured; each column may use only one of them");
+                    }
+
+                    if (null != columnIndex) {
+                        if (columnIndex >= sourceLine.length) {
+                            String message = String
+                                    .format("Column index out of bounds: this line of the source file has [%s] columns, but column [%s] was requested. Line content: [%s]",
+                                            sourceLine.length, columnIndex + 1,
+                                            StringUtils.join(sourceLine, ","));
+                            LOG.warn(message);
+                            throw new IndexOutOfBoundsException(message);
+                        }
+
+                        columnValue = sourceLine[columnIndex];
+                    } else {
+                        columnValue = columnConst;
+                    }
+                    Type type = Type.valueOf(columnType.toUpperCase());
+                    // it's all ok if nullFormat is null
+                    if (null != columnValue && columnValue.equals(nullFormat)) {
+                        columnValue = null;
+                    }
+                    switch (type) {
+                        case STRING:
+                            columnGenerated = new StringColumn(columnValue);
+                            break;
+                        case LONG:
+                            try {
+                                columnGenerated = new LongColumn(columnValue);
+                            } catch (Exception e) {
+                                throw new IllegalArgumentException(String.format(
+                                        "Type conversion error: cannot convert [%s] to [%s]", columnValue,
+                                        "LONG"));
+                            }
+                            break;
+                        case DOUBLE:
+                            try {
+                                columnGenerated = new DoubleColumn(columnValue);
+                            } catch (Exception e) {
+                                throw new IllegalArgumentException(String.format(
+                                        "Type conversion error: cannot convert [%s] to [%s]", columnValue,
+                                        "DOUBLE"));
+                            }
+                            break;
+                        case BOOLEAN:
+                            try {
+                                columnGenerated = new BoolColumn(columnValue);
+                            } catch (Exception e) {
+                                throw new IllegalArgumentException(String.format(
+                                        "Type conversion error: cannot convert [%s] to [%s]", columnValue,
+                                        "BOOLEAN"));
+                            }
+
+                            break;
+                        case DATE:
+                            try {
+                                if (columnValue == null) {
+                                    Date date = null;
+                                    columnGenerated = new DateColumn(date);
+                                } else {
+                                    String formatString = columnConfig.getFormat();
+                                    if (StringUtils.isNotBlank(formatString)) {
+                                        // Conversion with the user-configured format; dirty-data behavior differs here
+                                        DateFormat format = columnConfig
+                                                .getDateFormat();
+                                        columnGenerated = new DateColumn(
+                                                format.parse(columnValue));
+                                    } else {
+                                        // Let the framework attempt the conversion
+                                        columnGenerated = new DateColumn(
+                                                new StringColumn(columnValue)
+                                                        .asDate());
+                                    }
+                                }
+                            } catch (Exception e) {
+                                throw new IllegalArgumentException(String.format(
+                                        "Type conversion error: cannot convert [%s] to [%s]", columnValue,
+                                        "DATE"));
+                            }
+                            break;
+                        default:
+                            String errorMessage = String.format(
+                                    "The configured column type is not supported yet: [%s]", columnType);
+                            LOG.error(errorMessage);
+                            throw DataXException
+                                    .asDataXException(
+                                            UnstructuredStorageReaderErrorCode.NOT_SUPPORT_TYPE,
+                                            errorMessage);
+                    }
+
+                    record.addColumn(columnGenerated);
+
+                }
+                recordSender.sendToWriter(record);
+            } catch (IllegalArgumentException iae) {
+                taskPluginCollector
+                        .collectDirtyRecord(record, iae.getMessage());
+            } catch (IndexOutOfBoundsException ioe) {
+                taskPluginCollector
+                        .collectDirtyRecord(record, ioe.getMessage());
+            } catch (Exception e) {
+                if (e instanceof DataXException) {
+                    throw (DataXException) e;
+                }
+                // Every conversion failure is handled as dirty data, including number and date format errors
+                taskPluginCollector.collectDirtyRecord(record, e.getMessage());
+            }
+        }
+
+        return record;
+    }
+
+    public static List<ColumnEntry> getListColumnEntry(
+            Configuration configuration, final String path) {
+        List<Object> lists = configuration.getList(path, Object.class);
+        if (lists == null) {
+            return null;
+        }
+        List<ColumnEntry> result = new ArrayList<>();
+        for (final Object object : lists) {
+            result.add(Json.fromJson(Json.toJson(object, null),
+                    ColumnEntry.class));
+        }
+        return result;
+    }
+
+    private enum Type {
+        STRING, LONG, BOOLEAN, DOUBLE, DATE;
+    }
+
+    /**
+     * check parameter: encoding, compress, fieldDelimiter
+     */
+    public static void validateParameter(Configuration readerConfiguration) {
+
+        // encoding check
+        validateEncoding(readerConfiguration);
+
+        // only support compress types
+        validateCompress(readerConfiguration);
+
+        // fieldDelimiter check
+        validateFieldDelimiter(readerConfiguration);
+
+        // column: 1. index type 2. value type 3. when type is Date, may have format
+        validateColumn(readerConfiguration);
+
+    }
+
+    public static void validateEncoding(Configuration readerConfiguration) {
+        // encoding check
+        String encoding = readerConfiguration
+                .getString(
+                        com.alibaba.datax.plugin.unstructuredstorage.reader.Key.ENCODING,
+                        com.alibaba.datax.plugin.unstructuredstorage.reader.Constant.DEFAULT_ENCODING);
+        try {
+            encoding = encoding.trim();
+            readerConfiguration.set(Key.ENCODING, encoding);
+            Charsets.toCharset(encoding);
+        } catch (UnsupportedCharsetException uce) {
+            throw DataXException.asDataXException(UnstructuredStorageReaderErrorCode.ILLEGAL_VALUE,
+                    String.format("The configured encoding is not supported: [%s]", encoding), uce);
+        } catch (Exception e) {
+            throw DataXException.asDataXException(UnstructuredStorageReaderErrorCode.CONFIG_INVALID_EXCEPTION,
+                    String.format("Encoding configuration error, please contact us: %s", e.getMessage()), e);
+        }
+    }
+
+    public static void validateCompress(Configuration readerConfiguration) {
+        String compress = readerConfiguration
+                .getUnnecessaryValue(com.alibaba.datax.plugin.unstructuredstorage.reader.Key.COMPRESS, null, null);
+        if (StringUtils.isNotBlank(compress)) {
+            compress = compress.toLowerCase().trim();
+            boolean compressTag = "gzip".equals(compress) || "bzip2".equals(compress) || "zip".equals(compress)
+                    || "lzo".equals(compress) || "lzo_deflate".equals(compress) || "hadoop-snappy".equals(compress)
+                    || "framing-snappy".equals(compress);
+            if (!compressTag) {
+                throw DataXException.asDataXException(UnstructuredStorageReaderErrorCode.ILLEGAL_VALUE,
+                        String.format("Only the gzip, bzip2, zip, lzo, lzo_deflate, hadoop-snappy and framing-snappy "
+                                + "compression formats are supported; the configured format is not: [%s]", compress));
+            }
+        } else {
+            // The user may have configured compress:"" (an empty string); compress has to be reset to null
+            compress = null;
+        }
+        readerConfiguration.set(com.alibaba.datax.plugin.unstructuredstorage.reader.Key.COMPRESS, compress);
+
+    }
+
+    public static void validateFieldDelimiter(Configuration readerConfiguration) {
+        // fieldDelimiter check
+        String delimiterInStr = readerConfiguration.getString(com.alibaba.datax.plugin.unstructuredstorage.reader.Key.FIELD_DELIMITER, null);
+        if (null == delimiterInStr) {
+            throw DataXException.asDataXException(UnstructuredStorageReaderErrorCode.REQUIRED_VALUE,
+                    String.format("The provided configuration is invalid: [%s] is a required parameter.",
+                            com.alibaba.datax.plugin.unstructuredstorage.reader.Key.FIELD_DELIMITER));
+        } else if (1 != delimiterInStr.length()) {
+            // warn: if have, length must be one
+            throw DataXException.asDataXException(UnstructuredStorageReaderErrorCode.ILLEGAL_VALUE,
+                    String.format("Only a single-character field delimiter is supported, but the configured delimiter is: [%s]", delimiterInStr));
+        }
+    }
+
+    public static void validateColumn(Configuration readerConfiguration) {
+        // column: 1. index type 2. value type 3. when type is Date, may have
+        // format
+        List<Configuration> columns = readerConfiguration
+                .getListConfiguration(com.alibaba.datax.plugin.unstructuredstorage.reader.Key.COLUMN);
+        if (null == columns || columns.size() == 0) {
+            throw DataXException.asDataXException(UnstructuredStorageReaderErrorCode.REQUIRED_VALUE, "columns must be specified");
+        }
+        // handle ["*"]
+        if (null != columns && 1 == columns.size()) {
+            String columnsInStr = columns.get(0).toString();
+            if ("\"*\"".equals(columnsInStr) || "'*'".equals(columnsInStr)) {
+                readerConfiguration.set(com.alibaba.datax.plugin.unstructuredstorage.reader.Key.COLUMN, null);
+                columns = null;
+            }
+        }
+
+        if (null != columns && columns.size() != 0) {
+            for (Configuration eachColumnConf : columns) {
+                eachColumnConf.getNecessaryValue(com.alibaba.datax.plugin.unstructuredstorage.reader.Key.TYPE,
+                        UnstructuredStorageReaderErrorCode.REQUIRED_VALUE);
+                Integer columnIndex = eachColumnConf
+                        .getInt(com.alibaba.datax.plugin.unstructuredstorage.reader.Key.INDEX);
+                String columnValue = eachColumnConf
+                        .getString(com.alibaba.datax.plugin.unstructuredstorage.reader.Key.VALUE);
+
+                if (null == columnIndex && null == columnValue) {
+                    throw DataXException.asDataXException(UnstructuredStorageReaderErrorCode.NO_INDEX_VALUE,
+                            "Since type is configured, at least one of index or value must be configured as well");
+                }
+
+                if (null != columnIndex && null != columnValue) {
+                    throw DataXException.asDataXException(UnstructuredStorageReaderErrorCode.MIXED_INDEX_VALUE,
+                            "Both index and value are configured; each column may use only one of them");
+                }
+                if (null != columnIndex && columnIndex < 0) {
+                    throw DataXException.asDataXException(UnstructuredStorageReaderErrorCode.ILLEGAL_VALUE,
+                            String.format("index must be greater than or equal to 0, but the configured index is [%s]", columnIndex));
+                }
+            }
+        }
+    }
+
+    public static void validateCsvReaderConfig(Configuration readerConfiguration) {
+        String csvReaderConfig = readerConfiguration.getString(Key.CSV_READER_CONFIG);
+        if (StringUtils.isNotBlank(csvReaderConfig)) {
+            try {
+                UnstructuredStorageReaderUtil.csvReaderConfigMap = Json.fromJson(csvReaderConfig, Map.class, String.class, Object.class);
+            } catch (Exception e) {
+                LOG.info(String.format("WARN!!!! Ignoring csvReaderConfig: the value must be empty or a Map structure, but the configured value is: %s", csvReaderConfig));
+            }
+        }
+    }
+
+    /**
+     * Get the parent directory of a path that contains regex wildcards.
+     *
+     * @param regexPath the path, possibly containing the wildcards * or ?
+     * @return the parent directory up to the last separator before the first wildcard
+     */
+    public static String getRegexPathParent(String regexPath) {
+        int endMark;
+        for (endMark = 0; endMark < regexPath.length(); endMark++) {
+            if ('*' != regexPath.charAt(endMark) && '?' != regexPath.charAt(endMark)) {
+                continue;
+            } else {
+                break;
+            }
+        }
+        int lastDirSeparator = regexPath.substring(0, endMark).lastIndexOf(IOUtils.DIR_SEPARATOR);
+        String parentPath = regexPath.substring(0, lastDirSeparator + 1);
+
+        return parentPath;
+    }
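+
+    // Editor's note (illustrative, not part of the original patch):
+    //   getRegexPathParent("/data/logs/2019*/access.log") -> "/data/logs/"
+    // i.e. the path is cut at the last separator before the first '*' or '?'.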
+
+    /**
+     * Get the parent directory of a path containing wildcards. Currently the wildcards * and ?
+     * are only supported in the last path segment, a limitation of the
+     * com.jcraft.jsch.ChannelSftp.ls(String path) API, see http://epaul.github.io/jsch-documentation/javadoc/
+     *
+     * @param regexPath the path, possibly containing the wildcards * or ?
+     * @return the parent directory of the wildcard segment
+     */
+    public static String getRegexPathParentPath(String regexPath) {
+        int lastDirSeparator = regexPath.lastIndexOf(IOUtils.DIR_SEPARATOR);
+        String parentPath = "";
+        parentPath = regexPath.substring(0, lastDirSeparator + 1);
+        if (parentPath.contains("*") || parentPath.contains("?")) {
+            throw DataXException.asDataXException(UnstructuredStorageReaderErrorCode.ILLEGAL_VALUE,
+                    String.format("The configured path [%s] is invalid: the wildcards * and ? are currently only supported in the last path segment", regexPath));
+        }
+        return parentPath;
+    }
+
+    public static void setCsvReaderConfig(CsvReader csvReader) {
+        if (null != UnstructuredStorageReaderUtil.csvReaderConfigMap && !UnstructuredStorageReaderUtil.csvReaderConfigMap.isEmpty()) {
+            try {
+                BeanUtils.populate(csvReader, UnstructuredStorageReaderUtil.csvReaderConfigMap);
+                LOG.info(String.format("csvReaderConfig applied successfully; CsvReader after configuration: %s", Json.toJson(csvReader, null)));
+            } catch (Exception e) {
+                LOG.info(String.format("WARN!!!! Ignoring csvReaderConfig: an exception occurred while applying it via BeanUtils.populate. The configured value is: %s; please check your configuration! CsvReader falls back to the defaults [%s]",
+                        Json.toJson(UnstructuredStorageReaderUtil.csvReaderConfigMap, null), Json.toJson(csvReader, null)));
+            }
+        } else {
+            // Disable safety mode by default to lift the 100,000-byte field limit
+            csvReader.setSafetySwitch(false);
+            LOG.info(String.format("CsvReader uses the defaults [%s]; csvReaderConfig is [%s]", Json.toJson(csvReader, null), Json.toJson(UnstructuredStorageReaderUtil.csvReaderConfigMap, null)));
+        }
+    }
+
+    private static InputStream decorateWithCompress(InputStream inputStream, String compress) throws IOException {
+        InputStream compressedInput = inputStream;
+        if (null != compress && !"none".equalsIgnoreCase(compress)) {
+            if ("lzo_deflate".equalsIgnoreCase(compress)) {
+                compressedInput = new LzoInputStream(inputStream, new LzoDecompressor1x_safe());
+            } else if ("lzo".equalsIgnoreCase(compress)) {
+                compressedInput = new ExpandLzopInputStream(inputStream);
+            } else if ("gzip".equalsIgnoreCase(compress)) {
+                compressedInput = new GzipCompressorInputStream(inputStream);
+            } else if ("bzip2".equalsIgnoreCase(compress)) {
+                compressedInput = new BZip2CompressorInputStream(inputStream);
+            } else if ("hadoop-snappy".equalsIgnoreCase(compress)) {
+                CompressionCodec snappyCodec = new SnappyCodec();
+                compressedInput = snappyCodec.createInputStream(inputStream);
+            } else if ("framing-snappy".equalsIgnoreCase(compress)) {
+                compressedInput = new SnappyFramedInputStream(inputStream);
+            } else if ("zip".equalsIgnoreCase(compress)) {
+                compressedInput = new ZipCycleInputStream(inputStream);
+            } else {
+                throw DataXException
+                        .asDataXException(
+                                UnstructuredStorageReaderErrorCode.ILLEGAL_VALUE,
+                                String.format("Only the gzip, bzip2, zip, lzo, lzo_deflate, hadoop-snappy and framing-snappy "
+                                        + "compression formats are supported; the configured format is not: [%s]", compress));
+            }
+        }
+        return compressedInput;
+    }
+
+    @FunctionalInterface
+    private interface SplitRecordSupplier<T> {
+        /**
+         * Get method (may throw exceptions internally)
+         * @return split result
+         * @throws Exception any exception
+         */
+        T get() throws Exception;
+    }
+
+}
diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/reader/ZipCycleInputStream.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/reader/ZipCycleInputStream.java
new file mode 100644
index 000000000..f57a3c595
--- /dev/null
+++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/reader/ZipCycleInputStream.java
@@ -0,0 +1,59 @@
+package com.alibaba.datax.plugin.unstructuredstorage.reader;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.zip.ZipEntry;
+import java.util.zip.ZipInputStream;
+
+public class ZipCycleInputStream extends InputStream {
+    private static final Logger LOG = LoggerFactory
+            .getLogger(ZipCycleInputStream.class);
+
+    private ZipInputStream zipInputStream;
+    private ZipEntry currentZipEntry;
+
+    public ZipCycleInputStream(InputStream in) {
+        this.zipInputStream = new ZipInputStream(in);
+    }
+
+    @Override
+    public int read() throws IOException {
+        // Position at the beginning of the next entry's data stream
+        if (null == this.currentZipEntry) {
+            this.currentZipEntry = this.zipInputStream.getNextEntry();
+            if (null == this.currentZipEntry) {
+                return -1;
+            } else {
+                LOG.info(String.format("Validate zipEntry with name: %s",
+                        this.currentZipEntry.getName()));
+            }
+        }
+
+        // Nesting inside the zip is not supported; directories are skipped
+        if (this.currentZipEntry.isDirectory()) {
+            LOG.warn(String.format("met a directory %s, ignore...",
+                    this.currentZipEntry.getName()));
+            this.currentZipEntry = null;
+            return this.read();
+        }
+
+        // Read from the current entry's data stream
+        int result = this.zipInputStream.read();
+
+        // The current entry's stream is exhausted; try the next entry
+        if (-1 == result) {
+            this.currentZipEntry = null;
+            return this.read();
+        } else {
+            return result;
+        }
+    }
+
+    @Override
+    public void close() throws IOException {
+        this.zipInputStream.close();
+    }
+}
diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/writer/Constant.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/writer/Constant.java
new file mode 100644
index 000000000..07ce8c9ab
--- /dev/null
+++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/writer/Constant.java
@@ -0,0 +1,19 @@
+package com.alibaba.datax.plugin.unstructuredstorage.writer;
+
+public class Constant {
+
+    public static final String DEFAULT_ENCODING = "UTF-8";
+
+    public static final char DEFAULT_FIELD_DELIMITER = ',';
+
+    public static final String DEFAULT_NULL_FORMAT = "\\N";
+
+    public static final String FILE_FORMAT_CSV = "csv";
+
+    public static final String FILE_FORMAT_TEXT = "text";
+
+    // 10MB per chunk, at most 10000 chunks (about 100GB in total)
+    public static final Long MAX_FILE_SIZE = 1024 * 1024 * 10 * 10000L;
+
+    public static final String DEFAULT_SUFFIX = "";
+}
diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/writer/Key.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/writer/Key.java
new file mode 100644
index 000000000..76946bf12
--- /dev/null
+++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/writer/Key.java
@@ -0,0 +1,64 @@
+package com.alibaba.datax.plugin.unstructuredstorage.writer;
+
+public class Key {
+    /**
+     * must have
+     */
+    public static final String FILE_NAME = "fileName";
+
+    public static final String TEMP_PATH = "tempPath";
+    /**
+     * must have
+     */
+    public static final String WRITE_MODE = "writeMode";
+
+    /**
+     * optional, no default
+     */
+    public static final String FIELD_DELIMITER = "fieldDelimiter";
+
+    /**
+     * optional, default UTF-8
+     */
+    public static final String ENCODING = "encoding";
+
+    /**
+     * optional, default no compress
+     */
+    public static final String COMPRESS = "compress";
+
+    /**
+     * optional, default \N
+     */
+    public static final String NULL_FORMAT = "nullFormat";
+
+    /**
+     * optional, old-style date format; do not use this
+     */
+    public static final String FORMAT = "format";
+    /**
+     * date format used for the writer's columns
+     */
+    public static final String DATE_FORMAT = "dateFormat";
+
+    /**
+     * csv or plain text
+     */
+    public static final String FILE_FORMAT = "fileFormat";
+
+    /**
+     * writer headers
+     */
+    public static final String HEADER = "header";
+
+    /**
+     * writer maxFileSize
+     */
+    public static final String MAX_FILE_SIZE = "maxFileSize";
+
+    /**
+     * writer file type suffix, like .txt .csv
+     */
+    public static final String SUFFIX = "suffix";
+
+}
diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/writer/TextCsvWriterManager.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/writer/TextCsvWriterManager.java
new file mode 100644
index 000000000..6ec490dc8
--- /dev/null
+++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/writer/TextCsvWriterManager.java
@@ -0,0 +1,94 @@
+package com.alibaba.datax.plugin.unstructuredstorage.writer;
+
+import com.csvreader.CsvWriter;
+import org.apache.commons.io.IOUtils;
+import org.apache.commons.lang3.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.io.Writer;
+import java.util.List;
+
+public class TextCsvWriterManager {
+    public static UnstructuredWriter produceUnstructuredWriter(
+            String fileFormat, char fieldDelimiter, Writer writer) {
+        // warn: false means plain text(old way), true means strict csv format
+        if (Constant.FILE_FORMAT_TEXT.equals(fileFormat)) {
+            return new TextWriterImpl(writer, fieldDelimiter);
+        } else {
+            return new CsvWriterImpl(writer, fieldDelimiter);
+        }
+    }
+}
+
+class CsvWriterImpl implements UnstructuredWriter {
+    private static final Logger LOG = LoggerFactory
+            .getLogger(CsvWriterImpl.class);
+    // csv: strictly follows CSV syntax, with standard escaping and quoting
+    private char fieldDelimiter;
+    private CsvWriter csvWriter;
+
+    public CsvWriterImpl(Writer writer, char fieldDelimiter) {
+        this.fieldDelimiter = fieldDelimiter;
+        this.csvWriter = new CsvWriter(writer, this.fieldDelimiter);
+        this.csvWriter.setTextQualifier('"');
+        this.csvWriter.setUseTextQualifier(true);
+        // warn: in linux is \n , in windows is \r\n
+        this.csvWriter.setRecordDelimiter(IOUtils.LINE_SEPARATOR.charAt(0));
+    }
+
+    @Override
+    public void writeOneRecord(List<String> splitedRows) throws IOException {
+        if (splitedRows.isEmpty()) {
+            LOG.info("Found one record line which is empty.");
+        }
+        this.csvWriter.writeRecord(splitedRows
+                .toArray(new String[0]));
+    }
+
+    @Override
+    public void flush() throws IOException {
+        this.csvWriter.flush();
+    }
+
+    @Override
+    public void close() throws IOException {
+        this.csvWriter.close();
+    }
+
+}
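+
+// Editor's note (illustrative, not part of the original patch): for a row ["a", "b,c"] with
+// fieldDelimiter ',', CsvWriterImpl above emits a,"b,c" (quoted, reversible), while
+// TextWriterImpl below emits a,b,c via plain StringUtils.join, which cannot be unambiguously
+// split back into the original fields.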
this.fieldDelimiter), + IOUtils.LINE_SEPARATOR)); + } + + @Override + public void flush() throws IOException { + this.textWriter.flush(); + } + + @Override + public void close() throws IOException { + this.textWriter.close(); + } + +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/writer/UnstructuredStorageWriterErrorCode.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/writer/UnstructuredStorageWriterErrorCode.java new file mode 100644 index 000000000..fc3cba0d6 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/writer/UnstructuredStorageWriterErrorCode.java @@ -0,0 +1,36 @@ +package com.alibaba.datax.plugin.unstructuredstorage.writer; + +import com.alibaba.datax.common.spi.ErrorCode; + + +public enum UnstructuredStorageWriterErrorCode implements ErrorCode { + ILLEGAL_VALUE("UnstructuredStorageWriter-00", "您填写的参数值不合法."), + Write_FILE_WITH_CHARSET_ERROR("UnstructuredStorageWriter-01", "您配置的编码未能正常写入."), + Write_FILE_IO_ERROR("UnstructuredStorageWriter-02", "您配置的文件在写入时出现IO异常."), + RUNTIME_EXCEPTION("UnstructuredStorageWriter-03", "出现运行时异常, 请联系我们"), + REQUIRED_VALUE("UnstructuredStorageWriter-04", "您缺失了必须填写的参数值."),; + + private final String code; + private final String description; + + private UnstructuredStorageWriterErrorCode(String code, String description) { + this.code = code; + this.description = description; + } + + @Override + public String getCode() { + return this.code; + } + + @Override + public String getDescription() { + return this.description; + } + + @Override + public String toString() { + return String.format("Code:[%s], Description:[%s].", this.code, + this.description); + } +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/writer/UnstructuredStorageWriterUtil.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/writer/UnstructuredStorageWriterUtil.java new file mode 100644 index 000000000..ed7bc8914 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/writer/UnstructuredStorageWriterUtil.java @@ -0,0 +1,391 @@ +package com.alibaba.datax.plugin.unstructuredstorage.writer; + +import com.webank.wedatasphere.exchangis.datax.common.constant.TransportType; +import com.alibaba.datax.common.element.Column; +import com.alibaba.datax.common.element.DateColumn; +import com.alibaba.datax.common.element.Record; +import com.alibaba.datax.common.exception.DataXException; +import com.alibaba.datax.common.plugin.RecordReceiver; +import com.alibaba.datax.common.plugin.TaskPluginCollector; +import com.alibaba.datax.common.util.Configuration; +import com.alibaba.datax.core.util.CompressSuffixName; +import com.google.common.collect.Sets; +import org.apache.commons.compress.compressors.CompressorOutputStream; +import org.apache.commons.compress.compressors.bzip2.BZip2CompressorOutputStream; +import org.apache.commons.compress.compressors.gzip.GzipCompressorOutputStream; +import org.apache.commons.io.Charsets; +import org.apache.commons.io.IOUtils; +import org.apache.commons.lang3.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.*; +import java.text.DateFormat; +import java.text.SimpleDateFormat; +import java.util.*; +import java.util.function.Function; + +public class 
UnstructuredStorageWriterUtil { + private UnstructuredStorageWriterUtil() { + + } + + private static final Logger LOG = LoggerFactory + .getLogger(UnstructuredStorageWriterUtil.class); + + /** + * check parameter: writeMode, encoding, compress, filedDelimiter + */ + public static void validateParameter(Configuration writerConfiguration) { + // writeMode check + String writeMode = writerConfiguration.getNecessaryValue( + Key.WRITE_MODE, + UnstructuredStorageWriterErrorCode.REQUIRED_VALUE); + writeMode = writeMode.trim(); + Set supportedWriteModes = Sets.newHashSet("truncate", "append", + "nonConflict"); + if (!supportedWriteModes.contains(writeMode)) { + throw DataXException + .asDataXException( + UnstructuredStorageWriterErrorCode.ILLEGAL_VALUE, + String.format( + "仅支持 truncate, append, nonConflict 三种模式, 不支持您配置的 writeMode 模式 : [%s]", + writeMode)); + } + writerConfiguration.set(Key.WRITE_MODE, writeMode); + + // encoding check + String encoding = writerConfiguration.getString(Key.ENCODING); + if (StringUtils.isBlank(encoding)) { + // like " ", null + LOG.warn(String.format("您的encoding配置为空, 将使用默认值[%s]", + Constant.DEFAULT_ENCODING)); + writerConfiguration.set(Key.ENCODING, Constant.DEFAULT_ENCODING); + } else { + try { + encoding = encoding.trim(); + writerConfiguration.set(Key.ENCODING, encoding); + Charsets.toCharset(encoding); + } catch (Exception e) { + throw DataXException.asDataXException( + UnstructuredStorageWriterErrorCode.ILLEGAL_VALUE, + String.format("不支持您配置的编码格式:[%s]", encoding), e); + } + } + + // only support compress types + String compress = writerConfiguration.getString(Key.COMPRESS); + if (StringUtils.isBlank(compress)) { + writerConfiguration.set(Key.COMPRESS, null); + } else { + Set supportedCompress = Sets.newHashSet("gzip", "bzip2", "zip"); + if (!supportedCompress.contains(compress.toLowerCase().trim())) { + String message = String.format( + "仅支持 [%s] 文件压缩格式 , 不支持您配置的文件压缩格式: [%s]", + StringUtils.join(supportedCompress, ","), compress); + throw DataXException.asDataXException( + UnstructuredStorageWriterErrorCode.ILLEGAL_VALUE, + String.format(message, compress)); + } + } + + // fieldDelimiter check + String delimiterInStr = writerConfiguration + .getString(Key.FIELD_DELIMITER, ","); + // warn: if have, length must be one + if (null != delimiterInStr && 1 != delimiterInStr.length()) { + throw DataXException.asDataXException( + UnstructuredStorageWriterErrorCode.ILLEGAL_VALUE, + String.format("仅仅支持单字符切分, 您配置的切分为 : [%s]", delimiterInStr)); + } + if (null == delimiterInStr) { + LOG.warn(String.format("您没有配置列分隔符, 使用默认值[%s]", + Constant.DEFAULT_FIELD_DELIMITER)); + writerConfiguration.set(Key.FIELD_DELIMITER, + Constant.DEFAULT_FIELD_DELIMITER); + } + + // fileFormat check + String fileFormat = writerConfiguration.getString(Key.FILE_FORMAT, + Constant.FILE_FORMAT_TEXT); + if (!Constant.FILE_FORMAT_CSV.equals(fileFormat) + && !Constant.FILE_FORMAT_TEXT.equals(fileFormat)) { + throw DataXException.asDataXException( + UnstructuredStorageWriterErrorCode.ILLEGAL_VALUE, String + .format("您配置的fileFormat [%s]错误, 支持csv, text两种.", + fileFormat)); + } + } + + public static List split(Configuration writerSliceConfig, + Set originAllFileExists, TransportType transportType, int mandatoryNumber) { + LOG.info("begin do split..."); + Set allFileExists = new HashSet(); + allFileExists.addAll(originAllFileExists); + List writerSplitConfigs = new ArrayList(); + String filePrefix = writerSliceConfig.getString(Key.FILE_NAME, ""); + + String fileSuffix; + for (int i = 0; i < mandatoryNumber; 
i++) { + // handle same file name + Configuration splitedTaskConfig = writerSliceConfig.clone(); + String fullFileName = null; + if(transportType == TransportType.RECORD) { + do{ + fileSuffix = UUID.randomUUID().toString().replace('-', '_'); + fullFileName = String.format("%s__%s", filePrefix, fileSuffix); + }while (allFileExists.contains(fullFileName)); + String suffix = CompressSuffixName.chooseSuffix(writerSliceConfig.getString(Key.COMPRESS, "")); + if(StringUtils.isNotBlank(suffix)){ + fullFileName += suffix; + } + allFileExists.add(fullFileName); + LOG.info(String + .format("splited write file name:[%s]", fullFileName)); + }else{ + fullFileName = filePrefix; + } + splitedTaskConfig.set(Key.FILE_NAME, fullFileName); + writerSplitConfigs.add(splitedTaskConfig); + } + LOG.info("end do split."); + return writerSplitConfigs; + } + + public static String buildFilePath(String path, String fileName, + String suffix) { + boolean isEndWithSeparator = false; + switch (IOUtils.DIR_SEPARATOR) { + case IOUtils.DIR_SEPARATOR_UNIX: + isEndWithSeparator = path.endsWith(String + .valueOf(IOUtils.DIR_SEPARATOR)); + break; + case IOUtils.DIR_SEPARATOR_WINDOWS: + isEndWithSeparator = path.endsWith(String + .valueOf(IOUtils.DIR_SEPARATOR_WINDOWS)); + break; + default: + break; + } + if (!isEndWithSeparator) { + path = path + IOUtils.DIR_SEPARATOR; + } + if (null == suffix) { + suffix = ""; + } else { + suffix = suffix.trim(); + } + return String.format("%s%s%s", path, fileName, suffix); + } + + /** + * build temp path + * @param userPath + * @param isExists + * @return + */ + public static String buildTmpFilePath(String userPath, String tmpPrefix, + char separator, Function isExists){ + String tmpFilePath; + boolean exist; + do{ + String tmpSuffix = tmpPrefix + UUID.randomUUID().toString().replace('-', '_'); + if(userPath.endsWith(String.valueOf(separator))){ + tmpFilePath = String.format("%s%s%s", userPath, + tmpSuffix, separator); + } else{ + tmpFilePath = String.format("%s%s%s%s", userPath, separator, tmpSuffix, separator); + } + Boolean check = isExists.apply(tmpFilePath); + exist = null != check ? 
check : false; + } while(exist); + return tmpFilePath; + } + + public static void writeToStream(InputStream inputStream, OutputStream outputStream, + Configuration config){ + String compress = config.getString(Key.COMPRESS); + try{ + OutputStream compressedOutputStream = outputStream; + if(null != compress){ + if("gzip".equalsIgnoreCase(compress)){ + compressedOutputStream = new GzipCompressorOutputStream(outputStream); + }else if("bzip2".equalsIgnoreCase(compress)){ + compressedOutputStream = new BZip2CompressorOutputStream(outputStream); + }else{ + throw DataXException.asDataXException( + UnstructuredStorageWriterErrorCode.ILLEGAL_VALUE, + String.format( + "仅支持 gzip, bzip2 文件压缩格式 , 不支持您配置的文件压缩格式: [%s]", + compress)); + } + } + byte[] buffer = new byte[8192]; + int len = -1; + while((len = inputStream.read(buffer, 0, buffer.length)) >= 0){ + compressedOutputStream.write(buffer, 0, len); + } + }catch(IOException e){ + throw DataXException.asDataXException( + UnstructuredStorageWriterErrorCode.Write_FILE_IO_ERROR, + String.format("流写入错误 "), e); + } + } + + public static void writeToStream(RecordReceiver lineReceiver, + OutputStream outputStream, Configuration config, String context, + TaskPluginCollector taskPluginCollector) { + String encoding = config.getString(Key.ENCODING, + Constant.DEFAULT_ENCODING); + // handle blank encoding + if (StringUtils.isBlank(encoding)) { + LOG.warn(String.format("您配置的encoding为[%s], 使用默认值[%s]", encoding, + Constant.DEFAULT_ENCODING)); + encoding = Constant.DEFAULT_ENCODING; + } + String compress = config.getString(Key.COMPRESS); + + BufferedWriter writer = null; + // compress logic + try { + if (null == compress) { + writer = new BufferedWriter(new OutputStreamWriter( + outputStream, encoding)); + } else { + // TODO more compress + if ("gzip".equalsIgnoreCase(compress)) { + CompressorOutputStream compressorOutputStream = new GzipCompressorOutputStream( + outputStream); + writer = new BufferedWriter(new OutputStreamWriter( + compressorOutputStream, encoding)); + } else if ("bzip2".equalsIgnoreCase(compress)) { + CompressorOutputStream compressorOutputStream = new BZip2CompressorOutputStream( + outputStream); + writer = new BufferedWriter(new OutputStreamWriter( + compressorOutputStream, encoding)); + } else if ("zip".equalsIgnoreCase(compress)){ + //just wrap with BufferedWriter + writer = new BufferedWriter(new OutputStreamWriter(outputStream, encoding)); + }else{ + throw DataXException + .asDataXException( + UnstructuredStorageWriterErrorCode.ILLEGAL_VALUE, + String.format( + "仅支持 gzip, bzip2 文件压缩格式 , 不支持您配置的文件压缩格式: [%s]", + compress)); + } + } + UnstructuredStorageWriterUtil.doWriteToStream(lineReceiver, writer, + context, config, taskPluginCollector); + } catch (UnsupportedEncodingException uee) { + throw DataXException + .asDataXException( + UnstructuredStorageWriterErrorCode.Write_FILE_WITH_CHARSET_ERROR, + String.format("不支持的编码格式 : [%s]", encoding), uee); + } catch (NullPointerException e) { + throw DataXException.asDataXException( + UnstructuredStorageWriterErrorCode.RUNTIME_EXCEPTION, + "运行时错误, 请联系我们", e); + } catch (IOException e) { + throw DataXException.asDataXException( + UnstructuredStorageWriterErrorCode.Write_FILE_IO_ERROR, + String.format("流写入错误 : [%s]", context), e); + } finally { + IOUtils.closeQuietly(writer); + } + } + + private static void doWriteToStream(RecordReceiver lineReceiver, + BufferedWriter writer, String contex, Configuration config, + TaskPluginCollector taskPluginCollector) throws IOException { + + String nullFormat = 
config.getString(Key.NULL_FORMAT); + + // compatible with both format & dateFormat + String dateFormat = config.getString(Key.DATE_FORMAT); + DateFormat dateParse = null; // warn: may be incompatible + if (StringUtils.isNotBlank(dateFormat)) { + dateParse = new SimpleDateFormat(dateFormat); + } + + // warn: defaults to the text format + String fileFormat = config.getString(Key.FILE_FORMAT, + Constant.FILE_FORMAT_TEXT); + + String delimiterInStr = config.getString(Key.FIELD_DELIMITER, ","); + if (null != delimiterInStr && 1 != delimiterInStr.length()) { + throw DataXException.asDataXException( + UnstructuredStorageWriterErrorCode.ILLEGAL_VALUE, + String.format("仅仅支持单字符切分, 您配置的切分为 : [%s]", delimiterInStr)); + } + if (null == delimiterInStr) { + LOG.warn(String.format("您没有配置列分隔符, 使用默认值[%s]", + Constant.DEFAULT_FIELD_DELIMITER)); + } + + // warn: fieldDelimiter could not be '' for no fieldDelimiter + char fieldDelimiter = config.getChar(Key.FIELD_DELIMITER, + Constant.DEFAULT_FIELD_DELIMITER); + + UnstructuredWriter unstructuredWriter = TextCsvWriterManager + .produceUnstructuredWriter(fileFormat, fieldDelimiter, writer); + + List headers = config.getList(Key.HEADER, String.class); + if (null != headers && !headers.isEmpty()) { + unstructuredWriter.writeOneRecord(headers); + } + + Record record = null; + while ((record = lineReceiver.getFromReader()) != null) { + UnstructuredStorageWriterUtil.transportOneRecord(record, + nullFormat, dateParse, taskPluginCollector, + unstructuredWriter); + } + + // warn: closing the stream is left to the caller + // IOUtils.closeQuietly(unstructuredWriter); + } + + /** + * An exception here indicates dirty data + */ + public static void transportOneRecord(Record record, String nullFormat, + DateFormat dateParse, TaskPluginCollector taskPluginCollector, + UnstructuredWriter unstructuredWriter) { + // warn: default is null + if (null == nullFormat) { + nullFormat = "null"; + } + try { + List splitedRows = new ArrayList(); + int recordLength = record.getColumnNumber(); + if (0 != recordLength) { + Column column; + for (int i = 0; i < recordLength; i++) { + column = record.getColumn(i); + if (null != column.getRawData()) { + boolean isDateColumn = column instanceof DateColumn; + if (!isDateColumn) { + splitedRows.add(column.asString()); + } else { + if (null != dateParse) { + splitedRows.add(dateParse.format(column + .asDate())); + } else { + splitedRows.add(column.asString()); + } + } + } else { + // warn: it's all ok if nullFormat is null + splitedRows.add(nullFormat); + } + } + } + unstructuredWriter.writeOneRecord(splitedRows); + } catch (Exception e) { + // warn: dirty data + taskPluginCollector.collectDirtyRecord(record, e); + } + } + +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/writer/UnstructuredWriter.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/writer/UnstructuredWriter.java new file mode 100644 index 000000000..f3c5f6636 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/writer/UnstructuredWriter.java @@ -0,0 +1,16 @@ +package com.alibaba.datax.plugin.unstructuredstorage.writer; + +import java.io.Closeable; +import java.io.IOException; +import java.util.List; + +public interface UnstructuredWriter extends Closeable { + + public void writeOneRecord(List splitedRows) throws IOException; + + public void flush() throws IOException; + + @Override + public void close() throws IOException; + +} diff --git
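Taken together, UnstructuredStorageWriterUtil gives a writer plugin a small recipe: validate the slice configuration once, then pump records into a (possibly compressed) stream per task. A hedged sketch of that flow; receiver, out and collector stand in for whatever the concrete plugin's framework hands it, and are not part of this utility class:

```java
// Hedged sketch, not the plugin's actual code: one task writing one slice.
void writeOneSlice(Configuration conf, RecordReceiver receiver,
                   OutputStream out, TaskPluginCollector collector) {
    // 1. Check writeMode / encoding / compress / fieldDelimiter / fileFormat.
    UnstructuredStorageWriterUtil.validateParameter(conf);
    // 2. Drain records into the stream; the file name doubles as the
    //    context string used in the IO error message.
    UnstructuredStorageWriterUtil.writeToStream(
            receiver, out, conf, conf.getString(Key.FILE_NAME), collector);
}
```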
a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/writer/ZipCollectOutputStream.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/writer/ZipCollectOutputStream.java new file mode 100644 index 000000000..d8960cad7 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/unstructuredstorage/writer/ZipCollectOutputStream.java @@ -0,0 +1,53 @@ +package com.alibaba.datax.plugin.unstructuredstorage.writer; + +import java.io.IOException; +import java.io.OutputStream; +import java.nio.charset.Charset; +import java.util.zip.ZipEntry; +import java.util.zip.ZipOutputStream; + +/** + * @author davidhua + * 2019/6/4 + */ +public class ZipCollectOutputStream extends OutputStream { + + private ZipOutputStream zipOutputStream; + + private String entryName; + + private ZipEntry zipEntry; + + public ZipCollectOutputStream(String entryName, OutputStream outputStream, + String encoding){ + if(null == encoding){ + this.zipOutputStream = new ZipOutputStream(outputStream); + }else { + this.zipOutputStream = new ZipOutputStream(outputStream, Charset.forName(encoding)); + } + this.entryName = entryName; + } + + public ZipCollectOutputStream(String entryName, OutputStream outputStream){ + this(entryName, outputStream, null); + } + + @Override + public void write(int b) throws IOException { + if(null == zipEntry){ + this.zipEntry = new ZipEntry(entryName); + this.zipOutputStream.putNextEntry(this.zipEntry); + } + this.zipOutputStream.write(b); + } + + @Override + public void close() throws IOException { + this.zipOutputStream.close(); + } + + @Override + public void flush() throws IOException { + this.zipOutputStream.flush(); + } +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/utils/HdfsUserGroupInfoLock.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/utils/HdfsUserGroupInfoLock.java new file mode 100644 index 000000000..1a4010f5c --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/plugin/utils/HdfsUserGroupInfoLock.java @@ -0,0 +1,20 @@ +package com.alibaba.datax.plugin.utils; + +import java.util.concurrent.locks.ReentrantLock; + +/** + * simple global LOCK + * @author davidhua + * 2019/4/24 + */ +public class HdfsUserGroupInfoLock { + private static ReentrantLock globallock = new ReentrantLock(); + + public static void lock(){ + globallock.lock(); + } + + public static void unlock(){ + globallock.unlock(); + } +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/transformer/ComplexTransformer.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/transformer/ComplexTransformer.java new file mode 100644 index 000000000..2a820aeae --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/transformer/ComplexTransformer.java @@ -0,0 +1,30 @@ +package com.alibaba.datax.transformer; + +import com.alibaba.datax.common.element.Record; + +import java.util.Map; + +/** + * no comments. + * Created by liqiang on 16/3/3. 
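ZipCollectOutputStream above is the write-side counterpart of ZipCycleInputStream: it lazily opens a single zip entry on the first byte written, so wrapping any OutputStream turns plain output into a one-entry zip archive. A short sketch (entry and file names are made up):

```java
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;

public class ZipCollectDemo {
    public static void main(String[] args) throws IOException {
        try (OutputStream out = new ZipCollectOutputStream(
                "part-00000.txt", new FileOutputStream("part-00000.zip"), "UTF-8")) {
            // The entry is created on the first write; close() finishes the archive.
            out.write("hello,world\n".getBytes(StandardCharsets.UTF_8));
        }
    }
}
```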
+ */ +public abstract class ComplexTransformer { + //The uniqueness of transformerName is checked inside DataX, or when the transformer is submitted to the plugin center. + private String transformerName; + + + public String getTransformerName() { + return transformerName; + } + + public void setTransformerName(String transformerName) { + this.transformerName = transformerName; + } + + /** + * @param record the row record; after the UDF processes it, the record is updated accordingly + * @param tContext configuration items for the transformer run + * @param paras arguments of the transformer function + */ + abstract public Record evaluate(Record record, Map tContext, Object... paras); +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/transformer/Transformer.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/transformer/Transformer.java new file mode 100644 index 000000000..37f947da5 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/transformer/Transformer.java @@ -0,0 +1,28 @@ +package com.alibaba.datax.transformer; + +import com.alibaba.datax.common.element.Record; + + +/** + * no comments. + * Created by liqiang on 16/3/3. + */ +public abstract class Transformer { + //The uniqueness of transformerName is checked inside DataX, or when the transformer is submitted to the plugin center. + private String transformerName; + + + public String getTransformerName() { + return transformerName; + } + + public void setTransformerName(String transformerName) { + this.transformerName = transformerName; + } + + /** + * @param record the row record; after the UDF processes it, the record is updated accordingly + * @param paras arguments of the transformer function + */ + abstract public Record evaluate(Record record, Object... paras); +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/common/CryptoUtils.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/common/CryptoUtils.java new file mode 100644 index 000000000..c9037c15b --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/common/CryptoUtils.java @@ -0,0 +1,98 @@ +/* + * + * Copyright 2020 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
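Transformer is the simpler of the two UDF hooks: a subclass receives each Record, mutates or replaces columns, and returns it. A hedged example that upper-cases the first column; Record#setColumn and StringColumn follow the usual DataX element API, so treat those calls as an assumption:

```java
import com.alibaba.datax.common.element.Record;
import com.alibaba.datax.common.element.StringColumn;
import com.alibaba.datax.transformer.Transformer;

// Illustrative UDF only: upper-cases column 0 of every record.
public class UpperCaseTransformer extends Transformer {
    public UpperCaseTransformer() {
        // The name must be unique, per the comment above.
        setTransformerName("dx_uppercase_demo");
    }

    @Override
    public Record evaluate(Record record, Object... paras) {
        if (record.getColumn(0).getRawData() != null) {
            record.setColumn(0,
                    new StringColumn(record.getColumn(0).asString().toUpperCase()));
        }
        return record;
    }
}
```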
+ */ + +package com.webank.wedatasphere.exchangis.datax.common; + +import org.apache.commons.codec.binary.Base64; +import org.apache.commons.lang3.StringUtils; + +import java.io.*; +import java.nio.charset.StandardCharsets; +import java.security.MessageDigest; + +/** + * @author devendeng + * Crypto utilities. + */ +public class CryptoUtils { + private CryptoUtils(){ + } + /** + * Serialize the object to string + * @param o Object + * @return String + * @throws Exception e + */ + public static String object2String(Serializable o) throws Exception { + ByteArrayOutputStream bos = new ByteArrayOutputStream(); + ObjectOutputStream oos = new ObjectOutputStream(bos); + oos.writeObject(o); + oos.flush(); + oos.close(); + bos.close(); + return new String(new Base64().encode(bos.toByteArray())); + } + + /** + * Deserialize the string to object + * + * @param str String + * @return Object + * @throws IOException + * @throws ClassNotFoundException + */ + public static Object string2Object(String str) throws IOException, ClassNotFoundException { + ByteArrayInputStream bis = new ByteArrayInputStream(new Base64().decode(str.getBytes())); + ObjectInputStream ois = new ObjectInputStream(bis); + Object o = ois.readObject(); + bis.close(); + ois.close(); + return o; + } + + /** + * Digest algorithm. Note: despite the method name, a SHA-256 digest is computed. + * @param source source string + * @param salt salt + * @param iterator number of digest iterations + * @return hex-encoded digest + */ + public static String md5(String source, String salt, int iterator){ + StringBuilder token = new StringBuilder(); + try{ + // warn: requests "sha-256", not MD5 + MessageDigest digest = MessageDigest.getInstance("sha-256"); + if(StringUtils.isNotEmpty(salt)){ + digest.update(salt.getBytes(StandardCharsets.UTF_8)); + } + // warn: uses the platform default charset for the source string + byte[] result = digest.digest(source.getBytes()); + for(int i = 0; i < iterator - 1; i++){ + digest.reset(); + result = digest.digest(result); + } + for (byte aResult : result) { + int temp = aResult & 0xFF; + if (temp <= 0xF) { + token.append("0"); + } + token.append(Integer.toHexString(temp)); + } + }catch(Exception e){ + throw new RuntimeException(e.getMessage(), e); + } + return token.toString(); + } +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/common/GsonUtil.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/common/GsonUtil.java new file mode 100644 index 000000000..c1d93a797 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/common/GsonUtil.java @@ -0,0 +1,73 @@ +/* + * + * Copyright 2020 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
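CryptoUtils pairs Java serialization with Base64 so that any Serializable value can travel as text, and offers a salted, iterated digest that is SHA-256 under the hood despite the md5 name. A quick roundtrip sketch:

```java
public class CryptoDemo {
    public static void main(String[] args) throws Exception {
        // Serialize + Base64-encode, then decode + deserialize.
        String encoded = CryptoUtils.object2String("my-secret");
        String decoded = (String) CryptoUtils.string2Object(encoded);
        System.out.println("my-secret".equals(decoded)); // true

        // Hex-encoded, salted, iterated SHA-256 digest.
        System.out.println(CryptoUtils.md5("password", "salt", 3));
    }
}
```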
+ */ + +package com.webank.wedatasphere.exchangis.datax.common; + +import com.google.gson.Gson; +import com.google.gson.GsonBuilder; +import com.google.gson.reflect.TypeToken; + +import java.util.List; + +/** + * @author davidhua + * 2019/10/4 + */ +public class GsonUtil { + private static Gson gson; + static{ + GsonBuilder builder = new GsonBuilder(); + gson = builder.enableComplexMapKeySerialization() + .setPrettyPrinting() + .create(); + } + + /** + * use gson.fromJson(json, type) simplify + * @param json json string + * @param clazz type + * @param actual need type + * @return deserialized object + */ + public static T fromJson(String json, Class clazz ){ + if(json.startsWith("[") && json.endsWith("]")){ + return gson.fromJson(json, TypeToken.getParameterized(List.class, clazz).getType()); + } + return gson.fromJson(json, TypeToken.getParameterized(clazz).getType()); + } + + /** + * use gson.fromJson(json, type) simplify + * @param json json string + * @param rawClass raw class + * @param genericArguments generic arguments + * @param + * @return + */ + public static T fromJson(String json, Class rawClass, Class... genericArguments){ + return gson.fromJson(json, TypeToken.getParameterized(rawClass, genericArguments).getType()); + } + + /** + * use gson.toJson(src) simplify + * @param src source obj + * @return json + */ + public static String toJson(Object src){ + return gson.toJson(src); + } +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/common/PatternInjectUtils.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/common/PatternInjectUtils.java new file mode 100644 index 000000000..acdeaa01f --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/common/PatternInjectUtils.java @@ -0,0 +1,183 @@ +/* + * + * Copyright 2020 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.exchangis.datax.common; + +import org.apache.commons.lang3.StringEscapeUtils; +import org.apache.commons.lang3.StringUtils; +import java.util.Map; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +/** + * @author davidhua + * 2018/10/26 + */ +public class PatternInjectUtils { + + private PatternInjectUtils(){ + + } + + private static final String PARAMETER_PREFIX = "[#|$]"; + + private static final String ASSIGN_SYMBOL = "="; + + private static final Pattern REGEX = Pattern.compile( + "(" + ASSIGN_SYMBOL + "?)" + + "("+ PARAMETER_PREFIX + ")" + + "\\{([\\w-.]+)[|]?([^}]*)}?" 
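GsonUtil wraps one shared Gson instance and recovers generic types through TypeToken.getParameterized, which a plain `fromJson(json, List.class)` would erase. A usage sketch, assuming the flattened signatures above are `<T>`-parameterized in the original source:

```java
import java.util.List;
import java.util.Map;

public class GsonUtilDemo {
    public static void main(String[] args) {
        // A JSON array is detected by its brackets and comes back as a List.
        List<String> names = GsonUtil.fromJson("[\"a\",\"b\"]", String.class);

        // Generic arguments survive via TypeToken.getParameterized.
        Map<String, Integer> ages = GsonUtil.fromJson("{\"tom\":30}",
                Map.class, String.class, Integer.class);

        System.out.println(names);
        System.out.println(GsonUtil.toJson(ages)); // pretty-printed by the shared builder
    }
}
```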
+ ); + + /** + * inject pattern + * @param template + * @param params + * @param useDefault use default Value + * @return + */ + public static String inject(String template, Object[] params, boolean useDefault, boolean escape, boolean placeholder){ + Matcher matcher = REGEX.matcher(template); + StringBuffer sb = new StringBuffer(); + int offset = 0; + while(matcher.find()){ + String value = ""; + String extra = ""; + if( offset < params.length && null != params[offset] ){ + Object paramsV = params[offset]; + if( paramsV instanceof String || paramsV instanceof Enum || + paramsV.getClass().isPrimitive() || + isWrapClass(paramsV.getClass())){ + value = escape? StringEscapeUtils.escapeJava(String.valueOf(paramsV)) + : String.valueOf(paramsV); + }else{ + value = GsonUtil.toJson(paramsV); + value = escape? StringEscapeUtils.escapeJava(value) : value; + } + if( null != matcher.group(1) + && !"".equals(matcher.group(1))){ + extra = matcher.group(1); + } + offset ++; + }else if(null != matcher.group(4) && useDefault){ +// value = escape? StringEscapeUtils.escapeJava(String.valueOf(matcher.group(4))) +// : matcher.group(4); + //For default not to escape + value = matcher.group(4); + } + if(StringUtils.isBlank(value) && !useDefault){ + value = "\"" + (escape?StringEscapeUtils.escapeJava(matcher.group(3)) + : matcher.group(3)); + }else if(!"$".equals(matcher.group(2)) && placeholder){ + value = "\"" + StringEscapeUtils.escapeJava(value) + "\""; + } + String result = (extra + value).replace("$", "\\$"); + matcher.appendReplacement(sb, result); + } + matcher.appendTail(sb); + return sb.toString().replace("\\$","$"); + } + public static String inject(String pattern, Object[] params){ + return inject(pattern, params, true, true, true); + } + + /** + * inject pattern + * @param template + * @param params + * @param useDefault + * @return + */ + public static String inject(String template, Map params, boolean useDefault, boolean escape, boolean placeholder){ + Matcher matcher = REGEX.matcher(template); + StringBuffer sb = new StringBuffer(); + //will be more faster? + while(matcher.find()){ + String injected = matcher.group(3); + if(null != injected && !"".equals(injected)){ + int flag = 0; + String value = ""; + String extra = ""; + for(Map.Entry entry : params.entrySet()){ + if(injected.equals(entry.getKey()) && null != entry.getValue()){ + Object entryV = entry.getValue(); + if(entryV instanceof String || entryV instanceof Enum || + entryV.getClass().isPrimitive() || + isWrapClass(entryV.getClass())){ + value = escape? StringEscapeUtils.escapeJava(String.valueOf(entryV)) + : String.valueOf(entryV); + }else{ + value = GsonUtil.toJson(entryV); + value = escape? StringEscapeUtils.escapeJava(value) : value; + } + if(null != matcher.group(1) + || !"".equals(matcher.group(1))){ + extra = matcher.group(1); + } + flag = 1; + break; + } + } + if(flag == 0 && null != matcher.group(4) && useDefault){ +// value = escape? 
StringEscapeUtils.escapeJava(String.valueOf(matcher.group(4))) +// : matcher.group(4); + //For default not to escape + value = matcher.group(4); + } + if(StringUtils.isBlank(value) && !useDefault){ + value = "\"*#{" + (escape?StringEscapeUtils.escapeJava(matcher.group(3)) + : matcher.group(3)) + "}*\""; + } else if(!"$".equals(matcher.group(2)) && placeholder){ + value = "\"" + StringEscapeUtils.escapeJava(value) + "\""; + } + String result = (extra + value).replace("$", "\\$"); + matcher.appendReplacement(sb, result); + } + } + matcher.appendTail(sb); + String print = sb.toString(); + return print.replace("\\$","$").replace("",""); + } + + public static String injectPattern(String template, String valuePattern){ + Matcher matcher = REGEX.matcher(template); + StringBuffer sb = new StringBuffer(); + while(matcher.find()){ + String extra = matcher.group(1); + String value = StringEscapeUtils.escapeJava(matcher.group(3)); + value = (extra + value.replaceAll("[\\s\\S]+", valuePattern)) + .replace("$", "\\$"); + matcher.appendReplacement(sb, value); + } + matcher.appendTail(sb); + return sb.toString().replace("\\$", "$"); + } + + public static String inject(String template, Map params){ + return inject(template, params, true, true, true); + } + + + private static boolean isWrapClass(Class clz){ + try{ + return ((Class)clz.getField("TYPE").get(null)).isPrimitive(); + }catch(Exception e){ + return false; + } + } + +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/common/constant/TransportMode.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/common/constant/TransportMode.java new file mode 100644 index 000000000..6c5356dd2 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/common/constant/TransportMode.java @@ -0,0 +1,32 @@ +/* + * + * Copyright 2020 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.exchangis.datax.common.constant; + +/** + * @author davidhua + * 2020/3/3 + */ +public enum TransportMode { + //Distinguish different transmission modes + OFFLINE("offline"); + private String transportMode; + TransportMode(String transportMode){ + this.transportMode = transportMode; + } + +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/common/constant/TransportType.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/common/constant/TransportType.java new file mode 100644 index 000000000..38f3469e0 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/common/constant/TransportType.java @@ -0,0 +1,35 @@ +/* + * + * Copyright 2020 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
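PatternInjectUtils above substitutes `${name|default}` / `#{name|default}` placeholders from positional or named parameters, falling back to the inline default when useDefault is true; `#`-style values are additionally quoted unless placeholder is false. A small illustration (the exact quoting behavior follows my reading of the regex and is an assumption):

```java
import java.util.HashMap;
import java.util.Map;

public class InjectDemo {
    public static void main(String[] args) {
        Map<String, Object> params = new HashMap<>();
        params.put("user", "hadoop");

        // "user" is supplied; "env" falls back to its inline default "dev".
        String out = PatternInjectUtils.inject(
                "select * from t where owner = '${user|nobody}' and env = '${env|dev}'",
                params, true, true, false);
        System.out.println(out);
        // expected: select * from t where owner = 'hadoop' and env = 'dev'
    }
}
```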
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.exchangis.datax.common.constant; + +/** + * @author davidhua + * 2019/3/31 + */ +public enum TransportType { + // Used to divide the channel into two types + RECORD("record"), STREAM("stream"); + private String transportType; + TransportType(String transportType){ + this.transportType = transportType; + } + @Override + public String toString(){ + return this.transportType; + } +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/common/ldap/LdapConnection.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/common/ldap/LdapConnection.java new file mode 100644 index 000000000..0846206f3 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/common/ldap/LdapConnection.java @@ -0,0 +1,64 @@ +/* + * + * Copyright 2020 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License.
+ */ + +package com.webank.wedatasphere.exchangis.datax.common.ldap; + +import javax.naming.Context; +import javax.naming.NamingException; +import javax.naming.ldap.InitialLdapContext; +import javax.naming.ldap.LdapContext; +import java.util.Hashtable; + +/** + * @author davidhua + * 2020/4/9 + */ +public class LdapConnection { + private Hashtable env = new Hashtable<>(); + private LdapContext context; + public LdapConnection(Hashtable env){ + if(null != env){ + this.env = env; + } + } + + /** + * Reconnect + * @param principle + * @param credentials + * @throws NamingException + */ + public void reconnect(String principle, String credentials) throws NamingException { + if(null == context){ + env.put(Context.SECURITY_PRINCIPAL, principle); + env.put(Context.SECURITY_CREDENTIALS, credentials); + this.context = new InitialLdapContext(env, null); + }else{ + this.context.addToEnvironment(Context.SECURITY_PRINCIPAL, principle); + this.context.addToEnvironment(Context.SECURITY_CREDENTIALS, credentials); + this.context.reconnect(null); + } + } + + public LdapContext getContext() { + return context; + } + + public void setContext(LdapContext context) { + this.context = context; + } +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/common/ldap/LdapConnectionFactory.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/common/ldap/LdapConnectionFactory.java new file mode 100644 index 000000000..5f6a92539 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/common/ldap/LdapConnectionFactory.java @@ -0,0 +1,80 @@ +/* + * + * Copyright 2020 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.exchangis.datax.common.ldap; + + +import org.apache.commons.pool.PoolableObjectFactory; + +import javax.naming.Context; +import javax.naming.ldap.LdapContext; +import java.util.Hashtable; + +/** + * @author davidhua + * 2020/4/9 + */ +public class LdapConnectionFactory implements PoolableObjectFactory { + + private String ldapUrl; + + private String baseDn; + + public LdapConnectionFactory(String url, String baseDn){ + this.ldapUrl = url; + this.baseDn = baseDn; + } + @Override + public LdapConnection makeObject() throws Exception { + Hashtable env = new Hashtable<>(); + env.put(Context.INITIAL_CONTEXT_FACTORY, "com.sun.jndi.ldap.LdapCtxFactory"); + String url = ldapUrl; + if(!url.endsWith("/")){ + url += "/"; + } + env.put(Context.PROVIDER_URL, url + baseDn); + env.put(Context.SECURITY_AUTHENTICATION, "simple"); + return new LdapConnection(env); + } + + @Override + public void destroyObject(LdapConnection ldapConnection) throws Exception { + LdapContext ldapContext = ldapConnection.getContext(); + if(null != ldapContext){ + try { + ldapContext.close(); + }catch(Exception e){ + //Ignore + } + } + } + + @Override + public boolean validateObject(LdapConnection ldapConnection) { + return false; + } + + @Override + public void activateObject(LdapConnection ldapConnection) throws Exception { + + } + + @Override + public void passivateObject(LdapConnection ldapConnection) throws Exception { + + } +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/common/ldap/LdapConnector.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/common/ldap/LdapConnector.java new file mode 100644 index 000000000..47232367f --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/common/ldap/LdapConnector.java @@ -0,0 +1,87 @@ +/* + * + * Copyright 2020 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.exchangis.datax.common.ldap; + + +import org.apache.commons.pool.impl.GenericObjectPool; + +import javax.naming.AuthenticationException; +import javax.print.DocFlavor; +import java.util.Properties; +import java.util.concurrent.ConcurrentHashMap; + +/** + * @author davidhua + * 2020/4/8 + */ +public class LdapConnector { + + public static final int MAX_ACTIVE = 50; + public static final int MAX_IDLE = 5; + + public static final String URL_PROP_NAME = "ldap.url"; + + public static final String BASE_DN_PROP_NAME = "ldap.baseDN"; + + private static final ConcurrentHashMap connectorMap = new ConcurrentHashMap<>(); + + private GenericObjectPool pool; + + public static LdapConnector getInstance(String url, String baseDn){ + return connectorMap.computeIfAbsent(url + baseDn, key -> new LdapConnector(url, baseDn)); + } + + public static LdapConnector getInstance(Properties properties){ + String url = String.valueOf(properties.getOrDefault(URL_PROP_NAME, "")); + String baseDn = String.valueOf(properties.getOrDefault(BASE_DN_PROP_NAME, "")); + return connectorMap.computeIfAbsent(url + baseDn, key -> new LdapConnector(url, baseDn)); + } + private LdapConnector(String url, String baseDn){ + GenericObjectPool.Config config = new GenericObjectPool.Config(); + config.maxActive = MAX_ACTIVE; + config.maxIdle = MAX_IDLE; + this.pool = new GenericObjectPool<>( + new LdapConnectionFactory(url, baseDn), config); + } + public boolean authenticate(String userName, String password){ + LdapConnection ldapConnection = null; + try { + ldapConnection = pool.borrowObject(); + ldapConnection.reconnect(userName, password); + return true; + } catch (Exception e) { + if(!(e instanceof AuthenticationException) && null != ldapConnection){ + try { + pool.invalidateObject(ldapConnection); + } catch (Exception ex) { + //Ignore + } + } + } finally { + if(null != ldapConnection){ + try { + pool.returnObject(ldapConnection); + } catch (Exception e) { + //Ignore + } + } + } + return false; + } + +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/common/spi/EnhancedReader.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/common/spi/EnhancedReader.java new file mode 100644 index 000000000..be011358e --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/common/spi/EnhancedReader.java @@ -0,0 +1,66 @@ +/* + * + * Copyright 2020 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
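LdapConnector above keeps one pooled connector per url+baseDN pair and treats a successful bind as authentication: authenticate() borrows a pooled LdapConnection, rebinds with the caller's credentials, and returns false on AuthenticationException. A hedged usage sketch (host and DN values are placeholders):

```java
import java.util.Properties;

public class LdapAuthDemo {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.setProperty(LdapConnector.URL_PROP_NAME, "ldap://ldap.example.com:389");
        props.setProperty(LdapConnector.BASE_DN_PROP_NAME, "dc=example,dc=com");

        LdapConnector connector = LdapConnector.getInstance(props);
        // A simple bind with the user's DN and password.
        boolean ok = connector.authenticate(
                "uid=alice,ou=people,dc=example,dc=com", "secret");
        System.out.println(ok);
    }
}
```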
+ */ + +package com.webank.wedatasphere.exchangis.datax.common.spi; + +import com.alibaba.datax.common.plugin.AbstractJobPlugin; +import com.alibaba.datax.common.plugin.AbstractTaskPlugin; +import com.webank.wedatasphere.exchangis.datax.common.constant.TransportMode; +import com.webank.wedatasphere.exchangis.datax.core.job.meta.MetaSchema; +import com.webank.wedatasphere.exchangis.datax.core.transport.stream.ChannelOutput; + +/** + * @author davidhua + * 2020/4/8 + */ +public class EnhancedReader { + + public static abstract class Job extends AbstractJobPlugin{ + /** + * If the reader supports transport type of stream + * @return boolean + */ + public boolean isSupportStream(){ + return false; + } + + /** + * The transport mode the reader in + * @return boolean + */ + public TransportMode transportMode(){ + return TransportMode.OFFLINE; + } + /** + * Get meta schema from reader job + * @return + */ + public MetaSchema syncMetaData(){ + return null; + } + } + + public static abstract class Task extends AbstractTaskPlugin{ + /** + * if plugin can use stream channel + * @param outputStream + */ + public void startRead(ChannelOutput outputStream){ + //do nothing + } + } +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/common/spi/EnhancedWriter.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/common/spi/EnhancedWriter.java new file mode 100644 index 000000000..6f98b1ccb --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/common/spi/EnhancedWriter.java @@ -0,0 +1,90 @@ +/* + * + * Copyright 2020 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
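EnhancedReader extends the plain DataX reader contract with stream transport and metadata hooks; a plugin opts in by overriding the defaults. A hypothetical reader job, assuming the usual AbstractJobPlugin lifecycle (init/destroy) applies:

```java
// Hypothetical reader job: opts into stream transport and publishes a schema.
public class DemoReaderJob extends EnhancedReader.Job {
    @Override
    public boolean isSupportStream() {
        return true; // this reader can feed a ChannelOutput directly
    }

    @Override
    public MetaSchema syncMetaData() {
        MetaSchema schema = new MetaSchema();
        schema.getFieldSchemas().add(
                new MetaSchema.FieldSchema("id", "bigint", "primary key"));
        return schema;
    }

    @Override
    public void init() { /* read plugin configuration here */ }

    @Override
    public void destroy() { /* release resources here */ }
}
```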
+ */ + +package com.webank.wedatasphere.exchangis.datax.common.spi; + +import com.alibaba.datax.common.plugin.AbstractJobPlugin; +import com.alibaba.datax.common.plugin.AbstractTaskPlugin; +import com.alibaba.datax.common.plugin.BasicDataReceiver; +import com.webank.wedatasphere.exchangis.datax.common.constant.TransportMode; +import com.webank.wedatasphere.exchangis.datax.core.job.meta.MetaSchema; +import com.webank.wedatasphere.exchangis.datax.core.transport.stream.ChannelInput; + +/** + * @author davidhua + * 2020/4/8 + */ +public class EnhancedWriter { + + public abstract static class Job extends AbstractJobPlugin{ + /** + * Post processor's class name + */ + protected String processor; + + /** + * If the writer supports transport type of stream + * @return boolean + */ + public boolean isSupportStream(){ + return false; + } + + /** + * sync meta schema + * @param metaSchema + */ + public void syncMetaData(MetaSchema metaSchema){ + + } + + /** + * The transport modes that the writer supports + * @return modes + */ + public TransportMode[] transportModes(){ + return new TransportMode[]{TransportMode.OFFLINE}; + } + + public void setProcessor(String processor){ + this.processor = processor; + } + + public String getProcessors(){ + return processor; + } + } + + public abstract static class Task extends AbstractTaskPlugin{ + /** + * custom data channel + * @param receiver + * @param type + */ + public void startWrite(BasicDataReceiver receiver, Class type){ + //throw unsupport + } + + /** + * if plugin can use stream channel + * @param inputStream + */ + public void startWrite(ChannelInput inputStream){ + //do nothing + } + } +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/ThreadLocalSecurityManager.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/ThreadLocalSecurityManager.java new file mode 100644 index 000000000..c621d03ad --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/ThreadLocalSecurityManager.java @@ -0,0 +1,77 @@ +/* + * + * Copyright 2020 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.exchangis.datax.core; + + +import java.io.FileDescriptor; +import java.security.AccessControlException; +import java.security.Permission; +import java.util.concurrent.ThreadPoolExecutor; + +/** + * @author davidhua + * 2019/8/13 + */ +public class ThreadLocalSecurityManager extends SecurityManager{ + private static final String SET_SECURITY_MANAGER = "setSecurityManager"; + + private ThreadLocal threadSecurityManager = new ThreadLocal<>(); + private ThreadLocal keyCode = new ThreadLocal<>(); + + + @Override + public void checkRead(FileDescriptor fd) { + if(null != threadSecurityManager.get()) { + threadSecurityManager.get().checkRead(fd); + } + } + + @Override + public void checkWrite(FileDescriptor fd){ + if(null != threadSecurityManager.get()) { + threadSecurityManager.get().checkWrite(fd); + } + } + + @Override + public void checkPermission(Permission perm) { + if(perm instanceof RuntimePermission && perm.getName().equals(SET_SECURITY_MANAGER)){ + throw new AccessControlException("have no permission to set SecurityManager"); + } + if(null != threadSecurityManager.get()) { + threadSecurityManager.get().checkPermission(perm); + } + } + + public void setThreadSecurityManager(Object invoker, SecurityManager manager){ + if(null == threadSecurityManager.get()) { + String hashCode = String.valueOf(invoker.hashCode()); + keyCode.set(hashCode); + threadSecurityManager.set(manager); + } + } + + public void removeThreadSecurityManager(Object invoker){ + String hashCode = String.valueOf(invoker.hashCode()); + String code = keyCode.get(); + if(code.equals(hashCode)){ + keyCode.remove(); + threadSecurityManager.remove(); + } + } +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/job/meta/MetaSchema.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/job/meta/MetaSchema.java new file mode 100644 index 000000000..3b41d18c9 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/job/meta/MetaSchema.java @@ -0,0 +1,169 @@ +/* + * + * Copyright 2020 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
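ThreadLocalSecurityManager above lets each worker thread install its own delegate manager while refusing any attempt to swap out the global one; the keyCode check ensures only the registering invoker can unbind it. The intended discipline is a try/finally bracket around untrusted work, sketched here with a hypothetical runUntrustedProcessor() call:

```java
// Assumes ThreadLocalSecurityManager is already installed as the JVM's manager.
ThreadLocalSecurityManager manager =
        (ThreadLocalSecurityManager) System.getSecurityManager();
Object invoker = new Object(); // key that owns this registration
manager.setThreadSecurityManager(invoker, new ProcessorSecurityManager("/tmp/work"));
try {
    runUntrustedProcessor(); // hypothetical sandboxed call on this thread
} finally {
    // Always unbind on the same thread, with the same invoker.
    manager.removeThreadSecurityManager(invoker);
}
```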
+ */ + +package com.webank.wedatasphere.exchangis.datax.core.job.meta; + +import com.webank.wedatasphere.exchangis.datax.common.GsonUtil; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +/** + * store the job related schema information + * @author davidhua + * 2019/9/25 + */ +public class MetaSchema { + private List fieldSchemas = new ArrayList<>(); + + private Map schemaInfo = new HashMap<>(); + + + public MetaSchema(){ + + } + + public MetaSchema(List fieldSchemas){ + this.fieldSchemas = fieldSchemas; + } + + private static class SchemaInfoContext{ + enum StoreType{ + /** + * json type + */ + JSON, + /** + * object + */ + SOURCE_OBJECT + } + private Object v; + private StoreType storeType; + SchemaInfoContext(Object v, StoreType storeType){ + this.v = v; + this.storeType = storeType; + } + + } + /** + * field schema + */ + public static class FieldSchema{ + private String name; + private String type; + private String comment; + private Map props; + + public FieldSchema(String name, String type, String comment){ + this(name, type, comment, new HashMap<>()); + } + public FieldSchema(String name, String type, + String comment, Map props){ + this.name = name; + this.type = type; + this.comment = comment; + this.props = props; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public String getType() { + return type; + } + + public void setType(String type) { + this.type = type; + } + + public String getComment() { + return comment; + } + + public void setComment(String comment) { + this.comment = comment; + } + + public Map getProps() { + return props; + } + + public void setProps(Map props) { + this.props = props; + } + } + + public void addSchemaInfo(String key, Object v){ + if(v instanceof String || v.getClass().isPrimitive() || isWrapClass(v.getClass())){ + this.schemaInfo.put(key, new SchemaInfoContext(v, SchemaInfoContext.StoreType.SOURCE_OBJECT)); + }else{ + this.schemaInfo.put(key, new SchemaInfoContext(GsonUtil.toJson(v), SchemaInfoContext.StoreType.JSON)); + } + } + + public T getSchemaInfo(String key, Class clazz){ + SchemaInfoContext schemaInfoContext = this.schemaInfo.get(key); + if(schemaInfoContext != null){ + if(schemaInfoContext.storeType == SchemaInfoContext.StoreType.JSON){ + return GsonUtil.fromJson(String.valueOf(schemaInfoContext.v), clazz); + } + if(schemaInfoContext.v.getClass().equals(clazz)){ + return (T) schemaInfoContext.v; + } + } + return null; + } + + public List getSchemaInfoList(String key, Class elementClazz){ + SchemaInfoContext schemaInfoContext = this.schemaInfo.get(key); + if(schemaInfoContext != null){ + return GsonUtil.fromJson(String.valueOf(schemaInfoContext.v), List.class, elementClazz); + } + return null; + } + + public Map getSchemaInfoMap(String key, Class kClass, Class vClass){ + SchemaInfoContext schemaInfoContext = this.schemaInfo.get(key); + if(schemaInfoContext != null){ + return GsonUtil.fromJson(String.valueOf(schemaInfoContext.v), Map.class, kClass, vClass); + } + return null; + } + + public List getFieldSchemas() { + return fieldSchemas; + } + + public void setFieldSchemas(List fieldSchemas) { + this.fieldSchemas = fieldSchemas; + } + + private static boolean isWrapClass(Class clz){ + try{ + return ((Class)clz.getField("TYPE").get(null)).isPrimitive(); + }catch(Exception e){ + return false; + } + } +} diff --git 
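MetaSchema above stores simple extra values as-is and complex ones as JSON, so the typed getters can round-trip either form. A short example:

```java
import java.util.Arrays;
import java.util.List;

public class MetaSchemaDemo {
    public static void main(String[] args) {
        MetaSchema schema = new MetaSchema(Arrays.asList(
                new MetaSchema.FieldSchema("id", "bigint", "primary key"),
                new MetaSchema.FieldSchema("name", "string", "user name")));

        schema.addSchemaInfo("location", "/warehouse/db/table");       // stored as the raw object
        schema.addSchemaInfo("partitions", Arrays.asList("ds", "hh")); // stored as JSON

        String location = schema.getSchemaInfo("location", String.class);
        List<String> parts = schema.getSchemaInfoList("partitions", String.class);
        System.out.println(location + " " + parts);
    }
}
```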
a/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/job/scheduler/speed/DefaultVariableTaskGroupSpeedStrategy.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/job/scheduler/speed/DefaultVariableTaskGroupSpeedStrategy.java new file mode 100644 index 000000000..b06b46d3a --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/job/scheduler/speed/DefaultVariableTaskGroupSpeedStrategy.java @@ -0,0 +1,70 @@ +/* + * + * Copyright 2020 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.exchangis.datax.core.job.scheduler.speed; + +import com.alibaba.datax.common.util.Configuration; +import com.alibaba.datax.core.statistics.communication.Communication; +import com.alibaba.datax.core.statistics.communication.CommunicationTool; +import com.alibaba.datax.core.util.container.CoreConstant; + + +/** + * Variable speed speed + * @author davidhua + * 2019/10/22 + */ +public class DefaultVariableTaskGroupSpeedStrategy implements VariableTaskGroupSpeedStrategy { + + + @Override + public boolean adjustSpeed(Communication communication, Configuration configuration) { + boolean result = false; + long channelNum = communication.getLongCounter(CommunicationTool.CHANNEL_RUNNING); + if(channelNum <= 0){ + return false; + } + long globalLimitedByteSpeed = configuration.getLong(CoreConstant.DATAX_JOB_SETTING_SPEED_BYTE, 0); + if(globalLimitedByteSpeed > 0){ + Long channelLimitedByteSpeed = configuration + .getLong(CoreConstant.DATAX_CORE_TRANSPORT_CHANNEL_SPEED_BYTE); + if(channelLimitedByteSpeed * channelNum != globalLimitedByteSpeed){ + long adjustedLimitedByteSpeed = globalLimitedByteSpeed / channelNum; + if(adjustedLimitedByteSpeed != channelLimitedByteSpeed){ + configuration.set(CoreConstant.DATAX_CORE_TRANSPORT_CHANNEL_SPEED_BYTE, + adjustedLimitedByteSpeed); + result = true; + } + } + } + long globalLimitedRecordSpeed = configuration.getLong( + CoreConstant.DATAX_JOB_SETTING_SPEED_RECORD, 0); + if(globalLimitedRecordSpeed > 0){ + Long channelLimitedRecordSpeed = configuration.getLong( + CoreConstant.DATAX_CORE_TRANSPORT_CHANNEL_SPEED_RECORD); + if(channelLimitedRecordSpeed * channelNum != globalLimitedRecordSpeed){ + long adjustedLimitedRecordSpeed = globalLimitedRecordSpeed / channelNum; + if(adjustedLimitedRecordSpeed != channelLimitedRecordSpeed){ + configuration.set(CoreConstant.DATAX_CORE_TRANSPORT_CHANNEL_SPEED_RECORD, + adjustedLimitedRecordSpeed); + result = true; + } + } + } + return result; + } +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/job/scheduler/speed/VariableTaskGroupSpeedStrategy.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/job/scheduler/speed/VariableTaskGroupSpeedStrategy.java new file mode 100644 index 000000000..64372f65d --- 
diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/job/scheduler/speed/VariableTaskGroupSpeedStrategy.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/job/scheduler/speed/VariableTaskGroupSpeedStrategy.java
new file mode 100644
index 000000000..64372f65d
--- /dev/null
+++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/job/scheduler/speed/VariableTaskGroupSpeedStrategy.java
@@ -0,0 +1,37 @@
+/*
+ *
+ * Copyright 2020 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.webank.wedatasphere.exchangis.datax.core.job.scheduler.speed;
+
+import com.alibaba.datax.common.util.Configuration;
+import com.alibaba.datax.core.statistics.communication.Communication;
+
+
+/**
+ * Strategy to adjust task-group speed at runtime
+ * @author davidhua
+ * 2019/10/22
+ */
+public interface VariableTaskGroupSpeedStrategy {
+    /**
+     * Adjust speed
+     * @param communication task groups' communication
+     * @param configuration job configuration
+     * @return true if any speed setting was adjusted
+     */
+    boolean adjustSpeed(Communication communication, Configuration configuration);
+
+}
diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/processor/Processor.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/processor/Processor.java
new file mode 100644
index 000000000..4589c24a3
--- /dev/null
+++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/processor/Processor.java
@@ -0,0 +1,34 @@
+/*
+ *
+ * Copyright 2020 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.webank.wedatasphere.exchangis.datax.core.processor;
+
+import java.util.List;
+
+/**
+ * @author davidhua
+ * 2019/8/21
+ */
+public interface Processor<T> {
+    /**
+     * Process the column data list
+     * @param columnData column data of one record
+     * @return the processed result
+     * @throws Exception if processing fails
+     */
+    T process(List<Object> columnData) throws Exception;
+}
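A minimal sketch of a Processor implementation against the interface above; the class name and the upper-casing logic are invented for illustration:

    import java.util.List;

    public class UpperCaseProcessor implements Processor<String> {
        @Override
        public String process(List<Object> columnData) throws Exception {
            // join the string form of every column, then upper-case the result
            StringBuilder joined = new StringBuilder();
            for (Object column : columnData) {
                joined.append(String.valueOf(column));
            }
            return joined.toString().toUpperCase();
        }
    }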
diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/processor/ProcessorSecurityManager.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/processor/ProcessorSecurityManager.java
new file mode 100644
index 000000000..8a63b5bb8
--- /dev/null
+++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/processor/ProcessorSecurityManager.java
@@ -0,0 +1,144 @@
+/*
+ *
+ * Copyright 2020 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.webank.wedatasphere.exchangis.datax.core.processor;
+
+import sun.security.util.SecurityConstants;
+
+import java.awt.*;
+import java.io.File;
+import java.io.FileDescriptor;
+import java.io.FilePermission;
+import java.net.SocketPermission;
+import java.security.AccessControlException;
+import java.security.Permission;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.PropertyPermission;
+
+/**
+ * Security manager that sandboxes dynamically loaded processor code
+ * @author davidhua
+ * 2019/8/12
+ */
+public class ProcessorSecurityManager extends SecurityManager {
+
+    public static final String CAN_SET_SECURITY_MANAGER = "canSetSecurityManager";
+    public static final String CAN_BUILD_SOCK = "canBuildSock";
+    public static final String CAN_READ_PROPERTIES = "canReadProps";
+    public static final String CAN_RW_PROPERTIES = "canReadWriteProps";
+
+    private static final String SET_SECURITY_MANAGER = "setSecurityManager";
+    private static final String EXIT_JVM = "exitVM";
+    private static final String QUEUE_PRINT_JOB = "queuePrintJob";
+    private static final String MODIFY_THREAD_GROUP_PERMISSION = "modifyThreadGroup";
+    private static final String MODIFY_THREAD_PERMISSION = "modifyThread";
+
+    private Map<String, Boolean> switchMap = new HashMap<>();
+    private String workDir;
+
+    public ProcessorSecurityManager(String workDir){
+        this.workDir = workDir;
+    }
+
+    public ProcessorSecurityManager(String workDir, Map<String, Boolean> switchMap){
+        this.workDir = workDir;
+        if(null != switchMap) {
+            this.switchMap = switchMap;
+        }
+    }
+    @Override
+    public void checkRead(FileDescriptor fd) {
+        checkIfStdDescriptor(fd);
+    }
+
+    @Override
+    public void checkWrite(FileDescriptor fd) {
+        checkIfStdDescriptor(fd);
+    }
+
+
+    @Override
+    public void checkPermission(Permission perm) {
+        if(perm instanceof AWTPermission){
+            throw new AccessControlException("have no permission to load AWT");
+        }else if(perm instanceof FilePermission){
+            FilePermission permission = (FilePermission)perm;
+            if(SecurityConstants.FILE_EXECUTE_ACTION.equals(permission.getActions())){
+                throw new AccessControlException("have no permission to execute command");
+            }
+            checkFilePermission(permission.getName());
+        }else if(perm instanceof SocketPermission){
+            checkSocket((SocketPermission)perm);
+        }else if(perm instanceof PropertyPermission){
+            checkProps((PropertyPermission)perm);
+        }else if(perm instanceof RuntimePermission){
+            checkRuntime((RuntimePermission)perm);
+        }
+    }
+
+    private void checkFilePermission(String file){
+        if(!new File(file).getAbsolutePath().startsWith(workDir)){
+            throw new AccessControlException("have no permission to : " + file);
+        }
+    }
+
+    private void checkIfStdDescriptor(FileDescriptor fd){
+        if(!fd.equals(FileDescriptor.out) && !fd.equals(FileDescriptor.in)
+                && !fd.equals(FileDescriptor.err)) {
+            throw new AccessControlException("have no permission to read fd : " + fd.toString());
+        }
+    }
+
+    private void checkSocket(SocketPermission perm){
+        if(!this.switchMap.getOrDefault(CAN_BUILD_SOCK, false)){
+            throw new AccessControlException("have no permission to build a socket");
+        }
+    }
+
+    private void checkProps(PropertyPermission perm){
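+        // read-only access is tolerated unless CAN_READ_PROPERTIES is switched off,
+        // while read/write access must be enabled explicitly (it defaults to false)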
if(perm.getActions().equals(SecurityConstants.PROPERTY_READ_ACTION) + && !this.switchMap.getOrDefault(CAN_READ_PROPERTIES, true)){ + throw new AccessControlException("have no permission to read property"); + } + if(perm.getActions().equals(SecurityConstants.PROPERTY_RW_ACTION) + && !this.switchMap.getOrDefault(CAN_RW_PROPERTIES, false)){ + throw new AccessControlException("have no permission to read/write property"); + } + } + + private void checkRuntime(RuntimePermission perm){ + if(perm.getName().startsWith(EXIT_JVM)){ + throw new AccessControlException("have no permission to exit jvm"); + } + if(perm.getName().equals(QUEUE_PRINT_JOB)){ + throw new AccessControlException("have no permission to queue print job"); + } + if(perm.getName().equals(MODIFY_THREAD_GROUP_PERMISSION) || perm.getName().equals(MODIFY_THREAD_PERMISSION)){ + throw new AccessControlException("have no permission to modify thread/threadGroup"); + } + if(perm.getName().equals(SET_SECURITY_MANAGER) && + !this.switchMap.getOrDefault(CAN_SET_SECURITY_MANAGER, false)){ + throw new AccessControlException("have no permission to set SecurityManager"); + } + + } + + public void setCanSetSecurityManager(boolean isCan){ + this.switchMap.put(CAN_SET_SECURITY_MANAGER, isCan); + } + +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/processor/loader/JavaMemoryClassObject.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/processor/loader/JavaMemoryClassObject.java new file mode 100644 index 000000000..e8e3f8640 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/processor/loader/JavaMemoryClassObject.java @@ -0,0 +1,52 @@ +/* + * + * Copyright 2020 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.exchangis.datax.core.processor.loader; + +import javax.tools.SimpleJavaFileObject; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.OutputStream; +import java.net.URI; + +/** + * @author davidhua + * 2019/8/26 + */ +public class JavaMemoryClassObject extends SimpleJavaFileObject { + + protected final ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); + + public JavaMemoryClassObject(String name, Kind kind){ + super(URI.create("string:///" + name.replace('.', '/')), + kind); + } + + public byte[] getBytes(){ + return outputStream.toByteArray(); + } + @Override + public OutputStream openOutputStream() throws IOException { + return outputStream; + } + + @Override + protected void finalize() throws Throwable { + super.finalize(); + outputStream.close(); + } +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/processor/loader/JavaMemoryFileObject.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/processor/loader/JavaMemoryFileObject.java new file mode 100644 index 000000000..55ed15d1b --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/processor/loader/JavaMemoryFileObject.java @@ -0,0 +1,42 @@ +/* + * + * Copyright 2020 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.exchangis.datax.core.processor.loader; + +import javax.tools.SimpleJavaFileObject; +import java.io.IOException; +import java.net.URI; + +/** + * @author davidhua + * 2019/8/26 + */ +public class JavaMemoryFileObject extends SimpleJavaFileObject { + + private String content; + + public JavaMemoryFileObject(String className, String content) { + super(URI.create("string:///" + className.replace('.', '/') + Kind.SOURCE.extension), + Kind.SOURCE); + this.content = content; + } + + @Override + public CharSequence getCharContent(boolean ignoreEncodingErrors) throws IOException { + return content; + } +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/processor/loader/JavaSrcUtils.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/processor/loader/JavaSrcUtils.java new file mode 100644 index 000000000..46ec21be2 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/processor/loader/JavaSrcUtils.java @@ -0,0 +1,53 @@ + +/* + * + * Copyright 2020 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.exchangis.datax.core.processor.loader; + +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +/** + * @author davidhua + * 2019/8/25 + */ +public class JavaSrcUtils { + private static final Pattern JAVA_FILE_NAME_PATTERN = Pattern.compile( + "([\\s]+public|^public)\\s+class[\\s]+([\\S]+)[^{]+\\{[\\s\\S]+}[\\s]*$"); + + private static final Pattern JAVA_PACKAGE_NAME_PATTERN = Pattern.compile( + "^[\\s]*package\\s+[\\S]+;"); + + public static String parseJavaFileName(String javaCode){ + Matcher matcher = JAVA_FILE_NAME_PATTERN.matcher(javaCode); + if(matcher.find()){ + return matcher.group(2); + } + return null; + } + + public static String addPackageName(String javaCode, String packageName){ + Matcher matcher = JAVA_PACKAGE_NAME_PATTERN.matcher(javaCode); + StringBuffer sb = new StringBuffer(); + if(matcher.find()){ + matcher.appendReplacement(sb, ""); + } + matcher.appendTail(sb); + return "package " + packageName + ";\n" + sb.toString(); + } + +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/processor/loader/plugin/DefaultPluginProcessorLoader.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/processor/loader/plugin/DefaultPluginProcessorLoader.java new file mode 100644 index 000000000..7c6152ec8 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/processor/loader/plugin/DefaultPluginProcessorLoader.java @@ -0,0 +1,128 @@ +/* + * + * Copyright 2020 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.exchangis.datax.core.processor.loader.plugin; + +import com.alibaba.datax.common.plugin.PluginProcessorLoader; +import com.webank.wedatasphere.exchangis.datax.core.processor.loader.JavaMemoryClassObject; +import com.webank.wedatasphere.exchangis.datax.core.processor.loader.JavaMemoryFileObject; +import com.alibaba.datax.core.util.container.JarLoader; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import javax.tools.*; +import java.io.File; +import java.io.IOException; +import java.net.URL; +import java.net.URLClassLoader; +import java.util.ArrayList; +import java.util.List; + +/** + * @author davidhua + * 2019/8/26 + */ +public class DefaultPluginProcessorLoader implements PluginProcessorLoader { + private static final Logger logger = LoggerFactory.getLogger(PluginProcessorLoader.class); + private static final String CLASS_PATH_PARAMS = "-classpath"; + + @Override + public boolean load(String fullClassName, String javaCode, String classpath) { + JavaCompiler compiler = ToolProvider.getSystemJavaCompiler(); + DiagnosticCollector diagnostics = new DiagnosticCollector<>(); + CustomFileManager customFileManager = new CustomFileManager(compiler.getStandardFileManager(diagnostics, null, null)); + List javaFiles = new ArrayList<>(); + javaFiles.add(new JavaMemoryFileObject(fullClassName, javaCode)); + List options = new ArrayList<>(); + options.add(CLASS_PATH_PARAMS); + options.add(classpath); + JavaCompiler.CompilationTask task = compiler.getTask(null, customFileManager, diagnostics, options, null, javaFiles); + boolean success = task.call(); + if(success){ + ClassLoader classLoader = Thread.currentThread().getContextClassLoader(); + if(classLoader instanceof JarLoader){ + JavaMemoryClassObject classObject = customFileManager.getClassObject(); + ((JarLoader)classLoader).loadClass(fullClassName, classObject.getBytes()); + } + }else{ + //logger error + diagnostics.getDiagnostics().forEach( diagnostic -> { + String res = ("Code:[" + diagnostic.getCode() + "]\n") + + "Kind:[" + diagnostic.getKind() + "]\n" + + "Position:[" + diagnostic.getPosition() + "]\n" + + "Start Position:[" + diagnostic.getStartPosition() + "]\n" + + "End Position:[" + diagnostic.getEndPosition() + "]\n" + + "Source:[" + diagnostic.getSource() + "]\n" + + "Message:[" + diagnostic.getMessage(null) + "]\n" + + "LineNumber:[" + diagnostic.getLineNumber() + "]\n" + + "ColumnNumber:[" + diagnostic.getColumnNumber() + "]\n"; + logger.error(res); + }); + } + return success; + } + + @Override + public boolean load(String fullClassName, String javaCode) { + StringBuilder builder = new StringBuilder(); + ClassLoader classLoader = Thread.currentThread().getContextClassLoader(); + try { + Class foundClass = Class.forName(fullClassName, false, classLoader); + if(null != foundClass){ + //The class has been loaded + return true; + } + } catch (ClassNotFoundException e) { + //Ignore try to load class + } + while(classLoader instanceof URLClassLoader){ + URLClassLoader urlClassLoader = (URLClassLoader)classLoader; + for (URL url : urlClassLoader.getURLs()) { + builder.append(url.getFile()).append(File.pathSeparator); + } + classLoader = classLoader.getParent(); + } + return load(fullClassName, javaCode, builder.toString()); + } + + private static class CustomFileManager extends ForwardingJavaFileManager{ + + private JavaMemoryClassObject memoryClassObject; + /** + * Custom + * + * @param fileManager delegate to this file manager + */ + CustomFileManager(StandardJavaFileManager fileManager) 
{
+            super(fileManager);
+        }
+
+        @Override
+        public JavaFileObject getJavaFileForOutput(Location location, String className, JavaFileObject.Kind kind, FileObject sibling) throws IOException {
+            if(null == memoryClassObject){
+                memoryClassObject = new JavaMemoryClassObject(className, kind);
+            }
+            return memoryClassObject;
+        }
+
+        JavaMemoryClassObject getClassObject(){
+            return memoryClassObject;
+        }
+
+    }
+
+}
diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/channel/ChannelElement.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/channel/ChannelElement.java
new file mode 100644
index 000000000..2a90542e9
--- /dev/null
+++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/channel/ChannelElement.java
@@ -0,0 +1,31 @@
+/*
+ *
+ * Copyright 2020 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.webank.wedatasphere.exchangis.datax.core.transport.channel;
+
+/**
+ * An element that can be transferred through a channel
+ * @author davidhua
+ * 2019/3/24
+ */
+public interface ChannelElement{
+
+    int getByteSize();
+
+    int getMemorySize();
+
+    <T> T copyElement();
+}
diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/channel/StreamChannel.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/channel/StreamChannel.java
new file mode 100644
index 000000000..808685035
--- /dev/null
+++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/channel/StreamChannel.java
@@ -0,0 +1,70 @@
+/*
+ *
+ * Copyright 2020 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +package com.webank.wedatasphere.exchangis.datax.core.transport.channel; + +import com.alibaba.datax.common.util.Configuration; +import com.alibaba.datax.core.statistics.communication.Communication; +import com.alibaba.datax.core.transport.channel.AbstractChannel; +import com.webank.wedatasphere.exchangis.datax.core.transport.stream.ByteBlock; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + + +/** + * @author davidhua + * 2019/3/26 + */ +public abstract class StreamChannel extends AbstractChannel { + + protected long blockSize = 0; + + private static final Logger LOG = LoggerFactory.getLogger(StreamChannel.class); + + public StreamChannel(Configuration configuration) { + super(configuration); + } + + @Override + protected void statPush(Communication currentCommunication, long dataSize) { + } + + @Override + protected void statPull(Communication currentCommunication, long dataSize) { + + } + + @Override + protected long currentDataSpeed(Communication currentCommunication, Communication lastCommunication, long interval) { + return 0; + } + + @Override + protected void updateCounter(Communication currentCommunication, Communication lastCommunication) { + + } + + @Override + protected void firstPrint() { + LOG.info("StreamChannel set byte_speed_limit to " + getByteSpeed() + + (getByteSpeed() <= 0? ", No bps activated." : ".")); + } + + public long getBlockSize(){ + return this.blockSize; + } +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/channel/memory/MemoryStreamChannel.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/channel/memory/MemoryStreamChannel.java new file mode 100644 index 000000000..ac738d768 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/channel/memory/MemoryStreamChannel.java @@ -0,0 +1,97 @@ +/* + * + * Copyright 2020 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+package com.webank.wedatasphere.exchangis.datax.core.transport.channel.memory;
+
+import com.alibaba.datax.common.exception.DataXException;
+import com.alibaba.datax.common.util.Configuration;
+import com.webank.wedatasphere.exchangis.datax.core.transport.channel.StreamChannel;
+import com.webank.wedatasphere.exchangis.datax.core.transport.stream.ByteBlock;
+import com.alibaba.datax.core.util.FrameworkErrorCode;
+import com.alibaba.datax.core.util.container.CoreConstant;
+
+import java.util.concurrent.BlockingQueue;
+import java.util.concurrent.LinkedBlockingQueue;
+
+/**
+ * @author davidhua
+ * 2019/3/22
+ */
+public class MemoryStreamChannel extends StreamChannel {
+    private BlockingQueue<ByteBlock> queue = null;
+
+    public MemoryStreamChannel(final Configuration configuration) {
+        super(configuration);
+        super.consumeIsolated = true;
+        this.queue = new LinkedBlockingQueue<>(this.getCapacity());
+        this.blockSize = configuration.getInt(CoreConstant.DATAX_CORE_TRANSPORT_STREAM_CHANNEL_BLOCKSIZE,
+                8192);
+    }
+
+    @Override
+    protected void doPush(ByteBlock byteBlock) {
+        try{
+            if(byteBlock.getByteSize() > blockSize){
+                throw DataXException.asDataXException(FrameworkErrorCode.RUNTIME_ERROR, "the size of byte block is too big");
+            }
+            long startTime = System.nanoTime();
+            this.queue.put(byteBlock);
+            waitWriterTime.addAndGet(System.nanoTime() - startTime);
+        }catch(InterruptedException e){
+            Thread.currentThread().interrupt();
+        }
+    }
+
+    @Override
+    protected ByteBlock doPull() {
+        try{
+            long startTime = System.nanoTime();
+            ByteBlock block = this.queue.take();
+            waitReaderTime.addAndGet(System.nanoTime() - startTime);
+            return block;
+        }catch(InterruptedException e){
+            Thread.currentThread().interrupt();
+            throw new IllegalStateException(e);
+        }
+    }
+
+    @Override
+    public int size() {
+        return this.queue.size();
+    }
+
+    @Override
+    public boolean isEmpty() {
+        return this.queue.isEmpty();
+    }
+
+    @Override
+    public void clear() {
+        this.queue.clear();
+    }
+
+    @Override
+    public void close() {
+        super.close();
+        // push a zero-size terminate block so blocked consumers can finish
+        try {
+            this.queue.put(ByteBlock.TERMINATE);
+        } catch (InterruptedException e) {
+            Thread.currentThread().interrupt();
+        }
+    }
+}
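A hedged sketch of driving MemoryStreamChannel directly; the empty Configuration is an assumption (real jobs supply channel capacity and block-size settings):

    Configuration conf = Configuration.from("{}");
    MemoryStreamChannel channel = new MemoryStreamChannel(conf);
    channel.push(new ByteBlock(java.nio.ByteBuffer.wrap("hello".getBytes())));
    ByteBlock block = channel.pull();
    channel.close(); // enqueues ByteBlock.TERMINATE so a blocked consumer can finish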
diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/checkpoint/CheckPoint.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/checkpoint/CheckPoint.java
new file mode 100644
index 000000000..7ca819a0d
--- /dev/null
+++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/checkpoint/CheckPoint.java
@@ -0,0 +1,26 @@
+/*
+ *
+ * Copyright 2020 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.webank.wedatasphere.exchangis.datax.core.transport.checkpoint;
+
+/**
+ * @author davidhua
+ * 2020/3/19
+ */
+public class CheckPoint {
+
+}
diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/checkpoint/CheckPointStore.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/checkpoint/CheckPointStore.java
new file mode 100644
index 000000000..d4cf74a60
--- /dev/null
+++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/checkpoint/CheckPointStore.java
@@ -0,0 +1,47 @@
+/*
+ *
+ * Copyright 2020 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.webank.wedatasphere.exchangis.datax.core.transport.checkpoint;
+
+
+import java.util.List;
+
+/**
+ * @author davidhua
+ * 2020/3/18
+ */
+public interface CheckPointStore {
+
+    /**
+     * Save checkpoint
+     * @param checkPoint checkpoint entity
+     */
+    void savePoint(CheckPoint checkPoint);
+
+    /**
+     * Get checkpoint
+     * @param unique unique identifier
+     * @return the matched checkpoint, or null if none exists
+     */
+    CheckPoint getPoint(String unique);
+
+    /**
+     * Get all checkpoints
+     * @return all stored checkpoints
+     */
+    List<CheckPoint> getPoints();
+}
diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/checkpoint/storage/BufferedRandomAccessFile.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/checkpoint/storage/BufferedRandomAccessFile.java
new file mode 100644
index 000000000..344350f78
--- /dev/null
+++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/checkpoint/storage/BufferedRandomAccessFile.java
@@ -0,0 +1,298 @@
+/*
+ *
+ * Copyright 2020 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +package com.webank.wedatasphere.exchangis.datax.core.transport.checkpoint.storage; + +import java.io.File; +import java.io.FileNotFoundException; +import java.io.IOException; +import java.io.RandomAccessFile; +import java.nio.ByteBuffer; +import java.nio.CharBuffer; +import java.nio.charset.Charset; +import java.nio.charset.CharsetDecoder; +import java.nio.charset.CoderResult; +/** + * randomAccessFile for buffer + * @author davidhua + * + */ +public class BufferedRandomAccessFile extends RandomAccessFile{ + + private static final String DEFAULT_CHARSET = "UTF-8"; + + private static final int DEFAULT_BUFFER_SIZE = 8921; + + private static final int DEFAULT_CHAR_BUFFER_SIZE = 5120; + + private volatile byte[] buffer; + + private volatile CharBuffer charBuffer; + + private long currentPos = -1L; + + private long stPos = -1L,endPos = -1L; + + private boolean isWrite = false; + + public BufferedRandomAccessFile(File file, String mode) + throws FileNotFoundException { + super(file, mode); + buffer = new byte[DEFAULT_BUFFER_SIZE]; + this.seek(0); + currentPos = 0L; + } + public BufferedRandomAccessFile(String fileName,String mode) + throws FileNotFoundException { + super(fileName, mode); + buffer = new byte[DEFAULT_BUFFER_SIZE]; + this.seek(0); + currentPos = 0L; + } + + /** + * Fill the buffer + * @param pos + * @throws IOException + */ + private void fill(long pos) throws IOException{ + int offset = 0; + if(pos == stPos){ + return; + }else{ + offset = super.read(buffer); + } + if(offset<=-1) stPos = endPos = -1L; + else{ + stPos = pos; + endPos = stPos + offset; + } + super.seek(pos);//return the position + } + public void seek(long pos){ + try { + currentPos = pos; + flush();//flush + if(pos>-1) super.seek(pos); + fill(pos); + } catch (IOException e) { + throw new RuntimeException("there is a wrong in seeking file!"); + } + } + + /** + * Flush the buffer + */ + public void flush()throws IOException{ + //If the buffer is written,write the buff to the file + if(isWrite & (stPos|endPos) >= 0){ + super.write(buffer, 0,(int)(endPos-stPos)); + isWrite = false; + } + stPos = endPos = -1L; + } + + /** + * Read from buffer + */ + public synchronized int readBuffer() throws IOException{ + long pos = currentPos; + //In the buffer + if(pos >= stPos && pos < endPos){ + currentPos++; + return buffer[(int)(pos-stPos)]&0xff; + }else{ + seek(endPos); + if((stPos|endPos) == -1L){ + return -1; + } + currentPos ++; + return buffer[0]&0xff; + } + } + + public synchronized int readBuffer(byte[] b) throws IOException{ + return readBuffer(b,0,b.length); + } + + public synchronized int readBuffer(byte[] b,int off,int len) { + long pos = currentPos; + int avail = (int)(endPos - pos); + if(avail <= 0){ + //No rest size in buffer + seek(pos); + if((stPos|endPos) == -1L) return -1; + avail = (int)(endPos - pos); + } + int cut = Math.min(avail, len); + System.arraycopy(buffer, (int) (pos - stPos), b, off, cut); + currentPos+=cut; + while(cut < len){ + seek(endPos); + if((stPos|endPos) != -1L){ + avail = (int)(endPos - stPos); + int extra = Math.min(avail, len - cut); + System.arraycopy(buffer,0, b, off+cut,extra); + cut+=extra; + currentPos+=extra; + }else{ + break; + } + } + return cut; + } + public synchronized void writeBuffer(int b) throws IOException{ + long pos = currentPos; + if(pos >= stPos && pos < stPos + buffer.length){ + //If in the buffer size + currentPos ++; + //Overlap the buffer + buffer[(int)(pos-stPos)] = (byte)b; + //Enlarge the end position + if(pos == endPos) + endPos++; + }else{ +// flush(); 
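+            // seek(pos) flushes any dirty buffer content and re-centres the buffer
+            // window on pos, so the byte below is staged at buffer[0] of the new window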
+ seek(pos); + if((stPos|endPos) == -1L){ + stPos = pos; + endPos = stPos+1; + } + currentPos++; + buffer[0] = (byte)b; + } + //Buffer is changed + isWrite = true; + } + public synchronized void writeBuffer(byte[] b) throws IOException{ + writeBuffer(b,0,b.length); + } + + public synchronized void writeBuffer(byte[] b,int off,int len) { + long pos = currentPos; + int avail = (stPos|endPos) == -1L? -1 : (int)(buffer.length - pos+stPos); + if(avail <= 0){ + seek(pos); + if((stPos|endPos) == -1L){ + stPos = pos; + endPos = stPos+1; + } + avail = buffer.length; + } + int write = Math.min(len, avail); + System.arraycopy(b, off, buffer, (int) (pos - stPos), write); + if(pos + write >= endPos) endPos = pos+write; + //Buffer is changed + isWrite = true; + currentPos += write; + while(write < len){ + long nPos = stPos+buffer.length; + seek(nPos); + int extra = Math.min(buffer.length, len - write); + System.arraycopy(b,off + write, buffer, 0,extra); + if((stPos|endPos) == -1L){ + stPos = nPos; + endPos = stPos+extra; + } + //Buffer is changed + isWrite = true; + write+=extra; + currentPos+=extra; + } + } + /** + *Read line from character stream + */ + public synchronized String readBufferLine() throws IOException{ + if(charBuffer == null){ + charBuffer = CharBuffer.allocate(DEFAULT_CHAR_BUFFER_SIZE); + //Set limit to 0 + charBuffer.limit(0); + } + StringBuilder input = new StringBuilder(); + int lef = 0; + boolean eol = false; + while(!eol){ + if(!charBuffer.hasRemaining()){ + lef = implyRead(charBuffer); + if(lef<0){ + eol = true; + } + } + while(charBuffer.hasRemaining()&&!eol){ + char c = charBuffer.get(); + switch(c){ + case '\n': + eol = true; + break; + case '\r': + continue; + default: + input.append(c); + break; + } + } + } + if(input.length() == 0&&(lef == -1)){ + return null; + } + return input.toString(); + } + private int implyRead(CharBuffer buf) { + if((stPos|endPos) == -1L){ + return -1; + } + //Set position to 0, and start to read data written + buf.rewind(); + //Set limit to max value to try the best to receive the data decoded + buf.limit(DEFAULT_CHAR_BUFFER_SIZE); + int st =(int)(currentPos-stPos); + int end = (int)(endPos-currentPos); + ByteBuffer in = ByteBuffer.wrap(buffer,st,end); + CharsetDecoder decoder = + Charset.forName(DEFAULT_CHARSET).newDecoder(); + boolean endOfInput = false; + CoderResult result = decoder.decode(in, buf, endOfInput); + currentPos +=(in.position() - st); + if(result.isUnderflow()){ + if(currentPos >= endPos){ + seek(currentPos); + }else if(buf.position()<=0){ + //Because the endOfInput == 0 + if(in.hasRemaining()){ + seek(currentPos); + } + } + }else if(result.isError()||result.isMalformed()){ + return -1; + } + int len = buf.position(); + //Set the limit to the length of data + buf.limit(len); + //Set the position to 0 + buf.rewind(); + return len; + } + + @Override + public void close() throws IOException { + if(null != charBuffer){ + charBuffer.clear(); + } + super.close(); + } +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/checkpoint/storage/CheckPointMemStore.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/checkpoint/storage/CheckPointMemStore.java new file mode 100644 index 000000000..de5179f1b --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/checkpoint/storage/CheckPointMemStore.java @@ -0,0 +1,25 @@ +/* + * + * 
Copyright 2020 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.exchangis.datax.core.transport.checkpoint.storage; + +/** + * @author davidhua + * 2020/3/25 + */ +public class CheckPointMemStore { +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/checkpoint/storage/CheckPointStoreFile.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/checkpoint/storage/CheckPointStoreFile.java new file mode 100644 index 000000000..d6f423c8e --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/checkpoint/storage/CheckPointStoreFile.java @@ -0,0 +1,77 @@ +/* + * + * Copyright 2020 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.exchangis.datax.core.transport.checkpoint.storage; + +import com.webank.wedatasphere.exchangis.datax.core.transport.checkpoint.CheckPoint; +import com.webank.wedatasphere.exchangis.datax.core.transport.checkpoint.CheckPointStore; + +import java.io.File; +import java.io.FileNotFoundException; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; + +/** + * @author davidhua + * 2020/3/19 + */ +public class CheckPointStoreFile implements CheckPointStore { + + private Map checkPointOffset = new ConcurrentHashMap<>(); + + private BufferedRandomAccessFile accessFile; + + private CheckPointStoreFile(File pathFile) throws FileNotFoundException { + this.accessFile = new BufferedRandomAccessFile(pathFile, "rw"); + } + public static CheckPointStoreFile load(String path){ + CheckPointStoreFile storeFile = null; + File pathFile = new File(path); + if(pathFile.exists()){ + try { + storeFile = new CheckPointStoreFile(pathFile); + } catch (FileNotFoundException e) { + storeFile = null; + } + } + return storeFile; + } + + public static CheckPointStoreFile create(String path){ + try { + return new CheckPointStoreFile(new File(path)); + } catch (FileNotFoundException e) { + return null; + } + } + + @Override + public void savePoint(CheckPoint checkPoint) { + + } + + @Override + public CheckPoint getPoint(String unique) { + return null; + } + + @Override + public List getPoints() { + return null; + } +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/stream/ByteBlock.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/stream/ByteBlock.java new file mode 100644 index 000000000..9790e28ac --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/stream/ByteBlock.java @@ -0,0 +1,69 @@ +/* + * + * Copyright 2020 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.exchangis.datax.core.transport.stream; + +import com.webank.wedatasphere.exchangis.datax.core.transport.channel.ChannelElement; +import com.alibaba.datax.core.util.ClassSize; + +import java.nio.ByteBuffer; + +/** + * @author davidhua + * 2019/3/26 + */ +public class ByteBlock implements ChannelElement { + + public static final ByteBlock SEPARATOR = new ByteBlock(ByteBuffer.allocate(0)); + + public static final ByteBlock TERMINATE = new ByteBlock(null); + private ByteBuffer byteStored; + + public ByteBlock(ByteBuffer byteStored){ + this.byteStored = byteStored; + } + + @Override + public int getByteSize() { + return byteStored != null? byteStored.remaining() : 0; + } + + @Override + public int getMemorySize() { + return ClassSize.REFERENCE + ClassSize.ByteBufferHead + + (byteStored != null? 
byteStored.remaining() : 0);
+    }
+
+    ByteBuffer getByteStored(){
+        return byteStored;
+    }
+
+    @SuppressWarnings("unchecked")
+    @Override
+    public <T> T copyElement() {
+        if(this.equals(SEPARATOR) || this.equals(TERMINATE)){
+            return (T)this;
+        }
+        ByteBuffer buffer = null;
+        if(null != byteStored) {
+            byte[] stored = byteStored.array();
+            buffer = ByteBuffer.wrap(stored);
+            buffer.position(0);
+            buffer.limit(byteStored.limit());
+        }
+        return (T) new ByteBlock(buffer);
+    }
+}
diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/stream/ChannelInput.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/stream/ChannelInput.java
new file mode 100644
index 000000000..3c869fda9
--- /dev/null
+++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/stream/ChannelInput.java
@@ -0,0 +1,179 @@
+/*
+ *
+ * Copyright 2020 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.webank.wedatasphere.exchangis.datax.core.transport.stream;
+
+
+import com.alibaba.datax.common.exception.CommonErrorCode;
+import com.alibaba.datax.common.exception.DataXException;
+import com.webank.wedatasphere.exchangis.datax.core.transport.channel.StreamChannel;
+import com.alibaba.datax.core.util.FrameworkErrorCode;
+import com.webank.wedatasphere.exchangis.datax.util.Json;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.nio.ByteBuffer;
+
+/**
+ * Actually combines multiple streams
+ * @author davidhua
+ * 2019/3/26
+ */
+public class ChannelInput{
+    private static final Logger LOG = LoggerFactory.getLogger(ChannelInput.class);
+
+    private StreamChannel streamChannel;
+    private ByteBlock byteBlock;
+    private ByteBuffer byteBuffer;
+    private byte[] tmpBuf;
+    private byte[] streamMeta;
+    private boolean shutdown = false;
+    private ChannelInputStream stream;
+
+    public ChannelInput(StreamChannel streamChannel){
+        this.streamChannel = streamChannel;
+        tmpBuf = new byte[512];
+    }
+
+    public InputStream nextStream() throws IOException {
+        if(shutdown){
+            return null;
+        }
+        if(null != stream){
+            closeStream();
+        }
+        if(byteBlock != ByteBlock.TERMINATE){
+            // reset the current byte block
+            byteBlock = null;
+            ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
+            int len;
+            while((len = read0(tmpBuf, 0, tmpBuf.length)) > 0){
+                byteArrayOutputStream.write(tmpBuf, 0, len);
+            }
+            if( byteBlock != ByteBlock.TERMINATE){
+                streamMeta = byteArrayOutputStream.toByteArray();
+                byteBlock = null;
+                stream = new ChannelInputStream();
+                return stream;
+            }
+        }
+        return null;
+    }
+
+    public StreamMeta streamMetaData(String encoding){
+        try{
+            String metaJson = new String(streamMeta, encoding);
+            return Json.fromJson(metaJson, StreamMeta.class);
+        }catch(Exception e){
+            throw
DataXException.asDataXException(FrameworkErrorCode.RUNTIME_ERROR, e); + } + } + + public void shutdown(){ + try { + if (null != stream) { + stream.close(); + } + }catch(Exception e){ + throw new RuntimeException(e); + } + this.shutdown = true; + } + + private void closeStream() throws IOException { + while(stream.read(this.tmpBuf, 0, this.tmpBuf.length) != -1){ + ; + } + stream.close(); + stream = null; + streamMeta = null; + } + + private int read0(byte[] b, int off, int len){ + pullFromChannel(); + if(byteBuffer.remaining() <= 0){ + return -1; + } + int start = off; + while(len > 0 && byteBuffer.remaining() > 0){ + int rest = byteBuffer.remaining(); + if(rest > len){ + byteBuffer.get(b, off, len); + break; + }else{ + byteBuffer.get(b, off, rest); + off += rest; + len -= rest; + } + pullFromChannel(); + } + return off - start; + } + private void pullFromChannel(){ + if(byteBlock != ByteBlock.SEPARATOR && byteBlock != ByteBlock.TERMINATE){ + if(null == byteBuffer || byteBuffer.remaining() <= 0){ + byteBlock = streamChannel.pull(); + byteBuffer = byteBlock.getByteStored(); + if(null == byteBuffer){ + byteBuffer = ByteBuffer.allocate(0); + } + } + } + } + + public class ChannelInputStream extends InputStream{ + private boolean isClosed = false; + + @Override + public int read() throws IOException { + if(isClosed){ + throw DataXException.asDataXException(CommonErrorCode.SHUT_DOWN_TASK, "channel input stream has been closed"); + } + pullFromChannel(); + return byteBuffer.remaining() > 0? byteBuffer.get() & 0xFF : -1; + } + + @Override + public int read(byte[] b, int off, int len) throws IOException { + if(isClosed){ + throw DataXException.asDataXException(CommonErrorCode.SHUT_DOWN_TASK, "channel input stream has been closed"); + } + if(b == null){ + throw new NullPointerException(); + }else if(off < 0 || len < 0 || len > b.length - off){ + throw new IndexOutOfBoundsException(); + }else if(len == 0){ + return 0; + } + return read0(b, off, len); + } + + @Override + public void close() throws IOException { + isClosed = true; + } + + @Override + public int available() throws IOException { + throw DataXException.asDataXException(CommonErrorCode.RUNTIME_ERROR, "channel input stream doesn't support method named 'available'"); + } + + } +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/stream/ChannelOutput.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/stream/ChannelOutput.java new file mode 100644 index 000000000..85cedd484 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/stream/ChannelOutput.java @@ -0,0 +1,165 @@ +/* + * + * Copyright 2020 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.exchangis.datax.core.transport.stream; + +import com.alibaba.datax.common.exception.CommonErrorCode; +import com.alibaba.datax.common.exception.DataXException; +import com.webank.wedatasphere.exchangis.datax.core.transport.channel.StreamChannel; +import com.webank.wedatasphere.exchangis.datax.util.Json; +import org.apache.commons.lang3.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.io.OutputStream; +import java.nio.ByteBuffer; + +/** + * @author davidhua + * 2019/3/26 + */ +public class ChannelOutput { + private static final Logger LOG = LoggerFactory.getLogger(ChannelOutput.class); + private static final String DEFAULT_ENCODING = "UTF-8"; + private StreamChannel streamChannel; + private long blockSize; + private ByteBuffer byteBuffer; + private byte[] streamMeta; + private boolean shutdown = false; + private ChannelOutputStream stream; + + public ChannelOutput(StreamChannel streamChannel){ + this.streamChannel = streamChannel; + this.blockSize = streamChannel.getBlockSize(); + this.byteBuffer = ByteBuffer.allocate(Math.toIntExact(blockSize)); + } + + + public OutputStream createStream() throws IOException{ + return createStream(""); + } + + public OutputStream createStream(String name) throws IOException { + if(StringUtils.isBlank(name)){ + name = "1"; + } + StreamMeta streamMeta = new StreamMeta(); + streamMeta.setName(name); + return createStream(streamMeta, DEFAULT_ENCODING); + } + public OutputStream createStream(StreamMeta meta, String encoding) throws IOException{ + if(StringUtils.isBlank(encoding)){ + encoding = DEFAULT_ENCODING; + } + String metaJson = Json.toJson(meta, null); + return createStream(metaJson.getBytes(encoding)); + } + private OutputStream createStream(byte[] metaData) throws IOException { + if(shutdown){ + return null; + } + if(null != stream){ + stream.close(); + flush0(); + streamChannel.push(ByteBlock.SEPARATOR); + } + this.streamMeta = metaData; + write0(this.streamMeta, 0, this.streamMeta.length); + flush0(); + streamChannel.push(ByteBlock.SEPARATOR); + stream = new ChannelOutputStream(); + return stream; + } + public void shutdown(){ + shutdown = true; + streamChannel.clear(); + } + + public void close(){ + flush0(); + streamChannel.close(); + } + private void write0(byte[] b, int off , int len){ + while(len > 0){ + pushToChannel(); + int rest = byteBuffer.remaining(); + if(rest > len){ + byteBuffer.put(b, off, len); + break; + }else{ + byteBuffer.put(b, off, rest); + off += rest; + len -= rest; + } + } + } + + private void flush0(){ + byteBuffer.flip(); + if(byteBuffer.remaining() > 0){ + streamChannel.push(new ByteBlock(byteBuffer)); + } + byteBuffer = ByteBuffer.allocate(Math.toIntExact(blockSize)); + } + private void pushToChannel(){ + if(byteBuffer.remaining() <= 0){ + byteBuffer.flip(); + streamChannel.push(new ByteBlock(byteBuffer)); + byteBuffer = ByteBuffer.allocate(Math.toIntExact(blockSize)); + } + } + public class ChannelOutputStream extends OutputStream{ + private boolean isClosed = false; + @Override + public void write(int b) throws IOException { + if(isClosed){ + throw DataXException.asDataXException(CommonErrorCode.SHUT_DOWN_TASK, "channel output stream has been closed"); + } + pushToChannel(); + byteBuffer.put((byte)(b & 0xff)); + } + + @Override + public void write(byte[] b, int off, int len) throws IOException { + if(isClosed){ + throw DataXException.asDataXException(CommonErrorCode.SHUT_DOWN_TASK, "channel output stream has been 
closed"); + } + if(b == null){ + throw new NullPointerException(); + }else if((off < 0) || (off > b.length) || (len < 0) || + ((off + len) > b.length) || ((off + len) < 0)){ + throw new IndexOutOfBoundsException(); + }else if (len == 0){ + return; + } + write0(b, off ,len); + } + + @Override + public void flush() throws IOException { + flush0(); + } + + @Override + public void close() throws IOException { + flush(); + isClosed = true; + } + + } +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/stream/StreamMeta.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/stream/StreamMeta.java new file mode 100644 index 000000000..4f5d57aad --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/core/transport/stream/StreamMeta.java @@ -0,0 +1,81 @@ +/* + * + * Copyright 2020 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.exchangis.datax.core.transport.stream; + +import org.apache.htrace.fasterxml.jackson.annotation.JsonInclude; + +/** + * @author davidhua + * 2019/4/3 + */ +@JsonInclude(JsonInclude.Include.NON_EMPTY) +public class StreamMeta { + private String name; + private String absolutePath; + private String relativePath; + + /** + * Check Point ID + */ + private String checkPointId; + + /** + * Stream offset + */ + private long offset = 0; + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public String getAbsolutePath() { + return absolutePath; + } + + public void setAbsolutePath(String absolutePath) { + this.absolutePath = absolutePath; + } + + public String getRelativePath() { + return relativePath; + } + + public void setRelativePath(String relativePath) { + this.relativePath = relativePath; + } + + public String getCheckPointId() { + return checkPointId; + } + + public void setCheckPointId(String checkPointId) { + this.checkPointId = checkPointId; + } + + public long getOffset() { + return offset; + } + + public void setOffset(long offset) { + this.offset = offset; + } +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/util/Json.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/util/Json.java new file mode 100644 index 000000000..3206291b2 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/util/Json.java @@ -0,0 +1,106 @@ +package com.webank.wedatasphere.exchangis.datax.util; + +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.*; +import org.apache.commons.lang3.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import 
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.nio.charset.StandardCharsets;
+import java.util.ArrayList;
+
+
+public class Json {
+    private static final String PREFIX = "[";
+    private static final String SUFFIX = "]";
+    private static final Logger logger = LoggerFactory.getLogger(Json.class);
+
+    private static ObjectMapper mapper;
+
+    static{
+        mapper = new ObjectMapper();
+        mapper.configure(JsonParser.Feature.ALLOW_COMMENTS, true);
+        mapper.configure(JsonParser.Feature.ALLOW_UNQUOTED_FIELD_NAMES, true);
+        mapper.configure(JsonParser.Feature.ALLOW_SINGLE_QUOTES, true);
+        mapper.configure(DeserializationFeature.READ_ENUMS_USING_TO_STRING, true);
+        mapper.configure(SerializationFeature.WRITE_ENUMS_USING_TO_STRING, true);
+        mapper.configure(JsonParser.Feature.ALLOW_UNQUOTED_CONTROL_CHARS, true);
+        //empty beans allowed
+        mapper.configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false);
+        //ignore unknown properties
+        mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
+        //do not escape non-ascii characters
+        mapper.configure(JsonGenerator.Feature.ESCAPE_NON_ASCII, false);
+    }
+    private Json(){}
+
+    public static ObjectMapper getMapper(){
+        return mapper;
+    }
+
+    @SuppressWarnings("unchecked")
+    public static <T> T fromJson(String json, Class<?> clazz, Class<?>... parameters){
+        if(StringUtils.isNotBlank(json)){
+            try{
+                if(parameters.length > 0){
+                    return (T)mapper.readValue(json, mapper.getTypeFactory().constructParametricType(clazz, parameters));
+                }
+                if(json.startsWith(PREFIX)
+                        && json.endsWith(SUFFIX)){
+                    JavaType javaType = mapper.getTypeFactory()
+                            .constructParametricType(ArrayList.class, clazz);
+                    return mapper.readValue(json, javaType);
+                }
+                return (T)mapper.readValue(json, clazz);
+            } catch (Exception e) {
+                logger.error(e.getLocalizedMessage());
+                throw new RuntimeException(e);
+            }
+        }
+        return null;
+    }
+
+    public static <T> T fromJson(InputStream stream, Class<?> clazz, Class<?>... parameters){
+        StringBuilder builder = new StringBuilder();
+        String jsonStr = null;
+        try{
+            BufferedReader reader = new BufferedReader(new InputStreamReader(stream, StandardCharsets.UTF_8));
+            while((jsonStr = reader.readLine()) != null){
+                builder.append(jsonStr);
+            }
+            reader.close();
+        }catch(Exception e){
+            logger.error(e.getLocalizedMessage());
+            throw new RuntimeException(e);
+        }
+        return fromJson(builder.toString(), clazz, parameters);
+    }
+
+    public static String toJson(Object obj, Class<?> model){
+        return toJson(obj, model, false);
+    }
+    public static String toJson(Object obj, Class<?> model, boolean beautify){
+        ObjectWriter writer = mapper.writer();
+        if(null != obj){
+            try{
+                if(null != model){
+                    writer = writer.withView(model);
+                }
+                if(beautify){
+                    return writer.withDefaultPrettyPrinter().writeValueAsString(obj);
+                }
+                return writer.writeValueAsString(obj);
+            } catch (JsonProcessingException e) {
+                logger.error(e.getLocalizedMessage());
+                throw new RuntimeException(e);
+            }
+        }
+        return null;
+    }
+
+}
diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/util/KerberosUtil.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/util/KerberosUtil.java
new file mode 100644
index 000000000..4753efdca
--- /dev/null
+++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/webank/wedatasphere/exchangis/datax/util/KerberosUtil.java
@@ -0,0 +1,56 @@
+/*
+ *
+ * Copyright 2020 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
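+ *
+ * Usage sketch for the Json helper defined above (values are illustrative
+ * only, not part of the original source):
+ *
+ *   StreamMeta meta = new StreamMeta();
+ *   meta.setName("part-0");
+ *   String json = Json.toJson(meta, null);                  // no serialization view
+ *   StreamMeta parsed = Json.fromJson(json, StreamMeta.class);
+ *   List<StreamMeta> many = Json.fromJson("[" + json + "]", StreamMeta.class);
+ *
+ * A JSON array string is deserialized into an ArrayList of the given class.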
+ */
+
+package com.webank.wedatasphere.exchangis.datax.util;
+
+import com.alibaba.datax.common.exception.DataXException;
+import com.alibaba.datax.core.util.FrameworkErrorCode;
+import com.alibaba.datax.core.util.container.CoreConstant;
+
+import java.io.*;
+import java.util.Properties;
+
+/**
+ * For kerberos connection
+ * @author davidhua
+ * 2020/4/23
+ */
+public class KerberosUtil {
+
+    private static Properties properties;
+
+    public static synchronized Properties getProperties(){
+        if(null == properties && new File(CoreConstant.DATAX_KERBEROS_PATH).exists()){
+            //read from the kerberos config path whose existence was just checked
+            try (InputStream inputStream = new FileInputStream(CoreConstant.DATAX_KERBEROS_PATH)) {
+                Properties props = new Properties();
+                props.load(inputStream);
+                properties = props;
+            } catch (IOException e) {
+                if (e instanceof FileNotFoundException) {
+                    //Do nothing, just return null properties
+                    return properties;
+                }
+                throw DataXException.asDataXException(FrameworkErrorCode.SECRET_ERROR,
+                        "Failed to read kerberos config file", e);
+            }
+        }
+        return properties;
+    }
+
+}
diff --git a/exchangis-engines/engines/datax/datax-core/src/main/lib/Dm7JdbcDriver16.jar b/exchangis-engines/engines/datax/datax-core/src/main/lib/Dm7JdbcDriver16.jar
new file mode 100644
index 000000000..30740dcd2
Binary files /dev/null and b/exchangis-engines/engines/datax/datax-core/src/main/lib/Dm7JdbcDriver16.jar differ
diff --git a/exchangis-engines/engines/datax/datax-core/src/main/lib/db2jcc4.jar b/exchangis-engines/engines/datax/datax-core/src/main/lib/db2jcc4.jar
new file mode 100644
index 000000000..fc53cfd94
Binary files /dev/null and b/exchangis-engines/engines/datax/datax-core/src/main/lib/db2jcc4.jar differ
diff --git a/exchangis-engines/engines/datax/datax-core/src/main/lib/edb-jdbc16.jar b/exchangis-engines/engines/datax/datax-core/src/main/lib/edb-jdbc16.jar
new file mode 100644
index 000000000..255e64794
Binary files /dev/null and b/exchangis-engines/engines/datax/datax-core/src/main/lib/edb-jdbc16.jar differ
diff --git a/exchangis-engines/engines/datax/datax-core/src/main/lib/jconn3-1.0.0-SNAPSHOT.jar b/exchangis-engines/engines/datax/datax-core/src/main/lib/jconn3-1.0.0-SNAPSHOT.jar
new file mode 100644
index 000000000..df6e78bbc
Binary files /dev/null and b/exchangis-engines/engines/datax/datax-core/src/main/lib/jconn3-1.0.0-SNAPSHOT.jar differ
diff --git a/exchangis-engines/engines/datax/datax-core/src/main/resources/.secret.properties b/exchangis-engines/engines/datax/datax-core/src/main/resources/.secret.properties
new file mode 100644
index 000000000..b807f8ad6
--- /dev/null
+++ b/exchangis-engines/engines/datax/datax-core/src/main/resources/.secret.properties
@@ -0,0 +1,9 @@
+#ds basicAuth config
+auth.user=
+auth.pass=
+current.keyVersion=
+current.publicKey=
+current.privateKey=
+current.service.username=
+current.service.password=
+
diff --git a/exchangis-engines/engines/datax/datax-core/src/main/resources/core.json b/exchangis-engines/engines/datax/datax-core/src/main/resources/core.json
new file mode 100644
index 000000000..b056dfc5d
--- /dev/null
+++ b/exchangis-engines/engines/datax/datax-core/src/main/resources/core.json
@@ -0,0 +1,78 @@
+{
+    "entry": {
+        "environment": {}
+    },
+    "common": {
+        "column": {
+            "datetimeFormat": "yyyy-MM-dd HH:mm:ss",
+            "timeFormat": "HH:mm:ss",
+            "dateFormat": "yyyy-MM-dd",
+            "extraFormats": [
+                "yyyyMMdd"
+            ],
+            "timeZone": "GMT+8",
+            "encoding": "utf-8"
+        }
+    },
+    "core": {
+        "transport": {
+            "type": "record",
+            "channel":{
+                "speed":{
+                    "byte": 5242880,
+                    "record": 10000
+                },
"flowControlInterval": 20, + "capacity": 512, + "byteCapacity": 67108864 + }, + "record":{ + "channel": { + "class": "com.alibaba.datax.core.transport.channel.memory.MemoryRecordChannel" + }, + "exchanger": { + "class": "com.alibaba.datax.core.plugin.BufferedRecordExchanger", + "bufferSize": 32 + } + }, + "stream":{ + "channel":{ + "class": "com.webank.wedatasphere.exchangis.datax.core.transport.channel.memory.MemoryStreamChannel", + "blockSize": 8192 + } + } + + }, + "container": { + "job": { + "reportInterval": 5000, + "sleepInterval": 5000 + }, + "taskGroup": { + "reportInterval": 5000, + "sleepInterval": 100, + "channel": 5 + }, + "trace": { + "enable": "false" + } + }, + "statistics": { + "collector": { + "plugin": { + "taskClass": "com.alibaba.datax.core.statistics.plugin.task.StdoutPluginCollector", + "maxDirtyNumber": 10 + } + } + }, + "processor":{ + "loader":{ + "plugin":{ + "class":"com.webank.wedatasphere.exchangis.datax.core.processor.loader.plugin.DefaultPluginProcessorLoader", + "package": "com.webank.wedatasphere.exchangis.datax.core.processor.impl", + "sourcePath":"proc/src" + } + } + } + } +} diff --git a/exchangis-engines/engines/datax/datax-core/src/main/resources/kerberos.properties b/exchangis-engines/engines/datax/datax-core/src/main/resources/kerberos.properties new file mode 100644 index 000000000..9a4c1fc0b --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/resources/kerberos.properties @@ -0,0 +1 @@ +kerberos.krb5.path= \ No newline at end of file diff --git a/exchangis-engines/engines/datax/datax-core/src/main/resources/ldap.properties b/exchangis-engines/engines/datax/datax-core/src/main/resources/ldap.properties new file mode 100644 index 000000000..15150d139 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/resources/ldap.properties @@ -0,0 +1,3 @@ +#Ldap configuration +ldap.url= +ldap.baseDN= \ No newline at end of file diff --git a/exchangis-engines/engines/datax/datax-core/src/main/resources/log/logback.xml b/exchangis-engines/engines/datax/datax-core/src/main/resources/log/logback.xml new file mode 100644 index 000000000..5f5622b3f --- /dev/null +++ b/exchangis-engines/engines/datax/datax-core/src/main/resources/log/logback.xml @@ -0,0 +1,42 @@ + + + + + + UTF-8 + ${log.dir}/${log.file.name} + + ${log.dir}/${log.file.name}.%i.gz + 1 + 20 + + + 1GB + + + %d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{0} - %msg%n + + + + + + + logger.endsWith("StdoutPluginCollector") || null != throwable + + ACCEPT + DENY + + UTF-8 + ${log.dir}/${log.file.name}.dirty-record + + %d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{0} - %msg%n + + + + + + + + + \ No newline at end of file diff --git a/exchangis-engines/engines/datax/datax-elasticsearchwriter/pom.xml b/exchangis-engines/engines/datax/datax-elasticsearchwriter/pom.xml new file mode 100644 index 000000000..11adfd18a --- /dev/null +++ b/exchangis-engines/engines/datax/datax-elasticsearchwriter/pom.xml @@ -0,0 +1,103 @@ + + + + exchangis-engine-datax + com.webank.wedatasphere.exchangis + ${revision} + ../pom.xml + + 4.0.0 + jar + 3.0.0-Plus-2 + datax-elasticsearchwriter + + + com.webank.wedatasphere.exchangis + datax-core + ${datax.engine.version} + provided + + + slf4j-log4j12 + org.slf4j + + + + + org.slf4j + slf4j-api + provided + + + ch.qos.logback + logback-classic + provided + + + io.searchbox + jest-common + 2.4.0 + + + io.searchbox + jest + 2.4.0 + + + joda-time + joda-time + 2.9.9 + + + org.elasticsearch.client + elasticsearch-rest-high-level-client + 
6.7.1 + + + elasticsearch + org.elasticsearch + + + + + org.elasticsearch + elasticsearch + 6.7.1 + + + commons-logging + commons-logging + + + + + + + + org.apache.maven.plugins + maven-assembly-plugin + 2.2.1 + + + assemble + + single + + + install + + + + false + false + + ${basedir}/src/main/assembly/package.xml + + plugin + + + + + \ No newline at end of file diff --git a/exchangis-engines/engines/datax/datax-elasticsearchwriter/src/main/assembly/package.xml b/exchangis-engines/engines/datax/datax-elasticsearchwriter/src/main/assembly/package.xml new file mode 100644 index 000000000..a1b6fcce7 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-elasticsearchwriter/src/main/assembly/package.xml @@ -0,0 +1,33 @@ + + elasticsearchwriter + + dir + + false + + + src/main/resources + + plugin.json + plugin_job_template.json + + plugin/writer/elasticsearchwriter + + + target/ + + datax-elasticsearchwriter-${datax.engine.version}.jar + + plugin/writer/elasticsearchwriter + + + + + false + plugin/writer/elasticsearchwriter/libs + runtime + + + \ No newline at end of file diff --git a/exchangis-engines/engines/datax/datax-elasticsearchwriter/src/main/java/com/webank/wedatasphere/exchangis/datax/plugin/writer/elasticsearchwriter/v6/ElasticKey.java b/exchangis-engines/engines/datax/datax-elasticsearchwriter/src/main/java/com/webank/wedatasphere/exchangis/datax/plugin/writer/elasticsearchwriter/v6/ElasticKey.java new file mode 100644 index 000000000..c90d46916 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-elasticsearchwriter/src/main/java/com/webank/wedatasphere/exchangis/datax/plugin/writer/elasticsearchwriter/v6/ElasticKey.java @@ -0,0 +1,110 @@ +/* + * + * Copyright 2020 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
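+ *
+ * Illustrative only: a hypothetical writer job fragment showing where the keys
+ * declared below are expected to appear in a DataX job configuration; every
+ * value here is a placeholder, not a default shipped with this patch:
+ *
+ *   "writer": {
+ *     "name": "elasticsearchwriter",
+ *     "parameter": {
+ *       "elasticUrls": "http://127.0.0.1:9200",
+ *       "index": "demo_idx",
+ *       "type": "_doc",
+ *       "cleanUp": false,
+ *       "column": [ {"name": "city", "type": "keyword"} ],
+ *       "clientConfig": {"maxPoolSize": 4, "sockTimeout": 60000}
+ *     }
+ *   }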
+ */ + +package com.webank.wedatasphere.exchangis.datax.plugin.writer.elasticsearchwriter.v6; + +/** + * @author davidhua + * 2019/8/12 + */ +public final class ElasticKey { + /** + * endPoints + */ + static final String ENDPOINTS = "elasticUrls"; + /** + * username + */ + static final String USERNAME = "username"; + /** + * password + */ + static final String PASSWORD = "password"; + /** + * index(index name) + */ + static final String INDEX_NAME = "index"; + /** + * type(index type) + */ + static final String INDEX_TYPE = "type"; + /** + * column(props column) + */ + static final String PROPS_COLUMN = "column"; + /** + * column-> {name:'xx'} + */ + static final String PROPS_COLUMN_NAME = "name"; + /** + * column-> {type:'xxx'} + */ + static final String PROPS_COLUMN_TYPE = "type"; + /** + * column-> {timezone:'xxx'} + */ + static final String PROPS_COLUMN_TIMEZONE = "timezone"; + /** + * format-> {format: 'format'} + */ + static final String PROPS_COLUMN_FORMAT = "format"; + /** + * cleanUp + */ + static final String CLEANUP = "cleanUp"; + /** + * settings(index settings) + */ + static final String SETTINGS = "settings"; + /** + * clientConfig + */ + static final String CLIENT_CONFIG = "clientConfig"; + /** + * clientConfig -> maxPoolSize + */ + static final String CLIENT_CONFIG_POOL_SIZE = "maxPoolSize"; + /** + * clientConfig -> sockTimeout + */ + static final String CLIENT_CONFIG_SOCKET_TIMEOUT = "sockTimeout"; + /** + * clientConfig -> connTimeout + */ + static final String CLIENT_CONFIG_CONN_TIMEOUT = "connTimeout"; + /** + * clientConfig -> timeout + */ + static final String CLIENT_CONFIG_REQ_TIMEOUT = "timeout"; + /** + * clientConfig -> masterTimeout + */ + static final String CLIENT_CONFIG_MASTER_TIMEOUT = "masterTimeout"; + /** + * bulkPerTask + */ + static final String BULK_PER_TASK = "bulkPerTask"; + /** + * bulkActions + */ + static final String BULK_ACTIONS = "bulkActions"; + + /** + * To split multiple level properties name + */ + static final String COLUMN_NAME_SEPARATOR = "columnNameSeparator"; +} diff --git a/exchangis-engines/engines/datax/datax-elasticsearchwriter/src/main/java/com/webank/wedatasphere/exchangis/datax/plugin/writer/elasticsearchwriter/v6/ElasticRestClient.java b/exchangis-engines/engines/datax/datax-elasticsearchwriter/src/main/java/com/webank/wedatasphere/exchangis/datax/plugin/writer/elasticsearchwriter/v6/ElasticRestClient.java new file mode 100644 index 000000000..0417d7958 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-elasticsearchwriter/src/main/java/com/webank/wedatasphere/exchangis/datax/plugin/writer/elasticsearchwriter/v6/ElasticRestClient.java @@ -0,0 +1,393 @@ +/* + * + * Copyright 2020 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
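+ *
+ * A minimal usage sketch of this package-internal client (the endpoint and the
+ * empty maps are placeholders, assuming a reachable local cluster):
+ *
+ *   Map<String, Object> clientConfig = new HashMap<>();
+ *   ElasticRestClient client = ElasticRestClient.custom(
+ *           new String[]{"http://127.0.0.1:9200"}, clientConfig);
+ *   if (!client.existIndices("demo_idx")) {
+ *       client.createIndex("demo_idx", "_doc", new HashMap<>(), new HashMap<>());
+ *   }
+ *   client.close();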
+ */ + +package com.webank.wedatasphere.exchangis.datax.plugin.writer.elasticsearchwriter.v6; + +import com.alibaba.datax.common.exception.DataXException; +import com.webank.wedatasphere.exchangis.datax.util.Json; +import org.apache.http.HttpHost; +import org.apache.http.HttpRequestInterceptor; +import org.apache.http.auth.AuthScope; +import org.apache.http.auth.UsernamePasswordCredentials; +import org.apache.http.client.CredentialsProvider; +import org.apache.http.client.methods.HttpRequestWrapper; +import org.apache.http.impl.client.BasicCredentialsProvider; +import org.apache.http.ssl.SSLContextBuilder; +import org.apache.http.ssl.SSLContexts; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.Version; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; +import org.elasticsearch.action.bulk.BackoffPolicy; +import org.elasticsearch.action.bulk.BulkProcessor; +import org.elasticsearch.action.bulk.BulkRequest; +import org.elasticsearch.action.bulk.BulkResponse; +import org.elasticsearch.action.main.MainResponse; +import org.elasticsearch.action.support.ActiveShardCount; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.client.*; +import org.elasticsearch.client.indices.*; +import org.elasticsearch.common.unit.ByteSizeUnit; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.rest.RestStatus; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import javax.net.ssl.SSLContext; +import java.io.IOException; +import java.io.InputStream; +import java.net.URI; +import java.net.URISyntaxException; +import java.nio.file.Files; +import java.nio.file.Paths; +import java.security.KeyManagementException; +import java.security.KeyStore; +import java.security.KeyStoreException; +import java.security.NoSuchAlgorithmException; +import java.security.cert.CertificateException; +import java.util.*; +import java.util.function.BiConsumer; + +/** + * @author davidhua + * 2019/8/1 + */ +public class ElasticRestClient { + public static final Logger logger = LoggerFactory.getLogger(ElasticRestClient.class); + + private static final int HEAP_BUFFER_SIZE = 100 * 1024 * 1024; + private static final int SOCK_TIMEOUT_IN_MILLISECONDS = 60000; + private static final int CONN_TIMEOUT_IN_MILLISECONDS = 5000; + private static final int REQ_TIMEOUT_IN_MILLISECONDS = 60000; + private static final int MASTER_TIMEOUT_IN_MILLISECONDS = 30000; + + private static final String INCLUDE_TYPE_NAME = "include_type_name"; + private static final String MASTER_TIMEOUT = "master_timeout"; + + static final String FIELD_PROPS = "properties"; + private static final String MAPPING_PATH = "_mapping"; + private static final String MAPPING_TYPE_HEAD = "_mapping_type"; + private static final int DEFAULT_BACKOFF_DELAY_MILLS = 1000; + private static final int DEFAULT_BACKOFF_TIMES = 3; + static final String MAPPING_TYPE_DEFAULT = "_doc"; + + private static final RequestOptions COMMON_OPTIONS; + + private List bulkProcessors = new ArrayList<>(); + private Map clientConfig = new HashMap<>(); + private boolean matchVerison = true; + static{ + RequestOptions.Builder builder = RequestOptions.DEFAULT.toBuilder(); + builder.setHttpAsyncResponseConsumerFactory( + new HttpAsyncResponseConsumerFactory.HeapBufferedResponseConsumerFactory(HEAP_BUFFER_SIZE) + ); + COMMON_OPTIONS = builder.build(); + } + private RestHighLevelClient 
restClient; + + ElasticRestClient(String[] endPoint, String username, String password, SSLContext sslContext, + Map clientConfig) throws IOException { + CredentialsProvider credentialsProvider = new BasicCredentialsProvider(); + credentialsProvider.setCredentials(AuthScope.ANY, + new UsernamePasswordCredentials(username, password)); + initialClient(endPoint, credentialsProvider, sslContext, clientConfig); + } + + ElasticRestClient(String[] endPoints, CredentialsProvider credentialsProvider, + SSLContext sslContext, Map clientConfig) throws IOException { + initialClient(endPoints, credentialsProvider, sslContext, clientConfig); + } + + BulkProcessor createBulk(BulkProcessor.Listener listener, int bulkActions, int bulkPerTask){ + BiConsumer> consumer = ((bulkRequest, bulkResponseActionListener) + -> restClient.bulkAsync(bulkRequest, COMMON_OPTIONS, bulkResponseActionListener)); + BulkProcessor.Builder builder = BulkProcessor.builder(consumer, listener); + builder.setBulkActions(bulkActions); + builder.setBulkSize(new ByteSizeValue(-1, ByteSizeUnit.BYTES)); + builder.setConcurrentRequests(bulkPerTask - 1); + builder.setBackoffPolicy(BackoffPolicy.constantBackoff(TimeValue.timeValueMillis(DEFAULT_BACKOFF_DELAY_MILLS), + DEFAULT_BACKOFF_TIMES)); + BulkProcessor bulkProcessor = builder.build(); + bulkProcessors.add(bulkProcessor); + return bulkProcessor; + } + + void close(){ + for(BulkProcessor bulkProcessor : bulkProcessors){ + bulkProcessor.close(); + } + execute(restClient ->{ + try { + restClient.close(); + }catch(Exception e){ + throw DataXException.asDataXException(ElasticWriterErrorCode.CLOSE_EXCEPTION, e); + } + return null; + }); + } + + boolean existIndices(String... indices){ + return execute(restClient -> restClient.indices().exists(configureTimedRequest(new GetIndexRequest(indices)), + COMMON_OPTIONS)); + } + + boolean deleteIndices(String... 
indices){ + return execute( restClient -> { + AcknowledgedResponse response = restClient.indices() + .delete(new DeleteIndexRequest(indices), COMMON_OPTIONS); + return response.isAcknowledged(); + }); + } + + void createIndex(String indexName, String typeName, Map settings, + Map properties){ + execute( restClient ->{ + if(!existIndices(indexName)) { + createIndex(indexName, settings); + } + putMapping(indexName, typeName, properties); + return null; + }); + } + + Map getProps(String indexName, String typeName){ + return execute( restClient->{ + GetMappingsRequest request = new GetMappingsRequest(); + request.indices(indexName); + RequestOptions.Builder optionsBuilder = COMMON_OPTIONS.toBuilder(); + optionsBuilder.addHeader(MAPPING_TYPE_HEAD, typeName); + GetMappingsResponse response = restClient.indices() + .getMapping(configureTimedRequest(request), optionsBuilder.build()); + Map typeMap = response.mappings().get(indexName).sourceAsMap(); + Map propsMap = typeMap; + if(typeMap.containsKey(typeName)) { + Object type = typeMap.get(typeName); + if (type instanceof Map) { + propsMap = (Map)type; + } + } + Object props = propsMap.get(FIELD_PROPS); + if (props instanceof Map) { + return (Map) props; + } + return null; + }); + } + private void putMapping(String indexName, String typeName, Map properties) throws IOException { + if(null == properties){ + properties = new HashMap<>(); + } + Map mappings = new HashMap<>(1); + mappings.put(FIELD_PROPS, properties); + PutMappingRequest request = new PutMappingRequest(indexName).source(mappings); + RequestOptions.Builder optionsBuilder = COMMON_OPTIONS.toBuilder(); + optionsBuilder.addHeader(MAPPING_TYPE_HEAD, typeName); + AcknowledgedResponse acknowledgedResponse = restClient.indices().putMapping(configureTimedRequest(request), optionsBuilder.build()); + if(!acknowledgedResponse.isAcknowledged()){ + throw DataXException.asDataXException(ElasticWriterErrorCode.PUT_MAPPINGS_ERROR, + "can't put mapping, type:[" + typeName +"], properties:" + Json.toJson(properties, null)); + } + } + + private void createIndex(String indexName, Map settings) throws IOException { + if(null == settings){ + settings = new HashMap<>(1); + } + CreateIndexRequest request = new CreateIndexRequest(indexName) + .settings(settings).waitForActiveShards(ActiveShardCount.DEFAULT); + try { + CreateIndexResponse response = restClient.indices().create(configureTimedRequest(request), COMMON_OPTIONS); + if(!response.isAcknowledged()){ + throw DataXException.asDataXException(ElasticWriterErrorCode.CREATE_INDEX_ERROR, "can't create index:[" + indexName + + "], settings:" + Json.toJson(settings, null) + ", message:[acknowledged=false]"); + } + }catch(ElasticsearchException e){ + if(e.status().getStatus() + != RestStatus.BAD_REQUEST.getStatus()){ + throw e; + } + logger.error("index:["+ indexName +"] maybe already existed, status=" + e.status().getStatus()); + } + } + + private T configureTimedRequest(T request){ + request.setMasterTimeout(TimeValue + .timeValueMillis(Integer + .valueOf(String.valueOf(clientConfig.getOrDefault(ElasticKey.CLIENT_CONFIG_MASTER_TIMEOUT, MASTER_TIMEOUT_IN_MILLISECONDS))) + )); + request.setTimeout(TimeValue + .timeValueMillis(Integer + .valueOf(String.valueOf(clientConfig.getOrDefault(ElasticKey.CLIENT_CONFIG_REQ_TIMEOUT, REQ_TIMEOUT_IN_MILLISECONDS))) + )); + return request; + } + + private R execute(Exec execFunc){ + try { + return execFunc.apply(restClient); + }catch(ElasticsearchException e){ + throw 
DataXException.asDataXException(ElasticWriterErrorCode.REQUEST_ERROR, e.status().name(), e); + }catch (Exception e) { + throw DataXException.asDataXException(ElasticWriterErrorCode.BAD_CONNECT, e); + } + } + + + static ElasticRestClient custom(String[] endPoints, Map clientConfig){ + try { + return new ElasticRestClient(endPoints, null, null, clientConfig); + } catch (IOException e) { + throw DataXException.asDataXException(ElasticWriterErrorCode.BAD_CONNECT, e); + } + } + + static ElasticRestClient custom(String[] endPoints, + String username, String password, Map clientConfig){ + try { + return new ElasticRestClient(endPoints, username, password, null, clientConfig); + } catch (IOException e) { + throw DataXException.asDataXException(ElasticWriterErrorCode.BAD_CONNECT, e); + } + } + + static ElasticRestClient sslCustom(String[] endPoints, + String keyStorePath, String keyStorePass, Map clientConfig){ + try { + return new ElasticRestClient(endPoints, null, buildSSLContext(keyStorePath, keyStorePass) + , clientConfig); + } catch (IOException e) { + throw DataXException.asDataXException(ElasticWriterErrorCode.BAD_CONNECT, e); + } + } + + static ElasticRestClient sslCustom(String[] endPoints, + String username, String password, + String keyStorePath, String keyStorePass, Map clientConfig){ + try{ + return new ElasticRestClient(endPoints, username, password, + buildSSLContext(keyStorePath, keyStorePass), clientConfig); + }catch(IOException e){ + throw DataXException.asDataXException(ElasticWriterErrorCode.BAD_CONNECT, e); + } + } + + private static SSLContext buildSSLContext(String keyStorePath, String keyStorePass){ + try { + KeyStore truststore = KeyStore.getInstance("jks"); + try (InputStream inputStream = Files.newInputStream(Paths.get(new URI(keyStorePath)))) { + truststore.load(inputStream, keyStorePass.toCharArray()); + } catch (URISyntaxException | IOException | NoSuchAlgorithmException | CertificateException e) { + throw DataXException.asDataXException(ElasticWriterErrorCode.BAD_CONNECT, e); + } + SSLContextBuilder sslContextBuilder = SSLContexts.custom() + .loadTrustMaterial(truststore, null); + return sslContextBuilder.build(); + }catch(KeyStoreException | NoSuchAlgorithmException | KeyManagementException e){ + throw DataXException.asDataXException(ElasticWriterErrorCode.BAD_CONNECT, e); + } + } + + private void initialClient(String[] endPoints, CredentialsProvider credentialsProvider, + SSLContext sslContext, Map clientConfig) throws IOException { + if(null == clientConfig){ + clientConfig = Collections.emptyMap(); + } + HttpHost[] httpHosts = new HttpHost[endPoints.length]; + for(int i = 0 ; i < endPoints.length; i++){ + httpHosts[i] = HttpHost.create(endPoints[i]); + } + RestClientBuilder restClientBuilder = RestClient.builder(httpHosts); + Map finalClientConfig = clientConfig; + restClientBuilder.setHttpClientConfigCallback( + httpClientBuilder -> { + if(null != credentialsProvider) { + httpClientBuilder.setDefaultCredentialsProvider(credentialsProvider); + } + if(null != sslContext){ + httpClientBuilder.setSSLContext(sslContext); + } + httpClientBuilder.addInterceptorFirst((HttpRequestInterceptor) (httpRequest, httpContext) -> { + if(httpRequest instanceof HttpRequestWrapper){ + HttpRequestWrapper wrapper = (HttpRequestWrapper)httpRequest; + String uri = wrapper.getURI().toString(); + if(matchVerison) { + uri = uri.replace(INCLUDE_TYPE_NAME + "=false", INCLUDE_TYPE_NAME + "=true"); + }else{ + //when use the different version, remove the INCLUDE_TYPE_NAME + uri = 
uri.replaceAll(INCLUDE_TYPE_NAME + "=[^&]+", "") + .replaceAll(MASTER_TIMEOUT + "=[^&]+", ""); + } + String type = MAPPING_TYPE_DEFAULT; + if (null != wrapper.getFirstHeader(MAPPING_TYPE_HEAD)) { + type = wrapper.getFirstHeader(MAPPING_TYPE_HEAD).getValue(); + } + uri = uri.replace(MAPPING_PATH, MAPPING_PATH + "/" + type); + try { + wrapper.setURI(new URI(uri)); + } catch (URISyntaxException e) { + logger.error(e.getMessage(), e); + } + } + }); + httpClientBuilder.setMaxConnTotal(Integer.parseInt( + String.valueOf(finalClientConfig.getOrDefault(ElasticKey.CLIENT_CONFIG_POOL_SIZE, 1)))); + return httpClientBuilder; + } + ); + restClientBuilder.setRequestConfigCallback( + requestConfigBuilder -> requestConfigBuilder +// .setContentCompressionEnabled(true) + .setConnectTimeout(Integer.parseInt( + String.valueOf(finalClientConfig.getOrDefault(ElasticKey.CLIENT_CONFIG_CONN_TIMEOUT, + CONN_TIMEOUT_IN_MILLISECONDS)))) + .setConnectionRequestTimeout(Integer.parseInt( + String.valueOf(finalClientConfig.getOrDefault(ElasticKey.CLIENT_CONFIG_CONN_TIMEOUT, + CONN_TIMEOUT_IN_MILLISECONDS)))) + .setSocketTimeout(Integer.parseInt( + String.valueOf(finalClientConfig.getOrDefault(ElasticKey.CLIENT_CONFIG_SOCKET_TIMEOUT, + SOCK_TIMEOUT_IN_MILLISECONDS))))); + restClient = new RestHighLevelClient(restClientBuilder); + boolean connect = restClient.ping(COMMON_OPTIONS); + if(! connect){ + throw DataXException.asDataXException(ElasticWriterErrorCode.BAD_CONNECT, "Ping to elastic server failed"); + } + //check the version + checkVersion(); + this.clientConfig = clientConfig; + } + + private void checkVersion() throws IOException { + logger.info("Check the version of ElasticSearch"); + MainResponse response = restClient.info(COMMON_OPTIONS); + Version version = response.getVersion(); + if(!version.isCompatible(Version.CURRENT)){ + throw DataXException.asDataXException(ElasticWriterErrorCode.CONFIG_ERROR, + "ElasticSearch's version is not compatible"); + } + logger.info("The version of ElasticSearch: [" + version.toString() +"]"); + if(version.major != Version.CURRENT.major){ + throw DataXException.asDataXException(ElasticWriterErrorCode.CONFIG_ERROR, + "ElasticSearch's version is not compatible"); + } + } + @FunctionalInterface + interface Exec { + R apply(T t) throws Exception; + } + +} diff --git a/exchangis-engines/engines/datax/datax-elasticsearchwriter/src/main/java/com/webank/wedatasphere/exchangis/datax/plugin/writer/elasticsearchwriter/v6/ElasticWriter.java b/exchangis-engines/engines/datax/datax-elasticsearchwriter/src/main/java/com/webank/wedatasphere/exchangis/datax/plugin/writer/elasticsearchwriter/v6/ElasticWriter.java new file mode 100644 index 000000000..6c1e5ac58 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-elasticsearchwriter/src/main/java/com/webank/wedatasphere/exchangis/datax/plugin/writer/elasticsearchwriter/v6/ElasticWriter.java @@ -0,0 +1,334 @@ +/* + * + * Copyright 2020 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
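+ *
+ * Orientation sketch: the class below follows the standard DataX writer
+ * contract, with the framework driving the calls in this order:
+ *
+ *   Job.init()        : load and validate the job configuration
+ *   Job.prepare()     : resolve columns, optionally delete and (re)create the index
+ *   Job.split(n)      : clone the configuration once per task
+ *   Task.init()       : build the rest client and a BulkProcessor
+ *   Task.startWrite() : convert records to index requests and feed the bulk processor
+ *   Task.destroy()    : close the client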
+ */ + +package com.webank.wedatasphere.exchangis.datax.plugin.writer.elasticsearchwriter.v6; + +import com.alibaba.datax.common.element.Record; +import com.alibaba.datax.common.exception.DataXException; +import com.alibaba.datax.common.plugin.BasicDataReceiver; +import com.alibaba.datax.common.plugin.RecordReceiver; +import com.alibaba.datax.common.plugin.TaskPluginCollector; +import com.alibaba.datax.common.spi.Writer; +import com.alibaba.datax.common.util.Configuration; +import com.alibaba.datax.core.statistics.plugin.task.util.DirtyRecord; +import com.webank.wedatasphere.exchangis.datax.plugin.writer.elasticsearchwriter.v6.column.ElasticColumn; +import com.webank.wedatasphere.exchangis.datax.plugin.writer.elasticsearchwriter.v6.column.ElasticFieldDataType; +import com.webank.wedatasphere.exchangis.datax.common.CryptoUtils; +import com.webank.wedatasphere.exchangis.datax.util.Json; +import org.apache.commons.lang3.StringUtils; +import org.elasticsearch.action.DocWriteRequest; +import org.elasticsearch.action.bulk.BulkItemResponse; +import org.elasticsearch.action.bulk.BulkProcessor; +import org.elasticsearch.action.bulk.BulkRequest; +import org.elasticsearch.action.bulk.BulkResponse; +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.support.WriteRequest; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.*; + +import static com.webank.wedatasphere.exchangis.datax.plugin.writer.elasticsearchwriter.v6.ElasticWriter.Job.DEFAULT_ENDPOINT_SPLIT; +import static com.webank.wedatasphere.exchangis.datax.plugin.writer.elasticsearchwriter.v6.ElasticWriter.Job.WRITE_SIZE; + +/** + * @author davidhua + * 2019/8/15 + */ +public class ElasticWriter extends Writer { + + public static class Job extends Writer.Job{ + private static final Logger log = LoggerFactory.getLogger(Job.class); + + private static final String DEFAULT_ID = "_id"; + static final String WRITE_SIZE = "WRITE_SIZE"; + + static final String DEFAULT_ENDPOINT_SPLIT = ","; + + private Configuration jobConf = null; + private String[] endPoints; + private String userName; + private String password; + + @Override + public void init() { + this.jobConf = super.getPluginJobConf(); + this.validateParams(); + } + @Override + public void prepare() { + ElasticRestClient restClient; + Map clientConfig = jobConf.getMap(ElasticKey.CLIENT_CONFIG); + if(StringUtils.isNotBlank(userName) && StringUtils.isNotBlank(password)){ + restClient = ElasticRestClient.custom(endPoints, userName, + password, clientConfig); + }else{ + restClient = ElasticRestClient.custom(endPoints, clientConfig); + } + String indexName = this.jobConf.getNecessaryValue(ElasticKey.INDEX_NAME, ElasticWriterErrorCode.REQUIRE_VALUE); + String indexType = this.jobConf.getString(ElasticKey.INDEX_TYPE, ""); + String columnNameSeparator = this.jobConf.getString(ElasticKey.COLUMN_NAME_SEPARATOR, ElasticColumn.DEFAULT_NAME_SPLIT); + List rawColumnList = jobConf + .getList(ElasticKey.PROPS_COLUMN); + List resolvedColumnList = new ArrayList<>(); + Map props = resolveColumn(restClient, indexName, indexType, + rawColumnList, resolvedColumnList, columnNameSeparator); + this.jobConf.set(ElasticKey.PROPS_COLUMN, resolvedColumnList); + //clean up + if(jobConf.getBool(ElasticKey.CLEANUP, false) && + restClient.existIndices(indexName)){ + if(!restClient.deleteIndices(indexName)){ + throw DataXException.asDataXException(ElasticWriterErrorCode.DELETE_INDEX_ERROR, "cannot delete index:[" + indexName +"]"); + } + } + //if the index is not 
existed, create it + restClient.createIndex(indexName, indexType, jobConf.getMap(ElasticKey.SETTINGS), + props); + restClient.close(); + } + + @Override + public List split(int mandatoryNumber) { + List configurations = new ArrayList<>(); + for( int i = 0; i < mandatoryNumber; i++){ + configurations.add(this.jobConf.clone()); + } + return configurations; + } + + + @Override + public void destroy() { + + } + + private void validateParams(){ + String endPoints = this.jobConf.getString(ElasticKey.ENDPOINTS); + if(StringUtils.isBlank(endPoints)){ + throw DataXException.asDataXException(ElasticWriterErrorCode.REQUIRE_VALUE, "'endPoints(elasticUrls)' is necessary"); + } + this.endPoints = endPoints.split(DEFAULT_ENDPOINT_SPLIT); + this.userName = this.jobConf.getString(ElasticKey.USERNAME, ""); + this.password = this.jobConf.getString(ElasticKey.PASSWORD, ""); + if(StringUtils.isNotBlank(this.password)){ + try { + this.password = (String)CryptoUtils.string2Object(this.password); + } catch (Exception e) { + throw DataXException.asDataXException(ElasticWriterErrorCode.CONFIG_ERROR, "decrypt password failed"); + } + } + this.jobConf.getNecessaryValue(ElasticKey.INDEX_NAME, ElasticWriterErrorCode.REQUIRE_VALUE); + } + + private Map resolveColumn(ElasticRestClient client, + String index, String type , + List rawColumnList, List outputColumn, + String columnNameSeparator){ + Map properties; + if(null != rawColumnList && !rawColumnList.isEmpty()) { + //allow to custom the fields of properties + properties = new HashMap<>(rawColumnList.size()); + rawColumnList.forEach(columnRaw -> { + String raw = Json.toJson(columnRaw, null); + ElasticColumn column = Json + .fromJson(raw, ElasticColumn.class); + if (StringUtils.isNotBlank(column.getName()) && StringUtils.isNotBlank(column.getType())) { + outputColumn.add(column); + if (!column.getName().equals(DEFAULT_ID) && ElasticFieldDataType.valueOf(column.getType().toUpperCase()) + != ElasticFieldDataType.ALIAS) { + Map property = Json.fromJson(raw, Map.class); + property.remove(ElasticKey.PROPS_COLUMN_NAME); + properties.put(column.getName(), property); + } + } + }); + }else{ + if(!client.existIndices(index)){ + throw DataXException.asDataXException(ElasticWriterErrorCode.INDEX_NOT_EXIST, + "cannot get columns from index:[" + index +"]"); + } + //get properties from index existed + properties = client.getProps(index, type); + resolveColumn(outputColumn, null, properties, columnNameSeparator); + //Reverse outputColumn + Collections.reverse(outputColumn); + } + return properties; + } + + private void resolveColumn(List outputColumn, ElasticColumn column, + Map propsMap, String columnNameSeparator){ + propsMap.forEach((key, value) ->{ + if(value instanceof Map){ + Map metaMap = (Map)value; + if(null != metaMap.get(ElasticKey.PROPS_COLUMN_TYPE)){ + ElasticColumn levelColumn = new ElasticColumn(); + if(null != column) { + levelColumn.setName(column.getName() + columnNameSeparator + key); + }else{ + levelColumn.setName(String.valueOf(key)); + } + levelColumn.setType(String.valueOf(metaMap.get(ElasticKey.PROPS_COLUMN_TYPE))); + if(null != metaMap.get(ElasticKey.PROPS_COLUMN_TIMEZONE)){ + levelColumn.setTimezone(String.valueOf(metaMap.get(ElasticKey.PROPS_COLUMN_TIMEZONE))); + } + if(null != metaMap.get(ElasticKey.PROPS_COLUMN_FORMAT)){ + levelColumn.setFormat(String.valueOf(metaMap.get(ElasticKey.PROPS_COLUMN_FORMAT))); + } + outputColumn.add(levelColumn); + }else if(null != metaMap.get(ElasticRestClient.FIELD_PROPS) + && metaMap.get(ElasticRestClient.FIELD_PROPS) 
instanceof Map){ + ElasticColumn levelColumn = column; + if(null == levelColumn){ + levelColumn = new ElasticColumn(); + levelColumn.setName(String.valueOf(key)); + }else{ + levelColumn.setName(levelColumn.getName() + columnNameSeparator + key); + } + resolveColumn(outputColumn, levelColumn, (Map)metaMap.get(ElasticRestClient.FIELD_PROPS), + columnNameSeparator); + } + } + }); + } + } + + + public static class Task extends Writer.Task{ + private static final Logger logger = LoggerFactory.getLogger(Task.class); + private volatile boolean bulkError; + private Configuration taskConf; + private String indexName; + private String typeName; + private String columnNameSeparator = ElasticColumn.DEFAULT_NAME_SPLIT; + private List columns; + private ElasticRestClient restClient; + private BulkProcessor bulkProcessor; + + @Override + public void init() { + this.taskConf = super.getPluginJobConf(); + indexName = this.taskConf.getString(ElasticKey.INDEX_NAME); + typeName = this.taskConf.getString(ElasticKey.INDEX_TYPE, ElasticRestClient.MAPPING_TYPE_DEFAULT); + columnNameSeparator = this.taskConf.getString(ElasticKey.COLUMN_NAME_SEPARATOR, ElasticColumn.DEFAULT_NAME_SPLIT); + int batchSize = this.taskConf.getInt(ElasticKey.BULK_ACTIONS, 1000); + int bulkPerTask = this.taskConf.getInt(ElasticKey.BULK_PER_TASK, 1); + columns = Json.fromJson(this.taskConf.getString(ElasticKey.PROPS_COLUMN), List.class, ElasticColumn.class); + String userName = this.taskConf.getString(ElasticKey.USERNAME, ""); + String password = this.taskConf.getString(ElasticKey.PASSWORD, ""); + if(StringUtils.isNotBlank(password)){ + try { + password = (String) CryptoUtils.string2Object(password); + } catch (Exception e) { + throw DataXException.asDataXException(ElasticWriterErrorCode.CONFIG_ERROR, "decrypt password failed"); + } + } + String[] endPoints = this.taskConf.getString(ElasticKey.ENDPOINTS).split(DEFAULT_ENDPOINT_SPLIT); + if(StringUtils.isNotBlank(userName) && StringUtils.isNotBlank(password)){ + restClient = ElasticRestClient.custom(endPoints, userName, + password, this.taskConf.getMap(ElasticKey.CLIENT_CONFIG)); + }else{ + restClient = ElasticRestClient.custom(endPoints, this.taskConf.getMap(ElasticKey.CLIENT_CONFIG)); + } + this.bulkProcessor = restClient.createBulk(buildListener(getTaskPluginCollector()), batchSize, bulkPerTask); + } + + @Override + public void startWrite(BasicDataReceiver receiver, Class type) { + if(type.equals(DocWriteRequest.class)){ + logger.info("Begin to write record to ElasticSearch"); + long count = 0; + DocWriteRequest request = null; + while(null != (request = (DocWriteRequest) receiver.getFromReader())){ + request.index(indexName); + request.type(typeName); + if(bulkError){ + throw DataXException.asDataXException(ElasticWriterErrorCode.BULK_REQ_ERROR, ""); + } + this.bulkProcessor.add(request); + count += 1; + } + this.bulkProcessor.close(); + getTaskPluginCollector().collectMessage(WRITE_SIZE, String.valueOf(count)); + logger.info("End to write record to ElasticSearch"); + }else{ + super.startWrite(receiver, type); + } + } + + @Override + public void startWrite(RecordReceiver lineReceiver) { + logger.info("Begin to write record to ElasticSearch"); + Record record = null; + long count = 0; + while(null != (record = lineReceiver.getFromReader())){ + Map data = ElasticColumn.toData(record, columns, columnNameSeparator); + IndexRequest request = new IndexRequest(indexName, typeName); + request.source(data); + if(bulkError){ + throw 
DataXException.asDataXException(ElasticWriterErrorCode.BULK_REQ_ERROR, ""); + } + this.bulkProcessor.add(request); + count += 1; + } + this.bulkProcessor.close(); + getTaskPluginCollector().collectMessage(WRITE_SIZE, String.valueOf(count)); + logger.info("End to write record to ElasticSearch"); + } + + @Override + public void destroy() { + if(null != restClient){ + restClient.close(); + } + } + + + private BulkProcessor.Listener buildListener(final TaskPluginCollector pluginCollector){ + return new BulkProcessor.Listener() { + @Override + public void beforeBulk(long l, BulkRequest bulkRequest) { + bulkRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.NONE); + logger.trace("do_bulk: " + bulkRequest.getDescription()); + } + + @Override + public void afterBulk(long l, BulkRequest bulkRequest, BulkResponse bulkResponse) { + BulkItemResponse[] response = bulkResponse.getItems(); + for (BulkItemResponse itemResponse : response) { + if (itemResponse.isFailed()) { + List message = new ArrayList<>(); + message.add(String.valueOf(itemResponse.getFailure().getStatus().getStatus())); + message.add(itemResponse.getId()); + message.add(itemResponse.getFailureMessage()); + pluginCollector.collectDirtyRecord(new DirtyRecord(), null, Json.toJson(message, null)); + } + } + } + + @Override + public void afterBulk(long l, BulkRequest bulkRequest, Throwable throwable) { + //Ignore interrupted error + if(!(throwable instanceof InterruptedException)){ + logger.error(throwable.getMessage(), throwable); + } + bulkError = true; + } + }; + } + } +} diff --git a/exchangis-engines/engines/datax/datax-elasticsearchwriter/src/main/java/com/webank/wedatasphere/exchangis/datax/plugin/writer/elasticsearchwriter/v6/ElasticWriterErrorCode.java b/exchangis-engines/engines/datax/datax-elasticsearchwriter/src/main/java/com/webank/wedatasphere/exchangis/datax/plugin/writer/elasticsearchwriter/v6/ElasticWriterErrorCode.java new file mode 100644 index 000000000..64a5dfe96 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-elasticsearchwriter/src/main/java/com/webank/wedatasphere/exchangis/datax/plugin/writer/elasticsearchwriter/v6/ElasticWriterErrorCode.java @@ -0,0 +1,58 @@ +/* + * + * Copyright 2020 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
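+ *
+ * Illustrative only: these codes are surfaced through the DataX exception
+ * helper, mirroring the call sites elsewhere in this patch:
+ *
+ *   throw DataXException.asDataXException(
+ *           ElasticWriterErrorCode.REQUIRE_VALUE,
+ *           "'endPoints(elasticUrls)' is necessary");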
+ */ + +package com.webank.wedatasphere.exchangis.datax.plugin.writer.elasticsearchwriter.v6; + +import com.alibaba.datax.common.spi.ErrorCode; + +/** + * @author davidhua + * 2019/8/12 + */ +public enum ElasticWriterErrorCode implements ErrorCode { + /** + * bad connection + */ + BAD_CONNECT("ESWriter-01", "Cannot connect to Elastic server"), + CLOSE_EXCEPTION("ESWriter-02", "Cannot close the Elastic client"), + REQUIRE_VALUE("ESWriter-03", "Necessary value"), + REQUEST_ERROR("ESWriter-04", "Send request error"), + CREATE_INDEX_ERROR("ESWriter-05", "Create index error"), + DELETE_INDEX_ERROR("ESWriter-06", "Delete index error"), + PUT_MAPPINGS_ERROR("ESWriter-07", "Put mappings error"), + MAPPING_TYPE_UNSUPPORTED("ESWriter-08", "Unsupported mapping type"), + BULK_REQ_ERROR("ESWriter-09", "Bulk request error"), + INDEX_NOT_EXIST("ESWriter-10", "Index not exist"), + CONFIG_ERROR("ESWriter-11", "Config error"); + + private final String code; + private final String description; + + ElasticWriterErrorCode(String code, String description){ + this.code = code; + this.description = description; + } + @Override + public String getCode() { + return code; + } + + @Override + public String getDescription() { + return description; + } +} diff --git a/exchangis-engines/engines/datax/datax-elasticsearchwriter/src/main/java/com/webank/wedatasphere/exchangis/datax/plugin/writer/elasticsearchwriter/v6/column/CustomProcessor.java b/exchangis-engines/engines/datax/datax-elasticsearchwriter/src/main/java/com/webank/wedatasphere/exchangis/datax/plugin/writer/elasticsearchwriter/v6/column/CustomProcessor.java new file mode 100644 index 000000000..b747eb2d2 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-elasticsearchwriter/src/main/java/com/webank/wedatasphere/exchangis/datax/plugin/writer/elasticsearchwriter/v6/column/CustomProcessor.java @@ -0,0 +1,34 @@ +/* + * + * Copyright 2020 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
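+ *
+ * A hypothetical filled-in variant of the empty template below (it assumes the
+ * first column carries the document id and the second a payload field; the
+ * names are illustrative, only the Processor contract comes from datax-core):
+ *
+ *   @Override
+ *   public DocWriteRequest process(List<Object> columnData) throws Exception {
+ *       Map<String, Object> source = new HashMap<>();
+ *       source.put("value", columnData.size() > 1 ? columnData.get(1) : null);
+ *       // index and type are filled in later by ElasticWriter.Task.startWrite
+ *       return new IndexRequest().id(String.valueOf(columnData.get(0))).source(source);
+ *   }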
+ */ + +package com.webank.wedatasphere.exchangis.datax.plugin.writer.elasticsearchwriter.v6.column; + +import com.webank.wedatasphere.exchangis.datax.core.processor.Processor; +import org.elasticsearch.action.DocWriteRequest; + +import java.util.List; + +/** + * @author davidhua + * 2019/8/27 + */ +public class CustomProcessor implements Processor{ + @Override + public DocWriteRequest process(List columnData) throws Exception { + return null; + } +} diff --git a/exchangis-engines/engines/datax/datax-elasticsearchwriter/src/main/java/com/webank/wedatasphere/exchangis/datax/plugin/writer/elasticsearchwriter/v6/column/ElasticColumn.java b/exchangis-engines/engines/datax/datax-elasticsearchwriter/src/main/java/com/webank/wedatasphere/exchangis/datax/plugin/writer/elasticsearchwriter/v6/column/ElasticColumn.java new file mode 100644 index 000000000..07502081c --- /dev/null +++ b/exchangis-engines/engines/datax/datax-elasticsearchwriter/src/main/java/com/webank/wedatasphere/exchangis/datax/plugin/writer/elasticsearchwriter/v6/column/ElasticColumn.java @@ -0,0 +1,181 @@ +/* + * + * Copyright 2020 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.exchangis.datax.plugin.writer.elasticsearchwriter.v6.column; + + + +import com.alibaba.datax.common.element.Column; +import com.alibaba.datax.common.element.Record; +import com.alibaba.datax.common.exception.DataXException; +import com.webank.wedatasphere.exchangis.datax.plugin.writer.elasticsearchwriter.v6.ElasticWriterErrorCode; +import com.webank.wedatasphere.exchangis.datax.util.Json; +import org.apache.commons.lang3.StringUtils; +import org.joda.time.DateTime; +import org.joda.time.DateTimeZone; +import org.joda.time.format.DateTimeFormat; +import org.joda.time.format.DateTimeFormatter; + +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +/** + * @author davidhua + * 2019/8/15 + */ +public class ElasticColumn { + + private static final String ARRAY_SUFFIX = "]"; + private static final String ARRAY_PREFIX = "["; + + public static final String DEFAULT_NAME_SPLIT = "\\|"; + + private String name; + + private String type; + + private String timezone; + + private String format; + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public String getType() { + return type; + } + + public void setType(String type) { + this.type = type; + } + + + public String getTimezone() { + return timezone; + } + + public void setTimezone(String timezone) { + this.timezone = timezone; + } + + public String getFormat() { + return format; + } + + public void setFormat(String format) { + this.format = format; + } + + public static Map toData(Record record, List colConfs, String columnNameSeparator){ + Map outputData = new HashMap<>(record.getColumnNumber()); + for(int i = 0; i < record.getColumnNumber(); i++){ + Column column = record.getColumn(i); + ElasticColumn config = colConfs.get(i); + String columnName = config.getName(); + Map 
innerOutput = outputData; + String[] levelColumns = columnName.split(columnNameSeparator); + if(levelColumns.length > 1) { + columnName = levelColumns[levelColumns.length - 1]; + for (int j = 0; j < levelColumns.length - 1 ; j++) { + Map data = new HashMap<>(); + innerOutput.put(levelColumns[j], data); + innerOutput = data; + } + } + ElasticFieldDataType type = ElasticFieldDataType.valueOf(config.getType().toUpperCase()); + switch(type){ + case IP: + case IP_RANGE: + case KEYWORD: + case TEXT: + innerOutput.put(columnName, column.asString()); + break; + case GEO_POINT: + case GEO_SHAPE: + case NESTED: + case OBJECT: + innerOutput.put(columnName, parseObject(column.asString())); + break; + case LONG_RANGE: + case LONG: + innerOutput.put(columnName, column.asLong()); + break; + case INTEGER: + case INTEGER_RANGE: + case SHORT: + innerOutput.put(columnName, column.asBigInteger()); + break; + case FLOAT: + case FLOAT_RANGE: + case HALF_FLOAT: + case SCALED_FLOAT: + case DOUBLE_RANGE: + case DOUBLE: + innerOutput.put(columnName, column.asDouble()); + break; + case BINARY: + case BYTE: + innerOutput.put(columnName, column.asBytes()); + break; + case BOOLEAN: + innerOutput.put(columnName, column.asBoolean()); + break; + case DATE_RANGE: + case DATE: + innerOutput.put(columnName, parseDate(config, column)); + break; + default: + throw DataXException.asDataXException(ElasticWriterErrorCode.MAPPING_TYPE_UNSUPPORTED, + "unsupported type:[" +config.getType() + "]"); + } + } + return outputData; + } + + private static Object parseObject(String rawData){ + if(rawData.startsWith(ARRAY_PREFIX) && + rawData.endsWith(ARRAY_SUFFIX)){ + return Json.fromJson(rawData, Object.class); + } + return Json.fromJson(rawData, Map.class); + } + + private static String parseDate(ElasticColumn config, Column column){ + DateTimeZone dateTimeZone = DateTimeZone.getDefault(); + if(StringUtils.isNotBlank(config.getTimezone())){ + dateTimeZone = DateTimeZone.forID(config.getTimezone()); + } + String output; + if(column.getType() == Column.Type.DATE){ + output = new DateTime(column.asLong(), dateTimeZone).toString(); + }else if(StringUtils.isNotBlank(config.getFormat())){ + DateTimeFormatter formatter = DateTimeFormat.forPattern(config.getFormat()); + output = formatter.withZone(dateTimeZone) + .parseDateTime(column.asString()).toString(); + }else{ + output = column.asString(); + } + return output; + } + +} diff --git a/exchangis-engines/engines/datax/datax-elasticsearchwriter/src/main/java/com/webank/wedatasphere/exchangis/datax/plugin/writer/elasticsearchwriter/v6/column/ElasticFieldDataType.java b/exchangis-engines/engines/datax/datax-elasticsearchwriter/src/main/java/com/webank/wedatasphere/exchangis/datax/plugin/writer/elasticsearchwriter/v6/column/ElasticFieldDataType.java new file mode 100644 index 000000000..a03ed6529 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-elasticsearchwriter/src/main/java/com/webank/wedatasphere/exchangis/datax/plugin/writer/elasticsearchwriter/v6/column/ElasticFieldDataType.java @@ -0,0 +1,125 @@ +/* + * + * Copyright 2020 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.exchangis.datax.plugin.writer.elasticsearchwriter.v6.column; + +/** + * @author davidhua + * 2019/8/15 + */ +public enum ElasticFieldDataType { + /** + * type:text + */ + TEXT, + /** + * object + */ + OBJECT, + /** + * type:long(numeric) + */ + LONG, + /** + * type:integer(numeric) + */ + INTEGER, + /** + * type:short(numeric) + */ + SHORT, + /** + * type:byte(numeric) + */ + BYTE, + /** + * type:double(numeric) + */ + DOUBLE, + /** + * type:float(numeric) + */ + FLOAT, + /** + * type:half_float(numeric) + */ + HALF_FLOAT, + /** + * type:scaled_float(numeric) + */ + SCALED_FLOAT, + /** + * type:alias, alternate name for a field + */ + ALIAS, + /** + * type:binary + */ + BINARY, + /** + * type:boolean + */ + BOOLEAN, + /** + * type:date + */ + DATE, + /** + * type:geo_point + */ + GEO_POINT, + /** + * type:geo_shape + */ + GEO_SHAPE, + /** + * type:integer_range, 32-bits + */ + INTEGER_RANGE, + /** + * type:ip + */ + IP, + /** + * type:keyword + */ + KEYWORD, + /** + * type:nested + */ + NESTED, + /** + * type:float_range, 32-bits IEEE 754 + */ + FLOAT_RANGE, + /** + * type:long_range, 64-bits + */ + LONG_RANGE, + /** + * type:double_range, 64-bits IEEE 754 + */ + DOUBLE_RANGE, + /** + * type:date_range, unsigned 64-bit integer milliseconds + */ + DATE_RANGE, + /** + * type:ip_range, IPv4 or IPv6 + */ + IP_RANGE +} diff --git a/exchangis-engines/engines/datax/datax-elasticsearchwriter/src/main/resources/plugin.json b/exchangis-engines/engines/datax/datax-elasticsearchwriter/src/main/resources/plugin.json new file mode 100644 index 000000000..93f67d893 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-elasticsearchwriter/src/main/resources/plugin.json @@ -0,0 +1,6 @@ +{ + "name": "elasticsearchwriter", + "class": "com.webank.wedatasphere.exchangis.datax.plugin.writer.elasticsearchwriter.v6.ElasticWriter", + "description": "elastic", + "developer": "webank" +} \ No newline at end of file diff --git a/exchangis-engines/engines/datax/datax-ftpreader/pom.xml b/exchangis-engines/engines/datax/datax-ftpreader/pom.xml new file mode 100644 index 000000000..8912e7745 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-ftpreader/pom.xml @@ -0,0 +1,93 @@ + + + + exchangis-engine-datax + com.webank.wedatasphere.exchangis + ${revision} + ../pom.xml + + 4.0.0 + jar + 3.0.0-Plus-2 + datax-ftpreader + + + com.webank.wedatasphere.exchangis + datax-core + ${datax.engine.version} + provided + + + slf4j-log4j12 + org.slf4j + + + + + org.slf4j + slf4j-api + provided + + + commons-io + commons-io + provided + + + org.apache.commons + commons-lang3 + provided + + + commons-codec + commons-codec + ${commons-codec} + provided + + + ch.qos.logback + logback-classic + provided + + + com.jcraft + jsch + + 0.1.54 + + + commons-net + commons-net + 3.3 + + + + + + org.apache.maven.plugins + maven-assembly-plugin + 2.2.1 + + + assemble + + single + + + install + + + + false + false + + ${basedir}/src/main/assembly/package.xml + + plugin + + + + + \ No newline at end of file diff --git a/exchangis-engines/engines/datax/datax-ftpreader/src/main/assembly/package.xml b/exchangis-engines/engines/datax/datax-ftpreader/src/main/assembly/package.xml new file mode 100644 index 000000000..c0d75c00b --- /dev/null +++ b/exchangis-engines/engines/datax/datax-ftpreader/src/main/assembly/package.xml @@ -0,0 +1,33 @@ + + ftpreader + + dir + + false + + + src/main/resources + + 
plugin.json
+                plugin_job_template.json
+            
+            plugin/reader/ftpreader
+        
+        
+            target
+            
+                datax-ftpreader-${datax.engine.version}.jar
+            
+            plugin/reader/ftpreader
+        
+    
+    
+        
+            false
+            plugin/reader/ftpreader/libs
+            runtime
+        
+    
\ No newline at end of file
diff --git a/exchangis-engines/engines/datax/datax-ftpreader/src/main/java/com/alibaba/datax/plugin/reader/ftpreader/Constant.java b/exchangis-engines/engines/datax/datax-ftpreader/src/main/java/com/alibaba/datax/plugin/reader/ftpreader/Constant.java
new file mode 100644
index 000000000..64df35955
--- /dev/null
+++ b/exchangis-engines/engines/datax/datax-ftpreader/src/main/java/com/alibaba/datax/plugin/reader/ftpreader/Constant.java
@@ -0,0 +1,14 @@
+package com.alibaba.datax.plugin.reader.ftpreader;
+
+
+public class Constant {
+    public static final String SOURCE_FILES = "sourceFiles";
+
+    public static final int DEFAULT_FTP_PORT = 21;
+    public static final int DEFAULT_SFTP_PORT = 22;
+    public static final int DEFAULT_TIMEOUT = 60000;
+    public static final int DEFAULT_MAX_TRAVERSAL_LEVEL = 100;
+    public static final String DEFAULT_FTP_CONNECT_PATTERN = "PASV";
+
+
+}
diff --git a/exchangis-engines/engines/datax/datax-ftpreader/src/main/java/com/alibaba/datax/plugin/reader/ftpreader/FtpConnParams.java b/exchangis-engines/engines/datax/datax-ftpreader/src/main/java/com/alibaba/datax/plugin/reader/ftpreader/FtpConnParams.java
new file mode 100644
index 000000000..e3f2f8db9
--- /dev/null
+++ b/exchangis-engines/engines/datax/datax-ftpreader/src/main/java/com/alibaba/datax/plugin/reader/ftpreader/FtpConnParams.java
@@ -0,0 +1,101 @@
+package com.alibaba.datax.plugin.reader.ftpreader;
+
+
+import java.util.function.Consumer;
+
+/**
+ * @author davidhua
+ * 2019/7/4
+ */
+public class FtpConnParams {
+
+    private String protocol;
+
+    private String host;
+
+    private int port;
+
+    private int timeout;
+
+    private String username;
+
+    private String prvKeyPath;
+
+    private String password;
+
+    private String connectPattern;
+
+    private FtpConnParams() {
+
+    }
+
+    public static FtpConnParams compose(Consumer<FtpConnParams> function) {
+        FtpConnParams ftpConnParams = new FtpConnParams();
+        function.accept(ftpConnParams);
+        return ftpConnParams;
+    }
+
+    public String getProtocol() {
+        return protocol;
+    }
+
+    public void setProtocol(String protocol) {
+        this.protocol = protocol;
+    }
+
+    public String getHost() {
+        return host;
+    }
+
+    public void setHost(String host) {
+        this.host = host;
+    }
+
+    public int getPort() {
+        return port;
+    }
+
+    public void setPort(int port) {
+        this.port = port;
+    }
+
+    public String getUsername() {
+        return username;
+    }
+
+    public void setUsername(String username) {
+        this.username = username;
+    }
+
+    public String getPrvKeyPath() {
+        return prvKeyPath;
+    }
+
+    public void setPrvKeyPath(String prvKeyPath) {
+        this.prvKeyPath = prvKeyPath;
+    }
+
+    public String getPassword() {
+        return password;
+    }
+
+    public void setPassword(String password) {
+        this.password = password;
+    }
+
+    public String getConnectPattern() {
+        return connectPattern;
+    }
+
+    public void setConnectPattern(String connectPattern) {
+        this.connectPattern = connectPattern;
+    }
+
+    public int getTimeout() {
+        return timeout;
+    }
+
+    public void setTimeout(int timeout) {
+        this.timeout = timeout;
+    }
+}
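+// Usage sketch (all values hypothetical) of the compose(...) factory, mirroring how the
+// reader and writer jobs in this patch build their connection parameters:
+//
+//     FtpConnParams params = FtpConnParams.compose(p -> {
+//         p.setProtocol("sftp");            // "ftp" or "sftp"
+//         p.setHost("sftp.example.com");    // placeholder host
+//         p.setPort(22);
+//         p.setUsername("loader");
+//         p.setPassword("******");          // or p.setPrvKeyPath("...") for key-based login
+//         p.setTimeout(60000);
+//     });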
diff --git a/exchangis-engines/engines/datax/datax-ftpreader/src/main/java/com/alibaba/datax/plugin/reader/ftpreader/FtpHelper.java b/exchangis-engines/engines/datax/datax-ftpreader/src/main/java/com/alibaba/datax/plugin/reader/ftpreader/FtpHelper.java
new file mode 100644
index 000000000..e80a19305
--- /dev/null
+++ b/exchangis-engines/engines/datax/datax-ftpreader/src/main/java/com/alibaba/datax/plugin/reader/ftpreader/FtpHelper.java
@@ -0,0 +1,124 @@
+package com.alibaba.datax.plugin.reader.ftpreader;
+
+import com.alibaba.datax.plugin.unstructuredstorage.PathMeta;
+import com.alibaba.datax.plugin.unstructuredstorage.reader.UnstructuredStorageReaderUtil;
+import org.apache.commons.io.IOUtils;
+
+import java.io.InputStream;
+import java.util.HashSet;
+import java.util.List;
+
+public abstract class FtpHelper {
+
+    /**
+     * Log in to the ftp/sftp server.
+     * @param ftpConnParams connection parameters
+     */
+    public abstract void loginFtpServer(FtpConnParams ftpConnParams);
+
+    /**
+     * Disconnect from the ftp/sftp server.
+     */
+    public abstract void logoutFtpServer();
+
+    /**
+     * Check whether the given path is an existing directory.
+     * @param directoryPath directory path
+     * @return true if the path exists and is a directory
+     */
+    public abstract boolean isDirExist(String directoryPath);
+
+    /**
+     * Check whether the given path is an existing regular file.
+     * @param filePath file path
+     * @return true if the path exists and is a regular file
+     */
+    public abstract boolean isFileExist(String filePath);
+
+    /**
+     * Check whether the given path is a symbolic link.
+     * @param filePath file path
+     * @return true if the path is a symbolic link
+     */
+    public abstract boolean isSymbolicLink(String filePath);
+
+    /**
+     * Recursively collect the absolute paths of all matched files under the given path.
+     * @param directoryPath root path (may contain '*' or '?' wildcards)
+     * @param parentLevel recursion level of the parent directory (0 on the first call)
+     * @param maxTraversalLevel maximum recursion level allowed
+     * @return absolute paths of the matched files
+     */
+    public abstract HashSet<String> getListFiles(String directoryPath, int parentLevel, int maxTraversalLevel);
+
+    /**
+     * Open an input stream on the given file path.
+     * @param filePath file path
+     * @return input stream of the file content
+     */
+    public abstract InputStream getInputStream(String filePath);
+
+    /**
+     * Get the file's last modified time.
+     * @param filePath file path
+     * @return last modified time in epoch milliseconds
+     */
+    public abstract long getLastModifyTIme(String filePath);
+
+    /**
+     * Delete the given file.
+     * @param filePath file path
+     */
+    public abstract void deleteFile(String filePath);
+
+    /**
+     * Collect the absolute and relative paths of all matched files under the given path list.
+     * @param srcPaths source paths
+     * @param parentLevel recursion level of the parent directory (0 on the first call)
+     * @param maxTraversalLevel maximum recursion level allowed
+     * @return metadata (absolute + relative path) of the matched files
+     */
+    public HashSet<PathMeta> getAllFiles(List<String> srcPaths, int parentLevel, int maxTraversalLevel) {
+        HashSet<PathMeta> sourceAllFiles = new HashSet<>();
+        if (!srcPaths.isEmpty()) {
+            for (String eachPath : srcPaths) {
+                HashSet<String> listFiles = getListFiles(eachPath, parentLevel, maxTraversalLevel);
+                String parent = eachPath;
+                if (eachPath.contains("*") || eachPath.contains("?")) {
+                    parent = UnstructuredStorageReaderUtil.getRegexPathParentPath(eachPath);
+                }
+                for (String file : listFiles) {
+                    if (file.equals(parent)) {
+                        sourceAllFiles.add(new PathMeta(file, parent.substring(parent.lastIndexOf(IOUtils.DIR_SEPARATOR))));
+                    } else {
+                        sourceAllFiles.add(new PathMeta(file, file.substring(file.indexOf(parent) + parent.length())));
+                    }
+                }
+            }
+        }
+        return sourceAllFiles;
+    }
+
+}
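+// Illustration of the PathMeta pairs produced by getAllFiles(...) above, assuming
+// getRegexPathParentPath("/data/in/*.csv") returns "/data/in/" (all paths hypothetical):
+//   file "/data/in/2019/part-0.csv" -> absolute "/data/in/2019/part-0.csv", relative "2019/part-0.csv"
+//   a listed path equal to the parent itself falls back to its last path segment as the relative path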
diff --git a/exchangis-engines/engines/datax/datax-ftpreader/src/main/java/com/alibaba/datax/plugin/reader/ftpreader/FtpReader.java b/exchangis-engines/engines/datax/datax-ftpreader/src/main/java/com/alibaba/datax/plugin/reader/ftpreader/FtpReader.java
new file mode 100644
index 000000000..a99e683a1
--- /dev/null
+++ b/exchangis-engines/engines/datax/datax-ftpreader/src/main/java/com/alibaba/datax/plugin/reader/ftpreader/FtpReader.java
@@ -0,0 +1,398 @@
+package com.alibaba.datax.plugin.reader.ftpreader;
+
+import com.alibaba.datax.common.exception.DataXException;
+import com.alibaba.datax.common.plugin.RecordSender;
+import com.alibaba.datax.common.spi.Reader;
+import com.alibaba.datax.common.util.Configuration;
+import com.webank.wedatasphere.exchangis.datax.common.GsonUtil;
+import com.webank.wedatasphere.exchangis.datax.core.job.meta.MetaSchema;
+import com.webank.wedatasphere.exchangis.datax.core.transport.stream.ChannelOutput;
+import com.webank.wedatasphere.exchangis.datax.core.transport.stream.StreamMeta;
+import com.alibaba.datax.core.util.FrameworkErrorCode;
+import com.alibaba.datax.plugin.unstructuredstorage.PathMeta;
+import com.alibaba.datax.plugin.unstructuredstorage.reader.UnstructuredStorageReaderUtil;
+import com.webank.wedatasphere.exchangis.datax.util.Json;
+import org.apache.commons.io.IOUtils;
+import org.apache.commons.lang3.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.*;
+import java.util.*;
+
+import static com.alibaba.datax.plugin.unstructuredstorage.reader.Key.INCR_BEGIN_TIME;
+import static com.alibaba.datax.plugin.unstructuredstorage.reader.Key.INCR_END_TIME;
+
+public class FtpReader extends Reader {
+    public static class Job extends Reader.Job {
+
+        private static final String SIGNAL_FILE_NAME = ".ok";
+
+        private static final String DEFAULT_META_FILE_PATH = ".meta";
+
+        private static final long WAIT_SIGNAL_SLEEP_INTERVAL = 3000;
+
+        private static final Logger LOG = LoggerFactory.getLogger(Job.class);
+
+        private Configuration originConfig = null;
+
+        private List<String> path = null;
+
+        private HashSet<PathMeta> sourceFiles;
+
+        /**
+         * ftp connection parameters
+         */
+        private FtpConnParams connParams;
+
+        private int maxTraversalLevel;
+
+        private long incrBeginTime = 0;
+
+        private long incrEndTime = 0;
+
+        private FtpHelper ftpHelper = null;
+
+        private String signalFilePath;
+
+        @Override
+        public boolean isSupportStream() {
+            return true;
+        }
+
+        @Override
+        public void init() {
+            this.originConfig = this.getPluginJobConf();
+            this.sourceFiles = new HashSet<>();
+            this.validateParameter();
+            UnstructuredStorageReaderUtil.validateParameter(this.originConfig);
+            if ("sftp".equals(connParams.getProtocol())) {
+                // sftp protocol
+                this.connParams.setPort(originConfig.getInt(Key.PORT, Constant.DEFAULT_SFTP_PORT));
+                this.ftpHelper = new SftpHelper();
+            } else if ("ftp".equals(connParams.getProtocol())) {
+                // ftp protocol
+                this.connParams.setPort(originConfig.getInt(Key.PORT, Constant.DEFAULT_FTP_PORT));
+                this.ftpHelper = new StandardFtpHelper();
+            }
+            ftpHelper.loginFtpServer(this.connParams);
+            for (String eachPath : path) {
+                boolean notFound = !ftpHelper.isDirExist(eachPath) &&
+                        (eachPath.endsWith(String.valueOf(IOUtils.DIR_SEPARATOR_UNIX)) || !ftpHelper.isFileExist(eachPath));
+                if (notFound) {
+                    String message = String.format("cannot find the path: [%s], please check your configuration", eachPath);
+                    LOG.error(message);
+                    throw DataXException.asDataXException(FtpReaderErrorCode.PATH_NOT_FOUND, message);
+                }
+            }
+        }
+
+        @Override
+        public MetaSchema syncMetaData() {
+            // should wait for the signal first
+            waitForSignal();
+            return getMetaSchema();
+        }
+
+        private void validateParameter() {
+            this.connParams = FtpConnParams.compose(connParams -> {
+                String protocol = this.originConfig.getNecessaryValue(Key.PROTOCOL, FtpReaderErrorCode.REQUIRED_VALUE);
+                boolean protocolTag = "ftp".equals(protocol) || "sftp".equals(protocol);
+                if (!protocolTag) {
+                    throw DataXException.asDataXException(FtpReaderErrorCode.ILLEGAL_VALUE,
+                            String.format("only the ftp and sftp protocols are supported, the configured protocol is not: [%s]", protocol));
+                }
+                connParams.setProtocol(protocol);
+                connParams.setHost(this.originConfig.getNecessaryValue(Key.HOST, FtpReaderErrorCode.REQUIRED_VALUE));
+                connParams.setUsername(this.originConfig.getNecessaryValue(Key.USERNAME, FtpReaderErrorCode.REQUIRED_VALUE));
+                connParams.setPrvKeyPath(this.originConfig.getString(Key.PRV_KEY_PATH, ""));
+                connParams.setPassword(this.originConfig.getString(Key.PASSWORD, ""));
+                if (StringUtils.isBlank(connParams.getPrvKeyPath()) && StringUtils.isBlank(connParams.getPassword())) {
+                    throw DataXException.asDataXException(FtpReaderErrorCode.REQUIRED_VALUE, "you need to set private key path or password");
+                }
+                connParams.setTimeout(this.originConfig.getInt(Key.TIMEOUT, Constant.DEFAULT_TIMEOUT));
+                // only the PORT and PASV connect patterns are supported
+                String connectPattern = this.originConfig.getUnnecessaryValue(Key.CONNECTPATTERN, Constant.DEFAULT_FTP_CONNECT_PATTERN, null);
+                boolean connectPatternTag = "PORT".equals(connectPattern) || "PASV".equals(connectPattern);
+                if (!connectPatternTag) {
+                    throw DataXException.asDataXException(FtpReaderErrorCode.ILLEGAL_VALUE,
+                            String.format("the configured ftp connect pattern is not supported: [%s]", connectPattern));
+                } else {
+                    this.originConfig.set(Key.CONNECTPATTERN, connectPattern);
+                }
+                connParams.setConnectPattern(connectPattern);
+            });
+            this.maxTraversalLevel = originConfig.getInt(Key.MAXTRAVERSALLEVEL, Constant.DEFAULT_MAX_TRAVERSAL_LEVEL);
+            // path check
+            String pathInString = this.originConfig.getNecessaryValue(Key.PATH, FtpReaderErrorCode.REQUIRED_VALUE);
+            if (!pathInString.startsWith("[") && !pathInString.endsWith("]")) {
+                path = new ArrayList<>();
+                path.add(pathInString);
+            } else {
+                path = this.originConfig.getList(Key.PATH, String.class);
+                if (null == path || path.size() == 0) {
+                    throw DataXException.asDataXException(FtpReaderErrorCode.REQUIRED_VALUE, "you should specify the source directory or file to read");
+                }
+                if (path.size() > 1) {
+                    throw DataXException.asDataXException(FtpReaderErrorCode.ILLEGAL_VALUE, "you are allowed to add only one path");
+                }
+                for (String eachPath : path) {
+                    if (!eachPath.startsWith("/")) {
+                        String message = String.format("please check the parameter path: [%s], it should be an absolute path", eachPath);
+                        LOG.error(message);
+                        throw DataXException.asDataXException(FtpReaderErrorCode.ILLEGAL_VALUE, message);
+                    }
+                }
+            }
+            this.incrBeginTime = this.originConfig.getLong(INCR_BEGIN_TIME, 0);
+            this.incrEndTime = this.originConfig.getLong(INCR_END_TIME, 0);
+        }
+
+        @Override
+        public void prepare() {
+            LOG.debug("prepare() begin...");
+            waitForSignal();
+            this.sourceFiles = ftpHelper.getAllFiles(path, 0, maxTraversalLevel);
+            LOG.info(String.format("find [%s] files in source path", this.sourceFiles.size()));
+            Iterator<PathMeta> iterator = this.sourceFiles.iterator();
+            while (iterator.hasNext()) {
+                PathMeta pathMeta = iterator.next();
+                String absolutePath = pathMeta.getAbsolute();
+                long modifyTime = ftpHelper.getLastModifyTIme(absolutePath);
+                if (incrEndTime > 0) {
+                    if (modifyTime <= incrBeginTime || modifyTime > incrEndTime) {
+                        iterator.remove();
+                    }
+                }
+            }
+            LOG.info(String.format("number of files to be read: [%s]", this.sourceFiles.size()));
+        }
+
+        @Override
+        public void post() {
+            removeSignal();
+        }
+
+        @Override
+        public void destroy() {
+            try {
+                this.ftpHelper.logoutFtpServer();
+            } catch (Exception e) {
+                String message = String.format(
+                        "failed to close the connection to the ftp server: [%s] host=%s, username=%s, port=%s",
+                        e.getMessage(), connParams.getHost(), connParams.getUsername(), connParams.getPort());
+                LOG.error(message, e);
+            }
+        }
+
+        @Override
+        public List<Configuration> split(int adviceNumber) {
+            LOG.debug("split() begin...");
+            List<Configuration> readerSplitConfigs = new ArrayList<>();
+
+            // warn: each slice pulls exactly one file
+            // int splitNumber = adviceNumber;
+            int splitNumber = this.sourceFiles.size();
+            if (0 == splitNumber) {
+                return new ArrayList<>();
+            }
+
+            List<List<PathMeta>> splitedSourceFiles = this.splitSourceFiles(new ArrayList<>(this.sourceFiles), splitNumber);
+            for (List<PathMeta> files : splitedSourceFiles) {
+                Configuration splitedConfig = this.originConfig.clone();
+                splitedConfig.set(Constant.SOURCE_FILES, files);
+                readerSplitConfigs.add(splitedConfig);
+            }
+            LOG.debug("split() ok and end...");
+            return readerSplitConfigs;
+        }
+
+        private <T> List<List<T>> splitSourceFiles(final List<T> sourceList, int adviceNumber) {
+            List<List<T>> splitedList = new ArrayList<>();
+            int averageLength = sourceList.size() / adviceNumber;
+            averageLength = averageLength == 0 ? 1 : averageLength;
+
+            for (int begin = 0, end = 0; begin < sourceList.size(); begin = end) {
+                end = begin + averageLength;
+                if (end > sourceList.size()) {
+                    end = sourceList.size();
+                }
+                splitedList.add(sourceList.subList(begin, end));
+            }
+            return splitedList;
+        }
+
+        private void waitForSignal() {
+            if (originConfig.getBool(Key.TRANSIT, true)) {
+                // get the first path to check whether the SIGNAL_FILE exists
+                String pathFirst = getFirstPath();
+                pathFirst += SIGNAL_FILE_NAME;
+                signalFilePath = pathFirst;
+                if (!ftpHelper.isFileExist(pathFirst)) {
+                    LOG.info("check and wait for the creation of SIGNAL_FILE , path: {} ......", pathFirst);
+                    do {
+                        try {
+                            Thread.sleep(WAIT_SIGNAL_SLEEP_INTERVAL);
+                        } catch (InterruptedException e) {
+                            Thread.currentThread().interrupt();
+                            throw DataXException.asDataXException(FtpReaderErrorCode.RUNTIME_EXCEPTION, "interrupted while waiting for signal");
+                        }
+                    } while (!ftpHelper.isFileExist(pathFirst));
+                }
+            }
+        }
+
+        private void removeSignal() {
+            boolean removable = StringUtils.isNotEmpty(signalFilePath) &&
+                    ftpHelper.isFileExist(signalFilePath) && originConfig.getBool(Key.TRANSIT, true);
+            if (removable) {
+                ftpHelper.deleteFile(signalFilePath);
+            }
+        }
+
+        private MetaSchema getMetaSchema() {
+            String path = getFirstPath() + this.originConfig.getString(Key.META_FILE_PATH, DEFAULT_META_FILE_PATH);
+            try {
+                if (ftpHelper.isFileExist(path)) {
+                    InputStream inputStream = ftpHelper.getInputStream(path);
+                    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
+                    byte[] buffer = new byte[8 * 1024];
+                    int size;
+                    while ((size = inputStream.read(buffer)) > 0) {
+                        outputStream.write(buffer, 0, size);
+                    }
+                    String metaSer = outputStream.toString(this.originConfig.getString(
+                            com.alibaba.datax.plugin.unstructuredstorage.reader.Key.ENCODING,
+                            com.alibaba.datax.plugin.unstructuredstorage.reader.Constant.DEFAULT_ENCODING));
+                    inputStream.close();
+                    outputStream.close();
+                    return GsonUtil.fromJson(metaSer, MetaSchema.class);
+                }
+            } catch (IOException e) {
+                throw DataXException.asDataXException(FtpReaderErrorCode.GET_META_SCHEMA_ERROR, e.getMessage(), e);
+            }
+            return null;
+        }
+
+        private String getFirstPath() {
+            String pathFirst = path.get(0);
+            // check whether pathFirst is a directory
+            if (!ftpHelper.isDirExist(pathFirst)) {
+                pathFirst = pathFirst.substring(0, pathFirst.lastIndexOf(String.valueOf(IOUtils.DIR_SEPARATOR_UNIX)));
+            }
+            if (pathFirst.contains("*") || pathFirst.contains("?")) {
+                pathFirst = UnstructuredStorageReaderUtil.getRegexPathParentPath(pathFirst);
+            }
+            if (!pathFirst.endsWith(String.valueOf(IOUtils.DIR_SEPARATOR_UNIX))) {
+                pathFirst += String.valueOf(IOUtils.DIR_SEPARATOR_UNIX);
+            }
+            return pathFirst;
+        }
+    }
+
+    public static class Task extends Reader.Task {
+        private static final Logger LOG = LoggerFactory.getLogger(Task.class);
+
+        private FtpConnParams connParams;
+        private Configuration readerSliceConfig;
+        private List<Object> sourceFiles;
+
+        private FtpHelper ftpHelper = null;
+
+        @Override
+        public void init() {
+            // connection retry
+            /* for ftp connection */
+            this.readerSliceConfig = this.getPluginJobConf();
+            this.connParams = FtpConnParams.compose(connParams -> {
+                connParams.setHost(readerSliceConfig.getString(Key.HOST));
+                connParams.setProtocol(readerSliceConfig.getString(Key.PROTOCOL));
+                connParams.setUsername(readerSliceConfig.getString(Key.USERNAME));
+                connParams.setPassword(readerSliceConfig.getString(Key.PASSWORD, ""));
+                connParams.setPrvKeyPath(readerSliceConfig.getString(Key.PRV_KEY_PATH, ""));
+                connParams.setTimeout(readerSliceConfig.getInt(Key.TIMEOUT, Constant.DEFAULT_TIMEOUT));
+            });
+            this.sourceFiles = this.readerSliceConfig.getList(Constant.SOURCE_FILES, Object.class);
+            if ("sftp".equals(connParams.getProtocol())) {
+                // sftp protocol
+                connParams.setPort(readerSliceConfig.getInt(Key.PORT, Constant.DEFAULT_SFTP_PORT));
+                this.ftpHelper = new SftpHelper();
+            } else if ("ftp".equals(connParams.getProtocol())) {
+                // ftp protocol, passive mode by default
+                connParams.setPort(readerSliceConfig.getInt(Key.PORT, Constant.DEFAULT_FTP_PORT));
+                connParams.setConnectPattern(readerSliceConfig.getString(Key.CONNECTPATTERN, Constant.DEFAULT_FTP_CONNECT_PATTERN));
+                this.ftpHelper = new StandardFtpHelper();
+            }
+            ftpHelper.loginFtpServer(connParams);
+
+        }
+
+        @Override
+        public void prepare() {
+
+        }
+
+        @Override
+        public void post() {
+
+        }
+
+        @Override
+        public void destroy() {
+            try {
+                this.ftpHelper.logoutFtpServer();
+            } catch (Exception e) {
+                String message = String.format(
+                        "failed to close the connection to the ftp server: [%s] host=%s, username=%s, port=%s",
+                        e.getMessage(), connParams.getHost(), connParams.getUsername(), connParams.getPort());
+                LOG.error(message, e);
+            }
+        }
+
+        @Override
+        public void startRead(RecordSender recordSender) {
+            LOG.info("start read source files...");
+            for (Object sourceFile : this.sourceFiles) {
+                PathMeta pathMeta = Json.fromJson(Json.toJson(sourceFile, null), PathMeta.class);
+                String fileName = pathMeta.getAbsolute();
+                LOG.info(String.format("reading file : [%s]", fileName));
+                InputStream inputStream = ftpHelper.getInputStream(fileName);
+
+                UnstructuredStorageReaderUtil.readFromStream(inputStream, fileName, this.readerSliceConfig,
+                        recordSender, this.getTaskPluginCollector());
+                recordSender.flush();
+            }
+
+            LOG.info("end read source files...");
+        }
+
+        @Override
+        public void startRead(ChannelOutput channelOutput) {
+            LOG.info("start read source files to stream channel...");
+            for (Object sourceFile : this.sourceFiles) {
+                PathMeta pathMeta = Json.fromJson(Json.toJson(sourceFile, null), PathMeta.class);
+                String absolutePath = pathMeta.getAbsolute();
+                String relativePath = pathMeta.getRelative();
+                LOG.info(String.format("reading file: [%s]", absolutePath));
+                InputStream inputStream;
+                try {
+                    String name = absolutePath.substring(absolutePath.lastIndexOf(IOUtils.DIR_SEPARATOR) + 1);
+                    StreamMeta streamMeta = new StreamMeta();
+                    streamMeta.setName(name);
+                    streamMeta.setAbsolutePath(absolutePath);
+                    streamMeta.setRelativePath(relativePath);
+                    OutputStream outputStream = channelOutput.createStream(streamMeta, readerSliceConfig.getString(
+                            com.alibaba.datax.plugin.unstructuredstorage.reader.Key.ENCODING,
+                            com.alibaba.datax.plugin.unstructuredstorage.reader.Constant.DEFAULT_ENCODING));
+                    inputStream = ftpHelper.getInputStream(absolutePath);
+                    UnstructuredStorageReaderUtil.readFromStream(inputStream, outputStream,
+                            this.readerSliceConfig);
+                } catch (IOException e) {
+                    throw DataXException.asDataXException(FrameworkErrorCode.CHANNEL_STREAM_ERROR, e);
+                }
+            }
+            LOG.info("end read source files to stream channel...");
+        }
+    }
+}
diff --git a/exchangis-engines/engines/datax/datax-ftpreader/src/main/java/com/alibaba/datax/plugin/reader/ftpreader/FtpReaderErrorCode.java b/exchangis-engines/engines/datax/datax-ftpreader/src/main/java/com/alibaba/datax/plugin/reader/ftpreader/FtpReaderErrorCode.java
new file mode 100644
index 000000000..57d92ada3
--- /dev/null
+++ b/exchangis-engines/engines/datax/datax-ftpreader/src/main/java/com/alibaba/datax/plugin/reader/ftpreader/FtpReaderErrorCode.java
@@ -0,0 +1,52 @@
+package com.alibaba.datax.plugin.reader.ftpreader;
+
+import com.alibaba.datax.common.spi.ErrorCode;
+
+/**
+ * Created by haiwei.luo on 14-9-20.
+ */
+public enum FtpReaderErrorCode implements ErrorCode {
+    REQUIRED_VALUE("FtpReader-00", "missing a required parameter value."),
+    ILLEGAL_VALUE("FtpReader-01", "illegal parameter value."),
+    MIXED_INDEX_VALUE("FtpReader-02", "the column configuration contains both index and value."),
+    NO_INDEX_VALUE("FtpReader-03", "columns are configured explicitly, but neither index nor value is set."),
+    FILE_NOT_EXISTS("FtpReader-04", "the configured path does not exist or is not readable."),
+    OPEN_FILE_WITH_CHARSET_ERROR("FtpReader-05", "the configured file encoding does not match the actual encoding."),
+    OPEN_FILE_ERROR("FtpReader-06", "error while opening the configured file."),
+    READ_FILE_IO_ERROR("FtpReader-07", "I/O exception while reading the configured file."),
+    SECURITY_NOT_ENOUGH("FtpReader-08", "insufficient permission for the file operation."),
+    CONFIG_INVALID_EXCEPTION("FtpReader-09", "invalid parameter configuration."),
+    RUNTIME_EXCEPTION("FtpReader-10", "runtime exception occurred, please contact us"),
+    EMPTY_DIR_EXCEPTION("FtpReader-11", "the directory to read is empty."),
+    FAIL_LOGIN("FtpReader-12", "login failed, cannot connect to the ftp server."),
+    FAIL_DISCONNECT("FtpReader-13", "failed to close the ftp connection."),
+    COMMAND_FTP_IO_EXCEPTION("FtpReader-14", "I/O exception on the ftp connection."),
+    OUT_MAX_DIRECTORY_LEVEL("FtpReader-15", "exceeded the maximum directory depth allowed."),
+    LINK_FILE("FtpReader-16", "the file to read is a symbolic link."),
+    GET_FILE_STATS_ERROR("FtpReader-17", "error while fetching file status"),
+    PATH_NOT_FOUND("FtpReader-18", "Path not found"),
+    GET_META_SCHEMA_ERROR("FtpReader-19", "Get meta schema information error");
+
+    private final String code;
+    private final String description;
+
+    private FtpReaderErrorCode(String code, String description) {
+        this.code = code;
+        this.description = description;
+    }
+
+    @Override
+    public String getCode() {
+        return this.code;
+    }
+
+    @Override
+    public String getDescription() {
+        return this.description;
+    }
+
+    @Override
+    public String toString() {
+        return String.format("Code:[%s], Description:[%s].", this.code,
+                this.description);
+    }
+}
diff --git a/exchangis-engines/engines/datax/datax-ftpreader/src/main/java/com/alibaba/datax/plugin/reader/ftpreader/Key.java b/exchangis-engines/engines/datax/datax-ftpreader/src/main/java/com/alibaba/datax/plugin/reader/ftpreader/Key.java
new file mode 100644
index 000000000..0822bc20e
--- /dev/null
+++ b/exchangis-engines/engines/datax/datax-ftpreader/src/main/java/com/alibaba/datax/plugin/reader/ftpreader/Key.java
@@ -0,0 +1,16 @@
+package com.alibaba.datax.plugin.reader.ftpreader;
+
+public class Key {
+    static final String PROTOCOL = "protocol";
+    static final String HOST = "host";
+    static final String USERNAME = "username";
+    static final String PASSWORD = "password";
+    static final String PRV_KEY_PATH = "keyfilepath";
+    static final String PORT = "port";
+    static final String TIMEOUT = "timeout";
+    static final String CONNECTPATTERN = "connectPattern";
+    static final String PATH = "path";
+    static final String MAXTRAVERSALLEVEL = "maxTraversalLevel";
+    static final String META_FILE_PATH = "metaPath";
+    static final String TRANSIT = "transit";
+}
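+// A hypothetical reader "parameter" block wiring these keys together (placeholder values;
+// plugin_job_template.json below ships the minimal template):
+//   { "protocol": "sftp", "host": "sftp.example.com", "port": 22, "username": "loader",
+//     "keyfilepath": "/home/loader/.ssh/id_rsa", "timeout": 60000, "path": ["/data/in"],
+//     "maxTraversalLevel": 100, "metaPath": ".meta", "transit": true }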
"host"; + static final String USERNAME = "username"; + static final String PASSWORD = "password"; + static final String PRV_KEY_PATH = "keyfilepath"; + static final String PORT = "port"; + static final String TIMEOUT = "timeout"; + static final String CONNECTPATTERN = "connectPattern"; + static final String PATH = "path"; + static final String MAXTRAVERSALLEVEL = "maxTraversalLevel"; + static final String META_FILE_PATH = "metaPath"; + static final String TRANSIT = "transit"; +} diff --git a/exchangis-engines/engines/datax/datax-ftpreader/src/main/java/com/alibaba/datax/plugin/reader/ftpreader/SftpHelper.java b/exchangis-engines/engines/datax/datax-ftpreader/src/main/java/com/alibaba/datax/plugin/reader/ftpreader/SftpHelper.java new file mode 100644 index 000000000..ae96eb913 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-ftpreader/src/main/java/com/alibaba/datax/plugin/reader/ftpreader/SftpHelper.java @@ -0,0 +1,287 @@ +package com.alibaba.datax.plugin.reader.ftpreader; + +import com.alibaba.datax.common.exception.DataXException; +import com.alibaba.datax.plugin.unstructuredstorage.reader.UnstructuredStorageReaderUtil; +import com.jcraft.jsch.*; +import com.jcraft.jsch.ChannelSftp.LsEntry; +import com.webank.wedatasphere.exchangis.datax.common.CryptoUtils; +import org.apache.commons.io.IOUtils; +import org.apache.commons.lang3.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.io.InputStream; +import java.util.HashSet; +import java.util.Properties; +import java.util.Vector; + +public class SftpHelper extends FtpHelper { + private static final Logger LOG = LoggerFactory.getLogger(SftpHelper.class); + + private static final String FILE_NOT_EXIST_ = "no such file"; + + private Session session = null; + private ChannelSftp channelSftp = null; + + @Override + public void loginFtpServer(FtpConnParams connParams) { + // 创建JSch对象 + JSch jsch = new JSch(); + try { + if(StringUtils.isNotBlank(connParams.getPrvKeyPath())){ + jsch.addIdentity(connParams.getPrvKeyPath()); + } + session = jsch.getSession(connParams.getUsername(), connParams.getHost(), connParams.getPort()); + // 根据用户名,主机ip,端口获取一个Session对象 + // 如果服务器连接不上,则抛出异常 + if (session == null) { + throw DataXException.asDataXException(FtpReaderErrorCode.FAIL_LOGIN, + "session is null,无法通过sftp与服务器建立链接,请检查主机名和用户名是否正确."); + } + // 设置密码 + if(StringUtils.isNotBlank(connParams.getPassword())){ + session.setPassword((String) CryptoUtils.string2Object(connParams.getPassword())); + } + Properties config = new Properties(); + config.put("StrictHostKeyChecking", "no"); + config.put("PreferredAuthentications", "publickey,password"); + // 为Session对象设置properties + session.setConfig(config); + // 设置timeout时间 + session.setTimeout(connParams.getTimeout()); + // 通过Session建立链接 + session.connect(); + // 打开SFTP通道 + channelSftp = (ChannelSftp) session.openChannel("sftp"); + channelSftp.connect(); // 建立SFTP通道的连接 + + } catch (JSchException | ClassNotFoundException | IOException e) { + if (null != e.getCause()) { + String cause = e.getCause().toString(); + String unknownHostException = "java.net.UnknownHostException: " + connParams.getHost(); + String illegalArgumentException = "java.lang.IllegalArgumentException: port out of range:" + connParams.getPort(); + String wrongPort = "java.net.ConnectException: Connection refused"; + if (unknownHostException.equals(cause)) { + String message = String.format("请确认ftp服务器地址是否正确,无法连接到地址为: [%s] 的ftp服务器", connParams.getHost()); + LOG.error(message); + throw 
DataXException.asDataXException(FtpReaderErrorCode.FAIL_LOGIN, message, e); + } else if (illegalArgumentException.equals(cause) || wrongPort.equals(cause)) { + String message = String.format("请确认连接ftp服务器端口是否正确,错误的端口: [%s] ", connParams.getPort()); + LOG.error(message); + throw DataXException.asDataXException(FtpReaderErrorCode.FAIL_LOGIN, message, e); + }else{ + String message = "cannot login to the sftp server, please check your configuration of connecting"; + LOG.error(message); + throw DataXException.asDataXException(FtpReaderErrorCode.FAIL_LOGIN, message, e); + } + } else { + if ("Auth fail".equals(e.getMessage())) { + String message = String.format("与ftp服务器建立连接失败,请检查用户名和密码是否正确: [%s]", + "message:host =" + connParams.getHost() + ",username = " + connParams.getUsername() + ",port =" + connParams.getPort()); + LOG.error(message); + throw DataXException.asDataXException(FtpReaderErrorCode.FAIL_LOGIN, message); + } else { + String message = String.format("与ftp服务器建立连接失败 : [%s]", + "message:host =" + connParams.getHost() + ",username = " + connParams.getUsername() + ",port =" + connParams.getPort()); + LOG.error(message); + throw DataXException.asDataXException(FtpReaderErrorCode.FAIL_LOGIN, message, e); + } + } + } + + } + + @Override + public void logoutFtpServer() { + if (channelSftp != null) { + channelSftp.disconnect(); + } + if (session != null) { + session.disconnect(); + } + } + + @Override + public boolean isDirExist(String directoryPath) { + try { + SftpATTRS sftpATTRS = channelSftp.lstat(directoryPath); + return sftpATTRS.isDir(); + } catch (SftpException e) { + if (e.getMessage().toLowerCase().equals(FILE_NOT_EXIST_)) { + return false; + } + String message = String.format("进入目录:[%s]时发生I/O异常,请确认与ftp服务器的连接正常", directoryPath); + LOG.error(message); + throw DataXException.asDataXException(FtpReaderErrorCode.COMMAND_FTP_IO_EXCEPTION, message, e); + } + } + + @Override + public boolean isFileExist(String filePath) { + boolean isExitFlag = false; + try { + SftpATTRS sftpATTRS = channelSftp.lstat(filePath); + if (sftpATTRS.getSize() >= 0) { + isExitFlag = true; + } + } catch (SftpException e) { + if (!FILE_NOT_EXIST_.equals(e.getMessage().toLowerCase())) { + String message = String.format("获取文件:[%s] 属性时发生I/O异常,请确认与ftp服务器的连接正常", filePath); + LOG.error(message); + throw DataXException.asDataXException(FtpReaderErrorCode.COMMAND_FTP_IO_EXCEPTION, message, e); + } + } + return isExitFlag; + } + + @Override + public boolean isSymbolicLink(String filePath) { + try { + SftpATTRS sftpATTRS = channelSftp.lstat(filePath); + return sftpATTRS.isLink(); + } catch (SftpException e) { + if (e.getMessage().toLowerCase().equals(FILE_NOT_EXIST_)) { + String message = String.format("请确认您的配置项path:[%s]存在,且配置的用户有权限读取", filePath); + LOG.error(message); + throw DataXException.asDataXException(FtpReaderErrorCode.FILE_NOT_EXISTS, message); + } else { + String message = String.format("获取文件:[%s] 属性时发生I/O异常,请确认与ftp服务器的连接正常", filePath); + LOG.error(message); + throw DataXException.asDataXException(FtpReaderErrorCode.COMMAND_FTP_IO_EXCEPTION, message, e); + } + } + } + + HashSet sourceFiles = new HashSet(); + + @Override + public HashSet getListFiles(String directoryPath, int parentLevel, int maxTraversalLevel) { + if (parentLevel < maxTraversalLevel) { + // 父级目录,以'/'结尾 + String parentPath; + int pathLen = directoryPath.length(); + //*和?的限制 + if (directoryPath.contains("*") || directoryPath.contains("?")) { + // path是正则表达式 + String subPath = UnstructuredStorageReaderUtil.getRegexPathParentPath(directoryPath); + if 
(isDirExist(subPath)) { + parentPath = subPath; + } else { + String message = String.format("不能进入目录:[%s]," + "请确认您的配置项path:[%s]存在,且配置的用户有权限进入", subPath, + directoryPath); + LOG.error(message); + throw DataXException.asDataXException(FtpReaderErrorCode.FILE_NOT_EXISTS, message); + } + + } else if (isDirExist(directoryPath)) { + // path是目录 + if (directoryPath.charAt(pathLen - 1) == IOUtils.DIR_SEPARATOR) { + parentPath = directoryPath; + } else { + parentPath = directoryPath + IOUtils.DIR_SEPARATOR; + } + } else if (isSymbolicLink(directoryPath)) { + //path是链接文件 + String message = String.format("文件:[%s]是链接文件,当前不支持链接文件的读取", directoryPath); + LOG.error(message); + throw DataXException.asDataXException(FtpReaderErrorCode.LINK_FILE, message); + } else if (isFileExist(directoryPath)) { + // path指向具体文件 + sourceFiles.add(directoryPath); + return sourceFiles; + } else { + String message = String.format("请确认您的配置项path:[%s]存在,且配置的用户有权限读取", directoryPath); + LOG.error(message); + throw DataXException.asDataXException(FtpReaderErrorCode.FILE_NOT_EXISTS, message); + } + + try { + Vector vector = channelSftp.ls(directoryPath); + for (int i = 0; i < vector.size(); i++) { + LsEntry le = (LsEntry) vector.get(i); + String strName = le.getFilename(); + if(strName.startsWith(".")){ + //skip hidden files + continue; + } + String filePath = parentPath + strName; + + if (isDirExist(filePath)) { + // 是子目录 + if (!(strName.equals(".") || strName.equals(".."))) { + //递归处理 + getListFiles(filePath, parentLevel + 1, maxTraversalLevel); + } + } else if (isSymbolicLink(filePath)) { + //是链接文件 + String message = String.format("文件:[%s]是链接文件,当前不支持链接文件的读取", filePath); + LOG.error(message); + throw DataXException.asDataXException(FtpReaderErrorCode.LINK_FILE, message); + } else if (isFileExist(filePath)) { + // 是文件 + sourceFiles.add(filePath); + } else { + String message = String.format("请确认path:[%s]存在,且配置的用户有权限读取", filePath); + LOG.error(message); + throw DataXException.asDataXException(FtpReaderErrorCode.FILE_NOT_EXISTS, message); + } + + } // end for vector + } catch (SftpException e) { + String message = String.format("获取path:[%s] 下文件列表时发生I/O异常,请确认与ftp服务器的连接正常", directoryPath); + LOG.error(message); + throw DataXException.asDataXException(FtpReaderErrorCode.COMMAND_FTP_IO_EXCEPTION, message, e); + } + + return sourceFiles; + } else { + //超出最大递归层数 + String message = String.format("获取path:[%s] 下文件列表时超出最大层数,请确认路径[%s]下不存在软连接文件", directoryPath, directoryPath); + LOG.error(message); + throw DataXException.asDataXException(FtpReaderErrorCode.OUT_MAX_DIRECTORY_LEVEL, message); + } + } + + @Override + public InputStream getInputStream(String filePath) { + try { + return channelSftp.get(filePath); + } catch (SftpException e) { + String message = String.format("读取文件 : [%s] 时出错,请确认文件:[%s]存在且配置的用户有权限读取", filePath, filePath); + LOG.error(message); + throw DataXException.asDataXException(FtpReaderErrorCode.OPEN_FILE_ERROR, message); + } + } + + @Override + public long getLastModifyTIme(String filePath) { + try { + SftpATTRS attrs = channelSftp.stat(filePath); + long time = attrs.getMTime(); + if(String.valueOf(time).length() < 13){ + //unix_time to timestamp + return time * 1000; + } + return time; + }catch(SftpException e){ + String message = String.format("获取SFTP文件: [%s] 最新修改时间异常,请确认是否支持该接口", filePath); + LOG.error(message); + throw DataXException.asDataXException(FtpReaderErrorCode.GET_FILE_STATS_ERROR, message); + } + } + + @Override + public void deleteFile(String filePath) { + try{ + this.channelSftp.rm(filePath); + 
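+            // ChannelSftp#rm removes regular files only; deleting a directory would require
+            // ChannelSftp#rmdir, which this helper does not expose.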
+        } catch (SftpException e) {
+            String message = String.format(
+                    "delete file [%s] error, please check your network and file permission, message [%s]",
+                    filePath, e.getMessage());
+            LOG.error(message);
+            throw DataXException.asDataXException(FtpReaderErrorCode.COMMAND_FTP_IO_EXCEPTION, message, e);
+        }
+    }
+
+}
diff --git a/exchangis-engines/engines/datax/datax-ftpreader/src/main/java/com/alibaba/datax/plugin/reader/ftpreader/StandardFtpHelper.java b/exchangis-engines/engines/datax/datax-ftpreader/src/main/java/com/alibaba/datax/plugin/reader/ftpreader/StandardFtpHelper.java
new file mode 100644
index 000000000..49486ee27
--- /dev/null
+++ b/exchangis-engines/engines/datax/datax-ftpreader/src/main/java/com/alibaba/datax/plugin/reader/ftpreader/StandardFtpHelper.java
@@ -0,0 +1,260 @@
+package com.alibaba.datax.plugin.reader.ftpreader;
+
+import com.alibaba.datax.common.exception.DataXException;
+import com.alibaba.datax.plugin.unstructuredstorage.reader.UnstructuredStorageReaderUtil;
+import com.webank.wedatasphere.exchangis.datax.common.CryptoUtils;
+import org.apache.commons.io.IOUtils;
+import org.apache.commons.net.ftp.FTP;
+import org.apache.commons.net.ftp.FTPClient;
+import org.apache.commons.net.ftp.FTPFile;
+import org.apache.commons.net.ftp.FTPReply;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.UnknownHostException;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.HashSet;
+
+public class StandardFtpHelper extends FtpHelper {
+    private static final Logger LOG = LoggerFactory.getLogger(StandardFtpHelper.class);
+
+    // MDTM timestamp format, e.g. 20190815120000; note the lower-case pattern letters
+    // (yyyy = year, dd = day of month, HH = 24-hour clock). A fractional ".SSS" suffix,
+    // if the server sends one, is ignored by SimpleDateFormat#parse.
+    private static final String TIME_FORMAT_PATTERN = "yyyyMMddHHmmss";
+
+    FTPClient ftpClient = null;
+
+    @Override
+    public void loginFtpServer(FtpConnParams ftpConnParams) {
+        ftpClient = new FTPClient();
+        try {
+            // connect
+            ftpClient.connect(ftpConnParams.getHost(), ftpConnParams.getPort());
+            // login
+            ftpClient.login(ftpConnParams.getUsername(), (String) CryptoUtils.string2Object(ftpConnParams.getPassword()));
+            ftpClient.setConnectTimeout(ftpConnParams.getTimeout());
+            ftpClient.setDataTimeout(ftpConnParams.getTimeout());
+            if ("PASV".equals(ftpConnParams.getConnectPattern())) {
+                ftpClient.enterRemotePassiveMode();
+                ftpClient.enterLocalPassiveMode();
+            } else if ("PORT".equals(ftpConnParams.getConnectPattern())) {
+                ftpClient.enterLocalActiveMode();
+                // ftpClient.enterRemoteActiveMode(host, port);
+            }
+            int reply = ftpClient.getReplyCode();
+            if (!FTPReply.isPositiveCompletion(reply)) {
+                ftpClient.disconnect();
+                String message = String.format("failed to connect to the ftp server, please check the username and password: [%s]",
+                        "message:host =" + ftpConnParams.getHost() + ",username = " + ftpConnParams.getUsername() + ",port =" + ftpConnParams.getPort());
+                LOG.error(message);
+                throw DataXException.asDataXException(FtpReaderErrorCode.FAIL_LOGIN, message);
+            }
+            // set the control-command encoding
+            String fileEncoding = System.getProperty("file.encoding");
+            ftpClient.setControlEncoding(fileEncoding);
+        } catch (UnknownHostException e) {
+            String message = String.format("cannot connect to the ftp server at [%s], please check the host address", ftpConnParams.getHost());
+            LOG.error(message);
+            throw DataXException.asDataXException(FtpReaderErrorCode.FAIL_LOGIN, message, e);
+        } catch (IllegalArgumentException e) {
+            String message = String.format("please check the ftp server port, wrong port: [%s] ", ftpConnParams.getPort());
+            LOG.error(message);
+            throw DataXException.asDataXException(FtpReaderErrorCode.FAIL_LOGIN, message, e);
+        } catch (Exception e) {
+            String message = String.format("failed to connect to the ftp server : [%s]",
+                    "message:host =" + ftpConnParams.getHost() + ",username = " + ftpConnParams.getUsername() + ",port =" + ftpConnParams.getPort());
+            LOG.error(message);
+            throw DataXException.asDataXException(FtpReaderErrorCode.FAIL_LOGIN, message, e);
+        }
+
+    }
+
+    @Override
+    public void logoutFtpServer() {
+        if (ftpClient.isConnected()) {
+            try {
+                // todo ftpClient.completePendingCommand(); // required after opening a stream, the exact reason still needs investigation
+                ftpClient.logout();
+            } catch (IOException e) {
+                String message = "failed to disconnect from the ftp server";
+                LOG.error(message);
+                throw DataXException.asDataXException(FtpReaderErrorCode.FAIL_DISCONNECT, message, e);
+            } finally {
+                if (ftpClient.isConnected()) {
+                    try {
+                        ftpClient.disconnect();
+                    } catch (IOException e) {
+                        String message = "failed to disconnect from the ftp server";
+                        LOG.error(message);
+                    }
+                }
+
+            }
+        }
+    }
+
+    @Override
+    public boolean isDirExist(String directoryPath) {
+        try {
+            return ftpClient.changeWorkingDirectory(new String(directoryPath.getBytes(), FTP.DEFAULT_CONTROL_ENCODING));
+        } catch (IOException e) {
+            String message = String.format("I/O exception while entering directory [%s], please check the connection to the ftp server", directoryPath);
+            LOG.error(message);
+            throw DataXException.asDataXException(FtpReaderErrorCode.COMMAND_FTP_IO_EXCEPTION, message, e);
+        }
+    }
+
+    @Override
+    public boolean isFileExist(String filePath) {
+        boolean isExitFlag = false;
+        try {
+            FTPFile[] ftpFiles = ftpClient.listFiles(new String(filePath.getBytes(), FTP.DEFAULT_CONTROL_ENCODING));
+            if (ftpFiles.length == 1 && ftpFiles[0].isFile()) {
+                isExitFlag = true;
+            }
+        } catch (IOException e) {
+            String message = String.format("I/O exception while fetching attributes of file [%s], please check the connection to the ftp server", filePath);
+            LOG.error(message);
+            throw DataXException.asDataXException(FtpReaderErrorCode.COMMAND_FTP_IO_EXCEPTION, message, e);
+        }
+        return isExitFlag;
+    }
+
+    @Override
+    public boolean isSymbolicLink(String filePath) {
+        boolean isExitFlag = false;
+        try {
+            FTPFile[] ftpFiles = ftpClient.listFiles(new String(filePath.getBytes(), FTP.DEFAULT_CONTROL_ENCODING));
+            if (ftpFiles.length == 1 && ftpFiles[0].isSymbolicLink()) {
+                isExitFlag = true;
+            }
+        } catch (IOException e) {
+            String message = String.format("I/O exception while fetching attributes of file [%s], please check the connection to the ftp server", filePath);
+            LOG.error(message);
+            throw DataXException.asDataXException(FtpReaderErrorCode.COMMAND_FTP_IO_EXCEPTION, message, e);
+        }
+        return isExitFlag;
+    }
+
+    // accumulator shared by the recursive getListFiles(...) calls
+    private HashSet<String> sourceFiles = new HashSet<>();
+
+    @Override
+    public HashSet<String> getListFiles(String directoryPath, int parentLevel, int maxTraversalLevel) {
+        if (parentLevel < maxTraversalLevel) {
+            String parentPath = null; // parent directory, ends with '/'
+            int pathLen = directoryPath.length();
+            if (directoryPath.contains("*") || directoryPath.contains("?")) {
+                // the path contains wildcards
+                String subPath = UnstructuredStorageReaderUtil.getRegexPathParentPath(directoryPath);
+                if (isDirExist(subPath)) {
+                    parentPath = subPath;
+                } else {
+                    String message = String.format("cannot enter directory [%s], please make sure the configured path [%s] exists and the user has permission to enter it", subPath,
+                            directoryPath);
+                    LOG.error(message);
+                    throw DataXException.asDataXException(FtpReaderErrorCode.FILE_NOT_EXISTS, message);
+                }
+            } else if (isDirExist(directoryPath)) {
+                // the path is a directory
+                if (directoryPath.charAt(pathLen - 1) == IOUtils.DIR_SEPARATOR) {
+                    parentPath = directoryPath;
+                } else {
+                    parentPath = directoryPath + IOUtils.DIR_SEPARATOR;
+                }
+            } else if (isFileExist(directoryPath)) {
+                // the path points to a concrete file
+                sourceFiles.add(directoryPath);
+                return sourceFiles;
+            } else if (isSymbolicLink(directoryPath)) {
+                // the path is a symbolic link
+                String message = String.format("file [%s] is a symbolic link, reading symbolic links is not supported", directoryPath);
+                LOG.error(message);
+                throw DataXException.asDataXException(FtpReaderErrorCode.LINK_FILE, message);
+            } else {
+                String message = String.format("please make sure the configured path [%s] exists and the user has read permission", directoryPath);
+                LOG.error(message);
+                throw DataXException.asDataXException(FtpReaderErrorCode.FILE_NOT_EXISTS, message);
+            }
+
+            try {
+                FTPFile[] fs = ftpClient.listFiles(new String(directoryPath.getBytes(), FTP.DEFAULT_CONTROL_ENCODING));
+                for (FTPFile ff : fs) {
+                    String strName = ff.getName();
+                    if (strName.startsWith(".")) {
+                        // skip hidden files
+                        continue;
+                    }
+                    String filePath = parentPath + strName;
+                    if (ff.isDirectory()) {
+                        if (!(strName.equals(".") || strName.equals(".."))) {
+                            // recurse
+                            getListFiles(filePath, parentLevel + 1, maxTraversalLevel);
+                        }
+                    } else if (ff.isFile()) {
+                        // a regular file
+                        sourceFiles.add(filePath);
+                    } else if (ff.isSymbolicLink()) {
+                        // a symbolic link
+                        String message = String.format("file [%s] is a symbolic link, reading symbolic links is not supported", filePath);
+                        LOG.error(message);
+                        throw DataXException.asDataXException(FtpReaderErrorCode.LINK_FILE, message);
+                    } else {
+                        String message = String.format("please make sure the path [%s] exists and the user has read permission", filePath);
+                        LOG.error(message);
+                        throw DataXException.asDataXException(FtpReaderErrorCode.FILE_NOT_EXISTS, message);
+                    }
+                } // end for FTPFile
+            } catch (IOException e) {
+                String message = String.format("I/O exception while listing files under path [%s], please check the connection to the ftp server", directoryPath);
+                LOG.error(message);
+                throw DataXException.asDataXException(FtpReaderErrorCode.COMMAND_FTP_IO_EXCEPTION, message, e);
+            }
+            return sourceFiles;
+
+        } else {
+            // exceeded the maximum recursion depth
+            String message = String.format("exceeded the maximum directory depth while listing files under path [%s], please make sure there are no symbolic links under [%s]", directoryPath, directoryPath);
+            LOG.error(message);
+            throw DataXException.asDataXException(FtpReaderErrorCode.OUT_MAX_DIRECTORY_LEVEL, message);
+        }
+    }
+
+    @Override
+    public InputStream getInputStream(String filePath) {
+        try {
+            return ftpClient.retrieveFileStream(new String(filePath.getBytes(), FTP.DEFAULT_CONTROL_ENCODING));
+        } catch (IOException e) {
+            String message = String.format("error while reading file [%s], please make sure it exists and the user has read permission", filePath);
+            LOG.error(message);
+            throw DataXException.asDataXException(FtpReaderErrorCode.OPEN_FILE_ERROR, message);
+        }
+    }
+
+    @Override
+    public long getLastModifyTIme(String filePath) {
+        try {
+            String timeString = ftpClient.getModificationTime(filePath);
+            SimpleDateFormat simpleDateFormat = new SimpleDateFormat(TIME_FORMAT_PATTERN);
+            return simpleDateFormat.parse(timeString).getTime();
+        } catch (IOException | ParseException e) {
+            String message = String.format("error while fetching the last modified time of ftp file [%s], please make sure the server supports it", filePath);
+            LOG.error(message);
+            throw DataXException.asDataXException(FtpReaderErrorCode.GET_FILE_STATS_ERROR, message);
+        }
+    }
+
+    @Override
+    public void deleteFile(String filePath) {
+        try {
+            this.ftpClient.deleteFile(filePath);
+        } catch (IOException e) {
+            String message = String.format(
+                    "delete file [%s] error, please check your network and file permission, message [%s]",
+                    filePath, e.getMessage());
+            LOG.error(message);
+            throw DataXException.asDataXException(FtpReaderErrorCode.COMMAND_FTP_IO_EXCEPTION, message, e);
+        }
+    }
+
+}
diff --git a/exchangis-engines/engines/datax/datax-ftpreader/src/main/resources/plugin-template.json b/exchangis-engines/engines/datax/datax-ftpreader/src/main/resources/plugin-template.json
new file mode 100644
index 000000000..9680aec67
--- /dev/null
+++ b/exchangis-engines/engines/datax/datax-ftpreader/src/main/resources/plugin-template.json
@@ -0,0
+1,38 @@ +{ + "name": "ftpreader", + "parameter": { + "host": "", + "port": "", + "username": "", + "password": "", + "protocol": "", + "path": [ + "" + ], + "encoding": "UTF-8", + "column": [ + { + "index": 0, + "type": "long" + }, + { + "index": 1, + "type": "boolean" + }, + { + "index": 2, + "type": "double" + }, + { + "index": 3, + "type": "string" + }, + { + "index": 4, + "type": "date", + "format": "yyyy.MM.dd" + } + ], + "fieldDelimiter": "," + } +} diff --git a/exchangis-engines/engines/datax/datax-ftpreader/src/main/resources/plugin.json b/exchangis-engines/engines/datax/datax-ftpreader/src/main/resources/plugin.json new file mode 100644 index 000000000..cbf14ef76 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-ftpreader/src/main/resources/plugin.json @@ -0,0 +1,7 @@ +{ + "name": "ftpreader", + "class": "com.alibaba.datax.plugin.reader.ftpreader.FtpReader", + "description": "useScene: test. mechanism: use datax framework to transport data from txt file. warn: The more you know about the data, the less problems you encounter.", + "developer": "alibaba" +} + diff --git a/exchangis-engines/engines/datax/datax-ftpreader/src/main/resources/plugin_job_template.json b/exchangis-engines/engines/datax/datax-ftpreader/src/main/resources/plugin_job_template.json new file mode 100644 index 000000000..b3ac7fd3d --- /dev/null +++ b/exchangis-engines/engines/datax/datax-ftpreader/src/main/resources/plugin_job_template.json @@ -0,0 +1,19 @@ +{ + "name": "ftpreader", + "parameter": { + "host": "", + "protocol": "sftp", + "port": "", + "username": "", + "password": "", + "path": [], + "column": [ + { + "index": 0, + "type": "" + } + ], + "fieldDelimiter": ",", + "encoding": "UTF-8" + } +} \ No newline at end of file diff --git a/exchangis-engines/engines/datax/datax-ftpwriter/pom.xml b/exchangis-engines/engines/datax/datax-ftpwriter/pom.xml new file mode 100644 index 000000000..d738c01d9 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-ftpwriter/pom.xml @@ -0,0 +1,91 @@ + + + + exchangis-engine-datax + com.webank.wedatasphere.exchangis + ${revision} + ../pom.xml + + 4.0.0 + jar + 3.0.0-Plus-2 + datax-ftpwriter + + + com.webank.wedatasphere.exchangis + datax-core + ${datax.engine.version} + provided + + + slf4j-log4j12 + org.slf4j + + + + provided + org.slf4j + slf4j-api + + + ch.qos.logback + logback-classic + provided + + + commons-io + commons-io + provided + + + org.apache.commons + commons-lang3 + provided + + + commons-codec + commons-codec + ${commons-codec} + provided + + + com.jcraft + jsch + 0.1.54 + + + commons-net + commons-net + 3.3 + + + + + + org.apache.maven.plugins + maven-assembly-plugin + 2.2.1 + + + assemble + + single + + + install + + + + false + false + + ${basedir}/src/main/assembly/package.xml + + plugin + + + + + \ No newline at end of file diff --git a/exchangis-engines/engines/datax/datax-ftpwriter/src/main/assembly/package.xml b/exchangis-engines/engines/datax/datax-ftpwriter/src/main/assembly/package.xml new file mode 100644 index 000000000..580335270 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-ftpwriter/src/main/assembly/package.xml @@ -0,0 +1,33 @@ + + ftpwriter + + dir + + false + + + src/main/resources + + plugin.json + plugin_job_template.json + + plugin/writer/ftpwriter + + + target/ + + datax-ftpwriter-${datax.engine.version}.jar + + plugin/writer/ftpwriter + + + + + false + plugin/writer/ftpwriter/libs + runtime + + + \ No newline at end of file diff --git 
a/exchangis-engines/engines/datax/datax-ftpwriter/src/main/java/com/alibaba/datax/plugin/writer/ftpwriter/FtpWriter.java b/exchangis-engines/engines/datax/datax-ftpwriter/src/main/java/com/alibaba/datax/plugin/writer/ftpwriter/FtpWriter.java new file mode 100644 index 000000000..95874c3d7 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-ftpwriter/src/main/java/com/alibaba/datax/plugin/writer/ftpwriter/FtpWriter.java @@ -0,0 +1,425 @@ +package com.alibaba.datax.plugin.writer.ftpwriter; + +import com.alibaba.datax.common.constant.CommonConstant; +import com.alibaba.datax.common.exception.DataXException; +import com.alibaba.datax.common.plugin.RecordReceiver; +import com.alibaba.datax.common.spi.Writer; +import com.alibaba.datax.common.util.Configuration; +import com.webank.wedatasphere.exchangis.datax.common.GsonUtil; +import com.alibaba.datax.common.util.RetryUtil; +import com.webank.wedatasphere.exchangis.datax.core.job.meta.MetaSchema; +import com.webank.wedatasphere.exchangis.datax.core.transport.stream.ChannelInput; +import com.webank.wedatasphere.exchangis.datax.core.transport.stream.StreamMeta; +import com.alibaba.datax.core.util.FrameworkErrorCode; +import com.alibaba.datax.plugin.unstructuredstorage.writer.UnstructuredStorageWriterUtil; +import com.alibaba.datax.plugin.unstructuredstorage.writer.ZipCollectOutputStream; +import com.alibaba.datax.plugin.writer.ftpwriter.util.*; +import org.apache.commons.io.IOUtils; +import org.apache.commons.lang3.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.*; +import java.util.*; +import java.util.concurrent.Callable; + +public class FtpWriter extends Writer { + private static void loginFtpWithRetry(IFtpHelper ftpHelper, FtpConnParams connParams, Logger log){ + try { + RetryUtil.executeWithRetry((Callable) () -> { + ftpHelper.loginFtpServer(connParams); + return null; + }, 3, 4000, true); + } catch (Exception e) { + String message = String + .format("与ftp服务器建立连接失败, host:%s, username:%s, port:%s, errorMessage:%s", + connParams.getHost(), connParams.getUsername(), connParams.getPort(), e.getMessage()); + log.error(message); + throw DataXException.asDataXException( + FtpWriterErrorCode.FAIL_LOGIN, message, e); + } + } + public static class Job extends Writer.Job { + + private static final String SIGNAL_FILE_NAME = ".ok"; + + private static final String DEFAULT_META_FILE_PATH = ".meta"; + + private static final long WAIT_SIGNAL_SLEEP_INTERVAL = 3000; + + private static final Logger LOG = LoggerFactory.getLogger(Job.class); + + private Configuration writerSliceConfig = null; + private Set allFileExists = null; + + private FtpConnParams connParams; + + private String tempPath; + + private IFtpHelper ftpHelper = null; + + private String signalFilePath; + @Override + public boolean isSupportStream() { + return true; + } + + @Override + public void init() { + this.writerSliceConfig = this.getPluginJobConf(); + this.validateParameter(); + UnstructuredStorageWriterUtil + .validateParameter(this.writerSliceConfig); + loginFtpWithRetry(ftpHelper, this.connParams, LOG); + } + + @Override + public void syncMetaData(MetaSchema metaSchema) { + String path = this.writerSliceConfig.getString(Key.PATH); + //try to create direcotry + this.ftpHelper.mkDirRecursive(path); + //should wait for signal first + waitForSignal(path); + addMetaSchemaToFile(GsonUtil.toJson(metaSchema)); + } + + private void validateParameter() { + String path = this.writerSliceConfig.getNecessaryValue(Key.PATH, + 
FtpWriterErrorCode.REQUIRED_VALUE); + if (!path.startsWith("/")) { + String message = String.format("请检查参数path:%s,需要配置为绝对路径", path); + LOG.error(message); + throw DataXException.asDataXException( + FtpWriterErrorCode.ILLEGAL_VALUE, message); + } + this.connParams = FtpConnParams.compose(connParams ->{ + connParams.setHost(this.writerSliceConfig.getNecessaryValue(Key.HOST, + FtpWriterErrorCode.REQUIRED_VALUE)); + connParams.setUsername(this.writerSliceConfig.getNecessaryValue( + Key.USERNAME, FtpWriterErrorCode.REQUIRED_VALUE)); + connParams.setPassword(this.writerSliceConfig.getString(Key.PASSWORD, "")); + connParams.setPrvKeyPath(this.writerSliceConfig.getString(Key.PRV_KEY_PATH, "")); + if(StringUtils.isBlank(connParams.getPrvKeyPath()) && StringUtils.isBlank(connParams.getPassword())){ + throw DataXException.asDataXException(FtpWriterErrorCode.REQUIRED_VALUE, "you need to set private key path or password"); + } + connParams.setTimeout(this.writerSliceConfig.getInt(Key.TIMEOUT, Constant.DEFAULT_TIMEOUT)); + connParams.setProtocol(this.writerSliceConfig.getNecessaryValue( + Key.PROTOCOL, FtpWriterErrorCode.REQUIRED_VALUE)); + connParams.setPort(this.writerSliceConfig.getInt(Key.PORT, + Constant.DEFAULT_SFTP_PORT)); + }); + if ("sftp".equalsIgnoreCase(this.connParams.getProtocol())) { + this.ftpHelper = new SftpHelperImpl(); + } else if ("ftp".equalsIgnoreCase(this.connParams.getProtocol())) { + this.ftpHelper = new StandardFtpHelperImpl(); + } else { + throw DataXException.asDataXException( + FtpWriterErrorCode.ILLEGAL_VALUE, String.format( + "仅支持 ftp和sftp 传输协议 , 不支持您配置的传输协议: [%s]", + this.connParams.getProtocol())); + } + this.writerSliceConfig.set(Key.PORT, this.connParams.getPort()); + } + + @Override + public void prepare() { + String path = this.writerSliceConfig.getString(Key.PATH); + waitForSignal(path); + // warn: 这里用户需要配一个目录 + this.ftpHelper.mkDirRecursive(path); + + String writeMode = this.writerSliceConfig + .getString(com.alibaba.datax.plugin.unstructuredstorage.writer.Key.WRITE_MODE); + Set allFileExists = this.ftpHelper.getAllFilesInDir(path, + "", false, false); + this.allFileExists = allFileExists; + // truncate option handler + if ("truncate".equals(writeMode)) { + LOG.info(String.format( + "由于您配置了writeMode truncate, 开始清理 [%s] 下面的内容", + path)); + Set fullFileNameToDelete = new HashSet<>(); + for (String each : allFileExists) { + //skip meta file + if(each.trim().equals(this.writerSliceConfig + .getString(Key.META_FILE_PATH, DEFAULT_META_FILE_PATH))){ + continue; + } + fullFileNameToDelete.add(UnstructuredStorageWriterUtil + .buildFilePath(path, each, null)); + } + LOG.info(String.format( + "删除目录path:[%s] 下文件列表如下: [%s]", path, + StringUtils.join(fullFileNameToDelete.iterator(), ", "))); + + this.ftpHelper.deleteFiles(fullFileNameToDelete); + } else if ("append".equals(writeMode)) { + LOG.info(String + .format("由于您配置了writeMode append, [%s] 目录写入前不做清理工作", + path)); + LOG.info(String.format( + "目录path:[%s] 下已经存在的文件列表如下: [%s]", + path, + StringUtils.join(allFileExists.iterator(), ", "))); + } else if ("nonConflict".equals(writeMode)) { + LOG.info(String.format( + "由于您配置了writeMode nonConflict, 开始检查 [%s] 下面的内容", path)); + if (!allFileExists.isEmpty()) { + LOG.info(String.format( + "目录path:[%s] 下冲突文件列表如下: [%s]", + path, + StringUtils.join(allFileExists.iterator(), ", "))); + throw DataXException + .asDataXException( + FtpWriterErrorCode.ILLEGAL_VALUE, + String.format( + "您配置的path: [%s] 目录不为空, 下面存在其他文件或文件夹.", + path)); + } + } else { + throw DataXException + .asDataXException( + 
FtpWriterErrorCode.ILLEGAL_VALUE, + String.format( + "仅支持 truncate, append, nonConflict 三种模式, 不支持您配置的 writeMode 模式 : [%s]", + writeMode)); + } + } + + @Override + public void post() { + if(StringUtils.isNotBlank(this.tempPath)){ + String path = this.writerSliceConfig.getString(Key.PATH); + try { + this.ftpHelper.moveToDirectory(new ArrayList<>( + this.ftpHelper.getAllFilesInDir(this.tempPath, "", false, true) + ), path); + }finally{ + this.ftpHelper.deleteFiles(Collections.singleton(this.tempPath)); + } + this.tempPath = null; + } + //add signal file + addSignal(); + } + + @Override + public void destroy() { + if(StringUtils.isNotBlank(this.tempPath)){ + this.ftpHelper.deleteFiles(Collections.singleton(this.tempPath)); + } + try { + this.ftpHelper.logoutFtpServer(); + } catch (Exception e) { + String message = String + .format("关闭与ftp服务器连接失败, host:%s, username:%s, port:%s, errorMessage:%s", + this.connParams.getHost(), this.connParams.getUsername(), + this.connParams.getPort(), e.getMessage()); + LOG.error(message, e); + } + } + + @Override + public List split(int mandatoryNumber) { + this.tempPath = UnstructuredStorageWriterUtil.buildTmpFilePath( + this.writerSliceConfig.getString(Key.PATH), + String.format(CommonConstant.TEMP_PREFIX, System.currentTimeMillis()), + IOUtils.DIR_SEPARATOR, + path -> this.allFileExists.contains(path) + ); + this.writerSliceConfig.set(com.alibaba.datax.plugin.unstructuredstorage.writer.Key.TEMP_PATH, + this.tempPath); + //mkdir + this.ftpHelper.mkDirRecursive(this.tempPath); + return UnstructuredStorageWriterUtil.split(this.writerSliceConfig, + this.allFileExists, getTransportType() ,mandatoryNumber); + } + + private void waitForSignal(String path0){ + if(this.writerSliceConfig.getBool(Key.TRANSIT, true)) { + String path = path0; + if (!path.endsWith(String.valueOf(IOUtils.DIR_SEPARATOR_UNIX))) { + path += String.valueOf(IOUtils.DIR_SEPARATOR_UNIX); + } + path += SIGNAL_FILE_NAME; + signalFilePath = path; + if (ftpHelper.isFileExist(path)) { + LOG.info("signal file: {} exits, wait for the consuming of downstream...", signalFilePath); + do { + try { + Thread.sleep(WAIT_SIGNAL_SLEEP_INTERVAL); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + throw DataXException.asDataXException(FtpWriterErrorCode.RUNTIME_EXCEPTION, + "interrupted while waiting for the consuming of downstream"); + } + } while ((ftpHelper.isFileExist(path))); + } + } + } + + private void addSignal(){ + if(StringUtils.isNotEmpty(signalFilePath) && this.writerSliceConfig.getBool(Key.TRANSIT, true)){ + //empty signal file + ftpHelper.getOutputStream(signalFilePath); + } + } + + private void addMetaSchemaToFile(String content){ + String path = this.writerSliceConfig.getString(Key.PATH); + if(!path.endsWith(String.valueOf(IOUtils.DIR_SEPARATOR_UNIX))){ + path += String.valueOf(IOUtils.DIR_SEPARATOR_UNIX); + } + String metaPath = this.writerSliceConfig.getString(Key.META_FILE_PATH, DEFAULT_META_FILE_PATH); + path += metaPath; + try { + if(ftpHelper.isFileExist(path)){ + ftpHelper.deleteFiles(Collections.singleton(path)); + } + ByteArrayInputStream inputStream = new ByteArrayInputStream( + content.getBytes(this.writerSliceConfig.getString(com.alibaba.datax.plugin.unstructuredstorage.writer.Key.ENCODING, + "UTF-8"))); + OutputStream outputStream = ftpHelper.getOutputStream(path); + byte[] buffer = new byte[8 * 1024]; + int size; + while((size = inputStream.read(buffer)) > 0){ + outputStream.write(buffer, 0, size); + } + //close simplify + inputStream.close(); + 
outputStream.close(); + } catch (IOException e) { + throw DataXException.asDataXException(FtpWriterErrorCode.FAIL_SYNC_METASCHEMA, e.getMessage(), e); + } + } + } + + public static class Task extends Writer.Task { + private static final Logger LOG = LoggerFactory.getLogger(Task.class); + + private Configuration writerSliceConfig; + + private String tempPath; + private String fileName; + private String suffix; + + private FtpConnParams connParams; + + private IFtpHelper ftpHelper = null; + + @Override + public void init() { + this.writerSliceConfig = this.getPluginJobConf(); + this.tempPath = this.writerSliceConfig + .getString(com.alibaba.datax.plugin.unstructuredstorage.writer.Key.TEMP_PATH, ""); + this.fileName = this.writerSliceConfig + .getString(com.alibaba.datax.plugin.unstructuredstorage.writer.Key.FILE_NAME, ""); + this.suffix = this.writerSliceConfig + .getString(com.alibaba.datax.plugin.unstructuredstorage.writer.Key.SUFFIX); + this.connParams = FtpConnParams.compose(connParams -> { + connParams.setHost(this.writerSliceConfig.getString(Key.HOST)); + connParams.setPort(this.writerSliceConfig.getInt(Key.PORT)); + connParams.setUsername(this.writerSliceConfig.getString(Key.USERNAME)); + connParams.setPassword(this.writerSliceConfig.getString(Key.PASSWORD, "")); + connParams.setPrvKeyPath(this.writerSliceConfig.getString(Key.PRV_KEY_PATH, "")); + connParams.setTimeout(this.writerSliceConfig.getInt(Key.TIMEOUT, + Constant.DEFAULT_TIMEOUT)); + connParams.setProtocol(this.writerSliceConfig.getString(Key.PROTOCOL)); + }); + if ("sftp".equalsIgnoreCase(this.connParams.getProtocol())) { + this.ftpHelper = new SftpHelperImpl(); + } else if ("ftp".equalsIgnoreCase(this.connParams.getProtocol())) { + this.ftpHelper = new StandardFtpHelperImpl(); + } + loginFtpWithRetry(ftpHelper, this.connParams, LOG); + } + + @Override + public void prepare() { + + } + + @Override + public void startWrite(RecordReceiver lineReceiver) { + LOG.info("begin do write..."); + String fileFullPath = UnstructuredStorageWriterUtil.buildFilePath( + this.tempPath, this.fileName, this.suffix); + LOG.info(String.format("write to file : [%s]", fileFullPath)); + + OutputStream outputStream = null; + try { + ftpHelper.mkDirRecursive(fileFullPath.substring(0, + StringUtils.lastIndexOf(fileFullPath, IOUtils.DIR_SEPARATOR))); + outputStream = this.ftpHelper.getOutputStream(fileFullPath); + String encoding = writerSliceConfig.getString(com.alibaba.datax.plugin.unstructuredstorage.writer.Key.ENCODING, + com.alibaba.datax.plugin.unstructuredstorage.writer.Constant.DEFAULT_ENCODING); + String compress = writerSliceConfig.getString(com.alibaba.datax.plugin.unstructuredstorage.writer.Key.COMPRESS, ""); + if("zip".equalsIgnoreCase(compress)){ + outputStream = new ZipCollectOutputStream(fileName.substring(0, fileName.lastIndexOf(".")), + outputStream, encoding); + } + UnstructuredStorageWriterUtil.writeToStream(lineReceiver, + outputStream, this.writerSliceConfig, this.fileName, + this.getTaskPluginCollector()); + } catch (Exception e) { + throw DataXException.asDataXException( + FtpWriterErrorCode.WRITE_FILE_IO_ERROR, + String.format("无法创建待写文件 : [%s]", this.fileName), e); + } finally { + IOUtils.closeQuietly(outputStream); + } + LOG.info("end do write"); + } + + + @Override + public void startWrite(ChannelInput channelInput) { + LOG.info("begin do write form stream channel"); + try{ + InputStream inputStream; + while((inputStream = channelInput.nextStream()) != null){ + StreamMeta metaData = 
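+ // stream-channel mode: each incoming stream carries its own metadata
+ // (file name plus path relative to the source root), which is used below
+ // to recreate the same directory layout under the temp path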
channelInput.streamMetaData(this.writerSliceConfig + .getString(com.alibaba.datax.plugin.unstructuredstorage.writer.Key.ENCODING, "UTF-8")); + LOG.info("begin do read input stream, name : " + metaData.getName() + ", relativePath: " + metaData.getRelativePath()); + String relativePath = metaData.getRelativePath(); + if(StringUtils.isNotBlank(fileName)){ + //modify the relativePath + relativePath = relativePath.substring(0, relativePath.lastIndexOf(IOUtils.DIR_SEPARATOR) + 1) + + fileName + "_" + metaData.getName(); + } + String fileFullPath = UnstructuredStorageWriterUtil.buildFilePath(this.tempPath, relativePath, ""); + ftpHelper.mkDirRecursive(fileFullPath.substring(0, + StringUtils.lastIndexOf(fileFullPath, IOUtils.DIR_SEPARATOR))); + OutputStream outputStream = ftpHelper.getOutputStream(fileFullPath); + try{ + UnstructuredStorageWriterUtil.writeToStream(inputStream, outputStream, + this.writerSliceConfig); + }finally{ + IOUtils.closeQuietly(outputStream); + } + } + } catch (IOException e){ + throw DataXException.asDataXException(FrameworkErrorCode.CHANNEL_STREAM_ERROR, e); + } + LOG.info("end to write from stream channel"); + } + + @Override + public void post() { + + } + + @Override + public void destroy() { + try { + this.ftpHelper.logoutFtpServer(); + } catch (Exception e) { + String message = String + .format("关闭与ftp服务器连接失败, host:%s, username:%s, port:%s, errorMessage:%s", + this.connParams.getHost(), this.connParams.getUsername(), this.connParams.getPort(), e.getMessage()); + LOG.error(message, e); + } + } + } + +} diff --git a/exchangis-engines/engines/datax/datax-ftpwriter/src/main/java/com/alibaba/datax/plugin/writer/ftpwriter/FtpWriterErrorCode.java b/exchangis-engines/engines/datax/datax-ftpwriter/src/main/java/com/alibaba/datax/plugin/writer/ftpwriter/FtpWriterErrorCode.java new file mode 100644 index 000000000..a9cc527e0 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-ftpwriter/src/main/java/com/alibaba/datax/plugin/writer/ftpwriter/FtpWriterErrorCode.java @@ -0,0 +1,55 @@ +package com.alibaba.datax.plugin.writer.ftpwriter; + +import com.alibaba.datax.common.spi.ErrorCode; + +public enum FtpWriterErrorCode implements ErrorCode { + + REQUIRED_VALUE("FtpWriter-00", "您缺失了必须填写的参数值."), + ILLEGAL_VALUE("FtpWriter-01", "您填写的参数值不合法."), + MIXED_INDEX_VALUE("FtpWriter-02", "您的列信息配置同时包含了index,value."), + NO_INDEX_VALUE("FtpWriter-03", "您明确的配置列信息,但未填写相应的index,value."), + + FILE_NOT_EXISTS("FtpWriter-04", "您配置的目录文件路径不存在或者没有权限读取."), + OPEN_FILE_WITH_CHARSET_ERROR("FtpWriter-05", "您配置的文件编码和实际文件编码不符合."), + OPEN_FILE_ERROR("FtpWriter-06", "您配置的文件在打开时异常."), + WRITE_FILE_IO_ERROR("FtpWriter-07", "您配置的文件在读取时出现IO异常."), + SECURITY_NOT_ENOUGH("FtpWriter-08", "您缺少权限执行相应的文件操作."), + CONFIG_INVALID_EXCEPTION("FtpWriter-09", "您的参数配置错误."), + RUNTIME_EXCEPTION("FtpWriter-10", "出现运行时异常, 请联系我们"), + EMPTY_DIR_EXCEPTION("FtpWriter-11", "您尝试读取的文件目录为空."), + + FAIL_LOGIN("FtpWriter-12", "登录失败,无法与ftp服务器建立连接."), + FAIL_DISCONNECT("FtpWriter-13", "关闭ftp连接失败,无法与ftp服务器断开连接."), + COMMAND_FTP_IO_EXCEPTION("FtpWriter-14", "与ftp服务器连接异常."), + OUT_MAX_DIRECTORY_LEVEL("FtpWriter-15", "超出允许的最大目录层数."), + LINK_FILE("FtpWriter-16", "您尝试读取的文件为链接文件."), + COMMAND_FTP_ENCODING_EXCEPTION("FtpWriter-17", "与ftp服务器连接,使用指定编码异常."), + FAIL_LOGOUT("FtpWriter-18", "登出失败,关闭与ftp服务器建立连接失败,但这不影响任务同步."), + FAIL_SYNC_METASCHEMA("FtpWriter-19", "Fail to sync meta schema information"); + + + private final String code; + private final String description; + + private FtpWriterErrorCode(String code, String description) { + this.code = code; 
+ this.description = description; + } + + @Override + public String getCode() { + return this.code; + } + + @Override + public String getDescription() { + return this.description; + } + + @Override + public String toString() { + return String.format("Code:[%s], Description:[%s].", this.code, + this.description); + } + +} diff --git a/exchangis-engines/engines/datax/datax-ftpwriter/src/main/java/com/alibaba/datax/plugin/writer/ftpwriter/Key.java b/exchangis-engines/engines/datax/datax-ftpwriter/src/main/java/com/alibaba/datax/plugin/writer/ftpwriter/Key.java new file mode 100644 index 000000000..fa957d70f --- /dev/null +++ b/exchangis-engines/engines/datax/datax-ftpwriter/src/main/java/com/alibaba/datax/plugin/writer/ftpwriter/Key.java @@ -0,0 +1,15 @@ +package com.alibaba.datax.plugin.writer.ftpwriter; + +public class Key { + static final String PROTOCOL = "protocol"; + static final String HOST = "host"; + static final String USERNAME = "username"; + static final String PASSWORD = "password"; + static final String PORT = "port"; + static final String TIMEOUT = "timeout"; + static final String CONNECTPATTERN = "connectPattern"; + static final String PATH = "path"; + static final String PRV_KEY_PATH = "keyfilepath"; + static final String META_FILE_PATH = "metaPath"; + static final String TRANSIT = "transit"; +} diff --git a/exchangis-engines/engines/datax/datax-ftpwriter/src/main/java/com/alibaba/datax/plugin/writer/ftpwriter/util/Constant.java b/exchangis-engines/engines/datax/datax-ftpwriter/src/main/java/com/alibaba/datax/plugin/writer/ftpwriter/util/Constant.java new file mode 100644 index 000000000..4a735c666 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-ftpwriter/src/main/java/com/alibaba/datax/plugin/writer/ftpwriter/util/Constant.java @@ -0,0 +1,16 @@ +package com.alibaba.datax.plugin.writer.ftpwriter.util; + + +public class Constant { + public static final int DEFAULT_FTP_PORT = 21; + + public static final int DEFAULT_SFTP_PORT = 22; + + public static final int DEFAULT_TIMEOUT = 60000; + + public static final int DEFAULT_MAX_TRAVERSAL_LEVEL = 100; + + public static final String DEFAULT_FTP_CONNECT_PATTERN = "PASV"; + + public static final String CONTROL_ENCODING = "utf8"; +} diff --git a/exchangis-engines/engines/datax/datax-ftpwriter/src/main/java/com/alibaba/datax/plugin/writer/ftpwriter/util/FtpConnParams.java b/exchangis-engines/engines/datax/datax-ftpwriter/src/main/java/com/alibaba/datax/plugin/writer/ftpwriter/util/FtpConnParams.java new file mode 100644 index 000000000..b54531941 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-ftpwriter/src/main/java/com/alibaba/datax/plugin/writer/ftpwriter/util/FtpConnParams.java @@ -0,0 +1,101 @@ +package com.alibaba.datax.plugin.writer.ftpwriter.util; + + +import java.util.function.Consumer; + +/** + * @author davidhua + * 2019/7/4 + */ +public class FtpConnParams { + + private String protocol; + + private String host; + + private int port; + + private int timeout; + + private String username; + + private String prvKeyPath; + + private String password; + + private String connectPattern; + + private FtpConnParams(){ + + } + + public static FtpConnParams compose(Consumer function){ + FtpConnParams ftpConnParams = new FtpConnParams(); + function.accept(ftpConnParams); + return ftpConnParams; + } + + public String getProtocol() { + return protocol; + } + + public void setProtocol(String protocol) { + this.protocol = protocol; + } + + public String getHost() { + return host; + } + + public void setHost(String host) { + 
this.host = host; + } + + public int getPort() { + return port; + } + + public void setPort(int port) { + this.port = port; + } + + public String getUsername() { + return username; + } + + public void setUsername(String username) { + this.username = username; + } + + public String getPrvKeyPath() { + return prvKeyPath; + } + + public void setPrvKeyPath(String prvKeyPath) { + this.prvKeyPath = prvKeyPath; + } + + public String getPassword() { + return password; + } + + public void setPassword(String password) { + this.password = password; + } + + public String getConnectPattern() { + return connectPattern; + } + + public void setConnectPattern(String connectPattern) { + this.connectPattern = connectPattern; + } + + public int getTimeout() { + return timeout; + } + + public void setTimeout(int timeout) { + this.timeout = timeout; + } +} diff --git a/exchangis-engines/engines/datax/datax-ftpwriter/src/main/java/com/alibaba/datax/plugin/writer/ftpwriter/util/IFtpHelper.java b/exchangis-engines/engines/datax/datax-ftpwriter/src/main/java/com/alibaba/datax/plugin/writer/ftpwriter/util/IFtpHelper.java new file mode 100644 index 000000000..52ea321a7 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-ftpwriter/src/main/java/com/alibaba/datax/plugin/writer/ftpwriter/util/IFtpHelper.java @@ -0,0 +1,51 @@ +package com.alibaba.datax.plugin.writer.ftpwriter.util; + +import java.io.OutputStream; +import java.util.List; +import java.util.Set; + +public interface IFtpHelper { + + /** + * 使用被动方式 + */ + void loginFtpServer(FtpConnParams ftpConnParams); + + void logoutFtpServer(); + + /** + * warn: 不支持递归创建, 比如 mkdir -p + */ + void mkdir(String directoryPath); + + /** + * 支持目录递归创建 + */ + void mkDirRecursive(String directoryPath); + + OutputStream getOutputStream(String filePath); + + String getRemoteFileContent(String filePath); + + Set getAllFilesInDir(String dir, String prefixFileName, boolean recurse, boolean fullFileName); + + /** + * delete files and empty directory + * @param filesToDelete + */ + void deleteFiles(Set filesToDelete); + + void completePendingCommand(); + + void rename(String srcPath, String destPath); + + void moveToDirectory(List srcPaths, String destDirPath); + + /** + * if the file exist + * @param filePath + * @return + */ + boolean isFileExist(String filePath); + +} diff --git a/exchangis-engines/engines/datax/datax-ftpwriter/src/main/java/com/alibaba/datax/plugin/writer/ftpwriter/util/SftpHelperImpl.java b/exchangis-engines/engines/datax/datax-ftpwriter/src/main/java/com/alibaba/datax/plugin/writer/ftpwriter/util/SftpHelperImpl.java new file mode 100644 index 000000000..7a33554eb --- /dev/null +++ b/exchangis-engines/engines/datax/datax-ftpwriter/src/main/java/com/alibaba/datax/plugin/writer/ftpwriter/util/SftpHelperImpl.java @@ -0,0 +1,403 @@ +package com.alibaba.datax.plugin.writer.ftpwriter.util; + +import com.alibaba.datax.common.exception.DataXException; +import com.alibaba.datax.plugin.unstructuredstorage.writer.UnstructuredStorageWriterUtil; +import com.alibaba.datax.plugin.writer.ftpwriter.FtpWriterErrorCode; +import com.jcraft.jsch.*; +import com.jcraft.jsch.ChannelSftp.LsEntry; +import com.webank.wedatasphere.exchangis.datax.common.CryptoUtils; +import com.webank.wedatasphere.exchangis.datax.util.Json; +import org.apache.commons.io.IOUtils; +import org.apache.commons.lang3.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.OutputStream; +import java.util.*; + 
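+/*
+ * A minimal usage sketch of the helper API defined above (hypothetical, for
+ * illustration only: host, paths and user are made up, key-based auth is chosen
+ * so no encrypted password has to be composed, and the enclosing method is
+ * assumed to declare throws IOException):
+ *
+ *   FtpConnParams params = FtpConnParams.compose(p -> {
+ *       p.setProtocol("sftp");
+ *       p.setHost("sftp.example.com");
+ *       p.setPort(22);
+ *       p.setUsername("datax");
+ *       p.setPrvKeyPath("/home/datax/.ssh/id_rsa");
+ *       p.setTimeout(60000);
+ *   });
+ *   IFtpHelper helper = new SftpHelperImpl();
+ *   helper.loginFtpServer(params);
+ *   try {
+ *       helper.mkDirRecursive("/data/out");
+ *       try (OutputStream out = helper.getOutputStream("/data/out/part-0")) {
+ *           out.write("hello".getBytes(StandardCharsets.UTF_8));
+ *       }
+ *   } finally {
+ *       helper.logoutFtpServer();
+ *   }
+ */
+/**
+ * SFTP implementation of {@link IFtpHelper} built on JSch: {@code loginFtpServer}
+ * opens a {@link Session} plus an SFTP channel (private-key or password auth), and
+ * the remaining methods map onto SFTP commands (mkdir, put, get, rm, rename, lstat).
+ */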
+public class SftpHelperImpl implements IFtpHelper { + private static final Logger LOG = LoggerFactory + .getLogger(SftpHelperImpl.class); + + private static final String FILE_NOT_EXIST_ = "no such file"; + + private Session session = null; + private ChannelSftp channelSftp = null; + + @Override + public void loginFtpServer(FtpConnParams connParams) { + JSch jsch = new JSch(); + try { + if(StringUtils.isNotBlank(connParams.getPrvKeyPath())){ + jsch.addIdentity(connParams.getPrvKeyPath()); + } + this.session = jsch.getSession(connParams.getUsername(), connParams.getHost(), + connParams.getPort()); + if (this.session == null) { + throw DataXException + .asDataXException(FtpWriterErrorCode.FAIL_LOGIN, + "创建ftp连接this.session失败,无法通过sftp与服务器建立链接,请检查主机名和用户名是否正确."); + } + if(StringUtils.isNotBlank(connParams.getPassword())){ + this.session.setPassword((String) CryptoUtils.string2Object(connParams.getPassword())); + } + Properties config = new Properties(); + config.put("StrictHostKeyChecking", "no"); + config.put("PreferredAuthentications", "publickey,password"); + this.session.setConfig(config); + this.session.setTimeout(connParams.getTimeout()); + this.session.connect(); + this.channelSftp = (ChannelSftp) this.session.openChannel("sftp"); + this.channelSftp.connect(); + } catch (JSchException | ClassNotFoundException | IOException e) { + if (null != e.getCause()) { + String cause = e.getCause().toString(); + String unknownHostException = "java.net.UnknownHostException: " + + connParams.getHost(); + String illegalArgumentException = "java.lang.IllegalArgumentException: port out of range:" + + connParams.getPort(); + String wrongPort = "java.net.ConnectException: Connection refused"; + if (unknownHostException.equals(cause)) { + String message = String + .format("请确认ftp服务器地址是否正确,无法连接到地址为: [%s] 的ftp服务器, errorMessage:%s", + connParams.getHost(), e.getMessage()); + LOG.error(message); + throw DataXException.asDataXException( + FtpWriterErrorCode.FAIL_LOGIN, message, e); + } else if (illegalArgumentException.equals(cause) + || wrongPort.equals(cause)) { + String message = String.format( + "请确认连接ftp服务器端口是否正确,错误的端口: [%s], errorMessage:%s", + connParams.getPort(), e.getMessage()); + LOG.error(message); + throw DataXException.asDataXException( + FtpWriterErrorCode.FAIL_LOGIN, message, e); + }else{ + String message = "cannot login to the sftp server, please check your configuration of connecting"; + LOG.error(message); + throw DataXException.asDataXException(FtpWriterErrorCode.FAIL_LOGIN, message, e); + } + } else { + String message = String + .format("与ftp服务器建立连接失败,请检查主机、用户名、密码是否正确, host:%s, port:%s, username:%s, errorMessage:%s", + connParams.getHost(), connParams.getPort(), connParams.getUsername(), e.getMessage()); + LOG.error(message); + throw DataXException.asDataXException( + FtpWriterErrorCode.FAIL_LOGIN, message); + } + } + + } + + @Override + public void logoutFtpServer() { + if (this.channelSftp != null) { + this.channelSftp.disconnect(); + this.channelSftp = null; + } + if (this.session != null) { + this.session.disconnect(); + this.session = null; + } + } + + @Override + public void mkdir(String directoryPath) { + boolean isDirExist = false; + try { + SftpATTRS sftpATTRS = this.channelSftp.lstat(directoryPath); + isDirExist = sftpATTRS.isDir(); + } catch (SftpException e) { + if (e.getMessage().toLowerCase().equals(FILE_NOT_EXIST_)) { + LOG.warn(String.format( + "您的配置项path:[%s]不存在,将尝试进行目录创建, errorMessage:%s", + directoryPath, e.getMessage()), e); + isDirExist = false; + } + } + if 
(!isDirExist) { + try { + // warn 检查mkdir -p + this.channelSftp.mkdir(directoryPath); + } catch (SftpException e) { + String message = String + .format("创建目录:%s时发生I/O异常,请确认与ftp服务器的连接正常,拥有目录创建权限, errorMessage:%s", + directoryPath, e.getMessage()); + LOG.error(message, e); + throw DataXException + .asDataXException( + FtpWriterErrorCode.COMMAND_FTP_IO_EXCEPTION, + message, e); + } + } + } + + @Override + public void mkDirRecursive(String directoryPath) { + boolean isDirExist = false; + try { + SftpATTRS sftpATTRS = this.channelSftp.lstat(directoryPath); + isDirExist = sftpATTRS.isDir(); + } catch (SftpException e) { + if (e.getMessage().toLowerCase().equals(FILE_NOT_EXIST_)) { + LOG.warn(String.format( + "您的配置项path:[%s]不存在,将尝试进行目录创建, errorMessage:%s", + directoryPath, e.getMessage()), e); + isDirExist = false; + } + } + if (!isDirExist) { + StringBuilder dirPath = new StringBuilder(); + dirPath.append(IOUtils.DIR_SEPARATOR_UNIX); + String[] dirSplit = StringUtils.split(directoryPath, IOUtils.DIR_SEPARATOR_UNIX); + try { + // ftp server不支持递归创建目录,只能一级一级创建 + for (String dirName : dirSplit) { + if(StringUtils.isNotBlank(dirName)) { + dirPath.append(dirName); + mkDirSingleHierarchy(dirPath.toString()); + dirPath.append(IOUtils.DIR_SEPARATOR_UNIX); + } + } + } catch (SftpException e) { + String message = String + .format("创建目录:%s时发生I/O异常,请确认与ftp服务器的连接正常,拥有目录创建权限, errorMessage:%s", + directoryPath, e.getMessage()); + LOG.error(message, e); + throw DataXException + .asDataXException( + FtpWriterErrorCode.COMMAND_FTP_IO_EXCEPTION, + message, e); + } + } + } + + public boolean mkDirSingleHierarchy(String directoryPath) throws SftpException { + boolean isDirExist = false; + try { + SftpATTRS sftpATTRS = this.channelSftp.lstat(directoryPath); + isDirExist = sftpATTRS.isDir(); + } catch (SftpException e) { + if (!isDirExist) { + LOG.info(String.format("正在逐级创建目录 [%s]", directoryPath)); + this.channelSftp.mkdir(directoryPath); + return true; + } + } + if (!isDirExist) { + LOG.info(String.format("正在逐级创建目录 [%s]", directoryPath)); + this.channelSftp.mkdir(directoryPath); + } + return true; + } + + @Override + public OutputStream getOutputStream(String filePath) { + try { + String parentDir = filePath.substring(0, + StringUtils.lastIndexOf(filePath, IOUtils.DIR_SEPARATOR)); + this.channelSftp.cd(parentDir); + this.printWorkingDirectory(); + OutputStream writeOutputStream = this.channelSftp.put(filePath, + ChannelSftp.APPEND); + String message = String.format( + "打开FTP文件[%s]获取写出流时出错,请确认文件%s有权限创建,有权限写出等", filePath, + filePath); + if (null == writeOutputStream) { + throw DataXException.asDataXException( + FtpWriterErrorCode.OPEN_FILE_ERROR, message); + } + return writeOutputStream; + } catch (SftpException e) { + String message = String.format( + "写出文件[%s] 时出错,请确认文件%s有权限写出, errorMessage:%s", filePath, + filePath, e.getMessage()); + LOG.error(message); + throw DataXException.asDataXException( + FtpWriterErrorCode.OPEN_FILE_ERROR, message); + } + } + + @Override + public String getRemoteFileContent(String filePath) { + try { + this.completePendingCommand(); + this.printWorkingDirectory(); + String parentDir = filePath.substring(0, + StringUtils.lastIndexOf(filePath, IOUtils.DIR_SEPARATOR)); + this.channelSftp.cd(parentDir); + this.printWorkingDirectory(); + ByteArrayOutputStream outputStream = new ByteArrayOutputStream(22); + this.channelSftp.get(filePath, outputStream); + String result = outputStream.toString(); + IOUtils.closeQuietly(outputStream); + return result; + } catch (SftpException e) { + String message = 
String.format( + "写出文件[%s] 时出错,请确认文件%s有权限写出, errorMessage:%s", filePath, + filePath, e.getMessage()); + LOG.error(message); + throw DataXException.asDataXException( + FtpWriterErrorCode.OPEN_FILE_ERROR, message); + } + } + + @Override + public Set getAllFilesInDir(String dir, String prefixFileName, boolean recurse, boolean fullFileName) { + Set allFilesWithPointedPrefix = new HashSet(); + try { + @SuppressWarnings("rawtypes") + Vector allFiles = this.channelSftp.ls(dir); + LOG.debug(String.format("ls: %s", Json.toJson(allFiles, null))); + StringBuilder dirBuilder = new StringBuilder(dir); + if(!dirBuilder.toString().endsWith(String.valueOf(IOUtils.DIR_SEPARATOR))){ + dirBuilder.append(IOUtils.DIR_SEPARATOR); + } + for (Object allFile : allFiles) { + LsEntry le = (LsEntry) allFile; + String strName = le.getFilename(); + if(hasPrefixFileName(strName, prefixFileName)){ + if (".".equals(strName) || "..".equals(strName)) { + continue; + } + if(fullFileName){ + allFilesWithPointedPrefix.add(dirBuilder.toString() + strName); + }else{ + allFilesWithPointedPrefix.add(strName); + } + if (this.channelSftp.stat(dir + IOUtils.DIR_SEPARATOR + strName).isDir()) { + if (recurse) { + String parent = dirBuilder.toString() + strName; + Set childWithPrefixFiles = getAllFilesInDir(parent, prefixFileName, true, fullFileName); + for (String childWithPrefixFile : childWithPrefixFiles) { + if(fullFileName){ + allFilesWithPointedPrefix.add(parent + IOUtils.DIR_SEPARATOR + childWithPrefixFile); + }else { + allFilesWithPointedPrefix.add(strName + IOUtils.DIR_SEPARATOR + childWithPrefixFile); + } + } + } + } + } + } + } catch (SftpException e) { + String message = String + .format("获取path:[%s] 下文件列表时发生I/O异常,请确认与ftp服务器的连接正常,拥有目录ls权限, errorMessage:%s", + dir, e.getMessage()); + LOG.error(message); + throw DataXException.asDataXException( + FtpWriterErrorCode.COMMAND_FTP_IO_EXCEPTION, message, e); + } + return allFilesWithPointedPrefix; + } + + @Override + public void deleteFiles(Set filesToDelete) { + String eachFile = null; + try { + for (String each : filesToDelete) { + eachFile = each; + if(!this.channelSftp.stat(each).isDir()) { + LOG.info(String.format("delete file [%s].", each)); + this.channelSftp.rm(each); + }else{ + Set subFiles = getAllFilesInDir(each, "", false, true); + deleteFiles(subFiles); + LOG.info(String.format("delete empty directory [%s]", each)); + this.channelSftp.rmdir(each); + } + } + } catch (SftpException e) { + String message = String.format( + "删除文件:[%s] 时发生异常,请确认指定文件有删除权限,以及网络交互正常, errorMessage:%s", + eachFile, e.getMessage()); + LOG.error(message); + throw DataXException.asDataXException( + FtpWriterErrorCode.COMMAND_FTP_IO_EXCEPTION, message, e); + } + } + + private void printWorkingDirectory() { + try { + LOG.info(String.format("current working directory:%s", + this.channelSftp.pwd())); + } catch (Exception e) { + LOG.warn(String.format("printWorkingDirectory error:%s", + e.getMessage())); + } + } + + @Override + public void completePendingCommand() { + } + + @Override + public void rename(String srcPath, String destPath) { + try { + this.channelSftp.rename(srcPath, destPath); + } catch (SftpException e) { + String message = String.format("rename srcPath:%s to destPath:%s error, message:%s ,please check your internet connection", + srcPath, destPath, e.getMessage()); + LOG.error(message); + throw DataXException.asDataXException(FtpWriterErrorCode.COMMAND_FTP_IO_EXCEPTION, message, e); + } + } + + @Override + public void moveToDirectory(List srcPaths, String destDirPath) { + try { + 
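+ // ensure the destination directory exists, then rename plain files directly
+ // and recurse into sub-directories; a rename on the same SFTP server is cheap,
+ // so moving the finished temp files into the target directory acts as the
+ // final "commit" step of a job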
mkdir(destDirPath); + for (String srcPath : srcPaths) { + SftpATTRS sftpATTRS = this.channelSftp.lstat(srcPath); + if(sftpATTRS.isDir()) { + if(srcPath.endsWith(String.valueOf(IOUtils.DIR_SEPARATOR_UNIX))){ + srcPath = srcPath.substring(0, srcPath.length() - 1); + } + Set moveFiles = getAllFilesInDir(srcPath, "", false, true); + moveToDirectory(new ArrayList<>(moveFiles), + UnstructuredStorageWriterUtil.buildFilePath(destDirPath, + srcPath.substring(srcPath.lastIndexOf(IOUtils.DIR_SEPARATOR_UNIX)), "")); + + }else{ + rename(srcPath, UnstructuredStorageWriterUtil.buildFilePath(destDirPath, + srcPath.substring(srcPath.lastIndexOf(IOUtils.DIR_SEPARATOR_UNIX)), "")); + } + } + }catch(SftpException e){ + + } + } + + @Override + public boolean isFileExist(String filePath) { + boolean isExitFlag = false; + try{ + SftpATTRS sftpATTRS = channelSftp.lstat(filePath); + if(sftpATTRS.getSize() >= 0){ + isExitFlag = true; + } + }catch(SftpException e){ + if (!FILE_NOT_EXIST_.equals(e.getMessage().toLowerCase())) { + String message = String.format("获取文件:[%s] 属性时发生I/O异常,请确认与ftp服务器的连接正常", filePath); + LOG.error(message); + throw DataXException.asDataXException(FtpWriterErrorCode.COMMAND_FTP_IO_EXCEPTION, message, e); + } + } + return isExitFlag; + } + + /** + * check if originalName stat with prefixFileName + * @param originalName + * @param prefixFileName + * @return + */ + private boolean hasPrefixFileName(String originalName, String prefixFileName){ + if(StringUtils.isBlank(prefixFileName)){ + return true; + } + return originalName != null && originalName.startsWith(prefixFileName); + } +} diff --git a/exchangis-engines/engines/datax/datax-ftpwriter/src/main/java/com/alibaba/datax/plugin/writer/ftpwriter/util/StandardFtpHelperImpl.java b/exchangis-engines/engines/datax/datax-ftpwriter/src/main/java/com/alibaba/datax/plugin/writer/ftpwriter/util/StandardFtpHelperImpl.java new file mode 100644 index 000000000..13bc5a59a --- /dev/null +++ b/exchangis-engines/engines/datax/datax-ftpwriter/src/main/java/com/alibaba/datax/plugin/writer/ftpwriter/util/StandardFtpHelperImpl.java @@ -0,0 +1,385 @@ +package com.alibaba.datax.plugin.writer.ftpwriter.util; + +import com.alibaba.datax.common.exception.DataXException; +import com.alibaba.datax.plugin.writer.ftpwriter.FtpWriterErrorCode; +import com.webank.wedatasphere.exchangis.datax.common.CryptoUtils; +import com.webank.wedatasphere.exchangis.datax.util.Json; +import org.apache.commons.io.IOUtils; +import org.apache.commons.lang3.StringUtils; +import org.apache.commons.net.ftp.FTPClient; +import org.apache.commons.net.ftp.FTPFile; +import org.apache.commons.net.ftp.FTPReply; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.OutputStream; +import java.net.UnknownHostException; +import java.util.HashSet; +import java.util.List; +import java.util.Set; + +public class StandardFtpHelperImpl implements IFtpHelper { + private static final Logger LOG = LoggerFactory + .getLogger(StandardFtpHelperImpl.class); + FTPClient ftpClient = null; + + @Override + public void loginFtpServer(FtpConnParams connParams) { + this.ftpClient = new FTPClient(); + try { + this.ftpClient.setControlEncoding("UTF-8"); + this.ftpClient.setDefaultTimeout(connParams.getTimeout()); + this.ftpClient.setConnectTimeout(connParams.getTimeout()); + this.ftpClient.setDataTimeout(connParams.getTimeout()); + + // 连接登录 + this.ftpClient.connect(connParams.getHost(), connParams.getPort()); + 
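+ // the configured password is not stored as plain text;
+ // CryptoUtils.string2Object below restores it before LOGIN is sent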
this.ftpClient.login(connParams.getUsername(), (String) CryptoUtils.string2Object(connParams.getPassword())); + + this.ftpClient.enterRemotePassiveMode(); + this.ftpClient.enterLocalPassiveMode(); + int reply = this.ftpClient.getReplyCode(); + if (!FTPReply.isPositiveCompletion(reply)) { + this.ftpClient.disconnect(); + String message = String + .format("与ftp服务器建立连接失败,host:%s, port:%s, username:%s, replyCode:%s", + connParams.getHost(), connParams.getPort(), connParams.getUsername(), reply); + LOG.error(message); + throw DataXException.asDataXException( + FtpWriterErrorCode.FAIL_LOGIN, message); + } + } catch (UnknownHostException e) { + String message = String.format( + "请确认ftp服务器地址是否正确,无法连接到地址为: [%s] 的ftp服务器, errorMessage:%s", + connParams.getHost(), e.getMessage()); + LOG.error(message); + throw DataXException.asDataXException( + FtpWriterErrorCode.FAIL_LOGIN, message, e); + } catch (IllegalArgumentException e) { + String message = String.format( + "请确认连接ftp服务器端口是否正确,错误的端口: [%s], errorMessage:%s", connParams.getPort(), + e.getMessage()); + LOG.error(message); + throw DataXException.asDataXException( + FtpWriterErrorCode.FAIL_LOGIN, message, e); + } catch (Exception e) { + String message = String + .format("与ftp服务器建立连接失败,host:%s, port:%s, username:%s, errorMessage:%s", + connParams.getHost(), connParams.getPort(), connParams.getUsername(), e.getMessage()); + LOG.error(message); + throw DataXException.asDataXException( + FtpWriterErrorCode.FAIL_LOGIN, message, e); + } + + } + + @Override + public void logoutFtpServer() { + if (this.ftpClient.isConnected()) { + try { + this.ftpClient.logout(); + } catch (IOException e) { + String message = String.format( + "与ftp服务器断开连接失败, errorMessage:%s", e.getMessage()); + LOG.error(message); + throw DataXException.asDataXException( + FtpWriterErrorCode.FAIL_DISCONNECT, message, e); + } finally { + if (this.ftpClient.isConnected()) { + try { + this.ftpClient.disconnect(); + } catch (IOException e) { + String message = String.format( + "与ftp服务器断开连接失败, errorMessage:%s", + e.getMessage()); + LOG.error(message); + throw DataXException.asDataXException( + FtpWriterErrorCode.FAIL_DISCONNECT, message, e); + } + } + this.ftpClient = null; + } + } + } + + @Override + public void mkdir(String directoryPath) { + String message = String.format("创建目录:%s时发生异常,请确认与ftp服务器的连接正常,拥有目录创建权限", + directoryPath); + try { + this.printWorkingDirectory(); + boolean isDirExist = this.ftpClient + .changeWorkingDirectory(directoryPath); + if (!isDirExist) { + int replayCode = this.ftpClient.mkd(directoryPath); + message = String + .format("%s,replayCode:%s", message, replayCode); + if (replayCode != FTPReply.COMMAND_OK + && replayCode != FTPReply.PATHNAME_CREATED) { + throw DataXException.asDataXException( + FtpWriterErrorCode.COMMAND_FTP_IO_EXCEPTION, + message); + } + } + } catch (IOException e) { + message = String.format("%s, errorMessage:%s", message, + e.getMessage()); + LOG.error(message); + throw DataXException.asDataXException( + FtpWriterErrorCode.COMMAND_FTP_IO_EXCEPTION, message, e); + } + } + + @Override + public void mkDirRecursive(String directoryPath) { + StringBuilder dirPath = new StringBuilder(); + dirPath.append(IOUtils.DIR_SEPARATOR_UNIX); + String[] dirSplit = StringUtils.split(directoryPath, IOUtils.DIR_SEPARATOR_UNIX); + String message = String.format("创建目录:%s时发生异常,请确认与ftp服务器的连接正常,拥有目录创建权限", directoryPath); + try { + // ftp server不支持递归创建目录,只能一级一级创建 + for (String dirName : dirSplit) { + if(StringUtils.isNotBlank(dirName)) { + dirPath.append(dirName); + boolean 
mkdirSuccess = mkDirSingleHierarchy(dirPath.toString()); + dirPath.append(IOUtils.DIR_SEPARATOR_UNIX); + if (!mkdirSuccess) { + throw DataXException.asDataXException( + FtpWriterErrorCode.COMMAND_FTP_IO_EXCEPTION, + message); + } + } + } + } catch (IOException e) { + message = String.format("%s, errorMessage:%s", message, + e.getMessage()); + LOG.error(message); + throw DataXException.asDataXException( + FtpWriterErrorCode.COMMAND_FTP_IO_EXCEPTION, message, e); + } + } + + public boolean mkDirSingleHierarchy(String directoryPath) throws IOException { + boolean isDirExist = this.ftpClient + .changeWorkingDirectory(directoryPath); + // 如果directoryPath目录不存在,则创建 + if (!isDirExist) { + int replayCode = this.ftpClient.mkd(directoryPath); + if (replayCode != FTPReply.COMMAND_OK + && replayCode != FTPReply.PATHNAME_CREATED) { + return false; + } + } + return true; + } + + @Override + public OutputStream getOutputStream(String filePath) { + try { + this.printWorkingDirectory(); + String parentDir = filePath.substring(0, + StringUtils.lastIndexOf(filePath, IOUtils.DIR_SEPARATOR)); + this.ftpClient.changeWorkingDirectory(parentDir); + this.printWorkingDirectory(); + // + OutputStream writeOutputStream = this.ftpClient + .appendFileStream(filePath); + String message = String.format( + "打开FTP文件[%s]获取写出流时出错,请确认文件%s有权限创建,有权限写出等", filePath, + filePath); + if (null == writeOutputStream) { + throw DataXException.asDataXException( + FtpWriterErrorCode.OPEN_FILE_ERROR, message); + } + + return writeOutputStream; + } catch (IOException e) { + String message = String.format( + "写出文件 : [%s] 时出错,请确认文件:[%s]存在且配置的用户有权限写, errorMessage:%s", + filePath, filePath, e.getMessage()); + LOG.error(message); + throw DataXException.asDataXException( + FtpWriterErrorCode.OPEN_FILE_ERROR, message); + } + } + + @Override + public String getRemoteFileContent(String filePath) { + try { + this.completePendingCommand(); + this.printWorkingDirectory(); + String parentDir = filePath.substring(0, + StringUtils.lastIndexOf(filePath, IOUtils.DIR_SEPARATOR)); + this.ftpClient.changeWorkingDirectory(parentDir); + this.printWorkingDirectory(); + ByteArrayOutputStream outputStream = new ByteArrayOutputStream(22); + this.ftpClient.retrieveFile(filePath, outputStream); + String result = outputStream.toString(); + IOUtils.closeQuietly(outputStream); + return result; + } catch (IOException e) { + String message = String.format( + "读取文件 : [%s] 时出错,请确认文件:[%s]存在且配置的用户有权限读取, errorMessage:%s", + filePath, filePath, e.getMessage()); + LOG.error(message); + throw DataXException.asDataXException( + FtpWriterErrorCode.OPEN_FILE_ERROR, message); + } + } + + @Override + public Set getAllFilesInDir(String dir, String prefixFileName, boolean recurse, boolean fullFileName) { + Set allFilesWithPointedPrefix = new HashSet(); + try { + boolean isDirExist = this.ftpClient.changeWorkingDirectory(dir); + if (!isDirExist) { + throw DataXException.asDataXException( + FtpWriterErrorCode.COMMAND_FTP_IO_EXCEPTION, + String.format("进入目录[%s]失败", dir)); + } + this.printWorkingDirectory(); + FTPFile[] fs = this.ftpClient.listFiles(dir); + LOG.debug(String.format("ls: %s", + Json.toJson(fs, null))); + StringBuilder dirBuilder = new StringBuilder(dir); + if(!dirBuilder.toString().endsWith(String.valueOf(IOUtils.DIR_SEPARATOR))){ + dirBuilder.append(IOUtils.DIR_SEPARATOR); + } + for (FTPFile ff : fs) { + String strName = ff.getName(); + if (".".equals(strName) || "..".equals(strName)) { + continue; + } + if(hasPrefixFileName(strName, prefixFileName)){ + if(fullFileName){ + 
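+ // fullFileName == true collects absolute paths; false collects
+ // names relative to the directory being listed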
allFilesWithPointedPrefix.add(dirBuilder.toString() + strName); + }else { + allFilesWithPointedPrefix.add(strName); + } + if(ff.isDirectory()){ + if (recurse) { + Set childWithPrefixFiles = getAllFilesInDir(dirBuilder.toString() + strName, prefixFileName, true, fullFileName); + for (String childWithPrefixFile : childWithPrefixFiles) { + if(fullFileName){ + allFilesWithPointedPrefix.add(dirBuilder.toString() + strName + IOUtils.DIR_SEPARATOR + childWithPrefixFile); + }else { + allFilesWithPointedPrefix.add(strName + IOUtils.DIR_SEPARATOR + childWithPrefixFile); + } + } + } + } + } + + } + } catch (IOException e) { + String message = String + .format("获取path:[%s] 下文件列表时发生I/O异常,请确认与ftp服务器的连接正常,拥有目录ls权限, errorMessage:%s", + dir, e.getMessage()); + LOG.error(message); + throw DataXException.asDataXException( + FtpWriterErrorCode.COMMAND_FTP_IO_EXCEPTION, message, e); + } + return allFilesWithPointedPrefix; + } + + @Override + public void deleteFiles(Set filesToDelete) { + String eachFile = null; + boolean deleteOk = false; + try { + this.printWorkingDirectory(); + for (String each : filesToDelete) { + LOG.info(String.format("delete file [%s].", each)); + eachFile = each; + deleteOk = this.ftpClient.deleteFile(each); + if (!deleteOk) { + String message = String.format( + "删除文件:[%s] 时失败,请确认指定文件有删除权限", eachFile); + throw DataXException.asDataXException( + FtpWriterErrorCode.COMMAND_FTP_IO_EXCEPTION, + message); + } + } + } catch (IOException e) { + String message = String.format( + "删除文件:[%s] 时发生异常,请确认指定文件有删除权限,以及网络交互正常, errorMessage:%s", + eachFile, e.getMessage()); + LOG.error(message); + throw DataXException.asDataXException( + FtpWriterErrorCode.COMMAND_FTP_IO_EXCEPTION, message, e); + } + } + + private void printWorkingDirectory() { + try { + LOG.info(String.format("current working directory:%s", + this.ftpClient.printWorkingDirectory())); + } catch (Exception e) { + LOG.warn(String.format("printWorkingDirectory error:%s", + e.getMessage())); + } + } + + @Override + public void completePendingCommand() { + /* + * Q:After I perform a file transfer to the server, + * printWorkingDirectory() returns null. A:You need to call + * completePendingCommand() after transferring the file. 
wiki: + * http://wiki.apache.org/commons/Net/FrequentlyAskedQuestions + */ + try { + boolean isOk = this.ftpClient.completePendingCommand(); + if (!isOk) { + throw DataXException.asDataXException( + FtpWriterErrorCode.COMMAND_FTP_IO_EXCEPTION, + "完成ftp completePendingCommand操作发生异常"); + } + } catch (IOException e) { + String message = String.format( + "完成ftp completePendingCommand操作发生异常, errorMessage:%s", + e.getMessage()); + LOG.error(message); + throw DataXException.asDataXException( + FtpWriterErrorCode.COMMAND_FTP_IO_EXCEPTION, message, e); + } + } + + @Override + public void rename(String srcPath, String destPath) { + try { + this.ftpClient.rename(srcPath, destPath); + } catch (IOException e) { + String message = String.format("rename srcPath:%s to destPath:%s error, message:%s ,please check your internet connection", + srcPath, destPath, e.getMessage()); + LOG.error(message); + throw DataXException.asDataXException(FtpWriterErrorCode.COMMAND_FTP_IO_EXCEPTION, message, e); + } + } + + @Override + public void moveToDirectory(List srcPaths, String destDirPath) { + //not support + } + + @Override + public boolean isFileExist(String filePath) { + //not support + return false; + } + + /** + * check if originalName stat with prefixFileName + * @param originalName + * @param prefixFileName + * @return + */ + private boolean hasPrefixFileName(String originalName, String prefixFileName){ + if(StringUtils.isBlank(prefixFileName)){ + return true; + } + return originalName != null && originalName.startsWith(prefixFileName); + } +} diff --git a/exchangis-engines/engines/datax/datax-ftpwriter/src/main/resources/plugin.json b/exchangis-engines/engines/datax/datax-ftpwriter/src/main/resources/plugin.json new file mode 100644 index 000000000..444ef8e87 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-ftpwriter/src/main/resources/plugin.json @@ -0,0 +1,7 @@ +{ + "name": "ftpwriter", + "class": "com.alibaba.datax.plugin.writer.ftpwriter.FtpWriter", + "description": "useScene: test. mechanism: use datax framework to transport data from ftp txt file. 
warn: The more you know about the data, the less problems you encounter.", + "developer": "alibaba" +} + diff --git a/exchangis-engines/engines/datax/datax-ftpwriter/src/main/resources/plugin_job_template.json b/exchangis-engines/engines/datax/datax-ftpwriter/src/main/resources/plugin_job_template.json new file mode 100644 index 000000000..d89cf7da3 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-ftpwriter/src/main/resources/plugin_job_template.json @@ -0,0 +1,24 @@ +{ + "name": "ftpwriter", + "parameter": { + "name": "ftpwriter", + "parameter": { + "protocol": "", + "host": "", + "port": "", + "username": "", + "password": "", + "timeout": "", + "connectPattern": "", + "path": "", + "fileName": "", + "writeMode": "", + "fieldDelimiter": "", + "encoding": "", + "nullFormat": "", + "dateFormat": "", + "fileFormat": "", + "header": [] + } + } +} \ No newline at end of file diff --git a/exchangis-engines/engines/datax/datax-hdfsreader/pom.xml b/exchangis-engines/engines/datax/datax-hdfsreader/pom.xml new file mode 100644 index 000000000..853f56e87 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-hdfsreader/pom.xml @@ -0,0 +1,157 @@ + + + + exchangis-engine-datax + com.webank.wedatasphere.exchangis + ${revision} + ../pom.xml + + 4.0.0 + datax-hdfsreader + jar + 3.0.0-Plus-2 + + 3.1.3 + 3.3.4 + 2.5.5 + 2.9.1 + + + + com.webank.wedatasphere.exchangis + datax-core + provided + + + org.apache.hadoop + hadoop-common + + + slf4j-log4j12 + org.slf4j + + + ${datax.engine.version} + + + org.slf4j + slf4j-api + provided + + + ch.qos.logback + logback-classic + provided + + + org.apache.hadoop + hadoop-hdfs + ${hadoop.version} + + + netty-all + io.netty + + + + + + io.netty + netty-all + 4.1.86.Final + + + org.apache.hadoop + hadoop-common + ${hadoop.version} + + + org.apache.hadoop + hadoop-yarn-common + ${hadoop.version} + + + org.apache.hadoop + hadoop-mapreduce-client-core + ${hadoop.version} + + + org.apache.hive + hive-exec + ${hive.version} + + + org.codehaus.groovy + groovy-all + + + zookeeper + org.apache.zookeeper + + + org.pentaho + pentaho-aggdesigner-algorithm + + + + + xalan + xalan + 2.7.1 + + + + org.apache.hbase + hbase-server + ${hbase.version} + + + org.apache.hadoop + hadoop-hdfs + + + + + xerces + xercesImpl + ${xerces.version} + + + + + + src/main/resources + + **/* + + + + + + org.apache.maven.plugins + maven-assembly-plugin + 2.2.1 + + + assemble + + single + + + install + + + + false + false + + ${basedir}/src/main/assembly/package.xml + + plugin + + + + + \ No newline at end of file diff --git a/exchangis-engines/engines/datax/datax-hdfsreader/src/main/assembly/package.xml b/exchangis-engines/engines/datax/datax-hdfsreader/src/main/assembly/package.xml new file mode 100644 index 000000000..2ddc27028 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-hdfsreader/src/main/assembly/package.xml @@ -0,0 +1,33 @@ + + hdfsreader + + dir + + false + + + src/main/resources + + plugin.json + plugin_job_template.json + + plugin/reader/hdfsreader + + + target/ + + datax-hdfsreader-${datax.engine.version}.jar + + plugin/reader/hdfsreader + + + + + false + plugin/reader/hdfsreader/libs + runtime + + + \ No newline at end of file diff --git a/exchangis-engines/engines/datax/datax-hdfsreader/src/main/java/com/alibaba/datax/plugin/reader/hdfsreader/Constant.java b/exchangis-engines/engines/datax/datax-hdfsreader/src/main/java/com/alibaba/datax/plugin/reader/hdfsreader/Constant.java new file mode 100644 index 000000000..dd7e76842 --- /dev/null +++ 
b/exchangis-engines/engines/datax/datax-hdfsreader/src/main/java/com/alibaba/datax/plugin/reader/hdfsreader/Constant.java @@ -0,0 +1,15 @@ +package com.alibaba.datax.plugin.reader.hdfsreader; + +/** + * Created by mingya.wmy on 2015/8/14. + */ +public class Constant { + static final String SOURCE_FILES = "sourceFiles"; + static final String TEXT = "TEXT"; + static final String ORC = "ORC"; + static final String CSV = "CSV"; + static final String SEQ = "SEQ"; + static final String RC = "RC"; + static final String HFILE = "HFile"; + static final String META_FIELD_DELIMITER = "field.delim"; +} diff --git a/exchangis-engines/engines/datax/datax-hdfsreader/src/main/java/com/alibaba/datax/plugin/reader/hdfsreader/HdfsFileType.java b/exchangis-engines/engines/datax/datax-hdfsreader/src/main/java/com/alibaba/datax/plugin/reader/hdfsreader/HdfsFileType.java new file mode 100644 index 000000000..0c37df841 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-hdfsreader/src/main/java/com/alibaba/datax/plugin/reader/hdfsreader/HdfsFileType.java @@ -0,0 +1,8 @@ +package com.alibaba.datax.plugin.reader.hdfsreader; + +/** + * Created by mingya.wmy on 2015/8/22. + */ +public enum HdfsFileType { + ORC, SEQ, RC, CSV, TEXT, +} diff --git a/exchangis-engines/engines/datax/datax-hdfsreader/src/main/java/com/alibaba/datax/plugin/reader/hdfsreader/HdfsReader.java b/exchangis-engines/engines/datax/datax-hdfsreader/src/main/java/com/alibaba/datax/plugin/reader/hdfsreader/HdfsReader.java new file mode 100644 index 000000000..5b3715e16 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-hdfsreader/src/main/java/com/alibaba/datax/plugin/reader/hdfsreader/HdfsReader.java @@ -0,0 +1,447 @@ +package com.alibaba.datax.plugin.reader.hdfsreader; + +import com.webank.wedatasphere.exchangis.datax.common.constant.TransportType; +import com.alibaba.datax.common.exception.DataXException; +import com.alibaba.datax.common.plugin.RecordSender; +import com.alibaba.datax.common.spi.Reader; +import com.alibaba.datax.common.util.Configuration; +import com.webank.wedatasphere.exchangis.datax.core.job.meta.MetaSchema; +import com.webank.wedatasphere.exchangis.datax.core.transport.stream.ChannelOutput; +import com.webank.wedatasphere.exchangis.datax.core.transport.stream.StreamMeta; +import com.alibaba.datax.core.util.FrameworkErrorCode; +import com.alibaba.datax.plugin.unstructuredstorage.PathMeta; +import com.alibaba.datax.plugin.unstructuredstorage.reader.UnstructuredStorageReaderUtil; +import com.webank.wedatasphere.exchangis.datax.util.Json; +import org.apache.commons.io.Charsets; +import org.apache.commons.io.IOUtils; +import org.apache.commons.lang3.StringUtils; +import org.apache.hadoop.fs.FileStatus; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.io.compress.CompressionCodec; +import org.apache.hadoop.io.compress.CompressionCodecFactory; +import org.apache.hadoop.security.UserGroupInformation; +import org.joda.time.DateTime; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.nio.charset.UnsupportedCharsetException; +import java.security.PrivilegedAction; +import java.util.*; + +import static com.alibaba.datax.plugin.unstructuredstorage.reader.Key.INCR_BEGIN_TIME; +import static com.alibaba.datax.plugin.unstructuredstorage.reader.Key.INCR_END_TIME; + +public class HdfsReader extends Reader { + + /** + * Job 中的方法仅执行一次,task 中方法会由框架启动多个 task 线程并行执行。 + *

+ * The overall Reader execution flow is:
+ *
+ *     Job:  init --> prepare --> split
+ *
+ *     Task: init --> prepare --> startRead --> post --> destroy
+ *     Task: init --> prepare --> startRead --> post --> destroy
+ *
+ *     Job:  post --> destroy
+ *
+ */ + public static class Job extends Reader.Job { + private static final Logger LOG = LoggerFactory + .getLogger(Job.class); + + private Configuration readerOriginConfig = null; + private String encoding = null; + private HashSet sourceFiles = new HashSet<>(); + private String specifiedFileType = null; + private HdfsReaderUtil hdfsReaderUtil = null; + private List path = null; + private long incrBeginTime = 0; + private long incrEndTime = 0; + private String defaultFS; + org.apache.hadoop.conf.Configuration hadoopConf = null; + private UserGroupInformation ugi = null; + @Override + public boolean isSupportStream(){ + return true; + } + + + + @Override + public void init() { + + LOG.info("init() begin..."); + this.readerOriginConfig = super.getPluginJobConf(); + this.validate(); + hdfsReaderUtil = new HdfsReaderUtil(this.readerOriginConfig); + LOG.info("init() ok and end..."); + for(String eachPath : path) { + if (!hdfsReaderUtil.exists(eachPath)) { + String message = String.format("cannot find the path: [%s], please check your configuration", eachPath); + LOG.error(message); + throw DataXException.asDataXException(HdfsReaderErrorCode.PATH_NOT_FOUND, message); + } + } + } + + @Override + public MetaSchema syncMetaData() { + if(StringUtils.isNotBlank(readerOriginConfig.getString(Key.HIVE_METASTORE_URIS, ""))){ + return hdfsReaderUtil.getHiveMetadata( + readerOriginConfig.getString(Key.HIVE_DATABASE), + readerOriginConfig.getString(Key.HIVE_TABLE), + readerOriginConfig.getString(Key.HIVE_METASTORE_URIS) + ); + } + return null; + } + + private void validate() { + this.defaultFS = this.readerOriginConfig.getNecessaryValue(Key.DEFAULT_FS, + HdfsReaderErrorCode.DEFAULT_FS_NOT_FIND_ERROR); + // path check + String pathInString = this.readerOriginConfig.getNecessaryValue(Key.PATH, HdfsReaderErrorCode.REQUIRED_VALUE); + if (!pathInString.startsWith("[") && !pathInString.endsWith("]")) { + path = new ArrayList(); + path.add(pathInString); + } else { + path = this.readerOriginConfig.getList(Key.PATH, String.class); + if (null == path || path.size() == 0) { + throw DataXException.asDataXException(HdfsReaderErrorCode.REQUIRED_VALUE, "您需要指定待读取的源目录或文件"); + } + for (String eachPath : path) { + if (!eachPath.startsWith("/")) { + String message = String.format("请检查参数path:[%s],需要配置为绝对路径", eachPath); + LOG.error(message); + throw DataXException.asDataXException(HdfsReaderErrorCode.ILLEGAL_VALUE, message); + } + } + } + if(getTransportType() == TransportType.RECORD) { + specifiedFileType = this.readerOriginConfig.getNecessaryValue(Key.FILETYPE, HdfsReaderErrorCode.REQUIRED_VALUE); + if (!specifiedFileType.equalsIgnoreCase(Constant.ORC) && + !specifiedFileType.equalsIgnoreCase(Constant.TEXT) && + !specifiedFileType.equalsIgnoreCase(Constant.CSV) && + !specifiedFileType.equalsIgnoreCase(Constant.SEQ) && + !specifiedFileType.equalsIgnoreCase(Constant.RC) && + !specifiedFileType.equalsIgnoreCase(Constant.HFILE)) { + String message = "HdfsReader插件目前支持ORC, TEXT, CSV, SEQUENCE, RC, HFile 格式的文件," + + "请将fileType选项的值配置为ORC, TEXT, CSV, SEQUENCE, HFile 或者 RC"; + throw DataXException.asDataXException(HdfsReaderErrorCode.FILE_TYPE_ERROR, message); + } + if (this.specifiedFileType.equalsIgnoreCase(Constant.CSV)) { + //compress校验 + UnstructuredStorageReaderUtil.validateCompress(this.readerOriginConfig); + UnstructuredStorageReaderUtil.validateCsvReaderConfig(this.readerOriginConfig); + } + } + + encoding = this.readerOriginConfig.getString(com.alibaba.datax.plugin.unstructuredstorage.reader.Key.ENCODING, "UTF-8"); + + try { 
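+ // Charsets.toCharset throws UnsupportedCharsetException for unknown names,
+ // which is mapped to ILLEGAL_VALUE below. A hypothetical reader fragment
+ // (keys as used in this class; fs URI and path are made up):
+ //   "reader": { "name": "hdfsreader", "parameter": { "defaultFS": "hdfs://ns1",
+ //       "path": "/warehouse/db/tbl/*", "fileType": "ORC", "encoding": "UTF-8" } }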
+ Charsets.toCharset(encoding); + } catch (UnsupportedCharsetException uce) { + throw DataXException.asDataXException( + HdfsReaderErrorCode.ILLEGAL_VALUE, + String.format("不支持的编码格式 : [%s]", encoding), uce); + } catch (Exception e) { + throw DataXException.asDataXException( + HdfsReaderErrorCode.ILLEGAL_VALUE, + String.format("运行配置异常 : %s", e.getMessage()), e); + } + //check Kerberos + Boolean haveKerberos = this.readerOriginConfig.getBool(Key.HAVE_KERBEROS, false); + if (haveKerberos) { + this.readerOriginConfig.getNecessaryValue(Key.KERBEROS_KEYTAB_FILE_PATH, HdfsReaderErrorCode.REQUIRED_VALUE); + this.readerOriginConfig.getNecessaryValue(Key.KERBEROS_PRINCIPAL, HdfsReaderErrorCode.REQUIRED_VALUE); + } + this.incrBeginTime = this.readerOriginConfig.getLong(INCR_BEGIN_TIME, 0); + this.incrEndTime = this.readerOriginConfig.getLong(INCR_END_TIME, DateTime.now().getMillis()); + // validate the Columns + validateColumns(); + + } + + private void validateColumns() { + List column = this.readerOriginConfig + .getListConfiguration(com.alibaba.datax.plugin.unstructuredstorage.reader.Key.COLUMN); + if(null == column){ + column = new ArrayList<>(); + } + boolean emptyColumn = column.isEmpty() || (1 == column.size() && ("\"*\"".equals(column.get(0).toString()) || "'*'" + .equals(column.get(0).toString()))); + if (emptyColumn) { + this.readerOriginConfig + .set(com.alibaba.datax.plugin.unstructuredstorage.reader.Key.COLUMN, new ArrayList()); + } else { + // column: 1. index type 2.value type 3.when type is Data, may have format + List columns = this.readerOriginConfig + .getListConfiguration(com.alibaba.datax.plugin.unstructuredstorage.reader.Key.COLUMN); + if (null != columns && columns.size() != 0) { + for (Configuration eachColumnConf : columns) { + eachColumnConf.getNecessaryValue(com.alibaba.datax.plugin.unstructuredstorage.reader.Key.TYPE, HdfsReaderErrorCode.REQUIRED_VALUE); + Integer columnIndex = eachColumnConf.getInt(com.alibaba.datax.plugin.unstructuredstorage.reader.Key.INDEX); + String columnValue = eachColumnConf.getString(com.alibaba.datax.plugin.unstructuredstorage.reader.Key.VALUE); + + if (null == columnIndex && null == columnValue) { + throw DataXException.asDataXException( + HdfsReaderErrorCode.NO_INDEX_VALUE, + "由于您配置了type, 则至少需要配置 index 或 value"); + } + + if (null != columnIndex && null != columnValue) { + throw DataXException.asDataXException( + HdfsReaderErrorCode.MIXED_INDEX_VALUE, + "您混合配置了index, value, 每一列同时仅能选择其中一种"); + } + + } + } + } + } + + @Override + public void prepare() { + if(StringUtils.isNotBlank(readerOriginConfig.getString(Key.HIVE_METASTORE_URIS, ""))) { + LOG.info("update the configuration dynamically by hive meta..."); + boolean affected = hdfsReaderUtil.updateConfigByHiveMeta( readerOriginConfig.getString(Key.HIVE_DATABASE), + readerOriginConfig.getString(Key.HIVE_TABLE), + readerOriginConfig.getString(Key.HIVE_METASTORE_URIS), this.readerOriginConfig); + if(affected){ + //validate the configuration again + this.validate(); + } + } + LOG.info("start to getAllFiles..."); + HashSet sourceFiles0 = hdfsReaderUtil.getAllFiles(path, specifiedFileType); + //to find the parent directory of path + Set parents = new HashSet<>(); + for(String path0 : path){ + boolean find = false; + for(int i = 0; i < path0.length(); i++){ + if('*' == path0.charAt(i) || '?' == path0.charAt(i)){ + int lastDirSeparator = path0.substring(0, i) + .lastIndexOf(IOUtils.DIR_SEPARATOR); + parents.add(path0. 
+ substring(0, lastDirSeparator + 1)); + find = true; + break; + } + } + if(!find){ + parents.add(path0); + } + } + for(String sourceFile : sourceFiles0){ + if(getTransportType() == TransportType.STREAM ){ + FileStatus status = hdfsReaderUtil.getFileStatus(sourceFile); + if(status.getModificationTime() <= incrBeginTime + || status.getModificationTime() > incrEndTime){ + continue; + } + } + boolean find = false; + for(String parent : parents){ + //0: absolute path, 1: relative path + if(sourceFile.indexOf(parent) > 0){ + String relativePath = sourceFile.substring(sourceFile.indexOf(parent) + parent.length()); + if(StringUtils.isNotBlank(relativePath)){ + this.sourceFiles.add(new PathMeta(sourceFile, + relativePath)); + }else{ + this.sourceFiles.add(new PathMeta(sourceFile, + parent.substring(parent.lastIndexOf(IOUtils.DIR_SEPARATOR)))); + } + find = true; + } + + if(find){ + break; + } + } + if(!find){ + throw new DataXException(FrameworkErrorCode.ARGUMENT_ERROR, "路径参数配置错误"); + } + } + String fileSeq = StringUtils.join(sourceFiles0, ","); + if(fileSeq.length() > 30){ + fileSeq = fileSeq.substring(0, 30); + } + LOG.info(String.format("您即将读取的文件数为: [%s], 列表为: [%s]", + sourceFiles0.size(), fileSeq)); + } + + @Override + public List split(int adviceNumber) { + + LOG.info("split() begin..."); + List readerSplitConfigs = new ArrayList(); + // warn:每个slice拖且仅拖一个文件, + // int splitNumber = adviceNumber; + int splitNumber = this.sourceFiles.size(); + if (0 == splitNumber) { + return new ArrayList<>(); + } + + List> splitedSourceFiles = this.splitSourceFiles(new ArrayList<>(this.sourceFiles), splitNumber); + for (List files : splitedSourceFiles) { + Configuration splitedConfig = this.readerOriginConfig.clone(); + splitedConfig.set(Constant.SOURCE_FILES, files); + readerSplitConfigs.add(splitedConfig); + } + + return readerSplitConfigs; + } + + + private List> splitSourceFiles(final List sourceList, int adviceNumber) { + List> splitedList = new ArrayList>(); + int averageLength = sourceList.size() / adviceNumber; + averageLength = averageLength == 0 ? 
1 : averageLength; + + for (int begin = 0, end = 0; begin < sourceList.size(); begin = end) { + end = begin + averageLength; + if (end > sourceList.size()) { + end = sourceList.size(); + } + splitedList.add(sourceList.subList(begin, end)); + } + return splitedList; + } + + + @Override + public void post() { + + } + + @Override + public void destroy() { + hdfsReaderUtil.closeFileSystem(); + } + + } + + public static class Task extends Reader.Task { + + private static Logger LOG = LoggerFactory.getLogger(Reader.Task.class); + private Configuration taskConfig; + private List sourceFiles; + private String specifiedFileType; + private String encoding; + private HdfsReaderUtil hdfsReaderUtil = null; + private int bufferSize; + + @Override + public void init() { + + this.taskConfig = super.getPluginJobConf(); + this.sourceFiles = this.taskConfig.getList(Constant.SOURCE_FILES, Object.class); + this.specifiedFileType = this.taskConfig.getString(Key.FILETYPE, ""); + this.encoding = this.taskConfig.getString(com.alibaba.datax.plugin.unstructuredstorage.reader.Key.ENCODING, "UTF-8"); + this.hdfsReaderUtil = new HdfsReaderUtil(this.taskConfig); + this.bufferSize = this.taskConfig.getInt(com.alibaba.datax.plugin.unstructuredstorage.reader.Key.BUFFER_SIZE, + com.alibaba.datax.plugin.unstructuredstorage.reader.Constant.DEFAULT_BUFFER_SIZE); + } + + @Override + public void prepare() { + + } + + + @Override + public void startRead(RecordSender recordSender) { + LOG.info("Read start"); + hdfsReaderUtil.getUgi().doAs((PrivilegedAction) () -> { + for (Object sourceFile : sourceFiles) { + PathMeta pathMeta = Json.fromJson(Json.toJson(sourceFile, null), PathMeta.class); + String fileName = pathMeta.getAbsolute(); + LOG.info(String.format("Reading file : [%s]", fileName)); + + if (specifiedFileType.equalsIgnoreCase(Constant.TEXT) + || specifiedFileType.equalsIgnoreCase(Constant.CSV)) { + InputStream inputStream = hdfsReaderUtil.getInputStream(fileName); + if(null == taskConfig.getString(com.alibaba.datax.plugin.unstructuredstorage.reader.Key.COMPRESS, null)){ + CompressionCodecFactory factory = new CompressionCodecFactory(hdfsReaderUtil.getConf()); + try { + CompressionCodec codec = factory.getCodec(new Path(fileName)); + if(null != codec){ + inputStream = codec.createInputStream(inputStream); + } + } catch (IOException e) { + throw DataXException.asDataXException(HdfsReaderErrorCode.READ_FILE_ERROR, "Hdfs使用压缩工厂类CodecFactory生成文件流失败,message:" + + e.getMessage()); + } + } + UnstructuredStorageReaderUtil.readFromStream(inputStream, fileName, taskConfig, + recordSender, getTaskPluginCollector()); + } else if (specifiedFileType.equalsIgnoreCase(Constant.ORC)) { + hdfsReaderUtil.orcFileStartRead(fileName, taskConfig, recordSender, getTaskPluginCollector()); + } else if (specifiedFileType.equalsIgnoreCase(Constant.SEQ)) { + hdfsReaderUtil.sequenceFileStartRead(fileName, taskConfig, recordSender, getTaskPluginCollector()); + } else if (specifiedFileType.equalsIgnoreCase(Constant.RC)) { + hdfsReaderUtil.rcFileStartRead(fileName, taskConfig, recordSender, getTaskPluginCollector()); + } else if (specifiedFileType.equalsIgnoreCase(Constant.HFILE)) { + hdfsReaderUtil.hFileStartRead(fileName, taskConfig, recordSender, getTaskPluginCollector()); + }else{ + String message = "HdfsReader插件目前支持ORC, TEXT, CSV, SEQUENCE, HFile, RC格式的文件," + + "请将fileType选项的值配置为ORC, TEXT, CSV, SEQUENCE, HFile或者 RC"; + throw DataXException.asDataXException(HdfsReaderErrorCode.FILE_TYPE_UNSUPPORT, message); + } + + if (recordSender != null) { + 
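+ // flush after each file so that buffered records reach the channel
+ // before the next source file is opened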
+
+        @Override
+        public void post() {
+
+        }
+
+        @Override
+        public void destroy() {
+
+        }
+
+        @Override
+        public void startRead(ChannelOutput channelOutput) {
+            LOG.info("start read source HDFS files to stream channel...");
+            hdfsReaderUtil.getUgi().doAs((PrivilegedAction<Object>) () -> {
+                for(Object sourceFile : sourceFiles){
+                    PathMeta pathMeta = Json.fromJson(Json.toJson(sourceFile, null), PathMeta.class);
+                    String absolutePath = pathMeta.getAbsolute();
+                    String relativePath = pathMeta.getRelative();
+                    LOG.info(String.format("reading file : [%s]", absolutePath));
+                    InputStream inputStream;
+                    try{
+                        Path path = new Path(absolutePath);
+                        StreamMeta streamMeta = new StreamMeta();
+                        streamMeta.setName(path.getName());
+                        streamMeta.setAbsolutePath(absolutePath);
+                        streamMeta.setRelativePath(relativePath);
+                        OutputStream outputStream = channelOutput.createStream(streamMeta, encoding);
+                        inputStream = hdfsReaderUtil.getInputStream(absolutePath);
+                        UnstructuredStorageReaderUtil.readFromStream(inputStream, outputStream,
+                                this.taskConfig);
+                    }catch(IOException e){
+                        throw DataXException.asDataXException(FrameworkErrorCode.CHANNEL_STREAM_ERROR, e);
+                    }
+                }
+                return null;
+            });
+        }
+    }
+
+}
\ No newline at end of file
diff --git a/exchangis-engines/engines/datax/datax-hdfsreader/src/main/java/com/alibaba/datax/plugin/reader/hdfsreader/HdfsReaderErrorCode.java b/exchangis-engines/engines/datax/datax-hdfsreader/src/main/java/com/alibaba/datax/plugin/reader/hdfsreader/HdfsReaderErrorCode.java
new file mode 100644
index 000000000..e2713a210
--- /dev/null
+++ b/exchangis-engines/engines/datax/datax-hdfsreader/src/main/java/com/alibaba/datax/plugin/reader/hdfsreader/HdfsReaderErrorCode.java
@@ -0,0 +1,130 @@
+package com.alibaba.datax.plugin.reader.hdfsreader;
+
+import com.alibaba.datax.common.spi.ErrorCode;
+
+/**
+ * error code
+ * @author davidhua
+ */
+public enum HdfsReaderErrorCode implements ErrorCode {
+    /**
+     * BAD CONFIG VALUE
+     */
+    BAD_CONFIG_VALUE("HdfsReader-00", "The value of configuration is illegal(您配置的值不合法)"),
+    /**
+     * PATH NOT FIND ERROR
+     */
+    PATH_NOT_FIND_ERROR("HdfsReader-01", "Path is necessary(您未配置path值)"),
+    /**
+     * DEFAULT_FS NOT FIND ERROR
+     */
+    DEFAULT_FS_NOT_FIND_ERROR("HdfsReader-02", "defaultFS is necessary(您未配置defaultFS值)"),
+    /**
+     * ILLEGAL VALUE
+     */
+    ILLEGAL_VALUE("HdfsReader-03", "Value error(值错误)"),
+    /**
+     * CONFIG INVALID EXCEPTION
+     */
+    CONFIG_INVALID_EXCEPTION("HdfsReader-04", "The parameters' configuration is wrong(参数配置错误)"),
+    /**
+     * REQUIRED VALUE
+     */
+    REQUIRED_VALUE("HdfsReader-05", "Lost the required parameters(您缺失了必须填写的参数值)"),
+    /**
+     * NO INDEX VALUE
+     */
+    NO_INDEX_VALUE("HdfsReader-06", "No index value(没有 Index)"),
+    /**
+     * MIXED INDEX VALUE
+     */
+    MIXED_INDEX_VALUE("HdfsReader-07", "Mixed index value(index 和 value 混合)"),
+    /**
+     * EMPTY DIR EXCEPTION
+     */
+    EMPTY_DIR_EXCEPTION("HdfsReader-08", "The directory is empty(您尝试读取的文件目录为空)"),
+    /**
+     * PATH CONFIG ERROR
+     */
+    PATH_CONFIG_ERROR("HdfsReader-09", "The path format you configured is incorrect(您配置的path格式有误)"),
+    /**
+     * READ FILE ERROR
+     */
+    READ_FILE_ERROR("HdfsReader-10", "Error reading file(读取文件出错)"),
+    /**
+     * MALFORMED ORC ERROR
+     */
+    MALFORMED_ORC_ERROR("HdfsReader-22", "ORC file format exception(ORCFILE格式异常)"),
+    /**
+     * FILE TYPE ERROR
+     */
+    FILE_TYPE_ERROR("HdfsReader-11", "File type configuration is incorrect(文件类型配置错误)"),
+    /**
+     * FILE TYPE UNSUPPORTED
+     */
+    FILE_TYPE_UNSUPPORT("HdfsReader-12", "File type is unsupported(文件类型目前不支持)"),
+    /**
+     * KERBEROS LOGIN ERROR
+     */
+    KERBEROS_LOGIN_ERROR("HdfsReader-13", "Kerberos authentication failed(KERBEROS认证失败)"),
+    /**
+     * READ SEQUENCE FILE ERROR
+     */
+    READ_SEQUENCEFILE_ERROR("HdfsReader-14", "Error reading SequenceFile(读取SequenceFile文件出错)"),
+    /**
+     * READ RC FILE ERROR
+     */
+    READ_RCFILE_ERROR("HdfsReader-15", "Error reading RCFile(读取RCFile文件出错)"),
+    /**
+     * READ HFILE ERROR
+     */
+    READ_HFILE_ERROR("HdfsReader-21", "Error reading HFile(读取HFile文件错误)"),
+    /**
+     * HDFS PROXY ERROR
+     */
+    HDFS_PROXY_ERROR("HdfsReader-16", "Fail to create HDFS PROXY(创建HDFS PROXY失败)"),
+    /**
+     * CONNECT HDFS IO ERROR
+     */
+    CONNECT_HDFS_IO_ERROR("HdfsReader-17", "IO exception occurred when connecting to HDFS(与HDFS建立连接时出现IO异常)"),
+    /**
+     * HDFS RENAME FILE ERROR
+     */
+    HDFS_RENAME_FILE_ERROR("HdfsReader-23", "Fail to move file to path configured(将文件移动到配置路径失败)"),
+    /**
+     * PATH NOT FOUND
+     */
+    PATH_NOT_FOUND("HdfsReader-18", "Path not found(找不到路径)"),
+    /**
+     * OBTAIN METADATA ERROR
+     */
+    OBTAIN_METADATA_ERROR("HdfsReader-19", "Fail to obtain metadata(获取元信息失败)"),
+    /**
+     * UPDATE CONFIG ERROR (update by hive meta)
+     */
+    UPDATE_CONFIG_ERROR("HdfsReader-20", "Fail to update configuration dynamically(动态更新任务配置失败)");
+
+    private final String code;
+    private final String description;
+
+    private HdfsReaderErrorCode(String code, String description) {
+        this.code = code;
+        this.description = description;
+    }
+
+    @Override
+    public String getCode() {
+        return this.code;
+    }
+
+    @Override
+    public String getDescription() {
+        return this.description;
+    }
+
+    @Override
+    public String toString() {
+        return String.format("Code:[%s], Description:[%s]. ", this.code,
+                this.description);
+    }
+}
\ No newline at end of file
diff --git a/exchangis-engines/engines/datax/datax-hdfsreader/src/main/java/com/alibaba/datax/plugin/reader/hdfsreader/HdfsReaderUtil.java b/exchangis-engines/engines/datax/datax-hdfsreader/src/main/java/com/alibaba/datax/plugin/reader/hdfsreader/HdfsReaderUtil.java
new file mode 100644
index 000000000..38e094578
--- /dev/null
+++ b/exchangis-engines/engines/datax/datax-hdfsreader/src/main/java/com/alibaba/datax/plugin/reader/hdfsreader/HdfsReaderUtil.java
@@ -0,0 +1,1080 @@
+package com.alibaba.datax.plugin.reader.hdfsreader;
+
+import com.alibaba.datax.common.element.*;
+import com.alibaba.datax.common.exception.DataXException;
+import com.alibaba.datax.common.plugin.RecordSender;
+import com.alibaba.datax.common.plugin.TaskPluginCollector;
+import com.alibaba.datax.common.util.Configuration;
+import com.webank.wedatasphere.exchangis.datax.core.job.meta.MetaSchema;
+import com.alibaba.datax.core.util.LdapUtil;
+import com.alibaba.datax.plugin.reader.hdfsreader.hfile.HFileParser;
+import com.alibaba.datax.plugin.reader.hdfsreader.hfile.HFileParserFactory;
+import com.alibaba.datax.plugin.unstructuredstorage.reader.ColumnEntry;
+import com.alibaba.datax.plugin.unstructuredstorage.reader.UnstructuredStorageReaderErrorCode;
+import com.alibaba.datax.plugin.unstructuredstorage.reader.UnstructuredStorageReaderUtil;
+import com.alibaba.datax.plugin.utils.HdfsUserGroupInfoLock;
+import com.webank.wedatasphere.exchangis.datax.common.CryptoUtils;
+import com.webank.wedatasphere.exchangis.datax.common.ldap.LdapConnector;
+import com.webank.wedatasphere.exchangis.datax.util.Json;
+import com.webank.wedatasphere.exchangis.datax.util.KerberosUtil;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.hadoop.fs.*;
+import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.metastore.IMetaStoreClient; +import org.apache.hadoop.hive.metastore.api.Partition; +import org.apache.hadoop.hive.metastore.api.StorageDescriptor; +import org.apache.hadoop.hive.metastore.api.Table; +import org.apache.hadoop.hive.ql.io.*; +import org.apache.hadoop.hive.ql.io.orc.OrcFile; +import org.apache.hadoop.hive.ql.io.orc.OrcInputFormat; +import org.apache.hadoop.hive.ql.io.orc.OrcSerde; +import org.apache.hadoop.hive.ql.io.orc.Reader; +import org.apache.hadoop.hive.ql.metadata.Hive; +import org.apache.hadoop.hive.serde2.SerDeException; +import org.apache.hadoop.hive.serde2.columnar.BytesRefArrayWritable; +import org.apache.hadoop.hive.serde2.columnar.BytesRefWritable; +import org.apache.hadoop.hive.serde2.objectinspector.*; +import org.apache.hadoop.io.*; +import org.apache.hadoop.mapred.*; +import org.apache.hadoop.security.UserGroupInformation; +import org.apache.hadoop.util.ReflectionUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.io.InputStream; +import java.net.URI; +import java.nio.ByteBuffer; +import java.security.PrivilegedExceptionAction; +import java.text.SimpleDateFormat; +import java.util.*; + +import static com.alibaba.datax.plugin.reader.hdfsreader.Key.*; +import static org.apache.hadoop.fs.FileSystem.FS_DEFAULT_NAME_KEY; + +/** + * Created by mingya.wmy on 2015/8/12. + */ +public class HdfsReaderUtil { + private static final Logger LOG = LoggerFactory.getLogger(HdfsReader.Job.class); + + private org.apache.hadoop.conf.Configuration hadoopConf; + private Configuration readerConfig; + private String specifiedFileType = null; + private FileSystem fileSystem = null; + private UserGroupInformation ugi = null; + + private static final int DIRECTORY_SIZE_GUESS = 16 * 1024; + private static final String HDFS_DEFAULT_FS_KEY = "fs.defaultFS"; + private static final String FS_DISABLE_CACHE = "fs.%s.impl.disable.cache"; + private static final String FS_AUTOMATIC_CLOSE_KEY = "fs.automatic.close"; + private static final String FALLBACK_TO_SIMPLE_AUTH_KEY = "ipc.client.fallback-to-simple-auth-allowed"; + private static final String HADOOP_SECURITY_AUTHENTICATION_KEY = "hadoop.security.authentication"; + private static final String EXEC_USER = "exec.user"; + /** + * hive parameters + */ + private static final String DEFAULT_HIVE_USER = "hadoop"; + + /** + * kerberos parameters + */ + private Boolean haveKerberos; + private String kerberosKeytabFilePath; + private String kerberosPrincipal; + + UserGroupInformation getUgi(){ + return ugi; + } + + HdfsReaderUtil(Configuration readerConfig) { + //to store task configuration + this.readerConfig = readerConfig; + hadoopConf = new org.apache.hadoop.conf.Configuration(); + //http://blog.csdn.net/yangjl38/article/details/7583374 + Configuration hadoopSiteParams = readerConfig.getConfiguration(Key.HADOOP_CONFIG); + Map hadoopSiteParamsAsJsonObject = Json.fromJson(readerConfig.getString(Key.HADOOP_CONFIG), Map.class); + if (null != hadoopSiteParams) { + Set paramKeys = hadoopSiteParams.getKeys(); + for (String each : paramKeys) { + assert hadoopSiteParamsAsJsonObject != null; + hadoopConf.set(each, String.valueOf(hadoopSiteParamsAsJsonObject.getOrDefault(each, ""))); + } + } + hadoopConf.set(HDFS_DEFAULT_FS_KEY, readerConfig.getString(Key.DEFAULT_FS)); + //disable automatic close + hadoopConf.setBoolean(FS_AUTOMATIC_CLOSE_KEY, false); + //if has Kerberos authentication + this.haveKerberos = 
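+        /* authentication branch: keytab (kerberos) login below vs. LDAP/proxy-user in getUgiInAuth */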
readerConfig.getBool(Key.HAVE_KERBEROS, false); + HdfsUserGroupInfoLock.lock(); + try { + if (haveKerberos) { + this.kerberosKeytabFilePath = readerConfig.getString(Key.KERBEROS_KEYTAB_FILE_PATH); + this.kerberosPrincipal = readerConfig.getString(Key.KERBEROS_PRINCIPAL); + this.hadoopConf.set(HADOOP_SECURITY_AUTHENTICATION_KEY, "kerberos"); + //disable the cache + this.hadoopConf.setBoolean( + String.format(FS_DISABLE_CACHE, URI.create(this.hadoopConf.get(FS_DEFAULT_NAME_KEY, "")).getScheme()), true); + hadoopConf.setBoolean(FALLBACK_TO_SIMPLE_AUTH_KEY, true); + ugi = this.kerberosAuthentication(this.kerberosPrincipal, this.kerberosKeytabFilePath); + } else { + ugi = this.getUgiInAuth(readerConfig); + } + try { + fileSystem = null == ugi? null : ugi.doAs((PrivilegedExceptionAction) () -> { + FileSystem fs = null; + try { + fs = FileSystem.get(hadoopConf); + fs.exists(new Path("/")); + } catch (IOException e) { + String message = String.format("获取FileSystem时发生网络IO异常,请检查您的网络是否正常!HDFS地址:[%s]", + "message:defaultFS =" + readerConfig.getString(Key.DEFAULT_FS)); + LOG.error(message); + throw DataXException.asDataXException(HdfsReaderErrorCode.CONNECT_HDFS_IO_ERROR, e); + } catch (Exception e) { + String message = String.format("获取FileSystem失败,请检查HDFS地址是否正确: [%s]", + "message:defaultFS =" + hadoopConf); + LOG.error(message); + throw DataXException.asDataXException(HdfsReaderErrorCode.CONNECT_HDFS_IO_ERROR, e); + } + return fs; + }); + } catch (Exception e) { + throw DataXException.asDataXException(HdfsReaderErrorCode.CONNECT_HDFS_IO_ERROR, e); + } + }finally{ + HdfsUserGroupInfoLock.unlock(); + } + if (null == fileSystem || null == hadoopConf) { + String message = String.format("获取FileSystem失败,请检查HDFS地址是否正确: [%s]", + "message:defaultFS =" + readerConfig.getString(Key.DEFAULT_FS)); + LOG.error(message); + throw DataXException.asDataXException(HdfsReaderErrorCode.CONNECT_HDFS_IO_ERROR, message); + } + + LOG.trace(String.format("hadoopConfig details:%s", Json.toJson(this.hadoopConf, null))); + } + + /** + * fetch meta information + * @param database database name + * @param table table name + * @param hiveMetaStoreUris hiveMetaStore uris + * @return meta schema + */ + MetaSchema getHiveMetadata(String database, String table, String hiveMetaStoreUris) { + MetaSchema metaSchema = new MetaSchema(); + try { + IMetaStoreClient client = null; + try { + client = getHiveConnByUris(hiveMetaStoreUris).getMSC(); + Table tableInfo = client.getTable(database, table); + StorageDescriptor storageDescriptor = tableInfo.getSd(); + metaSchema.addSchemaInfo(HIVE_META_SERDE_INFO, storageDescriptor.getSerdeInfo()); + metaSchema.addSchemaInfo(HIVE_META_NUM_BUCKETS, storageDescriptor.getNumBuckets()); + metaSchema.addSchemaInfo(HIVE_META_BUCKET_COLS, storageDescriptor.getBucketCols()); + metaSchema.addSchemaInfo(HIVE_META_SORT_COLS, storageDescriptor.getSortCols()); + metaSchema.addSchemaInfo(HIVE_META_INPUT_FORMAT, storageDescriptor.getInputFormat()); + metaSchema.addSchemaInfo(HIVE_META_OUTPUT_FORMAT, storageDescriptor.getOutputFormat()); + metaSchema.addSchemaInfo(HIVE_META_PARAMETERS, storageDescriptor.getParameters()); + metaSchema.addSchemaInfo(HIVE_META_COMPRESSED, storageDescriptor.isCompressed()); + // get the field schema list from storage descriptor + List fieldSchemas = new ArrayList<>(); + List fields = + storageDescriptor.getCols(); + fields.forEach(schemaDescriptor ->{ + MetaSchema.FieldSchema fieldSchema = new MetaSchema.FieldSchema( + schemaDescriptor.getName(), + schemaDescriptor.getType(), + 
schemaDescriptor.getComment() + ); + fieldSchemas.add(fieldSchema); + }); + metaSchema.setFieldSchemas(fieldSchemas); + } finally { + if (client != null) { + client.close(); + } + } + + } catch (Exception e) { + LOG.error("Failure to obtain metadata", e); + throw DataXException.asDataXException(HdfsReaderErrorCode.OBTAIN_METADATA_ERROR, e.getMessage()); + } + return metaSchema; + } + + /** + * update reader configuration by hive meta information dynamically + * @param database database name + * @param table table name + * @param hiveMetaStoreUris hiveMetaStore uris + * @param originConfig the configuration should be updated + * @return if affect the original configuration + */ + boolean updateConfigByHiveMeta(String database, String table, + String hiveMetaStoreUris, Configuration originConfig){ + try{ + IMetaStoreClient client = null; + boolean affect = false; + try{ + client = getHiveConnByUris(hiveMetaStoreUris).getMSC(); + Table tableInfo = client.getTable(database, table); + StorageDescriptor descriptor = tableInfo.getSd(); + String partitionValues = originConfig.getString(Key.PARTITIONS_VALUES); + if(StringUtils.isNotBlank(partitionValues)){ + String[] partitions = partitionValues.split(","); + Partition partition= null; + try{ + partition = client.getPartition(database, table, Arrays.asList(partitions)); + }catch(Exception e){ + //ignore + } + if(null != partition){ + //if the partition exists, use its storage descriptor + descriptor = partition.getSd(); + } + } + String fileType = detectFileType(descriptor); + if(StringUtils.isNotBlank(fileType) && !fileType + .equalsIgnoreCase(originConfig.getString(Key.FILETYPE, ""))){ + affect = true; + originConfig.set(Key.FILETYPE, fileType); + } + String fieldDelimiter = descriptor.getSerdeInfo().getParameters().getOrDefault(Constant.META_FIELD_DELIMITER, ""); + if(StringUtils.isNotEmpty(fieldDelimiter) + && !fieldDelimiter.equalsIgnoreCase(originConfig + .getString( com.alibaba.datax.plugin.unstructuredstorage.reader.Key.FIELD_DELIMITER, ""))){ + affect = true; + originConfig.set(com.alibaba.datax.plugin.unstructuredstorage.reader.Key.FIELD_DELIMITER, fieldDelimiter); + } + return affect; + }finally{ + if(null != client){ + client.close(); + } + } + }catch(Exception e){ + LOG.error("Fail to update configuration", e); + throw DataXException.asDataXException(HdfsReaderErrorCode.UPDATE_CONFIG_ERROR, e.getMessage()); + } + } + void closeFileSystem(){ + try { + fileSystem.close(); + } catch (IOException e) { + String message = "关闭FileSystem时发生IO异常,请检查您的网络是否正常!"; + LOG.error(message); + throw DataXException.asDataXException(HdfsReaderErrorCode.CONNECT_HDFS_IO_ERROR, message); + } + } + + + + boolean exists(String path){ + try{ + return fileSystem.exists(new Path(path)); + }catch(IOException e){ + String message = "exception occurs while reading the file info in HDFS ,please check your network"; + LOG.error(message); + throw DataXException.asDataXException(HdfsReaderErrorCode.CONNECT_HDFS_IO_ERROR, message); + } + } + + /** + * get hadoop configuration + * @return + */ + org.apache.hadoop.conf.Configuration getConf(){ + return hadoopConf; + } + /** + * 获取指定路径列表下符合条件的所有文件的绝对路径 + * + * @param srcPaths 路径列表 + * @param specifiedFileType 指定文件类型 + */ + HashSet getAllFiles(List srcPaths, String specifiedFileType) { + + this.specifiedFileType = specifiedFileType; + + if (!srcPaths.isEmpty()) { + for (String eachPath : srcPaths) { + LOG.info(String.format("get HDFS all files in path = [%s]", eachPath)); + getHDFSAllFiles(eachPath); + } + } + return 
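+                /* accumulated across every configured path; the HashSet de-duplicates overlapping globs */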
sourceHDFSAllFilesList; + } + + private HashSet sourceHDFSAllFilesList = new HashSet<>(); + + private HashSet getHDFSAllFiles(String hdfsPath) { + + try { + //check if the hdfsPath contains regular sign + if (hdfsPath.contains("*") || hdfsPath.contains("?")) { + Path path = new Path(hdfsPath); + FileStatus[] stats = fileSystem.globStatus(path, path1 -> !path1.getName().startsWith(".")); + for (FileStatus f : stats) { + if (f.isFile()) { + if (f.getLen() == 0) { + String message = String.format("文件[%s]长度为0,将会跳过不作处理!", hdfsPath); + LOG.warn(message); + } else { + addSourceFileByType(f.getPath().toString()); + } + } else if (f.isDirectory()) { + getHDFSAllFilesNORegex(f.getPath().toString(), fileSystem); + } + } + } else { + getHDFSAllFilesNORegex(hdfsPath, fileSystem); + } + + return sourceHDFSAllFilesList; + + } catch (IOException e) { + String message = String.format("无法读取路径[%s]下的所有文件,请确认您的配置项fs.defaultFS, path的值是否正确," + + "是否有读写权限,网络是否已断开!", hdfsPath); + LOG.error(message); + throw DataXException.asDataXException(HdfsReaderErrorCode.PATH_CONFIG_ERROR, e); + } + } + + private HashSet getHDFSAllFilesNORegex(String path, FileSystem hdfs) throws IOException { + + // get the root directory + Path listFiles = new Path(path); + // If the network disconnected, this method will retry 45 times + // each time the retry interval for 20 seconds + // get all the subdirectories of root directory + FileStatus stats[] = hdfs.listStatus(listFiles, path1 -> !path1.getName().startsWith(".")); + for (FileStatus f : stats) { + // recursive directory + if (f.isDirectory()) { + LOG.info(String.format("[%s] 是目录, 递归获取该目录下的文件", f.getPath().toString())); + getHDFSAllFilesNORegex(f.getPath().toString(), hdfs); + } else if (f.isFile()) { + + addSourceFileByType(f.getPath().toString()); + } else { + String message = String.format("该路径[%s]文件类型既不是目录也不是文件,插件自动忽略。", + f.getPath().toString()); + LOG.info(message); + } + } + return sourceHDFSAllFilesList; + } + + /** + * add file int sourceHDFSAllFilesList by filePath + * @param filePath + */ + private void addSourceFileByType(String filePath) { + if(isEmpty(filePath)){ + return; + } + boolean isMatchedFileType = checkHdfsFileType(filePath, this.specifiedFileType); + + if (isMatchedFileType) { + LOG.info(String.format("[%s]是[%s]类型的文件, 将该文件加入source files列表", filePath, this.specifiedFileType)); + sourceHDFSAllFilesList.add(filePath); + } else { + String message = String.format("文件[%s]的类型与用户配置的fileType类型不一致," + + "请确认您配置的目录下面所有文件的类型均为[%s]" + , filePath, this.specifiedFileType); + LOG.error(message); + throw DataXException.asDataXException( + HdfsReaderErrorCode.FILE_TYPE_UNSUPPORT, message); + } + } + + InputStream getInputStream(String filepath) { + InputStream inputStream; + Path path = new Path(filepath); + try { + + //If the network disconnected, this method will retry 45 times + //each time the retry interval for 20 seconds + inputStream = fileSystem.open(path); + FSDataInputStream in; + return inputStream; + } catch (IOException e) { + String message = String.format("读取文件 : [%s] 时出错,请确认文件:[%s]存在且配置的用户有权限读取", filepath, filepath); + throw DataXException.asDataXException(HdfsReaderErrorCode.READ_FILE_ERROR, message, e); + } + } + + void sequenceFileStartRead(String sourceSequenceFilePath, Configuration readerSliceConfig, + RecordSender recordSender, TaskPluginCollector taskPluginCollector) { + LOG.info(String.format("Start Read sequence file [%s].", sourceSequenceFilePath)); + + Path seqFilePath = new Path(sourceSequenceFilePath); + SequenceFile.Reader reader = null; + try { 
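+            // The key class is resolved from the file header via reader.getKeyClass(),
+            // while each value is decoded as a Text line and handed to transportOneRecord below.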
+ reader = new SequenceFile.Reader(this.hadoopConf, + SequenceFile.Reader.file(seqFilePath)); + Writable key = (Writable) ReflectionUtils.newInstance(reader.getKeyClass(), this.hadoopConf); + Text value = new Text(); + while (reader.next(key, value)) { + if (StringUtils.isNotBlank(value.toString())) { + UnstructuredStorageReaderUtil.transportOneRecord(recordSender, + readerSliceConfig, taskPluginCollector, value.toString()); + } + } + } catch (Exception e) { + String message = String.format("SequenceFile.Reader读取文件[%s]时出错", sourceSequenceFilePath); + throw DataXException.asDataXException(HdfsReaderErrorCode.READ_SEQUENCEFILE_ERROR, message, e); + } finally { + IOUtils.closeStream(reader); + LOG.info("Finally, Close stream SequenceFile.Reader."); + } + + } + + void rcFileStartRead(String sourceRcFilePath, Configuration readerSliceConfig, + RecordSender recordSender, TaskPluginCollector taskPluginCollector) { + LOG.info(String.format("Start Read RC File [%s].", sourceRcFilePath)); + List column = UnstructuredStorageReaderUtil + .getListColumnEntry(readerSliceConfig, com.alibaba.datax.plugin.unstructuredstorage.reader.Key.COLUMN); + // warn: no default value '\N' + String nullFormat = readerSliceConfig.getString(com.alibaba.datax.plugin.unstructuredstorage.reader.Key.NULL_FORMAT); + + Path rcFilePath = new Path(sourceRcFilePath); + RCFileRecordReader recordReader = null; + try { + long fileLen = fileSystem.getFileStatus(rcFilePath).getLen(); + FileSplit split = new FileSplit(rcFilePath, 0, fileLen, (String[]) null); + recordReader = new RCFileRecordReader(hadoopConf, split); + LongWritable key = new LongWritable(); + BytesRefArrayWritable value = new BytesRefArrayWritable(); + Text txt = new Text(); + while (recordReader.next(key, value)) { + String[] sourceLine = new String[value.size()]; + txt.clear(); + for (int i = 0; i < value.size(); i++) { + BytesRefWritable v = value.get(i); + txt.set(v.getData(), v.getStart(), v.getLength()); + sourceLine[i] = txt.toString(); + } + UnstructuredStorageReaderUtil.transportOneRecord(recordSender, + column, sourceLine, nullFormat, taskPluginCollector); + } + + } catch (IOException e) { + String message = String.format("读取文件[%s]时出错", sourceRcFilePath); + throw DataXException.asDataXException(HdfsReaderErrorCode.READ_RCFILE_ERROR, message, e); + } finally { + try { + if (recordReader != null) { + recordReader.close(); + LOG.info("Finally, Close RCFileRecordReader."); + } + } catch (IOException e) { + LOG.warn(String.format("finally: 关闭RCFileRecordReader失败, %s", e.getMessage())); + } + } + + } + + void orcFileStartRead(String sourceOrcFilePath, Configuration readerSliceConfig, + RecordSender recordSender, TaskPluginCollector taskPluginCollector) { + LOG.info(String.format("Start Read ORC File [%s].", sourceOrcFilePath)); + List column = UnstructuredStorageReaderUtil + .getListColumnEntry(readerSliceConfig, com.alibaba.datax.plugin.unstructuredstorage.reader.Key.COLUMN); + String nullFormat = readerSliceConfig.getString(com.alibaba.datax.plugin.unstructuredstorage.reader.Key.NULL_FORMAT); + StringBuilder allColumns = new StringBuilder(); + StringBuilder allColumnTypes = new StringBuilder(); + boolean isReadAllColumns = false; + int columnIndexMax = -1; + if (null == column || column.size() == 0) { + int allColumnsCount = getAllColumnsCount(sourceOrcFilePath); + columnIndexMax = allColumnsCount - 1; + isReadAllColumns = true; + } else { + columnIndexMax = getMaxIndex(column); + } + for (int i = 0; i <= columnIndexMax; i++) { + allColumns.append("col"); + 
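+            // Together these appends build "col,col,...,col" / "string:string:...:string",
+            // so the OrcSerde below projects every referenced field as a string column.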
allColumnTypes.append("string"); + if (i != columnIndexMax) { + allColumns.append(","); + allColumnTypes.append(":"); + } + } + if (columnIndexMax >= 0) { + JobConf conf = new JobConf(hadoopConf); + Path orcFilePath = new Path(sourceOrcFilePath); + Properties p = new Properties(); + p.setProperty("columns", allColumns.toString()); + p.setProperty("columns.types", allColumnTypes.toString()); + try { + OrcSerde serde = new OrcSerde(); + serde.initialize(conf, p); + StructObjectInspector inspector = (StructObjectInspector) serde.getObjectInspector(); + InputFormat in = new OrcInputFormat(); + FileInputFormat.setInputPaths(conf, orcFilePath.toString()); + + //If the network disconnected, will retry 45 times, each time the retry interval for 20 seconds + //Each file as a split + InputSplit[] splits = in.getSplits(conf, -1); + for(InputSplit split : splits){ + RecordReader reader = in.getRecordReader(split, conf, Reporter.NULL); + Object key = reader.createKey(); + Object value = reader.createValue(); + // get all field refs + List fields = inspector.getAllStructFieldRefs(); + + List recordFields; + while (reader.next(key, value)) { + recordFields = new ArrayList<>(); + for (int i = 0; i <= columnIndexMax; i++) { + Object field = inspector.getStructFieldData(value, fields.get(i)); + recordFields.add(field); + } + transportOneRecord(column, recordFields, recordSender, + taskPluginCollector, isReadAllColumns, nullFormat); + } + reader.close(); + } + } catch (IOException | SerDeException e) { + String message = String.format("从orcfile文件路径[%s]中读取数据发生异常,请联系系统管理员。" + , sourceOrcFilePath); + throw DataXException.asDataXException(HdfsReaderErrorCode.READ_FILE_ERROR, message); + } + } else { + String message = String.format("请确认您所读取的列配置正确!columnIndexMax 小于0,column:%s", Json.toJson(column, null)); + throw DataXException.asDataXException(HdfsReaderErrorCode.BAD_CONFIG_VALUE, message); + } + } + + void hFileStartRead(String sourceFilePath, Configuration readerSliceConfig, + RecordSender recordSender, TaskPluginCollector taskPluginCollector){ + LOG.info(String.format("Start Read HFile file [%s].", sourceFilePath)); + String encoding = readerSliceConfig.getString(com.alibaba.datax.plugin.unstructuredstorage.reader.Key.ENCODING, "UTF-8"); + HFileParser parser = HFileParserFactory.getHBASEImpl(fileSystem, encoding); + List column = UnstructuredStorageReaderUtil + .getListColumnEntry(readerSliceConfig, com.alibaba.datax.plugin.unstructuredstorage.reader.Key.COLUMN); + String nullFormat = readerSliceConfig.getString(com.alibaba.datax.plugin.unstructuredstorage.reader.Key.NULL_FORMAT); + parser.parse(sourceFilePath, readerSliceConfig.getConfiguration(HFILE_PARSE_CONFIG), sourceLine -> UnstructuredStorageReaderUtil.transportOneRecord(recordSender, column, + sourceLine, nullFormat, taskPluginCollector)); + LOG.info("Finally, Close stream HFile.Reader"); + } + + public FileStatus getFileStatus(String path){ + try{ + return fileSystem.getFileStatus(new Path(path)); + }catch(IOException e){ + String message = String.format("Failed to get file status from : %s", path); + LOG.error(message); + throw DataXException.asDataXException(HdfsReaderErrorCode.READ_FILE_ERROR, e); + } + } + private Record transportOneRecord(List columnConfigs, List recordFields + , RecordSender recordSender, TaskPluginCollector taskPluginCollector, boolean isReadAllColumns, String nullFormat) { + Record record = recordSender.createRecord(); + Column columnGenerated; + try { + if (isReadAllColumns) { + // read all the columns, then create the 
column whose type is STRING + for (Object recordField : recordFields) { + String columnValue = null; + if (recordField != null) { + columnValue = recordField.toString(); + } + columnGenerated = new StringColumn(columnValue); + record.addColumn(columnGenerated); + } + } else { + for (ColumnEntry columnConfig : columnConfigs) { + String columnType = columnConfig.getType(); + Integer columnIndex = columnConfig.getIndex(); + String columnConst = columnConfig.getValue(); + + String columnValue = null; + + if (null != columnIndex) { + if (null != recordFields.get(columnIndex)) { + columnValue = recordFields.get(columnIndex).toString(); + } + } else { + columnValue = columnConst; + } + Type type = Type.valueOf(columnType.toUpperCase()); + // it's all ok if nullFormat is null + if (StringUtils.equals(columnValue, nullFormat) || StringUtils.isEmpty(columnValue)) { + columnValue = null; + } + switch (type) { + case STRING: + columnGenerated = new StringColumn(columnValue); + break; + case LONG: + try { + columnGenerated = new LongColumn(columnValue); + } catch (Exception e) { + throw new IllegalArgumentException(String.format( + "类型转换错误, 无法将[%s] 转换为[%s]", columnValue, + "LONG")); + } + break; + case DOUBLE: + try { + columnGenerated = new DoubleColumn(columnValue); + } catch (Exception e) { + throw new IllegalArgumentException(String.format( + "类型转换错误, 无法将[%s] 转换为[%s]", columnValue, + "DOUBLE")); + } + break; + case BOOLEAN: + try { + columnGenerated = new BoolColumn(columnValue); + } catch (Exception e) { + throw new IllegalArgumentException(String.format( + "类型转换错误, 无法将[%s] 转换为[%s]", columnValue, + "BOOLEAN")); + } + + break; + case DATE: + try { + if (columnValue == null) { + columnGenerated = new DateColumn((Date) null); + } else { + String formatString = columnConfig.getFormat(); + if (StringUtils.isNotBlank(formatString)) { + SimpleDateFormat format = new SimpleDateFormat( + formatString); + columnGenerated = new DateColumn( + format.parse(columnValue)); + } else { + columnGenerated = new DateColumn( + new StringColumn(columnValue) + .asDate()); + } + } + } catch (Exception e) { + throw new IllegalArgumentException(String.format( + "类型转换错误, 无法将[%s] 转换为[%s]", columnValue, + "DATE")); + } + break; + default: + String errorMessage = String.format( + "您配置的列类型暂不支持 : [%s]", columnType); + LOG.error(errorMessage); + throw DataXException + .asDataXException( + UnstructuredStorageReaderErrorCode.NOT_SUPPORT_TYPE, + errorMessage); + } + + record.addColumn(columnGenerated); + } + } + recordSender.sendToWriter(record); + } catch (IllegalArgumentException iae) { + taskPluginCollector + .collectDirtyRecord(record, iae.getMessage()); + } catch (IndexOutOfBoundsException ioe) { + taskPluginCollector + .collectDirtyRecord(record, ioe.getMessage()); + } catch (Exception e) { + if (e instanceof DataXException) { + throw (DataXException) e; + } + //regard then failure of transform as dirty record + taskPluginCollector.collectDirtyRecord(record, e.getMessage()); + } + + return record; + } + + private int getAllColumnsCount(String filePath) { + Path path = new Path(filePath); + try { + Reader reader = OrcFile.createReader(path, OrcFile.readerOptions(hadoopConf)); + return reader.getTypes().get(0).getSubtypesCount(); + } catch (IOException e) { + String message = "读取orcfile column列数失败,请联系系统管理员"; + throw DataXException.asDataXException(HdfsReaderErrorCode.READ_FILE_ERROR, message); + } + } + + private int getMaxIndex(List columnConfigs) { + int maxIndex = -1; + for (ColumnEntry columnConfig : columnConfigs) { + 
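+            // e.g. configured indexes [0, 3, 1] yield maxIndex 3, so four fields are projected;
+            // a negative index is rejected with CONFIG_INVALID_EXCEPTION below.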
Integer columnIndex = columnConfig.getIndex(); + if (columnIndex != null && columnIndex < 0) { + String message = String.format("您column中配置的index不能小于0,请修改为正确的index,column配置:%s", + Json.toJson(columnConfigs, null)); + LOG.error(message); + throw DataXException.asDataXException(HdfsReaderErrorCode.CONFIG_INVALID_EXCEPTION, message); + } else if (columnIndex != null && columnIndex > maxIndex) { + maxIndex = columnIndex; + } + } + return maxIndex; + } + + private enum Type { + STRING, LONG, BOOLEAN, DOUBLE, DATE, + } + + public boolean checkHdfsFileType(String filepath, String specifiedFileType) { + if(StringUtils.isBlank(specifiedFileType)){ + return true; + } + Path file = new Path(filepath); + + try { + FSDataInputStream in = fileSystem.open(file); + + if (StringUtils.equalsIgnoreCase(specifiedFileType, Constant.CSV) + || StringUtils.equalsIgnoreCase(specifiedFileType, Constant.TEXT)) { + + boolean isORC = isORCFile(file, fileSystem, in); + if (isORC) { + return false; + } + boolean isRC = isRCFile(filepath, in); + if (isRC) { + return false; + } + boolean isSEQ = isSequenceFile(filepath, in); + if (isSEQ) { + return false; + } + //default file type is TEXT or CSV + return !isORC && !isRC && !isSEQ; + + } else if (StringUtils.equalsIgnoreCase(specifiedFileType, Constant.ORC)) { + + return isORCFile(file, fileSystem, in); + } else if (StringUtils.equalsIgnoreCase(specifiedFileType, Constant.RC)) { + + return isRCFile(filepath, in); + } else if (StringUtils.equalsIgnoreCase(specifiedFileType, Constant.SEQ)) { + + return isSequenceFile(filepath, in); + } else if (StringUtils.equalsIgnoreCase(specifiedFileType, Constant.HFILE)){ + //Accept all files + return true; + } + + } catch (Exception e) { + String message = String.format("检查文件[%s]类型失败,目前支持ORC,SEQUENCE,RCFile,TEXT,CSV五种格式的文件," + + "请检查您文件类型和文件是否正确。", filepath); + LOG.error(message); + throw DataXException.asDataXException(HdfsReaderErrorCode.READ_FILE_ERROR, message, e); + } + return false; + } + + /** + * if the file is ORC File + * @param file + * @param fs + * @param in + * @return + */ + private boolean isORCFile(Path file, FileSystem fs, FSDataInputStream in) { + try { + // figure out the size of the file using the option or filesystem + long size = fs.getFileStatus(file).getLen(); + + //read last bytes into buffer to get PostScript + int readSize = (int) Math.min(size, DIRECTORY_SIZE_GUESS); + in.seek(size - readSize); + ByteBuffer buffer = ByteBuffer.allocate(readSize); + in.readFully(buffer.array(), buffer.arrayOffset() + buffer.position(), + buffer.remaining()); + + //read the PostScript + //get length of PostScript + int psLen = buffer.get(readSize - 1) & 0xff; + int len = OrcFile.MAGIC.length(); + if (psLen < len + 1) { + return false; + } + int offset = buffer.arrayOffset() + buffer.position() + buffer.limit() - 1 + - len; + byte[] array = buffer.array(); + // now look for the magic string at the end of the postscript. + if (Text.decode(array, offset, len).equals(OrcFile.MAGIC)) { + return true; + } else { + // If it isn't there, this may be the 0.11.0 version of ORC. 
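+                // (ORC 0.11 wrote the magic only at the head of the file; from 0.12 on it also
+                // appears in the postscript at the tail, which is what was checked above.)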
+ // Read the first 3 bytes of the file to check for the header + in.seek(0); + byte[] header = new byte[len]; + in.readFully(header, 0, len); + // if it isn't there, this isn't an ORC file + if (Text.decode(header, 0, len).equals(OrcFile.MAGIC)) { + return true; + } + } + } catch (IOException e) { + LOG.info(String.format("检查文件类型: [%s] 不是ORC File.", file.toString())); + } + return false; + } + + /** + * if the file is RC file + * @param filepath + * @param in + * @return + */ + private boolean isRCFile(String filepath, FSDataInputStream in) { + + // The first version of RCFile used the sequence file header. + final byte[] ORIGINAL_MAGIC = new byte[]{(byte) 'S', (byte) 'E', (byte) 'Q'}; + // The 'magic' bytes at the beginning of the RCFile + final byte[] RC_MAGIC = new byte[]{(byte) 'R', (byte) 'C', (byte) 'F'}; + // the version that was included with the original magic, which is mapped + // into ORIGINAL_VERSION + final byte ORIGINAL_MAGIC_VERSION_WITH_METADATA = 6; + // All of the versions should be place in this list. + // version with SEQ + final int ORIGINAL_VERSION = 0; + // version with RCF + final int NEW_MAGIC_VERSION = 1; + final int CURRENT_VERSION = NEW_MAGIC_VERSION; + byte version; + + byte[] magic = new byte[RC_MAGIC.length]; + try { + in.seek(0); + in.readFully(magic); + + if (Arrays.equals(magic, ORIGINAL_MAGIC)) { + byte vers = in.readByte(); + if (vers != ORIGINAL_MAGIC_VERSION_WITH_METADATA) { + return false; + } + version = ORIGINAL_VERSION; + } else { + if (!Arrays.equals(magic, RC_MAGIC)) { + return false; + } + + // Set 'version' + version = in.readByte(); + if (version > CURRENT_VERSION) { + return false; + } + } + + if (version == ORIGINAL_VERSION) { + try { + Class keyCls = hadoopConf.getClassByName(Text.readString(in)); + Class valCls = hadoopConf.getClassByName(Text.readString(in)); + if (!keyCls.equals(RCFile.KeyBuffer.class) + || !valCls.equals(RCFile.ValueBuffer.class)) { + return false; + } + } catch (ClassNotFoundException e) { + return false; + } + } + // is compressed? + boolean decompress = in.readBoolean(); + if (version == ORIGINAL_VERSION) { + // is block-compressed? it should be always false. 
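+                // RCFile never uses block compression, so a SEQ-header file that claims it
+                // must be a genuine block-compressed SequenceFile rather than an RCFile.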
+ boolean blkCompressed = in.readBoolean(); + if (blkCompressed) { + return false; + } + } + return true; + } catch (IOException e) { + LOG.info(String.format("检查文件类型: [%s] 不是RC File.", filepath)); + } + return false; + } + + /** + * if the file is Sequence file + * @param filepath + * @param in + * @return + */ + private boolean isSequenceFile(String filepath, FSDataInputStream in) { + byte[] SEQ_MAGIC = new byte[]{(byte) 'S', (byte) 'E', (byte) 'Q'}; + byte[] magic = new byte[SEQ_MAGIC.length]; + try { + in.seek(0); + in.readFully(magic); + if (Arrays.equals(magic, SEQ_MAGIC)) { + return true; + } else { + return false; + } + } catch (IOException e) { + LOG.info(String.format("检查文件类型: [%s] 不是Sequence File.", filepath)); + } + return false; + } + + + /** + * check if the file is empty + * @param filePath + * @return + * @throws IOException + */ + private boolean isEmpty(String filePath){ + FileStatus status; + try { + status = fileSystem.getFileStatus(new Path(filePath)); + return status.getLen() <= 0; + } catch (IOException e) { + throw DataXException.asDataXException(HdfsReaderErrorCode.READ_FILE_ERROR, e.getMessage(), e); + } + } + + private Hive getHiveConnByUris(String hiveMetaStoreUris) throws IOException, InterruptedException { + HiveConf hiveConf = new HiveConf(); + hiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, hiveMetaStoreUris); + UserGroupInformation hiveUgi; + if(haveKerberos){ + Properties kerberosProps = KerberosUtil.getProperties(); + kerberosProps = null == kerberosProps? new Properties() : kerberosProps; + if(StringUtils.isNotBlank(kerberosProps.getProperty("kerberos.krb5.path", ""))){ + System.setProperty("java.security.krb5.conf", kerberosProps.getProperty("kerberos.krb5.path")); + } + hiveConf.setVar(HiveConf.ConfVars.METASTORE_USE_THRIFT_SASL, "true"); + hiveConf.set("hadoop.security.authentication", "kerberos"); + hiveConf.setVar(HiveConf.ConfVars.METASTORE_KERBEROS_PRINCIPAL, + this.readerConfig.getString(Key.HIVE_KEBEROS_PRINCIPAL, DEFAULT_HIVE_USER + "/_HOST@EXAMPLE.COM")); + hiveUgi = this.kerberosAuthentication(hiveConf, this.kerberosPrincipal, this.kerberosKeytabFilePath); + }else{ + hiveUgi = this.getUgiInAuth(this.readerConfig); + } + return hiveUgi.doAs((PrivilegedExceptionAction) () -> { + Hive hive1 = Hive.get(hiveConf); + hive1.getMSC(); + return hive1; + }); + } + + private UserGroupInformation getUgiInAuth(Configuration taskConfig){ + String userName = taskConfig.getString(Key.LDAP_USERNAME, ""); + String password = taskConfig.getString(Key.LDAP_USERPASSWORD, ""); + if(StringUtils.isNotBlank(userName) && StringUtils.isNotBlank(password)){ + try { + password = (String) CryptoUtils.string2Object(password); + } catch (Exception e) { + LOG.error("Fail to decrypt password", e); + throw DataXException.asDataXException(HdfsReaderErrorCode.CONFIG_INVALID_EXCEPTION, e); + } + Properties properties = null; + try { + properties = LdapUtil.getLdapProperties(); + }catch(Exception e){ + //Ignore + } + if(null != properties){ + LdapConnector ldapConnector = LdapConnector.getInstance(properties); + if(!ldapConnector.authenticate(userName, password)){ + throw DataXException.asDataXException(HdfsReaderErrorCode.CONFIG_INVALID_EXCEPTION, "LDAP authenticate fail"); + } + }else{ + throw DataXException.asDataXException(HdfsReaderErrorCode.CONFIG_INVALID_EXCEPTION, "Engine need LDAP configuration"); + } + } + UserGroupInformation ugi; + try { + UserGroupInformation.setConfiguration(hadoopConf); + String procUser = System.getProperty("user.name", ""); + String execUser = 
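+            /* the effective remote user prefers the explicit LDAP userName, then the exec.user property */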
System.getProperty(EXEC_USER, ""); + String remoteUser = StringUtils.isNotBlank(userName) ? userName : execUser; + if(StringUtils.isNotBlank(remoteUser) && !remoteUser.equals(procUser)){ + //Disable the cache + this.hadoopConf.setBoolean( + String.format(FS_DISABLE_CACHE, URI.create(this.hadoopConf.get(FS_DEFAULT_NAME_KEY, "")).getScheme()), true); + ugi = UserGroupInformation.createRemoteUser(remoteUser); + }else{ + ugi = UserGroupInformation.getCurrentUser(); + } + } catch (Exception e) { + LOG.error(e.getMessage()); + throw DataXException.asDataXException(HdfsReaderErrorCode.HDFS_PROXY_ERROR, e); + } + return ugi; + } + private UserGroupInformation kerberosAuthentication(String kerberosPrincipal, String kerberosKeytabFilePath){ + return kerberosAuthentication(this.hadoopConf, kerberosPrincipal, kerberosKeytabFilePath); + } + + private UserGroupInformation kerberosAuthentication(org.apache.hadoop.conf.Configuration config, + String kerberosPrincipal, String kerberosKeytabFilePath) { + UserGroupInformation ugi = null; + if(StringUtils.isNotBlank(kerberosPrincipal) && StringUtils.isNotBlank(kerberosKeytabFilePath)) { + UserGroupInformation.setConfiguration(config); + try { + ugi = UserGroupInformation.loginUserFromKeytabAndReturnUGI(kerberosPrincipal + .substring(0, kerberosPrincipal.indexOf("@")), kerberosKeytabFilePath); + } catch (Exception e) { + String message = String.format("kerberos认证失败,请确定kerberosKeytabFilePath[%s]和kerberosPrincipal[%s]填写正确", + kerberosKeytabFilePath, kerberosPrincipal); + LOG.error(message); + throw DataXException.asDataXException(HdfsReaderErrorCode.KERBEROS_LOGIN_ERROR, e); + } + } + return ugi; + } + + private enum HiveFileType{ + /** + * TYPE: TEXT + */ + TEXT, + /** + * TYPE: ORC + */ + ORC, + /** + * TYPE: AVRO + */ + AVRO, + /** + * TYPE: PARQUET + */ + PARQUET, + /** + * TYPE: RC + */ + RC, + /** + * TYPE: SEQ + */ + SEQ; + static final Map INPUT_FORMAT = new HashMap<>(); + static{ + INPUT_FORMAT.put(new TextFileStorageFormatDescriptor().getInputFormat(), TEXT); + INPUT_FORMAT.put(new ORCFileStorageFormatDescriptor().getInputFormat(), ORC); + INPUT_FORMAT.put(new AvroStorageFormatDescriptor().getInputFormat(), AVRO); + INPUT_FORMAT.put(new ParquetFileStorageFormatDescriptor().getInputFormat(), PARQUET); + INPUT_FORMAT.put(new RCFileStorageFormatDescriptor().getInputFormat(), RC); + INPUT_FORMAT.put(new SequenceFileStorageFormatDescriptor().getInputFormat(), SEQ); + } + static HiveFileType input(String inputStreamFormat){ + return INPUT_FORMAT.get(inputStreamFormat); + } + } + + /** + * detect the file type + * @param tableDescriptor tableDescriptor + * @return + */ + private String detectFileType(StorageDescriptor tableDescriptor){ + //search file type by output format of table/partition + HiveFileType hiveFileType = HiveFileType.input(tableDescriptor.getInputFormat()); + return hiveFileType != null ? 
hiveFileType.toString(): ""; + } +} diff --git a/exchangis-engines/engines/datax/datax-hdfsreader/src/main/java/com/alibaba/datax/plugin/reader/hdfsreader/Key.java b/exchangis-engines/engines/datax/datax-hdfsreader/src/main/java/com/alibaba/datax/plugin/reader/hdfsreader/Key.java new file mode 100644 index 000000000..8f94127ea --- /dev/null +++ b/exchangis-engines/engines/datax/datax-hdfsreader/src/main/java/com/alibaba/datax/plugin/reader/hdfsreader/Key.java @@ -0,0 +1,44 @@ +package com.alibaba.datax.plugin.reader.hdfsreader; + +public final class Key { + + /** + * plugin configuration + */ + final static String PATH = "path"; + final static String DEFAULT_FS = "defaultFS"; + static final String FILETYPE = "fileType"; + static final String HADOOP_CONFIG = "hadoopConfig"; + static final String HAVE_KERBEROS = "haveKerberos"; + static final String KERBEROS_KEYTAB_FILE_PATH = "kerberosKeytabFilePath"; + static final String KERBEROS_PRINCIPAL = "kerberosPrincipal"; + static final String LDAP_USERNAME="ldapUserName"; + static final String LDAP_USERPASSWORD="ldapUserPassword"; + static final String PARTITIONS_VALUES="partitionValues"; + static final String HIVE_TABLE = "hiveTable"; + static final String HIVE_DATABASE="hiveDatabase"; + static final String HIVE_METASTORE_URIS="hiveMetastoreUris"; + static final String HIVE_KEBEROS_PRINCIPAL="hivePrincipal"; + + /** + * keys of meta schema information + */ + static final String HIVE_META_SERDE_INFO = "serdeInfo"; + static final String HIVE_META_NUM_BUCKETS = "numBuckets"; + static final String HIVE_META_BUCKET_COLS = "bucketCols"; + static final String HIVE_META_SORT_COLS = "sortCols"; + static final String HIVE_META_INPUT_FORMAT = "inputFormat"; + static final String HIVE_META_OUTPUT_FORMAT = "outputFormat"; + static final String HIVE_META_PARAMETERS = "parameters"; + static final String HIVE_META_COMPRESSED = "compressed"; + + /** + * keys of HFile configuration + */ + public static final String HFILE_PARSE_CONFIG = "hFileParseConfig"; + public static final String HFILE_PARSE_ROW_KEY = "rowKey"; + public static final String HFILE_PARSE_FAMILY = "family"; + public static final String HFIEL_PARSE_QUALIFIER = "qualifier"; + public static final String HFILE_PARSE_VALUE = "value"; + public static final String HFILE_TIMESTAMP = "timestamp"; +} diff --git a/exchangis-engines/engines/datax/datax-hdfsreader/src/main/java/com/alibaba/datax/plugin/reader/hdfsreader/hfile/HBASEV1HFileParser.java b/exchangis-engines/engines/datax/datax-hdfsreader/src/main/java/com/alibaba/datax/plugin/reader/hdfsreader/hfile/HBASEV1HFileParser.java new file mode 100644 index 000000000..b46189e7c --- /dev/null +++ b/exchangis-engines/engines/datax/datax-hdfsreader/src/main/java/com/alibaba/datax/plugin/reader/hdfsreader/hfile/HBASEV1HFileParser.java @@ -0,0 +1,94 @@ +package com.alibaba.datax.plugin.reader.hdfsreader.hfile; + +import com.alibaba.datax.common.exception.DataXException; +import com.alibaba.datax.common.util.Configuration; +import com.alibaba.datax.plugin.reader.hdfsreader.HdfsReaderErrorCode; +import com.alibaba.datax.plugin.reader.hdfsreader.Key; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.CellUtil; +import org.apache.hadoop.hbase.io.hfile.CacheConfig; +import org.apache.hadoop.hbase.io.hfile.HFile; +import org.apache.hadoop.hbase.io.hfile.HFileScanner; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import 
java.nio.charset.Charset; +import java.util.ArrayList; +import java.util.List; + +/** + * HFile parser of HBase 1.x + * @author davidhua + * 2020/2/24 + */ +public class HBASEV1HFileParser implements HFileParser{ + + private static final Logger LOG = LoggerFactory.getLogger(HBASEV1HFileParser.class); + /** + * File system + */ + private FileSystem fileSystem; + + private Charset encoding = Charset.defaultCharset(); + + public HBASEV1HFileParser(FileSystem fileSystem){ + this.fileSystem = fileSystem; + } + + public HBASEV1HFileParser(FileSystem fileSystem, String encoding){ + this.fileSystem = fileSystem; + this.encoding = Charset.forName(encoding); + } + @Override + public void parse(String inputPath, Configuration parseConf, Action action) { + org.apache.hadoop.conf.Configuration configuration = fileSystem.getConf(); + LOG.info("Start to parse HFile: [" + inputPath + "] in HBASEV1HFileParser"); + try (HFile.Reader reader = HFile.createReader(fileSystem, new Path(inputPath), + new CacheConfig(configuration), false, configuration)) { + HFileScanner scanner = reader.getScanner(configuration, true, true); + if(null == parseConf){ + parseConf = Configuration.from("{}"); + for(String parseColumn : PARSE_COLUMNS){ + parseConf.set(parseColumn, true); + } + } + if(scanner.seekTo()) { + do { + //Cell entity + Cell cell = scanner.getCell(); + List sourceList = new ArrayList<>(); + parseConf.getKeys().forEach(configKey -> { + switch(configKey){ + case Key.HFILE_PARSE_ROW_KEY: + sourceList.add(new String(CellUtil.cloneRow(cell), encoding)); + break; + case Key.HFILE_PARSE_FAMILY: + sourceList.add(new String(CellUtil.cloneFamily(cell), encoding)); + break; + case Key.HFIEL_PARSE_QUALIFIER: + sourceList.add(new String(CellUtil.cloneQualifier(cell), encoding)); + break; + case Key.HFILE_PARSE_VALUE: + sourceList.add(new String(CellUtil.cloneValue(cell), encoding)); + break; + case Key.HFILE_TIMESTAMP: + sourceList.add(String.valueOf(cell.getTimestamp())); + break; + } + }); + String[] sourceLine = new String[sourceList.size()]; + sourceList.toArray(sourceLine); + action.process(sourceLine); + } while (scanner.next()); + } + } catch (IOException e) { + String message = "解析读取[" + inputPath + "]"; + throw DataXException.asDataXException(HdfsReaderErrorCode.READ_HFILE_ERROR, message, e); + } + //Ignore exception + } + +} diff --git a/exchangis-engines/engines/datax/datax-hdfsreader/src/main/java/com/alibaba/datax/plugin/reader/hdfsreader/hfile/HFileParser.java b/exchangis-engines/engines/datax/datax-hdfsreader/src/main/java/com/alibaba/datax/plugin/reader/hdfsreader/hfile/HFileParser.java new file mode 100644 index 000000000..de8e1e0a4 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-hdfsreader/src/main/java/com/alibaba/datax/plugin/reader/hdfsreader/hfile/HFileParser.java @@ -0,0 +1,32 @@ +package com.alibaba.datax.plugin.reader.hdfsreader.hfile; + +import com.alibaba.datax.common.util.Configuration; +import com.alibaba.datax.plugin.reader.hdfsreader.Key; + +/** + * Parser + * @author davidhua + * 2020/2/24 + */ +public interface HFileParser { + String[] PARSE_COLUMNS = new String[]{Key.HFILE_PARSE_ROW_KEY, + Key.HFILE_PARSE_FAMILY, Key.HFIEL_PARSE_QUALIFIER, + Key.HFILE_PARSE_VALUE, Key.HFILE_TIMESTAMP}; + /** + * Parse HFile under input path + * @param inputPath input path + * @param parseConf configuration for parsing + * @param action action + */ + void parse(String inputPath, Configuration parseConf, Action action); + + @FunctionalInterface + interface Action{ + /** + * Process source lines + 
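+         * (one element per selected HFile field; with the default parse configuration the
+         * order follows PARSE_COLUMNS: rowKey, family, qualifier, value, timestamp)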
* @param sourceLine source line + * @return + */ + void process(String[] sourceLine); + } +} diff --git a/exchangis-engines/engines/datax/datax-hdfsreader/src/main/java/com/alibaba/datax/plugin/reader/hdfsreader/hfile/HFileParserFactory.java b/exchangis-engines/engines/datax/datax-hdfsreader/src/main/java/com/alibaba/datax/plugin/reader/hdfsreader/hfile/HFileParserFactory.java new file mode 100644 index 000000000..2d1f2366f --- /dev/null +++ b/exchangis-engines/engines/datax/datax-hdfsreader/src/main/java/com/alibaba/datax/plugin/reader/hdfsreader/hfile/HFileParserFactory.java @@ -0,0 +1,26 @@ +package com.alibaba.datax.plugin.reader.hdfsreader.hfile; + +import org.apache.hadoop.fs.FileSystem; + +import java.util.concurrent.ConcurrentHashMap; + +/** + * @author davidhua + * 2020/2/24 + */ +public class HFileParserFactory { + private static final ConcurrentHashMap hFileParserStoreMap = new ConcurrentHashMap<>(); + + /** + * Get parser implemented by HBASE server + * @param fileSystem + * @return + */ + public static HFileParser getHBASEImpl(FileSystem fileSystem){ + return hFileParserStoreMap.computeIfAbsent("HBASEImpl", key -> new HBASEV1HFileParser(fileSystem)); + } + + public static HFileParser getHBASEImpl(FileSystem fileSystem, String encoding){ + return hFileParserStoreMap.computeIfAbsent("HBASEImpl", key -> new HBASEV1HFileParser(fileSystem, encoding)); + } +} diff --git a/exchangis-engines/engines/datax/datax-hdfsreader/src/main/resources/plugin.json b/exchangis-engines/engines/datax/datax-hdfsreader/src/main/resources/plugin.json new file mode 100644 index 000000000..606d19740 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-hdfsreader/src/main/resources/plugin.json @@ -0,0 +1,6 @@ +{ + "name": "hdfsreader", + "class": "com.alibaba.datax.plugin.reader.hdfsreader.HdfsReader", + "description": "useScene: test. mechanism: use datax framework to transport data from hdfs. 
warn: The more you know about the data, the less problems you encounter.", + "developer": "alibaba" +} \ No newline at end of file diff --git a/exchangis-engines/engines/datax/datax-hdfsreader/src/main/resources/plugin_job_template.json b/exchangis-engines/engines/datax/datax-hdfsreader/src/main/resources/plugin_job_template.json new file mode 100644 index 000000000..d7e3e2bd0 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-hdfsreader/src/main/resources/plugin_job_template.json @@ -0,0 +1,11 @@ +{ + "name": "hdfsreader", + "parameter": { + "path": "", + "defaultFS": "", + "column": [], + "fileType": "orc", + "encoding": "UTF-8", + "fieldDelimiter": "," + } +} \ No newline at end of file diff --git a/exchangis-engines/engines/datax/datax-hdfsreader/src/test/java/com/alibaba/datax/plugin/reader/hdfsreader/HFileParser.java b/exchangis-engines/engines/datax/datax-hdfsreader/src/test/java/com/alibaba/datax/plugin/reader/hdfsreader/HFileParser.java new file mode 100644 index 000000000..dae334eab --- /dev/null +++ b/exchangis-engines/engines/datax/datax-hdfsreader/src/test/java/com/alibaba/datax/plugin/reader/hdfsreader/HFileParser.java @@ -0,0 +1,23 @@ +package com.alibaba.datax.plugin.reader.hdfsreader; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.io.hfile.CacheConfig; +import org.apache.hadoop.hbase.io.hfile.HFile; +import org.apache.hadoop.hbase.io.hfile.HFileScanner; + +import java.io.IOException; + +public class HFileParser { + + public void parse(String pathString, Configuration hadoopConf) throws IOException { + FileSystem fs = new Path(pathString).getFileSystem(hadoopConf); + HFile.Reader reader = HFile.createReader(fs, new Path(pathString), new CacheConfig(hadoopConf), false, hadoopConf); + HFileScanner scanner = reader.getScanner(hadoopConf, true, true); + scanner.seekTo(); + Cell cell = scanner.getCell(); + scanner.next(); + } +} diff --git a/exchangis-engines/engines/datax/datax-hdfswriter/pom.xml b/exchangis-engines/engines/datax/datax-hdfswriter/pom.xml new file mode 100644 index 000000000..69e76c86c --- /dev/null +++ b/exchangis-engines/engines/datax/datax-hdfswriter/pom.xml @@ -0,0 +1,162 @@ + + + + exchangis-engine-datax + com.webank.wedatasphere.exchangis + ${revision} + ../pom.xml + + 4.0.0 + + datax-hdfswriter + jar + 3.0.0-Plus-2 + + 3.1.3 + 3.3.4 + 2.9.1 + + + + + com.webank.wedatasphere.exchangis + datax-core + provided + + + org.apache.hadoop + hadoop-common + + + slf4j-log4j12 + org.slf4j + + + ${datax.engine.version} + + + org.slf4j + slf4j-api + provided + + + ch.qos.logback + logback-classic + provided + + + org.apache.hadoop + hadoop-hdfs + ${hadoop.version} + + + xml-apis + xml-apis + + + netty-all + io.netty + + + + + + io.netty + netty-all + 4.1.86.Final + + + org.apache.hadoop + hadoop-common + ${hadoop.version} + + + commons-logging + commons-logging + + + + + + org.apache.hadoop + hadoop-mapreduce-client-core + ${hadoop.version} + + + + org.apache.hive + hive-serde + ${hive.version} + + + org.apache.hive + hive-service + ${hive.version} + + + geronimo-jaspic_1.0_spec + org.apache.geronimo.specs + + + org.pentaho + pentaho-aggdesigner-algorithm + + + + + org.apache.hive + hive-common + ${hive.version} + + + org.apache.hive.hcatalog + hive-hcatalog-core + ${hive.version} + + + org.pentaho + pentaho-aggdesigner-algorithm + + + + + xalan + xalan + 2.7.1 + + + xerces + xercesImpl + ${xerces.version} + + + + + 
+ org.apache.maven.plugins + maven-assembly-plugin + 2.2.1 + + + assemble + + single + + + install + + + + false + false + + ${basedir}/src/main/assembly/package.xml + + plugin + + + + + \ No newline at end of file diff --git a/exchangis-engines/engines/datax/datax-hdfswriter/src/main/assembly/package.xml b/exchangis-engines/engines/datax/datax-hdfswriter/src/main/assembly/package.xml new file mode 100644 index 000000000..d32a822be --- /dev/null +++ b/exchangis-engines/engines/datax/datax-hdfswriter/src/main/assembly/package.xml @@ -0,0 +1,33 @@ + + hdfswriter + + dir + + false + + + src/main/resources + + plugin.json + plugin_job_template.json + + plugin/writer/hdfswriter + + + target/ + + datax-hdfswriter-${datax.engine.version}.jar + + plugin/writer/hdfswriter + + + + + false + plugin/writer/hdfswriter/libs + runtime + + + \ No newline at end of file diff --git a/exchangis-engines/engines/datax/datax-hdfswriter/src/main/java/com/alibaba/datax/plugin/writer/hdfswriter/Constant.java b/exchangis-engines/engines/datax/datax-hdfswriter/src/main/java/com/alibaba/datax/plugin/writer/hdfswriter/Constant.java new file mode 100644 index 000000000..32c16b013 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-hdfswriter/src/main/java/com/alibaba/datax/plugin/writer/hdfswriter/Constant.java @@ -0,0 +1,6 @@ +package com.alibaba.datax.plugin.writer.hdfswriter; + +public class Constant { + + public static final String META_FIELD_DELIMITER = "field.delim"; +} diff --git a/exchangis-engines/engines/datax/datax-hdfswriter/src/main/java/com/alibaba/datax/plugin/writer/hdfswriter/HdfsWriter.java b/exchangis-engines/engines/datax/datax-hdfswriter/src/main/java/com/alibaba/datax/plugin/writer/hdfswriter/HdfsWriter.java new file mode 100644 index 000000000..305e04c7c --- /dev/null +++ b/exchangis-engines/engines/datax/datax-hdfswriter/src/main/java/com/alibaba/datax/plugin/writer/hdfswriter/HdfsWriter.java @@ -0,0 +1,439 @@ +package com.alibaba.datax.plugin.writer.hdfswriter; + +import com.alibaba.datax.common.constant.CommonConstant; +import com.webank.wedatasphere.exchangis.datax.common.constant.TransportType; +import com.alibaba.datax.common.exception.DataXException; +import com.alibaba.datax.common.plugin.RecordReceiver; +import com.alibaba.datax.common.spi.Writer; +import com.alibaba.datax.common.util.Configuration; +import com.webank.wedatasphere.exchangis.datax.core.job.meta.MetaSchema; +import com.webank.wedatasphere.exchangis.datax.core.transport.stream.ChannelInput; +import com.webank.wedatasphere.exchangis.datax.core.transport.stream.StreamMeta; +import com.alibaba.datax.core.util.CompressSuffixName; +import com.alibaba.datax.core.util.FrameworkErrorCode; +import com.alibaba.datax.plugin.unstructuredstorage.writer.Constant; +import com.alibaba.datax.plugin.unstructuredstorage.writer.UnstructuredStorageWriterUtil; +import com.google.common.collect.Sets; +import org.apache.commons.io.Charsets; +import org.apache.commons.io.IOUtils; +import org.apache.commons.lang3.StringUtils; +import org.apache.hadoop.fs.FSDataOutputStream; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.security.UserGroupInformation; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.io.InputStream; +import java.security.PrivilegedAction; +import java.util.*; + + +public class HdfsWriter extends Writer { + public static class Job extends Writer.Job { + private static final Logger LOG = LoggerFactory.getLogger(Job.class); + + private Configuration 
writerSliceConfig = null; + + private String defaultFS; + private String path; + private String fileType; + private String fileName; + private List<Configuration> columns; + private String writeMode; + private String fieldDelimiter; + private String compress; + private String encoding; + + private String tempPath; + /** + * Full paths of the temporary files + */ + private HashSet<String> tmpFiles = new HashSet<>(); + /** + * Full paths of the final files + */ + private HashSet<String> endFiles = new HashSet<>(); + + private HdfsWriterUtil hdfsWriterUtil = null; + + @Override + public boolean isSupportStream() { + return true; + } + + @Override + public void init() { + this.writerSliceConfig = this.getPluginJobConf(); + this.validateParameter(); + + //create the writer util and connect to the underlying file system + hdfsWriterUtil = new HdfsWriterUtil(); + + hdfsWriterUtil.getFileSystem(defaultFS, this.writerSliceConfig); + + } + + @Override + public void syncMetaData(MetaSchema metaSchema) { + if(StringUtils.isNotBlank(writerSliceConfig.getString(Key.HIVE_METASTORE_URIS, ""))){ + hdfsWriterUtil.updateHiveMetaData(writerSliceConfig.getString(Key.HIVE_DATABASE), + writerSliceConfig.getString(Key.HIVE_TABLE), + writerSliceConfig.getString(Key.HIVE_METASTORE_URIS), + metaSchema); + } + } + + private void validateParameter() { + this.defaultFS = this.writerSliceConfig.getNecessaryValue(Key.DEFAULT_FS, HdfsWriterErrorCode.REQUIRED_VALUE); + //fileType check + if(getTransportType() == TransportType.RECORD) { + this.fileType = this.writerSliceConfig.getNecessaryValue(Key.FILE_TYPE, HdfsWriterErrorCode.REQUIRED_VALUE); + if (!fileType.equalsIgnoreCase("ORC") && !fileType.equalsIgnoreCase("TEXT")) { + String message = "HdfsWriter插件目前只支持ORC和TEXT两种格式的文件,请将filetype选项的值配置为ORC或者TEXT"; + throw DataXException.asDataXException(HdfsWriterErrorCode.ILLEGAL_VALUE, message); + } + //columns check + this.columns = this.writerSliceConfig.getListConfiguration(Key.COLUMN); + if (null != columns && columns.size() > 0) { + for (Configuration eachColumnConf : columns) { + eachColumnConf.getNecessaryValue(Key.NAME, HdfsWriterErrorCode.COLUMN_REQUIRED_VALUE); + eachColumnConf.getNecessaryValue(Key.TYPE, HdfsWriterErrorCode.COLUMN_REQUIRED_VALUE); + } + } + //fieldDelimiter check + this.fieldDelimiter = this.writerSliceConfig.getString(Key.FIELD_DELIMITER, ","); + if (null == fieldDelimiter) { + throw DataXException.asDataXException(HdfsWriterErrorCode.REQUIRED_VALUE, + String.format("您提供配置文件有误,[%s]是必填参数.", Key.FIELD_DELIMITER)); + } + } + //path + this.path = this.writerSliceConfig.getNecessaryValue(Key.PATH, HdfsWriterErrorCode.REQUIRED_VALUE); + if (!path.startsWith("/")) { + String message = String.format("请检查参数path:[%s],需要配置为绝对路径", path); + LOG.error(message); + throw DataXException.asDataXException(HdfsWriterErrorCode.ILLEGAL_VALUE, message); + } else if (path.contains("*") || path.contains("?")) { + String message = String.format("请检查参数path:[%s],不能包含*,?等特殊字符", path); + LOG.error(message); + throw DataXException.asDataXException(HdfsWriterErrorCode.ILLEGAL_VALUE, message); + } + //fileName + this.fileName = this.writerSliceConfig.getString(Key.FILE_NAME, ""); + //writeMode check + this.writeMode = this.writerSliceConfig.getNecessaryValue(Key.WRITE_MODE, HdfsWriterErrorCode.REQUIRED_VALUE); + writeMode = writeMode.toLowerCase().trim(); + Set<String> supportedWriteModes = Sets.newHashSet("append", "nonconflict", "truncate"); + if (!supportedWriteModes.contains(writeMode)) { + throw DataXException.asDataXException(HdfsWriterErrorCode.ILLEGAL_VALUE, + String.format("仅支持append, nonConflict,truncate模式, 不支持您配置的 writeMode 模式 : [%s]", + writeMode)); + } + 
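+ // Persist the normalized writeMode; its semantics (enforced in prepare() below) are: "append" keeps existing files, "nonconflict" aborts when the target directory is non-empty, "truncate" clears the directory before writing.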
this.writerSliceConfig.set(Key.WRITE_MODE, writeMode); + //compress check + this.compress = this.writerSliceConfig.getString(Key.COMPRESS, null); + if(StringUtils.isNotBlank(fileType)) { + if (fileType.equalsIgnoreCase("TEXT")) { + Set textSupportedCompress = Sets.newHashSet("GZIP", "BZIP2"); + //用户可能配置的是compress:"",空字符串,需要将compress设置为null + if (StringUtils.isBlank(compress)) { + this.writerSliceConfig.set(Key.COMPRESS, null); + } else { + compress = compress.toUpperCase().trim(); + if (!textSupportedCompress.contains(compress)) { + throw DataXException.asDataXException(HdfsWriterErrorCode.ILLEGAL_VALUE, + String.format("目前TEXT FILE仅支持GZIP、BZIP2 两种压缩, 不支持您配置的 compress 模式 : [%s]", + compress)); + } + } + } else if (fileType.equalsIgnoreCase("ORC")) { + Set orcSupportedCompress = Sets.newHashSet("NONE", "SNAPPY"); + if (null == compress) { + this.writerSliceConfig.set(Key.COMPRESS, "NONE"); + } else { + compress = compress.toUpperCase().trim(); + if (!orcSupportedCompress.contains(compress)) { + throw DataXException.asDataXException(HdfsWriterErrorCode.ILLEGAL_VALUE, + String.format("目前ORC FILE仅支持SNAPPY压缩, 不支持您配置的 compress 模式 : [%s]", + compress)); + } + } + + } + } + //Kerberos check + Boolean haveKerberos = this.writerSliceConfig.getBool(Key.HAVE_KERBEROS, false); + if (haveKerberos) { + this.writerSliceConfig.getNecessaryValue(Key.KERBEROS_KEYTAB_FILE_PATH, HdfsWriterErrorCode.REQUIRED_VALUE); + this.writerSliceConfig.getNecessaryValue(Key.KERBEROS_PRINCIPAL, HdfsWriterErrorCode.REQUIRED_VALUE); + } + // encoding check + this.encoding = this.writerSliceConfig.getString(Key.ENCODING, Constant.DEFAULT_ENCODING); + try { + encoding = encoding.trim(); + this.writerSliceConfig.set(Key.ENCODING, encoding); + Charsets.toCharset(encoding); + } catch (Exception e) { + throw DataXException.asDataXException(HdfsWriterErrorCode.ILLEGAL_VALUE, + String.format("不支持您配置的编码格式:[%s]", encoding), e); + } + } + + @Override + public void prepare() { + if(StringUtils.isNotBlank(writerSliceConfig.getString(Key.HIVE_METASTORE_URIS, ""))){ + LOG.info("update the configuration dynamically by hive meta..."); + boolean affected = hdfsWriterUtil.updateConfigByHiveMeta( + writerSliceConfig.getString(Key.HIVE_DATABASE), + writerSliceConfig.getString(Key.HIVE_TABLE), + writerSliceConfig.getString(Key.HIVE_METASTORE_URIS), + this.writerSliceConfig + ); + if(affected){ + //validate the configuration again + this.validateParameter(); + } + } + //若路径已经存在,检查path是否是目录 + if (hdfsWriterUtil.isPathexists(path)) { + if (!hdfsWriterUtil.isPathDir(path)) { + throw DataXException.asDataXException(HdfsWriterErrorCode.ILLEGAL_VALUE, + String.format("您配置的path: [%s] 不是一个合法的目录, 请您注意文件重名, 不合法目录名等情况.", + path)); + } + //根据writeMode对目录下文件进行处理 + String[] pathList = hdfsWriterUtil.hdfsDirList(path); + Path[] existFilePaths = new Path[pathList.length]; + for(int i = 0; i < pathList.length; i ++){ + existFilePaths[i] = new Path(pathList[i]); + } + boolean isExistFile = false; + if (existFilePaths.length > 0) { + isExistFile = true; + } + + if ("truncate".equals(writeMode) && isExistFile) { + LOG.info(String.format("由于您配置了writeMode truncate, 开始清理 [%s] 下面的内容", + path)); + hdfsWriterUtil.deleteFiles(existFilePaths); + } else if ("append".equalsIgnoreCase(writeMode)) { + LOG.info(String.format("由于您配置了writeMode append, 写入前不做清理工作", + path, fileName)); + } else if ("nonconflict".equalsIgnoreCase(writeMode) && isExistFile) { + LOG.info(String.format("由于您配置了writeMode nonConflict, 开始检查 [%s] 下面的内容", path)); + List allFiles = new ArrayList(); + for (Path 
eachFile : existFilePaths) { + allFiles.add(eachFile.toString()); + } + LOG.error(String.format("冲突文件列表为: [%s]", StringUtils.join(allFiles, ","))); + throw DataXException.asDataXException(HdfsWriterErrorCode.ILLEGAL_VALUE, + String.format("由于您配置了writeMode nonConflict,但您配置的path: [%s] 目录不为空, 下面存在其他文件或文件夹.", path)); + } + } else { + hdfsWriterUtil.mkdirs(new Path(path)); + } + } + + @Override + public void post() { + //reset configuration + UserGroupInformation.setConfiguration(hdfsWriterUtil.hadoopConf); + if(!tmpFiles.isEmpty() && !endFiles.isEmpty()) { + hdfsWriterUtil.renameFile(tmpFiles, endFiles); + }else if (StringUtils.isNotBlank(this.tempPath)) { + try{ + LOG.info(String.format("move files or directories under temporary path: %s to path: %s", tempPath, path)); + hdfsWriterUtil.moveToDirectory(Arrays.asList(hdfsWriterUtil.hdfsDirList(this.tempPath)), this.path); + }finally { + LOG.info(String.format("delete temporary path : %s", tempPath)); + hdfsWriterUtil.deleteDir(new Path(this.tempPath)); + this.tempPath = null; + } + } + } + + @Override + public void destroy() { + if(StringUtils.isNotBlank(this.tempPath)){ + hdfsWriterUtil.deleteDir(new Path(this.tempPath)); + } + hdfsWriterUtil.closeFileSystem(); + } + + + @Override + public List split(int mandatoryNumber) { + LOG.info("begin do split..."); + List writerSplitConfigs = new ArrayList(); + String filePrefix = fileName; + + Set allFiles = new HashSet(); + //获取该路径下的所有已有文件列表 + if (hdfsWriterUtil.isPathexists(path)) { + allFiles.addAll(Arrays.asList(hdfsWriterUtil.hdfsDirList(path))); + } + + String fileSuffix; + //临时存放路径 + String storePath = UnstructuredStorageWriterUtil + .buildTmpFilePath(this.path, String.format(CommonConstant.TEMP_PREFIX, System.currentTimeMillis()), + IOUtils.DIR_SEPARATOR_UNIX, + path -> hdfsWriterUtil.isPathexists(path)); + this.tempPath = storePath; + //最终存放路径 + String endStorePath = buildFilePath(); + this.path = endStorePath; + for (int i = 0; i < mandatoryNumber; i++) { + // handle same file name + Configuration splitedTaskConfig = this.writerSliceConfig.clone(); + String fullFileName; + if(getTransportType() == TransportType.RECORD) { + String endFullFileName = null; + do{ + fileSuffix = UUID.randomUUID().toString().replace('-', '_'); + fullFileName = String.format("%s%s%s__%s", defaultFS, storePath, filePrefix, fileSuffix); + endFullFileName = String.format("%s%s%s__%s", defaultFS, endStorePath, filePrefix, fileSuffix); + }while(allFiles.contains(endFullFileName)); + allFiles.add(endFullFileName); + String suffix = CompressSuffixName.chooseSuffix(this.compress); + if(StringUtils.isNotBlank(suffix)){ + fullFileName += suffix; + endFullFileName += suffix; + } + this.tmpFiles.add(fullFileName); + this.endFiles.add(endFullFileName); + LOG.info(String.format("splited write file name:[%s]", + fullFileName)); + }else{ + fullFileName = String.format("%s%s%s", defaultFS , storePath ,fileName); + } + splitedTaskConfig + .set(com.alibaba.datax.plugin.unstructuredstorage.writer.Key.FILE_NAME, + fullFileName); + splitedTaskConfig.set(com.alibaba.datax.plugin.unstructuredstorage.writer.Key.TEMP_PATH, + this.tempPath); + writerSplitConfigs.add(splitedTaskConfig); + } + LOG.info("end do split."); + return writerSplitConfigs; + } + + private String buildFilePath() { + if (!this.path.endsWith(String.valueOf(IOUtils.DIR_SEPARATOR_UNIX))) { + this.path = this.path + IOUtils.DIR_SEPARATOR_UNIX; + } + return this.path; + } + + } + + public static class Task extends Writer.Task { + private static final Logger LOG = 
LoggerFactory.getLogger(Task.class); + + private Configuration writerSliceConfig; + + private String defaultFS; + private String fileType; + private String fileName; + + private HdfsWriterUtil hdfsWriterUtil = null; + + @Override + public void init() { + this.writerSliceConfig = this.getPluginJobConf(); + + this.defaultFS = this.writerSliceConfig.getString(Key.DEFAULT_FS); + this.fileType = this.writerSliceConfig.getString(Key.FILE_TYPE); + this.fileName = this.writerSliceConfig.getString(Key.FILE_NAME); + hdfsWriterUtil = new HdfsWriterUtil(); + hdfsWriterUtil.getFileSystem(defaultFS, writerSliceConfig); + } + + @Override + public void prepare() { + + } + + @Override + public void startWrite(RecordReceiver lineReceiver) { + LOG.info("begin do write..."); + hdfsWriterUtil.getUgi().doAs((PrivilegedAction) () -> { + LOG.info(String.format("write to file : [%s]", fileName)); + if (fileType.equalsIgnoreCase("TEXT")) { + //write TXT FILE, you should remove you file's extension before + int extIndex = fileName.lastIndexOf("."); + if(extIndex > 0 && fileName.substring(extIndex + 1).lastIndexOf(IOUtils.DIR_SEPARATOR_UNIX) < 0){ + fileName = fileName.substring(0, extIndex); + } + hdfsWriterUtil.textFileStartWrite(lineReceiver, writerSliceConfig, + fileName, + getTaskPluginCollector()); + } else if (fileType.equalsIgnoreCase("ORC")) { + //写ORC FILE + hdfsWriterUtil.orcFileStartWrite(lineReceiver, writerSliceConfig, fileName, + getTaskPluginCollector()); + } + return null; + }); + LOG.info("end do write"); + } + + + @Override + public void startWrite(ChannelInput channelInput) { + LOG.info("begin do write from stream channel..."); + String finalPathPrefix = fileName; + hdfsWriterUtil.getUgi().doAs((PrivilegedAction) () -> { + try{ + InputStream inputStream = null; + while((inputStream = channelInput.nextStream()) != null){ + StreamMeta streamMeta = channelInput.streamMetaData(this.writerSliceConfig.getString + (Key.ENCODING, Constant.DEFAULT_ENCODING)); + LOG.info("begin do read input stream, name : " + streamMeta.getName() + ", relativePath:" + streamMeta.getRelativePath()); + String relativePath = streamMeta.getRelativePath(); + String pathPrefix = finalPathPrefix; + if(!pathPrefix.endsWith(String.valueOf(IOUtils.DIR_SEPARATOR))){ + //means that having the fileNamePrefix + String fileNamePrefix = pathPrefix.substring(pathPrefix.lastIndexOf(IOUtils.DIR_SEPARATOR) + 1); + pathPrefix = pathPrefix.substring(0, pathPrefix.lastIndexOf(IOUtils.DIR_SEPARATOR) + 1); + //modify the relativePath + relativePath = relativePath.substring(0, relativePath.lastIndexOf(IOUtils.DIR_SEPARATOR) + 1) + + fileNamePrefix + "_" + streamMeta.getName(); + } + String destPath = pathPrefix + relativePath; + FSDataOutputStream outputStream = null; + try{ + outputStream = hdfsWriterUtil.fileSystem.create(new Path(destPath), true); + UnstructuredStorageWriterUtil.writeToStream(inputStream, outputStream, + this.writerSliceConfig); + outputStream.flush(); + }finally{ + if(null != outputStream){ + IOUtils.closeQuietly(outputStream); + } + } + } + }catch(IOException e){ + throw DataXException.asDataXException(FrameworkErrorCode.CHANNEL_STREAM_ERROR, e); + } + return null; + }); + LOG.info("end do write from stream channel"); + } + + @Override + public void post() { + + } + + @Override + public void destroy() { + + } + } + + public static void main(String[] args) { + + } +} diff --git a/exchangis-engines/engines/datax/datax-hdfswriter/src/main/java/com/alibaba/datax/plugin/writer/hdfswriter/HdfsWriterErrorCode.java 
b/exchangis-engines/engines/datax/datax-hdfswriter/src/main/java/com/alibaba/datax/plugin/writer/hdfswriter/HdfsWriterErrorCode.java new file mode 100644 index 000000000..14b546acc --- /dev/null +++ b/exchangis-engines/engines/datax/datax-hdfswriter/src/main/java/com/alibaba/datax/plugin/writer/hdfswriter/HdfsWriterErrorCode.java @@ -0,0 +1,91 @@ +package com.alibaba.datax.plugin.writer.hdfswriter; + +import com.alibaba.datax.common.spi.ErrorCode; + +/** + * Created by shf on 15/10/8. + */ +public enum HdfsWriterErrorCode implements ErrorCode { + /** + * CONFIG INVALID EXCEPTION + */ + CONFIG_INVALID_EXCEPTION("HdfsWriter-00", "The parameters' configuration is wrong(您的参数配置错误)"), + /** + * REQUIRED VALUE + */ + REQUIRED_VALUE("HdfsWriter-01", "Missing the required parameters(您缺失了必须填写的参数值)"), + /** + * ILLEGAL VALUE + */ + ILLEGAL_VALUE("HdfsWriter-02", "The value of configuration is illegal(您填写的参数值不合法)"), + /** + * CHARSET ERROR + */ + WRITER_FILE_WITH_CHARSET_ERROR("HdfsWriter-03", "Charset error(您配置的编码未能正常写入)"), + /** + * WRITE FILE IO ERROR + */ + WRITE_FILE_IO_ERROR("HdfsWriter-04", "IO exception occurred while writing the file(您配置的文件在写入时出现IO异常)"), + /** + * WRITE RUNTIME EXCEPTION + */ + WRITER_RUNTIME_EXCEPTION("HdfsWriter-05", "RuntimeException(出现运行时异常, 请联系我们)"), + /** + * CONNECT HDFS IO ERROR + */ + CONNECT_HDFS_IO_ERROR("HdfsWriter-06", "IO exception occurred when building connection to HDFS(与HDFS建立连接时出现IO异常)"), + /** + * COLUMN REQUIRED VALUE + */ + COLUMN_REQUIRED_VALUE("HdfsWriter-07", "Missing required value in column configuration(您column配置中缺失了必须填写的参数值)"), + /** + * HDFS RENAME FILE ERROR + */ + HDFS_RENAME_FILE_ERROR("HdfsWriter-08", "Fail to move file to path configured(将文件移动到配置路径失败)"), + /** + * KERBEROS LOGIN ERROR + */ + KERBEROS_LOGIN_ERROR("HdfsWriter-09", "KERBEROS authentication fail(KERBEROS认证失败)"), + /** + * HDFS PROXY ERROR + */ + HDFS_PROXY_ERROR("HdfsWriter-10", "Fail to create HDFS PROXY(创建HDFS PROXY失败)"), + /** + * ADD PARTITION ERROR + */ + ADD_PARTITION_ERROR("HdfsWriter-11", "Fail to add partition(增加partition失败)"), + /** + * UPDATE HIVE META + */ + UPDATE_HIVE_META("HdfsWriter-12", "Fail to update hive meta(更新HIVE元信息失败)"), + /** + * UPDATE CONFIG ERROR(update by hive meta) + */ + UPDATE_CONFIG_ERROR("HdfsWriter-13", "Fail to update configuration dynamically(动态更新任务配置失败)"); + + + private final String code; + private final String description; + + private HdfsWriterErrorCode(String code, String description) { + this.code = code; + this.description = description; + } + + @Override + public String getCode() { + return this.code; + } + + @Override + public String getDescription() { + return this.description; + } + + @Override + public String toString() { + return String.format("Code:[%s], Description:[%s].", this.code, + this.description); + } + +} diff --git a/exchangis-engines/engines/datax/datax-hdfswriter/src/main/java/com/alibaba/datax/plugin/writer/hdfswriter/HdfsWriterUtil.java b/exchangis-engines/engines/datax/datax-hdfswriter/src/main/java/com/alibaba/datax/plugin/writer/hdfswriter/HdfsWriterUtil.java new file mode 100644 index 000000000..c5bfc5113 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-hdfswriter/src/main/java/com/alibaba/datax/plugin/writer/hdfswriter/HdfsWriterUtil.java @@ -0,0 +1,1037 @@ +package com.alibaba.datax.plugin.writer.hdfswriter; + +import com.alibaba.datax.common.element.Column; +import com.alibaba.datax.common.element.Record; +import com.alibaba.datax.common.exception.DataXException; +import com.alibaba.datax.common.plugin.RecordReceiver; 
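+// For orientation, a minimal hdfswriter parameter block that drives this util; values are illustrative only, keys follow Key.java and plugin_job_template.json:
+// { "name": "hdfswriter", "parameter": { "defaultFS": "hdfs://localhost:9000", "fileType": "ORC",
+//   "path": "/warehouse/demo", "fileName": "demo", "column": [{"name": "id", "type": "bigint"}],
+//   "writeMode": "append", "fieldDelimiter": ",", "compress": "SNAPPY" } }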
+import com.alibaba.datax.common.plugin.TaskPluginCollector; +import com.alibaba.datax.common.util.Configuration; +import com.webank.wedatasphere.exchangis.datax.core.job.meta.MetaSchema; +import com.alibaba.datax.core.util.LdapUtil; +import com.alibaba.datax.plugin.utils.HdfsUserGroupInfoLock; +import com.google.common.collect.Lists; +import com.webank.wedatasphere.exchangis.datax.common.CryptoUtils; +import com.webank.wedatasphere.exchangis.datax.common.ldap.LdapConnector; +import com.webank.wedatasphere.exchangis.datax.util.Json; +import com.webank.wedatasphere.exchangis.datax.util.KerberosUtil; +import org.apache.commons.io.IOUtils; +import org.apache.commons.lang3.StringUtils; +import org.apache.commons.lang3.tuple.MutablePair; +import org.apache.hadoop.fs.*; +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.metastore.IMetaStoreClient; +import org.apache.hadoop.hive.metastore.Warehouse; +import org.apache.hadoop.hive.metastore.api.*; +import org.apache.hadoop.hive.ql.io.*; +import org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat; +import org.apache.hadoop.hive.ql.io.orc.OrcSerde; +import org.apache.hadoop.hive.ql.io.orc.OrcStruct; +import org.apache.hadoop.hive.ql.metadata.Hive; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory; +import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; +import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; +import org.apache.hadoop.io.NullWritable; +import org.apache.hadoop.io.Text; +import org.apache.hadoop.io.compress.CompressionCodec; +import org.apache.hadoop.mapred.*; +import org.apache.hadoop.security.UserGroupInformation; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import java.io.IOException; +import java.net.URI; +import java.security.PrivilegedExceptionAction; +import java.text.SimpleDateFormat; +import java.util.*; +import java.util.Date; + +import static com.alibaba.datax.plugin.writer.hdfswriter.Key.HIVE_KEBEROS_PRINCIPAL; +import static org.apache.hadoop.fs.FileSystem.FS_DEFAULT_NAME_KEY; + +public class HdfsWriterUtil { + private static final String DEFAULT_HIVE_USER = "hadoop"; + private static final Logger LOG = LoggerFactory.getLogger(HdfsWriter.Job.class); + FileSystem fileSystem = null; + private JobConf conf = null; + org.apache.hadoop.conf.Configuration hadoopConf = null; + + private static final String FS_AUTOMATIC_CLOSE_KEY = "fs.automatic.close"; + private static final String FS_DISABLE_CACHE = "fs.%s.impl.disable.cache"; + private static final String HADOOP_SECURITY_AUTHENTICATION_KEY = "hadoop.security.authentication"; + private static final String FALLBACK_TO_SIMPLE_AUTH_KEY = "ipc.client.fallback-to-simple-auth-allowed"; + private static final String HDFS_DEFAULT_FS_KEY = "fs.defaultFS"; + private static final String DEFAULT_COLUMN_CONFIG = "{\"name\":\"column%s\",\"type\":\"string\"}"; + private static final String EXEC_USER = "exec.user"; + // Kerberos + private Boolean haveKerberos = false; + private String kerberosKeytabFilePath; + private String kerberosPrincipal; + private UserGroupInformation ugi = null; + private Configuration writerConfig; + + UserGroupInformation getUgi(){ + return ugi; + } + + void getFileSystem(String defaultFS, Configuration taskConfig) { + hadoopConf = new org.apache.hadoop.conf.Configuration(); + this.writerConfig = taskConfig; + Configuration hadoopSiteParams = taskConfig.getConfiguration(Key.HADOOP_CONFIG); + Map 
hadoopSiteParamsAsJsonObject = Json.fromJson(taskConfig.getString(Key.HADOOP_CONFIG), Map.class); + if (null != hadoopSiteParams) { + Set paramKeys = hadoopSiteParams.getKeys(); + for (String each : paramKeys) { + assert hadoopSiteParamsAsJsonObject != null; + hadoopConf.set(each, String.valueOf(hadoopSiteParamsAsJsonObject.get(each))); + } + } + hadoopConf.set(HDFS_DEFAULT_FS_KEY, defaultFS); + //disable automatic close + hadoopConf.setBoolean(FS_AUTOMATIC_CLOSE_KEY, false); + //if use kerberos authentication + this.haveKerberos = taskConfig.getBool(Key.HAVE_KERBEROS, false); + HdfsUserGroupInfoLock.lock(); + try { + if (haveKerberos) { + this.kerberosKeytabFilePath = taskConfig.getString(Key.KERBEROS_KEYTAB_FILE_PATH); + this.kerberosPrincipal = taskConfig.getString(Key.KERBEROS_PRINCIPAL); + hadoopConf.set(HADOOP_SECURITY_AUTHENTICATION_KEY, "kerberos"); + //disable the cache + this.hadoopConf.setBoolean( + String.format(FS_DISABLE_CACHE, URI.create(this.hadoopConf.get(FS_DEFAULT_NAME_KEY, "")).getScheme()), true); + hadoopConf.setBoolean(FALLBACK_TO_SIMPLE_AUTH_KEY, true); + ugi = getUgiByKerberos(this.hadoopConf, this.kerberosPrincipal, this.kerberosKeytabFilePath); + } else { + ugi = getUgiInAuth(taskConfig); + } + try { + fileSystem = ugi.doAs((PrivilegedExceptionAction) () -> { + conf = new JobConf(hadoopConf); + FileSystem fs; + try { + fs = FileSystem.get(conf); + fs.exists(new Path("/")); + } catch (IOException e) { + String message = String.format("获取FileSystem时发生网络IO异常,请检查您的网络是否正常!HDFS地址:[%s]", + "message:defaultFS =" + defaultFS); + LOG.error(message); + throw DataXException.asDataXException(HdfsWriterErrorCode.CONNECT_HDFS_IO_ERROR, e); + } catch (Exception e) { + String message = String.format("获取FileSystem失败,请检查HDFS地址是否正确: [%s]", + "message:defaultFS =" + defaultFS); + LOG.error(message); + throw DataXException.asDataXException(HdfsWriterErrorCode.CONNECT_HDFS_IO_ERROR, e); + } + return fs; + }); + } catch (Exception e) { + throw DataXException.asDataXException(HdfsWriterErrorCode.CONNECT_HDFS_IO_ERROR, e); + } + }finally{ + HdfsUserGroupInfoLock.unlock(); + } + if (null == fileSystem || null == conf) { + String message = String.format("获取FileSystem失败,请检查HDFS地址是否正确: [%s]", + "message:defaultFS =" + defaultFS); + LOG.error(message); + throw DataXException.asDataXException(HdfsWriterErrorCode.CONNECT_HDFS_IO_ERROR, message); + } + } + + /** + * update hive meta information + * @param metaSchema meta schema info + */ + void updateHiveMetaData(String database, String table, String hiveMetaStoreUris,MetaSchema metaSchema){ + Hive hive = null; + try { + try { + hive = getHiveConnByUris(hiveMetaStoreUris); + IMetaStoreClient metaStoreClient = hive.getMSC(); + Table tableInfo = metaStoreClient.getTable(database, table); + StorageDescriptor storageDescriptor = tableInfo.getSd(); + List fieldSchemas = metaSchema.getFieldSchemas(); + List cols = new ArrayList<>(); + fieldSchemas.forEach(fieldSchema -> cols.add(new FieldSchema(fieldSchema.getName(), fieldSchema.getType(), + fieldSchema.getComment()))); + storageDescriptor.setCols(cols); + SerDeInfo serDeInfo = metaSchema.getSchemaInfo(Key.HIVE_META_SERDE_INFO, SerDeInfo.class); + if (null != serDeInfo) { + storageDescriptor.setSerdeInfo(serDeInfo); + } + Integer numBuckets = metaSchema.getSchemaInfo(Key.HIVE_META_NUM_BUCKETS, Integer.class); + if (null != numBuckets) { + storageDescriptor.setNumBuckets(numBuckets); + } + List bucketCols = metaSchema.getSchemaInfoList(Key.HIVE_META_BUCKET_COLS, String.class); + if (null != bucketCols) 
{ + storageDescriptor.setBucketCols(bucketCols); + } + List sortCols = metaSchema.getSchemaInfoList(Key.HIVE_META_SORT_COLS, Order.class); + if (null != sortCols) { + storageDescriptor.setSortCols(sortCols); + } + String inputFormat = metaSchema.getSchemaInfo(Key.HIVE_META_INPUT_FORMAT, String.class); + if (null != inputFormat) { + storageDescriptor.setInputFormat(inputFormat); + } + String outputFormat = metaSchema.getSchemaInfo(Key.HIVE_META_OUTPUT_FORMAT, String.class); + if (null != outputFormat) { + storageDescriptor.setOutputFormat(outputFormat); + } + Map parameters = metaSchema.getSchemaInfoMap(Key.HIVE_META_PARAMETERS, String.class, String.class); + if (null != parameters && !parameters.isEmpty()) { + storageDescriptor.setParameters(parameters); + } + Boolean compressed = metaSchema.getSchemaInfo(Key.HIVE_META_COMPRESSED, Boolean.class); + if (null != compressed) { + storageDescriptor.setCompressed(compressed); + } + //alter_table finally + metaStoreClient.alter_table(database, table, tableInfo, true); + } finally { + if (null != hive) { + hive.getMSC().close(); + } + } + }catch(Exception e){ + LOG.error("Update hive metadata error table:[{}], database:[{}]", table, database, e); + throw DataXException.asDataXException(HdfsWriterErrorCode.UPDATE_HIVE_META, e.getMessage()); + } + } + + /** + * update reader configuration by hive meta information dynamically + * @param database database name + * @param table table name + * @param hiveMetaStoreUris hiveMetaStore uris + * @param originConfig the configuration should be updated + * @return if affect the original configuration + */ + boolean updateConfigByHiveMeta(String database, String table, + String hiveMetaStoreUris, Configuration originConfig){ + try{ + IMetaStoreClient client = null; + boolean affect = false; + try{ + Hive hive = getHiveConnByUris(hiveMetaStoreUris); + client = hive.getMSC(); + Table tableInfo = client.getTable(database, table); + StorageDescriptor descriptor = tableInfo.getSd(); + String partitionValues = originConfig.getString(Key.PARTITIONS_VALUES); + if(StringUtils.isNotBlank(partitionValues)){ + String[] partitions = partitionValues.split(","); + Partition partition= null; + try{ + partition = client.getPartition(database, table, Arrays.asList(partitions)); + }catch(Exception e){ + LOG.error(e.getMessage()); + //ignore + } + if(null != partition){ + //if the partition exists, use its storage descriptor + descriptor = partition.getSd(); + } + } + String fileType = detectFileType(descriptor); + if(StringUtils.isNotBlank(fileType) && !fileType + .equalsIgnoreCase(originConfig.getString(Key.FILE_TYPE, ""))){ + affect = true; + originConfig.set(Key.FILE_TYPE, fileType); + if(fileType.equalsIgnoreCase(HiveFileType.TEXT.toString())){ + originConfig.set(Key.COMPRESS, "GZIP"); + }else if(fileType.equalsIgnoreCase(HiveFileType.ORC.toString())){ + originConfig.set(Key.COMPRESS, "SNAPPY"); + }else{ + originConfig.set(Key.COMPRESS, null); + } + } + String fieldDelimiter = descriptor.getSerdeInfo().getParameters().getOrDefault(Constant.META_FIELD_DELIMITER, ""); + if(StringUtils.isNotEmpty(fieldDelimiter) + && !fieldDelimiter.equalsIgnoreCase(originConfig + .getString(com.alibaba.datax.plugin.unstructuredstorage.writer.Key.FIELD_DELIMITER, ""))){ + affect = true; + originConfig.set(com.alibaba.datax.plugin.unstructuredstorage.writer.Key.FIELD_DELIMITER, fieldDelimiter); + } + if(null == originConfig.getListConfiguration(Key.COLUMN)){ + affect = true; + List fieldSchemas = new ArrayList<>(); + 
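+ // Fallback: with no user-provided column list, each column of the Hive storage descriptor is converted to a FieldSchema below and written back into the job configuration.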
descriptor.getCols().forEach(col -> fieldSchemas.add(new MetaSchema.FieldSchema(col.getName(), col.getType(), null))); + originConfig.set(Key.COLUMN, fieldSchemas); + } + //to add partition + if(StringUtils.isNotBlank(originConfig.getString(Key.PARTITIONS_VALUES, ""))){ + addHiveTablePartitions(hive, database, table, originConfig.getString(Key.PARTITIONS_VALUES, "")); + } + return affect; + }finally{ + if(null != client){ + client.close(); + } + } + }catch(Exception e){ + if(e instanceof DataXException){ + throw (DataXException)e; + } + LOG.error("Fail to update configuration", e); + throw DataXException.asDataXException(HdfsWriterErrorCode.UPDATE_CONFIG_ERROR, e.getMessage()); + } + } + /** + * 获取指定目录先的文件列表 + * + * @param dir + * @return 拿到的是文件全路径, + */ + String[] hdfsDirList(String dir) { + Path path = new Path(dir); + String[] files = null; + try { + FileStatus[] status = fileSystem.listStatus(path); + files = new String[status.length]; + for (int i = 0; i < status.length; i++) { + files[i] = status[i].getPath().toString(); + } + } catch (IOException e) { + String message = String.format("获取目录[%s]文件列表时发生网络IO异常,请检查您的网络是否正常!", dir); + LOG.error(message); + throw DataXException.asDataXException(HdfsWriterErrorCode.CONNECT_HDFS_IO_ERROR, e); + } + return files; + } + + /** + * 获取以fileName__ 开头的文件列表 + * + * @param dir + * @param fileName + * @return + */ + public Path[] hdfsDirList(String dir, String fileName) { + Path path = new Path(dir); + Path[] files = null; + String filterFileName = fileName + "__*"; + try { + PathFilter pathFilter = new GlobFilter(filterFileName); + FileStatus[] status = fileSystem.listStatus(path, pathFilter); + files = new Path[status.length]; + for (int i = 0; i < status.length; i++) { + files[i] = status[i].getPath(); + } + } catch (IOException e) { + String message = String.format("获取目录[%s]下文件名以[%s]开头的文件列表时发生网络IO异常,请检查您的网络是否正常!", + dir, fileName); + LOG.error(message); + throw DataXException.asDataXException(HdfsWriterErrorCode.CONNECT_HDFS_IO_ERROR, e); + } + return files; + } + + boolean isPathexists(String filePath) { + Path path = new Path(filePath); + boolean exist = false; + try { + exist = fileSystem.exists(path); + } catch (IOException e) { + String message = String.format("判断文件路径[%s]是否存在时发生网络IO异常,请检查您的网络是否正常!", + "message:filePath =" + filePath); + LOG.error(message); + throw DataXException.asDataXException(HdfsWriterErrorCode.CONNECT_HDFS_IO_ERROR, e); + } + return exist; + } + + boolean isPathDir(String filePath) { + Path path = new Path(filePath); + boolean isDir = false; + try { + isDir = fileSystem.isDirectory(path); + } catch (IOException e) { + String message = String.format("判断路径[%s]是否是目录时发生网络IO异常,请检查您的网络是否正常!", filePath); + LOG.error(message); + throw DataXException.asDataXException(HdfsWriterErrorCode.CONNECT_HDFS_IO_ERROR, e); + } + return isDir; + } + + void mkdirs(Path path){ + try { + fileSystem.mkdirs(path); + }catch(IOException e){ + String message = String.format("Occurred IO error while creating directory %s", path.toString()); + LOG.error(message); + throw DataXException.asDataXException(HdfsWriterErrorCode.CONNECT_HDFS_IO_ERROR, e); + } + } + void deleteFiles(Path[] paths) { + for (int i = 0; i < paths.length; i++) { + LOG.info(String.format("delete file [%s].", paths[i].toString())); + try { + fileSystem.delete(paths[i], true); + } catch (IOException e) { + String message = String.format("删除文件[%s]时发生IO异常,请检查您的网络是否正常!", + paths[i].toString()); + LOG.error(message); + throw 
DataXException.asDataXException(HdfsWriterErrorCode.CONNECT_HDFS_IO_ERROR, e); + } + } + } + + void deleteDir(Path path) { + LOG.info(String.format("start delete tmp dir [%s] .", path.toString())); + try { + if (isPathexists(path.toString())) { + fileSystem.delete(path, true); + } + } catch (Exception e) { + String message = String.format("删除临时目录[%s]时发生IO异常,请检查您的网络是否正常!", path.toString()); + LOG.error(message); + throw DataXException.asDataXException(HdfsWriterErrorCode.CONNECT_HDFS_IO_ERROR, e); + } + LOG.info(String.format("finish delete tmp dir [%s] .", path.toString())); + } + + void renameFile(HashSet tmpFiles, HashSet endFiles) { + Path tmpFilesParent = null; + if (tmpFiles.size() != endFiles.size()) { + String message = String.format("临时目录下文件名个数与目标文件名个数不一致!"); + LOG.error(message); + throw DataXException.asDataXException(HdfsWriterErrorCode.HDFS_RENAME_FILE_ERROR, message); + } else { + try { + for (Iterator it1 = tmpFiles.iterator(), it2 = endFiles.iterator(); it1.hasNext() && it2.hasNext(); ) { + String srcFile = it1.next().toString(); + String dstFile = it2.next().toString(); + Path srcFilePah = new Path(srcFile); + Path dstFilePah = new Path(dstFile); + if (tmpFilesParent == null) { + tmpFilesParent = srcFilePah.getParent(); + } + LOG.info(String.format("start rename file [%s] to file [%s].", srcFile, dstFile)); + boolean renameTag = false; + long fileLen = fileSystem.getFileStatus(srcFilePah).getLen(); + if (fileLen > 0) { + renameTag = fileSystem.rename(srcFilePah, dstFilePah); + if (!renameTag) { + String message = String.format("重命名文件[%s]失败,请检查您的网络是否正常!", srcFile); + LOG.error(message); + throw DataXException.asDataXException(HdfsWriterErrorCode.HDFS_RENAME_FILE_ERROR, message); + } + LOG.info(String.format("finish rename file [%s] to file [%s].", srcFile, dstFile)); + } else { + LOG.info(String.format("文件[%s]内容为空,请检查写入是否正常!", srcFile)); + } + } + } catch (Exception e) { + String message = String.format("重命名文件时发生异常,请检查您的网络是否正常!"); + LOG.error(message); + throw DataXException.asDataXException(HdfsWriterErrorCode.CONNECT_HDFS_IO_ERROR, e); + } finally { + deleteDir(tmpFilesParent); + } + } + } + + void moveToDirectory(List srcPaths, String destPath){ + try{ + Path dest = new Path(destPath); + if(!fileSystem.exists(dest)){ + fileSystem.mkdirs(dest); + }else if(!fileSystem.isDirectory(dest)){ + String message = String.format("move to directory error, %s is not a directory", destPath); + LOG.error(message); + throw DataXException.asDataXException(HdfsWriterErrorCode.HDFS_RENAME_FILE_ERROR, message); + } + for(String srcPath : srcPaths){ + if(srcPath.endsWith(String.valueOf(IOUtils.DIR_SEPARATOR_UNIX))){ + srcPath = srcPath.substring(0, srcPath.length() - 1); + } + Path path = new Path(srcPath); + String destName = path.getName(); + StringBuilder destPathBuilder = new StringBuilder(destPath); + if(!destPathBuilder.toString().endsWith(String.valueOf(IOUtils.DIR_SEPARATOR_UNIX))){ + destPathBuilder.append(IOUtils.DIR_SEPARATOR_UNIX); + } + destPathBuilder.append(destName); + if(fileSystem.isDirectory(path)) { + moveToDirectory(Arrays.asList(hdfsDirList(path.toString())), destPathBuilder.toString()); + }else{ + Path destFilePath = new Path(destPathBuilder.toString()); + if(fileSystem.exists(destFilePath)){ + fileSystem.delete(destFilePath, false); + } + fileSystem.rename(path, destFilePath); + } + } + }catch(Exception e){ + String message = String.format("occurred error while move srcPaths : %s to destPath: %s ,please check your network", + Json.toJson(srcPaths, null), destPath); + 
LOG.error(message); + throw DataXException.asDataXException(HdfsWriterErrorCode.CONNECT_HDFS_IO_ERROR, e); + } + } + //关闭FileSystem + void closeFileSystem() { + try { + fileSystem.close(); + } catch (IOException e) { + String message = "关闭FileSystem时发生IO异常,请检查您的网络是否正常!"; + LOG.error(message); + throw DataXException.asDataXException(HdfsWriterErrorCode.CONNECT_HDFS_IO_ERROR, e); + } + } + + + //textfile格式文件 + public FSDataOutputStream getOutputStream(String path) { + Path storePath = new Path(path); + FSDataOutputStream fSDataOutputStream = null; + try { + fSDataOutputStream = fileSystem.create(storePath); + } catch (IOException e) { + String message = String.format("Create an FSDataOutputStream at the indicated Path[%s] failed: [%s]", + "message:path =" + path, e.getMessage()); + LOG.error(message); + throw DataXException.asDataXException(HdfsWriterErrorCode.WRITE_FILE_IO_ERROR, e); + } + return fSDataOutputStream; + } + + /** + * 写textfile类型文件 + * + * @param lineReceiver + * @param config + * @param fileName + * @param taskPluginCollector + */ + void textFileStartWrite(RecordReceiver lineReceiver, Configuration config, String fileName, + TaskPluginCollector taskPluginCollector) { + String fieldDelimiter = config.getString(Key.FIELD_DELIMITER); + List columns = config.getListConfiguration(Key.COLUMN); + String compress = config.getString(Key.COMPRESS, null); + SimpleDateFormat dateFormat = new SimpleDateFormat("yyyyMMddHHmm"); + String attempt = "attempt_" + dateFormat.format(new Date()) + "_0001_m_000000_0"; + Path outputPath = new Path(fileName); + //todo 需要进一步确定TASK_ATTEMPT_ID + conf.set(JobContext.TASK_ATTEMPT_ID, attempt); + FileOutputFormat outFormat = new TextOutputFormat(); + FileOutputFormat.setOutputPath(conf, outputPath); + FileOutputFormat.setWorkOutputPath(conf, outputPath); + if (null != compress) { + Class codecClass = getCompressCodec(compress); + if (null != codecClass) { + FileOutputFormat.setOutputCompressorClass(conf, codecClass); + } + } + try { + RecordWriter writer = outFormat.getRecordWriter(fileSystem, conf, outputPath.toString(), Reporter.NULL); + Record record = null; + while ((record = lineReceiver.getFromReader()) != null) { + MutablePair transportResult = transportOneRecord(record, fieldDelimiter, columns, taskPluginCollector); + if (!transportResult.getRight()) { + writer.write(NullWritable.get(), transportResult.getLeft()); + } + } + writer.close(Reporter.NULL); + } catch (Exception e) { + String message = String.format("写文件文件[%s]时发生IO异常,请检查您的网络是否正常!", fileName); + LOG.error(message); + Path path = new Path(fileName); + deleteDir(path.getParent()); + throw DataXException.asDataXException(HdfsWriterErrorCode.WRITE_FILE_IO_ERROR, e); + } + + } + + private static MutablePair transportOneRecord( + Record record, String fieldDelimiter, List columnsConfiguration, TaskPluginCollector taskPluginCollector) { + MutablePair, Boolean> transportResultList = transportOneRecord(record, columnsConfiguration, taskPluginCollector); + //保存<转换后的数据,是否是脏数据> + MutablePair transportResult = new MutablePair(); + transportResult.setRight(false); + if (null != transportResultList) { + Text recordResult = new Text(StringUtils.join(transportResultList.getLeft(), fieldDelimiter)); + transportResult.setRight(transportResultList.getRight()); + transportResult.setLeft(recordResult); + } + return transportResult; + } + + private Class getCompressCodec(String compress) { + Class codecClass = null; + if (null == compress) { + codecClass = null; + } else if ("GZIP".equalsIgnoreCase(compress)) { 
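+ // GZIP resolves to Hadoop's GzipCodec; validateParameter() limits TEXT files to GZIP/BZIP2 and ORC files to NONE/SNAPPY.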
+ codecClass = org.apache.hadoop.io.compress.GzipCodec.class; + } else if ("BZIP2".equalsIgnoreCase(compress)) { + codecClass = org.apache.hadoop.io.compress.BZip2Codec.class; + } else if ("SNAPPY".equalsIgnoreCase(compress)) { + //todo 等需求明确后支持 需要用户安装SnappyCodec + codecClass = org.apache.hadoop.io.compress.SnappyCodec.class; + // org.apache.hadoop.hive.ql.io.orc.ZlibCodec.class not static + //codecClass = org.apache.hadoop.hive.ql.io.orc.ZlibCodec.class; + } else { + throw DataXException.asDataXException(HdfsWriterErrorCode.ILLEGAL_VALUE, + String.format("目前不支持您配置的 compress 模式 : [%s]", compress)); + } + return codecClass; + } + + /** + * 写orcfile类型文件 + * + * @param lineReceiver + * @param config + * @param fileName + * @param taskPluginCollector + */ + void orcFileStartWrite(RecordReceiver lineReceiver, Configuration config, String fileName, + TaskPluginCollector taskPluginCollector) { + List columns = config.getListConfiguration(Key.COLUMN); + String compress = config.getString(Key.COMPRESS, null); + OrcSerde orcSerde = new OrcSerde(); + + FileOutputFormat outFormat = new OrcOutputFormat(); + if (!"NONE".equalsIgnoreCase(compress) && null != compress) { + Class codecClass = getCompressCodec(compress); + if (null != codecClass) { + FileOutputFormat.setOutputCompressorClass(conf, codecClass); + } + } + try { + RecordWriter writer = outFormat.getRecordWriter(fileSystem, conf, fileName, Reporter.NULL); + Record record = null; + StructObjectInspector inspector = null; + while ((record = lineReceiver.getFromReader()) != null) { + MutablePair, Boolean> transportResult = transportOneRecord(record, columns, taskPluginCollector); + int length = record.getColumnNumber(); + if(null == inspector){ + if(null == columns){ + columns = new ArrayList<>(); + for(int i = 0; i < length; i ++){ + columns.add(Configuration.from(String.format(DEFAULT_COLUMN_CONFIG, i))); + } + } + List columnNames = getColumnNames(columns); + List columnTypeInspectors = getColumnTypeInspectors(columns); + inspector = ObjectInspectorFactory + .getStandardStructObjectInspector(columnNames, columnTypeInspectors); + } + if (!transportResult.getRight() && null != writer) { + transformOrcStruct(transportResult, columns); + writer.write(NullWritable.get(), orcSerde.serialize(transportResult.getLeft(), inspector)); + } + } + writer.close(Reporter.NULL); + } catch (Exception e) { + String message = String.format("写文件文件[%s]时发生IO异常,请检查您的网络是否正常!", fileName); + LOG.error(message); + Path path = new Path(fileName); + deleteDir(path.getParent()); + throw DataXException.asDataXException(HdfsWriterErrorCode.WRITE_FILE_IO_ERROR, e); + } + + } + + private List getColumnNames(List columns) { + List columnNames = Lists.newArrayList(); + if(null != columns) { + for (Configuration eachColumnConf : columns) { + columnNames.add(eachColumnConf.getString(Key.NAME)); + } + } + return columnNames; + } + + /** + * 根据writer配置的字段类型,构建inspector + * + * @param columns + * @return + */ + private List getColumnTypeInspectors(List columns) { + List columnTypeInspectors = Lists.newArrayList(); + if(null != columns) { + for (Configuration eachColumnConf : columns) { + SupportHiveDataType columnType = SupportHiveDataType.valueOf(eachColumnConf.getString(Key.TYPE).toUpperCase()); + ObjectInspector objectInspector = null; + switch (columnType) { + case TINYINT: + objectInspector = ObjectInspectorFactory.getReflectionObjectInspector(Byte.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA); + break; + case SMALLINT: + objectInspector = 
ObjectInspectorFactory.getReflectionObjectInspector(Short.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA); + break; + case INT: + objectInspector = ObjectInspectorFactory.getReflectionObjectInspector(Integer.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA); + break; + case BIGINT: + objectInspector = ObjectInspectorFactory.getReflectionObjectInspector(Long.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA); + break; + case FLOAT: + objectInspector = ObjectInspectorFactory.getReflectionObjectInspector(Float.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA); + break; + case DOUBLE: + objectInspector = ObjectInspectorFactory.getReflectionObjectInspector(Double.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA); + break; + case TIMESTAMP: + objectInspector = ObjectInspectorFactory.getReflectionObjectInspector(java.sql.Timestamp.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA); + break; + case DATE: + objectInspector = ObjectInspectorFactory.getReflectionObjectInspector(java.sql.Date.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA); + break; + case STRING: + case VARCHAR: + case CHAR: + objectInspector = ObjectInspectorFactory.getReflectionObjectInspector(String.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA); + break; + case BOOLEAN: + objectInspector = ObjectInspectorFactory.getReflectionObjectInspector(Boolean.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA); + break; + case MAP: + objectInspector = OrcStruct.createObjectInspector( + TypeInfoFactory.getMapTypeInfo(TypeInfoFactory.stringTypeInfo, TypeInfoFactory.stringTypeInfo)); + break; + case ARRAY: + objectInspector = OrcStruct.createObjectInspector(TypeInfoFactory.getListTypeInfo(TypeInfoFactory.stringTypeInfo)); + break; + default: + throw DataXException + .asDataXException( + HdfsWriterErrorCode.ILLEGAL_VALUE, + String.format( + "您的配置文件中的列配置信息有误. 因为DataX 不支持数据库写入这种字段类型. 字段名:[%s], 字段类型:[%s]. 
请修改表中该字段的类型或者不同步该字段.", + eachColumnConf.getString(Key.NAME), + eachColumnConf.getString(Key.TYPE))); + } + + columnTypeInspectors.add(objectInspector); + } + } + return columnTypeInspectors; + } + + public OrcSerde getOrcSerde(Configuration config) { + String fieldDelimiter = config.getString(Key.FIELD_DELIMITER); + String compress = config.getString(Key.COMPRESS); + String encoding = config.getString(Key.ENCODING); + + OrcSerde orcSerde = new OrcSerde(); + Properties properties = new Properties(); + properties.setProperty("orc.bloom.filter.columns", fieldDelimiter); + properties.setProperty("orc.compress", compress); + properties.setProperty("orc.encoding.strategy", encoding); + + orcSerde.initialize(conf, properties); + return orcSerde; + } + + private static MutablePair, Boolean> transportOneRecord( + Record record, List columnsConfiguration, + TaskPluginCollector taskPluginCollector) { + + MutablePair, Boolean> transportResult = new MutablePair, Boolean>(); + transportResult.setRight(false); + List recordList = Lists.newArrayList(); + int recordLength = record.getColumnNumber(); + if (0 != recordLength) { + Column column; + for (int i = 0; i < recordLength; i++) { + column = record.getColumn(i); + if (null != column.getRawData()) { + if(null == columnsConfiguration){ + recordList.add(column.asString()); + continue; + } + String rowData = column.getRawData().toString(); + SupportHiveDataType columnType = SupportHiveDataType.valueOf( + columnsConfiguration.get(i).getString(Key.TYPE).toUpperCase()); + //根据writer端类型配置做类型转换 + try { + switch (columnType) { + case TINYINT: + recordList.add(Byte.valueOf(rowData)); + break; + case SMALLINT: + recordList.add(Short.valueOf(rowData)); + break; + case INT: + recordList.add(Integer.valueOf(rowData)); + break; + case BIGINT: + recordList.add(column.asLong()); + break; + case FLOAT: + recordList.add(Float.valueOf(rowData)); + break; + case DOUBLE: + recordList.add(column.asDouble()); + break; + case STRING: + case VARCHAR: + case ARRAY: + case MAP: + case CHAR: + recordList.add(column.asString()); + break; + case BOOLEAN: + recordList.add(column.asBoolean()); + break; + case DATE: + recordList.add(new java.sql.Date(column.asDate().getTime())); + break; + case TIMESTAMP: + recordList.add(new java.sql.Timestamp(column.asDate().getTime())); + break; + default: + throw DataXException + .asDataXException( + HdfsWriterErrorCode.ILLEGAL_VALUE, + String.format( + "您的配置文件中的列配置信息有误. 因为DataX 不支持数据库写入这种字段类型. 字段名:[%s], 字段类型:[%s]. 
请修改表中该字段的类型或者不同步该字段.", + columnsConfiguration.get(i).getString(Key.NAME), + columnsConfiguration.get(i).getString(Key.TYPE))); + } + } catch (Exception e) { + // warn: 此处认为脏数据 + String message = String.format( + "字段类型转换错误:你目标字段为[%s]类型,实际字段值为[%s].", + columnsConfiguration.get(i).getString(Key.TYPE), column.getRawData().toString()); + taskPluginCollector.collectDirtyRecord(record, message); + transportResult.setRight(true); + break; + } + } else { + // warn: it's all ok if nullFormat is null + recordList.add(null); + } + } + } + transportResult.setLeft(recordList); + return transportResult; + } + + private UserGroupInformation getUgiByKerberos( + org.apache.hadoop.conf.Configuration conf, + String kerberosPrincipal, String kerberosKeytabFilePath){ + return kerberosAuthentication(conf, kerberosPrincipal, kerberosKeytabFilePath); + } + + private UserGroupInformation getUgiInAuth(Configuration taskConfig){ + String userName = taskConfig.getString(Key.LDAP_USERNAME, ""); + String password = taskConfig.getString(Key.LDAP_USERPASSWORD, ""); + if(StringUtils.isNotBlank(userName) && StringUtils.isNotBlank(password)) { + try { + password = (String) CryptoUtils.string2Object(password); + } catch (Exception e) { + LOG.error("Fail to decrypt password", e); + throw DataXException.asDataXException(HdfsWriterErrorCode.CONFIG_INVALID_EXCEPTION, e); + } + Properties properties = null; + try { + properties = LdapUtil.getLdapProperties(); + }catch(Exception e){ + //Ignore + } + if(null != properties){ + LdapConnector ldapConnector = LdapConnector.getInstance(properties); + if(!ldapConnector.authenticate(userName, password)){ + throw DataXException.asDataXException(HdfsWriterErrorCode.CONFIG_INVALID_EXCEPTION, "LDAP authenticate fail"); + } + }else{ + throw DataXException.asDataXException(HdfsWriterErrorCode.CONFIG_INVALID_EXCEPTION, "Engine need LDAP configuration"); + } + } + UserGroupInformation ugi; + try { + UserGroupInformation.setConfiguration(hadoopConf); + String procUser = System.getProperty("user.name", ""); + String execUser = System.getProperty(EXEC_USER, ""); + String remoteUser = StringUtils.isNotBlank(userName) ? 
userName : execUser; + if(StringUtils.isNotBlank(remoteUser) && !remoteUser.equals(procUser)){ + //Disable the cache + this.hadoopConf.setBoolean( + String.format(FS_DISABLE_CACHE, URI.create(this.hadoopConf.get(FS_DEFAULT_NAME_KEY, "")).getScheme()), true); + ugi = UserGroupInformation.createRemoteUser(remoteUser); + }else{ + ugi = UserGroupInformation.getCurrentUser(); + } + } catch (Exception e) { + LOG.error(e.getMessage()); + throw DataXException.asDataXException(HdfsWriterErrorCode.HDFS_PROXY_ERROR, e); + } + return ugi; + } + + private void transformOrcStruct( MutablePair, Boolean> transportResult, + List columns){ + //deal the columns whose type is MAP or ARRAY + List result = transportResult.getLeft(); + for(int i = 0; i < columns.size(); i++) { + Configuration column = columns.get(i); + SupportHiveDataType columnType = SupportHiveDataType.valueOf(column.getString(Key.TYPE).toUpperCase()); + String split = ","; + if (columnType == SupportHiveDataType.ARRAY) { + String array = result.get(i).toString().trim(); + if (array.startsWith("[") && array.endsWith("]")) { + array = array.substring(1, array.length() - 1); + String[] items = array.split(split); + for (int t = 0; t < items.length; t++){ + items[t] = StringUtils.join(new String[]{"\"",items[t] ,"\""}, ""); + } + List list = Json.fromJson("[" + StringUtils.join(items, split) + "]", String.class); + List listText = new ArrayList<>(); + list.forEach(value -> listText.add(new Text(value))); + result.set(i, listText); + } + } else if (columnType == SupportHiveDataType.MAP) { + String map = result.get(i).toString().trim(); + if (map.startsWith("{") && map.endsWith("}")) { + map = map.substring(1, map.length() - 1); + String[] entries = map.split(split); + for(int t = 0; t < entries.length; t++){ + String[] attrs = entries[t].split("="); + if(attrs.length >= 2){ + entries[t] = StringUtils.join + (new String[]{"\"", attrs[0],"\":\"", attrs[1], "\""}, ""); + } + } + Map map1 = Json.fromJson("{" + StringUtils.join(entries, split) + "}", Map.class); + Map mapText = new HashMap<>(); + if(null != map1) { + map1.forEach((k, v) -> mapText.put(new Text((String) k), new Text((String) v))); + } + result.set(i, mapText); + } + } + } + } + + private Hive getHiveConnByUris(String hiveMetaStoreUris) throws IOException, InterruptedException { + HiveConf hiveConf = new HiveConf(); + hiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, hiveMetaStoreUris); + UserGroupInformation hiveUgi; + if(haveKerberos){ + Properties kerberosProps = KerberosUtil.getProperties(); + kerberosProps = null == kerberosProps? 
new Properties() : kerberosProps; + if(StringUtils.isNotBlank(kerberosProps.getProperty("kerberos.krb5.path", ""))){ + System.setProperty("java.security.krb5.conf", kerberosProps.getProperty("kerberos.krb5.path")); + } + hiveConf.setBoolVar(HiveConf.ConfVars.METASTORE_USE_THRIFT_SASL, true); + hiveConf.set("hadoop.security.authentication", "kerberos"); + hiveConf.setVar(HiveConf.ConfVars.METASTORE_KERBEROS_PRINCIPAL, + writerConfig.getString(HIVE_KEBEROS_PRINCIPAL, DEFAULT_HIVE_USER+ "/_HOST@EXAMPLE.COM")); + hiveUgi = getUgiByKerberos(hiveConf, this.kerberosPrincipal, this.kerberosKeytabFilePath); + }else{ + hiveUgi = getUgiInAuth(this.writerConfig); + } + return hiveUgi.doAs((PrivilegedExceptionAction) () -> { + Hive hive1 = Hive.get(hiveConf); + hive1.getMSC(); + return hive1; + }); + } + /** + * add hive partition + * + * @param hive hive + * @param partitionValues + */ + private void addHiveTablePartitions(Hive hive, String database, String table, String partitionValues) { + try { + String[] parts = partitionValues.split(","); + List partVal = Arrays.asList(parts); + org.apache.hadoop.hive.ql.metadata.Table t = hive.getTable(database, table); + Partition partition = new Partition(); + partition.setDbName(database); + partition.setTableName(t.getTableName()); + StorageDescriptor partitionSd = new StorageDescriptor(t.getSd()); + List partitionKeys = t.getPartitionKeys(); + partitionKeys = partitionKeys.subList(0, partVal.size()); + String location = t.getPath().toUri().getPath() + Path.SEPARATOR + Warehouse.makePartName(partitionKeys, partVal); + Path p = new Path(location); + if (!fileSystem.exists(p)) { + //build directory by runtime user + fileSystem.mkdirs(p); + } + partitionSd.setLocation(location); + partition.setSd(partitionSd); + partition.setValues(partVal); + List partitions = new ArrayList<>(); + partitions.add(partition); + hive.getMSC().add_partitions(partitions, true, true); + } catch (Exception e) { + LOG.error("Add table partition error.", e); + throw DataXException.asDataXException(HdfsWriterErrorCode.ADD_PARTITION_ERROR, e.getMessage()); + } + } + + private UserGroupInformation kerberosAuthentication(org.apache.hadoop.conf.Configuration conf, + String kerberosPrincipal, String kerberosKeytabFilePath) { + UserGroupInformation ugi = null; + if (StringUtils.isNotBlank(kerberosPrincipal) && StringUtils.isNotBlank(kerberosKeytabFilePath)) { + UserGroupInformation.setConfiguration(conf); + try { + ugi = UserGroupInformation.loginUserFromKeytabAndReturnUGI(kerberosPrincipal + .substring(0, kerberosPrincipal.indexOf("@")), kerberosKeytabFilePath); + } catch (Exception e) { + String message = String.format("kerberos认证失败,请确定kerberosKeytabFilePath[%s]和kerberosPrincipal[%s]填写正确", + kerberosKeytabFilePath, kerberosPrincipal); + LOG.error(message); + throw DataXException.asDataXException(HdfsWriterErrorCode.KERBEROS_LOGIN_ERROR, e); + } + } + return ugi; + } + + private enum HiveFileType{ + /** + * TYPE: TEXT + */ + TEXT, + /** + * TYPE: ORC + */ + ORC, + /** + * TYPE: AVRO + */ + AVRO, + /** + * TYPE: PARQUET + */ + PARQUET, + /** + * TYPE: RC + */ + RC, + /** + * TYPE: SEQ + */ + SEQ; + static final Map OUTPUT_FORMAT = new HashMap<>(); + static{ + OUTPUT_FORMAT.put(new TextFileStorageFormatDescriptor().getOutputFormat(), TEXT); + OUTPUT_FORMAT.put(new ORCFileStorageFormatDescriptor().getOutputFormat(), ORC); + OUTPUT_FORMAT.put(new AvroStorageFormatDescriptor().getOutputFormat(), AVRO); + OUTPUT_FORMAT.put(new ParquetFileStorageFormatDescriptor().getOutputFormat(), PARQUET); + 
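+ // The table is keyed by each StorageFormatDescriptor's output-format class name, letting detectFileType() map a table's outputFormat back to a logical HiveFileType.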
OUTPUT_FORMAT.put(new RCFileStorageFormatDescriptor().getOutputFormat(), RC); + OUTPUT_FORMAT.put(new SequenceFileStorageFormatDescriptor().getOutputFormat(), SEQ); + } + + static HiveFileType output(String outputStreamFormat){ + return OUTPUT_FORMAT.get(outputStreamFormat); + } + } + + /** + * detect the file type + * @param tableDescriptor tableDescriptor + * @return + */ + private String detectFileType(StorageDescriptor tableDescriptor){ + //search file type by output format of table/partition + return HiveFileType.output(tableDescriptor.getOutputFormat()) != null ? HiveFileType.output(tableDescriptor.getOutputFormat()).toString() : ""; + } +} diff --git a/exchangis-engines/engines/datax/datax-hdfswriter/src/main/java/com/alibaba/datax/plugin/writer/hdfswriter/Key.java b/exchangis-engines/engines/datax/datax-hdfswriter/src/main/java/com/alibaba/datax/plugin/writer/hdfswriter/Key.java new file mode 100644 index 000000000..fe1053984 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-hdfswriter/src/main/java/com/alibaba/datax/plugin/writer/hdfswriter/Key.java @@ -0,0 +1,81 @@ +package com.alibaba.datax.plugin.writer.hdfswriter; + +/** + * Created by shf on 15/10/8. + */ +public class Key { + /** + * must have + */ + static final String PATH = "path"; + /** + * must have + */ + final static String DEFAULT_FS = "defaultFS"; + /** + * must have + */ + final static String FILE_TYPE = "fileType"; + /** + * must have + */ + static final String FILE_NAME = "fileName"; + /** + * must have for column + */ + static final String COLUMN = "column"; + static final String NAME = "name"; + static final String TYPE = "type"; + public static final String DATE_FORMAT = "dateFormat"; + /** + * must have + */ + static final String WRITE_MODE = "writeMode"; + /** + * must have + */ + static final String FIELD_DELIMITER = "fieldDelimiter"; + /** + * not must, default UTF-8 + */ + static final String ENCODING = "encoding"; + /** + * not must, default no compress + */ + static final String COMPRESS = "compress"; + /** + * not must, not default \N + */ + public static final String NULL_FORMAT = "nullFormat"; + /** + * Kerberos + */ + static final String HAVE_KERBEROS = "haveKerberos"; + static final String KERBEROS_KEYTAB_FILE_PATH = "kerberosKeytabFilePath"; + static final String KERBEROS_PRINCIPAL = "kerberosPrincipal"; + static final String LDAP_USERNAME="ldapUserName"; + static final String LDAP_USERPASSWORD="ldapUserPassword"; + static final String HIVE_KEBEROS_PRINCIPAL="hivePrincipal"; + /** + * hadoop config + */ + static final String HADOOP_CONFIG = "hadoopConfig"; + + static final String HIVE_METASTORE_URIS="hiveMetastoreUris"; + static final String PARTITIONS_VALUES="partitionValues"; + static final String HIVE_TABLE="hiveTable"; + static final String HIVE_DATABASE="hiveDatabase"; + + /** + * keys of meta schema information + */ + static final String HIVE_META_SERDE_INFO = "serdeInfo"; + static final String HIVE_META_NUM_BUCKETS = "numBuckets"; + static final String HIVE_META_BUCKET_COLS = "bucketCols"; + static final String HIVE_META_SORT_COLS = "sortCols"; + static final String HIVE_META_INPUT_FORMAT = "inputFormat"; + static final String HIVE_META_OUTPUT_FORMAT = "outputFormat"; + static final String HIVE_META_PARAMETERS = "parameters"; + static final String HIVE_META_COMPRESSED = "compressed"; + static final String HIVE_META_PARTITION = "partitionKeys"; +} diff --git 
a/exchangis-engines/engines/datax/datax-hdfswriter/src/main/java/com/alibaba/datax/plugin/writer/hdfswriter/SupportHiveDataType.java b/exchangis-engines/engines/datax/datax-hdfswriter/src/main/java/com/alibaba/datax/plugin/writer/hdfswriter/SupportHiveDataType.java
new file mode 100644
index 000000000..7abed7222
--- /dev/null
+++ b/exchangis-engines/engines/datax/datax-hdfswriter/src/main/java/com/alibaba/datax/plugin/writer/hdfswriter/SupportHiveDataType.java
@@ -0,0 +1,21 @@
+package com.alibaba.datax.plugin.writer.hdfswriter;
+
+public enum SupportHiveDataType {
+    /* Supported Hive data types */
+    TINYINT,
+    SMALLINT,
+    INT,
+    BIGINT,
+    FLOAT,
+    DOUBLE,
+    ARRAY,
+    MAP,
+    TIMESTAMP,
+    DATE,
+
+    STRING,
+    VARCHAR,
+    CHAR,
+
+    BOOLEAN
+}
diff --git a/exchangis-engines/engines/datax/datax-hdfswriter/src/main/resources/plugin.json b/exchangis-engines/engines/datax/datax-hdfswriter/src/main/resources/plugin.json
new file mode 100644
index 000000000..0b6dda13c
--- /dev/null
+++ b/exchangis-engines/engines/datax/datax-hdfswriter/src/main/resources/plugin.json
@@ -0,0 +1,6 @@
+{
+  "name": "hdfswriter",
+  "class": "com.alibaba.datax.plugin.writer.hdfswriter.HdfsWriter",
+  "description": "useScene: prod. mechanism: writes data to HDFS concurrently via FileSystem.",
+  "developer": "alibaba"
+}
diff --git a/exchangis-engines/engines/datax/datax-hdfswriter/src/main/resources/plugin_job_template.json b/exchangis-engines/engines/datax/datax-hdfswriter/src/main/resources/plugin_job_template.json
new file mode 100644
index 000000000..529d6a779
--- /dev/null
+++ b/exchangis-engines/engines/datax/datax-hdfswriter/src/main/resources/plugin_job_template.json
@@ -0,0 +1,13 @@
+{
+  "name": "hdfswriter",
+  "parameter": {
+    "defaultFS": "",
+    "fileType": "",
+    "path": "",
+    "fileName": "",
+    "column": [],
+    "writeMode": "",
+    "fieldDelimiter": "",
+    "compress": ""
+  }
+}
\ No newline at end of file
diff --git a/exchangis-engines/engines/datax/datax-mysqlreader/pom.xml b/exchangis-engines/engines/datax/datax-mysqlreader/pom.xml
new file mode 100644
index 000000000..483c78022
--- /dev/null
+++ b/exchangis-engines/engines/datax/datax-mysqlreader/pom.xml
@@ -0,0 +1,75 @@
+ + + exchangis-engine-datax + com.webank.wedatasphere.exchangis + ${revision} + ../pom.xml + + 4.0.0 + 3.0.0-Plus-2 + datax-mysqlreader + jar + + + com.webank.wedatasphere.exchangis + datax-core + provided + + + org.apache.hadoop + hadoop-common + + + slf4j-log4j12 + org.slf4j + + + ${datax.engine.version} + + + org.slf4j + slf4j-api + provided + + + ch.qos.logback + logback-classic + provided + + + mysql + mysql-connector-java + 5.1.38 + + + + + + org.apache.maven.plugins + maven-assembly-plugin + 2.2.1 + + + assemble + + single + + + install + + + + false + false + + ${basedir}/src/main/assembly/package.xml + + plugin + + + + +
\ No newline at end of file
diff --git a/exchangis-engines/engines/datax/datax-mysqlreader/src/main/assembly/package.xml b/exchangis-engines/engines/datax/datax-mysqlreader/src/main/assembly/package.xml
new file mode 100644
index 000000000..c970ce4f7
--- /dev/null
+++ b/exchangis-engines/engines/datax/datax-mysqlreader/src/main/assembly/package.xml
@@ -0,0 +1,33 @@
+ + mysqlreader + + dir + + false + + + src/main/resources + + plugin.json + plugin_job_template.json + + plugin/reader/mysqlreader + + + target/ + + datax-mysqlreader-${datax.engine.version}.jar + + plugin/reader/mysqlreader + + + + + false + plugin/reader/mysqlreader/libs + runtime + + +
\ No newline at end of file
diff --git
a/exchangis-engines/engines/datax/datax-mysqlreader/src/main/java/com/alibaba/datax/plugin/reader/mysqlreader/MysqlReader.java b/exchangis-engines/engines/datax/datax-mysqlreader/src/main/java/com/alibaba/datax/plugin/reader/mysqlreader/MysqlReader.java
new file mode 100644
index 000000000..5db081c0b
--- /dev/null
+++ b/exchangis-engines/engines/datax/datax-mysqlreader/src/main/java/com/alibaba/datax/plugin/reader/mysqlreader/MysqlReader.java
@@ -0,0 +1,97 @@
+package com.alibaba.datax.plugin.reader.mysqlreader;
+
+import com.alibaba.datax.common.plugin.RecordSender;
+import com.alibaba.datax.common.spi.Reader;
+import com.alibaba.datax.common.util.Configuration;
+import com.alibaba.datax.plugin.rdbms.reader.CommonRdbmsReader;
+import com.alibaba.datax.plugin.rdbms.reader.Constant;
+import com.alibaba.datax.plugin.rdbms.util.DataBaseType;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.List;
+
+public class MysqlReader extends Reader {
+
+    private static final DataBaseType DATABASE_TYPE = DataBaseType.MySql;
+
+    public static class Job extends Reader.Job {
+        private static final Logger LOG = LoggerFactory
+                .getLogger(Job.class);
+
+        private Configuration originalConfig = null;
+        private CommonRdbmsReader.Job commonRdbmsReaderJob;
+
+        @Override
+        public void init() {
+            this.originalConfig = super.getPluginJobConf();
+
+            Integer userConfigedFetchSize = this.originalConfig.getInt(Constant.FETCH_SIZE);
+            if (userConfigedFetchSize != null) {
+                LOG.warn("mysqlreader does not need a fetchSize setting and will ignore it. Remove the fetchSize configuration if you do not want to see this warning again.");
+            }
+
+            this.originalConfig.set(Constant.FETCH_SIZE, Integer.MIN_VALUE);
+
+            this.commonRdbmsReaderJob = new CommonRdbmsReader.Job(DATABASE_TYPE);
+            this.commonRdbmsReaderJob.init(this.originalConfig);
+        }
+
+        @Override
+        public void preCheck() {
+            init();
+            this.commonRdbmsReaderJob.preCheck(this.originalConfig, DATABASE_TYPE);
+
+        }
+
+        @Override
+        public List<Configuration> split(int adviceNumber) {
+            return this.commonRdbmsReaderJob.split(this.originalConfig, adviceNumber);
+        }
+
+        @Override
+        public void post() {
+            this.commonRdbmsReaderJob.post(this.originalConfig);
+        }
+
+        @Override
+        public void destroy() {
+            this.commonRdbmsReaderJob.destroy(this.originalConfig);
+        }
+
+    }
+
+    public static class Task extends Reader.Task {
+
+        private Configuration readerSliceConfig;
+        private CommonRdbmsReader.Task commonRdbmsReaderTask;
+
+        @Override
+        public void init() {
+            this.readerSliceConfig = super.getPluginJobConf();
+            this.commonRdbmsReaderTask = new CommonRdbmsReader.Task(DATABASE_TYPE, super.getTaskGroupId(), super.getTaskId());
+            this.commonRdbmsReaderTask.init(this.readerSliceConfig);
+
+        }
+
+        @Override
+        public void startRead(RecordSender recordSender) {
+            int fetchSize = this.readerSliceConfig.getInt(Constant.FETCH_SIZE);
+
+            this.commonRdbmsReaderTask.startRead(this.readerSliceConfig, recordSender,
+                    super.getTaskPluginCollector(), fetchSize);
+        }
+
+        @Override
+        public void post() {
+            this.commonRdbmsReaderTask.post(this.readerSliceConfig);
+        }
+
+        @Override
+        public void destroy() {
+            this.commonRdbmsReaderTask.destroy(this.readerSliceConfig);
+        }
+
+    }
+
+}
diff --git a/exchangis-engines/engines/datax/datax-mysqlreader/src/main/java/com/alibaba/datax/plugin/reader/mysqlreader/MysqlReaderErrorCode.java b/exchangis-engines/engines/datax/datax-mysqlreader/src/main/java/com/alibaba/datax/plugin/reader/mysqlreader/MysqlReaderErrorCode.java
new file mode 100644
index 000000000..15d01545a
--- /dev/null
+++
b/exchangis-engines/engines/datax/datax-mysqlreader/src/main/java/com/alibaba/datax/plugin/reader/mysqlreader/MysqlReaderErrorCode.java @@ -0,0 +1,34 @@ +package com.alibaba.datax.plugin.reader.mysqlreader; + +import com.alibaba.datax.common.spi.ErrorCode; + +import java.sql.Driver; + +public enum MysqlReaderErrorCode implements ErrorCode { + ; + + private final String code; + private final String description; + + private MysqlReaderErrorCode(String code, String description) { + this.code = code; + this.description = description; + } + + @Override + public String getCode() { + return this.code; + } + + @Override + public String getDescription() { + return this.description; + } + + @Override + public String toString() { + return String.format("Code:[%s], Description:[%s]. ", this.code, + this.description); + } + +} diff --git a/exchangis-engines/engines/datax/datax-mysqlreader/src/main/resources/plugin.json b/exchangis-engines/engines/datax/datax-mysqlreader/src/main/resources/plugin.json new file mode 100644 index 000000000..0f7743dc8 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-mysqlreader/src/main/resources/plugin.json @@ -0,0 +1,6 @@ +{ + "name": "mysqlreader", + "class": "com.alibaba.datax.plugin.reader.mysqlreader.MysqlReader", + "description": "useScene: prod. mechanism: Jdbc connection using the database, execute select sql, retrieve data from the ResultSet. warn: The more you know about the database, the less problems you encounter.", + "developer": "alibaba" +} \ No newline at end of file diff --git a/exchangis-engines/engines/datax/datax-mysqlreader/src/main/resources/plugin_job_template.json b/exchangis-engines/engines/datax/datax-mysqlreader/src/main/resources/plugin_job_template.json new file mode 100644 index 000000000..9659cb5c9 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-mysqlreader/src/main/resources/plugin_job_template.json @@ -0,0 +1,15 @@ +{ + "name": "mysqlreader", + "parameter": { + "username": "", + "password": "", + "column": [], + "connection": [ + { + "jdbcUrl": [], + "table": [] + } + ], + "where": "" + } +} \ No newline at end of file diff --git a/exchangis-engines/engines/datax/datax-mysqlwriter/pom.xml b/exchangis-engines/engines/datax/datax-mysqlwriter/pom.xml new file mode 100644 index 000000000..df5898ab7 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-mysqlwriter/pom.xml @@ -0,0 +1,75 @@ + + + + exchangis-engine-datax + com.webank.wedatasphere.exchangis + ${revision} + ../pom.xml + + 4.0.0 + jar + 3.0.0-Plus-2 + datax-mysqlwriter + + + com.webank.wedatasphere.exchangis + datax-core + provided + + + org.apache.hadoop + hadoop-common + + + slf4j-log4j12 + org.slf4j + + + ${datax.engine.version} + + + org.slf4j + slf4j-api + provided + + + ch.qos.logback + logback-classic + provided + + + mysql + mysql-connector-java + 5.1.38 + + + + + + org.apache.maven.plugins + maven-assembly-plugin + 2.2.1 + + + assemble + + single + + + install + + + + false + false + + ${basedir}/src/main/assembly/package.xml + + plugin + + + + + \ No newline at end of file diff --git a/exchangis-engines/engines/datax/datax-mysqlwriter/src/main/assembly/package.xml b/exchangis-engines/engines/datax/datax-mysqlwriter/src/main/assembly/package.xml new file mode 100644 index 000000000..cdc896140 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-mysqlwriter/src/main/assembly/package.xml @@ -0,0 +1,33 @@ + + mysqlwriter + + dir + + false + + + src/main/resources + + plugin.json + plugin_job_template.json + + plugin/writer/mysqlwriter + + + target/ 
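The mysqlreader job template shown above is consumed through DataX's Configuration utility, whose dotted paths support bracket indexes (the same syntax the StarRocks writer below uses, e.g. connection[0].jdbcUrl). A rough sketch of how such a template resolves to typed values, assuming datax-core on the classpath; the JSON literals are made up for illustration:

import com.alibaba.datax.common.util.Configuration;

import java.util.List;

public class ReaderConfigSketch {
    public static void main(String[] args) {
        // A filled-in copy of the mysqlreader template above (values are illustrative)
        String job = "{\"name\":\"mysqlreader\",\"parameter\":{"
                + "\"username\":\"test\",\"password\":\"***\","
                + "\"column\":[\"id\",\"name\"],"
                + "\"connection\":[{\"jdbcUrl\":[\"jdbc:mysql://127.0.0.1:3306/demo\"],"
                + "\"table\":[\"t_demo\"]}],"
                + "\"where\":\"id > 0\"}}";
        Configuration conf = Configuration.from(job);

        // Dotted paths with bracket indexes, as used throughout the plugins in this patch
        String username = conf.getString("parameter.username");
        List<String> columns = conf.getList("parameter.column", String.class);
        String jdbcUrl = conf.getString("parameter.connection[0].jdbcUrl[0]");
        System.out.println(username + " / " + columns + " / " + jdbcUrl);
    }
}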
+ + datax-mysqlwriter-${datax.engine.version}.jar + plugin/writer/mysqlwriter + + + + false + plugin/writer/mysqlwriter/libs + runtime + + \ No newline at end of file
diff --git a/exchangis-engines/engines/datax/datax-mysqlwriter/src/main/java/com/alibaba/datax/plugin/writer/mysqlwriter/MysqlWriter.java b/exchangis-engines/engines/datax/datax-mysqlwriter/src/main/java/com/alibaba/datax/plugin/writer/mysqlwriter/MysqlWriter.java
new file mode 100644
index 000000000..572226826
--- /dev/null
+++ b/exchangis-engines/engines/datax/datax-mysqlwriter/src/main/java/com/alibaba/datax/plugin/writer/mysqlwriter/MysqlWriter.java
@@ -0,0 +1,102 @@
+package com.alibaba.datax.plugin.writer.mysqlwriter;
+
+import com.alibaba.datax.common.plugin.RecordReceiver;
+import com.alibaba.datax.common.spi.Writer;
+import com.alibaba.datax.common.util.Configuration;
+import com.alibaba.datax.plugin.rdbms.util.DataBaseType;
+import com.alibaba.datax.plugin.rdbms.writer.CommonRdbmsWriter;
+import com.alibaba.datax.plugin.rdbms.writer.Key;
+
+import java.util.List;
+
+
+//TODO writeProxy
+public class MysqlWriter extends Writer {
+    private static final DataBaseType DATABASE_TYPE = DataBaseType.MySql;
+
+    public static class Job extends Writer.Job {
+        private Configuration originalConfig = null;
+        private CommonRdbmsWriter.Job commonRdbmsWriterJob;
+
+        @Override
+        public void preCheck() {
+            this.init();
+            this.commonRdbmsWriterJob.writerPreCheck(this.originalConfig, DATABASE_TYPE);
+        }
+
+        @Override
+        public void init() {
+            this.originalConfig = super.getPluginJobConf();
+            this.commonRdbmsWriterJob = new CommonRdbmsWriter.Job(DATABASE_TYPE);
+            this.commonRdbmsWriterJob.init(this.originalConfig);
+        }
+
+        // Generally, pre-SQL execution should be deferred to the tasks (the single-table case is an exception)
+        @Override
+        public void prepare() {
+            // Privilege validation is not enabled for real runs yet
+            //this.commonRdbmsWriterJob.privilegeValid(this.originalConfig, DATABASE_TYPE);
+            this.commonRdbmsWriterJob.prepare(this.originalConfig);
+        }
+
+        @Override
+        public List<Configuration> split(int mandatoryNumber) {
+            return this.commonRdbmsWriterJob.split(this.originalConfig, mandatoryNumber);
+        }
+
+        // Generally, post-SQL execution should be deferred to the tasks (the single-table case is an exception)
+        @Override
+        public void post() {
+            this.commonRdbmsWriterJob.post(this.originalConfig);
+        }
+
+        @Override
+        public void destroy() {
+            this.commonRdbmsWriterJob.destroy(this.originalConfig);
+        }
+
+    }
+
+    public static class Task extends Writer.Task {
+        private Configuration writerSliceConfig;
+        private CommonRdbmsWriter.Task commonRdbmsWriterTask;
+
+        @Override
+        public void init() {
+            this.writerSliceConfig = super.getPluginJobConf();
+            this.commonRdbmsWriterTask = new CommonRdbmsWriter.Task(DATABASE_TYPE);
+            this.commonRdbmsWriterTask.init(this.writerSliceConfig);
+        }
+
+        @Override
+        public void prepare() {
+            this.commonRdbmsWriterTask.prepare(this.writerSliceConfig);
+        }
+
+        // TODO: switch to a connection pool to make sure every acquired connection is usable (note: each connection may need its session initialized every time)
+        @Override
+        public void startWrite(RecordReceiver recordReceiver) {
+            this.commonRdbmsWriterTask.startWrite(recordReceiver, this.writerSliceConfig,
+                    super.getTaskPluginCollector());
+        }
+
+        @Override
+        public void post() {
+            this.commonRdbmsWriterTask.post(this.writerSliceConfig);
+        }
+
+        @Override
+        public void destroy() {
+            this.commonRdbmsWriterTask.destroy(this.writerSliceConfig);
+        }
+
+        @Override
+        public boolean supportFailOver() {
+            String writeMode = writerSliceConfig.getString(Key.WRITE_MODE);
+            return "replace".equalsIgnoreCase(writeMode);
+        }
+
+    }
+
+}
diff --git a/exchangis-engines/engines/datax/datax-mysqlwriter/src/main/resources/plugin.json
b/exchangis-engines/engines/datax/datax-mysqlwriter/src/main/resources/plugin.json new file mode 100644 index 000000000..5a04a6ca6 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-mysqlwriter/src/main/resources/plugin.json @@ -0,0 +1,6 @@ +{ + "name": "mysqlwriter", + "class": "com.alibaba.datax.plugin.writer.mysqlwriter.MysqlWriter", + "description": "useScene: prod. mechanism: Jdbc connection using the database, execute insert sql. warn: The more you know about the database, the less problems you encounter.", + "developer": "alibaba" +} \ No newline at end of file diff --git a/exchangis-engines/engines/datax/datax-mysqlwriter/src/main/resources/plugin_job_template.json b/exchangis-engines/engines/datax/datax-mysqlwriter/src/main/resources/plugin_job_template.json new file mode 100644 index 000000000..6ddf6bed3 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-mysqlwriter/src/main/resources/plugin_job_template.json @@ -0,0 +1,17 @@ +{ + "name": "mysqlwriter", + "parameter": { + "username": "", + "password": "", + "writeMode": "", + "column": [], + "session": [], + "preSql": [], + "connection": [ + { + "jdbcUrl": "", + "table": [] + } + ] + } +} \ No newline at end of file diff --git a/exchangis-engines/engines/datax/datax-oraclereader/pom.xml b/exchangis-engines/engines/datax/datax-oraclereader/pom.xml new file mode 100644 index 000000000..3e87bfad4 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-oraclereader/pom.xml @@ -0,0 +1,82 @@ + + + + exchangis-engine-datax + com.webank.wedatasphere.exchangis + ${revision} + ../pom.xml + + 4.0.0 + 3.0.0-Plus-2 + datax-oraclereader + jar + + + com.webank.wedatasphere.exchangis + datax-core + provided + + + org.apache.hadoop + hadoop-common + + + slf4j-log4j12 + org.slf4j + + + ${datax.engine.version} + + + org.slf4j + slf4j-api + provided + + + ch.qos.logback + logback-classic + provided + + + org.apache.commons + commons-lang3 + provided + + + com.oracle + ojdbc6 + 11.2.0.3 + system + ${pom.basedir}/src/main/lib/ojdbc6-11.2.0.3.jar + + + + + + org.apache.maven.plugins + maven-assembly-plugin + 2.2.1 + + + assemble + + single + + + install + + + + false + false + + ${basedir}/src/main/assembly/package.xml + + plugin + + + + + \ No newline at end of file diff --git a/exchangis-engines/engines/datax/datax-oraclereader/src/main/assembly/package.xml b/exchangis-engines/engines/datax/datax-oraclereader/src/main/assembly/package.xml new file mode 100644 index 000000000..e2117c50a --- /dev/null +++ b/exchangis-engines/engines/datax/datax-oraclereader/src/main/assembly/package.xml @@ -0,0 +1,37 @@ + + oraclereader + + dir + + false + + + src/main/resources + + plugin.json + plugin_job_template.json + + plugin/reader/oraclereader + + + target/ + + datax-oraclereader-${datax.engine.version}.jar + + plugin/reader/oraclereader + + + src/main/lib + plugin/reader/oraclereader/libs + + + + + false + plugin/reader/oraclereader/libs + runtime + + + \ No newline at end of file diff --git a/exchangis-engines/engines/datax/datax-oraclereader/src/main/java/com/alibaba/datax/plugin/reader/oraclereader/Constant.java b/exchangis-engines/engines/datax/datax-oraclereader/src/main/java/com/alibaba/datax/plugin/reader/oraclereader/Constant.java new file mode 100644 index 000000000..8006b1a6c --- /dev/null +++ b/exchangis-engines/engines/datax/datax-oraclereader/src/main/java/com/alibaba/datax/plugin/reader/oraclereader/Constant.java @@ -0,0 +1,7 @@ +package com.alibaba.datax.plugin.reader.oraclereader; + +public class Constant { + + public static 
final int DEFAULT_FETCH_SIZE = 1024;
+
+}
diff --git a/exchangis-engines/engines/datax/datax-oraclereader/src/main/java/com/alibaba/datax/plugin/reader/oraclereader/OracleReader.java b/exchangis-engines/engines/datax/datax-oraclereader/src/main/java/com/alibaba/datax/plugin/reader/oraclereader/OracleReader.java
new file mode 100644
index 000000000..7f1231cf9
--- /dev/null
+++ b/exchangis-engines/engines/datax/datax-oraclereader/src/main/java/com/alibaba/datax/plugin/reader/oraclereader/OracleReader.java
@@ -0,0 +1,126 @@
+package com.alibaba.datax.plugin.reader.oraclereader;
+
+import com.alibaba.datax.common.exception.DataXException;
+import com.alibaba.datax.common.plugin.RecordSender;
+import com.alibaba.datax.common.spi.Reader;
+import com.alibaba.datax.common.util.Configuration;
+import com.alibaba.datax.plugin.rdbms.reader.CommonRdbmsReader;
+import com.alibaba.datax.plugin.rdbms.reader.Key;
+import com.alibaba.datax.plugin.rdbms.reader.util.HintUtil;
+import com.alibaba.datax.plugin.rdbms.util.DBUtilErrorCode;
+import com.alibaba.datax.plugin.rdbms.util.DataBaseType;
+import org.apache.commons.lang3.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.List;
+
+public class OracleReader extends Reader {
+
+    private static final DataBaseType DATABASE_TYPE = DataBaseType.Oracle;
+
+    public static class Job extends Reader.Job {
+        private static final Logger LOG = LoggerFactory
+                .getLogger(Job.class);
+
+        private Configuration originalConfig = null;
+        private CommonRdbmsReader.Job commonRdbmsReaderJob;
+
+        @Override
+        public void init() {
+            this.originalConfig = super.getPluginJobConf();
+
+            dealFetchSize(this.originalConfig);
+
+            this.commonRdbmsReaderJob = new CommonRdbmsReader.Job(
+                    DATABASE_TYPE);
+            this.commonRdbmsReaderJob.init(this.originalConfig);
+
+            // Note: this must run after this.commonRdbmsReaderJob.init(this.originalConfig); so that querySql mode can be detected quickly
+            dealHint(this.originalConfig);
+        }
+
+        @Override
+        public void preCheck() {
+            init();
+            this.commonRdbmsReaderJob.preCheck(this.originalConfig, DATABASE_TYPE);
+        }
+
+        @Override
+        public List<Configuration> split(int adviceNumber) {
+            return this.commonRdbmsReaderJob.split(this.originalConfig,
+                    adviceNumber);
+        }
+
+        @Override
+        public void post() {
+            this.commonRdbmsReaderJob.post(this.originalConfig);
+        }
+
+        @Override
+        public void destroy() {
+            this.commonRdbmsReaderJob.destroy(this.originalConfig);
+        }
+
+        private void dealFetchSize(Configuration originalConfig) {
+            int fetchSize = originalConfig.getInt(
+                    com.alibaba.datax.plugin.rdbms.reader.Constant.FETCH_SIZE,
+                    Constant.DEFAULT_FETCH_SIZE);
+            if (fetchSize < 1) {
+                throw DataXException
+                        .asDataXException(DBUtilErrorCode.REQUIRED_VALUE,
+                                String.format("The configured fetchSize is invalid: fetchSize:[%d] must not be less than 1.",
+                                        fetchSize));
+            }
+            originalConfig.set(
+                    com.alibaba.datax.plugin.rdbms.reader.Constant.FETCH_SIZE,
+                    fetchSize);
+        }
+
+        private void dealHint(Configuration originalConfig) {
+            String hint = originalConfig.getString(Key.HINT);
+            if (StringUtils.isNotBlank(hint)) {
+                boolean isTableMode = originalConfig.getBool(com.alibaba.datax.plugin.rdbms.reader.Constant.IS_TABLE_MODE).booleanValue();
+                if (!isTableMode) {
+                    throw DataXException.asDataXException(OracleReaderErrorCode.HINT_ERROR, "HINT can be configured if and only if Oracle is read in non-querySql mode.");
+                }
+                HintUtil.initHintConf(DATABASE_TYPE, originalConfig);
+            }
+        }
+    }
+
+    public static class Task extends Reader.Task {
+
+        private Configuration readerSliceConfig;
+        private CommonRdbmsReader.Task commonRdbmsReaderTask;
+
+        @Override
+        public void init() {
+            this.readerSliceConfig = super.getPluginJobConf();
+            this.commonRdbmsReaderTask = new CommonRdbmsReader.Task(
+                    DATABASE_TYPE, super.getTaskGroupId(), super.getTaskId());
+            this.commonRdbmsReaderTask.init(this.readerSliceConfig);
+        }
+
+        @Override
+        public void startRead(RecordSender recordSender) {
+            int fetchSize = this.readerSliceConfig
+                    .getInt(com.alibaba.datax.plugin.rdbms.reader.Constant.FETCH_SIZE);
+
+            this.commonRdbmsReaderTask.startRead(this.readerSliceConfig,
+                    recordSender, super.getTaskPluginCollector(), fetchSize);
+        }
+
+        @Override
+        public void post() {
+            this.commonRdbmsReaderTask.post(this.readerSliceConfig);
+        }
+
+        @Override
+        public void destroy() {
+            this.commonRdbmsReaderTask.destroy(this.readerSliceConfig);
+        }
+
+    }
+
+}
diff --git a/exchangis-engines/engines/datax/datax-oraclereader/src/main/java/com/alibaba/datax/plugin/reader/oraclereader/OracleReaderErrorCode.java b/exchangis-engines/engines/datax/datax-oraclereader/src/main/java/com/alibaba/datax/plugin/reader/oraclereader/OracleReaderErrorCode.java
new file mode 100644
index 000000000..95a17eb12
--- /dev/null
+++ b/exchangis-engines/engines/datax/datax-oraclereader/src/main/java/com/alibaba/datax/plugin/reader/oraclereader/OracleReaderErrorCode.java
@@ -0,0 +1,31 @@
+package com.alibaba.datax.plugin.reader.oraclereader;
+
+import com.alibaba.datax.common.spi.ErrorCode;
+
+public enum OracleReaderErrorCode implements ErrorCode {
+    HINT_ERROR("Oraclereader-00", "Your HINT configuration is wrong."),;
+
+    private final String code;
+    private final String description;
+
+    private OracleReaderErrorCode(String code, String description) {
+        this.code = code;
+        this.description = description;
+    }
+
+    @Override
+    public String getCode() {
+        return this.code;
+    }
+
+    @Override
+    public String getDescription() {
+        return this.description;
+    }
+
+    @Override
+    public String toString() {
+        return String.format("Code:[%s], Description:[%s]. ", this.code,
+                this.description);
+    }
+}
diff --git a/exchangis-engines/engines/datax/datax-oraclereader/src/main/lib/ojdbc6-11.2.0.3.jar b/exchangis-engines/engines/datax/datax-oraclereader/src/main/lib/ojdbc6-11.2.0.3.jar
new file mode 100644
index 000000000..01da074d5
Binary files /dev/null and b/exchangis-engines/engines/datax/datax-oraclereader/src/main/lib/ojdbc6-11.2.0.3.jar differ
diff --git a/exchangis-engines/engines/datax/datax-oraclereader/src/main/resources/plugin.json b/exchangis-engines/engines/datax/datax-oraclereader/src/main/resources/plugin.json
new file mode 100644
index 000000000..abb6a7148
--- /dev/null
+++ b/exchangis-engines/engines/datax/datax-oraclereader/src/main/resources/plugin.json
@@ -0,0 +1,6 @@
+{
+  "name": "oraclereader",
+  "class": "com.alibaba.datax.plugin.reader.oraclereader.OracleReader",
+  "description": "useScene: prod. mechanism: Jdbc connection using the database, execute select sql, retrieve data from the ResultSet. 
warn: The more you know about the database, the less problems you encounter.", + "developer": "alibaba" +} \ No newline at end of file diff --git a/exchangis-engines/engines/datax/datax-oraclereader/src/main/resources/plugin_job_template.json b/exchangis-engines/engines/datax/datax-oraclereader/src/main/resources/plugin_job_template.json new file mode 100644 index 000000000..6323c4c9d --- /dev/null +++ b/exchangis-engines/engines/datax/datax-oraclereader/src/main/resources/plugin_job_template.json @@ -0,0 +1,14 @@ +{ + "name": "oraclereader", + "parameter": { + "username": "", + "password": "", + "column": [], + "connection": [ + { + "table": [], + "jdbcUrl": [] + } + ] + } +} \ No newline at end of file diff --git a/exchangis-engines/engines/datax/datax-oraclewriter/pom.xml b/exchangis-engines/engines/datax/datax-oraclewriter/pom.xml new file mode 100644 index 000000000..d4f3813ff --- /dev/null +++ b/exchangis-engines/engines/datax/datax-oraclewriter/pom.xml @@ -0,0 +1,81 @@ + + + + exchangis-engine-datax + com.webank.wedatasphere.exchangis + ${revision} + ../pom.xml + + 4.0.0 + + 3.0.0-Plus-2 + datax-oraclewriter + jar + + + + com.webank.wedatasphere.exchangis + datax-core + provided + + + org.apache.hadoop + hadoop-common + + + slf4j-log4j12 + org.slf4j + + + ${datax.engine.version} + + + org.slf4j + slf4j-api + provided + + + ch.qos.logback + logback-classic + provided + + + com.oracle + ojdbc6 + 11.2.0.3 + system + ${pom.basedir}/src/main/lib/ojdbc6-11.2.0.3.jar + + + + + + + org.apache.maven.plugins + maven-assembly-plugin + 2.2.1 + + + assemble + + single + + + install + + + + false + false + + ${pom.basedir}/src/main/assembly/package.xml + + plugin + + + + + + \ No newline at end of file diff --git a/exchangis-engines/engines/datax/datax-oraclewriter/src/main/assembly/package.xml b/exchangis-engines/engines/datax/datax-oraclewriter/src/main/assembly/package.xml new file mode 100644 index 000000000..190577ced --- /dev/null +++ b/exchangis-engines/engines/datax/datax-oraclewriter/src/main/assembly/package.xml @@ -0,0 +1,37 @@ + + oraclewriter + + dir + + false + + + src/main/resources + + plugin.json + plugin_job_template.json + + plugin/writer/oraclewriter + + + target/ + + datax-oraclewriter-${datax.engine.version}.jar + + plugin/writer/oraclewriter + + + src/main/lib + plugin/writer/oraclewriter/libs + + + + + false + plugin/writer/oraclewriter/libs + runtime + + + \ No newline at end of file diff --git a/exchangis-engines/engines/datax/datax-oraclewriter/src/main/java/com/alibaba/datax/plugin/writer/oraclewriter/OracleWriter.java b/exchangis-engines/engines/datax/datax-oraclewriter/src/main/java/com/alibaba/datax/plugin/writer/oraclewriter/OracleWriter.java new file mode 100644 index 000000000..c55500c08 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-oraclewriter/src/main/java/com/alibaba/datax/plugin/writer/oraclewriter/OracleWriter.java @@ -0,0 +1,106 @@ +package com.alibaba.datax.plugin.writer.oraclewriter; + +import com.alibaba.datax.common.exception.DataXException; +import com.alibaba.datax.common.plugin.RecordReceiver; +import com.alibaba.datax.common.spi.Writer; +import com.alibaba.datax.common.util.Configuration; +import com.alibaba.datax.plugin.rdbms.util.DBUtilErrorCode; +import com.alibaba.datax.plugin.rdbms.util.DataBaseType; +import com.alibaba.datax.plugin.rdbms.writer.CommonRdbmsWriter; +import com.alibaba.datax.plugin.rdbms.writer.Key; + +import java.util.List; + +public class OracleWriter extends Writer { + private static final DataBaseType 
DATABASE_TYPE = DataBaseType.Oracle;
+
+    public static class Job extends Writer.Job {
+        private Configuration originalConfig = null;
+        private CommonRdbmsWriter.Job commonRdbmsWriterJob;
+
+        @Override
+        public void preCheck() {
+            this.init();
+            this.commonRdbmsWriterJob.writerPreCheck(this.originalConfig, DATABASE_TYPE);
+        }
+
+        @Override
+        public void init() {
+            this.originalConfig = super.getPluginJobConf();
+
+            // warn: unlike mysql, oracle only supports insert mode, so writeMode must not be used
+            String writeMode = this.originalConfig.getString(Key.WRITE_MODE);
+            if (null != writeMode) {
+                throw DataXException
+                        .asDataXException(
+                                DBUtilErrorCode.CONF_ERROR,
+                                String.format(
+                                        "Invalid writeMode configuration. Oracle does not support the writeMode option (writeMode: %s); Oracle can only insert data with insert SQL. Please check your configuration and correct it.",
+                                        writeMode));
+            }
+
+            this.commonRdbmsWriterJob = new CommonRdbmsWriter.Job(
+                    DATABASE_TYPE);
+            this.commonRdbmsWriterJob.init(this.originalConfig);
+        }
+
+        @Override
+        public void prepare() {
+            // Privilege validation is skipped for Oracle real runs for now
+            //this.commonRdbmsWriterJob.privilegeValid(this.originalConfig, DATABASE_TYPE);
+            this.commonRdbmsWriterJob.prepare(this.originalConfig);
+        }
+
+        @Override
+        public List<Configuration> split(int mandatoryNumber) {
+            return this.commonRdbmsWriterJob.split(this.originalConfig,
+                    mandatoryNumber);
+        }
+
+        @Override
+        public void post() {
+            this.commonRdbmsWriterJob.post(this.originalConfig);
+        }
+
+        @Override
+        public void destroy() {
+            this.commonRdbmsWriterJob.destroy(this.originalConfig);
+        }
+
+    }
+
+    public static class Task extends Writer.Task {
+        private Configuration writerSliceConfig;
+        private CommonRdbmsWriter.Task commonRdbmsWriterTask;
+
+        @Override
+        public void init() {
+            this.writerSliceConfig = super.getPluginJobConf();
+            this.commonRdbmsWriterTask = new CommonRdbmsWriter.Task(DATABASE_TYPE);
+            this.commonRdbmsWriterTask.init(this.writerSliceConfig);
+        }
+
+        @Override
+        public void prepare() {
+            this.commonRdbmsWriterTask.prepare(this.writerSliceConfig);
+        }
+
+        @Override
+        public void startWrite(RecordReceiver recordReceiver) {
+            this.commonRdbmsWriterTask.startWrite(recordReceiver,
+                    this.writerSliceConfig, super.getTaskPluginCollector());
+        }
+
+        @Override
+        public void post() {
+            this.commonRdbmsWriterTask.post(this.writerSliceConfig);
+        }
+
+        @Override
+        public void destroy() {
+            this.commonRdbmsWriterTask.destroy(this.writerSliceConfig);
+        }
+
+    }
+
+}
diff --git a/exchangis-engines/engines/datax/datax-oraclewriter/src/main/java/com/alibaba/datax/plugin/writer/oraclewriter/OracleWriterErrorCode.java b/exchangis-engines/engines/datax/datax-oraclewriter/src/main/java/com/alibaba/datax/plugin/writer/oraclewriter/OracleWriterErrorCode.java
new file mode 100644
index 000000000..2351f5dfd
--- /dev/null
+++ b/exchangis-engines/engines/datax/datax-oraclewriter/src/main/java/com/alibaba/datax/plugin/writer/oraclewriter/OracleWriterErrorCode.java
@@ -0,0 +1,31 @@
+package com.alibaba.datax.plugin.writer.oraclewriter;
+
+import com.alibaba.datax.common.spi.ErrorCode;
+
+public enum OracleWriterErrorCode implements ErrorCode {
+    ;
+
+    private final String code;
+    private final String describe;
+
+    private OracleWriterErrorCode(String code, String describe) {
+        this.code = code;
+        this.describe = describe;
+    }
+
+    @Override
+    public String getCode() {
+        return this.code;
+    }
+
+    @Override
+    public String getDescription() {
+        return this.describe;
+    }
+
+    @Override
+    public String toString() {
+        return String.format("Code:[%s], Describe:[%s]. 
", this.code, + this.describe); + } +} diff --git a/exchangis-engines/engines/datax/datax-oraclewriter/src/main/lib/ojdbc6-11.2.0.3.jar b/exchangis-engines/engines/datax/datax-oraclewriter/src/main/lib/ojdbc6-11.2.0.3.jar new file mode 100644 index 000000000..01da074d5 Binary files /dev/null and b/exchangis-engines/engines/datax/datax-oraclewriter/src/main/lib/ojdbc6-11.2.0.3.jar differ diff --git a/exchangis-engines/engines/datax/datax-oraclewriter/src/main/resources/plugin.json b/exchangis-engines/engines/datax/datax-oraclewriter/src/main/resources/plugin.json new file mode 100644 index 000000000..60f1c3f97 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-oraclewriter/src/main/resources/plugin.json @@ -0,0 +1,6 @@ +{ + "name": "oraclewriter", + "class": "com.alibaba.datax.plugin.writer.oraclewriter.OracleWriter", + "description": "useScene: prod. mechanism: Jdbc connection using the database, execute insert sql. warn: The more you know about the database, the less problems you encounter.", + "developer": "alibaba" +} \ No newline at end of file diff --git a/exchangis-engines/engines/datax/datax-oraclewriter/src/main/resources/plugin_job_template.json b/exchangis-engines/engines/datax/datax-oraclewriter/src/main/resources/plugin_job_template.json new file mode 100644 index 000000000..1fe48d2a5 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-oraclewriter/src/main/resources/plugin_job_template.json @@ -0,0 +1,15 @@ +{ + "name": "oraclewriter", + "parameter": { + "username": "", + "password": "", + "column": [], + "preSql": [], + "connection": [ + { + "jdbcUrl": "", + "table": [] + } + ] + } +} \ No newline at end of file diff --git a/exchangis-engines/engines/datax/datax-starrockswriter/pom.xml b/exchangis-engines/engines/datax/datax-starrockswriter/pom.xml new file mode 100644 index 000000000..c2f92f30c --- /dev/null +++ b/exchangis-engines/engines/datax/datax-starrockswriter/pom.xml @@ -0,0 +1,93 @@ + + + + exchangis-engine-datax + com.webank.wedatasphere.exchangis + ${revision} + ../pom.xml + + 4.0.0 + + 3.0.0-Plus-2 + datax-oraclewriter + jar + + + + com.webank.wedatasphere.exchangis + datax-core + provided + + + org.apache.hadoop + hadoop-common + + + slf4j-log4j12 + org.slf4j + + + ${datax.engine.version} + + + org.slf4j + slf4j-api + provided + + + ch.qos.logback + logback-classic + provided + + + commons-codec + commons-codec + 1.9 + + + org.apache.commons + commons-lang3 + 3.12.0 + + + commons-logging + commons-logging + 1.1.1 + + + org.apache.httpcomponents + httpcore + 4.4.6 + + + org.apache.httpcomponents + httpclient + 4.5.3 + + + com.alibaba.fastjson2 + fastjson2 + 2.0.51 + + + mysql + mysql-connector-java + 8.0.16 + + + + + + + org.apache.maven.plugins + maven-compiler-plugin + + 8 + 8 + + + + + \ No newline at end of file diff --git a/exchangis-engines/engines/datax/datax-starrockswriter/src/main/java/com/alibaba/datax/plugin/writer/starrockswriter/StarRocksWriter.java b/exchangis-engines/engines/datax/datax-starrockswriter/src/main/java/com/alibaba/datax/plugin/writer/starrockswriter/StarRocksWriter.java new file mode 100644 index 000000000..19238525b --- /dev/null +++ b/exchangis-engines/engines/datax/datax-starrockswriter/src/main/java/com/alibaba/datax/plugin/writer/starrockswriter/StarRocksWriter.java @@ -0,0 +1,152 @@ +package com.alibaba.datax.plugin.writer.starrockswriter; + +import com.alibaba.datax.common.element.Record; +import com.alibaba.datax.common.exception.DataXException; +import com.alibaba.datax.common.plugin.RecordReceiver; +import 
com.alibaba.datax.common.spi.Writer; +import com.alibaba.datax.common.util.Configuration; +import com.alibaba.datax.plugin.rdbms.util.DBUtil; +import com.alibaba.datax.plugin.rdbms.util.DBUtilErrorCode; +import com.alibaba.datax.plugin.rdbms.util.DataBaseType; +import com.alibaba.datax.plugin.writer.starrockswriter.manager.StarRocksWriterManager; +import com.alibaba.datax.plugin.writer.starrockswriter.row.StarRocksISerializer; +import com.alibaba.datax.plugin.writer.starrockswriter.row.StarRocksSerializerFactory; +import com.alibaba.datax.plugin.writer.starrockswriter.util.StarRocksWriterUtil; + +import org.apache.commons.lang3.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.sql.Connection; +import java.util.ArrayList; +import java.util.List; +import java.util.Objects; + +public class StarRocksWriter extends Writer { + + public static class Job extends Writer.Job { + + private static final Logger LOG = LoggerFactory.getLogger(Job.class); + private Configuration originalConfig = null; + private StarRocksWriterOptions options; + + @Override + public void init() { + this.originalConfig = super.getPluginJobConf(); + String selectedDatabase = super.getPluginJobConf().getString(StarRocksWriterOptions.KEY_SELECTED_DATABASE); + if(StringUtils.isBlank(this.originalConfig.getString(StarRocksWriterOptions.KEY_DATABASE)) && StringUtils.isNotBlank(selectedDatabase)){ + this.originalConfig.set(StarRocksWriterOptions.KEY_DATABASE, selectedDatabase); + } + options = new StarRocksWriterOptions(super.getPluginJobConf()); + options.doPretreatment(); + } + + @Override + public void preCheck(){ + this.init(); + StarRocksWriterUtil.preCheckPrePareSQL(options); + StarRocksWriterUtil.preCheckPostSQL(options); + } + + @Override + public void prepare() { + String username = options.getUsername(); + String password = options.getPassword(); + String jdbcUrl = options.getJdbcUrl(); + List renderedPreSqls = StarRocksWriterUtil.renderPreOrPostSqls(options.getPreSqlList(), options.getTable()); + if (null != renderedPreSqls && !renderedPreSqls.isEmpty()) { + Connection conn = DBUtil.getConnection(DataBaseType.MySql, jdbcUrl, username, password); + LOG.info("Begin to execute preSqls:[{}]. context info:{}.", String.join(";", renderedPreSqls), jdbcUrl); + StarRocksWriterUtil.executeSqls(conn, renderedPreSqls); + DBUtil.closeDBResources(null, null, conn); + } + } + + @Override + public List split(int mandatoryNumber) { + List configurations = new ArrayList<>(mandatoryNumber); + for (int i = 0; i < mandatoryNumber; i++) { + configurations.add(originalConfig); + } + return configurations; + } + + @Override + public void post() { + String username = options.getUsername(); + String password = options.getPassword(); + String jdbcUrl = options.getJdbcUrl(); + List renderedPostSqls = StarRocksWriterUtil.renderPreOrPostSqls(options.getPostSqlList(), options.getTable()); + if (null != renderedPostSqls && !renderedPostSqls.isEmpty()) { + Connection conn = DBUtil.getConnection(DataBaseType.MySql, jdbcUrl, username, password); + LOG.info("Begin to execute postSqls:[{}]. 
context info:{}.", String.join(";", renderedPostSqls), jdbcUrl); + StarRocksWriterUtil.executeSqls(conn, renderedPostSqls); + DBUtil.closeDBResources(null, null, conn); + } + } + + @Override + public void destroy() { + } + + } + + public static class Task extends Writer.Task { + private StarRocksWriterManager writerManager; + private StarRocksWriterOptions options; + private StarRocksISerializer rowSerializer; + + @Override + public void init() { + options = new StarRocksWriterOptions(super.getPluginJobConf()); + if (options.isWildcardColumn()) { + Connection conn = DBUtil.getConnection(DataBaseType.MySql, options.getJdbcUrl(), options.getUsername(), options.getPassword()); + List columns = StarRocksWriterUtil.getStarRocksColumns(conn, options.getDatabase(), options.getTable()); + options.setInfoCchemaColumns(columns); + } + writerManager = new StarRocksWriterManager(options, getTaskPluginCollector()); + rowSerializer = StarRocksSerializerFactory.createSerializer(options); + } + + @Override + public void prepare() { + } + + public void startWrite(RecordReceiver recordReceiver) { + try { + Record record; + while ((record = recordReceiver.getFromReader()) != null) { + if (record.getColumnNumber() != options.getColumns().size()) { + throw DataXException + .asDataXException( + DBUtilErrorCode.CONF_ERROR, + String.format( + "Column configuration error. The number of reader columns %d and the number of writer columns %d are not equal.", + record.getColumnNumber(), + options.getColumns().size())); + } + writerManager.writeRecord(rowSerializer.serialize(record)); + } + } catch (Exception e) { + throw DataXException.asDataXException(DBUtilErrorCode.WRITE_DATA_ERROR, e); + } + } + + @Override + public void post() { + try { + writerManager.close(); + } catch (Exception e) { + throw DataXException.asDataXException(DBUtilErrorCode.WRITE_DATA_ERROR, e); + } + } + + @Override + public void destroy() {} + + @Override + public boolean supportFailOver(){ + return false; + } + } +} diff --git a/exchangis-engines/engines/datax/datax-starrockswriter/src/main/java/com/alibaba/datax/plugin/writer/starrockswriter/StarRocksWriterOptions.java b/exchangis-engines/engines/datax/datax-starrockswriter/src/main/java/com/alibaba/datax/plugin/writer/starrockswriter/StarRocksWriterOptions.java new file mode 100644 index 000000000..c670c7643 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-starrockswriter/src/main/java/com/alibaba/datax/plugin/writer/starrockswriter/StarRocksWriterOptions.java @@ -0,0 +1,231 @@ +package com.alibaba.datax.plugin.writer.starrockswriter; + +import com.alibaba.datax.common.exception.DataXException; +import com.alibaba.datax.common.util.Configuration; +import com.alibaba.datax.plugin.rdbms.util.DBUtilErrorCode; +import com.webank.wedatasphere.exchangis.datax.common.CryptoUtils; +import org.apache.commons.lang3.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.Serializable; +import java.util.Map; +import java.util.List; +import java.util.Objects; +import java.util.stream.Collectors; + +public class StarRocksWriterOptions implements Serializable { + + private static final Logger LOG = LoggerFactory.getLogger(StarRocksWriterOptions.class); + + private static final long serialVersionUID = 1l; + private static final long KILO_BYTES_SCALE = 1024l; + private static final long MEGA_BYTES_SCALE = KILO_BYTES_SCALE * KILO_BYTES_SCALE; + private static final int MAX_RETRIES = 1; + private static final int BATCH_ROWS = 500000; + private static final long 
BATCH_BYTES = 5 * MEGA_BYTES_SCALE; + private static final long FLUSH_INTERVAL = 300000; + + private static final String KEY_LOAD_PROPS_FORMAT = "format"; + public enum StreamLoadFormat { + CSV, JSON; + } + + public static final String KEY_USERNAME = "username"; + public static final String KEY_PASSWORD = "password"; + public static final String KEY_DATABASE = "database"; + public static final String KEY_SELECTED_DATABASE = "selectedDatabase"; + public static final String KEY_TABLE = "table"; + public static final String KEY_COLUMN = "column"; + public static final String KEY_PRE_SQL = "preSql"; + public static final String KEY_POST_SQL = "postSql"; + public static final String KEY_JDBC_URL = "jdbcUrl"; + public static final String KEY_HOST = "host"; + public static final String KEY_PORT = "port"; + public static final String KEY_HTTP_PORT = "httpPort"; + public static final String KEY_LABEL_PREFIX = "labelPrefix"; + public static final String KEY_MAX_BATCH_ROWS = "maxBatchRows"; + public static final String KEY_MAX_BATCH_SIZE = "maxBatchSize"; + public static final String KEY_FLUSH_INTERVAL = "flushInterval"; + public static final String KEY_LOAD_URL = "loadUrl"; + public static final String KEY_FLUSH_QUEUE_LENGTH = "flushQueueLength"; + public static final String KEY_LOAD_PROPS = "loadProps"; + public static final String CONNECTION_JDBC_URL = "connection[0].jdbcUrl"; + public static final String CONNECTION_HOST = "connection[0].host"; + public static final String CONNECTION_PORT = "connection[0].port"; + public static final String CONNECTION_HTTP_PORT = "connection[0].httpPort"; + public static final String CONNECTION_TABLE_NAME = "connection[0].table[0]"; + public static final String CONNECTION_SELECTED_DATABASE = "connection[0].selectedDatabase"; + + private final Configuration options; + private List infoCchemaColumns; + private List userSetColumns; + private boolean isWildcardColumn; + + public StarRocksWriterOptions(Configuration options) { + this.options = options; + // database + String database = this.options.getString(KEY_DATABASE); + if (StringUtils.isNotBlank(database)) { + this.options.set(KEY_DATABASE, database); + } + // jdbcUrl + String jdbcUrl = null; + String host = this.options.getString(CONNECTION_HOST); + String port = this.options.getString(CONNECTION_PORT); + if (StringUtils.isNotBlank(host) && StringUtils.isNotBlank(port)) { + jdbcUrl = "jdbc:mysql://" + host + ":" + port + "/"; + this.options.set(KEY_JDBC_URL, jdbcUrl); + } + // table + String table = this.options.getString(CONNECTION_TABLE_NAME); + if (StringUtils.isNotBlank(table)) { + this.options.set(KEY_TABLE, table); + } + // column + List keyColumns = this.options.getList(KEY_COLUMN, Map.class); + if (Objects.nonNull(keyColumns) && keyColumns.size() > 0) { + this.userSetColumns = keyColumns.stream() + .map(map -> String.valueOf(map.getOrDefault("name", ""))) + .map(name -> name.replace("`", "")) + .collect(Collectors.toList()); + } + } + + public void doPretreatment() { + validateRequired(); + validateStreamLoadUrl(); + } + + public String getJdbcUrl() { + return options.getString(KEY_JDBC_URL); + } + + public String getDatabase() { + return options.getString(KEY_DATABASE); + } + + public String getTable() { + return options.getString(KEY_TABLE); + } + + public String getUsername() { + return options.getString(KEY_USERNAME); + } + + public String getPassword() { + if(StringUtils.isNotBlank(options.getString(KEY_PASSWORD))) { + try { + return (String) 
CryptoUtils.string2Object(options.getString(KEY_PASSWORD)); + } catch (Exception e) { + throw DataXException.asDataXException(DBUtilErrorCode.CONF_ERROR, + "Decrypt password failed."); + } + } + return ""; + } + + public String getLabelPrefix() { + return options.getString(KEY_LABEL_PREFIX); + } + + public List getLoadUrlList() { + return options.getList(KEY_LOAD_URL, String.class); + } + + public List getColumns() { + if (isWildcardColumn) { + return this.infoCchemaColumns; + } + return this.userSetColumns; + } + + public boolean isWildcardColumn() { + return this.isWildcardColumn; + } + + public void setInfoCchemaColumns(List cols) { + this.infoCchemaColumns = cols; + } + + public List getPreSqlList() { + return options.getList(KEY_PRE_SQL, String.class); + } + + public List getPostSqlList() { + return options.getList(KEY_POST_SQL, String.class); + } + + public Map getLoadProps() { + return options.getMap(KEY_LOAD_PROPS); + } + + public int getMaxRetries() { + return MAX_RETRIES; + } + + public int getBatchRows() { + Integer rows = options.getInt(KEY_MAX_BATCH_ROWS); + return null == rows ? BATCH_ROWS : rows; + } + + public long getBatchSize() { + Long size = options.getLong(KEY_MAX_BATCH_SIZE); + return null == size ? BATCH_BYTES : size; + } + + public long getFlushInterval() { + Long interval = options.getLong(KEY_FLUSH_INTERVAL); + return null == interval ? FLUSH_INTERVAL : interval; + } + + public int getFlushQueueLength() { + Integer len = options.getInt(KEY_FLUSH_QUEUE_LENGTH); + return null == len ? 1 : len; + } + + public StreamLoadFormat getStreamLoadFormat() { + Map loadProps = getLoadProps(); + if (null == loadProps) { + return StreamLoadFormat.CSV; + } + if (loadProps.containsKey(KEY_LOAD_PROPS_FORMAT) + && StreamLoadFormat.JSON.name().equalsIgnoreCase(String.valueOf(loadProps.get(KEY_LOAD_PROPS_FORMAT)))) { + return StreamLoadFormat.JSON; + } + return StreamLoadFormat.CSV; + } + + private void validateStreamLoadUrl() { + List urlList = getLoadUrlList(); + for (String host : urlList) { + if (host.split(":").length < 2) { + throw DataXException.asDataXException(DBUtilErrorCode.CONF_ERROR, + "The format of loadUrl is illegal, please input `fe_ip:fe_http_ip;fe_ip:fe_http_ip`."); + } + } + } + + private void validateRequired() { + final String[] requiredOptionKeys = new String[]{ + KEY_USERNAME, + KEY_DATABASE, + KEY_TABLE, + KEY_COLUMN, + KEY_LOAD_URL + }; + for (String optionKey : requiredOptionKeys) { + options.getNecessaryValue(optionKey, DBUtilErrorCode.REQUIRED_VALUE); + } + } + + @Override + public String toString() { + return "StarRocksWriterOptions{" + + "options=" + options + + ", infoCchemaColumns=" + infoCchemaColumns + + ", userSetColumns=" + userSetColumns + + ", isWildcardColumn=" + isWildcardColumn + + '}'; + } +} diff --git a/exchangis-engines/engines/datax/datax-starrockswriter/src/main/java/com/alibaba/datax/plugin/writer/starrockswriter/manager/StarRocksFlushTuple.java b/exchangis-engines/engines/datax/datax-starrockswriter/src/main/java/com/alibaba/datax/plugin/writer/starrockswriter/manager/StarRocksFlushTuple.java new file mode 100644 index 000000000..105982195 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-starrockswriter/src/main/java/com/alibaba/datax/plugin/writer/starrockswriter/manager/StarRocksFlushTuple.java @@ -0,0 +1,21 @@ +package com.alibaba.datax.plugin.writer.starrockswriter.manager; + +import java.util.List; + +public class StarRocksFlushTuple { + + private String label; + private Long bytes; + private List rows; + + public 
StarRocksFlushTuple(String label, Long bytes, List rows) { + this.label = label; + this.bytes = bytes; + this.rows = rows; + } + + public String getLabel() { return label; } + public void setLabel(String label) { this.label = label; } + public Long getBytes() { return bytes; } + public List getRows() { return rows; } +} \ No newline at end of file diff --git a/exchangis-engines/engines/datax/datax-starrockswriter/src/main/java/com/alibaba/datax/plugin/writer/starrockswriter/manager/StarRocksStreamLoadFailedException.java b/exchangis-engines/engines/datax/datax-starrockswriter/src/main/java/com/alibaba/datax/plugin/writer/starrockswriter/manager/StarRocksStreamLoadFailedException.java new file mode 100644 index 000000000..859f5777f --- /dev/null +++ b/exchangis-engines/engines/datax/datax-starrockswriter/src/main/java/com/alibaba/datax/plugin/writer/starrockswriter/manager/StarRocksStreamLoadFailedException.java @@ -0,0 +1,32 @@ +package com.alibaba.datax.plugin.writer.starrockswriter.manager; + +import java.io.IOException; +import java.util.Map; + +public class StarRocksStreamLoadFailedException extends IOException { + + static final long serialVersionUID = 1L; + + private final Map response; + private boolean reCreateLabel; + + public StarRocksStreamLoadFailedException(String message, Map response) { + super(message); + this.response = response; + } + + public StarRocksStreamLoadFailedException(String message, Map response, boolean reCreateLabel) { + super(message); + this.response = response; + this.reCreateLabel = reCreateLabel; + } + + public Map getFailedResponse() { + return response; + } + + public boolean needReCreateLabel() { + return reCreateLabel; + } + +} diff --git a/exchangis-engines/engines/datax/datax-starrockswriter/src/main/java/com/alibaba/datax/plugin/writer/starrockswriter/manager/StarRocksStreamLoadVisitor.java b/exchangis-engines/engines/datax/datax-starrockswriter/src/main/java/com/alibaba/datax/plugin/writer/starrockswriter/manager/StarRocksStreamLoadVisitor.java new file mode 100644 index 000000000..138c5e6bf --- /dev/null +++ b/exchangis-engines/engines/datax/datax-starrockswriter/src/main/java/com/alibaba/datax/plugin/writer/starrockswriter/manager/StarRocksStreamLoadVisitor.java @@ -0,0 +1,319 @@ +package com.alibaba.datax.plugin.writer.starrockswriter.manager; + +import java.io.IOException; +import java.net.HttpURLConnection; +import java.net.URL; +import java.nio.ByteBuffer; +import java.nio.charset.StandardCharsets; + +import com.alibaba.datax.common.plugin.TaskPluginCollector; +import com.alibaba.datax.core.statistics.plugin.task.util.DirtyRecord; +import com.alibaba.fastjson2.JSON; +import com.alibaba.datax.plugin.writer.starrockswriter.StarRocksWriterOptions; +import com.alibaba.datax.plugin.writer.starrockswriter.row.StarRocksDelimiterParser; + +import org.apache.commons.codec.binary.Base64; +import org.apache.http.HttpEntity; +import org.apache.http.client.config.RequestConfig; +import org.apache.http.client.methods.CloseableHttpResponse; +import org.apache.http.client.methods.HttpGet; +import org.apache.http.client.methods.HttpPut; +import org.apache.http.entity.ByteArrayEntity; +import org.apache.http.impl.client.CloseableHttpClient; +import org.apache.http.impl.client.DefaultRedirectStrategy; +import org.apache.http.impl.client.HttpClientBuilder; +import org.apache.http.impl.client.HttpClients; +import org.apache.http.util.EntityUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.HashMap; +import java.util.List; 
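StarRocksFlushTuple above bundles a stream-load label with the buffered rows and their byte size; the writer manager flushes such a tuple once the row or byte limits are reached (per StarRocksWriterOptions, 500000 rows or 5 MiB by default). A standalone sketch of that bookkeeping, with illustrative names that are not part of the plugin:

import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;

public class FlushBufferSketch {
    // Defaults mirroring BATCH_ROWS / BATCH_BYTES in StarRocksWriterOptions
    private static final int MAX_BATCH_ROWS = 500_000;
    private static final long MAX_BATCH_BYTES = 5L * 1024 * 1024;

    private final List<byte[]> rows = new ArrayList<>();
    private long bytes;

    public void add(String serializedRow) {
        byte[] data = serializedRow.getBytes(StandardCharsets.UTF_8);
        rows.add(data);
        bytes += data.length;
        if (rows.size() >= MAX_BATCH_ROWS || bytes >= MAX_BATCH_BYTES) {
            flush();
        }
    }

    public void flush() {
        if (rows.isEmpty()) {
            return;
        }
        // Each batch gets a unique label so a retried stream load can be de-duplicated
        String label = "datax_" + UUID.randomUUID();
        System.out.printf("flushing label=%s rows=%d bytes=%d%n", label, rows.size(), bytes);
        rows.clear();
        bytes = 0;
    }
}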
+import java.util.Map; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; + +public class StarRocksStreamLoadVisitor { + + private static final Logger LOG = LoggerFactory.getLogger(StarRocksStreamLoadVisitor.class); + + private final StarRocksWriterOptions writerOptions; + private long pos; + private static final String RESULT_FAILED = "Fail"; + private static final String RESULT_LABEL_EXISTED = "Label Already Exists"; + private static final String LAEBL_STATE_VISIBLE = "VISIBLE"; + private static final String LAEBL_STATE_COMMITTED = "COMMITTED"; + private static final String RESULT_LABEL_PREPARE = "PREPARE"; + private static final String RESULT_LABEL_ABORTED = "ABORTED"; + private static final String RESULT_LABEL_UNKNOWN = "UNKNOWN"; + + public StarRocksStreamLoadVisitor(StarRocksWriterOptions writerOptions) { + this.writerOptions = writerOptions; + } + + public void doStreamLoad(StarRocksFlushTuple flushData, TaskPluginCollector taskPluginCollector) throws IOException { + String host = getAvailableHost(); + if (null == host) { + throw new IOException("None of the host in `load_url` could be connected."); + } + String loadUrl = new StringBuilder(host) + .append("/api/") + .append(writerOptions.getDatabase()) + .append("/") + .append(writerOptions.getTable()) + .append("/_stream_load") + .toString(); + if (LOG.isDebugEnabled()) { + LOG.debug(String.format("Start to join batch data: rows[%d] bytes[%d] label[%s].", flushData.getRows().size(), flushData.getBytes(), flushData.getLabel())); + } + Map loadResult = doHttpPut(loadUrl, flushData.getLabel(), joinRows(flushData.getRows(), flushData.getBytes().intValue())); + LOG.info("LoadResult is {}", loadResult.toString()); + final String keyStatus = "Status"; + if (null == loadResult || !loadResult.containsKey(keyStatus)) { + LOG.error("unknown result status. {}", loadResult); + throw new IOException("Unable to flush data to StarRocks: unknown result status. " + loadResult); + } + if (LOG.isDebugEnabled()) { + LOG.debug(new StringBuilder("StreamLoad response:\n").append(JSON.toJSONString(loadResult)).toString()); + } + int dirtyRecord = 0; + String errorMsg = null; + if (loadResult.containsKey("NumberFilteredRows")) { + Object numberFilteredRows = loadResult.get("NumberFilteredRows"); + dirtyRecord = Integer.parseInt(String.valueOf(numberFilteredRows)); + } + if (RESULT_FAILED.equals(loadResult.get(keyStatus))) { + StringBuilder errorBuilder = new StringBuilder("Failed to flush data to StarRocks.\n"); + if (loadResult.containsKey("Message")) { + errorBuilder.append(loadResult.get("Message")); + errorBuilder.append('\n'); + } + if (loadResult.containsKey("ErrorURL")) { + LOG.error("StreamLoad response: {}", loadResult); + try { + errorBuilder.append(doHttpGet(loadResult.get("ErrorURL").toString())); + errorBuilder.append('\n'); + } catch (IOException e) { + LOG.warn("Get Error URL failed. 
{} ", loadResult.get("ErrorURL"), e); + } + } else { + errorBuilder.append(JSON.toJSONString(loadResult)); + errorBuilder.append('\n'); + } + LOG.error(errorBuilder.toString()); + errorMsg = errorBuilder.toString(); + } else if (RESULT_LABEL_EXISTED.equals(loadResult.get(keyStatus))) { + LOG.debug(new StringBuilder("StreamLoad response:\n").append(JSON.toJSONString(loadResult)).toString()); + // has to block-checking the state to get the final result + checkLabelState(host, flushData.getLabel()); + } + if (dirtyRecord > 0) { + for (int i = 0; i < dirtyRecord; i++) { + taskPluginCollector.collectDirtyRecord(new DirtyRecord(), errorMsg); + } + } + } + + private String getAvailableHost() { + List hostList = writerOptions.getLoadUrlList(); + long tmp = pos + hostList.size(); + for (; pos < tmp; pos++) { + String host = new StringBuilder("http://").append(hostList.get((int) (pos % hostList.size()))).toString(); + if (tryHttpConnection(host)) { + return host; + } + } + return null; + } + + private boolean tryHttpConnection(String host) { + try { + URL url = new URL(host); + HttpURLConnection co = (HttpURLConnection) url.openConnection(); + co.setConnectTimeout(1000); + co.connect(); + co.disconnect(); + return true; + } catch (Exception e1) { + LOG.warn("Failed to connect to address:{}", host, e1); + return false; + } + } + + private byte[] joinRows(List rows, int totalBytes) { + if (StarRocksWriterOptions.StreamLoadFormat.CSV.equals(writerOptions.getStreamLoadFormat())) { + Map props = (writerOptions.getLoadProps() == null ? new HashMap<>() : writerOptions.getLoadProps()); + byte[] lineDelimiter = StarRocksDelimiterParser.parse((String)props.get("row_delimiter"), "\n").getBytes(StandardCharsets.UTF_8); + ByteBuffer bos = ByteBuffer.allocate(totalBytes + rows.size() * lineDelimiter.length); + for (byte[] row : rows) { + bos.put(row); + bos.put(lineDelimiter); + } + return bos.array(); + } + + if (StarRocksWriterOptions.StreamLoadFormat.JSON.equals(writerOptions.getStreamLoadFormat())) { + ByteBuffer bos = ByteBuffer.allocate(totalBytes + (rows.isEmpty() ? 
2 : rows.size() + 1));
+            bos.put("[".getBytes(StandardCharsets.UTF_8));
+            byte[] jsonDelimiter = ",".getBytes(StandardCharsets.UTF_8);
+            boolean isFirstElement = true;
+            for (byte[] row : rows) {
+                if (!isFirstElement) {
+                    bos.put(jsonDelimiter);
+                }
+                bos.put(row);
+                isFirstElement = false;
+            }
+            bos.put("]".getBytes(StandardCharsets.UTF_8));
+            return bos.array();
+        }
+        throw new RuntimeException("Failed to join rows data, unsupported `format` from stream load properties: " + writerOptions.getStreamLoadFormat());
+    }
+
+    @SuppressWarnings("unchecked")
+    private void checkLabelState(String host, String label) throws IOException {
+        int idx = 0;
+        while (true) {
+            try {
+                TimeUnit.SECONDS.sleep(Math.min(++idx, 5));
+            } catch (InterruptedException ex) {
+                // restore the interrupt flag so callers can still observe the interruption
+                Thread.currentThread().interrupt();
+                break;
+            }
+            try (CloseableHttpClient httpclient = HttpClients.createDefault()) {
+                HttpGet httpGet = new HttpGet(new StringBuilder(host).append("/api/").append(writerOptions.getDatabase()).append("/get_load_state?label=").append(label).toString());
+                httpGet.setHeader("Authorization", getBasicAuthHeader(writerOptions.getUsername(), writerOptions.getPassword()));
+                httpGet.setHeader("Connection", "close");
+
+                try (CloseableHttpResponse resp = httpclient.execute(httpGet)) {
+                    HttpEntity respEntity = getHttpEntity(resp);
+                    if (respEntity == null) {
+                        throw new IOException(String.format("Failed to flush data to StarRocks, Error "
+                                + "could not get the final state of label[%s].\n", label));
+                    }
+                    Map<String, Object> result = (Map<String, Object>) JSON.parse(EntityUtils.toString(respEntity));
+                    String labelState = (String) result.get("state");
+                    if (null == labelState) {
+                        throw new IOException(String.format("Failed to flush data to StarRocks, Error "
+                                + "could not get the final state of label[%s]. response[%s]\n", label, EntityUtils.toString(respEntity)));
+                    }
+                    LOG.info(String.format("Checking label[%s] state[%s]\n", label, labelState));
+                    switch (labelState) {
+                        case LABEL_STATE_VISIBLE:
+                        case LABEL_STATE_COMMITTED:
+                            return;
+                        case RESULT_LABEL_PREPARE:
+                            continue;
+                        case RESULT_LABEL_ABORTED:
+                            throw new StarRocksStreamLoadFailedException(String.format("Failed to flush data to StarRocks, Error "
+                                    + "label[%s] state[%s]\n", label, labelState), null, true);
+                        case RESULT_LABEL_UNKNOWN:
+                        default:
+                            throw new IOException(String.format("Failed to flush data to StarRocks, Error "
+                                    + "label[%s] state[%s]\n", label, labelState));
+                    }
+                }
+            }
+        }
+    }
+
+    @SuppressWarnings("unchecked")
+    private Map<String, Object> doHttpPut(String loadUrl, String label, byte[] data) throws IOException {
+        LOG.info(String.format("Executing stream load to: '%s', size: '%s'", loadUrl, data.length));
+        final HttpClientBuilder httpClientBuilder = HttpClients.custom()
+                .setRedirectStrategy(new DefaultRedirectStrategy() {
+                    @Override
+                    protected boolean isRedirectable(String method) {
+                        return true;
+                    }
+                });
+        try (CloseableHttpClient httpclient = httpClientBuilder.build()) {
+            HttpPut httpPut = new HttpPut(loadUrl);
+            List<String> cols = writerOptions.getColumns();
+            if (null != cols && !cols.isEmpty() && StarRocksWriterOptions.StreamLoadFormat.CSV.equals(writerOptions.getStreamLoadFormat())) {
+                httpPut.setHeader("columns", String.join(",", cols.stream().map(f -> String.format("`%s`", f)).collect(Collectors.toList())));
+            }
+            if (null != writerOptions.getLoadProps()) {
+                for (Map.Entry<String, Object> entry : writerOptions.getLoadProps().entrySet()) {
+                    httpPut.setHeader(entry.getKey(), String.valueOf(entry.getValue()));
+                }
+            }
+            httpPut.setHeader("strict_mode", "true");
+            httpPut.setHeader("Expect", "100-continue");
+            httpPut.setHeader("label", label);
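+            // Note: the label makes retries idempotent; StarRocks answers "Label Already Exists"
+            // for a duplicate label, and doStreamLoad() then polls checkLabelState() for the outcome.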
httpPut.setHeader("Content-Type", "application/x-www-form-urlencoded"); + httpPut.setHeader("Authorization", getBasicAuthHeader(writerOptions.getUsername(), writerOptions.getPassword())); + httpPut.setEntity(new ByteArrayEntity(data)); + httpPut.setConfig(RequestConfig.custom().setRedirectsEnabled(true).build()); + try (CloseableHttpResponse resp = httpclient.execute(httpPut)) { + int code = resp.getStatusLine().getStatusCode(); + if (200 != code) { + String errorText; + try { + HttpEntity respEntity = resp.getEntity(); + errorText = EntityUtils.toString(respEntity); + } catch (Exception err) { + errorText = "find errorText failed: " + err.getMessage(); + } + LOG.warn("Request failed with code:{}, err:{}", code, errorText); + Map errorMap = new HashMap<>(); + errorMap.put("Status", "Fail"); + errorMap.put("Message", errorText); + return errorMap; + } + HttpEntity respEntity = resp.getEntity(); + if (null == respEntity) { + LOG.warn("Request failed with empty response."); + return null; + } + return (Map)JSON.parse(EntityUtils.toString(respEntity)); + } + } + } + + private String getBasicAuthHeader(String username, String password) { + String auth = username + ":" + password; + byte[] encodedAuth = Base64.encodeBase64(auth.getBytes(StandardCharsets.UTF_8)); + return new StringBuilder("Basic ").append(new String(encodedAuth)).toString(); + } + + private HttpEntity getHttpEntity(CloseableHttpResponse resp) { + int code = resp.getStatusLine().getStatusCode(); + if (200 != code) { + LOG.warn("Request failed with code:{}", code); + return null; + } + HttpEntity respEntity = resp.getEntity(); + if (null == respEntity) { + LOG.warn("Request failed with empty response."); + return null; + } + return respEntity; + } + + private String doHttpGet(String getUrl) throws IOException { + LOG.info("Executing GET from {}.", getUrl); + try (CloseableHttpClient httpclient = buildHttpClient()) { + HttpGet httpGet = new HttpGet(getUrl); + try (CloseableHttpResponse resp = httpclient.execute(httpGet)) { + HttpEntity respEntity = resp.getEntity(); + if (null == respEntity) { + LOG.warn("Request failed with empty response."); + return null; + } + return EntityUtils.toString(respEntity); + } + } + } + + private CloseableHttpClient buildHttpClient(){ + final HttpClientBuilder httpClientBuilder = HttpClients.custom() + .setRedirectStrategy(new DefaultRedirectStrategy() { + @Override + protected boolean isRedirectable(String method) { + return true; + } + }); + return httpClientBuilder.build(); + } + +} diff --git a/exchangis-engines/engines/datax/datax-starrockswriter/src/main/java/com/alibaba/datax/plugin/writer/starrockswriter/manager/StarRocksWriterManager.java b/exchangis-engines/engines/datax/datax-starrockswriter/src/main/java/com/alibaba/datax/plugin/writer/starrockswriter/manager/StarRocksWriterManager.java new file mode 100644 index 000000000..349712c70 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-starrockswriter/src/main/java/com/alibaba/datax/plugin/writer/starrockswriter/manager/StarRocksWriterManager.java @@ -0,0 +1,204 @@ +package com.alibaba.datax.plugin.writer.starrockswriter.manager; + +import com.alibaba.datax.common.plugin.TaskPluginCollector; +import org.apache.commons.lang3.StringUtils; +import org.apache.commons.lang3.concurrent.BasicThreadFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.List; +import java.util.UUID; +import 
java.util.concurrent.Executors; +import java.util.concurrent.LinkedBlockingDeque; +import java.util.concurrent.ScheduledExecutorService; +import java.util.concurrent.ScheduledFuture; +import java.util.concurrent.TimeUnit; + +import com.alibaba.datax.plugin.writer.starrockswriter.StarRocksWriterOptions; + +public class StarRocksWriterManager { + + private static final Logger LOG = LoggerFactory.getLogger(StarRocksWriterManager.class); + + private final StarRocksStreamLoadVisitor starrocksStreamLoadVisitor; + private final StarRocksWriterOptions writerOptions; + + private final List buffer = new ArrayList<>(); + private int batchCount = 0; + private long batchSize = 0; + private volatile boolean closed = false; + private volatile Exception flushException; + private final LinkedBlockingDeque flushQueue; + private ScheduledExecutorService scheduler; + private ScheduledFuture scheduledFuture; + + public StarRocksWriterManager(StarRocksWriterOptions writerOptions, TaskPluginCollector taskPluginCollector) { + this.writerOptions = writerOptions; + this.starrocksStreamLoadVisitor = new StarRocksStreamLoadVisitor(writerOptions); + flushQueue = new LinkedBlockingDeque<>(writerOptions.getFlushQueueLength()); + this.startScheduler(); + this.startAsyncFlushing(taskPluginCollector); + } + + public void startScheduler() { + stopScheduler(); + this.scheduler = Executors.newScheduledThreadPool(1, new BasicThreadFactory.Builder().namingPattern("starrocks-interval-flush").daemon(true).build()); + this.scheduledFuture = this.scheduler.schedule(() -> { + synchronized (StarRocksWriterManager.this) { + if (!closed) { + try { + String label = createBatchLabel(); + LOG.info(String.format("StarRocks interval Sinking triggered: label[%s].", label)); + if (batchCount == 0) { + startScheduler(); + } + flush(label, false); + } catch (Exception e) { + flushException = e; + } + } + } + }, writerOptions.getFlushInterval(), TimeUnit.MILLISECONDS); + } + + public void stopScheduler() { + if (this.scheduledFuture != null) { + scheduledFuture.cancel(false); + this.scheduler.shutdown(); + } + } + + public final synchronized void writeRecord(String record) throws IOException { + checkFlushException(); + try { + byte[] bts = record.getBytes(StandardCharsets.UTF_8); + buffer.add(bts); + batchCount++; + batchSize += bts.length; + if (batchCount >= writerOptions.getBatchRows() || batchSize >= writerOptions.getBatchSize()) { + String label = createBatchLabel(); + if (LOG.isDebugEnabled()) { + LOG.debug(String.format("StarRocks buffer Sinking triggered: rows[%d] label[%s].", batchCount, label)); + } + flush(label, false); + } + } catch (Exception e) { + throw new IOException("Writing records to StarRocks failed.", e); + } + } + + public synchronized void flush(String label, boolean waitUtilDone) throws Exception { + checkFlushException(); + if (batchCount == 0) { + if (waitUtilDone) { + waitAsyncFlushingDone(); + } + return; + } + flushQueue.put(new StarRocksFlushTuple(label, batchSize, new ArrayList<>(buffer))); + if (waitUtilDone) { + // wait the last flush + waitAsyncFlushingDone(); + } + buffer.clear(); + batchCount = 0; + batchSize = 0; + } + + public synchronized void close() { + if (!closed) { + closed = true; + try { + String label = createBatchLabel(); + if (batchCount > 0) { + if (LOG.isDebugEnabled()) { + LOG.debug(String.format("StarRocks Sink is about to close: label[%s].", label)); + } + } + flush(label, true); + } catch (Exception e) { + throw new RuntimeException("Writing records to StarRocks failed.", e); + } + } + 
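+        // re-check after the final flush so an exception raised by the async flush thread
+        // during close() still surfaces to the caller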
checkFlushException(); + } + + public String createBatchLabel() { + StringBuilder sb = new StringBuilder(); + if (StringUtils.isNotBlank(writerOptions.getLabelPrefix())) { + sb.append(writerOptions.getLabelPrefix()); + } + return sb.append(UUID.randomUUID().toString()) + .toString(); + } + + private void startAsyncFlushing(TaskPluginCollector taskPluginCollector) { + // start flush thread + Thread flushThread = new Thread(new Runnable(){ + public void run() { + while(true) { + try { + asyncFlush(taskPluginCollector); + } catch (Exception e) { + flushException = e; + } + } + } + }); + flushThread.setDaemon(true); + flushThread.start(); + } + + private void waitAsyncFlushingDone() throws InterruptedException { + // wait previous flushings + for (int i = 0; i <= writerOptions.getFlushQueueLength(); i++) { + flushQueue.put(new StarRocksFlushTuple("", 0l, null)); + } + checkFlushException(); + } + + private void asyncFlush(TaskPluginCollector taskPluginCollector) throws Exception { + StarRocksFlushTuple flushData = flushQueue.take(); + if (StringUtils.isBlank(flushData.getLabel())) { + return; + } + stopScheduler(); + if (LOG.isDebugEnabled()) { + LOG.debug(String.format("Async stream load: rows[%d] bytes[%d] label[%s].", flushData.getRows().size(), flushData.getBytes(), flushData.getLabel())); + } + for (int i = 0; i <= writerOptions.getMaxRetries(); i++) { + try { + // flush to StarRocks with stream load + starrocksStreamLoadVisitor.doStreamLoad(flushData, taskPluginCollector); + LOG.info(String.format("Async stream load finished: label[%s].", flushData.getLabel())); + startScheduler(); + break; + } catch (Exception e) { + LOG.warn("Failed to flush batch data to StarRocks, retry times = {}", i, e); + if (i >= writerOptions.getMaxRetries()) { + throw new IOException(e); + } + if (e instanceof StarRocksStreamLoadFailedException && ((StarRocksStreamLoadFailedException)e).needReCreateLabel()) { + String newLabel = createBatchLabel(); + LOG.warn(String.format("Batch label changed from [%s] to [%s]", flushData.getLabel(), newLabel)); + flushData.setLabel(newLabel); + } + try { + Thread.sleep(1000l * Math.min(i + 1, 10)); + } catch (InterruptedException ex) { + Thread.currentThread().interrupt(); + throw new IOException("Unable to flush, interrupted while doing another attempt", e); + } + } + } + } + + private void checkFlushException() { + if (flushException != null) { + throw new RuntimeException("Writing records to StarRocks failed.", flushException); + } + } +} diff --git a/exchangis-engines/engines/datax/datax-starrockswriter/src/main/java/com/alibaba/datax/plugin/writer/starrockswriter/row/StarRocksBaseSerializer.java b/exchangis-engines/engines/datax/datax-starrockswriter/src/main/java/com/alibaba/datax/plugin/writer/starrockswriter/row/StarRocksBaseSerializer.java new file mode 100644 index 000000000..c2948b44a --- /dev/null +++ b/exchangis-engines/engines/datax/datax-starrockswriter/src/main/java/com/alibaba/datax/plugin/writer/starrockswriter/row/StarRocksBaseSerializer.java @@ -0,0 +1,26 @@ +package com.alibaba.datax.plugin.writer.starrockswriter.row; + +import com.alibaba.datax.common.element.Column; +import com.alibaba.datax.common.element.Column.Type; + +public class StarRocksBaseSerializer { + + protected String fieldConvertion(Column col) { + if (null == col.getRawData() || Type.NULL == col.getType()) { + return null; + } + if (Type.BOOLEAN == col.getType()) { + return String.valueOf(col.asLong()); + } + if (Type.BYTES == col.getType()) { + byte[] bts = (byte[])col.getRawData(); + 
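+            // fold the bytes into a long, index 0 being the most significant byte (big-endian),
+            // e.g. {0x01, 0x00} -> "256"; note this silently wraps for arrays longer than 8 bytes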
long value = 0; + for (int i = 0; i < bts.length; i++) { + value += (bts[bts.length - i - 1] & 0xffL) << (8 * i); + } + return String.valueOf(value); + } + return col.asString(); + } + +} \ No newline at end of file diff --git a/exchangis-engines/engines/datax/datax-starrockswriter/src/main/java/com/alibaba/datax/plugin/writer/starrockswriter/row/StarRocksCsvSerializer.java b/exchangis-engines/engines/datax/datax-starrockswriter/src/main/java/com/alibaba/datax/plugin/writer/starrockswriter/row/StarRocksCsvSerializer.java new file mode 100644 index 000000000..55b429fea --- /dev/null +++ b/exchangis-engines/engines/datax/datax-starrockswriter/src/main/java/com/alibaba/datax/plugin/writer/starrockswriter/row/StarRocksCsvSerializer.java @@ -0,0 +1,28 @@ +package com.alibaba.datax.plugin.writer.starrockswriter.row; + +import com.alibaba.datax.common.element.Record; + +public class StarRocksCsvSerializer extends StarRocksBaseSerializer implements StarRocksISerializer { + + private static final long serialVersionUID = 1L; + + private final String columnSeparator; + + public StarRocksCsvSerializer(String sp) { + this.columnSeparator = StarRocksDelimiterParser.parse(sp, "\t"); + } + + @Override + public String serialize(Record row) { + StringBuilder sb = new StringBuilder(); + for (int i = 0; i < row.getColumnNumber(); i++) { + String value = fieldConvertion(row.getColumn(i)); + sb.append(null == value ? "\\N" : value); + if (i < row.getColumnNumber() - 1) { + sb.append(columnSeparator); + } + } + return sb.toString(); + } + +} diff --git a/exchangis-engines/engines/datax/datax-starrockswriter/src/main/java/com/alibaba/datax/plugin/writer/starrockswriter/row/StarRocksDelimiterParser.java b/exchangis-engines/engines/datax/datax-starrockswriter/src/main/java/com/alibaba/datax/plugin/writer/starrockswriter/row/StarRocksDelimiterParser.java new file mode 100644 index 000000000..523d7dcf7 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-starrockswriter/src/main/java/com/alibaba/datax/plugin/writer/starrockswriter/row/StarRocksDelimiterParser.java @@ -0,0 +1,55 @@ +package com.alibaba.datax.plugin.writer.starrockswriter.row; + +import org.apache.commons.lang3.StringUtils; + +import java.io.StringWriter; + +public class StarRocksDelimiterParser { + + private static final String HEX_STRING = "0123456789ABCDEF"; + + public static String parse(String sp, String dSp) throws RuntimeException { + if (StringUtils.isBlank(sp)) { + return dSp; + } + if (!sp.toUpperCase().startsWith("\\X")) { + return sp; + } + String hexStr = sp.substring(2); + // check hex str + if (hexStr.isEmpty()) { + throw new RuntimeException("Failed to parse delimiter: `Hex str is empty`"); + } + if (hexStr.length() % 2 != 0) { + throw new RuntimeException("Failed to parse delimiter: `Hex str length error`"); + } + for (char hexChar : hexStr.toUpperCase().toCharArray()) { + if (HEX_STRING.indexOf(hexChar) == -1) { + throw new RuntimeException("Failed to parse delimiter: `Hex str format error`"); + } + } + // transform to separator + StringWriter writer = new StringWriter(); + for (byte b : hexStrToBytes(hexStr)) { + writer.append((char) b); + } + return writer.toString(); + } + + private static byte[] hexStrToBytes(String hexStr) { + String upperHexStr = hexStr.toUpperCase(); + int length = upperHexStr.length() / 2; + char[] hexChars = upperHexStr.toCharArray(); + byte[] bytes = new byte[length]; + for (int i = 0; i < length; i++) { + int pos = i * 2; + bytes[i] = (byte) (charToByte(hexChars[pos]) << 4 | charToByte(hexChars[pos + 
1])); + } + return bytes; + } + + private static byte charToByte(char c) { + return (byte) HEX_STRING.indexOf(c); + } + +} diff --git a/exchangis-engines/engines/datax/datax-starrockswriter/src/main/java/com/alibaba/datax/plugin/writer/starrockswriter/row/StarRocksISerializer.java b/exchangis-engines/engines/datax/datax-starrockswriter/src/main/java/com/alibaba/datax/plugin/writer/starrockswriter/row/StarRocksISerializer.java new file mode 100644 index 000000000..5924a4274 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-starrockswriter/src/main/java/com/alibaba/datax/plugin/writer/starrockswriter/row/StarRocksISerializer.java @@ -0,0 +1,11 @@ +package com.alibaba.datax.plugin.writer.starrockswriter.row; + +import java.io.Serializable; + +import com.alibaba.datax.common.element.Record; + +public interface StarRocksISerializer extends Serializable { + + String serialize(Record row); + +} diff --git a/exchangis-engines/engines/datax/datax-starrockswriter/src/main/java/com/alibaba/datax/plugin/writer/starrockswriter/row/StarRocksJsonSerializer.java b/exchangis-engines/engines/datax/datax-starrockswriter/src/main/java/com/alibaba/datax/plugin/writer/starrockswriter/row/StarRocksJsonSerializer.java new file mode 100644 index 000000000..4648a446f --- /dev/null +++ b/exchangis-engines/engines/datax/datax-starrockswriter/src/main/java/com/alibaba/datax/plugin/writer/starrockswriter/row/StarRocksJsonSerializer.java @@ -0,0 +1,34 @@ +package com.alibaba.datax.plugin.writer.starrockswriter.row; + +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import com.alibaba.datax.common.element.Record; +import com.alibaba.fastjson2.JSON; + +public class StarRocksJsonSerializer extends StarRocksBaseSerializer implements StarRocksISerializer { + + private static final long serialVersionUID = 1L; + + private final List fieldNames; + + public StarRocksJsonSerializer(List fieldNames) { + this.fieldNames = fieldNames; + } + + @Override + public String serialize(Record row) { + if (null == fieldNames) { + return ""; + } + Map rowMap = new HashMap<>(fieldNames.size()); + int idx = 0; + for (String fieldName : fieldNames) { + rowMap.put(fieldName, fieldConvertion(row.getColumn(idx))); + idx++; + } + return JSON.toJSONString(rowMap); + } + +} diff --git a/exchangis-engines/engines/datax/datax-starrockswriter/src/main/java/com/alibaba/datax/plugin/writer/starrockswriter/row/StarRocksSerializerFactory.java b/exchangis-engines/engines/datax/datax-starrockswriter/src/main/java/com/alibaba/datax/plugin/writer/starrockswriter/row/StarRocksSerializerFactory.java new file mode 100644 index 000000000..f5da30963 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-starrockswriter/src/main/java/com/alibaba/datax/plugin/writer/starrockswriter/row/StarRocksSerializerFactory.java @@ -0,0 +1,22 @@ +package com.alibaba.datax.plugin.writer.starrockswriter.row; + +import java.util.Map; + +import com.alibaba.datax.plugin.writer.starrockswriter.StarRocksWriterOptions; + +public class StarRocksSerializerFactory { + + private StarRocksSerializerFactory() {} + + public static StarRocksISerializer createSerializer(StarRocksWriterOptions writerOptions) { + if (StarRocksWriterOptions.StreamLoadFormat.CSV.equals(writerOptions.getStreamLoadFormat())) { + Map props = writerOptions.getLoadProps(); + return new StarRocksCsvSerializer(null == props || !props.containsKey("column_separator") ? 
null : String.valueOf(props.get("column_separator"))); + } + if (StarRocksWriterOptions.StreamLoadFormat.JSON.equals(writerOptions.getStreamLoadFormat())) { + return new StarRocksJsonSerializer(writerOptions.getColumns()); + } + throw new RuntimeException("Failed to create row serializer, unsupported `format` from stream load properties."); + } + +} diff --git a/exchangis-engines/engines/datax/datax-starrockswriter/src/main/java/com/alibaba/datax/plugin/writer/starrockswriter/util/StarRocksWriterUtil.java b/exchangis-engines/engines/datax/datax-starrockswriter/src/main/java/com/alibaba/datax/plugin/writer/starrockswriter/util/StarRocksWriterUtil.java new file mode 100644 index 000000000..279ce9fb3 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-starrockswriter/src/main/java/com/alibaba/datax/plugin/writer/starrockswriter/util/StarRocksWriterUtil.java @@ -0,0 +1,93 @@ +package com.alibaba.datax.plugin.writer.starrockswriter.util; + +import com.alibaba.datax.plugin.rdbms.util.DBUtil; +import com.alibaba.datax.plugin.rdbms.util.DataBaseType; +import com.alibaba.datax.plugin.rdbms.util.RdbmsException; +import com.alibaba.datax.plugin.rdbms.writer.Constant; +import com.alibaba.datax.plugin.writer.starrockswriter.StarRocksWriterOptions; +import org.apache.commons.lang3.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.sql.Connection; +import java.sql.ResultSet; +import java.sql.Statement; +import java.util.*; + +public class StarRocksWriterUtil { + + private static final Logger LOG = LoggerFactory.getLogger(StarRocksWriterUtil.class); + + private StarRocksWriterUtil() {} + + public static List getStarRocksColumns(Connection conn, String databaseName, String tableName) { + String currentSql = String.format("SELECT COLUMN_NAME FROM `information_schema`.`COLUMNS` WHERE `TABLE_SCHEMA` = '%s' AND `TABLE_NAME` = '%s' ORDER BY `ORDINAL_POSITION` ASC;", databaseName, tableName); + List columns = new ArrayList<>(); + ResultSet rs = null; + try { + rs = DBUtil.query(conn, currentSql); + while (DBUtil.asyncResultSetNext(rs)) { + String colName = rs.getString("COLUMN_NAME"); + columns.add(colName); + } + return columns; + } catch (Exception e) { + throw RdbmsException.asQueryException(DataBaseType.MySql, e, currentSql, null, null); + } finally { + DBUtil.closeDBResources(rs, null, null); + } + } + + public static List renderPreOrPostSqls(List preOrPostSqls, String tableName) { + if (null == preOrPostSqls) { + return Collections.emptyList(); + } + List renderedSqls = new ArrayList<>(); + for (String sql : preOrPostSqls) { + if (StringUtils.isNotBlank(sql)) { + renderedSqls.add(sql.replace(Constant.TABLE_NAME_PLACEHOLDER, tableName)); + } + } + return renderedSqls; + } + + public static void executeSqls(Connection conn, List sqls) { + Statement stmt = null; + String currentSql = null; + try { + stmt = conn.createStatement(); + for (String sql : sqls) { + currentSql = sql; + DBUtil.executeSqlWithoutResultSet(stmt, sql); + } + } catch (Exception e) { + throw RdbmsException.asQueryException(DataBaseType.MySql, e, currentSql, null, null); + } finally { + DBUtil.closeDBResources(null, stmt, null); + } + } + + public static void preCheckPrePareSQL(StarRocksWriterOptions options) { + String table = options.getTable(); + List preSqls = options.getPreSqlList(); + List renderedPreSqls = StarRocksWriterUtil.renderPreOrPostSqls(preSqls, table); + if (null != renderedPreSqls && !renderedPreSqls.isEmpty()) { + LOG.info("Begin to preCheck preSqls:[{}].", String.join(";", 
renderedPreSqls)); + for (String sql : renderedPreSqls) { + DBUtil.sqlValid(sql, DataBaseType.MySql); + } + } + } + + public static void preCheckPostSQL(StarRocksWriterOptions options) { + String table = options.getTable(); + List postSqls = options.getPostSqlList(); + List renderedPostSqls = StarRocksWriterUtil.renderPreOrPostSqls(postSqls, table); + if (null != renderedPostSqls && !renderedPostSqls.isEmpty()) { + LOG.info("Begin to preCheck postSqls:[{}].", String.join(";", renderedPostSqls)); + for(String sql : renderedPostSqls) { + DBUtil.sqlValid(sql, DataBaseType.MySql); + } + } + } +} diff --git a/exchangis-engines/engines/datax/datax-textfilereader/pom.xml b/exchangis-engines/engines/datax/datax-textfilereader/pom.xml new file mode 100644 index 000000000..e8c690ddf --- /dev/null +++ b/exchangis-engines/engines/datax/datax-textfilereader/pom.xml @@ -0,0 +1,87 @@ + + + + exchangis-engine-datax + com.webank.wedatasphere.exchangis + ${revision} + ../pom.xml + + 4.0.0 + 3.0.0-Plus-2 + datax-textfilereader + jar + + + com.webank.wedatasphere.exchangis + datax-core + provided + + + slf4j-log4j12 + org.slf4j + + + ${datax.engine.version} + + + org.slf4j + slf4j-api + provided + + + ch.qos.logback + logback-classic + provided + + + commons-io + commons-io + provided + + + org.apache.commons + commons-lang3 + provided + + + commons-codec + commons-codec + ${commons-codec} + provided + + + com.google.guava + guava + provided + + + + + + org.apache.maven.plugins + maven-assembly-plugin + 2.2.1 + + + assemble + + single + + + install + + + + false + false + + ${basedir}/src/main/assembly/package.xml + + plugin + + + + + \ No newline at end of file diff --git a/exchangis-engines/engines/datax/datax-textfilereader/src/main/assembly/package.xml b/exchangis-engines/engines/datax/datax-textfilereader/src/main/assembly/package.xml new file mode 100644 index 000000000..9ba8d8fa2 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-textfilereader/src/main/assembly/package.xml @@ -0,0 +1,33 @@ + + textfilereader + + dir + + false + + + src/main/resources + + plugin.json + plugin_job_template.json + + plugin/reader/textfilereader + + + target/ + + datax-textfilereader-${datax.engine.version}.jar + + plugin/reader/textfilereader + + + + + false + plugin/reader/textfilereader/libs + runtime + + + \ No newline at end of file diff --git a/exchangis-engines/engines/datax/datax-textfilereader/src/main/java/com/alibaba/datax/plugin/reader/txtfilereader/Constant.java b/exchangis-engines/engines/datax/datax-textfilereader/src/main/java/com/alibaba/datax/plugin/reader/txtfilereader/Constant.java new file mode 100644 index 000000000..06c15e8eb --- /dev/null +++ b/exchangis-engines/engines/datax/datax-textfilereader/src/main/java/com/alibaba/datax/plugin/reader/txtfilereader/Constant.java @@ -0,0 +1,9 @@ +package com.alibaba.datax.plugin.reader.txtfilereader; + + +/** + * Created by haiwei.luo on 14-9-20. 
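+ * Shared plugin constants: SOURCE_FILES is the configuration key under which
+ * Job.split() hands each Task its list of files to read.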
+ */ +public class Constant { + public static final String SOURCE_FILES = "sourceFiles"; +} \ No newline at end of file diff --git a/exchangis-engines/engines/datax/datax-textfilereader/src/main/java/com/alibaba/datax/plugin/reader/txtfilereader/Key.java b/exchangis-engines/engines/datax/datax-textfilereader/src/main/java/com/alibaba/datax/plugin/reader/txtfilereader/Key.java new file mode 100644 index 000000000..ca81dc26d --- /dev/null +++ b/exchangis-engines/engines/datax/datax-textfilereader/src/main/java/com/alibaba/datax/plugin/reader/txtfilereader/Key.java @@ -0,0 +1,8 @@ +package com.alibaba.datax.plugin.reader.txtfilereader; + +/** + * Created by haiwei.luo on 14-9-20. + */ +public class Key { + public static final String PATH = "path"; +} diff --git a/exchangis-engines/engines/datax/datax-textfilereader/src/main/java/com/alibaba/datax/plugin/reader/txtfilereader/TxtFileReader.java b/exchangis-engines/engines/datax/datax-textfilereader/src/main/java/com/alibaba/datax/plugin/reader/txtfilereader/TxtFileReader.java new file mode 100644 index 000000000..3106a0442 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-textfilereader/src/main/java/com/alibaba/datax/plugin/reader/txtfilereader/TxtFileReader.java @@ -0,0 +1,500 @@ +package com.alibaba.datax.plugin.reader.txtfilereader; + +import com.webank.wedatasphere.exchangis.datax.common.constant.TransportType; +import com.alibaba.datax.common.exception.DataXException; +import com.alibaba.datax.common.plugin.RecordSender; +import com.alibaba.datax.common.spi.Reader; +import com.alibaba.datax.common.util.Configuration; +import com.webank.wedatasphere.exchangis.datax.core.transport.stream.ChannelOutput; +import com.webank.wedatasphere.exchangis.datax.core.transport.stream.StreamMeta; +import com.alibaba.datax.core.util.FrameworkErrorCode; +import com.alibaba.datax.plugin.unstructuredstorage.PathMeta; +import com.alibaba.datax.plugin.unstructuredstorage.reader.UnstructuredStorageReaderErrorCode; +import com.alibaba.datax.plugin.unstructuredstorage.reader.UnstructuredStorageReaderUtil; +import com.google.common.collect.Sets; +import com.webank.wedatasphere.exchangis.datax.util.Json; +import org.apache.commons.io.Charsets; +import org.apache.commons.io.IOUtils; +import org.apache.commons.lang3.BooleanUtils; +import org.apache.commons.lang3.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.*; +import java.nio.charset.UnsupportedCharsetException; +import java.util.*; +import java.util.regex.Pattern; + +import static com.alibaba.datax.plugin.unstructuredstorage.reader.Key.INCR_BEGIN_TIME; +import static com.alibaba.datax.plugin.unstructuredstorage.reader.Key.INCR_END_TIME; + +/** + * Created by haiwei.luo on 14-9-20. 
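+ * Reads local text files. Job.prepare() expands the configured paths (supporting the
+ * * and ? wildcards) into concrete files, split() assigns one file list per task, and
+ * each Task streams its files through UnstructuredStorageReaderUtil in either record
+ * or raw-stream transport mode.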
+ */ +public class TxtFileReader extends Reader { + public static class Job extends Reader.Job { + private static final Logger LOG = LoggerFactory.getLogger(Job.class); + + private Configuration originConfig = null; + + private List path = null; + + private List sourceFiles; + + private Map pattern; + + private Map isRegexPath; + + private long incrBeginTime = 0; + + private long incrEndTime = 0; + + @Override + public boolean isSupportStream() { + return true; + } + + @Override + public void init() { + this.originConfig = this.getPluginJobConf(); + this.pattern = new HashMap(); + this.isRegexPath = new HashMap(); + this.validateParameter(); + } + + private void validateParameter() { + // Compatible with the old version, path is a string before + String pathInString = this.originConfig.getNecessaryValue(Key.PATH, + TxtFileReaderErrorCode.REQUIRED_VALUE); + if (StringUtils.isBlank(pathInString)) { + throw DataXException.asDataXException( + TxtFileReaderErrorCode.REQUIRED_VALUE, + "您需要指定待读取的源目录或文件"); + } + if (!pathInString.startsWith("[") && !pathInString.endsWith("]")) { + path = new ArrayList<>(); + path.add(pathInString); + } else { + path = this.originConfig.getList(Key.PATH, String.class); + if (null == path || path.size() == 0) { + throw DataXException.asDataXException( + TxtFileReaderErrorCode.REQUIRED_VALUE, + "您需要指定待读取的源目录或文件"); + } + for(String eachPath : path){ + if(!eachPath.startsWith("/")){ + String message = String.format("请检查参数path:[%s],需要配置为绝对路径", eachPath); + LOG.error(message); + throw DataXException.asDataXException(TxtFileReaderErrorCode.ILLEGAL_VALUE, message); + } + if(!new File(eachPath).exists()){ + String message = String.format("cannot find the path: [%s], please check your configuration", eachPath); + LOG.error(message); + throw DataXException.asDataXException(TxtFileReaderErrorCode.PATH_NOT_FOUND, message); + } + } + } + + String encoding = this.originConfig + .getString( + com.alibaba.datax.plugin.unstructuredstorage.reader.Key.ENCODING, + com.alibaba.datax.plugin.unstructuredstorage.reader.Constant.DEFAULT_ENCODING); + if (StringUtils.isBlank(encoding)) { + this.originConfig + .set(com.alibaba.datax.plugin.unstructuredstorage.reader.Key.ENCODING, + com.alibaba.datax.plugin.unstructuredstorage.reader.Constant.DEFAULT_ENCODING); + } else { + try { + encoding = encoding.trim(); + this.originConfig + .set(com.alibaba.datax.plugin.unstructuredstorage.reader.Key.ENCODING, + encoding); + Charsets.toCharset(encoding); + } catch (UnsupportedCharsetException uce) { + throw DataXException.asDataXException( + TxtFileReaderErrorCode.ILLEGAL_VALUE, + String.format("不支持您配置的编码格式 : [%s]", encoding), uce); + } catch (Exception e) { + throw DataXException.asDataXException( + TxtFileReaderErrorCode.CONFIG_INVALID_EXCEPTION, + String.format("编码配置异常, 请联系我们: %s", e.getMessage()), + e); + } + } + + // column: 1. 
index type 2.value type 3.when type is Date, may have
+            // format
+            List<Configuration> columns = this.originConfig
+                    .getListConfiguration(com.alibaba.datax.plugin.unstructuredstorage.reader.Key.COLUMN);
+            // handle ["*"]
+            if (null != columns && 1 == columns.size()) {
+                String columnsInStr = columns.get(0).toString();
+                if ("\"*\"".equals(columnsInStr) || "'*'".equals(columnsInStr)) {
+                    this.originConfig
+                            .set(com.alibaba.datax.plugin.unstructuredstorage.reader.Key.COLUMN, null);
+                    columns = null;
+                }
+            }
+
+            if (null != columns && columns.size() != 0) {
+                for (Configuration eachColumnConf : columns) {
+                    eachColumnConf.getNecessaryValue(
+                            com.alibaba.datax.plugin.unstructuredstorage.reader.Key.TYPE,
+                            TxtFileReaderErrorCode.REQUIRED_VALUE);
+                    Integer columnIndex = eachColumnConf
+                            .getInt(com.alibaba.datax.plugin.unstructuredstorage.reader.Key.INDEX);
+                    String columnValue = eachColumnConf
+                            .getString(com.alibaba.datax.plugin.unstructuredstorage.reader.Key.VALUE);
+
+                    if (null == columnIndex && null == columnValue) {
+                        throw DataXException.asDataXException(
+                                TxtFileReaderErrorCode.NO_INDEX_VALUE,
+                                "由于您配置了type, 则至少需要配置 index 或 value");
+                    }
+
+                    if (null != columnIndex && null != columnValue) {
+                        throw DataXException.asDataXException(
+                                TxtFileReaderErrorCode.MIXED_INDEX_VALUE,
+                                "您混合配置了index, value, 每一列同时仅能选择其中一种");
+                    }
+                    if (null != columnIndex && columnIndex < 0) {
+                        throw DataXException.asDataXException(
+                                TxtFileReaderErrorCode.ILLEGAL_VALUE,
+                                String.format("index需要大于等于0, 您配置的index为[%s]", columnIndex));
+                    }
+                }
+            }
+
+            // only support compress types
+            String compress = this.originConfig
+                    .getString(com.alibaba.datax.plugin.unstructuredstorage.reader.Key.COMPRESS);
+            if (StringUtils.isBlank(compress)) {
+                this.originConfig
+                        .set(com.alibaba.datax.plugin.unstructuredstorage.reader.Key.COMPRESS, null);
+            } else {
+                Set<String> supportedCompress = Sets.newHashSet("gzip", "bzip2", "zip");
+                compress = compress.toLowerCase().trim();
+                if (!supportedCompress.contains(compress)) {
+                    throw DataXException.asDataXException(
+                            TxtFileReaderErrorCode.ILLEGAL_VALUE,
+                            String.format("仅支持 gzip, bzip2, zip 文件压缩格式 , 不支持您配置的文件压缩格式: [%s]", compress));
+                }
+                this.originConfig
+                        .set(com.alibaba.datax.plugin.unstructuredstorage.reader.Key.COMPRESS, compress);
+            }
+            if (getTransportType() == TransportType.RECORD) {
+                String delimiterInStr = this.originConfig
+                        .getString(com.alibaba.datax.plugin.unstructuredstorage.reader.Key.FIELD_DELIMITER, ",");
+                // warn: if present, the delimiter must be a single character
+                if (null != delimiterInStr && 1 != delimiterInStr.length()) {
+                    throw DataXException.asDataXException(
+                            UnstructuredStorageReaderErrorCode.ILLEGAL_VALUE,
+                            String.format("仅仅支持单字符切分, 您配置的切分为 : [%s]", delimiterInStr));
+                }
+            }
+            this.incrBeginTime = this.originConfig.getLong(INCR_BEGIN_TIME, 0);
+            this.incrEndTime = this.originConfig.getLong(INCR_END_TIME, Calendar.getInstance().getTimeInMillis());
+        }
+
+        @Override
+        public void prepare() {
+            LOG.debug("prepare() begin...");
+            // warn: make sure this regex string
+            // warn: no need trim
+            for (String eachPath : this.path) {
+                String regexString = eachPath.replace("*", ".*").replace("?", ".?");
+                Pattern patt = Pattern.compile(regexString);
+                this.pattern.put(eachPath, patt);
+            }
+            // Build the file list once, after every path's pattern has been compiled.
+            // Building it inside the loop re-scanned all paths on each iteration and could
+            // NPE on a wildcard path whose pattern had not been compiled yet.
+            this.sourceFiles = this.buildSourceTargets();
+
+            LOG.info(String.format("您即将读取的文件数为: [%s]", this.sourceFiles.size()));
+        }
+
+        @Override
+        public void post() {
+        }
+
+        @Override
+        public void destroy() {
+        }
+
+        // warn: an empty source directory is reported as an error; to transfer an empty
+        // directory on purpose, point the path at an explicitly empty file
+        @Override
+        public List<Configuration> split(int adviceNumber) {
+            LOG.debug("split() begin...");
+            List<Configuration> readerSplitConfigs = new ArrayList<>();
+
+            // warn: each slice pulls exactly one file,
+            // int splitNumber = adviceNumber;
+            int splitNumber = this.sourceFiles.size();
+            if (0 == splitNumber) {
+                return new ArrayList<>();
+            }
+
+            List<List<PathMeta>> splitedSourceFiles = this.splitSourceFiles(
+                    this.sourceFiles, splitNumber);
+            for (List<PathMeta> files : splitedSourceFiles) {
+                Configuration splitedConfig = this.originConfig.clone();
+                splitedConfig.set(Constant.SOURCE_FILES, files);
+                readerSplitConfigs.add(splitedConfig);
+            }
+            LOG.debug("split() ok and end...");
+            return readerSplitConfigs;
+        }
+
+        /**
+         * Build the list of source targets; every configured path must be an absolute path.
+         */
+        private List<PathMeta> buildSourceTargets() {
+            // 0: absolute path, 1: relative path
+            List<PathMeta> sourceTargets = new ArrayList<>();
+            for (String eachPath : this.path) {
+                // for each path
+                Set<String> toBeReadFiles = new HashSet<>();
+                int endMark;
+                for (endMark = 0; endMark < eachPath.length(); endMark++) {
+                    if ('*' == eachPath.charAt(endMark)
+                            || '?' == eachPath.charAt(endMark)) {
+                        this.isRegexPath.put(eachPath, true);
+                        break;
+                    }
+                }
+
+                String parentDirectory;
+                if (BooleanUtils.isTrue(this.isRegexPath.get(eachPath))) {
+                    int lastDirSeparator = eachPath.substring(0, endMark)
+                            .lastIndexOf(IOUtils.DIR_SEPARATOR);
+                    parentDirectory = eachPath.substring(0,
+                            lastDirSeparator + 1);
+                } else {
+                    this.isRegexPath.put(eachPath, false);
+                    parentDirectory = eachPath;
+                }
+                this.buildSourceTargetsEachPath(eachPath, parentDirectory,
+                        toBeReadFiles);
+                toBeReadFiles.forEach(toBeReadFile -> {
+                    boolean toBeRead = true;
+                    if (getTransportType() == TransportType.STREAM) {
+                        File localFile = new File(toBeReadFile);
+                        if (localFile.lastModified() <= incrBeginTime
+                                || localFile.lastModified() > incrEndTime) {
+                            toBeRead = false;
+                        }
+                    }
+                    if (toBeRead) {
+                        String relativePath;
+                        if (toBeReadFile.equals(parentDirectory)) {
+                            relativePath = parentDirectory.substring(parentDirectory
+                                    .lastIndexOf(IOUtils.DIR_SEPARATOR));
+                        } else {
+                            relativePath = toBeReadFile.substring(parentDirectory.length());
+                        }
+                        sourceTargets.add(new PathMeta(toBeReadFile, relativePath));
+                    }
+                });
+            }
+            return sourceTargets;
+        }
+
+        private void buildSourceTargetsEachPath(String regexPath,
+                String parentDirectory, Set<String> toBeReadFiles) {
+            // check the directory up front so a missing directory yields a clearer error
+            try {
+                File dir = new File(parentDirectory);
+                boolean isExists = dir.exists();
+                if (!isExists) {
+                    String message = String.format("您设定的目录不存在 : [%s]",
+                            parentDirectory);
+                    LOG.error(message);
+                    throw DataXException.asDataXException(
+                            TxtFileReaderErrorCode.FILE_NOT_EXISTS, message);
+                }
+            } catch (SecurityException se) {
+                String message = String.format("您没有权限查看目录 : [%s]",
+                        parentDirectory);
+                LOG.error(message);
+                throw DataXException.asDataXException(
+                        TxtFileReaderErrorCode.SECURITY_NOT_ENOUGH, message);
+            }
+
+            directoryRover(regexPath, parentDirectory, toBeReadFiles);
+        }
+
+        private void directoryRover(String regexPath, String parentDirectory,
+                Set<String> toBeReadFiles) {
+            File directory = new File(parentDirectory);
+            if (directory.getName().startsWith(".")) {
+                // skip hidden files and directories
+                return;
+            }
+            // it is a normal file
+            if (!directory.isDirectory()) {
+                if (this.isTargetFile(regexPath, directory.getAbsolutePath())) {
+                    toBeReadFiles.add(parentDirectory);
+                    LOG.info(String.format(
+                            "add file [%s] as a candidate to be read.",
+                            parentDirectory));
+
+                }
+            } else {
+                // it is a directory
+                try {
+                    // warn: for a directory without read permission, listFiles returns null instead of throwing SecurityException
+                    File[] files = directory.listFiles();
+                    if (null != files) {
+                        for (File 
subFileNames : files) { + directoryRover(regexPath, + subFileNames.getAbsolutePath(), + toBeReadFiles); + } + } else { + // warn: 对于没有权限的文件,是直接throw DataXException + String message = String.format("您没有权限查看目录 : [%s]", + directory); + LOG.error(message); + throw DataXException.asDataXException( + TxtFileReaderErrorCode.SECURITY_NOT_ENOUGH, + message); + } + + } catch (SecurityException e) { + String message = String.format("您没有权限查看目录 : [%s]", + directory); + LOG.error(message); + throw DataXException.asDataXException( + TxtFileReaderErrorCode.SECURITY_NOT_ENOUGH, + message, e); + } + } + } + + // 正则过滤 + private boolean isTargetFile(String regexPath, String absoluteFilePath) { + if (this.isRegexPath.get(regexPath)) { + return this.pattern.get(regexPath).matcher(absoluteFilePath) + .matches(); + } else { + return true; + } + + } + + private List> splitSourceFiles(final List sourceList, + int adviceNumber) { + List> splitedList = new ArrayList>(); + int averageLength = sourceList.size() / adviceNumber; + averageLength = averageLength == 0 ? 1 : averageLength; + + for (int begin = 0, end = 0; begin < sourceList.size(); begin = end) { + end = begin + averageLength; + if (end > sourceList.size()) { + end = sourceList.size(); + } + splitedList.add(sourceList.subList(begin, end)); + } + return splitedList; + } + + } + + public static class Task extends Reader.Task { + private static Logger LOG = LoggerFactory.getLogger(Task.class); + + private Configuration readerSliceConfig; + private List sourceFiles; + + @Override + public void init() { + this.readerSliceConfig = this.getPluginJobConf(); + this.sourceFiles = this.readerSliceConfig.getList( + Constant.SOURCE_FILES, Object.class); + } + + @Override + public void prepare() { + + } + + @Override + public void post() { + + } + + @Override + public void destroy() { + + } + + @Override + public void startRead(RecordSender recordSender) { + LOG.debug("start read source files..."); + for (Object sourceFile : this.sourceFiles) { + PathMeta pathMeta = Json.fromJson(Json.toJson(sourceFile, null), PathMeta.class); + assert pathMeta != null; + String fileName = pathMeta.getAbsolute(); + LOG.info(String.format("reading file : [%s]", fileName)); + InputStream inputStream; + try { + inputStream = new FileInputStream(fileName); + UnstructuredStorageReaderUtil.readFromStream(inputStream, + fileName, this.readerSliceConfig, recordSender, + this.getTaskPluginCollector()); + recordSender.flush(); + } catch (FileNotFoundException e) { + // warn: sock 文件无法read,能影响所有文件的传输,需要用户自己保证 + String message = String + .format("找不到待读取的文件 : [%s]", fileName); + LOG.error(message); + throw DataXException.asDataXException( + TxtFileReaderErrorCode.OPEN_FILE_ERROR, message); + } + } + LOG.debug("end read source files..."); + } + + @Override + public void startRead(ChannelOutput channelOutput) { + LOG.info("start read source files to stream channel..."); + for(Object sourceFile: this.sourceFiles){ + PathMeta pathMeta = Json.fromJson(Json.toJson(sourceFile, null), PathMeta.class); + assert pathMeta != null; + String absolutePath = pathMeta.getAbsolute(); + String relativePath = pathMeta.getRelative(); + LOG.info(String.format("reading file : [%s]", absolutePath)); + InputStream inputStream; + try{ + File file = new File(absolutePath); + StreamMeta streamMeta = new StreamMeta(); + streamMeta.setName(file.getName()); + streamMeta.setAbsolutePath(absolutePath); + streamMeta.setRelativePath(relativePath); + OutputStream outputStream = 
channelOutput.createStream(streamMeta,readerSliceConfig.getString( + com.alibaba.datax.plugin.unstructuredstorage.reader.Key.ENCODING, + com.alibaba.datax.plugin.unstructuredstorage.reader.Constant.DEFAULT_ENCODING)); + inputStream = new FileInputStream(file); + UnstructuredStorageReaderUtil.readFromStream(inputStream, outputStream, + this.readerSliceConfig); + }catch(FileNotFoundException e){ + String message = String.format("找不到待读取的文件 : [%s]", absolutePath); + LOG.error(message); + throw DataXException.asDataXException(TxtFileReaderErrorCode.OPEN_FILE_ERROR, message); + }catch(IOException e){ + throw DataXException.asDataXException(FrameworkErrorCode.CHANNEL_STREAM_ERROR, e); + } + } + LOG.info("end read source files to stream channel..."); + } + } +} diff --git a/exchangis-engines/engines/datax/datax-textfilereader/src/main/java/com/alibaba/datax/plugin/reader/txtfilereader/TxtFileReaderErrorCode.java b/exchangis-engines/engines/datax/datax-textfilereader/src/main/java/com/alibaba/datax/plugin/reader/txtfilereader/TxtFileReaderErrorCode.java new file mode 100644 index 000000000..8a4f1806e --- /dev/null +++ b/exchangis-engines/engines/datax/datax-textfilereader/src/main/java/com/alibaba/datax/plugin/reader/txtfilereader/TxtFileReaderErrorCode.java @@ -0,0 +1,46 @@ +package com.alibaba.datax.plugin.reader.txtfilereader; + +import com.alibaba.datax.common.spi.ErrorCode; + +/** + * Created by haiwei.luo on 14-9-20. + */ +public enum TxtFileReaderErrorCode implements ErrorCode { + REQUIRED_VALUE("TxtFileReader-00", "您缺失了必须填写的参数值."), + ILLEGAL_VALUE("TxtFileReader-01", "您填写的参数值不合法."), + MIXED_INDEX_VALUE("TxtFileReader-02", "您的列信息配置同时包含了index,value."), + NO_INDEX_VALUE("TxtFileReader-03", "您明确的配置列信息,但未填写相应的index,value."), + FILE_NOT_EXISTS("TxtFileReader-04", "您配置的目录文件路径不存在."), + OPEN_FILE_WITH_CHARSET_ERROR("TxtFileReader-05", "您配置的文件编码和实际文件编码不符合."), + OPEN_FILE_ERROR("TxtFileReader-06", "您配置的文件在打开时异常,建议您检查源目录是否有隐藏文件,管道文件等特殊文件."), + READ_FILE_IO_ERROR("TxtFileReader-07", "您配置的文件在读取时出现IO异常."), + SECURITY_NOT_ENOUGH("TxtFileReader-08", "您缺少权限执行相应的文件操作."), + CONFIG_INVALID_EXCEPTION("TxtFileReader-09", "您的参数配置错误."), + RUNTIME_EXCEPTION("TxtFileReader-10", "出现运行时异常, 请联系我们"), + EMPTY_DIR_EXCEPTION("TxtFileReader-11", "您尝试读取的文件目录为空."), + PATH_NOT_FOUND("TxtFileReader-12", "Path not found"); + + private final String code; + private final String description; + + private TxtFileReaderErrorCode(String code, String description) { + this.code = code; + this.description = description; + } + + @Override + public String getCode() { + return this.code; + } + + @Override + public String getDescription() { + return this.description; + } + + @Override + public String toString() { + return String.format("Code:[%s], Description:[%s].", this.code, + this.description); + } +} diff --git a/exchangis-engines/engines/datax/datax-textfilereader/src/main/resources/plugin.json b/exchangis-engines/engines/datax/datax-textfilereader/src/main/resources/plugin.json new file mode 100644 index 000000000..2fe13b5cf --- /dev/null +++ b/exchangis-engines/engines/datax/datax-textfilereader/src/main/resources/plugin.json @@ -0,0 +1,6 @@ +{ + "name": "txtfilereader", + "class": "com.alibaba.datax.plugin.reader.txtfilereader.TxtFileReader", + "description": "useScene: test. mechanism: use datax framework to transport data from txt file. 
warn: The more you know about the data, the less problems you encounter.", + "developer": "alibaba" +} \ No newline at end of file diff --git a/exchangis-engines/engines/datax/datax-textfilereader/src/main/resources/plugin_job_template.json b/exchangis-engines/engines/datax/datax-textfilereader/src/main/resources/plugin_job_template.json new file mode 100644 index 000000000..dae3d37d5 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-textfilereader/src/main/resources/plugin_job_template.json @@ -0,0 +1,9 @@ +{ + "name": "txtfilereader", + "parameter": { + "path": [], + "encoding": "", + "column": [], + "fieldDelimiter": "" + } +} \ No newline at end of file diff --git a/exchangis-engines/engines/datax/datax-textfilewriter/pom.xml b/exchangis-engines/engines/datax/datax-textfilewriter/pom.xml new file mode 100644 index 000000000..7ce1bf54b --- /dev/null +++ b/exchangis-engines/engines/datax/datax-textfilewriter/pom.xml @@ -0,0 +1,87 @@ + + + + exchangis-engine-datax + com.webank.wedatasphere.exchangis + ${revision} + ../pom.xml + + 4.0.0 + 3.0.0-Plus-2 + datax-textfilewriter + jar + + + com.webank.wedatasphere.exchangis + datax-core + provided + + + slf4j-log4j12 + org.slf4j + + + ${datax.engine.version} + + + org.slf4j + slf4j-api + provided + + + ch.qos.logback + logback-classic + provided + + + commons-io + commons-io + provided + + + org.apache.commons + commons-lang3 + provided + + + commons-codec + commons-codec + ${commons-codec} + provided + + + com.google.guava + guava + provided + + + + + + org.apache.maven.plugins + maven-assembly-plugin + 2.2.1 + + + assemble + + single + + + install + + + + false + false + + ${basedir}/src/main/assembly/package.xml + + plugin + + + + + \ No newline at end of file diff --git a/exchangis-engines/engines/datax/datax-textfilewriter/src/main/assembly/package.xml b/exchangis-engines/engines/datax/datax-textfilewriter/src/main/assembly/package.xml new file mode 100644 index 000000000..b646c50a8 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-textfilewriter/src/main/assembly/package.xml @@ -0,0 +1,33 @@ + + textfilewriter + + dir + + false + + + src/main/resources + + plugin.json + plugin_job_template.json + + plugin/writer/textfilewriter + + + target/ + + datax-textfilewriter-${datax.engine.version}.jar + + plugin/writer/textfilewriter + + + + + false + plugin/writer/textfilewriter/libs + runtime + + + \ No newline at end of file diff --git a/exchangis-engines/engines/datax/datax-textfilewriter/src/main/java/com/alibaba/datax/plugin/writer/txtfilewriter/Key.java b/exchangis-engines/engines/datax/datax-textfilewriter/src/main/java/com/alibaba/datax/plugin/writer/txtfilewriter/Key.java new file mode 100644 index 000000000..f57f9f961 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-textfilewriter/src/main/java/com/alibaba/datax/plugin/writer/txtfilewriter/Key.java @@ -0,0 +1,9 @@ +package com.alibaba.datax.plugin.writer.txtfilewriter; + +/** + * Created by haiwei.luo on 14-9-17. 
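+ * Configuration keys owned by txtfilewriter itself; the generic unstructured-storage
+ * writer keys (fileName, writeMode, compress, encoding) come from the shared package.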
+ */ +public class Key { + // must have + public static final String PATH = "path"; +} diff --git a/exchangis-engines/engines/datax/datax-textfilewriter/src/main/java/com/alibaba/datax/plugin/writer/txtfilewriter/TxtFileWriter.java b/exchangis-engines/engines/datax/datax-textfilewriter/src/main/java/com/alibaba/datax/plugin/writer/txtfilewriter/TxtFileWriter.java new file mode 100644 index 000000000..5ebb2ffb8 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-textfilewriter/src/main/java/com/alibaba/datax/plugin/writer/txtfilewriter/TxtFileWriter.java @@ -0,0 +1,439 @@ +package com.alibaba.datax.plugin.writer.txtfilewriter; + +import com.alibaba.datax.common.constant.CommonConstant; +import com.webank.wedatasphere.exchangis.datax.common.constant.TransportType; +import com.alibaba.datax.common.exception.DataXException; +import com.alibaba.datax.common.plugin.RecordReceiver; +import com.alibaba.datax.common.spi.Writer; +import com.alibaba.datax.common.util.Configuration; +import com.webank.wedatasphere.exchangis.datax.core.transport.stream.ChannelInput; +import com.webank.wedatasphere.exchangis.datax.core.transport.stream.StreamMeta; +import com.alibaba.datax.core.util.CompressSuffixName; +import com.alibaba.datax.core.util.FrameworkErrorCode; +import com.alibaba.datax.plugin.unstructuredstorage.writer.UnstructuredStorageWriterUtil; +import org.apache.commons.io.FileUtils; +import org.apache.commons.io.IOUtils; +import org.apache.commons.lang3.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.*; +import java.util.*; + +/** + * Created by haiwei.luo on 14-9-17. + */ +public class TxtFileWriter extends Writer { + public static class Job extends Writer.Job { + private static final Logger LOG = LoggerFactory.getLogger(Job.class); + + private String tempPath; + private Configuration writerSliceConfig = null; + + @Override + public boolean isSupportStream() { + return true; + } + + @Override + public void init() { + this.writerSliceConfig = this.getPluginJobConf(); + this.validateParameter(); + String dateFormatOld = this.writerSliceConfig + .getString(com.alibaba.datax.plugin.unstructuredstorage.writer.Key.FORMAT); + String dateFormatNew = this.writerSliceConfig + .getString(com.alibaba.datax.plugin.unstructuredstorage.writer.Key.DATE_FORMAT); + if (null == dateFormatNew) { + this.writerSliceConfig + .set(com.alibaba.datax.plugin.unstructuredstorage.writer.Key.DATE_FORMAT, + dateFormatOld); + } + if (null != dateFormatOld) { + LOG.warn("您使用format配置日期格式化, 这是不推荐的行为, 请优先使用dateFormat配置项, 两项同时存在则使用dateFormat."); + } + UnstructuredStorageWriterUtil + .validateParameter(this.writerSliceConfig); + } + + private void validateParameter() { + String path = this.writerSliceConfig.getNecessaryValue(Key.PATH, + TxtFileWriterErrorCode.REQUIRED_VALUE); + + try { + // warn: 这里用户需要配一个目录 + File dir = new File(path); + if (dir.isFile()) { + throw DataXException + .asDataXException( + TxtFileWriterErrorCode.ILLEGAL_VALUE, + String.format( + "您配置的path: [%s] 不是一个合法的目录, 请您注意文件重名, 不合法目录名等情况.", + path)); + } + if (!dir.exists()) { + boolean createdOk = dir.mkdirs(); + if (!createdOk) { + throw DataXException + .asDataXException( + TxtFileWriterErrorCode.CONFIG_INVALID_EXCEPTION, + String.format("您指定的文件路径 : [%s] 创建失败.", + path)); + } + } + } catch (SecurityException se) { + throw DataXException.asDataXException( + TxtFileWriterErrorCode.SECURITY_NOT_ENOUGH, + String.format("您没有权限创建文件路径 : [%s] ", path), se); + } + } + + @Override + public void prepare() { + String path = 
this.writerSliceConfig.getString(Key.PATH); + String writeMode = this.writerSliceConfig + .getString(com.alibaba.datax.plugin.unstructuredstorage.writer.Key.WRITE_MODE); + // truncate option handler + if ("truncate".equals(writeMode)) { + try { + File dir = new File(path); + LOG.info(String.format("由于您配置了writeMode truncate, 开始清理 [%s] 下面所有内容", path)); + if(dir.exists()) { + for (File eachFile : Objects.requireNonNull(dir.listFiles())) { + FileUtils.forceDelete(eachFile); + } + } + } catch (NullPointerException npe) { + throw DataXException + .asDataXException( + TxtFileWriterErrorCode.Write_FILE_ERROR, + String.format("您配置的目录清空时出现空指针异常 : [%s]", + path), npe); + } catch (IllegalArgumentException iae) { + throw DataXException.asDataXException( + TxtFileWriterErrorCode.SECURITY_NOT_ENOUGH, + String.format("您配置的目录参数异常 : [%s]", path)); + } catch (SecurityException se) { + throw DataXException.asDataXException( + TxtFileWriterErrorCode.SECURITY_NOT_ENOUGH, + String.format("您没有权限查看目录 : [%s]", path)); + } catch (IOException e) { + throw DataXException.asDataXException( + TxtFileWriterErrorCode.Write_FILE_ERROR, + String.format("无法清空目录 : [%s]", path), e); + } + } else if ("append".equals(writeMode)) { + LOG.info("由于您配置了writeMode append, 写入前不做清理工作"); + } else if ("nonConflict".equals(writeMode)) { + LOG.info(String.format( + "由于您配置了writeMode nonConflict, 开始检查 [%s] 下面的内容", path)); + // warn: check two times about exists, mkdirs + File dir = new File(path); + try { + if (dir.exists()) { + if (dir.isFile()) { + throw DataXException + .asDataXException( + TxtFileWriterErrorCode.ILLEGAL_VALUE, + String.format( + "您配置的path: [%s] 不是一个合法的目录, 请您注意文件重名, 不合法目录名等情况.", + path)); + } + File[] filesWithFileNamePrefix = dir.listFiles(); + if (null != filesWithFileNamePrefix && filesWithFileNamePrefix.length > 0) { + List allFiles = new ArrayList(); + for (File eachFile : filesWithFileNamePrefix) { + allFiles.add(eachFile.getName()); + } + LOG.error(String.format("冲突文件列表为: [%s]", + StringUtils.join(allFiles, ","))); + throw DataXException + .asDataXException( + TxtFileWriterErrorCode.ILLEGAL_VALUE, + String.format( + "您配置的path: [%s] 目录不为空, 下面存在其他文件或文件夹.", + path)); + } + } else { + boolean createdOk = dir.mkdirs(); + if (!createdOk) { + throw DataXException + .asDataXException( + TxtFileWriterErrorCode.CONFIG_INVALID_EXCEPTION, + String.format( + "您指定的文件路径 : [%s] 创建失败.", + path)); + } + } + } catch (SecurityException se) { + throw DataXException.asDataXException( + TxtFileWriterErrorCode.SECURITY_NOT_ENOUGH, + String.format("您没有权限查看目录 : [%s]", path)); + } + } else { + throw DataXException + .asDataXException( + TxtFileWriterErrorCode.ILLEGAL_VALUE, + String.format( + "仅支持 truncate, append, nonConflict 三种模式, 不支持您配置的 writeMode 模式 : [%s]", + writeMode)); + } + } + + @Override + public void post() { + String path = this.writerSliceConfig.getNecessaryValue(Key.PATH, + TxtFileWriterErrorCode.REQUIRED_VALUE); + if(StringUtils.isNotBlank(this.tempPath)){ + try { + LOG.info(String.format("move files or directories under temporary path: %s to path: %s", tempPath, path)); + try { + File[] moveFiles = new File(this.tempPath).listFiles(); + for(File moveFile : moveFiles) { + moveToDirectory(moveFile, new File(path)); + } + } catch (IOException e) { + throw DataXException.asDataXException( + TxtFileWriterErrorCode.Write_FILE_IO_ERROR, + String.format("cannot move temporary directory, message: %s" + , e.getMessage()) + ); + } + }finally{ + try { + LOG.info(String.format("delete temporary path : %s", tempPath)); + 
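+                    // best-effort cleanup: a failed delete is only logged in the catch below,
+                    // since the output files have already been moved into place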
+                        FileUtils.forceDelete(new File(this.tempPath));
+                        this.tempPath = null;
+                    } catch (IOException e) {
+                        DataXException de = DataXException.asDataXException(
+                                TxtFileWriterErrorCode.Write_FILE_IO_ERROR,
+                                String.format("cannot delete temporary directory %s", this.tempPath)
+                        );
+                        LOG.error(de.getMessage(), de);
+                    }
+                }
+            }
+        }
+
+        @Override
+        public void destroy() {
+            if (StringUtils.isNotBlank(tempPath)) {
+                try {
+                    LOG.info(String.format("delete temporary path : %s", tempPath));
+                    FileUtils.forceDelete(new File(this.tempPath));
+                } catch (IOException e) {
+                    DataXException de = DataXException.asDataXException(
+                            TxtFileWriterErrorCode.Write_FILE_IO_ERROR,
+                            String.format("cannot delete temporary directory %s", this.tempPath)
+                    );
+                    LOG.error(de.getMessage(), de);
+                }
+            }
+        }
+
+        @Override
+        public List<Configuration> split(int mandatoryNumber) {
+            LOG.info("begin do split...");
+            List<Configuration> writerSplitConfigs = new ArrayList<>();
+            String filePrefix = this.writerSliceConfig
+                    .getString(com.alibaba.datax.plugin.unstructuredstorage.writer.Key.FILE_NAME, "");
+
+            Set<String> allFiles;
+            String path = null;
+            try {
+                path = this.writerSliceConfig.getString(Key.PATH);
+                File dir = new File(path);
+                allFiles = new HashSet<>(Arrays.asList(Objects.requireNonNull(dir.list())));
+            } catch (SecurityException se) {
+                throw DataXException.asDataXException(
+                        TxtFileWriterErrorCode.SECURITY_NOT_ENOUGH,
+                        String.format("No permission to list the directory [%s]", path));
+            }
+            this.tempPath = UnstructuredStorageWriterUtil.buildTmpFilePath(path,
+                    String.format(CommonConstant.TEMP_PREFIX, System.currentTimeMillis()), IOUtils.DIR_SEPARATOR,
+                    allFiles::contains);
+            String fileSuffix;
+            for (int i = 0; i < mandatoryNumber; i++) {
+                // handle same file name
+                Configuration splitedTaskConfig = this.writerSliceConfig.clone();
+                splitedTaskConfig.set(com.alibaba.datax.plugin.unstructuredstorage.writer.Key.TEMP_PATH, this.tempPath);
+                if (getTransportType() == TransportType.STREAM) {
+                    splitedTaskConfig.set(com.alibaba.datax.plugin.unstructuredstorage.writer.Key.FILE_NAME,
+                            filePrefix);
+                    writerSplitConfigs.add(splitedTaskConfig);
+                    continue;
+                }
+                String fullFileName = null;
+                do {
+                    fileSuffix = UUID.randomUUID().toString().replace('-', '_');
+                    fullFileName = String.format("%s__%s", filePrefix, fileSuffix);
+                } while (allFiles.contains(fullFileName));
+                allFiles.add(fullFileName);
+                String suffix = CompressSuffixName.chooseSuffix(this.writerSliceConfig
+                        .getString(com.alibaba.datax.plugin.unstructuredstorage.writer.Key.COMPRESS, ""));
+                if (StringUtils.isNotBlank(suffix)) {
+                    fullFileName += suffix;
+                }
+                splitedTaskConfig.set(com.alibaba.datax.plugin.unstructuredstorage.writer.Key.FILE_NAME,
+                        fullFileName);
+
+                LOG.info(String.format("split write file name: [%s]", fullFileName));
+
+                writerSplitConfigs.add(splitedTaskConfig);
+            }
+            LOG.info("end do split.");
+            return writerSplitConfigs;
+        }
+
+        private void moveToDirectory(File src, File destDir) throws IOException {
+            if (src.isDirectory()) {
+                File childDestDir = new File(destDir, src.getName());
+                if (childDestDir.exists()) {
+                    if (!childDestDir.isDirectory()) {
+                        throw new IOException("Destination has a conflicting file named '" + childDestDir.getPath() + "'");
+                    }
+                    File[] childFiles = src.listFiles();
+                    if (null != childFiles) {
+                        for (File childFile : childFiles) {
+                            moveToDirectory(childFile, childDestDir);
+                        }
+                    }
+                } else {
+                    FileUtils.moveToDirectory(src, destDir, true);
+                }
+            } else {
+                File dest = new File(destDir, src.getName());
+                boolean canMove = !dest.exists() || (dest.exists() && dest.delete());
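+                // Overwrite semantics: an existing destination file is deleted first;
+                // if that delete fails, the source file is left behind in the
+                // temporary directory instead of raising an error.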
+                if (canMove) {
+                    FileUtils.moveFile(src, dest);
+                }
+            }
+        }
+    }
+
+    public static class Task extends Writer.Task {
+        private static final Logger LOG = LoggerFactory.getLogger(Task.class);
+
+        private Configuration writerSliceConfig;
+
+        private String fileName;
+
+        private String tempPath;
+
+        @Override
+        public void init() {
+            this.writerSliceConfig = this.getPluginJobConf();
+            this.tempPath = this.writerSliceConfig
+                    .getString(com.alibaba.datax.plugin.unstructuredstorage.writer.Key.TEMP_PATH, "");
+            this.fileName = this.writerSliceConfig
+                    .getString(com.alibaba.datax.plugin.unstructuredstorage.writer.Key.FILE_NAME, "");
+        }
+
+        @Override
+        public void prepare() {
+
+        }
+
+        @Override
+        public void startWrite(RecordReceiver lineReceiver) {
+            LOG.debug("begin do write...");
+            String fileFullPath = this.buildFilePath(this.fileName);
+            LOG.debug(String.format("write to file : [%s]", fileFullPath));
+
+            OutputStream outputStream = null;
+            try {
+                File newFile = new File(fileFullPath);
+                newFile.getParentFile().mkdirs();
+                if (!newFile.createNewFile()) {
+                    throw DataXException.asDataXException(TxtFileWriterErrorCode.SECURITY_NOT_ENOUGH,
+                            String.format("Unable to create the file to write: [%s]", this.fileName));
+                }
+                outputStream = new FileOutputStream(newFile);
+                UnstructuredStorageWriterUtil.writeToStream(lineReceiver,
+                        outputStream, this.writerSliceConfig, this.fileName,
+                        this.getTaskPluginCollector());
+            } catch (SecurityException se) {
+                throw DataXException.asDataXException(
+                        TxtFileWriterErrorCode.SECURITY_NOT_ENOUGH,
+                        String.format("No permission to create the file: [%s]", this.fileName));
+            } catch (IOException ioe) {
+                throw DataXException.asDataXException(
+                        TxtFileWriterErrorCode.Write_FILE_IO_ERROR,
+                        String.format("Unable to create the file to write: [%s]", this.fileName), ioe);
+            } finally {
+                IOUtils.closeQuietly(outputStream);
+            }
+            LOG.debug("end do write");
+        }
+
+        @Override
+        public void startWrite(ChannelInput channelInput) {
+            LOG.info("begin do write from stream channel...");
+            try {
+                InputStream inputStream;
+                while ((inputStream = channelInput.nextStream()) != null) {
+                    StreamMeta metaData = channelInput.streamMetaData(this.writerSliceConfig
+                            .getString(com.alibaba.datax.plugin.unstructuredstorage.writer.Key.ENCODING, "UTF-8"));
+                    LOG.info("begin do read input stream, name : " + metaData.getName() + ", relativePath: " + metaData.getRelativePath());
+                    String relativePath = metaData.getRelativePath();
+                    if (StringUtils.isNotBlank(fileName)) {
+                        // modify the relativePath
+                        relativePath = relativePath.substring(0, relativePath.lastIndexOf(IOUtils.DIR_SEPARATOR) + 1)
+                                + fileName + "_" + metaData.getName();
+                    }
+                    String fileFullPath = this.buildFilePath(relativePath);
+                    File file = new File(fileFullPath);
+                    file.getParentFile().mkdirs();
+                    FileOutputStream outputStream = new FileOutputStream(file, false);
+                    try {
+                        UnstructuredStorageWriterUtil.writeToStream(inputStream, outputStream, this.writerSliceConfig);
+                        outputStream.flush();
+                    } finally {
+                        IOUtils.closeQuietly(outputStream);
+                    }
+                }
+            } catch (IOException e) {
+                throw DataXException.asDataXException(FrameworkErrorCode.CHANNEL_STREAM_ERROR, e);
+            }
+            LOG.info("end do write from stream channel");
+        }
+
+        private String buildFilePath(String fileName) {
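+            // Normalize tempPath so that it always ends with the platform
+            // directory separator before the file name is appended.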
+            boolean isEndWithSeparator = false;
+            switch (IOUtils.DIR_SEPARATOR) {
+                case IOUtils.DIR_SEPARATOR_UNIX:
+                    isEndWithSeparator = this.tempPath.endsWith(String.valueOf(IOUtils.DIR_SEPARATOR));
+                    break;
+                case IOUtils.DIR_SEPARATOR_WINDOWS:
+                    isEndWithSeparator = this.tempPath.endsWith(String.valueOf(IOUtils.DIR_SEPARATOR_WINDOWS));
+                    break;
+                default:
+                    break;
+            }
+            if (!isEndWithSeparator) {
+                this.tempPath = this.tempPath + IOUtils.DIR_SEPARATOR;
+            }
+            return String.format("%s%s", this.tempPath, fileName);
+        }
+
+        @Override
+        public void post() {
+
+        }
+
+        @Override
+        public void destroy() {
+
+        }
+    }
+}
diff --git a/exchangis-engines/engines/datax/datax-textfilewriter/src/main/java/com/alibaba/datax/plugin/writer/txtfilewriter/TxtFileWriterErrorCode.java b/exchangis-engines/engines/datax/datax-textfilewriter/src/main/java/com/alibaba/datax/plugin/writer/txtfilewriter/TxtFileWriterErrorCode.java
new file mode 100644
index 000000000..449c2eeb5
--- /dev/null
+++ b/exchangis-engines/engines/datax/datax-textfilewriter/src/main/java/com/alibaba/datax/plugin/writer/txtfilewriter/TxtFileWriterErrorCode.java
@@ -0,0 +1,41 @@
+package com.alibaba.datax.plugin.writer.txtfilewriter;
+
+import com.alibaba.datax.common.spi.ErrorCode;
+
+/**
+ * Created by haiwei.luo on 14-9-17.
+ */
+public enum TxtFileWriterErrorCode implements ErrorCode {
+
+    CONFIG_INVALID_EXCEPTION("TxtFileWriter-00", "Invalid parameter configuration."),
+    REQUIRED_VALUE("TxtFileWriter-01", "A required parameter value is missing."),
+    ILLEGAL_VALUE("TxtFileWriter-02", "An illegal parameter value was supplied."),
+    Write_FILE_ERROR("TxtFileWriter-03", "An error occurred while writing the target file."),
+    Write_FILE_IO_ERROR("TxtFileWriter-04", "An IO exception occurred while writing the configured file."),
+    SECURITY_NOT_ENOUGH("TxtFileWriter-05", "Insufficient permission to perform the file write operation.");
+
+    private final String code;
+    private final String description;
+
+    private TxtFileWriterErrorCode(String code, String description) {
+        this.code = code;
+        this.description = description;
+    }
+
+    @Override
+    public String getCode() {
+        return this.code;
+    }
+
+    @Override
+    public String getDescription() {
+        return this.description;
+    }
+
+    @Override
+    public String toString() {
+        return String.format("Code:[%s], Description:[%s].", this.code,
+                this.description);
+    }
+
+}
diff --git a/exchangis-engines/engines/datax/datax-textfilewriter/src/main/resources/plugin.json b/exchangis-engines/engines/datax/datax-textfilewriter/src/main/resources/plugin.json
new file mode 100644
index 000000000..4259e6531
--- /dev/null
+++ b/exchangis-engines/engines/datax/datax-textfilewriter/src/main/resources/plugin.json
@@ -0,0 +1,6 @@
+{
+    "name": "txtfilewriter",
+    "class": "com.alibaba.datax.plugin.writer.txtfilewriter.TxtFileWriter",
+    "description": "useScene: test. mechanism: use datax framework to transport data to txt file. 
warn: The more you know about the data, the less problems you encounter.", + "developer": "alibaba" +} \ No newline at end of file diff --git a/exchangis-engines/engines/datax/datax-textfilewriter/src/main/resources/plugin_job_template.json b/exchangis-engines/engines/datax/datax-textfilewriter/src/main/resources/plugin_job_template.json new file mode 100644 index 000000000..cd123d619 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-textfilewriter/src/main/resources/plugin_job_template.json @@ -0,0 +1,10 @@ +{ + "name": "txtfilewriter", + "parameter": { + "path": "", + "fileName": "", + "writeMode": "", + "fieldDelimiter": "", + "dateFormat": "" + } +} \ No newline at end of file diff --git a/exchangis-engines/engines/datax/pom.xml b/exchangis-engines/engines/datax/pom.xml new file mode 100644 index 000000000..59d8ac664 --- /dev/null +++ b/exchangis-engines/engines/datax/pom.xml @@ -0,0 +1,124 @@ + + + + com.webank.wedatasphere.exchangis + exchangis + ${revision} + ../../../pom.xml + + + 4.0.0 + + exchangis-engine-datax + pom + + 1.7.25 + 1.2.3 + 3.1.1 + 16.0.1 + 2.8.2 + 1.19 + 2.4 + 1.6 + 1.10 + 1.2 + 1.9.4 + 3.3.4 + 3.1.3 + 6.7.1 + 1.11 + 3.0.0-Plus-2 + 2.11.0 + + + datax-core + datax-ftpreader + datax-ftpwriter + datax-hdfsreader + datax-hdfswriter + datax-textfilereader + datax-textfilewriter + datax-elasticsearchwriter + datax-mysqlreader + datax-mysqlwriter + datax-oraclereader + datax-starrockswriter + datax-assembly + + + + + org.apache.commons + commons-lang3 + ${commons.lang3.version} + + + commons-pool + commons-pool + ${commons-pool} + + + com.google.guava + guava + ${guava-version} + + + ch.qos.logback + logback-classic + ${logback-classic-version} + + + org.slf4j + slf4j-api + ${slf4j-api-version} + + + org.apache.commons + commons-math3 + ${commons-math3-version} + + + org.apache.commons + commons-compress + ${commons-compress-version} + + + commons-io + commons-io + ${commons-io} + + + commons-beanutils + commons-beanutils + ${commons-beanutils} + + + com.google.code.gson + gson + ${gson-version} + + + commons-cli + commons-cli + ${commons-cli-version} + + + commons-configuration + commons-configuration + ${commons-configuration-version} + + + com.fasterxml.jackson.core + jackson-core + ${jackson.version} + + + com.fasterxml.jackson.core + jackson-databind + ${jackson.version} + + + + \ No newline at end of file diff --git a/exchangis-engines/exchangis-engine-common/pom.xml b/exchangis-engines/exchangis-engine-common/pom.xml new file mode 100644 index 000000000..16cf86da2 --- /dev/null +++ b/exchangis-engines/exchangis-engine-common/pom.xml @@ -0,0 +1,31 @@ + + + + exchangis + com.webank.wedatasphere.exchangis + ${revision} + ../../pom.xml + + 4.0.0 + + exchangis-engine-common + + + 8 + 8 + + + + com.webank.wedatasphere.exchangis + exchangis-dao + ${project.version} + + + org.apache.linkis + linkis-common + ${linkis.version} + + + \ No newline at end of file diff --git a/exchangis-engines/exchangis-engine-common/src/main/java/com/webank/wedatasphere/exchangis/engine/config/ExchangisEngineConfiguration.java b/exchangis-engines/exchangis-engine-common/src/main/java/com/webank/wedatasphere/exchangis/engine/config/ExchangisEngineConfiguration.java new file mode 100644 index 000000000..728bae3f0 --- /dev/null +++ b/exchangis-engines/exchangis-engine-common/src/main/java/com/webank/wedatasphere/exchangis/engine/config/ExchangisEngineConfiguration.java @@ -0,0 +1,23 @@ +package com.webank.wedatasphere.exchangis.engine.config; + +import 
org.apache.linkis.common.conf.CommonVars; + +/** + * Engine configuration + */ +public class ExchangisEngineConfiguration { + + public static final CommonVars ENGINE_RESOURCE_ROOT_PATH = CommonVars.apply("wds.exchangis.engine.root.path", + System.getProperty("user.dir", "/tmp/exchangis/") + "/engine"); + /** + * If need to store the merged resource into local path + */ + public static final CommonVars ENGINE_RESOURCE_MERGE_LOCAL = CommonVars.apply("wds.exchangis.engine.resource.merge.local", true); + + public static final CommonVars ENGINE_RESOURCE_TMP_PATH = CommonVars.apply("wds.exchangis.engine.resource.temp.path", "/tmp/exchangis/engine"); + + /** + * Packet suffix + */ + public static final CommonVars ENGINE_RESOURCE_PACKET_SUFFIX = CommonVars.apply("wds.exchangis.engine.resource.packet.suffix", ".zip"); +} diff --git a/exchangis-engines/exchangis-engine-common/src/main/java/com/webank/wedatasphere/exchangis/engine/domain/EngineBmlResource.java b/exchangis-engines/exchangis-engine-common/src/main/java/com/webank/wedatasphere/exchangis/engine/domain/EngineBmlResource.java new file mode 100644 index 000000000..04806258e --- /dev/null +++ b/exchangis-engines/exchangis-engine-common/src/main/java/com/webank/wedatasphere/exchangis/engine/domain/EngineBmlResource.java @@ -0,0 +1,105 @@ +package com.webank.wedatasphere.exchangis.engine.domain; + +import org.apache.commons.io.IOUtils; +import org.apache.commons.lang3.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.io.InputStream; +import java.net.URI; +import java.net.URISyntaxException; +import java.util.Calendar; + +/** + * BML resources + */ +public class EngineBmlResource extends EngineResource { + + private static final Logger LOG = LoggerFactory.getLogger(EngineBmlResource.class); + + public static final String DEFAULT_SCHEME = "bml"; + + /** + * Resource id + */ + private String resourceId; + + /** + * Version + */ + private String version; + + public EngineBmlResource(String engineType, + String path, String name, + String resourceId, String version, String creator){ + this.type = DEFAULT_SCHEME; + this.name = name; + this.engineType = engineType; + this.resourceId = resourceId; + // Use the bml resource id as id + this.id = resourceId; + this.version = version; + this.path = path; + this.creator = creator; + Calendar calendar = Calendar.getInstance(); + this.createTime = calendar.getTime(); + this.modifyTime = calendar.getTime(); + } + + /** + * Get bml resource from stored resource + * @param storeResource store resource + */ + public EngineBmlResource(EngineStoreResource storeResource){ + this(storeResource.engineType, storeResource.path, storeResource.name, null, null, + storeResource.creator); + this.createTime = storeResource.createTime; + this.modifyTime = storeResource.modifyTime; + String storeUri = storeResource.getStoreUri(); + if (StringUtils.isNotBlank(storeUri)){ + try { + String storePath = new URI(storeUri).getPath(); + if (storePath.startsWith(IOUtils.DIR_SEPARATOR_UNIX + "")){ + storePath = storePath.substring(1); + } + String[] storeParts = storePath.split(IOUtils.DIR_SEPARATOR_UNIX + ""); + if (storeParts.length >= 2){ + this.resourceId = storeParts[0]; + this.version = storeParts[1]; + } + } catch (URISyntaxException e) { + LOG.warn("Unrecognized bml stored uri: [{}]", storeUri, e); + } + } + } + + @Override + public InputStream getInputStream() throws IOException { + // TODO get input stream from BML + return null; + } + + @Override + public URI 
getURI() throws URISyntaxException { + return new URI(DEFAULT_SCHEME, "", IOUtils.DIR_SEPARATOR_UNIX + + resourceId + IOUtils.DIR_SEPARATOR_UNIX + version, null, null); + } + + public String getResourceId() { + return resourceId; + } + + public void setResourceId(String resourceId) { + this.resourceId = resourceId; + } + + public String getVersion() { + return version; + } + + public void setVersion(String version) { + this.version = version; + } + +} diff --git a/exchangis-engines/exchangis-engine-common/src/main/java/com/webank/wedatasphere/exchangis/engine/domain/EngineLocalPathResource.java b/exchangis-engines/exchangis-engine-common/src/main/java/com/webank/wedatasphere/exchangis/engine/domain/EngineLocalPathResource.java new file mode 100644 index 000000000..5c038ab57 --- /dev/null +++ b/exchangis-engines/exchangis-engine-common/src/main/java/com/webank/wedatasphere/exchangis/engine/domain/EngineLocalPathResource.java @@ -0,0 +1,91 @@ +package com.webank.wedatasphere.exchangis.engine.domain; + +import com.webank.wedatasphere.exchangis.common.EnvironmentUtils; + +import java.io.*; +import java.net.URI; +import java.net.URISyntaxException; +import java.util.Calendar; +import java.util.UUID; + +/** + * Local engine resource + */ +public class EngineLocalPathResource extends EngineResource{ + + public static final String DEFAULT_SCHEME = "file"; + + /** + * Local file + */ + private final File localFile; + + /** + * Whether the resource is a packet + */ + private boolean isPacket; + + public EngineLocalPathResource(String engineType, URI baseUri, String path){ + this(engineType, baseUri, path, false); + } + + public EngineLocalPathResource(String engineType, URI baseUri, String path, boolean isPacket){ + this.type = DEFAULT_SCHEME; + this.engineType = engineType; + this.path = path; + this.localFile = new File(baseUri.getPath(), path); + this.name = localFile.getName(); + this.isPacket = isPacket; + Calendar calendar = Calendar.getInstance(); + this.createTime = calendar.getTime(); + this.modifyTime = calendar.getTime(); + // Set the creator as jvm user + this.creator = EnvironmentUtils.getJvmUser(); + // Random resource id + this.id = UUID.randomUUID().toString(); + } + @Override + public InputStream getInputStream() throws IOException { + if (localFile.exists() && localFile.isFile()){ + return new FileInputStream(this.localFile); + } + return null; + } + + @Override + public URI getURI() throws URISyntaxException { + return this.localFile.toURI(); + } + + + public String getPath() { + return path; + } + + public File getLocalFile(){ + return this.localFile; + } + + public boolean isPacket() { + return isPacket; + } + + public void setPacket(boolean isPacket){ + this.isPacket = isPacket; + } + + @Override + public int hashCode() { + return (getEngineType() + ":" + this.localFile.getPath()).hashCode(); + } + + @Override + public boolean equals(Object obj) { + if (obj instanceof EngineLocalPathResource){ + EngineLocalPathResource other = (EngineLocalPathResource)obj; + return this.engineType.equals(other.getEngineType()) && + this.localFile.getPath().equals(other.localFile.getPath()); + } + return super.equals(obj); + } +} diff --git a/exchangis-engines/exchangis-engine-common/src/main/java/com/webank/wedatasphere/exchangis/engine/domain/EngineResource.java b/exchangis-engines/exchangis-engine-common/src/main/java/com/webank/wedatasphere/exchangis/engine/domain/EngineResource.java new file mode 100644 index 000000000..632614c91 --- /dev/null +++ 
b/exchangis-engines/exchangis-engine-common/src/main/java/com/webank/wedatasphere/exchangis/engine/domain/EngineResource.java @@ -0,0 +1,141 @@ +package com.webank.wedatasphere.exchangis.engine.domain; + +import java.io.IOException; +import java.io.InputStream; +import java.net.URI; +import java.net.URISyntaxException; +import java.util.Date; + +/** + * Engine resource + */ +public abstract class EngineResource { + + /** + * Engine type + */ + protected String engineType; + + /** + * Resource id + */ + protected String id; + + /** + * Resource name + */ + protected String name; + + /** + * Resource type + */ + protected String type; + + /** + * Resource path + */ + protected String path; + + /** + * Create time + */ + protected Date createTime; + + /** + * Modify time + */ + protected Date modifyTime; + /** + * Create user + */ + protected String creator; + /** + * Get input stream from resource + * @return input stream + */ + public abstract InputStream getInputStream() throws IOException; + + /** + * URI value + * @return uri + */ + public abstract URI getURI() throws URISyntaxException; + + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public String getType() { + return type; + } + + public void setType(String type) { + this.type = type; + } + + public String getEngineType() { + return engineType; + } + + public void setEngineType(String engineType) { + this.engineType = engineType; + } + + public Date getCreateTime() { + return createTime; + } + + public void setCreateTime(Date createTime) { + this.createTime = createTime; + } + + public Date getModifyTime() { + return modifyTime; + } + + public void setModifyTime(Date modifyTime) { + this.modifyTime = modifyTime; + } + + public String getCreator() { + return creator; + } + + public void setCreator(String creator) { + this.creator = creator; + } + + public String getPath() { + return path; + } + + public void setPath(String path) { + this.path = path; + } + + @Override + public String toString() { + return "EngineResource{" + + "engineType='" + engineType + '\'' + + ", id='" + id + '\'' + + ", name='" + name + '\'' + + ", type='" + type + '\'' + + ", path='" + path + '\'' + + ", createTime=" + createTime + + ", modifyTime=" + modifyTime + + ", creator='" + creator + '\'' + + '}'; + } +} diff --git a/exchangis-engines/exchangis-engine-common/src/main/java/com/webank/wedatasphere/exchangis/engine/domain/EngineSettings.java b/exchangis-engines/exchangis-engine-common/src/main/java/com/webank/wedatasphere/exchangis/engine/domain/EngineSettings.java new file mode 100644 index 000000000..10eb47a42 --- /dev/null +++ b/exchangis-engines/exchangis-engine-common/src/main/java/com/webank/wedatasphere/exchangis/engine/domain/EngineSettings.java @@ -0,0 +1,179 @@ +package com.webank.wedatasphere.exchangis.engine.domain; + +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; +import org.apache.commons.lang3.StringUtils; +import org.apache.linkis.common.utils.JsonUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.*; + +/** + * Engine settings + */ +public class EngineSettings { + + private static final Logger LOG = LoggerFactory.getLogger(EngineSettings.class); + /** + * ID + */ + private String id; + + /** + * Engine name: engine_name + */ + private String name; + + /** + * Description: engine_desc + */ + private String description; + + /** + 
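+     * Raw JSON string of engine settings, parsed lazily by getSettingsMap().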
* Settings: engine_settings_value + */ + @JsonIgnoreProperties + private String settings; + /** + * Direction: engine_direction => hdfs->local,mysql->hdfs,mysql->hdfs + */ + @JsonIgnoreProperties + private String direction; + + /** + * Resource loader class: res_loader_class + */ + private String resourceLoaderClass; + + /** + * Resource uploader class: res_uploader_class + */ + private String resourceUploaderClass; + + /** + * Direct rules + */ + private final List directionRules = new ArrayList<>(); + + /** + * Setting map + */ + private final Map settingsMap = new HashMap<>(); + + + public List getDirectionRules(){ + if (directionRules.isEmpty() && StringUtils.isNotBlank(direction)){ + synchronized (directionRules) { + if (directionRules.isEmpty()) { + String[] directs = direction.split(","); + for (String direct : directs) { + String[] parts = direct.trim().split("->"); + if (parts.length == 2) { + directionRules.add(new Direction(parts[0].trim(), parts[1].trim())); + } + } + } + } + } + return directionRules; + } + + @SuppressWarnings("unchecked") + public Map getSettingsMap(){ + if (settingsMap.isEmpty() && StringUtils.isNotBlank(settings)){ + synchronized (settingsMap){ + if (settingsMap.isEmpty()){ + try { + settingsMap.putAll(JsonUtils.jackson().reader().readValue(settings, Map.class)); + }catch(Exception e){ + // Ignore + LOG.warn("Fail to load engine settings properties", e); + } + } + } + } + return settingsMap; + } + + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public String getDescription() { + return description; + } + + public void setDescription(String description) { + this.description = description; + } + + public String getSettings() { + return settings; + } + + public void setSettings(String settings) { + this.settings = settings; + } + + public String getDirection() { + return direction; + } + + public void setDirection(String direction) { + this.direction = direction; + } + + public String getResourceLoaderClass() { + return resourceLoaderClass; + } + + public void setResourceLoaderClass(String resourceLoaderClass) { + this.resourceLoaderClass = resourceLoaderClass; + } + + public String getResourceUploaderClass() { + return resourceUploaderClass; + } + + public void setResourceUploaderClass(String resourceUploaderClass) { + this.resourceUploaderClass = resourceUploaderClass; + } + + + public static class Direction{ + /** + * Source type + */ + private final String source; + + /** + * Sink type + */ + private final String sink; + + public Direction(String source, String sink){ + this.source = source; + this.sink = sink; + } + + public String getSource() { + return source; + } + + public String getSink() { + return sink; + } + } +} diff --git a/exchangis-engines/exchangis-engine-common/src/main/java/com/webank/wedatasphere/exchangis/engine/domain/EngineStoreResource.java b/exchangis-engines/exchangis-engine-common/src/main/java/com/webank/wedatasphere/exchangis/engine/domain/EngineStoreResource.java new file mode 100644 index 000000000..dd220d0ed --- /dev/null +++ b/exchangis-engines/exchangis-engine-common/src/main/java/com/webank/wedatasphere/exchangis/engine/domain/EngineStoreResource.java @@ -0,0 +1,59 @@ +package com.webank.wedatasphere.exchangis.engine.domain; + +import java.io.IOException; +import java.io.InputStream; +import java.net.URI; +import java.net.URISyntaxException; +import 
java.util.Calendar;
+import java.util.Objects;
+import java.util.Optional;
+
+/**
+ * Engine resource stored in the database
+ */
+public class EngineStoreResource extends EngineResource {
+
+    private String storeUri;
+
+    public EngineStoreResource(EngineResource engineResource) {
+        this.engineType = engineResource.getEngineType();
+        this.name = engineResource.getName();
+        this.type = engineResource.getType();
+        this.path = engineResource.getPath();
+        this.createTime = Optional.ofNullable(engineResource.getCreateTime())
+                .orElse(Calendar.getInstance().getTime());
+        this.modifyTime = Optional.ofNullable(engineResource.getModifyTime())
+                .orElse(Calendar.getInstance().getTime());
+        this.creator = engineResource.getCreator();
+        try {
+            URI uri = engineResource.getURI();
+            if (Objects.nonNull(uri)) {
+                this.storeUri = uri.toString();
+            }
+        } catch (Exception e) {
+            // Ignore
+        }
+
+    }
+
+    public EngineStoreResource() {
+
+    }
+    @Override
+    public InputStream getInputStream() throws IOException {
+        throw new IllegalArgumentException("Unsupported method 'getInputStream()'");
+    }
+
+    @Override
+    public URI getURI() throws URISyntaxException {
+        throw new IllegalArgumentException("Unsupported method 'getURI()'");
+    }
+
+    public String getStoreUri() {
+        return storeUri;
+    }
+
+    public void setStoreUri(String storeUri) {
+        this.storeUri = storeUri;
+    }
+}
diff --git a/exchangis-engines/exchangis-engine-common/src/main/java/com/webank/wedatasphere/exchangis/engine/exception/ExchangisEngineExceptionCode.java b/exchangis-engines/exchangis-engine-common/src/main/java/com/webank/wedatasphere/exchangis/engine/exception/ExchangisEngineExceptionCode.java
new file mode 100644
index 000000000..db1b957f4
--- /dev/null
+++ b/exchangis-engines/exchangis-engine-common/src/main/java/com/webank/wedatasphere/exchangis/engine/exception/ExchangisEngineExceptionCode.java
@@ -0,0 +1,25 @@
+package com.webank.wedatasphere.exchangis.engine.exception;
+
+/**
+ * Exception codes for the engine
+ * 32000 ~ 32999
+ */
+public enum ExchangisEngineExceptionCode {
+    RESOURCE_ERROR(32000),
+    RESOURCE_LOAD_ERROR(32001),
+    RESOURCE_UPLOAD_ERROR(32002);
+
+    private int code;
+
+    ExchangisEngineExceptionCode(int code) {
+        this.code = code;
+    }
+
+    public int getCode() {
+        return code;
+    }
+
+    public void setCode(int code) {
+        this.code = code;
+    }
+}
diff --git a/exchangis-engines/exchangis-engine-common/src/main/java/com/webank/wedatasphere/exchangis/engine/exception/ExchangisEngineResException.java b/exchangis-engines/exchangis-engine-common/src/main/java/com/webank/wedatasphere/exchangis/engine/exception/ExchangisEngineResException.java
new file mode 100644
index 000000000..c195cfd0f
--- /dev/null
+++ b/exchangis-engines/exchangis-engine-common/src/main/java/com/webank/wedatasphere/exchangis/engine/exception/ExchangisEngineResException.java
@@ -0,0 +1,19 @@
+package com.webank.wedatasphere.exchangis.engine.exception;
+
+import org.apache.linkis.common.exception.ErrorException;
+
+import static com.webank.wedatasphere.exchangis.engine.exception.ExchangisEngineExceptionCode.RESOURCE_ERROR;
+
+/**
+ * Engine resource exception
+ */
+public class ExchangisEngineResException extends ErrorException {
+    public ExchangisEngineResException(String desc) {
+        this(desc, null);
+    }
+
+    public ExchangisEngineResException(String desc, Throwable t) {
+        super(RESOURCE_ERROR.getCode(), desc);
+        super.initCause(t);
+    }
+}
diff --git 
a/exchangis-engines/exchangis-engine-common/src/main/java/com/webank/wedatasphere/exchangis/engine/exception/ExchangisEngineResLoadException.java b/exchangis-engines/exchangis-engine-common/src/main/java/com/webank/wedatasphere/exchangis/engine/exception/ExchangisEngineResLoadException.java new file mode 100644 index 000000000..afebd8911 --- /dev/null +++ b/exchangis-engines/exchangis-engine-common/src/main/java/com/webank/wedatasphere/exchangis/engine/exception/ExchangisEngineResLoadException.java @@ -0,0 +1,19 @@ +package com.webank.wedatasphere.exchangis.engine.exception; + +import static com.webank.wedatasphere.exchangis.engine.exception.ExchangisEngineExceptionCode.RESOURCE_LOAD_ERROR; + +/** + * Engine resource load exception + */ +public class ExchangisEngineResLoadException extends ExchangisEngineResException { + + + public ExchangisEngineResLoadException(String desc) { + super(desc); + } + + public ExchangisEngineResLoadException(String desc, Throwable t) { + super(desc, t); + super.setErrCode(RESOURCE_LOAD_ERROR.getCode()); + } +} diff --git a/exchangis-engines/exchangis-engine-common/src/main/java/com/webank/wedatasphere/exchangis/engine/exception/ExchangisEngineResUploadException.java b/exchangis-engines/exchangis-engine-common/src/main/java/com/webank/wedatasphere/exchangis/engine/exception/ExchangisEngineResUploadException.java new file mode 100644 index 000000000..02e415d62 --- /dev/null +++ b/exchangis-engines/exchangis-engine-common/src/main/java/com/webank/wedatasphere/exchangis/engine/exception/ExchangisEngineResUploadException.java @@ -0,0 +1,17 @@ +package com.webank.wedatasphere.exchangis.engine.exception; + +import static com.webank.wedatasphere.exchangis.engine.exception.ExchangisEngineExceptionCode.RESOURCE_UPLOAD_ERROR; + +/** + * Engine resource upload exception + */ +public class ExchangisEngineResUploadException extends ExchangisEngineResException{ + public ExchangisEngineResUploadException(String desc) { + super(desc); + } + + public ExchangisEngineResUploadException(String desc, Throwable t) { + super(desc, t); + super.setErrCode(RESOURCE_UPLOAD_ERROR.getCode()); + } +} diff --git a/exchangis-engines/exchangis-engine-core/pom.xml b/exchangis-engines/exchangis-engine-core/pom.xml new file mode 100644 index 000000000..379c1dbfd --- /dev/null +++ b/exchangis-engines/exchangis-engine-core/pom.xml @@ -0,0 +1,38 @@ + + + + exchangis + com.webank.wedatasphere.exchangis + ${revision} + ../../pom.xml + + 4.0.0 + + exchangis-engine-core + + + 8 + 8 + + + + + com.webank.wedatasphere.exchangis + exchangis-engine-common + ${project.version} + + + com.webank.wedatasphere.exchangis + exchangis-dao + ${project.version} + + + + org.apache.linkis + linkis-bml-client + ${linkis.version} + + + \ No newline at end of file diff --git a/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/ExchangisEngine.java b/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/ExchangisEngine.java new file mode 100644 index 000000000..2902dbad8 --- /dev/null +++ b/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/ExchangisEngine.java @@ -0,0 +1,29 @@ +package com.webank.wedatasphere.exchangis.engine; + +import com.webank.wedatasphere.exchangis.engine.domain.EngineResource; +import com.webank.wedatasphere.exchangis.engine.domain.EngineSettings; +import com.webank.wedatasphere.exchangis.engine.resource.EngineResourceContainer; + +/** + * Exchangis engine + */ +public 
interface ExchangisEngine { + + /** + * engine name + * @return + */ + String getName(); + + /** + * Settings + * @return settings + */ + EngineSettings getSettings(); + + /** + * Resource container + * @return container + */ + EngineResourceContainer getResourceContainer(); +} diff --git a/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/GenericExchangisEngine.java b/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/GenericExchangisEngine.java new file mode 100644 index 000000000..190934005 --- /dev/null +++ b/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/GenericExchangisEngine.java @@ -0,0 +1,46 @@ +package com.webank.wedatasphere.exchangis.engine; + +import com.webank.wedatasphere.exchangis.engine.domain.EngineResource; +import com.webank.wedatasphere.exchangis.engine.domain.EngineSettings; +import com.webank.wedatasphere.exchangis.engine.resource.EngineResourceContainer; + +import java.util.Objects; + +/** + * Generic implement + */ +public class GenericExchangisEngine implements ExchangisEngine{ + + @Override + public String getName() { + return Objects.nonNull(settings) ? settings.getName(): null; + } + + /** + * Settings + */ + private EngineSettings settings; + + /** + * Resource container + */ + private EngineResourceContainer resourceContainer; + + public EngineSettings getSettings() { + return settings; + } + + public void setSettings(EngineSettings settings) { + this.settings = settings; + } + + @SuppressWarnings("unchecked") + public EngineResourceContainer getResourceContainer() { + return (EngineResourceContainer) resourceContainer; + } + + public void setResourceContainer(EngineResourceContainer resourceContainer) { + this.resourceContainer = resourceContainer; + } + +} diff --git a/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/dao/EngineResourceDao.java b/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/dao/EngineResourceDao.java new file mode 100644 index 000000000..bc0b006b9 --- /dev/null +++ b/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/dao/EngineResourceDao.java @@ -0,0 +1,30 @@ +package com.webank.wedatasphere.exchangis.engine.dao; + +import com.webank.wedatasphere.exchangis.engine.domain.EngineStoreResource; + +import java.util.List; + +/** + * Engine resource dao + */ +public interface EngineResourceDao { + + /** + * Get resources by engine type + * @param engineType engine type + * @return store resource + */ + List getResources(String engineType); + + /** + * Insert Resource + * @param storeResource store resource + */ + void insertResource(EngineStoreResource storeResource); + + /** + * Update resource + * @param storeResource store resource + */ + void updateResource(EngineStoreResource storeResource); +} diff --git a/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/dao/EngineSettingsDao.java b/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/dao/EngineSettingsDao.java new file mode 100644 index 000000000..8ac429feb --- /dev/null +++ b/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/dao/EngineSettingsDao.java @@ -0,0 +1,16 @@ +package com.webank.wedatasphere.exchangis.engine.dao; + +import 
com.webank.wedatasphere.exchangis.engine.domain.EngineSettings; + +import java.util.List; + +/** + * Engine settings dao + */ +public interface EngineSettingsDao { + /** + * Settings + * @return list + */ + List getSettings(); +} diff --git a/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/manager/DefaultExchangisEngineManager.java b/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/manager/DefaultExchangisEngineManager.java new file mode 100644 index 000000000..5a2cd179b --- /dev/null +++ b/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/manager/DefaultExchangisEngineManager.java @@ -0,0 +1,34 @@ +package com.webank.wedatasphere.exchangis.engine.manager; + +import com.webank.wedatasphere.exchangis.engine.ExchangisEngine; +import com.webank.wedatasphere.exchangis.engine.GenericExchangisEngine; +import com.webank.wedatasphere.exchangis.engine.domain.EngineResource; +import com.webank.wedatasphere.exchangis.engine.domain.EngineSettings; +import com.webank.wedatasphere.exchangis.engine.resource.EngineResourceContainer; + +import java.util.HashMap; +import java.util.Map; + + +/** + * Default Engine manager + */ +public class DefaultExchangisEngineManager implements ExchangisEngineManager { + + private static final ExchangisEngine EMPTY_ENGINE = new GenericExchangisEngine(); + /** + * Engine context + */ + protected Map engineContextMap = new HashMap<>(); + @Override + public EngineSettings getSettings(String engine) { + return engineContextMap.getOrDefault(engine, EMPTY_ENGINE).getSettings(); + } + + @Override + @SuppressWarnings("unchecked") + public EngineResourceContainer getResourceContainer(String engine) { + return (EngineResourceContainer) + engineContextMap.getOrDefault(engine, EMPTY_ENGINE).getResourceContainer(); + } +} diff --git a/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/manager/ExchangisEngineManager.java b/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/manager/ExchangisEngineManager.java new file mode 100644 index 000000000..907bb669a --- /dev/null +++ b/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/manager/ExchangisEngineManager.java @@ -0,0 +1,27 @@ +package com.webank.wedatasphere.exchangis.engine.manager; + +import com.webank.wedatasphere.exchangis.engine.domain.EngineResource; +import com.webank.wedatasphere.exchangis.engine.domain.EngineSettings; +import com.webank.wedatasphere.exchangis.engine.resource.EngineResourceContainer; + +/** + * Engine manager + */ +public interface ExchangisEngineManager { + + /** + * + * @param engine engine + * @return + */ + EngineSettings getSettings(String engine); + + /** + * Get engine resource container + * @param engine engine + * @param + * @param + * @return + */ + EngineResourceContainer getResourceContainer(String engine); +} diff --git a/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/provider/ExchangisEngineProvider.java b/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/provider/ExchangisEngineProvider.java new file mode 100644 index 000000000..2dc4f7fb5 --- /dev/null +++ b/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/provider/ExchangisEngineProvider.java @@ -0,0 +1,11 @@ +package 
com.webank.wedatasphere.exchangis.engine.provider; + +import com.webank.wedatasphere.exchangis.engine.ExchangisEngine; + +import java.util.Map; + +public interface ExchangisEngineProvider { + + + ExchangisEngine getEngines(Map params); +} diff --git a/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/resource/AbstractEngineResourceContainer.java b/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/resource/AbstractEngineResourceContainer.java new file mode 100644 index 000000000..8b353c837 --- /dev/null +++ b/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/resource/AbstractEngineResourceContainer.java @@ -0,0 +1,395 @@ +package com.webank.wedatasphere.exchangis.engine.resource; + +import com.webank.wedatasphere.exchangis.engine.config.ExchangisEngineConfiguration; +import com.webank.wedatasphere.exchangis.engine.dao.EngineResourceDao; +import com.webank.wedatasphere.exchangis.engine.domain.EngineResource; +import com.webank.wedatasphere.exchangis.engine.domain.EngineStoreResource; +import com.webank.wedatasphere.exchangis.engine.exception.ExchangisEngineResException; +import com.webank.wedatasphere.exchangis.engine.utils.ResourceUtils; +import org.apache.commons.lang3.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.net.URI; +import java.util.*; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.atomic.AtomicLong; +import java.util.concurrent.locks.ReentrantReadWriteLock; +import java.util.function.Consumer; + +/** + * Abstract engine resource container + */ +public abstract class AbstractEngineResourceContainer implements EngineResourceContainer { + + private static final Logger LOG = LoggerFactory.getLogger(AbstractEngineResourceContainer.class); + + /** + * Resource root uri + */ + protected final URI rootUri; + /** + * Engine type + */ + private final String engineType; + + /** + * Resource dao + */ + protected final EngineResourceDao engineResourceDao; + + /** + * Resource loader in container + */ + protected final EngineResourceLoader engineResourceLoader; + + /** + * Resource uploader in container + */ + protected final EngineResourceUploader engineResourceUploader; + + /** + * Root node + */ + private final ResourcePathNode rootNode = new ResourcePathNode("/"); + + public AbstractEngineResourceContainer(String engineType, String rootPath, EngineResourceDao resourceDao, + EngineResourceLoader resourceLoader, + EngineResourceUploader resourceUploader){ + this.engineType = engineType; + this.rootUri = new File(ResourceUtils.normalizeFilePath(rootPath)).toURI(); + this.engineResourceDao = resourceDao; + this.engineResourceLoader = resourceLoader; + this.engineResourceUploader = resourceUploader; + } + + @Override + public String getEngineType() { + return engineType; + } + + @Override + public List getResources(String resourcePath) { + String[] paths = pathSplit(resourcePath); + if (Objects.nonNull(paths)){ + ResourcePathNode pathNode = searchResPathNode(paths); + if (Objects.nonNull(pathNode)){ + return pathNode.getSubEngineResources(); + } + } + return null; + } + + @Override + public void addResource(String resourcePath, T engineResource) { + String[] paths = pathSplit(resourcePath); + if (Objects.nonNull(paths)){ + operateResPathNode(paths, pathNode -> pathNode.addSubEngineResource(engineResource)); + } + } + + @Override + public void updateResources(String 
resourcePath, T[] engineResources) {
+        String[] paths = pathSplit(resourcePath);
+        if (Objects.nonNull(paths)){
+            ResourcePathNode pathNode = searchResPathNode(paths);
+            if (Objects.nonNull(pathNode)){
+                pathNode.updateSubEngineResource(engineResources);
+            }
+        }
+    }
+
+    @Override
+    public T getResource(String resourcePath, String resourceId) {
+        String[] paths = pathSplit(resourcePath);
+        if (Objects.nonNull(paths)){
+            ResourcePathNode pathNode = searchResPathNode(paths);
+            if (Objects.nonNull(pathNode)){
+                return pathNode.getSubEngineResource(resourceId);
+            }
+        }
+        return null;
+    }
+
+    @Override
+    public void flushResources(String resourcePath) throws ExchangisEngineResException{
+        String[] paths = pathSplit(resourcePath);
+        if (Objects.nonNull(paths)){
+            flushResources(searchResPathNode(paths));
+        }
+    }
+
+    @Override
+    public void flushAllResources() throws ExchangisEngineResException{
+        Queue<ResourcePathNode> queue = new LinkedList<>();
+        queue.offer(this.rootNode);
+        while(!queue.isEmpty()){
+            ResourcePathNode currentNode = queue.poll();
+            if (currentNode.hasSubEngineResources()){
+                flushResources(currentNode);
+            }
+            currentNode.childNodes.values().forEach(queue::offer);
+        }
+    }
+
+    @Override
+    public U getRemoteResource(String resourcePath) {
+        String[] paths = pathSplit(resourcePath);
+        if (Objects.nonNull(paths)){
+            ResourcePathNode pathNode = searchResPathNode(paths);
+            if (Objects.nonNull(pathNode)){
+                return pathNode.getRemoteResource();
+            }
+        }
+        return null;
+    }
+
+    @Override
+    public void removeResource(String resourcePath, String resourceId) {
+        String[] paths = pathSplit(resourcePath);
+        if (Objects.nonNull(paths)){
+            ResourcePathNode pathNode = searchResPathNode(paths);
+            if (Objects.nonNull(pathNode)){
+                pathNode.removeSubEngineResource(resourceId);
+            }
+        }
+    }
+
+    @Override
+    public EngineResourceLoader getResourceLoader() {
+        return engineResourceLoader;
+    }
+
+    @Override
+    public EngineResourceUploader getResourceUploader() {
+        return engineResourceUploader;
+    }
+
+    protected void operateResPathNode(String[] paths, Consumer<ResourcePathNode> operate){
+        operateResPathNode(null, paths, 0, operate);
+    }
+
+    protected ResourcePathNode searchResPathNode(String[] paths){
+        return searchResPathNode(null, paths, 0);
+    }
+    /**
+     * Operate resource path node
+     * @param parentNode parent node
+     * @param paths paths
+     * @param pos pos
+     * @param operate operate function
+     */
+    private void operateResPathNode(ResourcePathNode parentNode, String[] paths, int pos,
+                                    Consumer<ResourcePathNode> operate){
+        int upper = Math.min(pos + 1, paths.length);
+        String[] subPath = new String[upper];
+        System.arraycopy(paths, 0, subPath, 0, upper);
+        // path
+        String path = subPath.length <= 1 ? "/" : StringUtils.join(subPath, "/");
+        ResourcePathNode currentNode;
+        if (null == parentNode){
+            if (path.equals("/")) {
+                currentNode = this.rootNode;
+            } else {
+                LOG.warn("Path: {} should start with '/'", StringUtils.join(paths, "/"));
+                return;
+            }
+        } else {
+            currentNode = parentNode.childNodes.computeIfAbsent(path, ResourcePathNode::new);
+        }
+        if (upper >= paths.length){
+            operate.accept(currentNode);
+        } else {
+            operateResPathNode(currentNode, paths, pos + 1, operate);
+        }
+    }
+
+    /**
+     * Search resource path node
+     * @param parentNode parent node
+     * @param paths paths
+     * @param pos pos
+     * @return resource path node
+     */
+    private ResourcePathNode searchResPathNode(ResourcePathNode parentNode, String[] paths, int pos){
+        int upper = Math.min(pos + 1, paths.length);
+        String[] subPath = new String[upper];
+        System.arraycopy(paths, 0, subPath, 0, upper);
+        // path
+        String path = subPath.length <= 1 ? "/" : StringUtils.join(subPath, "/");
+        ResourcePathNode currentNode;
+        if (null == parentNode){
+            if (path.equals("/")) {
+                currentNode = this.rootNode;
+            } else {
+                LOG.warn("Path: {} should start with '/'", StringUtils.join(paths, "/"));
+                return null;
+            }
+        } else {
+            currentNode = parentNode.childNodes.get(path);
+        }
+        if (upper >= paths.length || Objects.isNull(currentNode)){
+            return currentNode;
+        }
+        return searchResPathNode(currentNode, paths, pos + 1);
+    }
+
+    private void flushResources(ResourcePathNode pathNode) throws ExchangisEngineResException {
+        if(Objects.nonNull(pathNode)){
+            LOG.info("Flush the {} engine resources in path: [{}]", getEngineType(), pathNode.getPath());
+            T nodeEngineRes = mergeNodeEngineResource(pathNode);
+            if (Objects.nonNull(nodeEngineRes)){
+                // Mark the resource under the path
+                nodeEngineRes.setPath(pathNode.path);
+                // Try to upload the node engine resource
+                try {
+                    U uploadedRes = this.engineResourceUploader.upload(nodeEngineRes, pathNode.getRemoteResource());
+                    if (Objects.nonNull(uploadedRes)) {
+                        LOG.info("uploadedRes is {}", uploadedRes);
+                        // Store the uploaded remote resource information
+                        if (Objects.nonNull(pathNode.getRemoteResource())) {
+                            this.engineResourceDao.updateResource(new EngineStoreResource(uploadedRes));
+                        } else {
+                            this.engineResourceDao.insertResource(new EngineStoreResource(uploadedRes));
+                        }
+                        pathNode.setRemoteResource(uploadedRes);
+                    }
+                }catch(Exception e){
+                    // Do not rethrow; a failed upload should not break the flush
+                    LOG.warn("Fail to upload the engine resource in path: [" + pathNode.getPath() + "]", e);
+                }
+            }
+        }
+    }
+    protected String[] pathSplit(String path){
+        return path == null ? 
null : path.split("/"); + } + + /** + * Merge the engine resources in path node + * @param pathNode path node + * @return + */ + protected abstract T mergeNodeEngineResource(ResourcePathNode pathNode); + /** + * Resource path node (in tree) + */ + protected class ResourcePathNode{ + + /** + * Resource path + */ + protected final String path; + /** + * Node lock + */ + protected final ReentrantReadWriteLock nodeLock; + + /** + * Modify time + */ + protected long lastModifyTime = -1; + + /** + * Resource in data + */ + protected final Map subResources = new HashMap<>(); + + /** + * Remote resource + */ + protected U remoteResource; + + /** + * Children nodes + */ + protected Map childNodes = new ConcurrentHashMap<>(); + + public ResourcePathNode(String path){ + this.path = path; + this.nodeLock = new ReentrantReadWriteLock(); + this.lastModifyTime = 0L; + } + + public void updateSubEngineResource(T[] engineResources){ + nodeLock.writeLock().lock(); + try{ + subResources.clear(); + if (Objects.nonNull(engineResources)){ + final AtomicLong modifyTime = new AtomicLong(0); + Arrays.asList(engineResources).forEach(engineResource -> { + Date resourceTime = engineResource.getModifyTime(); + if (resourceTime.getTime() > modifyTime.get()){ + modifyTime.set(resourceTime.getTime()); + } + subResources.put(engineResource.getId(), engineResource); + }); + this.lastModifyTime = modifyTime.get(); + } + } finally { + nodeLock.writeLock().unlock(); + } + } + public void addSubEngineResource(T engineResource){ + nodeLock.writeLock().lock(); + try{ + subResources.put(engineResource.getId(), engineResource); + Date resourceTime = engineResource.getModifyTime(); + if (resourceTime.getTime() > lastModifyTime){ + this.lastModifyTime = resourceTime.getTime(); + } + }finally { + nodeLock.writeLock().unlock(); + } + } + + public List getSubEngineResources(){ + nodeLock.readLock().lock(); + try{ + List resources = new ArrayList<>(); + subResources.forEach((key, resource) -> resources.add(resource)); + return resources; + }finally { + nodeLock.readLock().unlock(); + } + } + public boolean hasSubEngineResources(){ + nodeLock.readLock().lock(); + try{ + return !subResources.isEmpty(); + }finally { + nodeLock.readLock().unlock(); + } + } + public T getSubEngineResource(String resourceId){ + nodeLock.readLock().lock(); + try{ + return subResources.get(resourceId); + }finally { + nodeLock.readLock().unlock(); + } + } + + public EngineResource removeSubEngineResource(String resourceId){ + nodeLock.writeLock().lock(); + try{ + return subResources.remove(resourceId); + }finally { + nodeLock.writeLock().unlock(); + } + } + public U getRemoteResource(){ + return remoteResource; + } + + public void setRemoteResource(U engineResource){ + this.remoteResource = engineResource; + } + + public String getPath() { + return path; + } + } + +} diff --git a/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/resource/AbstractEngineResourceLoader.java b/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/resource/AbstractEngineResourceLoader.java new file mode 100644 index 000000000..2933c6453 --- /dev/null +++ b/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/resource/AbstractEngineResourceLoader.java @@ -0,0 +1,22 @@ +package com.webank.wedatasphere.exchangis.engine.resource; + +import com.webank.wedatasphere.exchangis.engine.domain.EngineResource; +import 
com.webank.wedatasphere.exchangis.engine.resource.uri.ResourceURLStreamHandlerFactory; +import org.apache.linkis.common.conf.CommonVars; + +import java.net.URL; + +/** + * Abstract resource loader + * @param + */ +public abstract class AbstractEngineResourceLoader implements EngineResourceLoader{ + /** + * Support schemes for uri + */ + private static final CommonVars SUPPORT_SCHEMES = CommonVars.apply("wds.exchangis.engine.resource.schemes", "bml,hdfs,viewfs"); + static{ + URL.setURLStreamHandlerFactory(new ResourceURLStreamHandlerFactory( + SUPPORT_SCHEMES.getValue().split(","))); + } +} diff --git a/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/resource/DefaultEngineResourceContainer.java b/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/resource/DefaultEngineResourceContainer.java new file mode 100644 index 000000000..07117fd5b --- /dev/null +++ b/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/resource/DefaultEngineResourceContainer.java @@ -0,0 +1,107 @@ +package com.webank.wedatasphere.exchangis.engine.resource; + +import com.webank.wedatasphere.exchangis.engine.config.ExchangisEngineConfiguration; +import com.webank.wedatasphere.exchangis.engine.dao.EngineResourceDao; +import com.webank.wedatasphere.exchangis.engine.domain.EngineBmlResource; +import com.webank.wedatasphere.exchangis.engine.domain.EngineLocalPathResource; +import com.webank.wedatasphere.exchangis.engine.domain.EngineResource; +import com.webank.wedatasphere.exchangis.engine.domain.EngineStoreResource; +import com.webank.wedatasphere.exchangis.engine.utils.ResourceUtils; +import org.apache.commons.io.IOUtils; +import org.apache.commons.lang3.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileOutputStream; +import java.io.IOException; +import java.util.List; +import java.util.Objects; +import java.util.UUID; +import java.util.concurrent.locks.ReentrantReadWriteLock; +import java.util.stream.Collectors; + +public class DefaultEngineResourceContainer extends AbstractEngineResourceContainer { + + private static final Logger LOG = LoggerFactory.getLogger(DefaultEngineResourceContainer.class); + + public DefaultEngineResourceContainer(String engineType, String rootPath, EngineResourceDao resourceDao, + EngineResourceLoader resourceLoader, + EngineResourceUploader resourceUploader) { + super(engineType, rootPath, resourceDao, resourceLoader, resourceUploader); + } + + @Override + public void init() { + List storeResources = this.engineResourceDao.getResources(getEngineType()); + storeResources.forEach(storeResource -> { + String path = storeResource.getPath(); + if (StringUtils.isNotBlank(path)){ + operateResPathNode(pathSplit(path), resourcePathNode -> + resourcePathNode.setRemoteResource(new EngineBmlResource(storeResource))); + } + }); + } + /** + * + * @param pathNode resource path node + * @return engine resource + */ + protected EngineLocalPathResource mergeNodeEngineResource(ResourcePathNode pathNode){ + if (Objects.isNull(pathNode.getRemoteResource()) || pathNode.getRemoteResource() + .getModifyTime().getTime() < pathNode.lastModifyTime) { + ReentrantReadWriteLock nodeLock = pathNode.nodeLock; + List resourcesFiltered; + nodeLock.readLock().lock(); + try { + resourcesFiltered = pathNode.subResources.values().stream().filter(Objects::nonNull) + .collect(Collectors.toList()); + }finally { + 
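+                // The read lock only guards the snapshot of sub-resources;
+                // the merge below works on the copied list.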
nodeLock.readLock().unlock();
+            }
+            if (resourcesFiltered.size() == 1 && resourcesFiltered.get(0).isPacket()){
+                // Already a single packet resource, return it directly
+                return resourcesFiltered.get(0);
+            }
+            // Merged resource is a local resource, its name is equal to the path in pathNode
+            String mergedResourcePath;
+            boolean temp = false;
+            if (ExchangisEngineConfiguration.ENGINE_RESOURCE_MERGE_LOCAL.getValue()) {
+                // Need to store the merged resource into local path
+                String rootPath = rootUri.getPath();
+                mergedResourcePath = (rootPath.endsWith(IOUtils.DIR_SEPARATOR + "")? rootPath : rootPath + IOUtils.DIR_SEPARATOR )
+                        + pathNode.getPath() + ExchangisEngineConfiguration.ENGINE_RESOURCE_PACKET_SUFFIX.getValue();
+            } else {
+                File temporaryPath = new File(ExchangisEngineConfiguration.ENGINE_RESOURCE_TMP_PATH.getValue());
+                if (temporaryPath.mkdir()) {
+                    LOG.info("Auto-created the engine temporary directory [{}]", temporaryPath.getAbsolutePath());
+                }
+                mergedResourcePath = temporaryPath.getAbsolutePath() + IOUtils.DIR_SEPARATOR + UUID.randomUUID();
+                temp = true;
+            }
+            synchronized ((getEngineType() + ":" + pathNode.getPath()).intern()){
+                // 1. Delete the existing local resource
+                File resourceFile = new File(mergedResourcePath);
+                if (resourceFile.exists()){
+                    if (resourceFile.delete()){
+                        LOG.info("Deleted the existing local resource file [{}] beforehand", resourceFile.getPath());
+                    }else {
+                        LOG.warn("Failed to delete the existing local resource file [{}], please check the file permissions or whether another program is occupying it!", resourceFile.getPath());
+                    }
+                }
+                try {
+                    if (resourceFile.createNewFile()) {
+                        // Close the output stream after combining to avoid leaking the file handle
+                        try (FileOutputStream output = new FileOutputStream(resourceFile)) {
+                            ResourceUtils.combinePacket(resourcesFiltered.stream().toArray(value -> new EngineResource[resourcesFiltered.size()]), output);
+                        }
+                        if (temp) {
+                            resourceFile.deleteOnExit();
+                        }
+                        return new EngineLocalPathResource(getEngineType(), rootUri,
+                                pathNode.getPath() + ExchangisEngineConfiguration.ENGINE_RESOURCE_PACKET_SUFFIX.getValue());
+                    }
+                } catch (IOException e) {
+                    LOG.warn("Exception in combining and packing resources in [{}]", pathNode.getPath(), e);
+                }
+            }
+        }
+        return null;
+    }
+}
diff --git a/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/resource/DefaultEngineResourcePathScanner.java b/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/resource/DefaultEngineResourcePathScanner.java
new file mode 100644
index 000000000..3cd53a1c0
--- /dev/null
+++ b/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/resource/DefaultEngineResourcePathScanner.java
@@ -0,0 +1,139 @@
+package com.webank.wedatasphere.exchangis.engine.resource;
+
+import com.webank.wedatasphere.exchangis.engine.config.ExchangisEngineConfiguration;
+import com.webank.wedatasphere.exchangis.engine.domain.EngineLocalPathResource;
+import com.webank.wedatasphere.exchangis.engine.domain.EngineResource;
+import com.webank.wedatasphere.exchangis.engine.exception.ExchangisEngineResException;
+import com.webank.wedatasphere.exchangis.engine.exception.ExchangisEngineResLoadException;
+import com.webank.wedatasphere.exchangis.engine.utils.ResourceUtils;
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.io.FilenameUtils;
+import org.apache.commons.io.IOUtils;
+import org.apache.commons.lang3.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.File;
+import java.io.FilenameFilter;
+import java.net.URI;
+import java.nio.file.Files;
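+// Illustrative usage of this scanner (a minimal sketch; the root path below is a
+// placeholder, and DataxEngineResourceLoader is the concrete loader added later in
+// this patch):
+//   EngineResourcePathScanner scanner = new DefaultEngineResourcePathScanner();
+//   scanner.registerResourceLoader(new DataxEngineResourceLoader());
+//   Set<EngineLocalPathResource> found = scanner.doScan("/data/exchangis/engines");
+//   found.forEach(res -> LOG.info("Found engine resource: {}", res.getPath()));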
+import java.nio.file.Path; +import java.util.*; +import java.util.concurrent.ConcurrentHashMap; +import java.util.function.BiFunction; +import java.util.function.Function; +import java.util.stream.Collectors; + +/** + * Default path scanner + */ +public class DefaultEngineResourcePathScanner implements EngineResourcePathScanner{ + + private static final Logger LOG = LoggerFactory.getLogger(DefaultEngineResourcePathScanner.class); + /** + * Resource loader list + */ + private final Map> resourceLoaders = new ConcurrentHashMap<>(); + + @Override + public void registerResourceLoader(EngineResourceLoader resourceLoader) { + LOG.info("Register the resource loader: '{}'", resourceLoader.getClass().getCanonicalName()); + this.resourceLoaders.put(resourceLoader.engineType(), resourceLoader); + } + + @Override + public Set doScan(String rootPath) throws ExchangisEngineResException { + rootPath = FilenameUtils.normalize(rootPath); + File rootFile = new File(rootPath); + List resources = new ArrayList<>(); + if (!rootFile.exists()){ + throw new ExchangisEngineResLoadException("The engine resource root path: [" + rootPath +"] doesn't exist"); + } + if (rootFile.isFile()){ + throw new ExchangisEngineResLoadException("The engine resource root path: [" + rootPath + "] should be a directory/link, but not a file"); + } else { + LOG.info("Start to scan the resource root path: [{}]", rootPath); + resourceLoaders.forEach((engine, resourceLoader) ->{ + File engineFile = new File(rootFile, engine.toLowerCase()); + if (engineFile.exists() && engineFile.isDirectory()){ + LOG.info("Scan the resource path for engine: [{}] in [{}]", engine.toLowerCase(), engineFile.getPath()); + resources.addAll(scanPathAndLoadResource(rootFile.toURI(), + IOUtils.DIR_SEPARATOR + engineFile.getName(), (baseUri, path) -> resourceLoader.accept(baseUri, path)? 
resourceLoader : null));
+                } else {
+                    LOG.warn("Cannot find the resource path for engine: [{}] in [{}], ignoring it.", engine.toLowerCase(), engineFile.getPath());
+                }
+            });
+        }
+        return new HashSet<>(resources);
+    }
+
+
+    private List scanPathAndLoadResource(URI baseUri, String path,
+                                         BiFunction> getResLoader) {
+        List resources = new ArrayList<>();
+        File rootFile = new File(baseUri.getPath(), path);
+        if (rootFile.isDirectory()) {
+            File[] childFiles = rootFile.listFiles((dir, name) -> {
+                // Skip hidden files
+                return !name.startsWith(".");
+            });
+            if (Objects.nonNull(childFiles)) {
+                List scanDirs = new ArrayList<>();
+                List skipNames = new ArrayList<>();
+                List directories = Arrays.stream(childFiles)
+                        .filter(File::isDirectory).collect(Collectors.toList());
+                directories.forEach(dir -> {
+                    try {
+                        String dirPath = path + IOUtils.DIR_SEPARATOR + dir.getName();
+                        EngineResourceLoader resourceLoader
+                                = getResLoader.apply(baseUri, dirPath);
+                        if (Objects.nonNull(resourceLoader)) {
+                            resources.addAll(Arrays.asList(resourceLoader.loadResource(baseUri, dirPath)));
+                            skipNames.add(dir.getName());
+                        } else {
+                            scanDirs.add(dir);
+                        }
+                    } catch (Exception e) {
+                        LOG.warn("Exception in loading engine directory resource: [" + dir.getPath() + "]", e);
+                    }
+                });
+                List rawFiles = Arrays.stream(childFiles).filter(file ->
+                        file.isFile() && skipNames.stream().noneMatch(skipName ->
+                                file.getName().equals(skipName + ExchangisEngineConfiguration.ENGINE_RESOURCE_PACKET_SUFFIX.getValue())))
+                        .collect(Collectors.toList());
+                rawFiles.forEach(rawFile -> {
+                    try {
+                        String rawFilePath = path + IOUtils.DIR_SEPARATOR + rawFile.getName();
+                        EngineResourceLoader resourceLoader =
+                                getResLoader.apply(baseUri, rawFilePath);
+                        if (Objects.nonNull(resourceLoader)){
+                            EngineLocalPathResource[] resArray = resourceLoader.loadResource(baseUri, rawFilePath);
+                            if (resArray.length == 1 && rawFile.getName()
+                                    .endsWith(ExchangisEngineConfiguration.ENGINE_RESOURCE_PACKET_SUFFIX.getValue())) {
+                                LOG.info("Mark the engine resource: [{}] as a packet({}) resource", rawFile.getPath(),
+                                        ExchangisEngineConfiguration.ENGINE_RESOURCE_PACKET_SUFFIX.getValue());
+                                resArray[0].setPacket(true);
+                                Path source = rawFile.toPath();
+                                // Use substringBeforeLast to stay consistent with the path update below
+                                Path dest = source.resolveSibling(StringUtils.substringBeforeLast(rawFile.getName(), "."));
+                                if (!Files.isDirectory(dest)) {
+                                    Files.createDirectory(dest);
+                                }
+                                LOG.info("Unpack the engine resource: [{}] to [{}]", source, dest);
+                                ResourceUtils.unPacket(source, dest);
+                                // Update the path value
+                                resArray[0].setPath(StringUtils.substringBeforeLast(rawFilePath, "."));
+                            }
+                            resources.addAll(Arrays.asList(resArray));
+                        }
+                    } catch (Exception e){
+                        LOG.warn("Exception in loading engine file resource: [" + rawFile.getPath() + "]", e);
+                    }
+                });
+                for(File scanDir : scanDirs) {
+                    resources.addAll(scanPathAndLoadResource(baseUri, path + IOUtils.DIR_SEPARATOR + scanDir.getName(), getResLoader));
+                }
+            }
+        }
+        return resources;
+    }
+}
diff --git a/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/resource/EngineResourceContainer.java b/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/resource/EngineResourceContainer.java
new file mode 100644
index 000000000..c698f3450
--- /dev/null
+++ b/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/resource/EngineResourceContainer.java
@@ -0,0 +1,89 @@
+package com.webank.wedatasphere.exchangis.engine.resource;
+
+import 
com.webank.wedatasphere.exchangis.engine.domain.EngineResource; +import com.webank.wedatasphere.exchangis.engine.exception.ExchangisEngineResException; + +import java.util.List; + +/** + * Resource container + */ +public interface EngineResourceContainer { + + /** + * Engine type related + * @return string + */ + String getEngineType(); + + /** + * Init method + */ + void init(); + /** + * Get existed engine resources from resource path + * @param resourcePath resource path + * @return engine resources + */ + List getResources(String resourcePath); + + /** + * Add resource to path in container + * @param resourcePath resource path + * @param engineResource engine resource + */ + void addResource(String resourcePath, T engineResource); + + /** + * Update resource + * @param resourcePath resource path + * @param engineResource engine resource + * @return resource list + */ + void updateResources(String resourcePath, T[] engineResource); + + /** + * Get resource by path and id + * @param resourcePath resource path + * @param resourceId resource id + * @return engine + */ + T getResource(String resourcePath, String resourceId); + + /** + * Flush(upload) resources in path + * @param resourcePath resource path + * @return (merged)resource + */ + void flushResources(String resourcePath) throws ExchangisEngineResException; + + /** + * Flush(upload) all the resources in container + */ + void flushAllResources() throws ExchangisEngineResException; + + /** + * Get remote(upload) resource in path + * @param resourcePath resource path + * @return (merged)resource + */ + U getRemoteResource(String resourcePath); + /** + * Remove resource + * @param resourcePath resource path + * @param resourceId resource id + */ + void removeResource(String resourcePath, String resourceId); + + /** + * Engine resource loader + * @return scanner + */ + EngineResourceLoader getResourceLoader(); + + /** + * Engine resource uploader + * @return resource uploader + */ + EngineResourceUploader getResourceUploader(); +} diff --git a/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/resource/EngineResourceLoader.java b/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/resource/EngineResourceLoader.java new file mode 100644 index 000000000..dd5a74241 --- /dev/null +++ b/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/resource/EngineResourceLoader.java @@ -0,0 +1,31 @@ +package com.webank.wedatasphere.exchangis.engine.resource; + +import com.webank.wedatasphere.exchangis.engine.domain.EngineResource; +import com.webank.wedatasphere.exchangis.engine.exception.ExchangisEngineResLoadException; + +import java.net.URI; + +/** + * Engine resource loader + * @param + */ +public interface EngineResourceLoader { + /** + * Engine type + * @return engine + */ + String engineType(); + /** + * Accept uri + * @param baseUri uri + * @return boolean + */ + boolean accept(URI baseUri, String path); + + /** + * Load resources from uri + * @param baseUri uri + * @return resource array + */ + T[] loadResource(URI baseUri, String path) throws ExchangisEngineResLoadException; +} diff --git a/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/resource/EngineResourcePathScanner.java b/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/resource/EngineResourcePathScanner.java new file mode 100644 index 000000000..1621abb7b --- 
/dev/null +++ b/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/resource/EngineResourcePathScanner.java @@ -0,0 +1,23 @@ +package com.webank.wedatasphere.exchangis.engine.resource; + +import com.webank.wedatasphere.exchangis.engine.domain.EngineLocalPathResource; +import com.webank.wedatasphere.exchangis.engine.exception.ExchangisEngineResException; + +import java.util.Set; + +/** + * Engine resource path scanner + */ +public interface EngineResourcePathScanner { + + /** + * Register resource loader + * @param resourceLoader resource loader + */ + void registerResourceLoader(EngineResourceLoader resourceLoader); + /** + * Scan entrance + * @param rootPath root path + */ + Set doScan(String rootPath) throws ExchangisEngineResException; +} diff --git a/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/resource/EngineResourceUploader.java b/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/resource/EngineResourceUploader.java new file mode 100644 index 000000000..3e059e937 --- /dev/null +++ b/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/resource/EngineResourceUploader.java @@ -0,0 +1,21 @@ +package com.webank.wedatasphere.exchangis.engine.resource; + +import com.webank.wedatasphere.exchangis.engine.domain.EngineResource; +import com.webank.wedatasphere.exchangis.engine.exception.ExchangisEngineResUploadException; + +/** + * Engine resource uploader + * @param + * @param + */ +public interface EngineResourceUploader { + + /** + * upload method + * @param needUploadResource resource need to be uploaded + * @return uploaded resource + */ + R upload(T needUploadResource) throws ExchangisEngineResUploadException; + + R upload(T needUploadResource, R relatedResource) throws ExchangisEngineResUploadException; +} diff --git a/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/resource/bml/BmlClients.java b/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/resource/bml/BmlClients.java new file mode 100644 index 000000000..bb41f626e --- /dev/null +++ b/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/resource/bml/BmlClients.java @@ -0,0 +1,20 @@ +package com.webank.wedatasphere.exchangis.engine.resource.bml; + +import org.apache.linkis.bml.client.BmlClient; +import org.apache.linkis.bml.client.BmlClientFactory; + +/** + * BML client + */ +public class BmlClients { + + private static final BmlClient DEFAULT_CLIENT; + static{ + //TODO use the common client configuration + DEFAULT_CLIENT = BmlClientFactory.createBmlClient(); + } + + public static BmlClient getInstance(){ + return DEFAULT_CLIENT; + } +} diff --git a/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/resource/bml/BmlEngineResourceUploader.java b/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/resource/bml/BmlEngineResourceUploader.java new file mode 100644 index 000000000..a65a3cd0d --- /dev/null +++ b/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/resource/bml/BmlEngineResourceUploader.java @@ -0,0 +1,53 @@ +package com.webank.wedatasphere.exchangis.engine.resource.bml; + +import com.webank.wedatasphere.exchangis.engine.domain.EngineBmlResource; +import 
com.webank.wedatasphere.exchangis.engine.domain.EngineLocalPathResource;
+import com.webank.wedatasphere.exchangis.engine.exception.ExchangisEngineResUploadException;
+import com.webank.wedatasphere.exchangis.engine.resource.EngineResourceUploader;
+import org.apache.linkis.bml.protocol.BmlUpdateResponse;
+import org.apache.linkis.bml.protocol.BmlUploadResponse;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.Objects;
+
+/**
+ * BML engine resource uploader
+ */
+public class BmlEngineResourceUploader implements EngineResourceUploader<EngineLocalPathResource, EngineBmlResource> {
+
+    private static final Logger LOG = LoggerFactory.getLogger(BmlEngineResourceUploader.class);
+
+    @Override
+    public EngineBmlResource upload(EngineLocalPathResource res) throws ExchangisEngineResUploadException {
+        try {
+            BmlUploadResponse uploadResponse = BmlClients.getInstance()
+                    .uploadResource(res.getCreator(), res.getName(), res.getInputStream());
+            return new EngineBmlResource(res.getEngineType(), res.getPath(),
+                    res.getName(), uploadResponse.resourceId(), uploadResponse.version(), res.getCreator());
+        } catch (Exception e){
+            throw new ExchangisEngineResUploadException(
+                    "Failed to upload resource: [name: " + res.getName() + ", path: " + res.getPath() +
+                            ", type: "+ res.getType() + ", creator: "+ res.getCreator() + "]", e);
+        }
+    }
+
+    @Override
+    public EngineBmlResource upload(EngineLocalPathResource res, EngineBmlResource relatedResource) throws ExchangisEngineResUploadException {
+        if (Objects.isNull(relatedResource)){
+            return upload(res);
+        }
+        try {
+            BmlUpdateResponse response = BmlClients.getInstance()
+                    .updateResource(res.getCreator(), relatedResource.getResourceId(),
+                            res.getName(), res.getInputStream());
+            return new EngineBmlResource(relatedResource.getEngineType(), res.getPath(),
+                    res.getName(), response.resourceId(), response.version(), res.getCreator());
+        } catch (Exception e){
+            throw new ExchangisEngineResUploadException(
+                    "Failed to upload resource: [name: " + res.getName() + ", path: " + res.getPath() +
+                            ", type: "+ res.getType() + ", resourceId: " + relatedResource.getResourceId() +
+                            ", creator: "+ res.getCreator() + "]", e);
+        }
+    }
+}
diff --git a/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/resource/loader/AbstractEngineLocalPathResourceLoader.java b/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/resource/loader/AbstractEngineLocalPathResourceLoader.java
new file mode 100644
index 000000000..34705b1e7
--- /dev/null
+++ b/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/resource/loader/AbstractEngineLocalPathResourceLoader.java
@@ -0,0 +1,129 @@
+package com.webank.wedatasphere.exchangis.engine.resource.loader;
+
+import com.webank.wedatasphere.exchangis.engine.domain.EngineLocalPathResource;
+import com.webank.wedatasphere.exchangis.engine.exception.ExchangisEngineResLoadException;
+import com.webank.wedatasphere.exchangis.engine.resource.AbstractEngineResourceLoader;
+import org.apache.commons.io.IOUtils;
+import org.apache.commons.lang3.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.File;
+import java.net.URI;
+import java.util.Arrays;
+import java.util.Date;
+import java.util.Objects;
+import java.util.regex.Pattern;
+
+/**
+ * Load the engine resources by local path
+ */
+public abstract class AbstractEngineLocalPathResourceLoader extends AbstractEngineResourceLoader<EngineLocalPathResource> {
+
+    private static final 
Logger LOG = LoggerFactory.getLogger(AbstractEngineLocalPathResourceLoader.class); + private static final String DEFAULT_SUPPORT_SCHEMA = "file"; + + /** + * Pattern object + */ + private Pattern[] patterns = new Pattern[0]; + + public AbstractEngineLocalPathResourceLoader(){ + String[] pathPatterns = pathPatterns(); + if (Objects.nonNull(pathPatterns)){ + patterns = new Pattern[pathPatterns.length]; + for(int i = 0; i < pathPatterns.length; i++){ + Pattern pattern = Pattern.compile(pathPatterns[i]); + patterns[i] = pattern; + } + } + } + + @Override + public boolean accept(URI baseUri, String path) { + if (StringUtils.isBlank(baseUri.getScheme()) || DEFAULT_SUPPORT_SCHEMA.equals(baseUri.getScheme())){ + return Arrays.stream(patterns) + .anyMatch(pattern -> pattern.matcher(path).matches()); + } + return false; + } + + @Override + public EngineLocalPathResource[] loadResource(URI baseUri, String path) throws ExchangisEngineResLoadException { + LOG.info("Load local engine resource, path: {}", path); + String scheme = baseUri.getScheme(); + if (StringUtils.isBlank(baseUri.getScheme()) || DEFAULT_SUPPORT_SCHEMA.equals(scheme)){ + return loadLocalResource(baseUri, path); + } else { + throw new ExchangisEngineResLoadException("Unsupported scheme: [" + scheme + "] in basic uri: [" + baseUri + "] for local resource loader."); + } + } + + /** + * Path pattern list + * @return pattern string array + */ + protected abstract String[] pathPatterns(); + /** + * Load local resource + * @param path path + * @return resource array + */ + private EngineLocalPathResource[] loadLocalResource(URI baseUri, String path) throws ExchangisEngineResLoadException { + File localFile = new File(baseUri.getPath(), path); + EngineLocalPathResource[] resources = new EngineLocalPathResource[0]; + if (localFile.isDirectory()) { + File[] resourceFiles = localFile.listFiles(); + if (Objects.nonNull(resourceFiles)) { + resources = new EngineLocalPathResource[resourceFiles.length]; + for (int i = 0; i < resources.length; i++) { + resources[i] = createLocalResource(resourceFiles[i], baseUri, path); + } + } + } else if (localFile.isFile()) { + resources = new EngineLocalPathResource[]{createLocalResource(localFile, baseUri, path)}; + } + // Important: make all the resources have the same value in 'path' + for(EngineLocalPathResource resource : resources){ + resource.setPath(path); + } + return resources; + } + + /** + * Create local resource + * @param localFile local file + * @param baseUri base uri + * @param path path + * @return local resource + */ + private EngineLocalPathResource createLocalResource(File localFile, URI baseUri, String path){ + EngineLocalPathResource localResource = new EngineLocalPathResource(engineType(), baseUri, + path + IOUtils.DIR_SEPARATOR + localFile.getName()); + long lastModifyTime = traverseExtractTime(localFile, 0L); + localResource.setCreateTime(new Date(lastModifyTime)); + localResource.setModifyTime(new Date(lastModifyTime)); + return localResource; + } + /** + * Traverse the extract last time + * @param localFile local file + * @param timestamp timestamp + * @return + */ + private long traverseExtractTime(File localFile, long timestamp){ + long lastTime = timestamp; + if (localFile.lastModified() > lastTime){ + lastTime = localFile.lastModified(); + } + if (localFile.isDirectory()){ + File[] subFiles = localFile.listFiles(); + if (Objects.nonNull(subFiles)) { + for (File subFile : subFiles) { + lastTime = traverseExtractTime(subFile, lastTime); + } + } + } + return lastTime; + } +} diff 
--git a/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/resource/loader/datax/DataxEngineResourceConf.java b/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/resource/loader/datax/DataxEngineResourceConf.java new file mode 100644 index 000000000..20dc2f841 --- /dev/null +++ b/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/resource/loader/datax/DataxEngineResourceConf.java @@ -0,0 +1,14 @@ +package com.webank.wedatasphere.exchangis.engine.resource.loader.datax; + +import org.apache.linkis.common.conf.CommonVars; + +/** + * Resource config for datax + */ +public class DataxEngineResourceConf { + + /** + * Resource path prefix + */ + public static final CommonVars RESOURCE_PATH_PREFIX = CommonVars.apply("wds.exchangis.engine.datax.resource.path-prefix", "/datax/plugin"); +} diff --git a/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/resource/loader/datax/DataxEngineResourceLoader.java b/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/resource/loader/datax/DataxEngineResourceLoader.java new file mode 100644 index 000000000..cdec9aeae --- /dev/null +++ b/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/resource/loader/datax/DataxEngineResourceLoader.java @@ -0,0 +1,30 @@ +package com.webank.wedatasphere.exchangis.engine.resource.loader.datax; + +import com.webank.wedatasphere.exchangis.engine.resource.loader.AbstractEngineLocalPathResourceLoader; +import org.apache.commons.lang.StringUtils; +import org.apache.linkis.common.conf.CommonVars; + +import java.util.regex.Pattern; + +/** + * Datax engine resource loader + */ +public class DataxEngineResourceLoader extends AbstractEngineLocalPathResourceLoader { + + private static final CommonVars ENGINE_DATAX_LOADER_PATH_PATTERN = CommonVars.apply("engine.datax.resource.loader.path-pattern", + StringUtils.join(new String[]{ + DataxEngineResourceConf.RESOURCE_PATH_PREFIX.getValue() + "/reader/.*[/]?", + DataxEngineResourceConf.RESOURCE_PATH_PREFIX.getValue() + "/writer/.*[/]?" + }, ",")); + @Override + protected String[] pathPatterns() { + return ENGINE_DATAX_LOADER_PATH_PATTERN.getValue().split(","); + } + + + @Override + public String engineType() { + return "datax"; + } + +} diff --git a/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/resource/uri/ResourceURLStreamHandler.java b/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/resource/uri/ResourceURLStreamHandler.java new file mode 100644 index 000000000..93fe1c9a7 --- /dev/null +++ b/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/resource/uri/ResourceURLStreamHandler.java @@ -0,0 +1,31 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.exchangis.engine.resource.uri; + +import java.io.IOException; +import java.net.URL; +import java.net.URLConnection; +import java.net.URLStreamHandler; + +/** + * URL stream handler for linkis client (cannot open connection) + */ +public class ResourceURLStreamHandler extends URLStreamHandler { + @Override + protected URLConnection openConnection(URL url) throws IOException { + throw new IllegalArgumentException("Cannot open connection for url [" + url + "]"); + } +} diff --git a/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/resource/uri/ResourceURLStreamHandlerFactory.java b/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/resource/uri/ResourceURLStreamHandlerFactory.java new file mode 100644 index 000000000..bcf6c10e5 --- /dev/null +++ b/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/resource/uri/ResourceURLStreamHandlerFactory.java @@ -0,0 +1,51 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.exchangis.engine.resource.uri; + +import java.net.URLStreamHandler; +import java.net.URLStreamHandlerFactory; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +/** + * Engine resource stream handler factory (support specific schemas) + */ +public class ResourceURLStreamHandlerFactory implements URLStreamHandlerFactory { + + /** + * Support schemas + */ + private final List supportSchemas = new ArrayList<>(); + + /** + * Stream handler + */ + private final URLStreamHandler defaultStreamHandler; + + public ResourceURLStreamHandlerFactory(String... 
schemas){ + supportSchemas.addAll(Arrays.asList(schemas)); + this.defaultStreamHandler = new ResourceURLStreamHandler(); + } + + @Override + public URLStreamHandler createURLStreamHandler(String protocol) { + if (supportSchemas.stream().anyMatch( schema -> schema.equals(protocol))){ + return this.defaultStreamHandler; + } + return null; + } +} diff --git a/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/utils/ResourceUtils.java b/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/utils/ResourceUtils.java new file mode 100644 index 000000000..f63422313 --- /dev/null +++ b/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/utils/ResourceUtils.java @@ -0,0 +1,134 @@ +package com.webank.wedatasphere.exchangis.engine.utils; + +import com.webank.wedatasphere.exchangis.engine.domain.EngineLocalPathResource; +import com.webank.wedatasphere.exchangis.engine.domain.EngineResource; +import org.apache.commons.io.FilenameUtils; +import org.apache.commons.io.IOUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.nio.file.Files; +import java.nio.file.LinkOption; +import java.nio.file.Path; +import java.nio.file.StandardOpenOption; +import java.util.Objects; +import java.util.stream.Collectors; +import java.util.zip.ZipEntry; +import java.util.zip.ZipFile; +import java.util.zip.ZipInputStream; +import java.util.zip.ZipOutputStream; + +/** + * Resource utils + */ +public class ResourceUtils { + + private static final Logger LOG = LoggerFactory.getLogger(ResourceUtils.class); + + private static final Integer BUFFER_SIZE = 2 * 1024; + + public static String normalizeFilePath(String path){ + return FilenameUtils.normalize(path); + } + + /** + * Combine the resources and packet + * @param resources resources + * @param outputStream output stream + * @throws IOException + */ + public static void combinePacket(EngineResource[] resources, OutputStream outputStream) throws IOException { + LOG.info("Start to combine the resources to packet file..."); + long startTime = System.currentTimeMillis(); + try(ZipOutputStream zipOutputStream = new ZipOutputStream(outputStream)) { + for (EngineResource resource : resources) { + if (resource instanceof EngineLocalPathResource) { + packet(resource.getName(), ((EngineLocalPathResource) resource).getLocalFile().toPath(), + zipOutputStream); + } else { + packet(resource.getName(), resource.getInputStream(), zipOutputStream); + } + } + } + LOG.info("Success to combine the resources to packet file, taken: {}", System.currentTimeMillis() - startTime); + } + + public static void packet(Path source, Path target, boolean includeBaseDir) throws IOException { + + } + + public static void unPacket(Path source, Path target) throws IOException{ + if (Files.isRegularFile(source, LinkOption.NOFOLLOW_LINKS)){ + ZipFile zipFile = new ZipFile(source.toFile()); + InputStream inputStream = Files.newInputStream(source); + try(ZipInputStream zipInputStream = new ZipInputStream(inputStream)) { + ZipEntry zipEntry = null; + while (null != (zipEntry = zipInputStream.getNextEntry())) { + Path entryPath = target.resolve(zipEntry.getName()); + if (zipEntry.isDirectory()) { + if (!Files.isDirectory(entryPath)) { + Files.createDirectories(entryPath); + } + } else { + try (InputStream entryStream = 
zipFile.getInputStream(zipEntry)) { + try (OutputStream outputStream = Files.newOutputStream(entryPath, StandardOpenOption.CREATE_NEW)) { + byte[] buffer = new byte[BUFFER_SIZE]; + int pos = -1; + while ((pos = entryStream.read(buffer)) != -1) { + outputStream.write(buffer, 0, pos); + } + outputStream.flush(); + } + } + } + } + } + } + } + + /** + * Packet path source + * @param name name + * @param source source path + * @param outputStream stream + * @throws IOException + */ + private static void packet(String name, Path source, ZipOutputStream outputStream) throws IOException { + if (Files.isDirectory(source, LinkOption.NOFOLLOW_LINKS)){ + name = name + IOUtils.DIR_SEPARATOR_UNIX; + // Accept empty directory + ZipEntry zipEntry = new ZipEntry(name); + outputStream.putNextEntry(zipEntry); + outputStream.closeEntry(); + for(Path child : Files.list(source).collect(Collectors.toList())) { + packet(name + child.toFile().getName(), child, outputStream); + } + } else if (Files.isRegularFile(source, LinkOption.NOFOLLOW_LINKS)){ + packet(name, Files.newInputStream(source), outputStream); + } + } + + /** + * Packet input stream + * @param name name + * @param inputStream input stream + * @param outputStream output stream + * @throws IOException + */ + private static void packet(String name, InputStream inputStream, ZipOutputStream outputStream) throws IOException{ + if (Objects.nonNull(inputStream)) { + ZipEntry zipEntry = new ZipEntry(name); + outputStream.putNextEntry(zipEntry); + byte[] buffer = new byte[BUFFER_SIZE]; + int pos = -1; + while ((pos = inputStream.read(buffer)) != -1) { + outputStream.write(buffer, 0, pos); + } + outputStream.closeEntry(); + } + } +} diff --git a/exchangis-engines/exchangis-engine-server/pom.xml b/exchangis-engines/exchangis-engine-server/pom.xml new file mode 100644 index 000000000..5671e01bb --- /dev/null +++ b/exchangis-engines/exchangis-engine-server/pom.xml @@ -0,0 +1,57 @@ + + + + exchangis + com.webank.wedatasphere.exchangis + ${revision} + ../../pom.xml + + 4.0.0 + + exchangis-engine-server + + + 8 + 8 + + + + + com.webank.wedatasphere.exchangis + exchangis-dao + ${project.version} + + + com.webank.wedatasphere.exchangis + exchangis-engine-core + ${project.version} + + + + + + org.apache.maven.plugins + maven-deploy-plugin + + + + net.alchim31.maven + scala-maven-plugin + + + org.apache.maven.plugins + maven-jar-plugin + + + + + src/main/java + + **/*.xml + + + + + \ No newline at end of file diff --git a/exchangis-engines/exchangis-engine-server/src/main/java/com/webank/wedatasphere/exchangis/engine/server/ExchangisEngineAutoConfiguration.java b/exchangis-engines/exchangis-engine-server/src/main/java/com/webank/wedatasphere/exchangis/engine/server/ExchangisEngineAutoConfiguration.java new file mode 100644 index 000000000..20ad739fb --- /dev/null +++ b/exchangis-engines/exchangis-engine-server/src/main/java/com/webank/wedatasphere/exchangis/engine/server/ExchangisEngineAutoConfiguration.java @@ -0,0 +1,33 @@ +package com.webank.wedatasphere.exchangis.engine.server; + +import com.webank.wedatasphere.exchangis.engine.config.ExchangisEngineConfiguration; +import com.webank.wedatasphere.exchangis.engine.dao.EngineResourceDao; +import com.webank.wedatasphere.exchangis.engine.dao.EngineSettingsDao; +import com.webank.wedatasphere.exchangis.engine.manager.ExchangisEngineManager; +import com.webank.wedatasphere.exchangis.engine.resource.DefaultEngineResourcePathScanner; +import com.webank.wedatasphere.exchangis.engine.resource.EngineResourcePathScanner; +import 
com.webank.wedatasphere.exchangis.engine.server.manager.SpringExchangisEngineManager; +import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; + +/** + * Auto configure the beans in engine + */ +@Configuration +public class ExchangisEngineAutoConfiguration { + + @Bean + @ConditionalOnMissingBean(EngineResourcePathScanner.class) + public EngineResourcePathScanner resourcePathScanner(){ + return new DefaultEngineResourcePathScanner(); + } + + @Bean(initMethod = "init") + @ConditionalOnMissingBean(ExchangisEngineManager.class) + public ExchangisEngineManager engineManager(EngineResourceDao resourceDao, + EngineSettingsDao settingsDao, EngineResourcePathScanner scanner){ + return new SpringExchangisEngineManager(ExchangisEngineConfiguration.ENGINE_RESOURCE_ROOT_PATH.getValue(), + resourceDao, settingsDao, scanner); + } +} diff --git a/exchangis-engines/exchangis-engine-server/src/main/java/com/webank/wedatasphere/exchangis/engine/server/manager/SpringExchangisEngineManager.java b/exchangis-engines/exchangis-engine-server/src/main/java/com/webank/wedatasphere/exchangis/engine/server/manager/SpringExchangisEngineManager.java new file mode 100644 index 000000000..8fa6b88e4 --- /dev/null +++ b/exchangis-engines/exchangis-engine-server/src/main/java/com/webank/wedatasphere/exchangis/engine/server/manager/SpringExchangisEngineManager.java @@ -0,0 +1,127 @@ +package com.webank.wedatasphere.exchangis.engine.server.manager; + +import com.webank.wedatasphere.exchangis.engine.ExchangisEngine; +import com.webank.wedatasphere.exchangis.engine.GenericExchangisEngine; +import com.webank.wedatasphere.exchangis.engine.dao.EngineResourceDao; +import com.webank.wedatasphere.exchangis.engine.dao.EngineSettingsDao; +import com.webank.wedatasphere.exchangis.engine.domain.EngineBmlResource; +import com.webank.wedatasphere.exchangis.engine.domain.EngineLocalPathResource; +import com.webank.wedatasphere.exchangis.engine.domain.EngineSettings; +import com.webank.wedatasphere.exchangis.engine.exception.ExchangisEngineResException; +import com.webank.wedatasphere.exchangis.engine.manager.DefaultExchangisEngineManager; +import com.webank.wedatasphere.exchangis.engine.resource.DefaultEngineResourceContainer; +import com.webank.wedatasphere.exchangis.engine.resource.EngineResourceContainer; +import com.webank.wedatasphere.exchangis.engine.resource.EngineResourcePathScanner; +import com.webank.wedatasphere.exchangis.engine.resource.bml.BmlEngineResourceUploader; +import com.webank.wedatasphere.exchangis.engine.resource.loader.AbstractEngineLocalPathResourceLoader; +import org.apache.commons.lang3.StringUtils; +import org.apache.linkis.common.utils.JsonUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.Set; + +/** + * Engine manager of spring context + */ +public class SpringExchangisEngineManager extends DefaultExchangisEngineManager { + + private static final Logger LOG = LoggerFactory.getLogger(SpringExchangisEngineManager.class); + + /** + * Resource dao + */ + private final EngineResourceDao resourceDao; + + /** + * Settings dao + */ + private final EngineSettingsDao settingsDao; + + /** + * Resource root path + */ + private final String rootPath; + + private final EngineResourcePathScanner scanner; + + public SpringExchangisEngineManager(String rootPath, EngineResourceDao 
resourceDao,
+                                       EngineSettingsDao settingsDao, EngineResourcePathScanner scanner){
+        this.rootPath = rootPath;
+        this.resourceDao = resourceDao;
+        this.settingsDao = settingsDao;
+        this.scanner = scanner;
+    }
+
+    public void init(){
+        List<EngineSettings> settingsList = this.settingsDao.getSettings();
+        try {
+            String settingsJson = JsonUtils.jackson().writer().writeValueAsString(settingsList);
+            LOG.info("Engine settings: {}", settingsJson);
+        }catch(Exception e){
+            //Ignore
+        }
+        settingsList.forEach(settings -> {
+            GenericExchangisEngine engine = new GenericExchangisEngine();
+            engine.setSettings(settings);
+            AbstractEngineLocalPathResourceLoader loader = null;
+            BmlEngineResourceUploader uploader = null;
+            String loaderClassName = settings.getResourceLoaderClass();
+            if (StringUtils.isNotBlank(loaderClassName)){
+                try {
+                    Class<?> loaderClass = Class.forName(loaderClassName);
+                    if (AbstractEngineLocalPathResourceLoader.class.isAssignableFrom(loaderClass)){
+                        loader = (AbstractEngineLocalPathResourceLoader) loaderClass.newInstance();
+                        this.scanner.registerResourceLoader(loader);
+                    } else {
+                        LOG.warn("Disallow the loader class: '{}' as it does not extend '{}'", loaderClass, AbstractEngineLocalPathResourceLoader.class.getName());
+                    }
+                } catch (ClassNotFoundException e) {
+                    LOG.warn("Cannot find the loader class: '{}' for engine [{}]", loaderClassName, engine.getName());
+                } catch (InstantiationException | IllegalAccessException e) {
+                    LOG.warn("Failed to instantiate the loader class: '{}'", loaderClassName, e);
+                }
+            }
+            String uploaderClassName = Optional.ofNullable(settings.getResourceUploaderClass())
+                    .orElse(BmlEngineResourceUploader.class.getCanonicalName());
+            try {
+                Class<?> uploaderClass = Class.forName(uploaderClassName);
+                if (BmlEngineResourceUploader.class.isAssignableFrom(uploaderClass)){
+                    uploader = (BmlEngineResourceUploader) uploaderClass.newInstance();
+                } else {
+                    LOG.warn("Disallow the uploader class: '{}' as it does not extend '{}'", uploaderClass,
+                            BmlEngineResourceUploader.class.getName());
+                }
+            } catch (ClassNotFoundException e) {
+                LOG.warn("Cannot find the uploader class: '{}' for engine [{}]", uploaderClassName, engine.getName());
+            } catch (InstantiationException | IllegalAccessException e) {
+                LOG.warn("Failed to instantiate the uploader class: '{}'", uploaderClassName, e);
+            }
+            EngineResourceContainer<EngineLocalPathResource, EngineBmlResource>
+                    resourceContainer = new DefaultEngineResourceContainer(engine.getName(), rootPath, resourceDao, loader, uploader);
+            LOG.info("Init engine resource container for engine: [{}]", engine.getName());
+            resourceContainer.init();
+            engine.setResourceContainer(resourceContainer);
+            engineContextMap.put(engine.getName(), engine);
+        });
+        try {
+            // Start to scan and load local resources
+            Set<EngineLocalPathResource> localResources = this.scanner.doScan(this.rootPath);
+            localResources.forEach(resource -> Optional.ofNullable(engineContextMap.get(resource.getEngineType()))
+                    .ifPresent(engine -> engine.getResourceContainer().addResource(resource.getPath(), resource)));
+        }catch (ExchangisEngineResException e){
+            LOG.warn("Exception occurred while scanning root path: [{}]", this.rootPath, e);
+        }
+        LOG.info("Flush all the resources in engine resource containers");
+        for(Map.Entry<String, ExchangisEngine> entry : engineContextMap.entrySet()){
+            try {
+                entry.getValue().getResourceContainer().flushAllResources();
+            } catch (ExchangisEngineResException e) {
+                LOG.warn("Unable to flush the resources in container for engine: [{}]", entry.getValue().getName(), e);
+            }
+        }
+    }
+}
diff --git 
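// A hedged sketch of the engine settings row that init() above consumes; the setter
// names are assumptions inferred from the getResourceLoaderClass()/getResourceUploaderClass()
// accessors, while both class names do exist in this patch:
//   EngineSettings settings = new EngineSettings();
//   settings.setName("datax");  // hypothetical setter
//   settings.setResourceLoaderClass("com.webank.wedatasphere.exchangis.engine.resource.loader.datax.DataxEngineResourceLoader");
//   settings.setResourceUploaderClass("com.webank.wedatasphere.exchangis.engine.resource.bml.BmlEngineResourceUploader");
// Given such a row, init() reflectively creates the loader, registers it on the scanner,
// wires a DefaultEngineResourceContainer for the engine, then scans and flushes resources.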
a/exchangis-engines/exchangis-engine-server/src/main/java/com/webank/wedatasphere/exchangis/engine/server/mapper/EngineSettingsMapper.xml b/exchangis-engines/exchangis-engine-server/src/main/java/com/webank/wedatasphere/exchangis/engine/server/mapper/EngineSettingsMapper.xml new file mode 100644 index 000000000..f8d7aac95 --- /dev/null +++ b/exchangis-engines/exchangis-engine-server/src/main/java/com/webank/wedatasphere/exchangis/engine/server/mapper/EngineSettingsMapper.xml @@ -0,0 +1,18 @@ + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/exchangis-engines/exchangis-engine-server/src/main/java/com/webank/wedatasphere/exchangis/engine/server/mapper/ProjectExportService.xml b/exchangis-engines/exchangis-engine-server/src/main/java/com/webank/wedatasphere/exchangis/engine/server/mapper/ProjectExportService.xml new file mode 100644 index 000000000..71fee099e --- /dev/null +++ b/exchangis-engines/exchangis-engine-server/src/main/java/com/webank/wedatasphere/exchangis/engine/server/mapper/ProjectExportService.xml @@ -0,0 +1,41 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/exchangis-engines/pom.xml b/exchangis-engines/pom.xml new file mode 100644 index 000000000..edec6ab46 --- /dev/null +++ b/exchangis-engines/pom.xml @@ -0,0 +1,32 @@ + + + + exchangis + com.webank.wedatasphere.exchangis + ${revision} + ../pom.xml + + 4.0.0 + + exchangis-engines + pom + ${revision} + + + exchangis-engine-common + exchangis-engine-core + exchangis-engine-server + + engines/datax + + engineconn-plugins/sqoop + engineconn-plugins/datax + + + 8 + 8 + + + \ No newline at end of file diff --git a/exchangis-job/exchangis-job-builder/pom.xml b/exchangis-job/exchangis-job-builder/pom.xml new file mode 100644 index 000000000..e2eec71ec --- /dev/null +++ b/exchangis-job/exchangis-job-builder/pom.xml @@ -0,0 +1,34 @@ + + + + exchangis + com.webank.wedatasphere.exchangis + ${revision} + ../../pom.xml + + 4.0.0 + + exchangis-job-builder + + + + com.webank.wedatasphere.exchangis + exchangis-job-common + ${project.version} + + + com.google.code.gson + gson + 2.8.8 + + + com.webank.wedatasphere.exchangis + exchangis-datasource-service + ${project.version} + compile + + + + \ No newline at end of file diff --git a/exchangis-job/exchangis-job-builder/src/main/java/com/webank/wedatasphere/exchangis/job/builder/ExchangisJobBuilderContext.java b/exchangis-job/exchangis-job-builder/src/main/java/com/webank/wedatasphere/exchangis/job/builder/ExchangisJobBuilderContext.java new file mode 100644 index 000000000..e6aee9968 --- /dev/null +++ b/exchangis-job/exchangis-job-builder/src/main/java/com/webank/wedatasphere/exchangis/job/builder/ExchangisJobBuilderContext.java @@ -0,0 +1,72 @@ +package com.webank.wedatasphere.exchangis.job.builder; + + +import com.webank.wedatasphere.exchangis.job.builder.api.ExchangisJobBuilder; +import com.webank.wedatasphere.exchangis.job.domain.ExchangisJobInfo; + +import java.util.HashMap; +import java.util.Map; + +/** + * Builder context + */ +public class ExchangisJobBuilderContext { + + /** + * Origin job + */ + protected ExchangisJobInfo originalJob; + + /** + * Current builder + */ + protected ExchangisJobBuilder currentBuilder; + + private Map env = new HashMap<>(); + + private Map> datasourceParams = new HashMap<>(); + + public ExchangisJobBuilderContext() { + + } + + public ExchangisJobBuilderContext(ExchangisJobInfo originalJob){ + this.originalJob = originalJob; + } + + + public ExchangisJobInfo getOriginalJob() { + return 
originalJob;
+    }
+
+    public void setOriginalJob(ExchangisJobInfo originalJob) {
+        this.originalJob = originalJob;
+    }
+
+    public void putDatasourceParam(String datasourceId, Map<String, Object> datasourceParams) {
+        this.datasourceParams.put(datasourceId, datasourceParams);
+    }
+    public Map<String, Object> getDatasourceParam(String datasourceId) {
+        return this.datasourceParams.get(datasourceId);
+    }
+
+    public void putEnv(String name, Object value) {
+        this.env.put(name, value);
+    }
+
+    public Object getEnv(String name) {
+        return this.env.get(name);
+    }
+
+    public boolean containsEnv(String name) {
+        return this.env.containsKey(name);
+    }
+
+    public ExchangisJobBuilder getCurrentBuilder() {
+        return currentBuilder;
+    }
+
+    public void setCurrentBuilder(ExchangisJobBuilder currentBuilder) {
+        this.currentBuilder = currentBuilder;
+    }
+}
diff --git a/exchangis-job/exchangis-job-builder/src/main/java/com/webank/wedatasphere/exchangis/job/builder/api/AbstractExchangisJobBuilder.java b/exchangis-job/exchangis-job-builder/src/main/java/com/webank/wedatasphere/exchangis/job/builder/api/AbstractExchangisJobBuilder.java
new file mode 100644
index 000000000..d2800991c
--- /dev/null
+++ b/exchangis-job/exchangis-job-builder/src/main/java/com/webank/wedatasphere/exchangis/job/builder/api/AbstractExchangisJobBuilder.java
@@ -0,0 +1,67 @@
+package com.webank.wedatasphere.exchangis.job.builder.api;
+
+import com.webank.wedatasphere.exchangis.job.builder.ExchangisJobBuilderContext;
+import com.webank.wedatasphere.exchangis.job.domain.ExchangisBase;
+import com.webank.wedatasphere.exchangis.job.domain.ExchangisJob;
+import com.webank.wedatasphere.exchangis.job.domain.params.JobParamDefine;
+import com.webank.wedatasphere.exchangis.job.domain.params.JobParamSet;
+import com.webank.wedatasphere.exchangis.job.exception.ExchangisJobException;
+import com.webank.wedatasphere.exchangis.job.utils.TypeGenericUtils;
+
+import java.lang.reflect.ParameterizedType;
+import java.lang.reflect.Type;
+import java.lang.reflect.TypeVariable;
+import java.util.*;
+
+public abstract class AbstractExchangisJobBuilder<T extends ExchangisJob, E extends ExchangisBase> implements ExchangisJobBuilder<T, E> {
+
+    private static final ThreadLocal<ExchangisJobBuilderContext> contextThreadLocal = new ThreadLocal<>();
+
+    @Override
+    @SuppressWarnings("unchecked")
+    public Class<T> inputJob() {
+        return (Class<T>) TypeGenericUtils.getActualTypeFormGenericClass(this.getClass(), AbstractExchangisJobBuilder.class, 0);
+    }
+
+    @Override
+    @SuppressWarnings("unchecked")
+    public Class<E> outputJob() {
+        return (Class<E>) TypeGenericUtils.getActualTypeFormGenericClass(this.getClass(), AbstractExchangisJobBuilder.class, 1);
+    }
+
+    @Override
+    public int priority() {
+        return Integer.MAX_VALUE;
+    }
+
+    @Override
+    public boolean canBuild(T inputJob) {
+        return true;
+    }
+
+
+    @Override
+    public E build(T inputJob, E expectOut, ExchangisJobBuilderContext ctx) throws ExchangisJobException {
+        ExchangisJobBuilder beforeJobBuilder = ctx.getCurrentBuilder();
+        JobParamDefine.defaultParam.set(new JobParamSet());
+        contextThreadLocal.set(ctx);
+        ctx.setCurrentBuilder(this);
+        try {
+            return buildJob(inputJob, expectOut, ctx);
+        } finally{
+            ctx.setCurrentBuilder(beforeJobBuilder);
+            contextThreadLocal.remove();
+            JobParamDefine.defaultParam.remove();
+        }
+    }
+
+    public abstract E buildJob(T inputJob, E expectOut, ExchangisJobBuilderContext ctx) throws ExchangisJobException;
+
+    /**
+     * Get current job builder context
+     * @return builder context
+     */
+    public static ExchangisJobBuilderContext getCurrentBuilderContext(){
+        return contextThreadLocal.get();
+    }
+}
diff --git 
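// A minimal sketch of a concrete builder on top of the abstract class above; MyInputJob and
// MyOutputJob are hypothetical placeholder types, not classes from this changeset:
//   public class SimpleJobBuilder extends AbstractExchangisJobBuilder<MyInputJob, MyOutputJob> {
//       @Override
//       public MyOutputJob buildJob(MyInputJob inputJob, MyOutputJob expectOut,
//                                   ExchangisJobBuilderContext ctx) throws ExchangisJobException {
//           MyOutputJob out = Objects.nonNull(expectOut) ? expectOut : new MyOutputJob();
//           // expectOut carries the output of any earlier builder in the same chain
//           return out;
//       }
//   }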
a/exchangis-job/exchangis-job-builder/src/main/java/com/webank/wedatasphere/exchangis/job/builder/api/ExchangisJobBuilder.java b/exchangis-job/exchangis-job-builder/src/main/java/com/webank/wedatasphere/exchangis/job/builder/api/ExchangisJobBuilder.java new file mode 100644 index 000000000..cdcf9c989 --- /dev/null +++ b/exchangis-job/exchangis-job-builder/src/main/java/com/webank/wedatasphere/exchangis/job/builder/api/ExchangisJobBuilder.java @@ -0,0 +1,48 @@ +package com.webank.wedatasphere.exchangis.job.builder.api; + +import com.webank.wedatasphere.exchangis.job.builder.ExchangisJobBuilderContext; +import com.webank.wedatasphere.exchangis.job.domain.ExchangisBase; +import com.webank.wedatasphere.exchangis.job.domain.ExchangisJob; +import com.webank.wedatasphere.exchangis.job.exception.ExchangisJobException; + +/** + * Builder interface + * @param input job + * @param output job + */ +public interface ExchangisJobBuilder { + + /** + * Input job class + * @return class type + */ + Class inputJob(); + + /** + * Output job class + * @return class type + */ + Class outputJob(); + + /** + * Priority + * @return value + */ + int priority(); + + /** + * If the input job can be built + * @param inputJob input job entity + * @return boolean + */ + boolean canBuild(T inputJob); + + /** + * Main entrance of building + * @param inputJob input job + * @param expectOut expect output entity (can be null or get from output of early builder) + * @param ctx context + * @return outputJob + */ + E build(T inputJob, E expectOut, ExchangisJobBuilderContext ctx) throws ExchangisJobException; +} diff --git a/exchangis-job/exchangis-job-builder/src/main/java/com/webank/wedatasphere/exchangis/job/builder/api/ExchangisJobBuilderChain.java b/exchangis-job/exchangis-job-builder/src/main/java/com/webank/wedatasphere/exchangis/job/builder/api/ExchangisJobBuilderChain.java new file mode 100644 index 000000000..b6ca29e8e --- /dev/null +++ b/exchangis-job/exchangis-job-builder/src/main/java/com/webank/wedatasphere/exchangis/job/builder/api/ExchangisJobBuilderChain.java @@ -0,0 +1,37 @@ +package com.webank.wedatasphere.exchangis.job.builder.api; + +import com.webank.wedatasphere.exchangis.job.builder.ExchangisJobBuilderContext; +import com.webank.wedatasphere.exchangis.job.domain.ExchangisBase; +import com.webank.wedatasphere.exchangis.job.domain.ExchangisJob; +import com.webank.wedatasphere.exchangis.job.exception.ExchangisJobException; + +/** + * Builder chain + */ +public interface ExchangisJobBuilderChain { + + /** + * Register builder + * @param jobBuilder builder + * @return boolean + */ + boolean registerBuilder(ExchangisJobBuilder jobBuilder); + + /** + * Build method + * @param inputJob input job + * @param ctx context + * @return output entity + */ + E build(T inputJob, ExchangisJobBuilderContext ctx) throws ExchangisJobException; + + /** + * Init method + */ + void initialize(); + + /** + * Clean method + */ + void clean(); +} diff --git a/exchangis-job/exchangis-job-builder/src/main/java/com/webank/wedatasphere/exchangis/job/builder/api/GenericExchangisJobBuilderChain.java b/exchangis-job/exchangis-job-builder/src/main/java/com/webank/wedatasphere/exchangis/job/builder/api/GenericExchangisJobBuilderChain.java new file mode 100644 index 000000000..2f17a73e0 --- /dev/null +++ b/exchangis-job/exchangis-job-builder/src/main/java/com/webank/wedatasphere/exchangis/job/builder/api/GenericExchangisJobBuilderChain.java @@ -0,0 +1,63 @@ +package com.webank.wedatasphere.exchangis.job.builder.api; + +import 
com.webank.wedatasphere.exchangis.job.builder.ExchangisJobBuilderContext; +import com.webank.wedatasphere.exchangis.job.domain.ExchangisBase; +import com.webank.wedatasphere.exchangis.job.domain.ExchangisJob; +import com.webank.wedatasphere.exchangis.job.exception.ExchangisJobException; + +import java.util.*; +import java.util.concurrent.CopyOnWriteArrayList; +import java.util.concurrent.atomic.AtomicReference; + +/** + * Generic implement + * @param input job + * @param output job + */ +public class GenericExchangisJobBuilderChain implements ExchangisJobBuilderChain{ + + /** + * Chain list + */ + private List> builderChain = new CopyOnWriteArrayList<>(); + + @Override + public boolean registerBuilder(ExchangisJobBuilder jobBuilder) { + //Need to have inputClass and outputClass + if (Objects.nonNull(jobBuilder.inputJob()) && Objects.nonNull(jobBuilder.outputJob())){ + builderChain.add(jobBuilder); + return true; + } + return false; + } + + @Override + public E build(T inputJob, ExchangisJobBuilderContext ctx) throws ExchangisJobException { + final AtomicReference expectJob = new AtomicReference<>(null); + if (Objects.nonNull(inputJob)){ + for( ExchangisJobBuilder builder : builderChain){ + if(builder.canBuild(inputJob)){ + expectJob.set(builder.build(inputJob, expectJob.get(), ctx)); + } + } + } + return expectJob.get(); + } + + @Override + public void initialize() { + doInnerSort(); + } + + @Override + public void clean() { + builderChain.clear(); + } + + /** + * Sort method + */ + public synchronized void doInnerSort(){ + builderChain.sort(Comparator.comparingInt(ExchangisJobBuilder::priority)); + } +} diff --git a/exchangis-job/exchangis-job-builder/src/main/java/com/webank/wedatasphere/exchangis/job/builder/manager/DefaultExchangisJobBuilderManager.java b/exchangis-job/exchangis-job-builder/src/main/java/com/webank/wedatasphere/exchangis/job/builder/manager/DefaultExchangisJobBuilderManager.java new file mode 100644 index 000000000..9b97a088f --- /dev/null +++ b/exchangis-job/exchangis-job-builder/src/main/java/com/webank/wedatasphere/exchangis/job/builder/manager/DefaultExchangisJobBuilderManager.java @@ -0,0 +1,106 @@ +package com.webank.wedatasphere.exchangis.job.builder.manager; + +import com.webank.wedatasphere.exchangis.job.builder.ExchangisJobBuilderContext; +import com.webank.wedatasphere.exchangis.job.builder.api.ExchangisJobBuilder; +import com.webank.wedatasphere.exchangis.job.builder.api.ExchangisJobBuilderChain; +import com.webank.wedatasphere.exchangis.job.builder.api.GenericExchangisJobBuilderChain; +import com.webank.wedatasphere.exchangis.job.domain.ExchangisBase; +import com.webank.wedatasphere.exchangis.job.domain.ExchangisJob; +import com.webank.wedatasphere.exchangis.job.exception.ExchangisJobException; + +import java.util.Map; +import java.util.Objects; +import java.util.concurrent.ConcurrentHashMap; + +/** + * Default implement + */ +public class DefaultExchangisJobBuilderManager implements ExchangisJobBuilderManager{ + + /** + * Builder chains + */ + @SuppressWarnings("rawtypes") + private Map jobBuilderChains = new ConcurrentHashMap<>(); + + @Override + @SuppressWarnings("unchecked") + public E doBuild(T originJob, Class expectJobClass, + ExchangisJobBuilderContext ctx) throws ExchangisJobException { + return doBuild(originJob, (Class)originJob.getClass(), expectJobClass, ctx); + } + + @Override + @SuppressWarnings("unchecked") + public E doBuild(T originJob, Class inputJobClass, Class expectJobClass, ExchangisJobBuilderContext ctx) throws ExchangisJobException 
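+    // Illustrative sketch of what the selected chain does (reusing the hypothetical
+    // SimpleJobBuilder from the note above):
+    //   GenericExchangisJobBuilderChain<MyInputJob, MyOutputJob> chain = new GenericExchangisJobBuilderChain<>();
+    //   chain.registerBuilder(new SimpleJobBuilder());
+    //   chain.initialize(); // doInnerSort(): ascending order of priority()
+    //   MyOutputJob out = chain.build(input, new ExchangisJobBuilderContext());
+    // build(...) runs every registered builder whose canBuild(input) is true, passing the
+    // previous builder's output along as expectOut.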
{ + BuilderDirection direction = new BuilderDirection(inputJobClass, expectJobClass); + ExchangisJobBuilderChain chain = (ExchangisJobBuilderChain) jobBuilderChains.get(direction); + if(Objects.nonNull(chain)) { + return chain.build(originJob, ctx); + } + return null; + } + + @Override + @SuppressWarnings("unchecked") + public void addJobBuilder(ExchangisJobBuilder jobBuilder) { + BuilderDirection direction = new BuilderDirection(jobBuilder.inputJob(), jobBuilder.outputJob()); + + jobBuilderChains.compute(direction, (key, value) -> { + if(Objects.isNull(value)){ + value = new GenericExchangisJobBuilderChain<>(); + } + value.registerBuilder(jobBuilder); + return value; + }); + } + + @Override + @SuppressWarnings("unchecked") + public ExchangisJobBuilderChain getJobBuilderChain(Class inputJob, Class outputJob) { + BuilderDirection direction = new BuilderDirection(inputJob, outputJob); + return jobBuilderChains.get(direction); + } + + @Override + public void resetJobBuilder(Class inputJob, Class outputJob) { + BuilderDirection direction = new BuilderDirection(inputJob, outputJob); + jobBuilderChains.compute(direction, (key, value) ->{ + if(Objects.nonNull(value)){ + value.clean(); + } + return value; + }); + } + + @Override + public void initBuilderChains() { + jobBuilderChains.values().forEach(ExchangisJobBuilderChain::initialize); + } + + private static class BuilderDirection { + + private Class inputJobClass; + + private Class outputJobClass; + + public BuilderDirection(Class inputJobClass, Class outputJobClass){ + this.inputJobClass = inputJobClass; + this.outputJobClass = outputJobClass; + } + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + BuilderDirection that = (BuilderDirection) o; + return Objects.equals(inputJobClass, that.inputJobClass) && + Objects.equals(outputJobClass, that.outputJobClass); + } + + @Override + public int hashCode() { + return Objects.hash(inputJobClass, outputJobClass); + } + } + +} diff --git a/exchangis-job/exchangis-job-builder/src/main/java/com/webank/wedatasphere/exchangis/job/builder/manager/ExchangisJobBuilderManager.java b/exchangis-job/exchangis-job-builder/src/main/java/com/webank/wedatasphere/exchangis/job/builder/manager/ExchangisJobBuilderManager.java new file mode 100644 index 000000000..262d30033 --- /dev/null +++ b/exchangis-job/exchangis-job-builder/src/main/java/com/webank/wedatasphere/exchangis/job/builder/manager/ExchangisJobBuilderManager.java @@ -0,0 +1,57 @@ +package com.webank.wedatasphere.exchangis.job.builder.manager; + +import com.webank.wedatasphere.exchangis.job.builder.ExchangisJobBuilderContext; +import com.webank.wedatasphere.exchangis.job.builder.api.ExchangisJobBuilder; +import com.webank.wedatasphere.exchangis.job.builder.api.ExchangisJobBuilderChain; +import com.webank.wedatasphere.exchangis.job.domain.ExchangisBase; +import com.webank.wedatasphere.exchangis.job.domain.ExchangisJob; +import com.webank.wedatasphere.exchangis.job.exception.ExchangisJobException; + +/** + * Define the builder manager interface + */ +public interface ExchangisJobBuilderManager { + + /** + * + * @param originJob origin job + * @param expectEntityClass expect entity class + * @param input class + * @param output class + * @return output + */ + E doBuild(T originJob, Class expectEntityClass, + ExchangisJobBuilderContext ctx) throws ExchangisJobException; + + E doBuild(T originJob, Class inputJobClass, Class expectEntityClass, + ExchangisJobBuilderContext ctx) 
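For illustration, a minimal wiring sketch of the chain/manager pair above. It is not part of the patch: it assumes the ExchangisJobBuilder API exposes the inputJob()/outputJob()/canBuild()/build()/priority() methods the chain calls, assumes ExchangisJobBuilderContext has a default constructor, and the JobInfoToSubJobBuilder name is a hypothetical stand-in. ExchangisJobInfo and SubExchangisJob are the domain classes introduced later in this diff.

    // Hypothetical builder: ExchangisJobInfo -> SubExchangisJob (sketch only)
    class JobInfoToSubJobBuilder implements ExchangisJobBuilder<ExchangisJobInfo, SubExchangisJob> {
        public Class<ExchangisJobInfo> inputJob() { return ExchangisJobInfo.class; }
        public Class<SubExchangisJob> outputJob() { return SubExchangisJob.class; }
        public boolean canBuild(ExchangisJobInfo inputJob) { return true; }
        public int priority() { return 1; }
        public SubExchangisJob build(ExchangisJobInfo inputJob, SubExchangisJob expectJob,
                                     ExchangisJobBuilderContext ctx) {
            SubExchangisJob subJob = new SubExchangisJob();
            subJob.setName(inputJob.getName());
            subJob.setEngineType(inputJob.getEngineType());
            return subJob;
        }
    }

    // Register the builder, sort each chain by priority, then build
    ExchangisJobBuilderManager manager = new DefaultExchangisJobBuilderManager();
    manager.addJobBuilder(new JobInfoToSubJobBuilder());
    manager.initBuilderChains();
    ExchangisJobInfo jobInfo = new ExchangisJobInfo();
    ExchangisJobBuilderContext ctx = new ExchangisJobBuilderContext(); // assumed default constructor
    SubExchangisJob subJob = manager.doBuild(jobInfo, SubExchangisJob.class, ctx);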
throws ExchangisJobException; + /** + * + * @param jobBuilder job builder + */ + void addJobBuilder(ExchangisJobBuilder jobBuilder); + + /** + * + * @param inputJob input job + * @param outputEntity output entity + * @param + * @param + * @return + */ + ExchangisJobBuilderChain getJobBuilderChain(Class inputJob, Class outputEntity); + + /** + * Reset builder chain + * @param inputJob input job + * @param outputEntity output entity + * @param + * @param + */ + void resetJobBuilder(Class inputJob, Class outputEntity); + + /** + * Invoke the 'initialize' method of chains + */ + void initBuilderChains(); +} diff --git a/exchangis-job/exchangis-job-builder/src/main/java/com/webank/wedatasphere/exchangis/job/utils/Utils.java b/exchangis-job/exchangis-job-builder/src/main/java/com/webank/wedatasphere/exchangis/job/utils/Utils.java new file mode 100644 index 000000000..90cb9b9c0 --- /dev/null +++ b/exchangis-job/exchangis-job-builder/src/main/java/com/webank/wedatasphere/exchangis/job/utils/Utils.java @@ -0,0 +1,112 @@ +package com.webank.wedatasphere.exchangis.job.utils; + +import com.webank.wedatasphere.exchangis.datasource.core.exception.ExchangisDataSourceException; + +import java.io.File; +import java.net.JarURLConnection; +import java.net.URL; +import java.util.ArrayList; +import java.util.Enumeration; +import java.util.List; +import java.util.jar.JarEntry; +import java.util.jar.JarFile; + +public class Utils { + + private static final String CLASS_SUFFIX = ".class"; + private static final String CLASS_FILE_PREFIX = File.separator + "classes" + File.separator; + private static final String PACKAGE_SEPARATOR = "."; + + public static List getClazzName(String packageName, boolean showChildPackageFlag) throws ExchangisDataSourceException { + + List result = new ArrayList<>(); + String suffixPath = packageName.replaceAll("\\.", "/"); + ClassLoader loader = Thread.currentThread().getContextClassLoader(); + + try { + Enumeration urls = loader.getResources(suffixPath); + while (urls.hasMoreElements()) { + URL url = urls.nextElement(); + if (url != null) { + String protocol = url.getProtocol(); + if ("file".equals(protocol)) { + String path = url.getPath(); + result.addAll(getAllClassNameByFile(new File(path), showChildPackageFlag)); + } else if ("jar".equals(protocol)) { + JarFile jarFile = null; + jarFile = ((JarURLConnection) url.openConnection()).getJarFile(); + if (jarFile != null) { + result.addAll(getAllClassNameByJar(jarFile, packageName, showChildPackageFlag)); + } + } + } + } + } catch (Exception e) { + throw new ExchangisDataSourceException(23001, e.getLocalizedMessage()); + } + return result; + } + + private static List getAllClassNameByFile(File file, boolean flag) { + List result = new ArrayList<>(); + if (!file.exists()) { + return result; + } + if (file.isFile()) { + String path = file.getPath(); + if (path.endsWith(CLASS_SUFFIX)) { + path = path.replace(CLASS_SUFFIX, ""); + String clazzName = path.substring(path.indexOf(CLASS_FILE_PREFIX) + CLASS_FILE_PREFIX.length()) + .replace(File.separator, PACKAGE_SEPARATOR); + if (-1 == clazzName.indexOf("$")) { + result.add(clazzName); + } + } + return result; + } else { + File[] listFiles = file.listFiles(); + if (listFiles != null && listFiles.length > 0) { + for (File f : listFiles) { + if (flag) { + result.addAll(getAllClassNameByFile(f, flag)); + } else { + if (f.isFile()) { + String path = f.getPath(); + if (path.endsWith(CLASS_SUFFIX)) { + path = path.replace(CLASS_SUFFIX, ""); + String clazzName = 
path.substring(path.indexOf(CLASS_FILE_PREFIX) + CLASS_FILE_PREFIX.length()) + .replace(File.separator, PACKAGE_SEPARATOR); + if (-1 == clazzName.indexOf("$")) { + result.add(clazzName); + } + } + } + } + } + } + return result; + } + } + + private static List getAllClassNameByJar(JarFile jarFile, String packageName, boolean flag) { + List result = new ArrayList<>(); + Enumeration entries = jarFile.entries(); + while (entries.hasMoreElements()) { + JarEntry jarEntry = entries.nextElement(); + String name = jarEntry.getName(); + if (name.endsWith(CLASS_SUFFIX)) { + name = name.replace(CLASS_SUFFIX, "").replace("/", "."); + if (flag) { + if (name.startsWith(packageName) && -1 == name.indexOf("$")) { + result.add(name); + } + } else { + if (packageName.equals(name.substring(0, name.lastIndexOf("."))) && -1 == name.indexOf("$")) { + result.add(name); + } + } + } + } + return result; + } +} diff --git a/exchangis-job/exchangis-job-common/pom.xml b/exchangis-job/exchangis-job-common/pom.xml new file mode 100644 index 000000000..aba4edd1a --- /dev/null +++ b/exchangis-job/exchangis-job-common/pom.xml @@ -0,0 +1,38 @@ + + + + exchangis + com.webank.wedatasphere.exchangis + ${revision} + ../../pom.xml + + 4.0.0 + + exchangis-job-common + + + + com.webank.wedatasphere.exchangis + exchangis-dao + ${project.version} + + + org.apache.linkis + linkis-label-common + ${linkis.version} + + + com.webank.wedatasphere.exchangis + exchangis-engine-common + ${project.version} + + + org.apache.linkis + linkis-protocol + ${linkis.version} + + + + \ No newline at end of file diff --git a/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/api/ExchangisJobOpenService.java b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/api/ExchangisJobOpenService.java new file mode 100644 index 000000000..484fc58ce --- /dev/null +++ b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/api/ExchangisJobOpenService.java @@ -0,0 +1,35 @@ +package com.webank.wedatasphere.exchangis.job.api; + +import com.webank.wedatasphere.exchangis.job.domain.ExchangisJobEntity; +import com.webank.wedatasphere.exchangis.job.exception.ExchangisJobException; +import com.webank.wedatasphere.exchangis.job.vo.ExchangisJobQueryVo; + +import java.util.List; + +/** + * Open for the other module to invoke + */ +public interface ExchangisJobOpenService { + + /** + * Get job entity by id + * @param id + * @return + */ + ExchangisJobEntity getJobById(Long id, boolean basic) throws ExchangisJobException; + + /** + * Query job entity + * @param queryVo query vo + * @param inPage if in page + * @return + */ + List queryJobs(ExchangisJobQueryVo queryVo, boolean inPage) + throws ExchangisJobException; + + /** + * Delete the job entities + * @param idList id list + */ + void deleteJobBatch(List idList) throws ExchangisJobException; +} diff --git a/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/constraints/LabelSerializeConstraints.java b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/constraints/LabelSerializeConstraints.java new file mode 100644 index 000000000..0effb14f5 --- /dev/null +++ b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/constraints/LabelSerializeConstraints.java @@ -0,0 +1,16 @@ +package com.webank.wedatasphere.exchangis.job.constraints; + +/** + * Constraint for label serialization + */ +public class 
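A brief usage sketch for Utils.getClazzName above (illustrative, not part of the patch). The scanner resolves the package on the context classpath, handles both exploded class directories and jar entries, and skips inner classes (names containing '$'); the boolean flag controls whether sub-packages are included.

    // List classes under a package, descending into sub-packages;
    // may throw ExchangisDataSourceException(23001) if the classpath cannot be read
    List<String> classNames = Utils.getClazzName(
            "com.webank.wedatasphere.exchangis.job.domain", true);
    classNames.forEach(System.out::println);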
LabelSerializeConstraints { + /** + * Splitter symbol + */ + public static final String LABEL_ENTITY_SPLITTER_SYMBOL = ","; + + /** + * Combine symbol + */ + public static final String LABEL_KV_COMBINE_SYMBOL = "="; +} diff --git a/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/domain/ExchangisBase.java b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/domain/ExchangisBase.java new file mode 100644 index 000000000..f83fcc3d7 --- /dev/null +++ b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/domain/ExchangisBase.java @@ -0,0 +1,42 @@ +package com.webank.wedatasphere.exchangis.job.domain; + +import java.util.Date; + +/** + * Basic method of entity + */ +public interface ExchangisBase { + + /** + * ID + * @return LONG + */ + Long getId(); + + void setId(Long id); + + + /** + * Name + * @return STRING + */ + String getName(); + + void setName(String name); + + /** + * Create time + * @return DATE + */ + Date getCreateTime(); + + void setCreateTime(Date createTime); + + /** + * Last update time + * @return DATE + */ + Date getLastUpdateTime(); + + void setLastUpdateTime(Date lastUpdateTime); +} diff --git a/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/domain/ExchangisEngineJob.java b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/domain/ExchangisEngineJob.java new file mode 100644 index 000000000..72d979504 --- /dev/null +++ b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/domain/ExchangisEngineJob.java @@ -0,0 +1,115 @@ +package com.webank.wedatasphere.exchangis.job.domain; + + +import com.webank.wedatasphere.exchangis.engine.domain.EngineResource; +import com.webank.wedatasphere.exchangis.job.utils.MemUtils; + +import java.util.*; + +/** + * EngineJob + */ +public class ExchangisEngineJob extends GenericExchangisJob { + + public ExchangisEngineJob(){ + + } + + public ExchangisEngineJob(ExchangisEngineJob engineJob){ + if (Objects.nonNull(engineJob)) { + setName(engineJob.getName()); + setEngineType(engineJob.getEngineType()); + getJobContent().putAll(engineJob.getJobContent()); + getRuntimeParams().putAll(engineJob.getRuntimeParams()); + setMemoryUsed(engineJob.getMemoryUsed()); + getResources().addAll(engineJob.getResources()); + } + } + /** + * Job content + */ + private Map jobContent = new HashMap<>(); + + /** + * Job runtime params(defined by user) + */ + private Map runtimeParams = new HashMap<>(); + + /** + * Memory used in engine job + */ + private Long memoryUsed; + + private String memoryUnit = MemUtils.StoreUnit.MB.name(); + + /** + * If lock the unit of memory + */ + private boolean memoryUnitLock = false; + /** + * Cpu used in engine job + */ + private Long cpuUsed; + + /** + * Engine resources + */ + private List resources = new ArrayList<>(); + + public Map getJobContent() { + return jobContent; + } + + public void setJobContent(Map jobContent) { + this.jobContent = jobContent; + } + + public Map getRuntimeParams() { + return runtimeParams; + } + + public void setRuntimeParams(Map runtimeParams) { + this.runtimeParams = runtimeParams; + } + + + public Long getMemoryUsed() { + return memoryUsed; + } + + public void setMemoryUsed(Long memoryUsed) { + this.memoryUsed = memoryUsed; + } + + public Long getCpuUsed() { + return cpuUsed; + } + + public void setCpuUsed(Long cpuUsed) { + this.cpuUsed = cpuUsed; + } + + public String getMemoryUnit() { 
+ return memoryUnit; + } + + public void setMemoryUnit(String memoryUnit) { + this.memoryUnit = memoryUnit; + } + + public List getResources() { + return resources; + } + + public void setResources(List resources) { + this.resources = resources; + } + + public boolean isMemoryUnitLock() { + return memoryUnitLock; + } + + public void setMemoryUnitLock(boolean memoryUnitLock) { + this.memoryUnitLock = memoryUnitLock; + } +} diff --git a/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/domain/ExchangisJob.java b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/domain/ExchangisJob.java new file mode 100644 index 000000000..8afd21e49 --- /dev/null +++ b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/domain/ExchangisJob.java @@ -0,0 +1,52 @@ +package com.webank.wedatasphere.exchangis.job.domain; + + +import java.util.Map; + +/** + * Basic job interface + */ +public interface ExchangisJob extends ExchangisBase{ + + /** + * Engine type + * @return type value + */ + String getEngineType(); + + void setEngineType(String engineType); + + /** + * Label String + * @return Label value + */ + String getJobLabel(); + + void setJobLabel(String engineType); + + /** + * Label map + * @return entities + */ + Map getJobLabels(); + + /** + * Set label Json + * @param labels + */ + void setJobLabels(String labels); + + /** + * Set label Map + * @param jobLabels + */ + void setJobLabels(Map jobLabels); + + /** + * Create user + * @return user name + */ + String getCreateUser(); + + void setCreateUser(String createUser); +} diff --git a/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/domain/ExchangisJobEntity.java b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/domain/ExchangisJobEntity.java new file mode 100644 index 000000000..67c84efb1 --- /dev/null +++ b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/domain/ExchangisJobEntity.java @@ -0,0 +1,39 @@ +package com.webank.wedatasphere.exchangis.job.domain; + + +/** + * + */ +public class ExchangisJobEntity extends ExchangisJobInfo{ + + private Long projectId; + + private String source; + + private String modifyUser; + + public Long getProjectId() { + return projectId; + } + + public void setProjectId(Long projectId) { + this.projectId = projectId; + } + + public String getSource() { + return source; + } + + public void setSource(String source) { + this.source = source; + } + + public String getModifyUser() { + return modifyUser; + } + + public void setModifyUser(String modifyUser) { + this.modifyUser = modifyUser; + } + +} diff --git a/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/domain/ExchangisJobInfo.java b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/domain/ExchangisJobInfo.java new file mode 100644 index 000000000..ffb854643 --- /dev/null +++ b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/domain/ExchangisJobInfo.java @@ -0,0 +1,92 @@ +package com.webank.wedatasphere.exchangis.job.domain; + +import com.webank.wedatasphere.exchangis.job.vo.ExchangisJobVo; + +/** + * Contain the content and parameters + */ +public class ExchangisJobInfo extends GenericExchangisJob { + /** + * Job content (JSON) + */ + protected String jobContent; + + /** + * Execute user + */ + protected String executeUser; + + /** + * Job 
params (JSON) + */ + protected String jobParams; + + /** + * Job description + */ + protected String jobDesc; + + /** + * Job type + */ + protected String jobType; + /** + * Convert from view object + * @param jobVo vo + */ + public ExchangisJobInfo(ExchangisJobVo jobVo){ + this.id = jobVo.getId(); + this.name = jobVo.getJobName(); + this.engineType = jobVo.getEngineType(); + this.jobLabel = jobVo.getJobLabels(); + this.createTime = jobVo.getCreateTime(); + this.createUser = jobVo.getCreateUser(); + this.lastUpdateTime = jobVo.getModifyTime(); + this.jobContent = jobVo.getContent(); + this.executeUser = jobVo.getExecuteUser(); + this.jobParams = jobVo.getJobParams(); + } + + public ExchangisJobInfo(){ + + } + public String getJobContent() { + return jobContent; + } + + public void setJobContent(String jobContent) { + this.jobContent = jobContent; + } + + public String getExecuteUser() { + return executeUser; + } + + public void setExecuteUser(String executeUser) { + this.executeUser = executeUser; + } + + public String getJobParams() { + return jobParams; + } + + public void setJobParams(String jobParams) { + this.jobParams = jobParams; + } + + public String getJobDesc() { + return jobDesc; + } + + public void setJobDesc(String jobDesc) { + this.jobDesc = jobDesc; + } + + public String getJobType() { + return jobType; + } + + public void setJobType(String jobType) { + this.jobType = jobType; + } +} diff --git a/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/domain/ExchangisJobPageQuery.java b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/domain/ExchangisJobPageQuery.java new file mode 100644 index 000000000..babfe84bc --- /dev/null +++ b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/domain/ExchangisJobPageQuery.java @@ -0,0 +1,50 @@ +package com.webank.wedatasphere.exchangis.job.domain; + +import com.webank.wedatasphere.exchangis.common.pager.PageQuery; + +/** + * For querying page + */ +public class ExchangisJobPageQuery extends PageQuery { + + /** + * Project id + */ + protected Long projectId; + + /** + * Job type + */ + protected String jobType; + + /** + * Job name + */ + protected String jobName; + + protected String createUser; + + public Long getProjectId() { + return projectId; + } + + public void setProjectId(Long projectId) { + this.projectId = projectId; + } + + public String getJobType() { + return jobType; + } + + public void setJobType(String jobType) { + this.jobType = jobType; + } + + public String getCreateUser() { + return createUser; + } + + public void setCreateUser(String createUser) { + this.createUser = createUser; + } +} diff --git a/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/domain/ExchangisTask.java b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/domain/ExchangisTask.java new file mode 100644 index 000000000..e6f0f6b0a --- /dev/null +++ b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/domain/ExchangisTask.java @@ -0,0 +1,23 @@ +package com.webank.wedatasphere.exchangis.job.domain; + +/** + * Basic task interface + */ +public interface ExchangisTask extends ExchangisBase { + + /** + * Engine type + * @return type value + */ + String getEngineType(); + + void setEngineType(String engineType); + + /** + * Execute user + * @return + */ + String getExecuteUser(); + + void setExecuteUser(String executeUser); +} diff --git 
a/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/domain/GenericExchangisJob.java b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/domain/GenericExchangisJob.java
new file mode 100644
index 000000000..7493ce4ed
--- /dev/null
+++ b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/domain/GenericExchangisJob.java
@@ -0,0 +1,116 @@
+package com.webank.wedatasphere.exchangis.job.domain;
+
+import com.webank.wedatasphere.exchangis.job.utils.LabelConvertUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.*;
+
+/**
+ * Generic implementation
+ */
+public class GenericExchangisJob implements ExchangisJob {
+
+    private static final Logger LOG = LoggerFactory.getLogger(GenericExchangisJob.class);
+
+    protected Long id;
+
+    protected String name;
+
+    protected String engineType;
+
+    protected String jobLabel;
+
+    private Map<String, Object> labelHolder = new HashMap<>();
+
+    protected Date createTime;
+
+    protected Date lastUpdateTime;
+
+    protected String createUser;
+
+    @Override
+    public Long getId() {
+        return id;
+    }
+
+    @Override
+    public void setId(Long id) {
+        this.id = id;
+    }
+
+    @Override
+    public String getName() {
+        return name;
+    }
+
+    @Override
+    public void setName(String name) {
+        this.name = name;
+    }
+
+    @Override
+    public Date getCreateTime() {
+        return createTime;
+    }
+
+    @Override
+    public void setCreateTime(Date createTime) {
+        this.createTime = createTime;
+    }
+
+    @Override
+    public Date getLastUpdateTime() {
+        return lastUpdateTime;
+    }
+
+    @Override
+    public void setLastUpdateTime(Date lastUpdateTime) {
+        this.lastUpdateTime = lastUpdateTime;
+    }
+
+    @Override
+    public String getEngineType() {
+        return this.engineType;
+    }
+
+    @Override
+    public void setEngineType(String engineType) {
+        this.engineType = engineType;
+    }
+
+    @Override
+    public String getJobLabel() {
+        return jobLabel;
+    }
+
+    @Override
+    public void setJobLabel(String jobLabel) {
+        this.jobLabel = jobLabel;
+    }
+
+    @Override
+    public Map<String, Object> getJobLabels() {
+        return labelHolder;
+    }
+
+    @Override
+    public void setJobLabels(String labels) {
+        this.labelHolder = LabelConvertUtils.stringToLabelMap(labels);
+    }
+
+    @Override
+    public void setJobLabels(Map<String, Object> jobLabels) {
+        this.labelHolder = jobLabels;
+    }
+
+    @Override
+    public String getCreateUser() {
+        return createUser;
+    }
+
+    @Override
+    public void setCreateUser(String createUser) {
+        this.createUser = createUser;
+    }
+}
diff --git a/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/domain/OperationType.java b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/domain/OperationType.java
new file mode 100644
index 000000000..d8c97943c
--- /dev/null
+++ b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/domain/OperationType.java
@@ -0,0 +1,44 @@
+package com.webank.wedatasphere.exchangis.job.domain;
+
+/**
+ * @author jefftlin
+ * @create 2022-09-15
+ **/
+public enum OperationType {
+
+    /**
+     * job operation:
+     * query project
+     */
+    JOB_QUERY("JOB_QUERY"),
+
+    /**
+     * job operation:
+     * create job
+     * update job info
+     * update job config
+     * update job content
+     * delete job
+     */
+    JOB_ALTER("JOB_ALTER"),
+
+    /**
+     * job operation:
+     * job execute
+     * job kill
+     * sub job delete
+     */
+    JOB_EXECUTE("JOB_EXECUTE"),
+
+    /**
+     * job operation:
+     * job release
+     */
+    JOB_RELEASE("JOB_RELEASE");
+
+    private String type;
+
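A small sketch of the label round trip on GenericExchangisJob above (illustrative only). It assumes LabelConvertUtils.stringToLabelMap parses the serialized form implied by LabelSerializeConstraints, i.e. entries joined by ',' with key and value combined by '='.

    GenericExchangisJob job = new GenericExchangisJob();
    // Serialized label string, assumed format: "key=value,key=value"
    job.setJobLabels("route=dev,version=1.0");
    Object route = job.getJobLabels().get("route"); // expected "dev"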
OperationType(String type) { + this.type = type; + } +} diff --git a/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/domain/SubExchangisJob.java b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/domain/SubExchangisJob.java new file mode 100644 index 000000000..7812212ac --- /dev/null +++ b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/domain/SubExchangisJob.java @@ -0,0 +1,281 @@ +package com.webank.wedatasphere.exchangis.job.domain; + +import com.webank.wedatasphere.exchangis.job.domain.params.JobParam; +import com.webank.wedatasphere.exchangis.job.domain.params.JobParamSet; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; +import java.util.stream.Collectors; + +/** + * For each sub job entity, + * will have parameter set which divided into different realm + */ +public class SubExchangisJob extends GenericExchangisJob { + + + protected String sourceType; + + protected String sinkType; + + public static final String REALM_JOB_SETTINGS = "job.realm.settings"; + + public static final String REALM_JOB_DATA_SOURCE = "job.realm.data-source"; + + public static final String REALM_JOB_CONTENT_SOURCE = "job.realm.content.source"; + + public static final String REALM_JOB_CONTENT_SINK = "job.realm.content.sink"; + +// public static final String REALM_JOB_COLUMN_MAPPING = "job.realm.column-mappings"; + + /** + * Realm params set + */ + private final Map realmParamSet = new ConcurrentHashMap<>(); + + /** + * Source columns + */ + private final List sourceColumns = new ArrayList<>(); + + /** + * Sink columns + */ + private final List sinkColumns = new ArrayList<>(); + + /** + * Functions + */ + private final List columnFunctions = new ArrayList<>(); + + public String getSourceType() { + return sourceType; + } + + public void setSourceType(String sourceType) { + this.sourceType = sourceType; + } + + public String getSinkType() { + return sinkType; + } + + public void setSinkType(String sinkType) { + this.sinkType = sinkType; + } + + /** + * Add + * @param realm realm info + * @param paramSet param set + */ + public void addRealmParams(String realm, JobParamSet paramSet){ + realmParamSet.put(realm, paramSet); + } + + /** + * Get + * @param realm realm info + * @return param set + */ + public JobParamSet getRealmParams(String realm){ + return realmParamSet.get(realm); + } + + /** + * Get and convert to map + * @param realm realm info + * @return map + */ + public Map getParamsToMap(String realm, boolean isTemp){ + JobParamSet jobParamSet = getRealmParams(realm); + return jobParamSet.toList(isTemp).stream().collect( + Collectors.toMap(JobParam::getStrKey, JobParam::getValue)); + } + + /** + * Get all and convert to map + * @return map + */ + public Map getParamsToMap(){ + return realmParamSet.values().stream().flatMap(realmParam -> realmParam.toList().stream()) + .collect(Collectors.toMap(JobParam::getStrKey, JobParam::getValue)); + } + + public Map getParamsToMap(boolean isTemp){ + return realmParamSet.values().stream().flatMap(realmParam -> realmParam.toList(isTemp).stream()) + .collect(Collectors.toMap(JobParam::getStrKey, JobParam::getValue, (left, right) -> right)); + } + + public List getSourceColumns() { + return sourceColumns; + } + + public List getSinkColumns() { + return sinkColumns; + } + + public List getColumnFunctions() { + return columnFunctions; + } + + /** + * Column definition + */ + public 
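A minimal sketch of the realm-scoped parameter set described above (illustrative, not part of the patch; JobParams and JobParamSet are introduced later in this diff).

    SubExchangisJob subJob = new SubExchangisJob();
    JobParamSet settings = new JobParamSet();
    settings.add(JobParams.newOne("exec.queue", "default"));
    subJob.addRealmParams(SubExchangisJob.REALM_JOB_SETTINGS, settings);
    // Flatten one realm back into a plain map (persistent params only)
    Map<String, Object> flat = subJob.getParamsToMap(SubExchangisJob.REALM_JOB_SETTINGS, false);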
static class ColumnDefine{ + + /** + * Column name + */ + private String name; + + /** + * Column type + */ + private String type; + + /** + * Raw column type + */ + private String rawType; + + /** + * Column index + */ + private Integer index; + + public ColumnDefine(){ + + } + + public ColumnDefine(String name, String type){ + this.name = name; + this.type = type; + } + + public ColumnDefine(String name, String type, Integer index){ + this.name = name; + this.type = type; + this.index = index; + } + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public String getType() { + return type; + } + + public void setType(String type) { + this.type = type; + } + + public Integer getIndex() { + return index; + } + + public void setIndex(Integer index) { + this.index = index; + } + + public String getRawType() { + return rawType; + } + + public void setRawType(String rawType) { + this.rawType = rawType; + } + } + + /** + * Column define with precision and scale + */ + public static class DecimalColumnDefine extends ColumnDefine{ + + private static final int DEFAULT_PRECISION = 38; + + private static final int DEFAULT_SCALE = 18; + + /** + * Precision + */ + private int precision = DEFAULT_PRECISION; + + /** + * Scale + */ + private int scale = DEFAULT_SCALE; + + public DecimalColumnDefine(){ + + } + + public DecimalColumnDefine(String name, String type, Integer index, int precision, int scale){ + super(name, type, index); + this.precision = precision; + this.scale = scale; + } + + public int getPrecision() { + return precision; + } + + public void setPrecision(int precision) { + this.precision = precision; + } + + public int getScale() { + return scale; + } + + public void setScale(int scale) { + this.scale = scale; + } + } + /** + * Column function + */ + public static class ColumnFunction{ + + private Integer index; + /** + * Function name + */ + private String name; + + /** + * Function params + */ + private List params = new ArrayList<>(); + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public List getParams() { + return params; + } + + public void setParams(List params) { + this.params = params; + } + + public Integer getIndex() { + return index; + } + + public void setIndex(Integer index) { + this.index = index; + } + } +} diff --git a/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/domain/params/DefaultJobParam.java b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/domain/params/DefaultJobParam.java new file mode 100644 index 000000000..7d938ebcd --- /dev/null +++ b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/domain/params/DefaultJobParam.java @@ -0,0 +1,99 @@ +package com.webank.wedatasphere.exchangis.job.domain.params; + +import com.webank.wedatasphere.exchangis.job.exception.ExchangisJobException; +import com.webank.wedatasphere.exchangis.job.exception.ExchangisJobExceptionCode; + +import java.util.Objects; +import java.util.function.BiFunction; + +public class DefaultJobParam implements JobParam { + private String key; + + private T value; + + private BiFunction valueLoader; + + private Object sourceReference = null; + + private boolean isTemp = false; + + private Class sourceType = Object.class; + + public DefaultJobParam(){ + + } + + DefaultJobParam(String key, BiFunction valueLoader){ + this.key = key; + setValueLoader(valueLoader); + 
}
+
+    @Override
+    public String getStrKey() {
+        return key;
+    }
+
+    @Override
+    public T getValue() {
+        return value;
+    }
+
+    @Override
+    public T getValue(Object source) {
+        if (Objects.nonNull(source)) {
+            if (!Objects.equals(sourceReference, source) &&
+                    Objects.nonNull(valueLoader) &&
+                    sourceType.isAssignableFrom(source.getClass())) {
+                try {
+                    this.value = this.valueLoader.apply(key, source);
+                } catch (Exception e){
+                    throw new ExchangisJobException.Runtime(ExchangisJobExceptionCode.TASK_PARM_ERROR.getCode(),
+                            "Exception in loading param: [" + key + "]", e);
+                }
+                this.sourceReference = source;
+            }
+        }
+        return value;
+    }
+
+    @Override
+    public JobParam<T> loadValue(Object source){
+        if (Objects.nonNull(source) &&
+                Objects.nonNull(valueLoader) &&
+                sourceType.isAssignableFrom(source.getClass())){
+            try {
+                this.value = this.valueLoader.apply(key, source);
+            } catch (Exception e){
+                throw new ExchangisJobException.Runtime(ExchangisJobExceptionCode.TASK_PARM_ERROR.getCode(),
+                        "Exception in loading param: [" + key + "]", e);
+            }
+            this.sourceReference = source;
+        }
+        return this;
+    }
+
+    @Override
+    public void setKey(String key) {
+        this.key = key;
+    }
+
+    @Override
+    public void setValue(T value) {
+        this.value = value;
+    }
+
+    @Override
+    @SuppressWarnings("unchecked")
+    public <U> void setValueLoader(BiFunction<String, U, T> valueLoader) {
+        this.valueLoader = (BiFunction) valueLoader;
+    }
+
+    @Override
+    public boolean isTemp() {
+        return isTemp;
+    }
+
+    @Override
+    public void setTemp(boolean isTemp) {
+        this.isTemp = isTemp;
+    }
+}
diff --git a/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/domain/params/JobParam.java b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/domain/params/JobParam.java
new file mode 100644
index 000000000..52850007f
--- /dev/null
+++ b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/domain/params/JobParam.java
@@ -0,0 +1,60 @@
+package com.webank.wedatasphere.exchangis.job.domain.params;
+
+import java.util.function.BiFunction;
+
+/**
+ * Define param entity
+ * @param <T> value type
+ */
+public interface JobParam<T> {
+
+    /**
+     * Key of parameter, always string
+     * @return nullable
+     */
+    String getStrKey();
+
+    /**
+     * Value of parameter
+     * @return nullable
+     */
+    T getValue();
+
+    /**
+     * Get value of parameter from the source object
+     * @param source load source
+     * @return nullable
+     */
+    T getValue(Object source);
+
+    /**
+     * Set string key
+     * @param key nullable
+     */
+    void setKey(String key);
+
+    /**
+     * Set value
+     * @param value nullable
+     */
+    void setValue(T value);
+
+    JobParam<T> loadValue(Object source);
+
+    /**
+     * Value loader
+     * @param valueLoader loader function
+     * @param <U> source type
+     */
+    <U> void setValueLoader(BiFunction<String, U, T> valueLoader);
+
+    /**
+     * Is temporary
+     * @return whether the param is temporary
+     */
+    default boolean isTemp(){
+        return false;
+    }
+
+    default void setTemp(boolean isTemp){
+        //Empty
+    }
+}
diff --git a/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/domain/params/JobParamDefine.java b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/domain/params/JobParamDefine.java
new file mode 100644
index 000000000..341b4bf2e
--- /dev/null
+++ b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/domain/params/JobParamDefine.java
@@ -0,0 +1,74 @@
+package com.webank.wedatasphere.exchangis.job.domain.params;
+
+
+import java.util.Objects;
+import java.util.function.BiFunction;
+import
java.util.function.Function; + +/** + * Definition of job params + * @param + */ +public class JobParamDefine{ + + public static ThreadLocal defaultParam = new ThreadLocal<>(); + + private String key; + + private BiFunction valueLoader; + + @SuppressWarnings("unchecked") + JobParamDefine(String key, BiFunction valueLoader){ + this.key = key; + this.valueLoader = (BiFunction)valueLoader; + } + JobParamDefine(String key, Function valueLoader){ + this.key = key; + this.valueLoader = (s, paramSet) -> valueLoader.apply((JobParamSet) paramSet); + } + + @SuppressWarnings("unchecked") + JobParamDefine(String key, Function valueLoader, Class clazz){ + this.key = key; + this.valueLoader = (s, paramSet) -> valueLoader.apply((U) paramSet); + } + + public String getKey() { + return key; + } + + public BiFunction getValueLoader() { + return valueLoader; + } + + /** + * New one param + * @param source source + * @return + */ + public JobParam newParam(Object source){ + JobParam jobParam = new DefaultJobParam<>(key, valueLoader); + return jobParam.loadValue(source); + } + + public T newValue(Object source){ + return newParam(source).getValue(); + } + /** + * Get param from source (if param has been exist,it will not invoke the loadValue method) + * @param source source + * @return + */ + public JobParam get(Object source){ + JobParamSet paramSet = defaultParam.get(); + if (Objects.nonNull(paramSet)){ + return paramSet.load(this, source); + } + return newParam(source); + } + + public T getValue(Object source){ + return get(source).getValue(); + } + +} diff --git a/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/domain/params/JobParamSet.java b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/domain/params/JobParamSet.java new file mode 100644 index 000000000..6d7480251 --- /dev/null +++ b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/domain/params/JobParamSet.java @@ -0,0 +1,123 @@ +package com.webank.wedatasphere.exchangis.job.domain.params; + +import java.util.*; +import java.util.concurrent.ConcurrentHashMap; +import java.util.stream.Collectors; + +/** + * Collection of JobParam + */ +public class JobParamSet { + + /** + * Param set + */ + private Map> jobParamStore = new ConcurrentHashMap<>(); + + + public JobParamSet add(JobParam jobParam){ + jobParamStore.put(jobParam.getStrKey(), jobParam); + return this; + } + + + public JobParamSet add(JobParamDefine jobParamDefine){ + return add(prepare(jobParamDefine, this)); + } + + public JobParamSet addNonNull(JobParamDefine jobParamDefine){ + JobParam prepared = prepare(jobParamDefine, this); + if(Objects.nonNull(prepared.getValue())){ + return add(prepared); + } + return null; + } + + public JobParamSet addNonNull(JobParam jobParam){ + if (Objects.nonNull(jobParam.getValue())) { + jobParamStore.put(jobParam.getStrKey(), jobParam); + } + return this; + } + /** + * Append + * @param key custom key + * @param jobParam job parameter + */ + public JobParamSet add(String key, JobParam jobParam){ + jobParamStore.put(key, jobParam); + return this; + } + + public JobParamSet add(String key, JobParamDefine jobParamDefine){ + return add(key, prepare(jobParamDefine, this)); + } + + public JobParam load(JobParamDefine jobParamDefine){ + return load(jobParamDefine, this); + } + + @SuppressWarnings("unchecked") + public JobParam load(JobParamDefine jobParamDefine, Object source){ + // Avoid the deadlock problem in nested call, we should not use 
compute/computeIfAbsent method
+        JobParam<?> jobParam = this.jobParamStore.get(jobParamDefine.getKey());
+        if (Objects.isNull(jobParam)){
+            jobParam = prepare(jobParamDefine, source);
+            this.jobParamStore.put(jobParamDefine.getKey(), jobParam);
+        }
+        return (JobParam<T>) jobParam;
+    }
+
+    public JobParamSet combine(JobParamSet paramSet){
+        Map<String, JobParam<?>> other = paramSet.jobParamStore;
+        this.jobParamStore.putAll(other);
+        return this;
+    }
+
+    /**
+     * Get
+     * @param key param key
+     * @return param entity
+     */
+    @SuppressWarnings("unchecked")
+    public <T> JobParam<T> get(String key){
+        return (JobParam<T>) jobParamStore.get(key);
+    }
+
+    @SuppressWarnings("unchecked")
+    public <T> JobParam<T> get(String key, Class<T> type){
+        return (JobParam<T>) jobParamStore.get(key);
+    }
+
+    /**
+     * Remove
+     * @param key param key
+     * @return removed param entity
+     */
+    @SuppressWarnings("unchecked")
+    public <T> JobParam<T> remove(String key){
+        return (JobParam<T>) jobParamStore.remove(key);
+    }
+
+    /**
+     * To list
+     * @return param list
+     */
+    public List<JobParam<?>> toList(){
+        return new ArrayList<>(jobParamStore.values());
+    }
+
+    public List<JobParam<?>> toList(boolean isTemp){
+        return jobParamStore.values().stream().filter(param -> isTemp == param.isTemp()).collect(Collectors.toList());
+    }
+
+    /**
+     * New param from definition
+     * @param jobParam param definition
+     * @param source load source
+     */
+    private <T> JobParam<T> prepare(JobParamDefine<T> jobParam, Object source){
+        return jobParam.newParam(source);
+    }
+}
diff --git a/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/domain/params/JobParams.java b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/domain/params/JobParams.java
new file mode 100644
index 000000000..a19256c7c
--- /dev/null
+++ b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/domain/params/JobParams.java
@@ -0,0 +1,129 @@
+package com.webank.wedatasphere.exchangis.job.domain.params;
+
+
+import java.util.Map;
+import java.util.Objects;
+import java.util.function.BiFunction;
+import java.util.function.Function;
+import java.util.function.Supplier;
+
+/**
+ * Utility class
+ */
+public class JobParams {
+
+    /**
+     * Define default job param
+     * @param key key
+     * @param valueLoader value loader
+     * @param <T> value type
+     * @param <U> source type
+     * @return param definition
+     */
+    public static <T, U> JobParamDefine<T> define(String key, BiFunction<String, U, T> valueLoader){
+        return new JobParamDefine<>(key, valueLoader);
+    }
+
+    public static <T> JobParamDefine<T> define(String key, Function<JobParamSet, T> valueLoader){
+        return new JobParamDefine<>(key, valueLoader);
+    }
+
+    public static <T, U> JobParamDefine<T> define(String key, Function<U, T> valueLoader, Class<U> type){
+        return new JobParamDefine<>(key, valueLoader, type);
+    }
+
+    @SuppressWarnings({"unchecked", "rawtypes"})
+    public static <U> JobParamDefine<U> define(String key){
+        return new JobParamDefine<>(key, (paramKey, source) -> {
+            if (Objects.nonNull(source)){
+                if (source instanceof JobParamSet) {
+                    JobParam result = ((JobParamSet) source).get(key);
+                    return Objects.nonNull(result) ? (U) result.getValue() : null;
+                } else if (source instanceof Map){
+                    return (U) ((Map) source).get(key);
+                }
+            }
+            return null;
+        });
+    }
+
+
+    /**
+     * Use default value loader: (string, JobParamSet) -> ?
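A usage sketch for the param framework above (illustrative, not part of the patch). A definition resolves lazily against a source, either a Map or a JobParamSet, and resolving through a JobParamSet stores the param under its key for reuse.

    // Define once, resolve against a plain map source
    JobParamDefine<String> userParam = JobParams.define("exec.user");
    Map<String, Object> source = new HashMap<>();
    source.put("exec.user", "hadoop");
    String user = userParam.newValue(source); // "hadoop"

    // Resolving through a JobParamSet caches the resolved param
    JobParamSet paramSet = new JobParamSet();
    JobParam<String> cached = paramSet.load(userParam, source);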
+ * @param key key + * @param mappingKey mapping key + * @return + */ + public static JobParamDefine define(String key, String mappingKey){ + return define(key, new String[]{mappingKey}, result-> result, (Class)null); + } + + public static JobParamDefine define(String key, String mappingKey, Function transform, Class inputType){ + return define(key, new String[]{mappingKey}, transform, inputType); + } + + @SuppressWarnings({"unchecked", "rawtypes"}) + private static JobParamDefine define(String key, String[] mappingKeys, Function transform, Class inputType){ + return new JobParamDefine<>(key, (paramKey, source) -> { + if (Objects.nonNull(source)) { + if (source instanceof JobParamSet) { + for (String mappingKey : mappingKeys) { + JobParam result = ((JobParamSet) source).remove(mappingKey); + if (Objects.nonNull(result)) { + return transform.apply((U)result.getValue()); + } + } + return null; + } else if (source instanceof Map) { + for (String mappingKey : mappingKeys) { + Object result = ((Map) source).remove(mappingKey); + if (Objects.nonNull(result)) { + return transform.apply((U)result); + } + } + return null; + } + } + return null; + }); + } + + /** + * Use operator instead of value loader + * @param key + * @param operator + * @param + * @return + */ + @SuppressWarnings({"unchecked", "rawtypes"}) + public static JobParamDefine define(String key, Supplier operator){ + return new JobParamDefine<>(key, (paramKey, source) -> { + T finalValue = null; + if (Objects.nonNull(source)) { + if (source instanceof JobParamSet) { + JobParam result = ((JobParamSet) source).get(key); + if (Objects.nonNull(result)){ + finalValue = (T)result.getValue(); + } + } else if (source instanceof Map) { + Object result = ((Map) source).get(key); + if (Objects.nonNull(result)){ + return (T)result; + } + } + } + return Objects.nonNull(finalValue) ? 
finalValue : operator.get(); + }); + } + + public static JobParam newOne(String key, T value){ + return newOne(key, value, false); + } + + public static JobParam newOne(String key, T value, boolean isTemp){ + JobParamDefine result = define(key, () -> value); + JobParam param = result.newParam(value); + param.setTemp(isTemp); + return param; + } +} diff --git a/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/enums/EngineTypeEnum.java b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/enums/EngineTypeEnum.java new file mode 100644 index 000000000..a6ccf9f51 --- /dev/null +++ b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/enums/EngineTypeEnum.java @@ -0,0 +1,5 @@ +package com.webank.wedatasphere.exchangis.job.enums; + +public enum EngineTypeEnum { + DATAX, SQOOP, DICTCOPY, NONE +} diff --git a/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/enums/JobTypeEnum.java b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/enums/JobTypeEnum.java new file mode 100644 index 000000000..fcd9b083b --- /dev/null +++ b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/enums/JobTypeEnum.java @@ -0,0 +1,12 @@ +package com.webank.wedatasphere.exchangis.job.enums; + +public enum JobTypeEnum { + /* + 离线同步 + */ + OFFLINE, + /* + 流式同步 + */ + STREAM; +} diff --git a/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/exception/ExchangisJobException.java b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/exception/ExchangisJobException.java new file mode 100644 index 000000000..e770a1ea4 --- /dev/null +++ b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/exception/ExchangisJobException.java @@ -0,0 +1,43 @@ +package com.webank.wedatasphere.exchangis.job.exception; + + +import org.apache.linkis.common.exception.ErrorException; +import org.apache.linkis.common.exception.ExceptionLevel; +import org.apache.linkis.common.exception.LinkisRuntimeException; + +import static com.webank.wedatasphere.exchangis.job.exception.ExchangisJobExceptionCode.JOB_EXCEPTION_CODE; + +/** + * Exchangis Job Exception + */ +public class ExchangisJobException extends ErrorException { + + public ExchangisJobException(int errCode, String desc) { + super(errCode, desc); + } + + public ExchangisJobException(String desc, Throwable t){ + this(JOB_EXCEPTION_CODE.getCode(), desc, t); + } + public ExchangisJobException(int errCode, String desc, Throwable t) { + super(errCode, desc); + this.initCause(t); + } + + public ExchangisJobException(int errCode, String desc, String ip, int port, String serviceKind) { + super(errCode, desc, ip, port, serviceKind); + } + + public static class Runtime extends LinkisRuntimeException { + + public Runtime(int errCode, String desc, Throwable t) { + super(errCode, desc); + super.initCause(t); + } + + @Override + public ExceptionLevel getLevel() { + return ExceptionLevel.ERROR; + } + } +} diff --git a/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/exception/ExchangisJobExceptionCode.java b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/exception/ExchangisJobExceptionCode.java new file mode 100644 index 000000000..fd1609ccd --- /dev/null +++ 
b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/exception/ExchangisJobExceptionCode.java
@@ -0,0 +1,37 @@
+package com.webank.wedatasphere.exchangis.job.exception;
+
+/**
+ * Exception code, range:(31000 ~ 31999), the same as "ExchangisDataSourceExceptionCode"
+ */
+public enum ExchangisJobExceptionCode {
+    RENDER_TRANSFORM_ERROR(31885),
+    METRICS_OP_ERROR(31886),
+    TASK_LAUNCH_NOT_EXIST(31887),
+    TASK_LAUNCH_ERROR(31888),
+    TASK_PARM_ERROR(31889),
+    LOG_OP_ERROR(31990),
+    TASK_EXECUTE_ERROR(31991),
+    TASK_OBSERVER_ERROR(31992),
+    ON_EVENT_ERROR(31993),
+    SCHEDULER_ERROR(31994),
+    JOB_BUILDER_ERROR(31995),
+    UNSUPPORTED_TASK_LAUNCH_ENGINE(31996),
+    TASK_GENERATE_ERROR(31997),
+    JOB_EXCEPTION_CODE(31999),
+    BUILDER_ENGINE_ERROR(31998),
+    BUILDER_TRANSFORM_ERROR(31998),
+    UNSUPPORTED_OPERATION(31999);
+
+    private int code;
+
+    ExchangisJobExceptionCode(int code) {
+        this.code = code;
+    }
+
+    public int getCode() {
+        return code;
+    }
+
+    public void setCode(int code) {
+        this.code = code;
+    }
+}
diff --git a/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/exception/ExchangisOnEventException.java b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/exception/ExchangisOnEventException.java
new file mode 100644
index 000000000..04b3cd9a5
--- /dev/null
+++ b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/exception/ExchangisOnEventException.java
@@ -0,0 +1,14 @@
+package com.webank.wedatasphere.exchangis.job.exception;
+
+import org.apache.linkis.common.exception.ErrorException;
+
+import static com.webank.wedatasphere.exchangis.job.exception.ExchangisJobExceptionCode.ON_EVENT_ERROR;
+/**
+ * Exception happened in listener
+ */
+public class ExchangisOnEventException extends ErrorException {
+    public ExchangisOnEventException(String desc, Throwable t) {
+        super(ON_EVENT_ERROR.getCode(), desc);
+        // Preserve the original cause instead of dropping it
+        this.initCause(t);
+    }
+
+}
diff --git a/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/listener/ExchangisEvent.java b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/listener/ExchangisEvent.java
new file mode 100644
index 000000000..afe9c3af0
--- /dev/null
+++ b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/listener/ExchangisEvent.java
@@ -0,0 +1,17 @@
+package com.webank.wedatasphere.exchangis.job.listener;
+
+
+/**
+ * Event between different modules
+ */
+public interface ExchangisEvent {
+
+    String eventId();
+
+    void setEventId(String eventId);
+
+    long getEventTime();
+
+    void setEventTime(long timestamp);
+
+}
diff --git a/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/listener/ExchangisListener.java b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/listener/ExchangisListener.java
new file mode 100644
index 000000000..983e5443d
--- /dev/null
+++ b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/listener/ExchangisListener.java
@@ -0,0 +1,35 @@
+package com.webank.wedatasphere.exchangis.job.listener;
+
+
+import com.webank.wedatasphere.exchangis.job.exception.ExchangisOnEventException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Listener
+ */
+public interface ExchangisListener<T extends ExchangisEvent> {
+
+    default Logger getLogger(){
+        return LoggerFactory.getLogger(ExchangisListener.class);
+    }
+
+    /**
+     * Listen the event
+     * @param event event
+     */
+    default void onEvent(T event) throws ExchangisOnEventException {
+        getLogger().info("Event: [id: {}, type: {}] in listener [{}]", event.eventId(), event.getClass().getSimpleName(),
+                this.getClass().getSimpleName());
+    }
+
+    default void onAsyncEvent(T event){
+        try {
+            onEvent(event);
+        } catch (Exception e) {
+            if (e instanceof ExchangisOnEventException){
+                getLogger().warn("OnEvent exception: [{}]", e.getMessage(), e);
+            } else {
+                getLogger().warn("Exception occurred in listen event: {} in listener [{}]", event.eventId(), this.getClass().getSimpleName(), e);
+            }
+        }
+    }
+}
diff --git a/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/listener/JobLogListener.java b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/listener/JobLogListener.java
new file mode 100644
index 000000000..15e8d92d3
--- /dev/null
+++ b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/listener/JobLogListener.java
@@ -0,0 +1,12 @@
+package com.webank.wedatasphere.exchangis.job.listener;
+
+
+import com.webank.wedatasphere.exchangis.job.listener.events.JobLogEvent;
+
+/**
+ * Job log listener for JobLogEvent
+ */
+public interface JobLogListener extends ExchangisListener<JobLogEvent> {
+
+
+}
diff --git a/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/listener/events/JobLogEvent.java b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/listener/events/JobLogEvent.java
new file mode 100644
index 000000000..61b2669f0
--- /dev/null
+++ b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/listener/events/JobLogEvent.java
@@ -0,0 +1,96 @@
+package com.webank.wedatasphere.exchangis.job.listener.events;
+
+import com.webank.wedatasphere.exchangis.job.listener.ExchangisEvent;
+import org.apache.commons.lang.StringUtils;
+
+import java.util.Objects;
+
+public class JobLogEvent implements ExchangisEvent {
+
+    public enum Level{
+        TRACE, INFO, WARN, ERROR
+    }
+
+    private String jobExecutionId;
+
+    private String message;
+
+    private Level level = Level.INFO;
+
+    private Object[] args;
+
+    private Throwable throwable;
+
+    private long eventTime;
+
+    private String tenancy = "default";
+
+    public JobLogEvent(Level level, String tenancy, String jobExecutionId, String message, Object... args){
+        if (StringUtils.isNotBlank(tenancy)){
+            this.tenancy = tenancy;
+        }
+        this.jobExecutionId = jobExecutionId;
+        this.message = message;
+        this.eventTime = System.currentTimeMillis();
+        if (Objects.nonNull(level)){
+            this.level = level;
+        }
+        if (args != null && args.length > 0){
+            Object lastEntry = args[args.length - 1];
+            if (lastEntry instanceof Throwable){
+                this.throwable = (Throwable) lastEntry;
+                this.level = Level.ERROR;
+            }
+            this.args = args;
+        }
+    }
+
+    public JobLogEvent(String tenancy, String jobExecutionId, String message, Object... args){
+        this(Level.INFO, tenancy, jobExecutionId, message, args);
+    }
+
+    public JobLogEvent(String jobExecutionId, String message, Object...
args){ + this(Level.INFO, null, jobExecutionId, message, args); + } + @Override + public String eventId() { + return jobExecutionId; + } + + @Override + public void setEventId(String eventId) { + //null + } + + @Override + public long getEventTime() { + return this.eventTime; + } + + @Override + public void setEventTime(long timestamp) { + this.eventTime = timestamp; + } + + public String getJobExecutionId() { + return jobExecutionId; + } + + public String getMessage() { + return message; + } + + public Object[] getArgs() { + return args; + } + + public Throwable getThrowable() { + return throwable; + } + + public String getTenancy() { + return tenancy; + } + + public Level getLevel() { + return level; + } +} diff --git a/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/log/LogQuery.java b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/log/LogQuery.java new file mode 100644 index 000000000..de19116c9 --- /dev/null +++ b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/log/LogQuery.java @@ -0,0 +1,105 @@ +package com.webank.wedatasphere.exchangis.job.log; + +import org.apache.commons.lang.StringUtils; + +import java.util.Arrays; +import java.util.Collections; +import java.util.List; + +/** + * Query task Log + */ +public class LogQuery { + + private int fromLine = 1; + + private int pageSize = 100; + + private String ignoreKeywords; + + private String onlyKeywords; + + private Integer lastRows; + + /** + * Reverse the reader + */ + private boolean enableTail; + + public LogQuery(){ + + } + + public LogQuery(int fromLine, int pageSize){ + this.fromLine = fromLine; + this.pageSize = pageSize; + } + public LogQuery(int fromLine, int pageSize, String ignoreKeywords, String onlyKeywords, Integer lastRows){ + this.fromLine = fromLine; + this.pageSize = pageSize; + this.ignoreKeywords = ignoreKeywords; + this.onlyKeywords = onlyKeywords; + this.lastRows = lastRows; + } + public Integer getLastRows() { + return lastRows; + } + + public void setLastRows(Integer lastRows) { + this.lastRows = lastRows; + } + + public int getFromLine() { + return fromLine; + } + + public void setFromLine(int fromLine) { + this.fromLine = fromLine; + } + + public int getPageSize() { + return pageSize; + } + + public void setPageSize(int pageSize) { + this.pageSize = pageSize; + } + + public String getIgnoreKeywords() { + return ignoreKeywords; + } + + public List getIgnoreKeywordsList(){ + if (StringUtils.isNotBlank(this.ignoreKeywords)){ + return Arrays.asList(this.ignoreKeywords.split(",")); + } + return Collections.emptyList(); + } + public void setIgnoreKeywords(String ignoreKeywords) { + this.ignoreKeywords = ignoreKeywords; + } + + public String getOnlyKeywords() { + return onlyKeywords; + } + + public List getOnlyKeywordsList(){ + if (StringUtils.isNotBlank(this.onlyKeywords)){ + return Arrays.asList(this.onlyKeywords.split(",")); + } + return Collections.emptyList(); + } + + public void setOnlyKeywords(String onlyKeywords) { + this.onlyKeywords = onlyKeywords; + } + + + public boolean isEnableTail() { + return enableTail; + } + + public void setEnableTail(boolean enableTail) { + this.enableTail = enableTail; + } +} diff --git a/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/log/LogResult.java b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/log/LogResult.java new file mode 100644 index 000000000..e65192459 --- /dev/null 
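A short usage sketch for LogQuery above (illustrative, not part of the patch): page through task logs while filtering by keyword, optionally reading from the tail of the file.

    // Fetch up to 100 lines starting at line 1, keep only ERROR/WARN lines
    LogQuery query = new LogQuery(1, 100);
    query.setOnlyKeywords("ERROR,WARN");
    query.setEnableTail(true); // reverse the reader, i.e. read from the tail
    List<String> onlyKeywords = query.getOnlyKeywordsList(); // ["ERROR", "WARN"]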
+++ b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/log/LogResult.java @@ -0,0 +1,55 @@ +package com.webank.wedatasphere.exchangis.job.log; + +import java.util.ArrayList; +import java.util.List; + +/** + * Task log + */ +public class LogResult { + + /** + * End line + */ + private int endLine; + + /** + * If is end + */ + private boolean isEnd; + + /** + * Log content + */ + private List logs = new ArrayList<>(); + + public LogResult(int endLine, boolean isEnd, List logs){ + this.endLine = endLine; + this.isEnd = isEnd; + this.logs = logs; + } + + public int getEndLine() { + return endLine; + } + + public void setEndLine(int endLine) { + this.endLine = endLine; + } + + public boolean isEnd() { + return isEnd; + } + + public void setEnd(boolean end) { + isEnd = end; + } + + public List getLogs() { + return logs; + } + + public void setLogs(List logs) { + this.logs = logs; + } +} diff --git a/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/utils/ColumnDefineUtils.java b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/utils/ColumnDefineUtils.java new file mode 100644 index 000000000..6c440f320 --- /dev/null +++ b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/utils/ColumnDefineUtils.java @@ -0,0 +1,40 @@ +package com.webank.wedatasphere.exchangis.job.utils; + +import com.webank.wedatasphere.exchangis.job.domain.SubExchangisJob; +import org.apache.commons.lang3.StringUtils; + +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +/** + * Utils to data column + */ +public class ColumnDefineUtils { + /** + * Pattern of decimal column + */ + public static final Pattern DECIMAL_PATTERN = Pattern.compile("^decimal[((](\\d+)[^,]*?,[^,]*?(\\d+)[))]$"); + + /** + * Get data column + * @param name column name + * @param type column type + * @param index index + * @return data column + */ + public static SubExchangisJob.ColumnDefine getColumn(String name, String type, Integer index){ + if (StringUtils.isNotBlank(type)) { + Matcher decimalMatch = DECIMAL_PATTERN.matcher(type.toLowerCase()); + if (decimalMatch.matches()) { + int precision = Integer.parseInt(decimalMatch.group(1)); + int scale = Integer.parseInt(decimalMatch.group(2)); + return new SubExchangisJob.DecimalColumnDefine(name, type, index, precision, scale); + } + } + return new SubExchangisJob.ColumnDefine(name, type, index); + } + + public static SubExchangisJob.ColumnDefine getColumn(String name, String type){ + return getColumn(name, type, null); + } +} diff --git a/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/utils/LabelConvertUtils.java b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/utils/LabelConvertUtils.java new file mode 100644 index 000000000..c781122dc --- /dev/null +++ b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/utils/LabelConvertUtils.java @@ -0,0 +1,49 @@ +package com.webank.wedatasphere.exchangis.job.utils; + +import com.webank.wedatasphere.exchangis.job.constraints.LabelSerializeConstraints; +import org.apache.commons.lang.StringUtils; +import org.apache.linkis.manager.label.entity.SerializableLabel; + +import java.util.HashMap; +import java.util.Map; +import java.util.stream.Collectors; + +/** + * Label converter + */ +public class LabelConvertUtils { + + /** + * Convert labels to string + * @param labels Map of label entities + 
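* @return serialized label string, kv pairs joined with the LabelSerializeConstraints symbols +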
*/ + public static String labelsToString(Map labels){ + return labels.entrySet().stream().map( entry -> { + String labelKey = entry.getKey(); + Object entryValue = entry.getValue(); + return labelKey + LabelSerializeConstraints.LABEL_KV_COMBINE_SYMBOL + + (entryValue instanceof SerializableLabel? ((SerializableLabel)entryValue).getStringValue() : entryValue.toString()); + }).collect(Collectors.joining(LabelSerializeConstraints.LABEL_ENTITY_SPLITTER_SYMBOL)); + } + + /** + * Convert string to label map + * @param labelStr label string + * @return label map + */ + public static Map stringToLabelMap(String labelStr){ + if (StringUtils.isNotBlank(labelStr)) { + Map candidateLabels = new HashMap<>(); + String[] labelEntities = labelStr.split(LabelSerializeConstraints.LABEL_ENTITY_SPLITTER_SYMBOL); + for (String labelEntity : labelEntities) { + String[] kvContent = labelEntity.split(LabelSerializeConstraints.LABEL_KV_COMBINE_SYMBOL); + String key = kvContent[0]; + String value = kvContent.length > 1 ? kvContent[1] : null; + candidateLabels.put(key, value); + } + return candidateLabels; + } + return null; + } + +} diff --git a/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/utils/MemUtils.java b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/utils/MemUtils.java new file mode 100644 index 000000000..5c3af8b64 --- /dev/null +++ b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/utils/MemUtils.java @@ -0,0 +1,234 @@ +package com.webank.wedatasphere.exchangis.job.utils; + +import org.apache.commons.lang.StringUtils; + +import java.util.HashMap; +import java.util.Map; + +/** + * Mem utils + */ +public class MemUtils { + private static final Map UNIT_MAP = new HashMap<>(); + static{ + UNIT_MAP.put("G", StoreUnit.GB); + UNIT_MAP.put("GB", StoreUnit.GB); + UNIT_MAP.put("B", StoreUnit.B); + UNIT_MAP.put("M", StoreUnit.MB); + UNIT_MAP.put("MB", StoreUnit.MB); + UNIT_MAP.put("K", StoreUnit.KB); + UNIT_MAP.put("KB", StoreUnit.KB); + } + public static long convertToGB(long size, String unitFlag){ + if(size < 0){ + return -1L; + } + if(StringUtils.isNotBlank(unitFlag)){ + StoreUnit storeUnit = UNIT_MAP.get(unitFlag.trim().toUpperCase()); + if(null != storeUnit){ + return storeUnit.toGB(size); + } + } + return -1L; + } + + public static long convertToMB(long size, String unitFlag){ + if(size < 0){ + return -1L; + } + if(StringUtils.isNotBlank(unitFlag)){ + StoreUnit storeUnit = UNIT_MAP.get(unitFlag.trim().toUpperCase()); + if(null != storeUnit){ + return storeUnit.toMB(size); + } + } + return -1L; + } + + public static long convertToByte(long size, String unitFlag){ + if(size < 0){ + return -1L; + } + if(StringUtils.isNotBlank(unitFlag)){ + StoreUnit storeUnit = UNIT_MAP.get(unitFlag.trim().toUpperCase()); + if(null != storeUnit){ + return storeUnit.toB(size); + } + } + return -1L; + } + public enum StoreUnit { + /** + * byte + */ + B { + @Override + public long toB(long s){ + return s; + } + + @Override + public long toKB(long s){ + return s/(C1/C0); + } + + @Override + public long toMB(long s) { + return s/(C2/C0); + } + + @Override + public long toGB(long s) { + return s/(C3/C0); + } + + @Override + public long toTB(long s) { + return s/(C4/C0); + } + }, + /** + * kb + */ + KB{ + @Override + public long toB(long s){ + return x(s, C1/C0, Long.MAX_VALUE/(C1/C0)); + } + + @Override + public long toKB(long s){ + return s; + } + + @Override + public long toMB(long s) { + return s/(C2/C1); + } + + 
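// Converting to a larger unit divides by the ratio of the byte-size constants; converting to a smaller unit multiplies through x(...), which saturates at Long.MAX_VALUE/Long.MIN_VALUE on overflow. +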
@Override + public long toGB(long s) { + return s/(C3/C1); + } + + @Override + public long toTB(long s) { + return s/(C4/C1); + } + }, + MB{ + @Override + public long toB(long s){ + return x(s, C2/C0, Long.MAX_VALUE/(C2/C0)); + } + + @Override + public long toKB(long s){ + return x(s, C2/C1, Long.MAX_VALUE/(C2/C1)); + } + + @Override + public long toMB(long s) { + return s; + } + + @Override + public long toGB(long s) { + return s/(C3/C2); + } + + @Override + public long toTB(long s) { + return s/(C4/C2); + } + }, + GB{ + @Override + public long toB(long s){ + return x(s, C3/C0, Long.MAX_VALUE/(C3/C0)); + } + + @Override + public long toKB(long s){ + return x(s, C3/C1, Long.MAX_VALUE/(C3/C1)); + } + + @Override + public long toMB(long s) { + return x(s, C3/C2, Long.MAX_VALUE/(C3/C2)); + } + + @Override + public long toGB(long s) { + return s; + } + + @Override + public long toTB(long s) { + return s/(C4/C3); + } + }, + TB{ + @Override + public long toB(long s){ + return x(s, C4/C0, Long.MAX_VALUE/(C4/C0)); + } + + @Override + public long toKB(long s){ + return x(s, C4/C1, Long.MAX_VALUE/(C4/C1)); + } + + @Override + public long toMB(long s) { + return x(s, C4/C2, Long.MAX_VALUE/(C4/C2)); + } + + @Override + public long toGB(long s) { + return x(s, C4/C3, Long.MAX_VALUE/(C4/C3)); + } + + @Override + public long toTB(long s) { + return s; + } + }; + + public long toB(long s){ + throw new AbstractMethodError(); + } + + public long toKB(long s){ + throw new AbstractMethodError(); + } + + public long toMB(long s){ + throw new AbstractMethodError(); + } + + public long toGB(long s){ + throw new AbstractMethodError(); + } + + public long toTB(long s){ + throw new AbstractMethodError(); + } + } + + static long x(long d, long m, long over){ + if(d > over){ + return Long.MAX_VALUE; + } + if(d < -over){ + return Long.MIN_VALUE; + } + return d * m; + } + static final long C0 = 1L; + static final long C1 = C0 * 1024L; + static final long C2 = C1 * 1024L; + static final long C3 = C2 * 1024L; + static final long C4 = C3 * 1024L; + +} \ No newline at end of file diff --git a/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/utils/SnowFlake.java b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/utils/SnowFlake.java new file mode 100644 index 000000000..b16987e4c --- /dev/null +++ b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/utils/SnowFlake.java @@ -0,0 +1,86 @@ +package com.webank.wedatasphere.exchangis.job.utils; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Snowflake id generator + */ +public class SnowFlake { + private static final Logger logger = LoggerFactory.getLogger(SnowFlake.class); + + private static long workerIdBits = 5L; + private static long dataCenterIdBits = 5L; + + /** + * Counter + */ + private static long sequenceBits = 12L; + /** + * Shift left + */ + private static long workerIdShift = sequenceBits; + private static long dataCenterIdShift = sequenceBits + workerIdBits; + private static long timestampLeftShift = sequenceBits + workerIdBits + dataCenterIdBits; + private static long sequenceMask = -1L ^ (-1L << sequenceBits); + private static long maxWorkerId = -1L ^ (-1L << workerIdBits); + private static long maxDataCenterId = -1L ^ (-1L << dataCenterIdBits); + private long lastTimeStamp = -1L; + + private long workerId; + private long dataCenterId; + private long sequence=0L; + + private long startTime = 1238434978657L;
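+ /** + * Id layout (64 bits, high to low): (timestamp - startTime) | dataCenterId (5 bits) | workerId (5 bits) | sequence (12 bits); + * hence timestampLeftShift = 22, dataCenterIdShift = 17, workerIdShift = 12, and at most 4096 ids per millisecond per worker. + */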
+ public SnowFlake(long dataCenterId, long workerId, long startTime){ + //check + if(workerId > maxWorkerId || workerId < 0){ + throw new IllegalArgumentException("worker Id can't be greater than " + maxWorkerId + " or less than 0"); + } + if(dataCenterId > maxDataCenterId || dataCenterId < 0){ + throw new IllegalArgumentException("dataCenter Id can't be greater than " + maxDataCenterId + " or less than 0"); + } + this.workerId = workerId; + this.startTime = startTime; + this.dataCenterId = dataCenterId; + } + + public static long generateId(long timestamp, long startTime , long dataCenterId, long workerId){ + return ((timestamp - startTime) << timestampLeftShift) | (dataCenterId << dataCenterIdShift) | (workerId << workerIdShift); + } + + public static long generateId(long timestamp, long startTime){ + return ((timestamp - startTime) << timestampLeftShift) | (maxDataCenterId << dataCenterIdShift) | (maxWorkerId << workerIdShift); + } + /** + * Generate the next id (synchronized, so one instance can be shared between threads) + */ + public synchronized long nextId(){ + long timestamp = timeGen(); + if(timestamp < lastTimeStamp){ + // Refuse to generate ids while the clock is behind, otherwise duplicated ids could be produced + logger.warn("Clock is moving backwards. Rejecting requests until " + lastTimeStamp); + throw new IllegalStateException("Clock moved backwards, refusing to generate id for " + (lastTimeStamp - timestamp) + " milliseconds"); + } + if(lastTimeStamp == timestamp){ + sequence = (sequence + 1) & sequenceMask; + if(sequence == 0){ + timestamp = tillNextMills(lastTimeStamp); + } + }else{ + sequence = 0L; + } + lastTimeStamp = timestamp; + return ((timestamp - startTime) << timestampLeftShift)|(dataCenterId << dataCenterIdShift) | (workerId << workerIdShift) | sequence; + } + + private long tillNextMills(long lastTimeStamp){ + long timestamp = timeGen(); + while(timestamp <= lastTimeStamp){ + timestamp = timeGen(); + } + return timestamp; + } + private long timeGen(){ + return System.currentTimeMillis(); + } + +} diff --git a/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/utils/TypeGenericUtils.java b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/utils/TypeGenericUtils.java new file mode 100644 index 000000000..801079d0d --- /dev/null +++ b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/utils/TypeGenericUtils.java @@ -0,0 +1,56 @@ +package com.webank.wedatasphere.exchangis.job.utils; + +import java.lang.reflect.ParameterizedType; +import java.lang.reflect.Type; +import java.lang.reflect.TypeVariable; +import java.util.*; + +/** + * Utils for resolving actual generic types + */ +public class TypeGenericUtils { + + /** + * Resolve the actual type argument declared at the given position of a generic superclass + * @param genericClass concrete class to start from + * @param abstractClass generic superclass whose type argument should be resolved + * @param position position of the type argument + * @return the actual class, or null if it cannot be resolved + */ + public static Class<?> getActualTypeFormGenericClass(Class<?> genericClass, Class<?> abstractClass, + int position){ + Map<String, Type> typeVariableReflect = new HashMap<>(); + Map<Class<?>, Type[]> classTypeVariableMap = new HashMap<>(); + Queue<Class<?>> traverseQueue = new LinkedList<>(); + Type[] classTypes = null; + Class<?> currentClass; + traverseQueue.offer(genericClass); + while (!traverseQueue.isEmpty()) { + currentClass = traverseQueue.poll(); + Type[] typeParameters = currentClass.getTypeParameters(); + if (typeParameters.length > 0) { + classTypes = classTypeVariableMap.get(currentClass); + //Ignore the builder which has the parameterType (not resolved) + if (null == classTypes) { + return null; + } + for (int i = 0; i < classTypes.length; i++) { + typeVariableReflect.put(typeParameters[i].getTypeName(), classTypes[i]); + } + } + if (Objects.equals(currentClass, abstractClass)){ + break; + } + //Just traverse the superclass ignore interfaces + Type superclassType = currentClass.getGenericSuperclass(); + if (Objects.nonNull(superclassType) && superclassType instanceof ParameterizedType) { + Type[] actualTypes =
((ParameterizedType) superclassType).getActualTypeArguments(); + for (int i = 0 ; i < actualTypes.length; i++){ + Type actualType = actualTypes[i]; + if (actualType instanceof TypeVariable){ + actualTypes[i] = typeVariableReflect.getOrDefault(actualType.getTypeName(), actualType); + } + } + classTypeVariableMap.put(currentClass.getSuperclass(), actualTypes); + } + Optional.ofNullable(currentClass.getSuperclass()).ifPresent(traverseQueue::offer); + } + if (Objects.nonNull(classTypes) && classTypes.length > position){ + return (Class)classTypes[position]; + } + return null; + } +} diff --git a/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/vo/ExchangisJobQueryVo.java b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/vo/ExchangisJobQueryVo.java new file mode 100644 index 000000000..3806e8c44 --- /dev/null +++ b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/vo/ExchangisJobQueryVo.java @@ -0,0 +1,26 @@ +package com.webank.wedatasphere.exchangis.job.vo; + +import com.webank.wedatasphere.exchangis.job.domain.ExchangisJobPageQuery; + +public class ExchangisJobQueryVo extends ExchangisJobPageQuery { + + private static final Integer defaultCurrentPage = 1; + + private static final Integer defaultPageSize = 10; + + public ExchangisJobQueryVo(){ + } + + public ExchangisJobQueryVo(Long projectId, String jobType, String name) { + this(projectId, jobType, name, defaultCurrentPage, defaultPageSize); + } + + public ExchangisJobQueryVo(Long projectId, String jobType, + String name, Integer current, Integer size){ + this.projectId = projectId; + this.jobType = jobType; + this.jobName = name; + this.current = current; + this.size = size; + } +} diff --git a/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/vo/ExchangisJobVo.java b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/vo/ExchangisJobVo.java new file mode 100644 index 000000000..bfbf1d957 --- /dev/null +++ b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/vo/ExchangisJobVo.java @@ -0,0 +1,285 @@ +package com.webank.wedatasphere.exchangis.job.vo; + + + +import com.webank.wedatasphere.exchangis.common.validator.groups.InsertGroup; +import com.webank.wedatasphere.exchangis.job.domain.ExchangisJobEntity; +import org.hibernate.validator.constraints.NotBlank; + +import javax.validation.constraints.NotNull; +import javax.validation.constraints.Size; +import java.util.*; + +/** + * + */ +//@JsonInclude(JsonInclude.Include.NON_EMPTY) +public class ExchangisJobVo { + + /** + * Job id + */ + private Long id; + + /** + * Project id + */ + private Long projectId; + + /** + * Job type + */ + @NotBlank(message = "Job type cannot be null (任务类型不能为空)") + @Size(max = 50, message= "Length of job type should be less than 50 (任务类型长度不超过50)") + private String jobType; + + /** + * Engine type + */ + @NotBlank(message = "Engine type cannot be null (引擎类型不能为空)") + @Size(max = 50, message = "Length of engine type should be less than 50 (引擎类型长度不超过50)") + private String engineType; + + /** + * Job labels + */ + @Size(max = 200, message = "Length of labels should be less than 200 (标签长度不能超过200)") + private String jobLabels; + + /** + * Job name + */ + @Size(max = 100, message = "Length of name should be less than 100 (名称长度不超过100)") + @NotBlank(message = "Job name cannot be null (任务名不能为空)") + private String jobName; + + /** + * Job desc + */ + @Size(max 
= 200, message = "Length of desc should be less than 200 (描述长度不超过200)") + private String jobDesc; + + /** + * Content + */ + private String content; + + /** + * Execute user + */ + //@JsonProperty("proxyUser") + private String executeUser; + + /** + * Execute node + */ + @Deprecated + private String executeNode; + + /** + * Job params + */ + private String jobParams; + + /** + * Create time + */ + private Date createTime; + + /** + * Create user + */ + private String createUser; + + /** + * Modify time (last_update_time) + */ + private Date modifyTime; + + /** + * Modify user + */ + private String modifyUser; + + /** + * Source map + */ + private Map source = new HashMap(); + + private Map labels; + + private String proxyUser; + + public ExchangisJobVo(){ + + } + + public ExchangisJobVo(ExchangisJobEntity jobEntity){ + if (Objects.nonNull(jobEntity)) { + this.id = jobEntity.getId(); + this.projectId = jobEntity.getProjectId(); + this.engineType = jobEntity.getEngineType(); + this.jobDesc = jobEntity.getJobDesc(); + this.jobLabels = jobEntity.getJobLabel(); + this.jobName = jobEntity.getName(); + this.jobType = jobEntity.getJobType(); + this.createTime = jobEntity.getCreateTime(); + this.createUser = jobEntity.getCreateUser(); + this.modifyTime = jobEntity.getLastUpdateTime(); + this.jobParams = jobEntity.getJobParams(); + this.executeUser = jobEntity.getExecuteUser(); + this.proxyUser = jobEntity.getExecuteUser(); + } + } + + public String getProxyUser() { + return proxyUser; + } + + public void setProxyUser(String proxyUser) { + this.proxyUser = proxyUser; + } + + public Long getId() { return id; } + + public void setId(Long id) { this.id = id; } + + public String getJobName() { + return jobName; + } + + public void setJobName(String jobName) { + this.jobName = jobName; + } + + public String getJobDesc() { + return jobDesc; + } + + public void setJobDesc(String jobDesc) { + this.jobDesc = jobDesc; + } + + public Date getCreateTime() { + return createTime; + } + + public void setCreateTime(Date createTime) { + this.createTime = createTime; + } + + public Date getModifyTime() { + return modifyTime; + } + + public void setModifyTime(Date modifyTime) { + this.modifyTime = modifyTime; + } + + public Long getProjectId() { + return projectId; + } + + public void setProjectId(Long projectId) { + this.projectId = projectId; + } + + public String getJobType() { + return jobType; + } + + public void setJobType(String jobType) { + this.jobType = jobType; + } + + public String getEngineType() { + return engineType; + } + + public void setEngineType(String engineType) { + this.engineType = engineType; + } + + public String getJobLabels() { + return jobLabels; + } + + public void setJobLabels(String jobLabel) { + this.jobLabels = jobLabel; + } + + public String getContent() { + return content; + } + + public void setContent(String content) { + this.content = content; + } + + public String getExecuteNode() { + Object executeNode = source.get("executeNode"); + return Objects.nonNull(executeNode)? String.valueOf(executeNode) : null; + } + + + public void setExecuteNode(String executeNode) { + source.put("executeNode", executeNode); + } + + public String getSyncType() { + Object syncType = source.get("syncType"); + return Objects.nonNull(syncType)? 
String.valueOf(syncType) : null; + } + + public void setSyncType(String syncType) { + source.put("syncType", syncType); + } + + public String getJobParams() { + return jobParams; + } + + public void setJobParams(String jobParams) { + this.jobParams = jobParams; + } + + public String getCreateUser() { + return createUser; + } + + public void setCreateUser(String createUser) { + this.createUser = createUser; + } + + public String getModifyUser() { + return modifyUser; + } + + public void setModifyUser(String modifyUser) { + this.modifyUser = modifyUser; + } + + public String getExecuteUser() { + return executeUser; + } + + public void setExecuteUser(String executeUser) { + this.executeUser = executeUser; + } + + public Map getSource() { + return source; + } + + public void setSource(Map source) { + this.source.putAll(source); + } + + public Map getLabels() { + return labels; + } + + public void setLabels(Map labels) { + this.labels = labels; + } +} diff --git a/exchangis-job/exchangis-job-launcher/pom.xml b/exchangis-job/exchangis-job-launcher/pom.xml new file mode 100644 index 000000000..b06b04516 --- /dev/null +++ b/exchangis-job/exchangis-job-launcher/pom.xml @@ -0,0 +1,50 @@ +<?xml version="1.0" encoding="UTF-8"?> +<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> + <parent> + <artifactId>exchangis</artifactId> + <groupId>com.webank.wedatasphere.exchangis</groupId> + <version>${revision}</version> + <relativePath>../../pom.xml</relativePath> + </parent> + <modelVersion>4.0.0</modelVersion> + + <artifactId>exchangis-job-launcher</artifactId> + + <dependencies> + <dependency> + <groupId>com.webank.wedatasphere.exchangis</groupId> + <artifactId>exchangis-job-common</artifactId> + <version>${project.version}</version> + </dependency> + <dependency> + <groupId>com.webank.wedatasphere.exchangis</groupId> + <artifactId>exchangis-job-builder</artifactId> + <version>${project.version}</version> + </dependency> + <dependency> + <groupId>org.apache.linkis</groupId> + <artifactId>linkis-computation-client</artifactId> + <version>${linkis.version}</version> + </dependency> + </dependencies> + + <build> + <plugins> + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-deploy-plugin</artifactId> + </plugin> + <plugin> + <groupId>net.alchim31.maven</groupId> + <artifactId>scala-maven-plugin</artifactId> + </plugin> + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-jar-plugin</artifactId> + </plugin> + </plugins> + </build> +</project> \ No newline at end of file diff --git a/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/AccessibleLauncherTask.java b/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/AccessibleLauncherTask.java new file mode 100644 index 000000000..45fa2785e --- /dev/null +++ b/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/AccessibleLauncherTask.java @@ -0,0 +1,54 @@ +package com.webank.wedatasphere.exchangis.job.launcher; + +import com.webank.wedatasphere.exchangis.job.launcher.exception.ExchangisTaskLaunchException; +import com.webank.wedatasphere.exchangis.job.log.LogResult; +import com.webank.wedatasphere.exchangis.job.log.LogQuery; +import com.webank.wedatasphere.exchangis.job.launcher.domain.task.TaskProgressInfo; +import com.webank.wedatasphere.exchangis.job.launcher.domain.task.TaskStatus; + +import java.util.Map; + +/** + * Define the operations on a launched task + */ +public interface AccessibleLauncherTask { + + + /** + * Call the status interface + * @return task status + * @throws ExchangisTaskLaunchException when the status cannot be fetched + */ + TaskStatus getStatus() throws ExchangisTaskLaunchException; + + TaskStatus getLocalStatus(); + /** + * Call the metric interface + * @return metrics map + */ + Map<String, Object> getMetricsInfo() throws ExchangisTaskLaunchException; + + /** + * Call the progress info interface + * @return progress info + */ + TaskProgressInfo getProgressInfo() throws ExchangisTaskLaunchException; + + /** + * Kill the task + */ + void kill() throws ExchangisTaskLaunchException; + + /** + * Query log + * @param query log query + * @return log result + */ + LogResult queryLogs(LogQuery query) throws ExchangisTaskLaunchException; + + /** + * Submit the task + * @throws ExchangisTaskLaunchException exception in submitting
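+ * (for the Linkis launcher this is expected to submit the task as a once-mode engine job, per LAUNCHER_LINKIS_ENGINE_CONN_MODE)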
+ */ + void submit() throws ExchangisTaskLaunchException; +} diff --git a/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/ExchangisLauncherConfiguration.java b/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/ExchangisLauncherConfiguration.java new file mode 100644 index 000000000..4f3c714b6 --- /dev/null +++ b/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/ExchangisLauncherConfiguration.java @@ -0,0 +1,33 @@ +package com.webank.wedatasphere.exchangis.job.launcher; + + +import org.apache.linkis.common.conf.CommonVars; + +// TODO Unified management of the linkis configuration +public class ExchangisLauncherConfiguration { + + public static final String TASK_NOT_EXIST = "Not exists EngineConn"; + + public static final String LAUNCHER_LINKIS_RUNTIME_PARAM_NAME = "runtimeParams"; + + public static final String LAUNCHER_LINKIS_STARTUP_PARAM_NAME = "startUpParams"; + + public static final String LAUNCHER_LINKIS_REQUEST_MEMORY = "wds.linkis.engineconn.java.driver.memory"; + + public static final String LAUNCHER_LINKIS_RESOURCES = "wds.linkis.engineconn.${engine}.bml.resources"; + + public static final String LAUNCHER_LINKIS_EXEC_ID = "wds.linkis.engineconn.${engine}.execution.id"; + + public static final String LAUNCHER_LINKIS_CUSTOM_PARAM_PREFIX = "_${engine}_."; + + public static final CommonVars LAUNCHER_LINKIS_CREATOR = CommonVars.apply("wds.exchangis.job.task.launcher.linkis.creator", "exchangis"); + + public static final CommonVars LAUNCHER_LINKIS_ENGINE_CONN_MODE = CommonVars.apply("wds.exchangis.job.task.launcher.linkis.engineConn.mode", "once"); + + public static final CommonVars LAUNCHER_LINKIS_MAX_SUBMIT = CommonVars.apply("wds.exchangis.job.task.launcher.linkis.max.submit", 30000L); + + public static final CommonVars LAUNCHER_LINKIS_MAX_ERROR = CommonVars.apply("wds.exchangis.job.task.launcher.linkis.max.error", 3); + + public static final CommonVars LIMIT_INTERFACE = CommonVars.apply("wds.exchangis.limit.interface.value", true); + +} diff --git a/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/ExchangisTaskLaunchManager.java b/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/ExchangisTaskLaunchManager.java new file mode 100644 index 000000000..c400e6ffe --- /dev/null +++ b/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/ExchangisTaskLaunchManager.java @@ -0,0 +1,18 @@ +package com.webank.wedatasphere.exchangis.job.launcher; + + +import com.webank.wedatasphere.exchangis.job.launcher.domain.LaunchableExchangisTask; +import com.webank.wedatasphere.exchangis.job.launcher.domain.LaunchedExchangisTask; + +/** + * Task(Sub job) launcher + */ +public interface ExchangisTaskLaunchManager { + + void registerTaskLauncher(ExchangisTaskLauncher taskLauncher); + + void unRegisterTaskLauncher(String launcherName); + + ExchangisTaskLauncher getTaskLauncher(String launcherName); + +} diff --git a/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/ExchangisTaskLauncher.java b/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/ExchangisTaskLauncher.java new file mode 100644 index 000000000..7cde995c2 --- /dev/null +++ 
b/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/ExchangisTaskLauncher.java @@ -0,0 +1,33 @@ +package com.webank.wedatasphere.exchangis.job.launcher; + +import com.webank.wedatasphere.exchangis.job.launcher.exception.ExchangisTaskLaunchException; +import com.webank.wedatasphere.exchangis.job.launcher.domain.LaunchableExchangisTask; +import com.webank.wedatasphere.exchangis.job.launcher.domain.LaunchedExchangisTask; + +import java.util.Map; + +/** + * Launcher interface + * @param <T> launchable task type + * @param <U> launched task type + */ +public interface ExchangisTaskLauncher<T extends LaunchableExchangisTask, U extends LaunchedExchangisTask> { + + String name(); + + default void init(ExchangisTaskLaunchManager jobLaunchManager){} + + /** + * Build launcher task (accessible) from launched task entity + * @param launchedTask launched task + * @return accessible launcher task + */ + AccessibleLauncherTask launcherTask(U launchedTask) throws ExchangisTaskLaunchException; + /** + * Launch method + * @param launchableTask launchable task + * @return launched task + * @throws ExchangisTaskLaunchException exception in launching + */ + LaunchedExchangisTask launch(T launchableTask) throws ExchangisTaskLaunchException; + +} diff --git a/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/builder/LinkisExchangisLauncherJobBuilder.java b/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/builder/LinkisExchangisLauncherJobBuilder.java new file mode 100644 index 000000000..82386557d --- /dev/null +++ b/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/builder/LinkisExchangisLauncherJobBuilder.java @@ -0,0 +1,85 @@ +package com.webank.wedatasphere.exchangis.job.launcher.builder; + +import com.webank.wedatasphere.exchangis.engine.domain.EngineResource; +import com.webank.wedatasphere.exchangis.job.builder.ExchangisJobBuilderContext; +import com.webank.wedatasphere.exchangis.job.builder.api.AbstractExchangisJobBuilder; +import com.webank.wedatasphere.exchangis.job.domain.ExchangisEngineJob; +import com.webank.wedatasphere.exchangis.job.exception.ExchangisJobException; +import com.webank.wedatasphere.exchangis.job.launcher.domain.LaunchableExchangisTask; +import com.webank.wedatasphere.exchangis.job.utils.MemUtils; +import org.apache.linkis.datasourcemanager.common.exception.JsonErrorException; +import org.apache.linkis.datasourcemanager.common.util.PatternInjectUtils; +import org.apache.linkis.datasourcemanager.common.util.json.Json; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.*; +import java.util.stream.Collectors; + +import static com.webank.wedatasphere.exchangis.job.exception.ExchangisJobExceptionCode.TASK_EXECUTE_ERROR; +import static com.webank.wedatasphere.exchangis.job.launcher.ExchangisLauncherConfiguration.*; + +/** + * Launcher job builder + */ + +public class LinkisExchangisLauncherJobBuilder extends AbstractExchangisJobBuilder<ExchangisEngineJob, LaunchableExchangisTask> { + + private static final String LAUNCHER_NAME = "Linkis"; + + private static final Logger LOG = LoggerFactory.getLogger(LinkisExchangisLauncherJobBuilder.class); + + @Override + public LaunchableExchangisTask buildJob(ExchangisEngineJob inputJob, LaunchableExchangisTask expectOut, ExchangisJobBuilderContext ctx) throws ExchangisJobException { + String engine = inputJob.getEngineType().toLowerCase(Locale.ROOT); + LaunchableExchangisTask launchableTask = new LaunchableExchangisTask(); + launchableTask.setName(inputJob.getName()); + launchableTask.setJobId(inputJob.getId()); +
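// The rest of the build assembles the Linkis submission: startup params (custom runtime params get an engine-specific prefix), memory normalized to GB, and any engine resources attached. +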
launchableTask.setExecuteUser(inputJob.getCreateUser()); +// launcherJob.setExecuteNode(exchangisJob.getExecuteNode()); + launchableTask.setLinkisContentMap(inputJob.getJobContent()); + Map linkisParams = new HashMap<>(); + Map startUpParams = new HashMap<>(); + linkisParams.put(LAUNCHER_LINKIS_STARTUP_PARAM_NAME, startUpParams); + try { + String customParamPrefix = PatternInjectUtils.inject(LAUNCHER_LINKIS_CUSTOM_PARAM_PREFIX, new String[]{engine}); + // Add the runtime params to startup params for once job + startUpParams.putAll(appendPrefixToParams(customParamPrefix, inputJob.getRuntimeParams())); + } catch (JsonErrorException e) { + throw new ExchangisJobException(TASK_EXECUTE_ERROR.getCode(), "Fail to convert custom params for launching", e); + } + long memoryUsed = Optional.ofNullable(inputJob.getMemoryUsed()).orElse(0L); + if (!inputJob.isMemoryUnitLock() && memoryUsed > 0){ + memoryUsed = MemUtils.convertToGB(inputJob.getMemoryUsed(), inputJob.getMemoryUnit()); + inputJob.setMemoryUnit("G"); + } + startUpParams.put(LAUNCHER_LINKIS_REQUEST_MEMORY, (memoryUsed <= 0 ? 1 : memoryUsed) + inputJob.getMemoryUnit()); + List resources = inputJob.getResources(); + if (!resources.isEmpty()){ + try { + LOG.info("Use the engine resources: {} for job/task: [{}]", Json.toJson(resources, null), inputJob.getName()); + startUpParams.put(PatternInjectUtils.inject(LAUNCHER_LINKIS_RESOURCES, new String[]{engine}), Json.toJson(resources, null)); + } catch (JsonErrorException e) { + throw new ExchangisJobException(TASK_EXECUTE_ERROR.getCode(), "Fail to use engine resources", e); + } + } + launchableTask.setLinkisParamsMap(linkisParams); + launchableTask.setEngineType(inputJob.getEngineType()); + launchableTask.setLabels(inputJob.getJobLabel()); + launchableTask.setName(inputJob.getName()); + // Use launcher name placeholder + launchableTask.setLinkisJobName(LAUNCHER_NAME); + return launchableTask; + } + + /** + * Append prefix to params + * @param prefix prefix + * @param customParams custom params + * @return params + */ + private Map appendPrefixToParams(String prefix, Map customParams){ + return customParams.entrySet().stream().collect(Collectors.toMap(entry -> prefix + entry.getKey(), + Map.Entry::getValue)); + } +} diff --git a/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/domain/LaunchableExchangisJob.java b/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/domain/LaunchableExchangisJob.java new file mode 100644 index 000000000..78c918810 --- /dev/null +++ b/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/domain/LaunchableExchangisJob.java @@ -0,0 +1,62 @@ +package com.webank.wedatasphere.exchangis.job.launcher.domain; + +import com.webank.wedatasphere.exchangis.job.domain.ExchangisJobInfo; +import com.webank.wedatasphere.exchangis.job.domain.GenericExchangisJob; + +import java.util.ArrayList; +import java.util.List; + +/** + * Job could be executed + */ +public class LaunchableExchangisJob extends GenericExchangisJob { + + /** + * Job info + */ + private ExchangisJobInfo exchangisJobInfo; + + /** + * Execution id + */ + private String jobExecutionId; + + /** + * Execute user + */ + private String execUser; + + private List launchableExchangisTasks = new ArrayList<>(); + + public ExchangisJobInfo getExchangisJobInfo() { + return exchangisJobInfo; + } + + public void setExchangisJobInfo(ExchangisJobInfo exchangisJobInfo) { + this.exchangisJobInfo = 
exchangisJobInfo; + } + + public String getJobExecutionId() { + return jobExecutionId; + } + + public void setJobExecutionId(String jobExecutionId) { + this.jobExecutionId = jobExecutionId; + } + + public List getLaunchableExchangisTasks() { + return launchableExchangisTasks; + } + + public void setLaunchableExchangisTasks(List launchableExchangisTasks) { + this.launchableExchangisTasks = launchableExchangisTasks; + } + + public String getExecUser() { + return execUser; + } + + public void setExecUser(String execUser) { + this.execUser = execUser; + } +} diff --git a/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/domain/LaunchableExchangisTask.java b/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/domain/LaunchableExchangisTask.java new file mode 100644 index 000000000..bd5158e76 --- /dev/null +++ b/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/domain/LaunchableExchangisTask.java @@ -0,0 +1,233 @@ +package com.webank.wedatasphere.exchangis.job.launcher.domain; + +import com.webank.wedatasphere.exchangis.datasource.core.utils.Json; +import com.webank.wedatasphere.exchangis.job.constraints.LabelSerializeConstraints; +import com.webank.wedatasphere.exchangis.job.domain.ExchangisTask; +import com.webank.wedatasphere.exchangis.job.utils.LabelConvertUtils; + +import java.util.*; + +/** + * Task could be executed + */ +public class LaunchableExchangisTask implements ExchangisTask { + + private Long id; + + private String name; + + private Date createTime; + + private Date lastUpdateTime; + + private String engineType; + + private String executeUser; + + private String linkisJobName; + + private Long jobId; + + private String jobExecutionId; + + /** + * Job content in Linkis + */ + private String linkisJobContent; + + /** + * Job params in Linkis + */ + private String linkisParams; + + /** + * Source message in Linkis + */ + private String linkisSource; + + /** + * Labels string value + */ + private String labels; + + private Map linkisContentMap; + + private Map linkisParamsMap; + + private Map linkisSourceMap; + + private Map labelsMap; + + @Override + public String getEngineType() { + return this.engineType; + } + + @Override + public void setEngineType(String engineType) { + this.engineType = engineType; + } + + @Override + public String getExecuteUser() { + return this.executeUser; + } + + @Override + public void setExecuteUser(String executeUser) { + this.executeUser = executeUser; + } + + @Override + public Long getId() { + return this.id; + } + + @Override + public void setId(Long id) { + this.id = id; + } + + @Override + public String getName() { + return this.name; + } + + @Override + public void setName(String name) { + this.name = name; + } + + @Override + public Date getCreateTime() { + return createTime; + } + + @Override + public void setCreateTime(Date createTime) { + this.createTime = createTime; + } + + @Override + public Date getLastUpdateTime() { + return this.lastUpdateTime; + } + + @Override + public void setLastUpdateTime(Date lastUpdateTime) { + this.lastUpdateTime = lastUpdateTime; + } + + public Long getJobId() { + return jobId; + } + + public void setJobId(Long jobId) { + this.jobId = jobId; + } + + public String getJobExecutionId() { + return jobExecutionId; + } + + public void setJobExecutionId(String jobExecutionId) { + this.jobExecutionId = jobExecutionId; + } + + public void setLinkisContentMap(Map linkisContentMap) 
{ + Optional.ofNullable(linkisContentMap).ifPresent(value -> this.linkisContentMap = value); + } + + public void setLinkisParamsMap(Map linkisParamsMap) { + Optional.ofNullable(linkisParamsMap).ifPresent(value -> this.linkisParamsMap = value); + } + + public void setLinkisSourceMap(Map linkisSourceMap) { + Optional.ofNullable(linkisSourceMap).ifPresent(value -> this.linkisSourceMap = value); + } + + public Map getLinkisContentMap() { + if(Objects.isNull(this.linkisContentMap) && Objects.nonNull(this.linkisJobContent)){ + this.linkisContentMap = Json.fromJson(this.linkisJobContent, Map.class); + } + return linkisContentMap; + } + + public Map getLinkisParamsMap() { + if(Objects.isNull(this.linkisParamsMap) && Objects.nonNull(this.linkisParams)){ + this.linkisParamsMap = Json.fromJson(this.linkisParams, Map.class); + } + return linkisParamsMap; + } + + public Map getLinkisSourceMap() { + if(Objects.isNull(this.linkisSourceMap) && Objects.nonNull(this.linkisSource)){ + this.linkisSourceMap = Json.fromJson(this.linkisSource, Map.class); + } + return linkisSourceMap; + } + + public Map getLabelsMap() { + if(Objects.isNull(this.labelsMap) && Objects.nonNull(this.labels)){ + this.labelsMap = LabelConvertUtils.stringToLabelMap(this.labels); + } + return labelsMap; + } + + public void setLabelsMap(Map labelsMap) { + this.labelsMap = labelsMap; + } + + public String getLinkisJobName() { + return linkisJobName; + } + + public void setLinkisJobName(String linkisJobName) { + this.linkisJobName = linkisJobName; + } + + public String getLinkisJobContent() { + if (Objects.isNull(this.linkisJobContent) && Objects.nonNull(this.linkisContentMap)){ + this.linkisJobContent = Json.toJson(this.linkisContentMap, null); + } + return linkisJobContent; + } + + public void setLinkisJobContent(String linkisJobContent) { + this.linkisJobContent = linkisJobContent; + } + + public String getLinkisParams() { + if (Objects.isNull(this.linkisParams) && Objects.nonNull(this.linkisParamsMap)){ + this.linkisParams = Json.toJson(this.linkisParamsMap, null); + } + return linkisParams; + } + + public void setLinkisParams(String linkisParams) { + this.linkisParams = linkisParams; + } + + public String getLinkisSource() { + if (Objects.isNull(this.linkisSource) && Objects.nonNull(this.linkisSourceMap)){ + this.linkisSource = Json.toJson(this.linkisSourceMap, null); + } + return linkisSource; + } + + public void setLinkisSource(String linkisSource) { + this.linkisSource = linkisSource; + } + + public String getLabels() { + if (Objects.isNull(this.labels) && Objects.nonNull(this.labelsMap)){ + // Serialize lazily, mirroring getLabelsMap() + this.labels = LabelConvertUtils.labelsToString(this.labelsMap); + } + return labels; + } + + public void setLabels(String labels) { + this.labels = labels; + } + +} diff --git a/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/domain/LaunchedExchangisTask.java b/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/domain/LaunchedExchangisTask.java new file mode 100644 index 000000000..48a7c3b45 --- /dev/null +++ b/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/domain/LaunchedExchangisTask.java @@ -0,0 +1,33 @@ +package com.webank.wedatasphere.exchangis.job.launcher.domain; + +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.webank.wedatasphere.exchangis.job.launcher.AccessibleLauncherTask; +import
com.webank.wedatasphere.exchangis.job.launcher.entity.LaunchedExchangisTaskEntity; + +/** + * To be hold by top level + */ +@JsonInclude(JsonInclude.Include.NON_EMPTY) +public class LaunchedExchangisTask extends LaunchedExchangisTaskEntity{ + public LaunchedExchangisTask(LaunchableExchangisTask launchableExchangisTask) { + super(launchableExchangisTask); + } + + @JsonIgnore + private AccessibleLauncherTask launcherTask; + + public LaunchedExchangisTask(){ + + } + + public AccessibleLauncherTask getLauncherTask() { + return launcherTask; + } + + public void setLauncherTask(AccessibleLauncherTask launcherTask) { + this.launcherTask = launcherTask; + } + + +} diff --git a/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/domain/LaunchedJobHistory.java b/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/domain/LaunchedJobHistory.java new file mode 100644 index 000000000..f42a21485 --- /dev/null +++ b/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/domain/LaunchedJobHistory.java @@ -0,0 +1,114 @@ +package com.webank.wedatasphere.exchangis.job.launcher.domain; + +import java.util.Date; + +/** + * + * @Date 2022/1/25 14:36 + */ +public class LaunchedJobHistory { + + private String jobExecutionId; + + private String executeNode; + + private String name; + + private Date createTime; + + private Long flow; + + private String executeUser; + + private String status; + + private double progress; + + private Date lastUpdateTime; + + public LaunchedJobHistory(String jobExecutionId, String executeNode, String name, Date createTime, Long flow, String executeUser, String status, double progress, Date lastUpdateTime){ + this.jobExecutionId = jobExecutionId; + this.executeNode = executeNode; + this.name = name; + this.status = status; + this.createTime = createTime; + this.flow = flow; + this.lastUpdateTime = lastUpdateTime; + this.progress = progress; + this.lastUpdateTime = lastUpdateTime; + this.executeUser = executeUser; + } + + public String getJobExecutionId() { + return jobExecutionId; + } + + public void setJobExecutionId(String jobExecutionId) { + this.jobExecutionId = jobExecutionId; + } + + public String getExecuteNode() { + return executeNode; + } + + public void setExecuteNode(String executeNode) { + this.executeNode = executeNode; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public Date getCreateTime() { + return createTime; + } + + public void setCreateTime(Date createTime) { + this.createTime = createTime; + } + + public Long getFlow() { + return flow; + } + + public void setFlow(Long flow) { + this.flow = flow; + } + + public String getExecuteUser() { + return executeUser; + } + + public void setExecuteUser(String executeUser) { + this.executeUser = executeUser; + } + + public String getStatus() { + return status; + } + + public void setStatus(String status) { + this.status = status; + } + + public double getProgress() { + return progress; + } + + public void setProgress(double progress) { + this.progress = progress; + } + + public Date getLastUpdateTime() { + return lastUpdateTime; + } + + public void setLastUpdateTime(Date lastUpdateTime) { + this.lastUpdateTime = lastUpdateTime; + } + +} diff --git a/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/domain/task/TaskProgressInfo.java 
b/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/domain/task/TaskProgressInfo.java new file mode 100644 index 000000000..e474d4c4d --- /dev/null +++ b/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/domain/task/TaskProgressInfo.java @@ -0,0 +1,92 @@ +package com.webank.wedatasphere.exchangis.job.launcher.domain.task; + +public class TaskProgressInfo { + /** + * Total sub tasks + */ + private int total; + + /** + * Running sub tasks + */ + private int running; + + /** + * Failed sub tasks + */ + private int failed; + + /** + * Succeed sub tasks + */ + private int succeed; + + /** + * Progress value + */ + private float progress; + + /** + * Whole status + */ + private TaskStatus status = TaskStatus.Undefined; + + public TaskProgressInfo(int total, int running, int failed, int succeed, float progress){ + this.total = total; + this.running = running; + this.failed = failed; + this.succeed = succeed; + this.progress = progress; + } + public TaskProgressInfo(){ + + } + + public int getTotal() { + return total; + } + + public void setTotal(int total) { + this.total = total; + } + + public int getRunning() { + return running; + } + + public void setRunning(int running) { + this.running = running; + } + + public int getFailed() { + return failed; + } + + public void setFailed(int failed) { + this.failed = failed; + } + + public int getSucceed() { + return succeed; + } + + public void setSucceed(int succeed) { + this.succeed = succeed; + } + + public TaskStatus getStatus() { + return status; + } + + public void setStatus(TaskStatus status) { + this.status = status; + } + + public float getProgress() { + return progress; + } + + public void setProgress(float progress) { + this.progress = progress; + } +} diff --git a/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/domain/task/TaskStatus.java b/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/domain/task/TaskStatus.java new file mode 100644 index 000000000..1862929e4 --- /dev/null +++ b/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/domain/task/TaskStatus.java @@ -0,0 +1,17 @@ +package com.webank.wedatasphere.exchangis.job.launcher.domain.task; + +/** + * Status of task + */ +public enum TaskStatus { + Inited, Scheduled, Running, WaitForRetry, Cancelled, Failed, Success, Undefined, Timeout; + + /** + * Is completed status + * @param status status + * @return boolean + */ + public static boolean isCompleted(TaskStatus status){ + return null == status || status.equals(Cancelled) || status.equals(Failed) || status.equals(Success); + } +} diff --git a/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/entity/ExchangisLaunchTask.java b/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/entity/ExchangisLaunchTask.java new file mode 100644 index 000000000..9ec1c79e0 --- /dev/null +++ b/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/entity/ExchangisLaunchTask.java @@ -0,0 +1,166 @@ +package com.webank.wedatasphere.exchangis.job.launcher.entity; + +import com.baomidou.mybatisplus.annotation.IdType; +import com.baomidou.mybatisplus.annotation.TableId; +import com.baomidou.mybatisplus.annotation.TableName; + +import java.util.Date; + +/** + *

+ * Execution task table. + *

+ * + * @author yuxin.yuan + * @since 2021-08-10 + */ +@TableName("exchangis_launch_task") +public class ExchangisLaunchTask { + + @TableId(type = IdType.AUTO) + private Long id; + + private String taskName; + + private Long jobId; + + private String jobName; + + private String content; + + private String executeNode; + + private Date createTime; + + private String createUser; + + private Date launchTime; + + private String proxyUser; + + private String paramsJson; + + private String launchId; + + private String status; + + private Date completeTime; + + private String engineType; + + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } + + public String getTaskName() { + return taskName; + } + + public void setTaskName(String taskName) { + this.taskName = taskName; + } + + public Long getJobId() { + return jobId; + } + + public void setJobId(Long jobId) { + this.jobId = jobId; + } + + public String getJobName() { + return jobName; + } + + public void setJobName(String jobName) { + this.jobName = jobName; + } + + public String getContent() { + return content; + } + + public void setContent(String content) { + this.content = content; + } + + public String getExecuteNode() { + return executeNode; + } + + public void setExecuteNode(String executeNode) { + this.executeNode = executeNode; + } + + public Date getCreateTime() { + return createTime; + } + + public void setCreateTime(Date createTime) { + this.createTime = createTime; + } + + public String getCreateUser() { + return createUser; + } + + public void setCreateUser(String createUser) { + this.createUser = createUser; + } + + public Date getLaunchTime() { + return launchTime; + } + + public void setLaunchTime(Date launchTime) { + this.launchTime = launchTime; + } + + public String getProxyUser() { + return proxyUser; + } + + public void setProxyUser(String proxyUser) { + this.proxyUser = proxyUser; + } + + public String getParamsJson() { + return paramsJson; + } + + public void setParamsJson(String paramsJson) { + this.paramsJson = paramsJson; + } + + public String getLaunchId() { return launchId; } + + public void setLaunchId(String launchId) { this.launchId = launchId; } + + public String getStatus() { + return status; + } + + public void setStatus(String status) { + this.status = status; + } + + public Date getCompleteTime() { + return completeTime; + } + + public void setCompleteTime(Date completeTime) { + this.completeTime = completeTime; + } + + public String getEngineType() { + return engineType; + } + + public void setEngineType(String engineType) { + this.engineType = engineType; + } +} diff --git a/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/entity/ExchangisTaskEntity.java b/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/entity/ExchangisTaskEntity.java new file mode 100644 index 000000000..b4ca5996b --- /dev/null +++ b/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/entity/ExchangisTaskEntity.java @@ -0,0 +1,68 @@ +package com.webank.wedatasphere.exchangis.job.launcher.entity; + +import com.webank.wedatasphere.exchangis.job.domain.ExchangisTask; +import com.webank.wedatasphere.exchangis.job.launcher.domain.task.TaskStatus; + +/** + * Task entity (could be persisted) + */ +public interface ExchangisTaskEntity extends ExchangisTask { + + /** + * Job id + * @return long value + */ + Long getJobId(); + + void setJobId(Long jobId); + + /** + 
* Job name + * @return string value + */ + String jobName(); + + void setJobName(String jobName); + + + /** + * Task status + * @return status enum + */ + TaskStatus getStatus(); + + + void setStatus(TaskStatus status); + + /** + * Progress + * @return 0.0 to 1.0, default 0.0 + */ + double getProgress(); + + void setProgress(double progress); + + /** + * Error code + * @return default null + */ + Integer getErrorCode(); + + void setErrorCode(Integer code); + + /** + * Error message + * @return default null + */ + String getErrorMessage(); + + void setErrorMessage(String errorMessage); + + /** + * Retry number + * @return default 0 + */ + Integer getRetryNum(); + + void setRetryNum(Integer retryNum); +} diff --git a/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/entity/GenericExchangisTaskEntity.java b/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/entity/GenericExchangisTaskEntity.java new file mode 100644 index 000000000..8b804a34c --- /dev/null +++ b/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/entity/GenericExchangisTaskEntity.java @@ -0,0 +1,177 @@ +package com.webank.wedatasphere.exchangis.job.launcher.entity; + +import com.webank.wedatasphere.exchangis.job.launcher.domain.task.TaskStatus; + +import java.util.Date; + +/** + * Inheritable task entity + */ +public class GenericExchangisTaskEntity implements ExchangisTaskEntity{ + + protected Long id; + + protected String name; + + protected Date createTime; + + protected Date lastUpdateTime; + + protected String engineType; + + protected String executeUser; + + protected Long jobId; + + protected String jobName; + + protected TaskStatus status = TaskStatus.Inited; + + protected double progress = 0.0; + + protected Integer errorCode; + + protected String errorMessage; + + protected Integer retryNum = 0; + + protected String createUser; + + @Override + public Long getJobId() { + return this.jobId; + } + + @Override + public void setJobId(Long jobId) { + this.jobId = jobId; + } + + @Override + public String jobName() { + return this.jobName; + } + + @Override + public void setJobName(String jobName) { + this.jobName = jobName; + } + + @Override + public TaskStatus getStatus() { + return this.status; + } + + @Override + public void setStatus(TaskStatus status) { + this.status = status; + } + + @Override + public double getProgress() { + return this.progress; + } + + @Override + public void setProgress(double progress) { + this.progress = progress; + } + + @Override + public Integer getErrorCode() { + return this.errorCode; + } + + @Override + public void setErrorCode(Integer code) { + this.errorCode = code; + } + + @Override + public String getErrorMessage() { + return this.errorMessage; + } + + @Override + public void setErrorMessage(String errorMessage) { + this.errorMessage = errorMessage; + } + + @Override + public Integer getRetryNum() { + return this.retryNum; + } + + @Override + public void setRetryNum(Integer retryNum) { + this.retryNum = retryNum; + } + + @Override + public String getEngineType() { + return engineType; + } + + @Override + public void setEngineType(String engineType) { + this.engineType = engineType; + } + + @Override + public String getExecuteUser() { + return this.executeUser; + } + + @Override + public void setExecuteUser(String executeUser) { + this.executeUser = executeUser; + } + + @Override + public Long getId() { + return this.id; + } + + @Override + 
public void setId(Long id) { + this.id = id; + } + + @Override + public String getName() { + return this.name; + } + + @Override + public void setName(String name) { + this.name = name; + } + + @Override + public Date getCreateTime() { + return this.createTime; + } + + @Override + public void setCreateTime(Date createTime) { + this.createTime = createTime; + } + + @Override + public Date getLastUpdateTime() { + return this.lastUpdateTime; + } + + @Override + public void setLastUpdateTime(Date lastUpdateTime) { + this.lastUpdateTime = lastUpdateTime; + } + + public String getCreateUser() { + return createUser; + } + + public void setCreateUser(String createUser) { + this.createUser = createUser; + } +} diff --git a/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/entity/LaunchedExchangisJobEntity.java b/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/entity/LaunchedExchangisJobEntity.java new file mode 100644 index 000000000..bfc9d2a8e --- /dev/null +++ b/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/entity/LaunchedExchangisJobEntity.java @@ -0,0 +1,93 @@ +package com.webank.wedatasphere.exchangis.job.launcher.entity; + +import com.webank.wedatasphere.exchangis.common.EnvironmentUtils; +import com.webank.wedatasphere.exchangis.job.domain.ExchangisJobEntity; + +import com.webank.wedatasphere.exchangis.job.launcher.domain.LaunchableExchangisJob; +import org.apache.commons.io.IOUtils; + +import java.text.SimpleDateFormat; +import java.util.Calendar; +import java.util.Date; +import java.util.Optional; + +/** + * Entity to persist the launched job + */ +public class LaunchedExchangisJobEntity extends GenericExchangisTaskEntity{ + + /** + * Execution id + */ + private String jobExecutionId; + + /** + * ExchangisJobEntity + */ + + private ExchangisJobEntity exchangisJobEntity; + + /** + * Log path, could be a uri + */ + private String logPath; + + /** + * Number of launchable task + */ + private int launchableTaskNum = 0; + + public ExchangisJobEntity getExchangisJobEntity() { + return exchangisJobEntity; + } + + public void setExchangisJobEntity(ExchangisJobEntity exchangisJobEntity) { + this.exchangisJobEntity = exchangisJobEntity; + } + + + public LaunchedExchangisJobEntity(){ + + } + public LaunchedExchangisJobEntity(LaunchableExchangisJob job){ + this.name = job.getName(); + this.engineType = job.getEngineType(); + this.jobId = job.getId(); + this.jobName = job.getName(); + this.executeUser = job.getExecUser(); + this.createUser = job.getCreateUser(); + this.createTime = job.getCreateTime(); + this.lastUpdateTime = job.getLastUpdateTime(); + this.jobExecutionId = job.getJobExecutionId(); + SimpleDateFormat simpleDateFormat = new SimpleDateFormat("yyyy-MM-dd"); + String logPath = this.executeUser + IOUtils.DIR_SEPARATOR_UNIX + + simpleDateFormat.format(new Date()) + IOUtils.DIR_SEPARATOR_UNIX + this.jobExecutionId; + logPath = EnvironmentUtils.getServerAddress() + "@" + logPath; + this.logPath = logPath; + } + public String getJobExecutionId() { + return jobExecutionId; + } + + public void setJobExecutionId(String jobExecutionId) { + this.jobExecutionId = jobExecutionId; + } + + public String getLogPath() { + return logPath; + } + + public void setLogPath(String logPath) { + this.logPath = logPath; + } + + public int getLaunchableTaskNum() { + return launchableTaskNum; + } + + public void setLaunchableTaskNum(int launchableTaskNum) { + 
this.launchableTaskNum = launchableTaskNum; + } + + +} diff --git a/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/entity/LaunchedExchangisTaskEntity.java b/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/entity/LaunchedExchangisTaskEntity.java new file mode 100644 index 000000000..f894c0fda --- /dev/null +++ b/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/entity/LaunchedExchangisTaskEntity.java @@ -0,0 +1,157 @@ +package com.webank.wedatasphere.exchangis.job.launcher.entity; + + +import com.webank.wedatasphere.exchangis.datasource.core.utils.Json; +import com.webank.wedatasphere.exchangis.job.launcher.domain.LaunchableExchangisTask; +import com.webank.wedatasphere.exchangis.job.launcher.domain.task.TaskStatus; + +import java.util.Calendar; +import java.util.Date; +import java.util.Map; +import java.util.Objects; + +/** + * Entity to persist the launched task + */ +public class LaunchedExchangisTaskEntity extends GenericExchangisTaskEntity { + + /** + * Default, the task id has the same value with id + */ + private String taskId; + + private String linkisJobId; + + private String linkisJobInfo; + + /** + * Execution id + */ + private String jobExecutionId; + + private Date launchTime; + + private Date runningTime; + + private String metrics; + + private LaunchableExchangisTask launchableExchangisTask; + + private Map linkisJobInfoMap; + + private Map metricsMap; + + public LaunchedExchangisTaskEntity(){ + + } + + public LaunchedExchangisTaskEntity(LaunchableExchangisTask launchableExchangisTask){ + Calendar calendar = Calendar.getInstance(); + this.launchableExchangisTask = launchableExchangisTask; + this.id = launchableExchangisTask.getId(); + this.name = launchableExchangisTask.getName(); + this.createTime = calendar.getTime(); + this.lastUpdateTime = calendar.getTime(); + this.engineType = launchableExchangisTask.getEngineType(); + this.executeUser = launchableExchangisTask.getExecuteUser(); + this.jobId = launchableExchangisTask.getJobId(); + // jobName + this.jobExecutionId = launchableExchangisTask.getJobExecutionId(); + this.status = TaskStatus.Scheduled; + this.lastUpdateTime = Calendar.getInstance().getTime(); + } + public String getTaskId(){ + if (Objects.isNull(taskId) && Objects.nonNull(getId())){ + this.taskId = String.valueOf(getId()); + } + return this.taskId; + } + + public void setTaskId(String taskId){ + this.taskId = taskId; + } + + public String getLinkisJobId() { + return linkisJobId; + } + + public void setLinkisJobId(String linkisJobId) { + this.linkisJobId = linkisJobId; + } + + public String getLinkisJobInfo() { + if (Objects.isNull(this.linkisJobInfo) && Objects.nonNull(this.linkisJobInfoMap)){ + this.linkisJobInfo = Json.toJson(this.linkisJobInfoMap, null); + } + return linkisJobInfo; + } + + public void setLinkisJobInfo(String linkisJobInfo) { + this.linkisJobInfo = linkisJobInfo; + } + + public String getJobExecutionId() { + return jobExecutionId; + } + + public void setJobExecutionId(String jobExecutionId) { + this.jobExecutionId = jobExecutionId; + } + + public Date getLaunchTime() { + return launchTime; + } + + public void setLaunchTime(Date launchTime) { + this.launchTime = launchTime; + } + + public Date getRunningTime() { + return runningTime; + } + + public void setRunningTime(Date runningTime) { + this.runningTime = runningTime; + } + + public String getMetrics() { + if (Objects.isNull(this.metrics) && 
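+            // Lazily serialize the in-memory metrics map on first read (same pattern as getLinkisJobInfo)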
Objects.nonNull(this.metricsMap)){ + this.metrics = Json.toJson(this.metricsMap, null); + } + return metrics; + } + + public void setMetrics(String metrics) { + this.metrics = metrics; + } + + public Map getLinkisJobInfoMap() { + if (Objects.isNull(this.linkisJobInfoMap) && Objects.nonNull(this.linkisJobInfo)){ + this.linkisJobInfoMap = Json.fromJson(this.linkisJobInfo, Map.class); + } + return linkisJobInfoMap; + } + + public void setLinkisJobInfoMap(Map linkisJobInfoMap) { + this.linkisJobInfoMap = linkisJobInfoMap; + } + + public Map getMetricsMap() { + if (Objects.isNull(this.metricsMap) && Objects.nonNull(this.metrics)){ + this.metricsMap = Json.fromJson(this.metrics, Map.class); + } + return metricsMap; + } + + public void setMetricsMap(Map metricsMap) { + this.metricsMap = metricsMap; + } + + public LaunchableExchangisTask getLaunchableExchangisTask() { + return launchableExchangisTask; + } + + public void setLaunchableExchangisTask(LaunchableExchangisTask launchableExchangisTask) { + this.launchableExchangisTask = launchableExchangisTask; + } +} diff --git a/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/exception/ExchangisTaskLaunchException.java b/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/exception/ExchangisTaskLaunchException.java new file mode 100644 index 000000000..206cd8fff --- /dev/null +++ b/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/exception/ExchangisTaskLaunchException.java @@ -0,0 +1,15 @@ +package com.webank.wedatasphere.exchangis.job.launcher.exception; + +import org.apache.linkis.common.exception.ErrorException; + +import static com.webank.wedatasphere.exchangis.job.exception.ExchangisJobExceptionCode.TASK_LAUNCH_ERROR; + +/** + * Launch exception + */ +public class ExchangisTaskLaunchException extends ErrorException { + public ExchangisTaskLaunchException(String desc, Throwable t) { + super(TASK_LAUNCH_ERROR.getCode(), desc); + super.initCause(t); + } +} diff --git a/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/exception/ExchangisTaskNotExistException.java b/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/exception/ExchangisTaskNotExistException.java new file mode 100644 index 000000000..44ce9870e --- /dev/null +++ b/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/exception/ExchangisTaskNotExistException.java @@ -0,0 +1,13 @@ +package com.webank.wedatasphere.exchangis.job.launcher.exception; + +import static com.webank.wedatasphere.exchangis.job.exception.ExchangisJobExceptionCode.TASK_LAUNCH_NOT_EXIST; + +/** + * Task not exist + */ +public class ExchangisTaskNotExistException extends ExchangisTaskLaunchException{ + public ExchangisTaskNotExistException(String desc, Throwable t) { + super(desc, t); + this.setErrCode(TASK_LAUNCH_NOT_EXIST.getCode()); + } +} diff --git a/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/linkis/LinkisExchangisTaskLauncher.java b/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/linkis/LinkisExchangisTaskLauncher.java new file mode 100644 index 000000000..807c1eaaf --- /dev/null +++ b/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/linkis/LinkisExchangisTaskLauncher.java @@ -0,0 
+1,115 @@
+package com.webank.wedatasphere.exchangis.job.launcher.linkis;
+
+import com.webank.wedatasphere.exchangis.common.linkis.client.config.ExchangisClientConfig;
+import com.webank.wedatasphere.exchangis.common.linkis.client.config.ExchangisClientConfigBuilder;
+import com.webank.wedatasphere.exchangis.job.enums.EngineTypeEnum;
+import com.webank.wedatasphere.exchangis.job.launcher.AccessibleLauncherTask;
+import com.webank.wedatasphere.exchangis.job.launcher.exception.ExchangisTaskLaunchException;
+import com.webank.wedatasphere.exchangis.job.launcher.ExchangisTaskLaunchManager;
+import com.webank.wedatasphere.exchangis.job.launcher.ExchangisTaskLauncher;
+import com.webank.wedatasphere.exchangis.job.launcher.domain.LaunchableExchangisTask;
+import com.webank.wedatasphere.exchangis.job.launcher.domain.LaunchedExchangisTask;
+import com.webank.wedatasphere.exchangis.job.launcher.linkis.client.ExchangisLaunchClient;
+import org.apache.commons.lang.StringUtils;
+import org.apache.linkis.common.exception.LinkisRetryException;
+import org.apache.linkis.common.utils.DefaultRetryHandler;
+import org.apache.linkis.common.utils.RetryHandler;
+import org.apache.linkis.computation.client.LinkisJobClient$;
+import org.apache.linkis.computation.client.once.simple.SimpleOnceJobBuilder$;
+
+import java.lang.reflect.Field;
+import java.util.*;
+
+/**
+ * Linkis task launcher
+ */
+public class LinkisExchangisTaskLauncher implements ExchangisTaskLauncher {
+
+    private final String[] STORE_INFO = new String[]{"ecmServiceInstance"};
+    /**
+     * Engine versions
+     */
+    private Map<String, String> engineVersions = new HashMap<>();
+
+    @Override
+    public String name() {
+        return "Linkis";
+    }
+
+    @Override
+    public void init(ExchangisTaskLaunchManager jobLaunchManager) {
+        this.engineVersions.put(EngineTypeEnum.SQOOP.name().toLowerCase(), "1.4.6");
+        this.engineVersions.put(EngineTypeEnum.DATAX.name().toLowerCase(), "3.0.0");
+        RetryHandler retryHandler = new DefaultRetryHandler(){};
+        retryHandler.addRetryException(LinkisRetryException.class);
+        ExchangisClientConfigBuilder builder = (ExchangisClientConfigBuilder) ExchangisClientConfig.newBuilder().discoveryEnabled(false)
+                .retryEnabled(true)
+                .setRetryHandler(retryHandler);
+        ExchangisClientConfig clientConfig = builder.build();
+        // Try to inject the custom launch client into the private static field of the
+        // SimpleOnceJobBuilder$ Scala object (the lookup below builds the Scala-mangled field name)
+        Class<?> clz = SimpleOnceJobBuilder$.MODULE$.getClass();
+        Field field;
+        boolean setField = false;
+        try {
+            field = clz.getDeclaredField(SimpleOnceJobBuilder$.class.getName().replace(".", "$") + "$linkisManagerClient");
+            field.setAccessible(true);
+            try {
+                ExchangisLaunchClient client = new ExchangisLaunchClient(clientConfig);
+                field.set(SimpleOnceJobBuilder$.MODULE$, client);
+                Runtime.getRuntime().addShutdownHook(new Thread(client::close));
+                setField = true;
+            } catch (IllegalAccessException e) {
+                // Ignore and fall back to the default client config below
+            }
+        } catch (NoSuchFieldException e) {
+            // Ignore and fall back to the default client config below
+        }
+        if (!setField){
+            // Fallback: register the config globally so the default client picks it up
+            LinkisJobClient$.MODULE$.config().setDefaultClientConfig(clientConfig);
+        }
+    }
+
+    @Override
+    public AccessibleLauncherTask launcherTask(LaunchedExchangisTask launchedTask) throws ExchangisTaskLaunchException {
+        String linkisJobId = launchedTask.getLinkisJobId();
+        String execUser = launchedTask.getExecuteUser();
+        Map jobInfoMap = launchedTask.getLinkisJobInfoMap();
+        try {
+            return LinkisLauncherTask.init(linkisJobId, execUser, jobInfoMap, launchedTask.getEngineType());
+        } catch (Exception e){
+            throw new ExchangisTaskLaunchException("Unable to build accessible launcher task from launched task: [" +
launchedTask.getTaskId() + "]", e); + } + } + + @Override + public LaunchedExchangisTask launch(LaunchableExchangisTask launchableTask) throws ExchangisTaskLaunchException { + String engineType = launchableTask.getEngineType(); + if (StringUtils.isBlank(engineType)) { + throw new ExchangisTaskLaunchException("Unsupported job execution engine: '" + launchableTask.getEngineType() + "'.", null); + } + LaunchedExchangisTask launchedExchangisTask = new LaunchedExchangisTask(launchableTask); + LinkisLauncherTask launcherTask = LinkisLauncherTask.init(launchableTask, this.engineVersions); + launcherTask.submit(); + launchedExchangisTask.setLinkisJobId(launcherTask.getJobId()); + launchedExchangisTask.setLinkisJobInfoMap(convertJobInfoToStore(launcherTask.getJobInfo(false))); + launchedExchangisTask.setLauncherTask(launcherTask); + return launchedExchangisTask; + } + + /** + * Convert to store job information + * @param jobInfo job info + * @return + */ + private Map convertJobInfoToStore(Map jobInfo){ + Map storeInfo = new HashMap<>(); + Optional.ofNullable(jobInfo).ifPresent( info -> { + for (String infoKey : STORE_INFO){ + Optional.ofNullable(info.get(infoKey)).ifPresent(infoItem -> storeInfo.put(infoKey,infoItem)); + } + }); + return storeInfo; + } + +} + diff --git a/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/linkis/LinkisLauncherTask.java b/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/linkis/LinkisLauncherTask.java new file mode 100644 index 000000000..c51a1293c --- /dev/null +++ b/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/linkis/LinkisLauncherTask.java @@ -0,0 +1,365 @@ +package com.webank.wedatasphere.exchangis.job.launcher.linkis; + +import com.webank.wedatasphere.exchangis.datasource.core.utils.Json; +import com.webank.wedatasphere.exchangis.job.launcher.exception.ExchangisTaskLaunchException; +import com.webank.wedatasphere.exchangis.job.launcher.exception.ExchangisTaskNotExistException; +import com.webank.wedatasphere.exchangis.job.launcher.AccessibleLauncherTask; +import com.webank.wedatasphere.exchangis.job.launcher.domain.LaunchableExchangisTask; +import com.webank.wedatasphere.exchangis.job.log.LogResult; +import com.webank.wedatasphere.exchangis.job.log.LogQuery; +import com.webank.wedatasphere.exchangis.job.launcher.domain.task.TaskProgressInfo; +import com.webank.wedatasphere.exchangis.job.launcher.domain.task.TaskStatus; +import org.apache.commons.lang.StringUtils; +import org.apache.linkis.computation.client.LinkisJobBuilder; +import org.apache.linkis.computation.client.LinkisJobClient; +import org.apache.linkis.computation.client.once.SubmittableOnceJob; +import org.apache.linkis.computation.client.once.simple.SimpleOnceJob; +import org.apache.linkis.computation.client.once.simple.SubmittableSimpleOnceJob; +import org.apache.linkis.computation.client.operator.impl.*; +import org.apache.linkis.computation.client.utils.LabelKeyUtils; +import org.apache.linkis.datasourcemanager.common.exception.JsonErrorException; +import org.apache.linkis.datasourcemanager.common.util.PatternInjectUtils; +import org.apache.linkis.protocol.engine.JobProgressInfo; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.*; +import java.util.concurrent.atomic.AtomicInteger; + +import static com.webank.wedatasphere.exchangis.job.launcher.ExchangisLauncherConfiguration.*; + +/** + * Linkis launcher task + */ +public 
class LinkisLauncherTask implements AccessibleLauncherTask { + + private static final Logger LOG = LoggerFactory.getLogger(LinkisLauncherTask.class); + + private static final String METRIC_NAME = "ecMetrics"; + + /** + * Engine versions + */ + private Map engineVersions = new HashMap<>(); + + /** + * Linkis Job information + */ + private Map jobInfo = new HashMap<>(); + + /** + * Linkis job id + */ + private String jobId; + + /** + * Engine Conn type + */ + private String engineConn; + /** + * Refer to linkis job status + */ + private TaskStatus status = TaskStatus.Undefined; + + /** + * Progress + */ + private TaskProgressInfo progressInfo; + /** + * Hold the once job instance + */ + private SimpleOnceJob onceJob; + + /** + * Progress operator + */ + private EngineConnProgressOperator progressOperator; + + /** + * Metrics operator + */ + private EngineConnMetricsOperator metricsOperator; + + /** + * Request error count + */ + private AtomicInteger reqError = new AtomicInteger(0); + + static{ + + } + public static LinkisLauncherTask init(LaunchableExchangisTask task, Map engineVersions){ + return new LinkisLauncherTask(task, engineVersions); + } + + public static LinkisLauncherTask init(String jobId, String user, Map jobInfo, String engineConn){ + return new LinkisLauncherTask(jobId, user, jobInfo, engineConn); + } + + private LinkisLauncherTask(LaunchableExchangisTask task, Map engineVersions){ + this.engineVersions = engineVersions; + this.onceJob = toSubmittableJob(task); + } + + private LinkisLauncherTask(String jobId, String user, Map jobInfo, String engineConn){ + // Build existing job + this.onceJob = LinkisJobClient.once().simple().build(jobId, user); + this.jobId = jobId; + this.jobInfo = jobInfo; + if (StringUtils.isNotBlank(engineConn)){ + // To lower case + engineConn = engineConn.toLowerCase(); + } + this.engineConn = engineConn; + prepareOperators(this.onceJob); + } + + @Override + public TaskStatus getStatus() throws ExchangisTaskLaunchException { + if (Objects.nonNull(this.onceJob)) { + if (TaskStatus.isCompleted(this.status)){ + return status; + } + try { + // Fetch the latest info + getJobInfo(true); + String linkisJobStatus = this.onceJob.getStatus(this.jobInfo); + if ("success".equalsIgnoreCase(linkisJobStatus)) { + this.status = TaskStatus.Success; + } else if ("failed".equalsIgnoreCase(linkisJobStatus)){ + this.status = TaskStatus.Failed; + } else if ("shuttingdown".equalsIgnoreCase(linkisJobStatus)) { + LOG.warn("Will retry on linkis job status: [{}]", linkisJobStatus); + // Retry on shutting down status + this.status = TaskStatus.WaitForRetry; + } else { + this.status = TaskStatus.Running; + } + // Init the error count + this.reqError.set(0); + } catch (Exception e){ + try { + dealException(e); + } catch (ExchangisTaskNotExistException ne){ + LOG.warn("Not find the launcher task in exchangis", e); + this.status = TaskStatus.Failed; + } + } + } + return this.status; + } + + @Override + public TaskStatus getLocalStatus() { + return this.status; + } + + @Override + public Map getMetricsInfo() throws ExchangisTaskLaunchException { + if (Objects.nonNull(this.metricsOperator)){ + try{ + // Invoke getStatus() to get real time status + if(!TaskStatus.isCompleted(getStatus())){ + Map metrics = (Map)this.metricsOperator.apply(); + // Init the error count + this.reqError.set(0); + return metrics; + }else { + // Try to get metric from job info + Map jobInfo = getJobInfo(false); + Object metric = jobInfo.get(METRIC_NAME); + if (Objects.nonNull(metric)){ + return 
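+                    // For a completed task the engine conn is already released, so parse the
+                    // "ecMetrics" snapshot kept in the linkis job info instead of calling the operator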
Json.fromJson(String.valueOf(metric), Map.class); + } + } + }catch(Exception e){ + dealException(e); + } + } + return null; + } + + @Override + public TaskProgressInfo getProgressInfo() throws ExchangisTaskLaunchException { + if (Objects.nonNull(this.progressOperator)){ + try { + // invoke getStatus() to get real time status + TaskStatus taskStatus = getStatus(); + if (!TaskStatus.isCompleted(taskStatus)){ + EngineConnProgressInfo progressInfo = (EngineConnProgressInfo)this.progressOperator.apply(); + JobProgressInfo[] progressInfoArray = progressInfo.progressInfo(); + TaskProgressInfo taskProgressInfo = new TaskProgressInfo(); + if (progressInfoArray.length > 1){ + taskProgressInfo.setTotal(progressInfoArray[0].totalTasks()); + taskProgressInfo.setFailed(progressInfoArray[0].failedTasks()); + taskProgressInfo.setRunning(progressInfoArray[0].runningTasks()); + taskProgressInfo.setSucceed(progressInfoArray[0].succeedTasks()); + } + taskProgressInfo.setProgress(progressInfo.progress()); + this.progressInfo = taskProgressInfo; + } else if (taskStatus == TaskStatus.Success){ + if (Objects.isNull(this.progressInfo)){ + this.progressInfo = new TaskProgressInfo(); + } + this.progressInfo.setProgress(1.0f); + } + // Init the error count + this.reqError.set(0); + } catch(Exception e){ + dealException(e); + } + } + return this.progressInfo; + } + + @Override + public void kill() throws ExchangisTaskLaunchException { + if (Objects.nonNull(this.onceJob)){ + try{ + this.onceJob.kill(); + this.status = TaskStatus.Cancelled; + // Init the error count + this.reqError.set(0); + }catch(Exception e){ + dealException(e); + } + } + } + + @Override + public LogResult queryLogs(LogQuery query) throws ExchangisTaskLaunchException { + // The logOperator is not thread safe, so create it each time + if (Objects.nonNull(this.onceJob)){ + try{ + LaunchTaskLogOperator logOperator = (LaunchTaskLogOperator) this.onceJob.getOperator(LaunchTaskLogOperator.OPERATOR_NAME()); + logOperator.setFromLine(query.getFromLine()); + logOperator.setPageSize(query.getPageSize()); + logOperator.setEngineConnType(this.engineConn); + logOperator.setECMServiceInstance(this.onceJob.getECMServiceInstance(this.jobInfo)); + logOperator.setIgnoreKeywords(query.getIgnoreKeywords()); + logOperator.setOnlyKeywords(query.getOnlyKeywords()); + logOperator.setEnableTail(query.isEnableTail()); + if (Objects.nonNull(query.getLastRows())){ + logOperator.setLastRows(query.getLastRows()); + } + EngineConnLogs logs = (EngineConnLogs)logOperator.apply(); + boolean isEnd = logs.logs().size() <= 0; + if (isEnd){ + isEnd = TaskStatus.isCompleted(getStatus()); + } + // Init the error count + this.reqError.set(0); + return new LogResult(logs.endLine(), isEnd, logs.logs()); + } catch (Exception e){ + dealException(e); + } + } + + return null; + } + + @Override + public synchronized void submit() throws ExchangisTaskLaunchException { + if (Objects.isNull(this.onceJob) || !(this.onceJob instanceof SubmittableOnceJob)){ + throw new ExchangisTaskLaunchException("Unsupported 'submit' method", null); + } + try { + ((SubmittableOnceJob) this.onceJob).submit(); + TaskStatus status = getStatus(); + if (status == TaskStatus.Undefined || status == TaskStatus.WaitForRetry){ + throw new ExchangisTaskLaunchException("Fail to submit to linkis server with unexpected final status: [" + status + "]", null); + } + // New the operators for job + prepareOperators(this.onceJob); + Map jobInfo = getJobInfo(false); + jobInfo.put("ecmServiceInstance", 
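+            // Keep the ECM service instance in the job info: queryLogs needs it later to
+            // route the LaunchTaskLogOperator to the engine conn manager that ran the task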
((SubmittableSimpleOnceJob)this.onceJob).getECMServiceInstance());
+        } catch (Exception e){
+            dealException(e);
+        }
+    }
+
+    /**
+     * Convert launchable task to once job
+     * @param task task
+     * @return once job
+     */
+    @SuppressWarnings("unchecked")
+    private SimpleOnceJob toSubmittableJob(LaunchableExchangisTask task){
+        //TODO handle the startup params
+        LinkisJobBuilder jobBuilder = LinkisJobClient.once().simple().builder().setCreateService(LAUNCHER_LINKIS_CREATOR.getValue())
+                .setMaxSubmitTime(LAUNCHER_LINKIS_MAX_SUBMIT.getValue())
+                .addLabel(LabelKeyUtils.ENGINE_TYPE_LABEL_KEY(), task.getEngineType().toLowerCase() + "-" +
+                        engineVersions.getOrDefault(task.getEngineType().toLowerCase(), "0.0.0"))
+                .addLabel(LabelKeyUtils.USER_CREATOR_LABEL_KEY(), task.getExecuteUser() + "-" + LAUNCHER_LINKIS_CREATOR.getValue())
+                .addLabel(LabelKeyUtils.ENGINE_CONN_MODE_LABEL_KEY(), LAUNCHER_LINKIS_ENGINE_CONN_MODE.getValue())
+                .addExecuteUser(task.getExecuteUser());
+        Optional.ofNullable(task.getLinkisContentMap()).ifPresent(params -> params.forEach(jobBuilder::addJobContent));
+        Optional.ofNullable(task.getLinkisParamsMap()).ifPresent(params -> {
+            Object runtimeParams = params.get(LAUNCHER_LINKIS_RUNTIME_PARAM_NAME);
+            if (Objects.nonNull(runtimeParams) && runtimeParams instanceof Map){
+                jobBuilder.setRuntimeParams((Map) runtimeParams);
+            }
+            Object startupParams = params.get(LAUNCHER_LINKIS_STARTUP_PARAM_NAME);
+            if (Objects.nonNull(startupParams) && startupParams instanceof Map){
+                jobBuilder.setStartupParams((Map) startupParams);
+            }
+        });
+        try {
+            jobBuilder.addStartupParam(PatternInjectUtils.inject(LAUNCHER_LINKIS_EXEC_ID,
+                    new String[]{task.getEngineType().toLowerCase(Locale.ROOT)}), task.getId());
+        } catch (JsonErrorException e) {
+            // Ignore
+        }
+        return jobBuilder.build();
+    }
+
+    private void prepareOperators(SimpleOnceJob onceJob){
+        this.progressOperator = (EngineConnProgressOperator) onceJob.getOperator(EngineConnProgressOperator.OPERATOR_NAME());
+        this.metricsOperator = (EngineConnMetricsOperator) onceJob.getOperator(EngineConnMetricsOperator.OPERATOR_NAME());
+    }
+
+    /**
+     * Handle the exception from the linkis client
+     * @param e exception entity
+     * @throws ExchangisTaskLaunchException launch exception
+     */
+    private void dealException(Exception e) throws ExchangisTaskLaunchException {
+        String message = e.getMessage();
+        if (reqError.incrementAndGet() > LAUNCHER_LINKIS_MAX_ERROR.getValue()){
+            this.status = TaskStatus.Failed;
+            LOG.warn("Failed to connect to the linkis server more than {} times, linkis_id: {}, now mark the task status as: {}", LAUNCHER_LINKIS_MAX_ERROR.getValue(), this.jobId, this.status, e);
+            return;
+        }
+        if (StringUtils.isNotBlank(message) && message.contains(TASK_NOT_EXIST)){
+            throw new ExchangisTaskNotExistException("It seems that the linkis job: [ linkis_id: " + getJobId() + "] cannot be found in linkis server", e);
+        } else {
+            throw new ExchangisTaskLaunchException("Unexpected exception in communicating with linkis server", e);
+        }
+    }
+
+    /**
+     * Get the linkis job information
+     * @return info map
+     */
+    public Map getJobInfo(boolean refresh){
+        if (Objects.nonNull(onceJob)){
+            if (Objects.isNull(this.jobInfo) || refresh){
+                this.jobInfo = this.onceJob.getNodeInfo();
+            }
+        }
+        return this.jobInfo;
+    }
+
+    /**
+     * Get the linkis job id
+     * @return id
+     */
+    public String getJobId(){
+        if (Objects.nonNull(onceJob)){
+            this.jobId = onceJob.getId();
+        }
+        return this.jobId;
+    }
+
+    public void setStatus(TaskStatus status) {
+        this.status = status;
+    }
+}
diff --git
a/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/manager/LinkisExchangisTaskLaunchManager.java b/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/manager/LinkisExchangisTaskLaunchManager.java new file mode 100644 index 000000000..734cbf740 --- /dev/null +++ b/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/manager/LinkisExchangisTaskLaunchManager.java @@ -0,0 +1,42 @@ +package com.webank.wedatasphere.exchangis.job.launcher.manager; + +import com.webank.wedatasphere.exchangis.job.launcher.ExchangisTaskLaunchManager; +import com.webank.wedatasphere.exchangis.job.launcher.ExchangisTaskLauncher; +import com.webank.wedatasphere.exchangis.job.launcher.domain.LaunchableExchangisTask; +import com.webank.wedatasphere.exchangis.job.launcher.domain.LaunchedExchangisTask; +import com.webank.wedatasphere.exchangis.job.launcher.linkis.LinkisExchangisTaskLauncher; +import org.springframework.stereotype.Component; + +import javax.annotation.PostConstruct; +import java.util.Locale; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; + +@Component +public class LinkisExchangisTaskLaunchManager implements ExchangisTaskLaunchManager { + + private final Map> launchers = new ConcurrentHashMap<>(); + + @PostConstruct + public void init() { + LinkisExchangisTaskLauncher linkisExchangisJobLauncher = new LinkisExchangisTaskLauncher(); + linkisExchangisJobLauncher.init(this); + this.registerTaskLauncher(linkisExchangisJobLauncher); + } + + + @Override + public void registerTaskLauncher(ExchangisTaskLauncher taskLauncher) { + this.launchers.put(taskLauncher.name().toUpperCase(Locale.ROOT), taskLauncher); + } + + @Override + public void unRegisterTaskLauncher(String launcherName) { + this.launchers.remove(launcherName.toUpperCase(Locale.ROOT)); + } + + @Override + public ExchangisTaskLauncher getTaskLauncher(String launcherName) { + return this.launchers.get(launcherName.toUpperCase(Locale.ROOT)); + } +} diff --git a/exchangis-job/exchangis-job-launcher/src/main/scala/com/webank/wedatasphere/exchangis/job/launcher/linkis/LaunchTaskLogOperator.scala b/exchangis-job/exchangis-job-launcher/src/main/scala/com/webank/wedatasphere/exchangis/job/launcher/linkis/LaunchTaskLogOperator.scala new file mode 100644 index 000000000..c31911609 --- /dev/null +++ b/exchangis-job/exchangis-job-launcher/src/main/scala/com/webank/wedatasphere/exchangis/job/launcher/linkis/LaunchTaskLogOperator.scala @@ -0,0 +1,31 @@ +package com.webank.wedatasphere.exchangis.job.launcher.linkis + +import org.apache.linkis.computation.client.once.action.EngineConnOperateAction +import org.apache.linkis.computation.client.operator.impl.EngineConnLogOperator + +/** + * Enable to reverse read log file + */ +class LaunchTaskLogOperator extends EngineConnLogOperator{ + + private var enableTail: Boolean = false + + def setEnableTail(enableTail: Boolean): Unit = { + this.enableTail = enableTail + } + + def isEnableTail: Boolean = { + this.enableTail + } + + protected override def addParameters(builder: EngineConnOperateAction.Builder): Unit = { + super.addParameters(builder) + builder.operatorName(EngineConnLogOperator.OPERATOR_NAME) + builder.addParameter("enableTail", enableTail) + } + + override def getName: String = LaunchTaskLogOperator.OPERATOR_NAME +} +object LaunchTaskLogOperator { + val OPERATOR_NAME = "launchTaskLog" +} \ No newline at end of file diff --git 
a/exchangis-job/exchangis-job-launcher/src/main/scala/com/webank/wedatasphere/exchangis/job/launcher/linkis/client/ExchangisLaunchClient.scala b/exchangis-job/exchangis-job-launcher/src/main/scala/com/webank/wedatasphere/exchangis/job/launcher/linkis/client/ExchangisLaunchClient.scala new file mode 100644 index 000000000..e3558dfe9 --- /dev/null +++ b/exchangis-job/exchangis-job-launcher/src/main/scala/com/webank/wedatasphere/exchangis/job/launcher/linkis/client/ExchangisLaunchClient.scala @@ -0,0 +1,50 @@ +package com.webank.wedatasphere.exchangis.job.launcher.linkis.client + +import com.webank.wedatasphere.exchangis.common.linkis.client.ExchangisHttpClient +import com.webank.wedatasphere.exchangis.common.linkis.client.config.ExchangisClientConfig +import org.apache.linkis.common.utils.Utils +import org.apache.linkis.computation.client.once.LinkisManagerClient +import org.apache.linkis.computation.client.once.action.{AskEngineConnAction, CreateEngineConnAction, EngineConnOperateAction, GetEngineConnAction, KillEngineConnAction, LinkisManagerAction} +import org.apache.linkis.computation.client.once.result.{AskEngineConnResult, CreateEngineConnResult, EngineConnOperateResult, GetEngineConnResult, KillEngineConnResult, LinkisManagerResult} +import org.apache.linkis.httpclient.request.Action + +/** + * Exchangis launch client + */ +class ExchangisLaunchClient(clientConfig: ExchangisClientConfig) extends LinkisManagerClient{ + private val dwsHttpClient = new ExchangisHttpClient(clientConfig, "Linkis-Job-Execution-Thread") + + protected def execute[T <: LinkisManagerResult](linkisManagerAction: LinkisManagerAction): T = + linkisManagerAction match { + case action: Action => dwsHttpClient.execute(action).asInstanceOf[T] + } + + override def createEngineConn( + createEngineConnAction: CreateEngineConnAction + ): CreateEngineConnResult = execute(createEngineConnAction) + + override def getEngineConn(getEngineConnAction: GetEngineConnAction): GetEngineConnResult = + execute(getEngineConnAction) + + override def killEngineConn(killEngineConnAction: KillEngineConnAction): KillEngineConnResult = + execute(killEngineConnAction) + + override def executeEngineConnOperation( + engineConnOperateAction: EngineConnOperateAction + ): EngineConnOperateResult = { + Utils.tryCatch { + val rs = execute[EngineConnOperateResult](engineConnOperateAction) + rs + } { case e: Exception => + val rs = new EngineConnOperateResult + rs.setIsError(true) + rs.setErrorMsg(e.getMessage) + rs + } + } + + override def close(): Unit = dwsHttpClient.close() + + override def askEngineConn(askEngineConnAction: AskEngineConnAction): AskEngineConnResult = + execute(askEngineConnAction) +} diff --git a/exchangis-job/exchangis-job-metrics/pom.xml b/exchangis-job/exchangis-job-metrics/pom.xml new file mode 100644 index 000000000..7da6f5992 --- /dev/null +++ b/exchangis-job/exchangis-job-metrics/pom.xml @@ -0,0 +1,60 @@ + + + + exchangis + com.webank.wedatasphere.exchangis + ${revision} + ../../pom.xml + + 4.0.0 + + exchangis-job-metrics + + + 8 + 8 + + + + + org.apache.linkis + linkis-module + ${linkis.version} + + + + org.apache.linkis + linkis-mybatis + ${linkis.version} + + + + + + + org.apache.maven.plugins + maven-deploy-plugin + + + + net.alchim31.maven + scala-maven-plugin + + + org.apache.maven.plugins + maven-jar-plugin + + + + + src/main/java + + **/*.xml + + + + + + \ No newline at end of file diff --git a/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/CacheGuageMetric.java 
b/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/CacheGuageMetric.java new file mode 100644 index 000000000..d9f6f5562 --- /dev/null +++ b/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/CacheGuageMetric.java @@ -0,0 +1,4 @@ +package com.webank.wedatasphere.exchangis.metrics; + +public interface CacheGuageMetric extends Metric { +} diff --git a/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/GraphMetric.java b/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/GraphMetric.java new file mode 100644 index 000000000..749e798b2 --- /dev/null +++ b/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/GraphMetric.java @@ -0,0 +1,4 @@ +package com.webank.wedatasphere.exchangis.metrics; + +public interface GraphMetric extends Metric { +} diff --git a/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/GuageMetric.java b/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/GuageMetric.java new file mode 100644 index 000000000..02b38d9d9 --- /dev/null +++ b/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/GuageMetric.java @@ -0,0 +1,7 @@ +package com.webank.wedatasphere.exchangis.metrics; + +public interface GuageMetric extends Metric { + + void setValue(T value); + +} diff --git a/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/Metric.java b/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/Metric.java new file mode 100644 index 000000000..377edc92e --- /dev/null +++ b/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/Metric.java @@ -0,0 +1,21 @@ +package com.webank.wedatasphere.exchangis.metrics; + +import com.webank.wedatasphere.exchangis.metrics.api.MetricName; + +import java.util.Date; + +public interface Metric { + +// String getMetricTitle(); +// +// String getMetricNorm(); +// +// T getMetricValue(); +// +// Date getMetricTime(); + + long lastUpdateTime(); + + MetricName getMetricName(); + +} diff --git a/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/MetricBuilder.java b/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/MetricBuilder.java new file mode 100644 index 000000000..f8eb4d3e8 --- /dev/null +++ b/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/MetricBuilder.java @@ -0,0 +1,9 @@ +package com.webank.wedatasphere.exchangis.metrics; + +import com.webank.wedatasphere.exchangis.metrics.api.MetricName; + +public interface MetricBuilder> { + + T build(MetricName name); + +} diff --git a/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/PersistableMetric.java b/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/PersistableMetric.java new file mode 100644 index 000000000..f3c6869c5 --- /dev/null +++ b/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/PersistableMetric.java @@ -0,0 +1,7 @@ +package com.webank.wedatasphere.exchangis.metrics; + +public interface PersistableMetric extends PersistenceMetric { + + PersistableMetric toPersistenceMetric(); + +} diff --git 
a/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/PersistenceMetric.java b/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/PersistenceMetric.java new file mode 100644 index 000000000..4640a233d --- /dev/null +++ b/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/PersistenceMetric.java @@ -0,0 +1,9 @@ +package com.webank.wedatasphere.exchangis.metrics; + +public interface PersistenceMetric extends Metric { + + String getMetricSeq(); + + String getMetricValueType(); + +} diff --git a/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/Scope.java b/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/Scope.java new file mode 100644 index 000000000..14ea26513 --- /dev/null +++ b/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/Scope.java @@ -0,0 +1,5 @@ +package com.webank.wedatasphere.exchangis.metrics; + +public enum Scope { + SINGLETON, PROTOTYPE +} diff --git a/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/api/Counter.java b/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/api/Counter.java new file mode 100644 index 000000000..0e1fa3eb9 --- /dev/null +++ b/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/api/Counter.java @@ -0,0 +1,23 @@ +package com.webank.wedatasphere.exchangis.metrics.api; + +import com.webank.wedatasphere.exchangis.metrics.Metric; + +/** + *
+ * An incrementing and decrementing counter metric.
+ *
+ * A counter-style metric, suitable for recording values such as the total number of invocations.
+ *
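+ * A hypothetical usage sketch (the metric name here is illustrative; {@code MetricManager.getCounter}
+ * and {@code MetricName.build} are the entry points defined in this module):
+ *
+ *   Counter counter = MetricManager.getCounter(MetricName.build("exchangis", "job", "running"));
+ *   counter.inc();
+ *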
+ */ +public interface Counter extends Metric { + + void inc(); + + void inc(long n); + + void dec(); + + void dec(long n); + + long getCount(); +} diff --git a/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/api/ExchangisMetricManager.java b/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/api/ExchangisMetricManager.java new file mode 100644 index 000000000..e6f1e40de --- /dev/null +++ b/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/api/ExchangisMetricManager.java @@ -0,0 +1,46 @@ +package com.webank.wedatasphere.exchangis.metrics.api; + +import com.webank.wedatasphere.exchangis.metrics.Metric; +import com.webank.wedatasphere.exchangis.metrics.impl.ExchangisMetricRegistry; + +public class ExchangisMetricManager implements IMetricManager { + + private volatile boolean enabled; + private final ExchangisMetricRegistry exchangisMetricRegistry; + + public ExchangisMetricManager() { + this.exchangisMetricRegistry = new ExchangisMetricRegistry(); + enabled = true; + } + + public void setEnabled(boolean enabled) { + this.enabled = enabled; + } + + @Override + public Counter getCounter(MetricName name) { + if (!this.enabled) { + return MetricManager.NOP_METRIC_MANAGER.getCounter(name); + } + + return this.exchangisMetricRegistry.counter(name); + } + + @Override + public Counter getJdbcCounter(MetricName name) { + if (!this.enabled) { + return MetricManager.NOP_METRIC_MANAGER.getJdbcCounter(name); + } + + return this.exchangisMetricRegistry.jdbcCounter(name); + } + + @Override + public void register(MetricName name, Metric metric) { + if (!this.enabled) { + return; + } + this.exchangisMetricRegistry.register(name, metric); + } + +} diff --git a/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/api/IMetricManager.java b/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/api/IMetricManager.java new file mode 100644 index 000000000..31ecaf563 --- /dev/null +++ b/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/api/IMetricManager.java @@ -0,0 +1,24 @@ +package com.webank.wedatasphere.exchangis.metrics.api; + +import com.webank.wedatasphere.exchangis.metrics.Metric; + +public interface IMetricManager { + + /** + * Create a {@link Counter} metric in given group, and name. + * if not exist, an instance will be created. + * + * @param name the name of the metric + * @return an instance of counter + */ + Counter getCounter(MetricName name); + + Counter getJdbcCounter(MetricName name); + + /** + * Register a customized metric to specified group. + * @param metric the metric to register + */ + void register(MetricName name, Metric metric); + +} diff --git a/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/api/MetricManager.java b/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/api/MetricManager.java new file mode 100644 index 000000000..159976c9d --- /dev/null +++ b/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/api/MetricManager.java @@ -0,0 +1,76 @@ +package com.webank.wedatasphere.exchangis.metrics.api; + +import com.webank.wedatasphere.exchangis.metrics.Metric; + +import java.lang.reflect.Method; + +/** + * The design concept is heavily borrowed from alibaba metrics. 
+ *
+ * alibaba metrics
+ * ====================================
+ * The design concept is heavily borrowed from SLF4j (http://www.slf4j.org/), the logging framework.
+ * The application only depends on the metrics api.
+ * The implementation will be dynamically bound.
+ * If the implementation is not found in the classpath, the {@link NOPMetricManager} will be bound by default.
+ *
+ * This reuses the alibaba metrics code with some simplifications on top of it: the group concept
+ * has been removed, and for now only the exchangis services are monitored.
+ */
+public class MetricManager {
+
+    private static final String BINDER_CLASS = "com.webank.wedatasphere.exchangis.metrics.api.MetricManagerBinder";
+    public static final IMetricManager NOP_METRIC_MANAGER = new NOPMetricManager();
+
+    private static volatile IMetricManager iMetricManager;
+
+    /**
+     * Create a {@link Counter} metric with the given name.
+     * If none exists, an instance will be created.
+     * A Counter is mainly used for counting and supports the +1, -1, +n and -n operations.
+     *
+     * @param name the name of the metric
+     * @return an instance of counter
+     */
+    public static Counter getCounter(MetricName name) {
+        IMetricManager manager = getIMetricManager();
+        return manager.getCounter(name);
+    }
+
+    public static Counter getJdbcCounter(MetricName name) {
+        IMetricManager manager = getIMetricManager();
+        return manager.getJdbcCounter(name);
+    }
+
+    /**
+     * Register a customized metric under the specified name.
+     * @param metric the metric to register
+     */
+    public static void register(MetricName name, Metric metric) {
+        IMetricManager manager = getIMetricManager();
+        manager.register(name, metric);
+    }
+
+    /**
+     * Get the dynamically bound {@link IMetricManager} instance
+     * @return the {@link IMetricManager} instance bound
+     */
+    public static IMetricManager getIMetricManager() {
+        if (iMetricManager == null) {
+            synchronized (MetricManager.class) {
+                if (iMetricManager == null) {
+                    try {
+                        Class<?> binderClazz = MetricManager.class.getClassLoader().loadClass(BINDER_CLASS);
+                        Method getSingleton = binderClazz.getMethod("getSingleton");
+                        Object binderObject = getSingleton.invoke(null);
+                        Method getMetricManager = binderClazz.getMethod("getMetricManager");
+                        iMetricManager = (IMetricManager) getMetricManager.invoke(binderObject);
+                    } catch (Exception e) {
+                        iMetricManager = NOP_METRIC_MANAGER;
+                    }
+                }
+            }
+        }
+        return iMetricManager;
+    }
+}
diff --git a/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/api/MetricManagerBinder.java b/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/api/MetricManagerBinder.java new file mode 100644 index 000000000..b4851fd5f --- /dev/null +++ b/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/api/MetricManagerBinder.java @@ -0,0 +1,21 @@
+package com.webank.wedatasphere.exchangis.metrics.api;
+
+public class MetricManagerBinder {
+
+    private static final MetricManagerBinder instance = new MetricManagerBinder();
+
+    private final IMetricManager manager;
+
+    private MetricManagerBinder() {
+        manager = new ExchangisMetricManager();
+    }
+
+    public static MetricManagerBinder getSingleton() {
+        return instance;
+    }
+
+    public IMetricManager getMetricManager() {
+        return manager;
+    }
+
+}
diff --git a/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/api/MetricName.java b/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/api/MetricName.java new file mode 100644 index 000000000..cf56cfb25 ---
/dev/null +++ b/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/api/MetricName.java @@ -0,0 +1,87 @@ +package com.webank.wedatasphere.exchangis.metrics.api; + +/** + * A metric name with the ability to include semantic tags. + * + * This replaces the previous style where metric names where strictly + * dot-separated strings. + * + */ +public class MetricName { + public static final String SEPARATOR = "."; + public static final MetricName EMPTY = new MetricName(); + + private String norm; + + private String title; + + public MetricName() { + this(null, null); + } + + public MetricName(String norm) { + this(norm, null); + } + + public MetricName(String norm, String title) { + this.norm = norm; + this.title = title; + } + + /** + * Build a new metric name using the specific path components. + * + * @param parts Path of the new metric name. + * @return A newly created metric name with the specified path. + **/ + public static MetricName build(String... parts) { + if (parts == null || parts.length == 0) + return MetricName.EMPTY; + + if (parts.length == 1) + return new MetricName(parts[0]); + + return new MetricName(buildName(parts)); + } + + public MetricName withTitle(String title) { + this.title = title; + return this; + } + + private static String buildName(String... names) { + final StringBuilder builder = new StringBuilder(); + boolean first = true; + + for (String name : names) { + if (name == null || name.isEmpty()) + continue; + + if (first) { + first = false; + } else { + builder.append(SEPARATOR); + } + + builder.append(name); + } + + return builder.toString(); + } + + public String getNorm() { + return norm; + } + + public void setNorm(String norm) { + this.norm = norm; + } + + public String getTitle() { + return title; + } + + public void setTitle(String title) { + this.title = title; + } +} diff --git a/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/api/MetricRegistry.java b/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/api/MetricRegistry.java new file mode 100644 index 000000000..c937a99bb --- /dev/null +++ b/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/api/MetricRegistry.java @@ -0,0 +1,27 @@ +package com.webank.wedatasphere.exchangis.metrics.api; + +import com.webank.wedatasphere.exchangis.metrics.Metric; +import com.webank.wedatasphere.exchangis.metrics.MetricBuilder; + +import java.util.List; +import java.util.Map; + +public interface MetricRegistry { + + Map>> getMetrics(); + + List> getMetrics(String norm); + + void addMetricBuilder(MetricBuilder> builder); + + > T register(String norm, T metric); + + > T register(MetricName name, T metric); + + Metric register(String norm, Class> metricCls); + + > T newMetric(String norm); + + > T removeMetric(String norm); + +} diff --git a/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/api/MetricRegistyListener.java b/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/api/MetricRegistyListener.java new file mode 100644 index 000000000..0d20a93ab --- /dev/null +++ b/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/api/MetricRegistyListener.java @@ -0,0 +1,145 @@ +//package com.webank.wedatasphere.exchangis.metrics.api; +// +//import java.util.EventListener; +// +//public interface MetricRegistyListener extends EventListener { +// +// 
abstract class Base implements MetricRegistryListener { +// @Override +// public void onGaugeAdded(MetricName name, Gauge gauge) { +// } +// +// @Override +// public void onGaugeRemoved(MetricName name) { +// } +// +// @Override +// public void onCounterAdded(MetricName name, Counter counter) { +// } +// +// @Override +// public void onCounterRemoved(MetricName name) { +// } +// +// @Override +// public void onHistogramAdded(MetricName name, Histogram histogram) { +// } +// +// @Override +// public void onHistogramRemoved(MetricName name) { +// } +// +// @Override +// public void onMeterAdded(MetricName name, Meter meter) { +// } +// +// @Override +// public void onMeterRemoved(MetricName name) { +// } +// +// @Override +// public void onTimerAdded(MetricName name, Timer timer) { +// } +// +// @Override +// public void onTimerRemoved(MetricName name) { +// } +// +// @Override +// public void onCompassAdded(MetricName name, Compass compass) { +// +// } +// +// @Override +// public void onCompassRemoved(MetricName name) { +// +// } +// +// @Override +// public void onFastCompassAdded(MetricName name, FastCompass compass) { +// +// } +// +// @Override +// public void onFastCompassRemoved(MetricName name) { +// +// } +// } +// +// +// /** +// * Called when a {@link Gauge} is added to the registry. +// * +// * @param name the gauge's name +// * @param gauge the gauge +// */ +// void onGaugeAdded(String name, Gauge gauge); +// +// /** +// * Called when a {@link Gauge} is removed from the registry. +// * +// * @param name the gauge's name +// */ +// void onGaugeRemoved(MetricName name); +// +// /** +// * Called when a {@link Counter} is added to the registry. +// * +// * @param name the counter's name +// * @param counter the counter +// */ +// void onCounterAdded(MetricName name, Counter counter); +// +// /** +// * Called when a {@link Counter} is removed from the registry. +// * +// * @param name the counter's name +// */ +// void onCounterRemoved(MetricName name); +// +// /** +// * Called when a {@link Histogram} is added to the registry. +// * +// * @param name the histogram's name +// * @param histogram the histogram +// */ +// void onHistogramAdded(MetricName name, Histogram histogram); +// +// /** +// * Called when a {@link Histogram} is removed from the registry. +// * +// * @param name the histogram's name +// */ +// void onHistogramRemoved(MetricName name); +// +// /** +// * Called when a {@link Meter} is added to the registry. +// * +// * @param name the meter's name +// * @param meter the meter +// */ +// void onMeterAdded(MetricName name, Meter meter); +// +// /** +// * Called when a {@link Meter} is removed from the registry. +// * +// * @param name the meter's name +// */ +// void onMeterRemoved(MetricName name); +// +// /** +// * Called when a {@link Timer} is added to the registry. +// * +// * @param name the timer's name +// * @param timer the timer +// */ +// void onTimerAdded(MetricName name, Timer timer); +// +// /** +// * Called when a {@link Timer} is removed from the registry. 
+// * +// * @param name the timer's name +// */ +// void onTimerRemoved(MetricName name); +// +//} diff --git a/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/api/NOPMetricManager.java b/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/api/NOPMetricManager.java new file mode 100644 index 000000000..70d611703 --- /dev/null +++ b/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/api/NOPMetricManager.java @@ -0,0 +1,58 @@ +package com.webank.wedatasphere.exchangis.metrics.api; + + +import com.webank.wedatasphere.exchangis.metrics.Metric; + +public class NOPMetricManager implements IMetricManager { + @Override + public Counter getCounter(MetricName name) { + return NOP_COUNTER; + } + + @Override + public Counter getJdbcCounter(MetricName name) { + return NOP_COUNTER; + } + + @Override + public void register(MetricName name, Metric metric) { + + } + + public static final Counter NOP_COUNTER = new Counter() { + @Override + public void inc() { + + } + + @Override + public void inc(long n) { + + } + + @Override + public void dec() { + + } + + @Override + public void dec(long n) { + + } + + @Override + public long getCount() { + return 0; + } + + @Override + public long lastUpdateTime() { + return 0; + } + + @Override + public MetricName getMetricName() { + return MetricName.build("nop.counter"); + } + }; +} diff --git a/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/dao/entity/ExchangisMetric.java b/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/dao/entity/ExchangisMetric.java new file mode 100644 index 000000000..2abc057eb --- /dev/null +++ b/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/dao/entity/ExchangisMetric.java @@ -0,0 +1,72 @@ +package com.webank.wedatasphere.exchangis.metrics.dao.entity; + +import com.baomidou.mybatisplus.annotation.IdType; +import com.baomidou.mybatisplus.annotation.TableId; +import com.baomidou.mybatisplus.annotation.TableName; + +import java.util.Date; + +@TableName("exchangis_metric") +public class ExchangisMetric { + + @TableId(type = IdType.AUTO) + private Long id; + + private String title; + + private String norm; + + private String value; + + private Date ts; + + private Long version; + + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } + + public String getTitle() { + return title; + } + + public void setTitle(String title) { + this.title = title; + } + + public String getNorm() { + return norm; + } + + public void setNorm(String norm) { + this.norm = norm; + } + + public String getValue() { + return value; + } + + public void setValue(String value) { + this.value = value; + } + + public Date getTs() { + return ts; + } + + public void setTs(Date ts) { + this.ts = ts; + } + + public Long getVersion() { + return version; + } + + public void setVersion(Long version) { + this.version = version; + } +} diff --git a/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/dao/mapper/ExchangisMetricMapper.java b/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/dao/mapper/ExchangisMetricMapper.java new file mode 100644 index 000000000..437246794 --- /dev/null +++ 
b/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/dao/mapper/ExchangisMetricMapper.java @@ -0,0 +1,17 @@
+package com.webank.wedatasphere.exchangis.metrics.dao.mapper;
+
+import com.baomidou.mybatisplus.core.mapper.BaseMapper;
+import com.webank.wedatasphere.exchangis.metrics.dao.entity.ExchangisMetric;
+import org.apache.ibatis.annotations.Mapper;
+import org.apache.ibatis.annotations.Param;
+import org.apache.ibatis.annotations.Select;
+
+import java.util.Optional;
+
+@Mapper
+public interface ExchangisMetricMapper extends BaseMapper<ExchangisMetric> {
+
+    @Select("select * from exchangis_metric where norm = #{norm}")
+    Optional<ExchangisMetric> getByNorm(@Param("norm") String norm);
+
+}
diff --git a/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/dto/ExchangisDataSourceFlowMetricsDTO.java b/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/dto/ExchangisDataSourceFlowMetricsDTO.java new file mode 100644 index 000000000..81fc85317 --- /dev/null +++ b/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/dto/ExchangisDataSourceFlowMetricsDTO.java @@ -0,0 +1,47 @@
+package com.webank.wedatasphere.exchangis.metrics.dto;
+
+// Datasource flow metrics POJO
+public class ExchangisDataSourceFlowMetricsDTO {
+
+    private String dataSourceTitle;
+
+    private Long dataSourceId;
+
+    // Monitoring dimension (second, minute, hour)
+    private String dimension;
+
+    private String flow;
+
+    public String getDataSourceTitle() {
+        return dataSourceTitle;
+    }
+
+    public void setDataSourceTitle(String dataSourceTitle) {
+        this.dataSourceTitle = dataSourceTitle;
+    }
+
+    public Long getDataSourceId() {
+        return dataSourceId;
+    }
+
+    public void setDataSourceId(Long dataSourceId) {
+        this.dataSourceId = dataSourceId;
+    }
+
+    public String getDimension() {
+        return dimension;
+    }
+
+    public void setDimension(String dimension) {
+        this.dimension = dimension;
+    }
+
+    public String getFlow() {
+        return flow;
+    }
+
+    public void setFlow(String flow) {
+        this.flow = flow;
+    }
+}
diff --git a/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/dto/ExchangisEngineResourceMetricsDTO.java b/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/dto/ExchangisEngineResourceMetricsDTO.java new file mode 100644 index 000000000..a7da75f41 --- /dev/null +++ b/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/dto/ExchangisEngineResourceMetricsDTO.java @@ -0,0 +1,34 @@
+package com.webank.wedatasphere.exchangis.metrics.dto;
+
+public class ExchangisEngineResourceMetricsDTO {
+
+    private String engine;
+
+    private String cpu;
+
+    private String mem;
+
+    public String getEngine() {
+        return engine;
+    }
+
+    public void setEngine(String engine) {
+        this.engine = engine;
+    }
+
+    public String getCpu() {
+        return cpu;
+    }
+
+    public void setCpu(String cpu) {
+        this.cpu = cpu;
+    }
+
+    public String getMem() {
+        return mem;
+    }
+
+    public void setMem(String mem) {
+        this.mem = mem;
+    }
+}
diff --git a/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/dto/ExchangisTaskProcessMetricsDTO.java b/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/dto/ExchangisTaskProcessMetricsDTO.java new file mode 100644 index 000000000..0456f8adf --- /dev/null +++
b/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/dto/ExchangisTaskProcessMetricsDTO.java @@ -0,0 +1,64 @@ +package com.webank.wedatasphere.exchangis.metrics.dto; + +public class ExchangisTaskProcessMetricsDTO { + + private String key; + + private String title; + + private Integer running; + + private Integer initialized; + + private Integer total; + + private String percentOfComplete; + + public Integer getRunning() { + return running; + } + + public void setRunning(Integer running) { + this.running = running; + } + + public Integer getInitialized() { + return initialized; + } + + public void setInitialized(Integer initialized) { + this.initialized = initialized; + } + + public Integer getTotal() { + return total; + } + + public void setTotal(Integer total) { + this.total = total; + } + + public String getPercentOfComplete() { + return percentOfComplete; + } + + public void setPercentOfComplete(String percentOfComplete) { + this.percentOfComplete = percentOfComplete; + } + + public String getKey() { + return key; + } + + public void setKey(String key) { + this.key = key; + } + + public String getTitle() { + return title; + } + + public void setTitle(String title) { + this.title = title; + } +} diff --git a/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/dto/ExchangisTaskStatusMetricsDTO.java b/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/dto/ExchangisTaskStatusMetricsDTO.java new file mode 100644 index 000000000..b006a4ff3 --- /dev/null +++ b/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/dto/ExchangisTaskStatusMetricsDTO.java @@ -0,0 +1,29 @@ +package com.webank.wedatasphere.exchangis.metrics.dto; + +public class ExchangisTaskStatusMetricsDTO { + + private String status; + + private long num; + + public ExchangisTaskStatusMetricsDTO(String status, long num) { + this.status = status; + this.num = num; + } + + public String getStatus() { + return status; + } + + public void setStatus(String status) { + this.status = status; + } + + public long getNum() { + return num; + } + + public void setNum(long num) { + this.num = num; + } +} diff --git a/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/impl/Collector.java b/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/impl/Collector.java new file mode 100644 index 000000000..eaf8e36d7 --- /dev/null +++ b/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/impl/Collector.java @@ -0,0 +1,10 @@ +package com.webank.wedatasphere.exchangis.metrics.impl; + +import com.webank.wedatasphere.exchangis.metrics.api.Counter; +import com.webank.wedatasphere.exchangis.metrics.api.MetricName; + +public interface Collector { + + void collect(MetricName name, Counter counter, long timestamp); + +} diff --git a/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/impl/CounterImpl.java b/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/impl/CounterImpl.java new file mode 100644 index 000000000..9a8cb1d3b --- /dev/null +++ b/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/impl/CounterImpl.java @@ -0,0 +1,51 @@ +package com.webank.wedatasphere.exchangis.metrics.impl; + +import com.webank.wedatasphere.exchangis.metrics.api.Counter; +import 
com.webank.wedatasphere.exchangis.metrics.api.MetricName;
+
+import java.util.concurrent.atomic.AtomicLong;
+
+public class CounterImpl implements Counter {
+
+    private final MetricName name;
+
+    // in-memory count and the time of the last update
+    private final AtomicLong count = new AtomicLong(0L);
+
+    private volatile long lastUpdate;
+
+    public CounterImpl(MetricName name) {
+        this.name = name;
+    }
+
+    @Override
+    public MetricName getMetricName() {
+        return name;
+    }
+
+    @Override
+    public long lastUpdateTime() {
+        return lastUpdate;
+    }
+
+    @Override
+    public void inc() {
+        inc(1L);
+    }
+
+    @Override
+    public void inc(long n) {
+        count.addAndGet(n);
+        lastUpdate = System.currentTimeMillis();
+    }
+
+    @Override
+    public void dec() {
+        dec(1L);
+    }
+
+    @Override
+    public void dec(long n) {
+        inc(-n);
+    }
+
+    @Override
+    public long getCount() {
+        return count.get();
+    }
+} diff --git a/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/impl/ExchangisMetricRegistry.java b/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/impl/ExchangisMetricRegistry.java new file mode 100644 index 000000000..d030bd08e --- /dev/null +++ b/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/impl/ExchangisMetricRegistry.java @@ -0,0 +1,141 @@
+package com.webank.wedatasphere.exchangis.metrics.impl;
+
+import com.webank.wedatasphere.exchangis.metrics.Metric;
+import com.webank.wedatasphere.exchangis.metrics.MetricBuilder;
+import com.webank.wedatasphere.exchangis.metrics.api.Counter;
+import com.webank.wedatasphere.exchangis.metrics.api.MetricName;
+import com.webank.wedatasphere.exchangis.metrics.api.MetricRegistry;
+import com.webank.wedatasphere.exchangis.metrics.api.NOPMetricManager;
+import org.springframework.stereotype.Component;
+
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentMap;
+
+@Component
+public class ExchangisMetricRegistry implements MetricRegistry {
+
+    private static final int DEFAULT_MAX_METRIC_COUNT = Integer.getInteger("exchangis.maxMetricCountPerRegistry", 5000);
+
+    private final ConcurrentMap<String, MetricBuilder<? extends Metric<?>>> metricBuilders;
+    private final ConcurrentMap<String, Metric<?>> metrics;
+    private final int maxMetricCount;
+
+    public ExchangisMetricRegistry() {
+        this(DEFAULT_MAX_METRIC_COUNT);
+    }
+
+    public ExchangisMetricRegistry(int maxMetricCount) {
+        this.metrics = new ConcurrentHashMap<>();
+        this.metricBuilders = new ConcurrentHashMap<>();
+        this.maxMetricCount = maxMetricCount;
+    }
+
+    /**
+     * Return the {@link Counter} registered under this name; or create and register
+     * a new {@link Counter} if none is registered.
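+     * <p>Illustrative usage (the metric name here is hypothetical):
+     * {@code registry.counter(MetricName.build("demo.count")).inc();}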
+     *
+     * @param name the name of the metric
+     * @return a new or pre-existing {@link Counter}
+     */
+    public Counter counter(MetricName name) {
+        Counter counter = getOrAdd(name, COUNTER_BUILDER);
+        if (counter == null) {
+            return NOPMetricManager.NOP_COUNTER;
+        }
+        return counter;
+    }
+
+    public Counter jdbcCounter(MetricName name) {
+        Counter counter = getOrAdd(name, JDBC_COUNTER_BUILDER);
+        if (counter == null) {
+            return NOPMetricManager.NOP_COUNTER;
+        }
+        return counter;
+    }
+
+    @Override
+    public Map<String, List<Metric<?>>> getMetrics() {
+        return null;
+    }
+
+    @Override
+    public List<Metric<?>> getMetrics(String norm) {
+        // TODO
+        return null;
+    }
+
+    @Override
+    public void addMetricBuilder(MetricBuilder<? extends Metric<?>> builder) {
+        // TODO
+    }
+
+    @Override
+    public <T extends Metric<?>> T register(String norm, T metric) {
+        Metric<?> existing = metrics.putIfAbsent(norm, metric);
+        if (null == existing) {
+            onMetricAdded(norm, metric);
+        } else {
+            throw new IllegalArgumentException("A metric named " + norm + " already exists");
+        }
+        return metric;
+    }
+
+    @Override
+    public <T extends Metric<?>> T register(MetricName name, T metric) {
+        return register(name.getNorm(), metric);
+    }
+
+    @Override
+    public Metric<?> register(String norm, Class<? extends Metric<?>> metricCls) {
+        return null;
+    }
+
+    @Override
+    public <T extends Metric<?>> T newMetric(String norm) {
+        return null;
+    }
+
+    @Override
+    public <T extends Metric<?>> T removeMetric(String norm) {
+        return null;
+    }
+
+    private void onMetricAdded(String norm, Metric<?> metric) {
+        // TODO
+    }
+
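+    // Looks up an existing metric by norm first; on a miss it builds and registers
+    // a new one. If two threads race past the null check, the loser's register(...)
+    // throws IllegalArgumentException, which is rethrown below.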
+    @SuppressWarnings("unchecked")
+    private <T extends Metric<?>> T getOrAdd(MetricName name, MetricBuilder<T> metricBuilder) {
+        final Metric<?> metric = metrics.get(name.getNorm());
+        if (metric == null) {
+            try {
+                T newMetric = metricBuilder.build(name);
+                if (newMetric == null) return null;
+                return register(name, newMetric);
+            } catch (IllegalArgumentException e) {
+                throw e;
+            }
+        }
+        return (T) metric;
+//        throw new IllegalArgumentException(name + " is already used for a different type of metric");
+    }
+
+    /**
+     * A quick and easy way of capturing the notion of default metrics.
+     */
+    private final MetricBuilder<Counter> COUNTER_BUILDER = new MetricBuilder<Counter>() {
+        @Override
+        public Counter build(MetricName name) {
+            return new CounterImpl(name);
+        }
+    };
+
+    private final MetricBuilder<JdbcCounterImpl> JDBC_COUNTER_BUILDER = new MetricBuilder<JdbcCounterImpl>() {
+        @Override
+        public JdbcCounterImpl build(MetricName name) {
+            return new JdbcCounterImpl(name);
+        }
+    };
+} diff --git a/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/impl/JdbcCounterImpl.java b/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/impl/JdbcCounterImpl.java new file mode 100644 index 000000000..7d8455384 --- /dev/null +++ b/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/impl/JdbcCounterImpl.java @@ -0,0 +1,131 @@
+package com.webank.wedatasphere.exchangis.metrics.impl;
+
+import com.baomidou.mybatisplus.core.conditions.update.UpdateWrapper;
+import com.webank.wedatasphere.exchangis.metrics.api.Counter;
+import com.webank.wedatasphere.exchangis.metrics.api.MetricName;
+import com.webank.wedatasphere.exchangis.metrics.dao.entity.ExchangisMetric;
+import com.webank.wedatasphere.exchangis.metrics.dao.mapper.ExchangisMetricMapper;
+import com.webank.wedatasphere.exchangis.metrics.support.SpringContextHolder;
+import org.springframework.context.ApplicationContext;
+
+import java.util.Date;
+import java.util.Optional;
+import java.util.concurrent.TimeUnit;
+
+public class JdbcCounterImpl implements Counter {
+    private static final long MAX_LOCK_WAIT = 60 * 1000;
+    private final MetricName name;
+    private final ExchangisMetricMapper mapper;
+    private long ts;
+
+    @Override
+    public MetricName getMetricName() {
+        return name;
+    }
+
+    public JdbcCounterImpl(MetricName name) {
+        this.name = name;
+
+        ApplicationContext ctx = SpringContextHolder.getApplicationContext();
+        this.mapper = ctx.getBean(ExchangisMetricMapper.class);
+    }
+
+    @Override
+    public long lastUpdateTime() {
+        return ts;
+    }
+
+    @Override
+    public void inc() {
+        update(1);
+    }
+
+    @Override
+    public void inc(long n) {
+        update(n);
+    }
+
+    @Override
+    public void dec() {
+        update(-1);
+    }
+
+    @Override
+    public void dec(long n) {
+        update(-n);
+    }
+
+    @Override
+    public long getCount() {
+        Optional<ExchangisMetric> metricOptional = mapper.getByNorm(name.getNorm());
+        if (metricOptional.isPresent()) {
+            ExchangisMetric metric = metricOptional.get();
+            String value = metric.getValue();
+            return Long.parseLong(value);
+        }
+        return 0;
+    }
+
+    private void update(long n) {
+        String norm = name.getNorm();
+        String title = name.getTitle();
+        // TODO decide whether the lock wait should time out (see MAX_LOCK_WAIT)
+//        long lockWaitStartMs = System.currentTimeMillis();
+        Optional<ExchangisMetric> metricOptional = mapper.getByNorm(norm);
+        if (metricOptional.isPresent()) {
+            ExchangisMetric metric = metricOptional.get();
+            long metricId = metric.getId();
+            int update;
+            ExchangisMetric dblCheckMetric;
+            do {
+                long ts = System.currentTimeMillis();
+                dblCheckMetric = mapper.selectById(metricId);
+                if (null == dblCheckMetric) {
+                    break;
+                }
+                // update
+                Long oldVersion = dblCheckMetric.getVersion();
+                String value = dblCheckMetric.getValue();
+                long longVal;
+                try {
+                    longVal = Long.parseLong(value);
+                } catch (Exception e) {
+                    longVal = 0;
+                }
+
+                UpdateWrapper<ExchangisMetric> updateWrapper = new UpdateWrapper<>();
+                updateWrapper.eq("id", dblCheckMetric.getId());
+                updateWrapper.eq("version", oldVersion);
+
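+                // optimistic lock: the update only applies while (id, version) are
+                // unchanged; a losing writer sleeps 30ms below and retries against
+                // the refreshed row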
+                ExchangisMetric updateBean = new ExchangisMetric();
+                updateBean.setValue((longVal + n) + "");
+                updateBean.setVersion(oldVersion + 1);
+                updateBean.setTs(new Date(ts));
+                update = mapper.update(updateBean, updateWrapper);
+                if (1 != update) {
+                    try {
+                        TimeUnit.MILLISECONDS.sleep(30);
+                    } catch (InterruptedException e) {
+                        // ignore
+                    }
+                }
+                this.ts = ts;
+            } while (update != 1);
+        } else {
+            long ts = System.currentTimeMillis();
+
+            // no row for this norm yet: create it
+            ExchangisMetric metric = new ExchangisMetric();
+            metric.setNorm(norm);
+            metric.setTitle(title);
+            metric.setValue(n + "");
+            metric.setVersion(1L);
+            metric.setTs(new Date(ts));
+            mapper.insert(metric);
+
+            this.ts = ts;
+        }
+    }
+} diff --git a/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/impl/MetricNames.java b/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/impl/MetricNames.java new file mode 100644 index 000000000..77106a12a --- /dev/null +++ b/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/impl/MetricNames.java @@ -0,0 +1,14 @@
+package com.webank.wedatasphere.exchangis.metrics.impl;
+
+import com.webank.wedatasphere.exchangis.metrics.api.MetricName;
+
+public class MetricNames {
+
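+    // Each norm below keys one exchangis_metric row maintained by JdbcCounterImpl;
+    // the Chinese titles are the dashboard display names of the counters.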
+    public static final MetricName TASK_RUNNING_COUNT_METRIC_NAME = MetricName.build("RUNNING").withTitle("运行中的任务数");
+    public static final MetricName TASK_SUCCESS_COUNT_METRIC_NAME = MetricName.build("SUCCESS").withTitle("成功的任务数");
+    public static final MetricName TASK_FAILED_COUNT_METRIC_NAME = MetricName.build("FAILED").withTitle("失败的任务数");
+    public static final MetricName TASK_IDLE_COUNT_METRIC_NAME = MetricName.build("IDLE").withTitle("等待中的任务数");
+    public static final MetricName TASK_BUSY_COUNT_METRIC_NAME = MetricName.build("BUSY").withTitle("慢任务任务数");
+    public static final MetricName TASK_UNLOCK_COUNT_METRIC_NAME = MetricName.build("UNLOCK").withTitle("等待重试的任务数");
+
+} diff --git a/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/impl/MetricObject.java b/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/impl/MetricObject.java new file mode 100644 index 000000000..d99bec752 --- /dev/null +++ b/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/impl/MetricObject.java @@ -0,0 +1,46 @@
+package com.webank.wedatasphere.exchangis.metrics.impl;
+
+public class MetricObject {
+
+    private MetricObject() {
+
+    }
+
+    public static Builder named(String name) {
+        return new Builder(name);
+    }
+
+    private String metric;
+
+    private Long timestamp;
+
+    private Object value;
+
+    public static class Builder {
+
+        private final MetricObject metric;
+
+        public Builder(String name) {
+            this.metric = new MetricObject();
+            metric.metric = name;
+        }
+
+        public MetricObject build() {
+            return metric;
+        }
+
+        public Builder withValue(Object value) {
+            metric.value = value;
+            return this;
+        }
+
+        public Builder withTimestamp(Long timestamp) {
+            metric.timestamp = timestamp;
+            return this;
+        }
+    }
+
+} diff --git a/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/impl/MetricsCollector.java b/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/impl/MetricsCollector.java new file mode 100644 index 000000000..2b9109cc2 --- /dev/null +++ b/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/impl/MetricsCollector.java @@ -0,0 +1,18 @@
+package com.webank.wedatasphere.exchangis.metrics.impl;
+
+import java.util.ArrayList;
+import java.util.List;
+
+public abstract class MetricsCollector implements Collector {
+
+    // metric objects accumulated by collect(...) implementations
+    protected final List<MetricObject> metrics;
+
+    MetricsCollector() {
+        this.metrics = new ArrayList<>();
+    }
+
+    public List<MetricObject> build() {
+        return metrics;
+    }
+
+} diff --git a/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/impl/MetricsCollectorFactory.java b/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/impl/MetricsCollectorFactory.java new file mode 100644 index 000000000..d33ddf16b --- /dev/null +++ b/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/impl/MetricsCollectorFactory.java @@ -0,0 +1,9 @@
+package com.webank.wedatasphere.exchangis.metrics.impl;
+
+public class MetricsCollectorFactory {
+
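+    // Illustrative caller flow (assumed usage, not part of this diff):
+    //   MetricsCollector collector = MetricsCollectorFactory.create();
+    //   collector.collect(name, counter, System.currentTimeMillis());
+    //   List<MetricObject> objects = collector.build();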
+    public static MetricsCollector create() {
+        return new NormalMetricsCollector();
+    }
+
+} diff --git a/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/impl/NormalMetricsCollector.java b/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/impl/NormalMetricsCollector.java new file mode 100644 index 000000000..8183ade2a --- /dev/null +++ b/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/impl/NormalMetricsCollector.java @@ -0,0 +1,11 @@
+package com.webank.wedatasphere.exchangis.metrics.impl;
+
+import com.webank.wedatasphere.exchangis.metrics.api.Counter;
+import com.webank.wedatasphere.exchangis.metrics.api.MetricName;
+
+public class NormalMetricsCollector extends MetricsCollector {
+    @Override
+    public void collect(MetricName name, Counter counter, long timestamp) {
+
+    }
+} diff --git a/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/service/ExchangisMetricsService.java b/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/service/ExchangisMetricsService.java new file mode 100644 index 000000000..d1a68bf65 --- /dev/null +++ b/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/service/ExchangisMetricsService.java @@ -0,0 +1,20 @@
+package com.webank.wedatasphere.exchangis.metrics.service;
+
+
+import org.apache.linkis.server.Message;
+
+import javax.servlet.http.HttpServletRequest;
+
+public interface ExchangisMetricsService {
+    Message getTaskStatusMetrics(HttpServletRequest request);
+
+    Message getTaskProcessMetrics(HttpServletRequest request);
+
+    Message getDataSourceFlowMetrics(HttpServletRequest request);
+
+    Message getEngineResourceCpuMetrics(HttpServletRequest request);
+
+    Message getEngineResourceMemMetrics(HttpServletRequest request);
+
+    Message getEngineResourceMetrics(HttpServletRequest request);
+} diff --git a/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/service/impl/ExchangisMetricsServiceImpl.java b/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/service/impl/ExchangisMetricsServiceImpl.java new file mode 100644 index 000000000..8ce15f72e --- /dev/null +++ b/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/service/impl/ExchangisMetricsServiceImpl.java @@ -0,0 +1,387 @@
+package com.webank.wedatasphere.exchangis.metrics.service.impl;
+
+import com.webank.wedatasphere.exchangis.metrics.api.Counter;
+import com.webank.wedatasphere.exchangis.metrics.api.IMetricManager;
+import com.webank.wedatasphere.exchangis.metrics.api.MetricManager;
+import com.webank.wedatasphere.exchangis.metrics.dto.ExchangisEngineResourceMetricsDTO;
+import com.webank.wedatasphere.exchangis.metrics.dto.ExchangisTaskProcessMetricsDTO;
+import com.webank.wedatasphere.exchangis.metrics.dto.ExchangisTaskStatusMetricsDTO;
+import com.webank.wedatasphere.exchangis.metrics.impl.MetricNames;
+import com.webank.wedatasphere.exchangis.metrics.service.ExchangisMetricsService;
+import org.apache.linkis.server.Message;
+import org.springframework.stereotype.Service;
+
+import javax.servlet.http.HttpServletRequest;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Calendar;
+import java.util.Date;
+import java.util.List;
+
+@Service
+public class ExchangisMetricsServiceImpl implements ExchangisMetricsService {
+
+    private static final IMetricManager manager = MetricManager.getIMetricManager();
+
+//    private final ExchangisLaunchTaskMapper exchangisLaunchTaskMapper;
+//
+//    @Autowired
+//    public ExchangisMetricsServiceImpl(ExchangisLaunchTaskMapper exchangisLaunchTaskMapper) {
+//        this.exchangisLaunchTaskMapper = exchangisLaunchTaskMapper;
+//    }
+//
+//    @Override
+//    public Message getTaskStateMetrics(HttpServletRequest request) {
+//        List<ExchangisTaskStatusMetricsDTO> metrices = new ArrayList<>();
+//        // TODO hard code
+//        ExchangisTaskStatusMetricsDTO success = exchangisLaunchTaskMapper.getTaskMetricsByStatus("SUCCESS");
+//        ExchangisTaskStatusMetricsDTO failed = exchangisLaunchTaskMapper.getTaskMetricsByStatus("FAILED");
+//        ExchangisTaskStatusMetricsDTO running = exchangisLaunchTaskMapper.getTaskMetricsByStatus("RUNNING");
+//        ExchangisTaskStatusMetricsDTO busy = exchangisLaunchTaskMapper.getTaskMetricsByStatus("BUSY");
+//        ExchangisTaskStatusMetricsDTO idle = exchangisLaunchTaskMapper.getTaskMetricsByStatus("IDLE");
+//        ExchangisTaskStatusMetricsDTO unlock = exchangisLaunchTaskMapper.getTaskMetricsByStatus("UNLOCK");
+//
+//        Optional.ofNullable(success).ifPresent(metrices::add);
+//        Optional.ofNullable(failed).ifPresent(metrices::add);
+//        Optional.ofNullable(running).ifPresent(metrices::add);
+//        Optional.ofNullable(busy).ifPresent(metrices::add);
+//        Optional.ofNullable(idle).ifPresent(metrices::add);
+//        Optional.ofNullable(unlock).ifPresent(metrices::add);
+//
+//        Message message = Message.ok();
+//        message.setMethod("/dss/exchangis/main/metrics/taskstate");
+//        message.data("metrices", metrices);
+//        return message;
+//    }
+
+    @Override
+    public Message getTaskStatusMetrics(HttpServletRequest request) {
+        List<ExchangisTaskStatusMetricsDTO> metrices = new ArrayList<>();
+        Counter successCounter = manager.getJdbcCounter(MetricNames.TASK_SUCCESS_COUNT_METRIC_NAME);
+        ExchangisTaskStatusMetricsDTO success = new ExchangisTaskStatusMetricsDTO(successCounter.getMetricName().getNorm(), successCounter.getCount());
+        metrices.add(success);
+
+        Counter runningCounter = manager.getJdbcCounter(MetricNames.TASK_RUNNING_COUNT_METRIC_NAME);
+        ExchangisTaskStatusMetricsDTO running = new ExchangisTaskStatusMetricsDTO(runningCounter.getMetricName().getNorm(), runningCounter.getCount());
+        metrices.add(running);
+
+        Counter failedCounter = manager.getJdbcCounter(MetricNames.TASK_FAILED_COUNT_METRIC_NAME);
+        ExchangisTaskStatusMetricsDTO failed = new ExchangisTaskStatusMetricsDTO(failedCounter.getMetricName().getNorm(),
failedCounter.getCount());
+        metrices.add(failed);
+
+        Counter busyCounter = manager.getJdbcCounter(MetricNames.TASK_BUSY_COUNT_METRIC_NAME);
+        ExchangisTaskStatusMetricsDTO busy = new ExchangisTaskStatusMetricsDTO(busyCounter.getMetricName().getNorm(), busyCounter.getCount());
+        metrices.add(busy);
+
+        Counter idleCounter = manager.getJdbcCounter(MetricNames.TASK_IDLE_COUNT_METRIC_NAME);
+        ExchangisTaskStatusMetricsDTO idle = new ExchangisTaskStatusMetricsDTO(idleCounter.getMetricName().getNorm(), idleCounter.getCount());
+        metrices.add(idle);
+
+        Counter unlockCounter = manager.getJdbcCounter(MetricNames.TASK_UNLOCK_COUNT_METRIC_NAME);
+        ExchangisTaskStatusMetricsDTO unlock = new ExchangisTaskStatusMetricsDTO(unlockCounter.getMetricName().getNorm(), unlockCounter.getCount());
+        metrices.add(unlock);
+
+        Message message = Message.ok();
+        message.setMethod("/dss/exchangis/main/metrics/taskstate");
+        message.data("metrices", metrices);
+        return message;
+    }
+
+    @Override
+    public Message getTaskProcessMetrics(HttpServletRequest request) {
+        // TODO mock data for process metrics
+        List<ExchangisTaskProcessMetricsDTO> list = new ArrayList<>();
+
+        // total
+        ExchangisTaskProcessMetricsDTO total = new ExchangisTaskProcessMetricsDTO();
+        total.setKey("total");
+        total.setTitle("总进度");
+        total.setRunning(50);
+        total.setInitialized(10);
+        total.setTotal(120);
+        total.setPercentOfComplete("48%");
+        list.add(total);
+
+        // bdp
+        ExchangisTaskProcessMetricsDTO bdp = new ExchangisTaskProcessMetricsDTO();
+        bdp.setKey("bdp");
+        bdp.setTitle("BDP");
+        bdp.setRunning(20);
+        bdp.setInitialized(10);
+        bdp.setTotal(60);
+        bdp.setPercentOfComplete("33%");
+        list.add(bdp);
+
+        // es
+        ExchangisTaskProcessMetricsDTO es = new ExchangisTaskProcessMetricsDTO();
+        es.setKey("es");
+        es.setTitle("ES");
+        es.setRunning(20);
+        es.setInitialized(0);
+        es.setTotal(40);
+        es.setPercentOfComplete("50%");
+        list.add(es);
+
+        // fps
+        ExchangisTaskProcessMetricsDTO fps = new ExchangisTaskProcessMetricsDTO();
+        fps.setKey("fps");
+        fps.setTitle("FPS");
+        fps.setRunning(10);
+        fps.setInitialized(0);
+        fps.setTotal(20);
+        fps.setPercentOfComplete("50%");
+        list.add(fps);
+
+        Message message = Message.ok();
+        message.setMethod("/dss/exchangis/main/metrics/taskprocess");
+        message.data("list", list);
+        return message;
+    }
+
+    // mock data for echarts
+
+    /**
+     *
+     * dataset: {
+     *     source: [
+     *         ['datasource', '2021-10-25 15:00', '2021-10-25 15:01', '2021-10-25 15:02', '2021-10-25 15:03', '2021-10-25 15:04'],
+     *         ['ds1', 41.1, 30.4, 65.1, 53.3, 44.2],
+     *         ['ds2', 86.5, 92.1, 85.7, 83.1, 93.2],
+     *         ['ds3', 24.1, 67.2, 79.5, 86.4, 76.2]
+     *     ]
+     * },
+     */
+    @Override
+    public Message getDataSourceFlowMetrics(HttpServletRequest request) {
+        // TODO
+        // build mock data for the past 4 hours at minute granularity
+        String fromDateTime = "2021-10-25 15:00";
+        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm");
+        Date parsedFrom;
+        try {
+            parsedFrom = sdf.parse(fromDateTime);
+        } catch (Exception e) {
+            parsedFrom = new Date();
+        }
+        Calendar calendar = Calendar.getInstance();
+        calendar.setTime(parsedFrom);
+
+        List<List<Object>> dataset = new ArrayList<>();
+        List<Object> header = new ArrayList<>();
+        int loopNum = 4 * 60;
+
+        // first row: the header
+//        header.add("数据源");
+//        for (int i = 1; i <= loopNum; i++) {
+//            header.add(sdf.format(calendar.getTime()));
+//            calendar.add(Calendar.MINUTE, 1);
+//        }
+        header.add("时间");
+        header.add("ds1");
+        header.add("ds2");
+        header.add("ds3");
+        dataset.add(header);
+
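+        // data rows: one per minute over the past 4 hours, each with a random
+        // mock flow value in [512, 10240) per datasource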
+        List<Object> realData;
+        int max = 10240;
+        int min = 512;
+        for (int i = 1; i <= loopNum; i++) {
+            realData = new ArrayList<>();
+            realData.add(sdf.format(calendar.getTime()));
+            calendar.add(Calendar.MINUTE, 1);
+            realData.add(Math.random() * (max - min) + min);
+            realData.add(Math.random() * (max - min) + min);
+            realData.add(Math.random() * (max - min) + min);
+            dataset.add(realData);
+        }
+
+        // data rows (earlier mock version kept for reference)
+//        List ds1Data = new ArrayList<>();
+//        ds1Data.add("ds1");
+//
+//        List ds2Data = new ArrayList<>();
+//        ds2Data.add("ds2");
+//
+//        List ds3Data = new ArrayList<>();
+//        ds3Data.add("ds3");
+//        for (int i = 1; i <= loopNum; i++) {
+//            ds1Data.add(i * RandomUtils.nextInt(1024));
+//            ds2Data.add(i * RandomUtils.nextInt(512));
+//            ds3Data.add(i * RandomUtils.nextInt(2048));
+//        }
+//        dataset.add(ds1Data);
+//        dataset.add(ds2Data);
+//        dataset.add(ds3Data);
+        Message message = Message.ok();
+        message.setMethod("/dss/exchangis/main/metrics/datasourceflow");
+        message.data("dataset", dataset);
+        return message;
+    }
+
+    @Override
+    public Message getEngineResourceCpuMetrics(HttpServletRequest request) {
+        // build mock data for the past 4 hours at minute granularity
+        String fromDateTime = "2021-10-25 15:00";
+        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm");
+        Date parsedFrom;
+        try {
+            parsedFrom = sdf.parse(fromDateTime);
+        } catch (Exception e) {
+            parsedFrom = new Date();
+        }
+        Calendar calendar = Calendar.getInstance();
+        calendar.setTime(parsedFrom);
+
+        List<List<Object>> dataset = new ArrayList<>();
+        List<Object> header = new ArrayList<>();
+        int loopNum = 4 * 60;
+
+        // first row: the header
+        header.add("时间");
+        header.add("datax");
+        header.add("sqoop");
+        header.add("linkis");
+//        for (int i = 1; i <= loopNum; i++) {
+//            header.add(sdf.format(calendar.getTime()));
+//            calendar.add(Calendar.MINUTE, 1);
+//        }
+        dataset.add(header);
+
+        // data rows
+        List<Object> realData;
+//        ds1Data.add("datax");
+
+//        List ds2Data = new ArrayList<>();
+//        ds2Data.add("sqoop");
+//
+//        List ds3Data = new ArrayList<>();
+//        ds3Data.add("linkis");
+        int min = 1;
+        int max = 8;
+        for (int i = 1; i <= loopNum; i++) {
+            realData = new ArrayList<>();
+            realData.add(sdf.format(calendar.getTime()));
+            calendar.add(Calendar.MINUTE, 1);
+            realData.add(Math.random() * (max - min) + min);
+            realData.add(Math.random() * (max - min) + min);
+            realData.add(Math.random() * (max - min) + min);
+            dataset.add(realData);
+        }
+//        dataset.add(ds1Data);
+//        dataset.add(ds2Data);
+//        dataset.add(ds3Data);
+//        dataset.add(realData);
+        Message message = Message.ok();
+        message.setMethod("/dss/exchangis/main/metrics/engineresourcecpu");
+        message.data("dataset", dataset);
+        return message;
+    }
+
+    @Override
+    public Message getEngineResourceMemMetrics(HttpServletRequest request) {
+        // build mock data for the past 4 hours at minute granularity
+        String fromDateTime = "2021-10-25 15:00";
+        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm");
+        Date parsedFrom;
+        try {
+            parsedFrom = sdf.parse(fromDateTime);
+        } catch (Exception e) {
+            parsedFrom = new Date();
+        }
+        Calendar calendar = Calendar.getInstance();
+        calendar.setTime(parsedFrom);
+
+        List<List<Object>> dataset = new ArrayList<>();
+        List<Object> header = new ArrayList<>();
+        int loopNum = 4 * 60;
+
+        // first row: the header (earlier mock version kept for reference)
+//        header.add("引擎");
+//        for (int i = 1; i <= loopNum; i++) {
+//            header.add(sdf.format(calendar.getTime()));
+//            calendar.add(Calendar.MINUTE, 1);
+//        }
+//        dataset.add(header);
+//
+//        // data rows
+//        List ds1Data = new ArrayList<>();
+//        ds1Data.add("datax");
+//
+//        List ds2Data = new ArrayList<>();
+//        ds2Data.add("sqoop");
+//
+//        List ds3Data = new ArrayList<>();
+//        ds3Data.add("linkis");
+//        for (int i = 1; i <= loopNum; i++) {
+//            ds1Data.add(i * RandomUtils.nextInt(4192));
+//            ds2Data.add(i * RandomUtils.nextInt(2048));
+//            ds3Data.add(i * RandomUtils.nextInt(1024));
+//        }
+
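+        // NOTE: the rows follow the same ECharts dataset.source layout documented
+        // above getDataSourceFlowMetrics: [time, datax, sqoop, linkis]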
+        // first row: the header
+        header.add("时间");
+        header.add("datax");
+        header.add("sqoop");
+        header.add("linkis");
+//        for (int i = 1; i <= loopNum; i++) {
+//            header.add(sdf.format(calendar.getTime()));
+//            calendar.add(Calendar.MINUTE, 1);
+//        }
+        dataset.add(header);
+
+        // data rows
+        List<Object> realData;
+        int max = 8192;
+        int min = 1024;
+        for (int i = 1; i <= loopNum; i++) {
+            realData = new ArrayList<>();
+            realData.add(sdf.format(calendar.getTime()));
+            calendar.add(Calendar.MINUTE, 1);
+            realData.add(Math.random() * (max - min) + min);
+            realData.add(Math.random() * (max - min) + min);
+            realData.add(Math.random() * (max - min) + min);
+//            realData.add(i * RandomUtils.nextInt(4));
+//            realData.add(i * RandomUtils.nextInt(4));
+//            realData.add(i * RandomUtils.nextInt(4));
+            dataset.add(realData);
+        }
+//        dataset.add(ds1Data);
+//        dataset.add(ds2Data);
+//        dataset.add(ds3Data);
+        Message message = Message.ok();
+        message.setMethod("/dss/exchangis/main/metrics/engineresourcemem");
+        message.data("dataset", dataset);
+        return message;
+    }
+
+    @Override
+    public Message getEngineResourceMetrics(HttpServletRequest request) {
+        List<ExchangisEngineResourceMetricsDTO> list = new ArrayList<>();
+        ExchangisEngineResourceMetricsDTO sqoop = new ExchangisEngineResourceMetricsDTO();
+        sqoop.setEngine("sqoop");
+        sqoop.setCpu("45%");
+        sqoop.setMem("1782Mi");
+        list.add(sqoop);
+
+        ExchangisEngineResourceMetricsDTO datax = new ExchangisEngineResourceMetricsDTO();
+        datax.setEngine("datax");
+        datax.setCpu("32%");
+        datax.setMem("512Mi");
+        list.add(datax);
+
+        ExchangisEngineResourceMetricsDTO linkis = new ExchangisEngineResourceMetricsDTO();
+        linkis.setEngine("linkis");
+        linkis.setCpu("78%");
+        linkis.setMem("4196Mi");
+        list.add(linkis);
+
+        Message message = Message.ok();
+        message.setMethod("/dss/exchangis/main/metrics/engineresource");
+        message.data("list", list);
+        return message;
+    }
+
+
+} diff --git a/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/support/SpringContextHolder.java b/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/support/SpringContextHolder.java new file mode 100644 index 000000000..b7fbe930e --- /dev/null +++ b/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/support/SpringContextHolder.java @@ -0,0 +1,38 @@
+/**
+ *
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ */
+package com.webank.wedatasphere.exchangis.metrics.support;
+
+import org.springframework.beans.factory.DisposableBean;
+import org.springframework.context.ApplicationContext;
+import org.springframework.context.ApplicationContextAware;
+import org.springframework.stereotype.Component;
+
+@Component
+public class SpringContextHolder implements ApplicationContextAware, DisposableBean {
+
+    private static ApplicationContext applicationContext = null;
+
+    public static ApplicationContext getApplicationContext() {
+        return applicationContext;
+    }
+
+    public static <T> T getBean(Class<T> requiredType) {
+        return applicationContext.getBean(requiredType);
+    }
+
+    @Override
+    public void setApplicationContext(ApplicationContext applicationContext) {
+        if (SpringContextHolder.applicationContext == null) {
+            SpringContextHolder.applicationContext = applicationContext;
+        }
+    }
+
+    @Override
+    public void destroy() throws 
Exception { + applicationContext = null; + } + +} diff --git a/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/web/ExchangisMetricController.java b/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/web/ExchangisMetricController.java new file mode 100644 index 000000000..8cfd2dd56 --- /dev/null +++ b/exchangis-job/exchangis-job-metrics/src/main/java/com/webank/wedatasphere/exchangis/metrics/web/ExchangisMetricController.java @@ -0,0 +1,89 @@ +package com.webank.wedatasphere.exchangis.metrics.web; + +import com.webank.wedatasphere.exchangis.metrics.api.Counter; +import com.webank.wedatasphere.exchangis.metrics.api.MetricManager; +import com.webank.wedatasphere.exchangis.metrics.impl.MetricNames; +import org.apache.linkis.server.Message; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestMethod; +import org.springframework.web.bind.annotation.RestController; + +import javax.servlet.http.HttpServletRequest; + +@RestController +@RequestMapping(value = "exchangis", produces = {"application/json;charset=utf-8"}) +public class ExchangisMetricController { + private static final Counter metricTaskRunningCounter = MetricManager.getJdbcCounter(MetricNames.TASK_RUNNING_COUNT_METRIC_NAME); + +// @Resource +// private ExchangisMetricRegistry exchangisMetricsRegister; +// +// @Resource +// private ExchangisMetricsService exchangisMetricsService; +// +// @GET +// @Path("metric/{norm}") +// public Response getMetric( +// @Context HttpServletRequest request, +// @PathParam(value = "norm") String norm +// ) { +// List> metrics = this.exchangisMetricsRegister.getMetrics(norm); +// Message message = Message.ok().data("metrics", metrics); +// return Message.messageToResponse(message); +// } + + @RequestMapping( value = "metric/test", method = RequestMethod.GET) + @Deprecated + public Message test(HttpServletRequest request + ) { + metricTaskRunningCounter.inc(); + return Message.ok().data("count", metricTaskRunningCounter.getCount()); + } + + // get task state metrics +// @GET +// @Path("metrics/taskstate") +// public Response getTaskStateMetrics(@Context HttpServletRequest request) throws Exception { +// Message message = this.exchangisMetricsService.getTaskStatusMetrics(request); +// return Message.messageToResponse(message); +// } +// +// // get task process metrics +// @GET +// @Path("metrics/taskprocess") +// public Response getTaskProcessMetrics(@Context HttpServletRequest request) throws Exception { +// Message message = this.exchangisMetricsService.getTaskProcessMetrics(request); +// return Message.messageToResponse(message); +// } +// +// // get datasource flow metrics +// @GET +// @Path("metrics/datasourceflow") +// public Response getDataSourceFlowMetrics(@Context HttpServletRequest request) throws Exception { +// Message message = this.exchangisMetricsService.getDataSourceFlowMetrics(request); +// return Message.messageToResponse(message); +// } +// +// // get engine (sqoop datax linkis etc.) 
resource metrics +// @GET +// @Path("metrics/engineresource") +// public Response getEngineResourceMetrics(@Context HttpServletRequest request) throws Exception { +// Message message = this.exchangisMetricsService.getEngineResourceMetrics(request); +// return Message.messageToResponse(message); +// } +// +// @GET +// @Path("metrics/engineresourcecpu") +// public Response getEngineResourceCpuMetrics(@Context HttpServletRequest request) throws Exception { +// Message message = this.exchangisMetricsService.getEngineResourceCpuMetrics(request); +// return Message.messageToResponse(message); +// } +// +// @GET +// @Path("metrics/engineresourcemem") +// public Response getEngineResourceMemMetrics(@Context HttpServletRequest request) throws Exception { +// Message message = this.exchangisMetricsService.getEngineResourceMemMetrics(request); +// return Message.messageToResponse(message); +// } + +} diff --git a/exchangis-job/exchangis-job-metrics/src/test/resources/application.yml b/exchangis-job/exchangis-job-metrics/src/test/resources/application.yml new file mode 100644 index 000000000..2315ce303 --- /dev/null +++ b/exchangis-job/exchangis-job-metrics/src/test/resources/application.yml @@ -0,0 +1,13 @@ +server: + port: 9322 +spring: + application: + name: exchangis-server + +management: + endpoints: + web: + exposure: + include: refresh,info +logging: + config: classpath:log4j2.xml diff --git a/exchangis-job/exchangis-job-metrics/src/test/resources/linkis.properties b/exchangis-job/exchangis-job-metrics/src/test/resources/linkis.properties new file mode 100644 index 000000000..db04266c0 --- /dev/null +++ b/exchangis-job/exchangis-job-metrics/src/test/resources/linkis.properties @@ -0,0 +1,47 @@ +# +# Copyright 2019 WeBank +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# + +wds.linkis.test.mode=true +#wds.linkis.test.mode=false + +wds.linkis.server.mybatis.datasource.url=jdbc:mysql://localhost:3306/exchangis_v3?useSSL=false&characterEncoding=UTF-8 + +wds.linkis.server.mybatis.datasource.username=root + +wds.linkis.server.mybatis.datasource.password=123456 + +wds.linkis.log.clear=true + +wds.linkis.server.version=v1 + +## datasource client +wds.exchangis.datasource.client.serverurl=http://dss.shineweng.com:8088 +wds.exchangis.datasource.client.authtoken.key=hdfs +wds.exchangis.datasource.client.authtoken.value=hdfs +wds.exchangis.datasource.client.dws.version=v1 + +wds.exchangis.datasource.extension.dir=exchangis-extds + +##restful +wds.linkis.server.restful.scan.packages=com.webank.wedatasphere.exchangis.metrics.web +wds.linkis.server.mybatis.mapperLocations=classpath*:com/webank/wedatasphere/exchangis/metrics/mapper/impl/*.xml + +wds.linkis.server.mybatis.BasePackage=com.webank.wedatasphere.exchangis.metrics.dao + + + + diff --git a/exchangis-job/exchangis-job-metrics/src/test/resources/log4j.properties b/exchangis-job/exchangis-job-metrics/src/test/resources/log4j.properties new file mode 100644 index 000000000..0807e6087 --- /dev/null +++ b/exchangis-job/exchangis-job-metrics/src/test/resources/log4j.properties @@ -0,0 +1,37 @@ +# +# Copyright 2019 WeBank +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+#
+
+### set log levels ###
+
+log4j.rootCategory=INFO,console
+
+log4j.appender.console=org.apache.log4j.ConsoleAppender
+log4j.appender.console.Threshold=INFO
+log4j.appender.console.layout=org.apache.log4j.PatternLayout
+#log4j.appender.console.layout.ConversionPattern= %d{ISO8601} %-5p (%t) [%F:%M(%L)] - %m%n
+log4j.appender.console.layout.ConversionPattern= %d{ISO8601} %-5p (%t) %p %c{1} - %m%n
+
+
+log4j.appender.com.webank.bdp.ide.core=org.apache.log4j.DailyRollingFileAppender
+log4j.appender.com.webank.bdp.ide.core.Threshold=INFO
+log4j.additivity.com.webank.bdp.ide.core=false
+log4j.appender.com.webank.bdp.ide.core.layout=org.apache.log4j.PatternLayout
+log4j.appender.com.webank.bdp.ide.core.Append=true
+log4j.appender.com.webank.bdp.ide.core.File=logs/linkis.log
+log4j.appender.com.webank.bdp.ide.core.layout.ConversionPattern= %d{ISO8601} %-5p (%t) [%F:%M(%L)] - %m%n
+
+log4j.logger.org.springframework=INFO diff --git a/exchangis-job/exchangis-job-metrics/src/test/resources/log4j2.xml b/exchangis-job/exchangis-job-metrics/src/test/resources/log4j2.xml new file mode 100644 index 000000000..5ae60f144 --- /dev/null +++ b/exchangis-job/exchangis-job-metrics/src/test/resources/log4j2.xml @@ -0,0 +1,39 @@
+<!-- log4j2.xml: 39-line Log4j2 test logging configuration; the XML markup is not recoverable from this extraction -->
 diff --git a/exchangis-job/exchangis-job-metrics/src/test/scala/ExchangisMetricBoot.scala b/exchangis-job/exchangis-job-metrics/src/test/scala/ExchangisMetricBoot.scala new file mode 100644 index 000000000..e1c99922c --- /dev/null +++ b/exchangis-job/exchangis-job-metrics/src/test/scala/ExchangisMetricBoot.scala @@ -0,0 +1,7 @@
+import org.apache.linkis.DataWorkCloudApplication
+
+object ExchangisMetricBoot {
+  def main(args: Array[String]): Unit = {
+    DataWorkCloudApplication.main(args)
+  }
+} diff --git a/exchangis-job/exchangis-job-server/pom.xml b/exchangis-job/exchangis-job-server/pom.xml new file mode 100644 index 000000000..0c83c65d4 --- /dev/null +++ b/exchangis-job/exchangis-job-server/pom.xml @@ -0,0 +1,94 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <parent>
+        <artifactId>exchangis</artifactId>
+        <groupId>com.webank.wedatasphere.exchangis</groupId>
+        <version>${revision}</version>
+        <relativePath>../../pom.xml</relativePath>
+    </parent>
+    <modelVersion>4.0.0</modelVersion>
+
+    <artifactId>exchangis-job-server</artifactId>
+
+    <properties>
+        <maven.compiler.source>8</maven.compiler.source>
+        <maven.compiler.target>8</maven.compiler.target>
+    </properties>
+
+    <dependencies>
+        <dependency>
+            <groupId>com.webank.wedatasphere.exchangis</groupId>
+            <artifactId>exchangis-project-provider</artifactId>
+            <version>${project.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>com.webank.wedatasphere.exchangis</groupId>
+            <artifactId>exchangis-job-launcher</artifactId>
+            <version>${project.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>com.webank.wedatasphere.exchangis</groupId>
+            <artifactId>exchangis-datasource-service</artifactId>
+            <version>${project.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>com.webank.wedatasphere.exchangis</groupId>
+            <artifactId>exchangis-engine-core</artifactId>
+            <version>${project.version}</version>
+        </dependency>
+
+        <dependency>
+            <groupId>org.apache.linkis</groupId>
+            <artifactId>linkis-scheduler</artifactId>
+            <version>${linkis.version}</version>
+        </dependency>
+
+        <dependency>
+            <groupId>mysql</groupId>
+            <artifactId>mysql-connector-java</artifactId>
+            <version>5.1.49</version>
+        </dependency>
+
+        <dependency>
+            <groupId>org.apache.linkis</groupId>
+            <artifactId>linkis-rpc</artifactId>
+            <version>${linkis.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.modelmapper</groupId>
+            <artifactId>modelmapper</artifactId>
+            <version>2.4.3</version>
+        </dependency>
+    </dependencies>
+
+    <build>
+        <plugins>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-deploy-plugin</artifactId>
+                <version>${maven-deploy-plugin.version}</version>
+            </plugin>
+            <plugin>
+                <groupId>net.alchim31.maven</groupId>
+                <artifactId>scala-maven-plugin</artifactId>
+                <version>${scala-maven-plugin.version}</version>
+            </plugin>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-jar-plugin</artifactId>
+                <version>${maven-jar-plugin.version}</version>
+            </plugin>
+        </plugins>
+        <resources>
+            <resource>
+                <directory>src/main/java</directory>
+                <includes>
+                    <include>**/*.xml</include>
+                </includes>
+            </resource>
+        </resources>
+    </build>
+</project>
\ No newline at end of file
 diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/ExchangisJobExecuteAutoConfiguration.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/ExchangisJobExecuteAutoConfiguration.java new file mode 100644 index 000000000..3d2139c68 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/ExchangisJobExecuteAutoConfiguration.java @@ -0,0 +1,179 @@
+package com.webank.wedatasphere.exchangis.job.server; + +import com.webank.wedatasphere.exchangis.job.builder.manager.ExchangisJobBuilderManager; +import com.webank.wedatasphere.exchangis.job.launcher.ExchangisTaskLaunchManager; +import com.webank.wedatasphere.exchangis.job.launcher.domain.LaunchableExchangisTask; +import com.webank.wedatasphere.exchangis.job.launcher.domain.LaunchedExchangisTask; +import com.webank.wedatasphere.exchangis.job.launcher.manager.LinkisExchangisTaskLaunchManager; +import com.webank.wedatasphere.exchangis.job.listener.JobLogListener; +import com.webank.wedatasphere.exchangis.job.server.builder.SpringExchangisJobBuilderManager; +import com.webank.wedatasphere.exchangis.job.server.execution.*; +import com.webank.wedatasphere.exchangis.job.server.execution.generator.*; +import com.webank.wedatasphere.exchangis.job.server.execution.loadbalance.AbstractTaskSchedulerLoadBalancer; +import com.webank.wedatasphere.exchangis.job.server.execution.loadbalance.FlexibleTenancyLoadBalancer; +import com.webank.wedatasphere.exchangis.job.server.execution.loadbalance.TaskSchedulerLoadBalancer; +import com.webank.wedatasphere.exchangis.job.server.execution.scheduler.ExchangisSchedulerExecutorManager; +import com.webank.wedatasphere.exchangis.job.server.execution.scheduler.ExchangisGenericScheduler; +import com.webank.wedatasphere.exchangis.job.server.execution.scheduler.TenancyParallelConsumerManager; +import com.webank.wedatasphere.exchangis.job.server.execution.subscriber.MaxUsageTaskChooseRuler; +import com.webank.wedatasphere.exchangis.job.server.execution.subscriber.TaskChooseRuler; +import com.webank.wedatasphere.exchangis.job.server.execution.subscriber.TaskObserver; +import com.webank.wedatasphere.exchangis.job.server.log.DefaultRpcJobLogger; +import com.webank.wedatasphere.exchangis.job.server.log.JobLogService; +import com.webank.wedatasphere.exchangis.job.server.log.service.RpcJobLogService; +import com.webank.wedatasphere.exchangis.job.server.utils.SpringContextHolder; +import org.apache.linkis.scheduler.Scheduler; +import org.apache.linkis.scheduler.executer.ExecutorManager; +import org.apache.linkis.scheduler.queue.ConsumerManager; +import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.DependsOn; + +import java.util.List; +import java.util.Objects; +import java.util.Optional; + +/** + * Auto configure the beans in job execution + */ +@Configuration +@DependsOn("springContextHolder") +public class ExchangisJobExecuteAutoConfiguration { + + @Bean + @ConditionalOnMissingBean(JobLogListener.class) + public JobLogListener logListener(){ + return new DefaultRpcJobLogger(); + } + + @Bean + @ConditionalOnMissingBean(JobLogService.class) + public JobLogService jobLogService(){ + return new RpcJobLogService(); + } + + /** + * Job builder manager + * @return builder manager + */ + @Bean(initMethod = "init") + @ConditionalOnMissingBean(ExchangisJobBuilderManager.class) + public ExchangisJobBuilderManager jobBuilderManager(){ + return new SpringExchangisJobBuilderManager(); + } + + @Bean + @ConditionalOnMissingBean(TaskGeneratorContext.class) + public TaskGeneratorContext taskGeneratorContext(JobLogListener jobLogListener){ + return new SpringTaskGeneratorContext(jobLogListener, SpringContextHolder.getApplicationContext()); + } + + /** + * Task generator + * @param taskGeneratorContext 
generator context
+     * @param jobBuilderManager job builder manager
+     * @return generator
+     */
+    @Bean(initMethod = "init")
+    @ConditionalOnMissingBean(TaskGenerator.class)
+    public AbstractTaskGenerator taskGenerator(TaskGeneratorContext taskGeneratorContext,
+                                               ExchangisJobBuilderManager jobBuilderManager, List generateListeners){
+        AbstractTaskGenerator taskGenerator = new DefaultTaskGenerator(taskGeneratorContext, jobBuilderManager);
+        Optional.ofNullable(generateListeners).ifPresent(listeners -> listeners.forEach(taskGenerator::addListener));
+        return taskGenerator;
+    }
+
+    @Bean
+    @ConditionalOnMissingBean(ExecutorManager.class)
+    public ExecutorManager executorManagerInScheduler(){
+        return new ExchangisSchedulerExecutorManager();
+    }
+
+    @Bean
+    @ConditionalOnMissingBean(ConsumerManager.class)
+    public ConsumerManager consumerManager(){
+        return new TenancyParallelConsumerManager();
+    }
+
+    /**
+     * Task manager
+     * @param jobLogListener log listener
+     * @return task manager
+     */
+    @Bean
+    @ConditionalOnMissingBean(TaskManager.class)
+    public AbstractTaskManager taskManager(JobLogListener jobLogListener){
+        return new DefaultTaskManager(jobLogListener);
+    }
+
+    /**
+     * Task scheduler
+     * @param executorManager executor manager
+     * @param consumerManager consumer manager
+     * @return scheduler
+     */
+    @Bean(initMethod = "init")
+    @ConditionalOnMissingBean(Scheduler.class)
+    public Scheduler scheduler(ExecutorManager executorManager, ConsumerManager consumerManager){
+        return new ExchangisGenericScheduler(executorManager, consumerManager);
+    }
+
+    /**
+     * Flexible tenancy load balancer
+     * @param scheduler scheduler
+     * @param taskManager task manager
+     * @return load balancer
+     */
+    @Bean
+    @ConditionalOnMissingBean(TaskSchedulerLoadBalancer.class)
+    public AbstractTaskSchedulerLoadBalancer taskSchedulerLoadBalancer(Scheduler scheduler,
+                                                                       TaskManager taskManager){
+        return new FlexibleTenancyLoadBalancer(scheduler, taskManager);
+    }
+
+    /**
+     * Task launch manager
+     * @return launch manager
+     */
+    @Bean(initMethod = "init")
+    @ConditionalOnMissingBean(ExchangisTaskLaunchManager.class)
+    public ExchangisTaskLaunchManager taskLaunchManager(){
+        return new LinkisExchangisTaskLaunchManager();
+    }
+
+    /**
+     * Choose rule
+     * @return choose ruler
+     */
+    @Bean
+    @ConditionalOnMissingBean(TaskChooseRuler.class)
+    public TaskChooseRuler taskChooseRuler(){
+        return new MaxUsageTaskChooseRuler();
+    }
+    /**
+     * Task execution
+     * @param scheduler scheduler
+     * @param launchManager launch manager
+     * @param taskManager task manager
+     * @param observers observers
+     * @param loadBalancer load balancer
+     * @param taskChooseRuler ruler
+     * @return task execution
+     */
+    @Bean(initMethod = "start", destroyMethod = "stop")
+    @ConditionalOnMissingBean(TaskExecution.class)
+    public AbstractTaskExecution taskExecution(Scheduler scheduler, ExchangisTaskLaunchManager launchManager,
+                                               TaskManager taskManager, List<TaskObserver<?>> observers,
+                                               TaskSchedulerLoadBalancer loadBalancer,
+                                               TaskChooseRuler taskChooseRuler, List executionListeners){
+        AbstractTaskExecution taskExecution = new DefaultTaskExecution(scheduler, launchManager, taskManager, observers, loadBalancer, taskChooseRuler);
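+        // reserve one resident consumer thread per observer, plus one more when a
+        // load balancer is present (see setInitResidentThreads below)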
+        ConsumerManager consumerManager = scheduler.getSchedulerContext().getOrCreateConsumerManager();
+        if (consumerManager instanceof TenancyParallelConsumerManager){
+            ((TenancyParallelConsumerManager) consumerManager).setInitResidentThreads(observers.size() +
+                    (Objects.nonNull(loadBalancer)? 1: 0));
+        }
+        Optional.ofNullable(executionListeners).ifPresent(listeners -> listeners.forEach(taskExecution::addListener));
+        return taskExecution;
+    }
+} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/ExchangisJobRenderAutoConfiguration.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/ExchangisJobRenderAutoConfiguration.java new file mode 100644 index 000000000..43af16c15 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/ExchangisJobRenderAutoConfiguration.java @@ -0,0 +1,83 @@
+package com.webank.wedatasphere.exchangis.job.server;
+
+import com.webank.wedatasphere.exchangis.job.server.mapper.JobTransformRuleDao;
+import com.webank.wedatasphere.exchangis.job.server.render.transform.*;
+import com.webank.wedatasphere.exchangis.job.server.render.transform.def.DefaultTransformDefineRulesFusion;
+import com.webank.wedatasphere.exchangis.job.server.render.transform.field.mapping.DefaultFieldMappingRulesFusion;
+import com.webank.wedatasphere.exchangis.job.server.render.transform.field.mapping.FieldMappingRulesFusion;
+import com.webank.wedatasphere.exchangis.job.server.render.transform.field.mapping.FieldMappingTransformer;
+import com.webank.wedatasphere.exchangis.job.server.render.transform.field.mapping.match.*;
+import com.webank.wedatasphere.exchangis.job.server.render.transform.processor.ProcessorTransformer;
+import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.context.annotation.DependsOn;
+
+import java.util.List;
+import java.util.Optional;
+
+@Configuration
+@DependsOn("springContextHolder")
+public class ExchangisJobRenderAutoConfiguration {
+
+    /**
+     * Field match strategy factory
+     * @return factory
+     */
+    @Bean
+    @ConditionalOnMissingBean(FieldMatchNamedStrategyFactory.class)
+    public FieldMatchStrategyFactory matchStrategyFactory(){
+        FieldMatchNamedStrategyFactory namedStrategyFactory = new FieldMatchNamedStrategyFactory();
+        namedStrategyFactory.registerStrategy(FieldAllMatchStrategy.ALL_MATCH, new FieldAllMatchStrategy());
+        namedStrategyFactory.registerStrategy(FieldAllMatchIgnoreCaseStrategy.ALL_MATCH_IGNORE_CASE, new FieldAllMatchIgnoreCaseStrategy());
+        namedStrategyFactory.registerStrategy(FieldCamelCaseMatchStrategy.CAMEL_CASE_MATCH, new FieldCamelCaseMatchStrategy());
+        return namedStrategyFactory;
+    }
+    /**
+     * Field mapping rule fusion
+     * @param strategyFactory match strategy factory
+     * @return rule fusion
+     */
+    @Bean
+    @ConditionalOnMissingBean(FieldMappingRulesFusion.class)
+    public FieldMappingRulesFusion fieldMappingRulesFusion(FieldMatchStrategyFactory strategyFactory){
+        return new DefaultFieldMappingRulesFusion(strategyFactory);
+    }
+    /**
+     * Transform definition rule fusion
+     * @return fusion
+     */
+    @Bean
+    public TransformRulesFusion defineRuleFusion(){
+        return new DefaultTransformDefineRulesFusion();
+    }
+    /**
+     * Field mapping transformer
+     * @param rulesFusion rule fusion
+     * @param transformRuleDao transform rule dao
+     * @return transformer
+     */
+    @Bean
+    public FieldMappingTransformer fieldMappingTransformer(FieldMappingRulesFusion rulesFusion, JobTransformRuleDao transformRuleDao){
+        return new FieldMappingTransformer(rulesFusion, transformRuleDao);
+    }
+    /**
+     * Processor transformer
+     * @return 
transformer
+     */
+    @Bean
+    public ProcessorTransformer processorTransformer(){
+        return new ProcessorTransformer();
+    }
+
+    @Bean
+    @ConditionalOnMissingBean(TransformerContainer.class)
+    public TransformerContainer transformerContainer(List<Transformer> transformers){
+        TransformerContainer container = new DefaultTransformContainer();
+        Optional.ofNullable(transformers).ifPresent(elements -> {
+            elements.forEach(element -> container.registerTransformer(element.name(), element));
+        });
+        return container;
+    }
+
+} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/AbstractLoggingExchangisJobBuilder.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/AbstractLoggingExchangisJobBuilder.java new file mode 100644 index 000000000..d2dbba1da --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/AbstractLoggingExchangisJobBuilder.java @@ -0,0 +1,63 @@
+package com.webank.wedatasphere.exchangis.job.server.builder;
+
+import com.webank.wedatasphere.exchangis.job.builder.ExchangisJobBuilderContext;
+import com.webank.wedatasphere.exchangis.job.builder.api.AbstractExchangisJobBuilder;
+import com.webank.wedatasphere.exchangis.job.domain.ExchangisBase;
+import com.webank.wedatasphere.exchangis.job.domain.ExchangisJob;
+import com.webank.wedatasphere.exchangis.job.exception.ExchangisJobException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Abstract implementation for engine job builders
+ */
+public abstract class AbstractLoggingExchangisJobBuilder<T extends ExchangisBase, E extends ExchangisJob> extends
+        AbstractExchangisJobBuilder<T, E> {
+    private static final Logger LOG = LoggerFactory.getLogger(AbstractLoggingExchangisJobBuilder.class);
+    /**
+     * Get builder context
+     * @return context
+     * @throws ExchangisJobException.Runtime exception
+     */
+    protected static SpringExchangisJobBuilderContext getSpringBuilderContext() throws ExchangisJobException.Runtime{
+        ExchangisJobBuilderContext context = getCurrentBuilderContext();
+        if (!(context instanceof SpringExchangisJobBuilderContext)) {
+            throw new ExchangisJobException.Runtime(-1, "The job builder context cannot be cast to " + SpringExchangisJobBuilderContext.class.getCanonicalName(), null);
+        }
+        return (SpringExchangisJobBuilderContext)context;
+    }
+
+    /**
+     * Warn message
+     * @param message message
+     */
+    public static void warn(String message, Object... args){
+        getSpringBuilderContext().getLogging().warn(null, message, args);
+    }
+
+    public static void warn(String message, Throwable t){
+        getSpringBuilderContext().getLogging().warn(null, message, t);
+    }
+
+    /**
+     * Info message
+     * @param message message
+     */
+    public static void info(String message, Object... args){
args){ + getSpringBuilderContext().getLogging().info(null, message, args); + } + + public static void info(String message, Throwable t){ + getSpringBuilderContext().getLogging().info(null, message, t); + } + + /** + * Get bean in spring context + * @param beanClass bean class + * @param + * @return + */ + public static T getBean(Class beanClass){ + return getSpringBuilderContext().getBean(beanClass); + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/JobParamConstraints.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/JobParamConstraints.java new file mode 100644 index 000000000..64c48831c --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/JobParamConstraints.java @@ -0,0 +1,48 @@ +package com.webank.wedatasphere.exchangis.job.server.builder; + +/** + * Contains the common name of job parameters + */ +public class JobParamConstraints { + + public static final String ENCODING = "encoding"; + + public static final String NULL_FORMAT = "nullFormat"; + + public static final String DATA_SOURCE_ID = "data_source_id"; + + public static final String USERNAME = "username"; + + public static final String PASSWORD = "password"; + + public static final String APP_ID = "appid"; + + public static final String OBJECT_ID = "objectid"; + + public static final String DK = "dk"; + + public static final String DATABASE = "database"; + + public static final String CONNECT_PARAMS = "params"; + + public static final String TABLE = "table"; + + public static final String HOST = "host"; + + public static final String PORT = "port"; + + public static final String HTTP_PORT = "http_port"; + + public static final String LOAD_URL = "load_url"; + + public static final String SERVICE_NAME = "instance"; + + public static final String WHERE = "where"; + + public static final String WRITE_MODE = "writeMode"; + + public static final String PARTITION = "partition"; + + public static final String SETTINGS_MAX_PARALLEL = "setting.max.parallelism"; + +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/SpringExchangisJobBuilderContext.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/SpringExchangisJobBuilderContext.java new file mode 100644 index 000000000..ad5811a7b --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/SpringExchangisJobBuilderContext.java @@ -0,0 +1,72 @@ +package com.webank.wedatasphere.exchangis.job.server.builder; + +import com.webank.wedatasphere.exchangis.datasource.core.service.MetadataInfoService; +import com.webank.wedatasphere.exchangis.job.builder.ExchangisJobBuilderContext; +import com.webank.wedatasphere.exchangis.job.builder.api.ExchangisJobBuilder; +import com.webank.wedatasphere.exchangis.job.domain.ExchangisJobInfo; +import com.webank.wedatasphere.exchangis.job.listener.JobLogListener; +import com.webank.wedatasphere.exchangis.job.listener.events.JobLogEvent; +import com.webank.wedatasphere.exchangis.job.server.log.JobServerLogging; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.context.ApplicationContext; + +import java.util.Objects; + +/** + * Service in job builder context + */ +public class SpringExchangisJobBuilderContext extends ExchangisJobBuilderContext { + + 
+ /** + * Job execution id + */ + private String jobExecutionId; + + /** + * Logging + */ + private JobServerLogging> logging; + + private ApplicationContext applicationContext; + + public SpringExchangisJobBuilderContext(ExchangisJobInfo originalJob, + ApplicationContext applicationContext, JobLogListener jobLogListener) { + super(originalJob); + this.applicationContext = applicationContext; + this.logging = new JobServerLogging>() { + @Override + public Logger getLogger() { + return Objects.nonNull(currentBuilder)? + LoggerFactory.getLogger(currentBuilder.getClass()) : null; + } + + @Override + public JobLogListener getJobLogListener() { + return jobLogListener; + } + + @Override + public JobLogEvent getJobLogEvent(JobLogEvent.Level level, ExchangisJobBuilder builder, String message, Object... args) { + return new JobLogEvent(level, originalJob.getExecuteUser(), jobExecutionId, message, args); + } + }; + } + + public String getJobExecutionId() { + return jobExecutionId; + } + + public void setJobExecutionId(String jobExecutionId) { + this.jobExecutionId = jobExecutionId; + } + + public T getBean(Class requiredType) { + return applicationContext.getBean(requiredType); + } + + public JobServerLogging> getLogging() { + return logging; + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/SpringExchangisJobBuilderManager.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/SpringExchangisJobBuilderManager.java new file mode 100644 index 000000000..2ce3aec40 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/SpringExchangisJobBuilderManager.java @@ -0,0 +1,37 @@ +package com.webank.wedatasphere.exchangis.job.server.builder; + +import com.webank.wedatasphere.exchangis.job.builder.manager.DefaultExchangisJobBuilderManager; +import com.webank.wedatasphere.exchangis.job.launcher.builder.LinkisExchangisLauncherJobBuilder; +import com.webank.wedatasphere.exchangis.job.server.builder.engine.DataxExchangisEngineJobBuilder; +import com.webank.wedatasphere.exchangis.job.server.builder.engine.DefaultExchangisEngineJobBuilder; +import com.webank.wedatasphere.exchangis.job.server.builder.engine.SqoopExchangisEngineJobBuilder; +import com.webank.wedatasphere.exchangis.job.server.builder.transform.GenericExchangisTransformJobBuilder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Builder manager of spring context + */ +public class SpringExchangisJobBuilderManager extends DefaultExchangisJobBuilderManager { + + private static final Logger LOG = LoggerFactory.getLogger(SpringExchangisJobBuilderManager.class); + + public void init(){ + //TransformJobBuilder + GenericExchangisTransformJobBuilder transformJobBuilder = new GenericExchangisTransformJobBuilder(); + LOG.info("Init to load job handlers in [" + transformJobBuilder.getClass().getSimpleName() + "]"); + transformJobBuilder.initHandlers(); + //EngineJobBuilder + DefaultExchangisEngineJobBuilder engineJobBuilder = new DefaultExchangisEngineJobBuilder(); + DataxExchangisEngineJobBuilder dataxExchangisEngineJobBuilder = new DataxExchangisEngineJobBuilder(); + SqoopExchangisEngineJobBuilder sqoopExchangisEngineJobBuilder = new SqoopExchangisEngineJobBuilder(); + //LaunchJobBuilder + LinkisExchangisLauncherJobBuilder linkisExchangisLauncherJobBuilder = new LinkisExchangisLauncherJobBuilder(); + super.addJobBuilder(transformJobBuilder); + 
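+        // Builders are registered per build stage (transform -> engine -> launcher); initBuilderChains()
+        // below is expected to wire them into chains, where canBuild(...) and priority() pick the builder.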
super.addJobBuilder(engineJobBuilder); + super.addJobBuilder(dataxExchangisEngineJobBuilder); + super.addJobBuilder(sqoopExchangisEngineJobBuilder); + super.addJobBuilder(linkisExchangisLauncherJobBuilder); + super.initBuilderChains(); + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/engine/AbstractResourceEngineJobBuilder.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/engine/AbstractResourceEngineJobBuilder.java new file mode 100644 index 000000000..57edfeade --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/engine/AbstractResourceEngineJobBuilder.java @@ -0,0 +1,42 @@ +package com.webank.wedatasphere.exchangis.job.server.builder.engine; + +import com.webank.wedatasphere.exchangis.engine.domain.EngineBmlResource; +import com.webank.wedatasphere.exchangis.engine.domain.EngineLocalPathResource; +import com.webank.wedatasphere.exchangis.engine.domain.EngineResource; +import com.webank.wedatasphere.exchangis.engine.manager.ExchangisEngineManager; +import com.webank.wedatasphere.exchangis.engine.resource.EngineResourceContainer; +import com.webank.wedatasphere.exchangis.job.domain.ExchangisEngineJob; +import com.webank.wedatasphere.exchangis.job.domain.SubExchangisJob; +import com.webank.wedatasphere.exchangis.job.server.builder.AbstractLoggingExchangisJobBuilder; +import org.apache.commons.lang3.StringUtils; +import org.apache.linkis.common.conf.CommonVars; + +import java.util.ArrayList; +import java.util.List; +import java.util.Objects; +import java.util.Optional; + +/** + * Engine job builder with engine resources + */ +public abstract class AbstractResourceEngineJobBuilder extends AbstractLoggingExchangisJobBuilder { + + /** + * Get resources + * @return path + */ + protected List getResources(String engine, String[] paths){ + EngineResourceContainer resourceContainer = + getBean(ExchangisEngineManager.class).getResourceContainer(engine); + List resources = new ArrayList<>(); + if (Objects.nonNull(resourceContainer)){ + for(String path : paths){ + if (StringUtils.isNotBlank(path)) { + Optional.ofNullable(resourceContainer.getRemoteResource(path)) + .ifPresent(resources::add); + } + } + } + return resources; + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/engine/DataxExchangisEngineJob.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/engine/DataxExchangisEngineJob.java new file mode 100644 index 000000000..6741cc07f --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/engine/DataxExchangisEngineJob.java @@ -0,0 +1,29 @@ +package com.webank.wedatasphere.exchangis.job.server.builder.engine; + +import com.webank.wedatasphere.exchangis.job.domain.ExchangisEngineJob; + +import java.util.Objects; + +/** + * Datax engine job + */ +public class DataxExchangisEngineJob extends ExchangisEngineJob { + + private static final String CODE_NAME = "job"; + + public DataxExchangisEngineJob(ExchangisEngineJob engineJob){ + super(engineJob); + } + @SuppressWarnings({"unchecked"}) + public T getCode(Class type) { + Object code = super.getJobContent().get(CODE_NAME); + if (Objects.nonNull(code) && type.isAssignableFrom(code.getClass())){ + return (T)code; + } + return null; + } + + public void setCode(Object 
code) { + super.getJobContent().put(CODE_NAME, code); + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/engine/DataxExchangisEngineJobBuilder.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/engine/DataxExchangisEngineJobBuilder.java new file mode 100644 index 000000000..df0ec7131 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/engine/DataxExchangisEngineJobBuilder.java @@ -0,0 +1,223 @@ +package com.webank.wedatasphere.exchangis.job.server.builder.engine; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.webank.wedatasphere.exchangis.datasource.core.utils.Json; +import com.webank.wedatasphere.exchangis.engine.domain.EngineBmlResource; +import com.webank.wedatasphere.exchangis.engine.resource.loader.datax.DataxEngineResourceConf; +import com.webank.wedatasphere.exchangis.job.builder.ExchangisJobBuilderContext; +import com.webank.wedatasphere.exchangis.job.domain.ExchangisEngineJob; +import com.webank.wedatasphere.exchangis.job.domain.SubExchangisJob; +import com.webank.wedatasphere.exchangis.job.domain.params.JobParamDefine; +import com.webank.wedatasphere.exchangis.job.domain.params.JobParams; +import com.webank.wedatasphere.exchangis.job.exception.ExchangisJobException; +import com.webank.wedatasphere.exchangis.job.exception.ExchangisJobExceptionCode; +import com.webank.wedatasphere.exchangis.job.server.builder.transform.TransformExchangisJob; +import com.webank.wedatasphere.exchangis.job.server.render.transform.TransformTypes; +import com.webank.wedatasphere.exchangis.job.server.utils.JsonEntity; +import com.webank.wedatasphere.exchangis.job.utils.MemUtils; +import org.apache.commons.io.IOUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.*; + +/** + * Datax engine job builder + */ +public class DataxExchangisEngineJobBuilder extends AbstractResourceEngineJobBuilder { + + private static final Logger LOG = LoggerFactory.getLogger(DataxExchangisEngineJob.class); + + private static final String BYTE_SPEED_SETTING_PARAM = "setting.speed.byte"; + + private static final String PROCESSOR_SWITCH = "setting.useProcessor"; + + private static final String PROCESSOR_BASE_PATH = "core.processor.loader.plugin.sourcePath"; + + private static final Map PLUGIN_NAME_MAPPER = new HashMap<>(); + + static{ + //hive use hdfs plugin resource + PLUGIN_NAME_MAPPER.put("hive", "hdfs"); + PLUGIN_NAME_MAPPER.put("tdsql", "mysql"); + } + + /** + * Column mappings define + */ + private static final JobParamDefine COLUMN_MAPPINGS = JobParams.define("column.mappings", job -> { + DataxMappingContext mappingContext = new DataxMappingContext(); + job.getSourceColumns().forEach(columnDefine -> mappingContext.getSourceColumns().add( + new DataxMappingContext.Column(columnDefine.getName(), columnDefine.getType(), + columnDefine.getRawType(), columnDefine.getIndex() + "") + )); + job.getSinkColumns().forEach(columnDefine -> mappingContext.getSinkColumns().add( + new DataxMappingContext.Column(columnDefine.getName(), columnDefine.getType(), + columnDefine.getRawType(), columnDefine.getIndex() + "") + )); + job.getColumnFunctions().forEach(function -> { + DataxMappingContext.Transformer.Parameter parameter = new DataxMappingContext.Transformer.Parameter(); + parameter.setColumnIndex(function.getIndex() + ""); + parameter.setParas(function.getParams()); + 
mappingContext.getTransformers() + .add(new DataxMappingContext.Transformer(function.getName(), parameter)); + }); + return mappingContext; + }, SubExchangisJob.class); + + /** + * Source content + */ + private static final JobParamDefine PLUGIN_SOURCE_NAME = JobParams.define("content[0].reader.name", job -> + getPluginName(job.getSourceType(), "reader"), SubExchangisJob.class); + + private static final JobParamDefine> PLUGIN_SOURCE_PARAM = JobParams.define("content[0].reader.parameter", job -> + job.getParamsToMap(SubExchangisJob.REALM_JOB_CONTENT_SOURCE, false), SubExchangisJob.class); + + /** + * Sink content + */ + private static final JobParamDefine PLUGIN_SINK_NAME = JobParams.define("content[0]].writer.name", job -> + getPluginName(job.getSinkType(), "writer"), SubExchangisJob.class); + + private static final JobParamDefine> PLUGIN_SINK_PARAM = JobParams.define("content[0].writer.parameter", job -> + job.getParamsToMap(SubExchangisJob.REALM_JOB_CONTENT_SINK, false), SubExchangisJob.class); + + /** + * Source columns + */ + private static final JobParamDefine> SOURCE_COLUMNS = JobParams.define("content[0].reader.parameter.column", + DataxMappingContext::getSourceColumns,DataxMappingContext.class); + + /** + * Sink columns + */ + private static final JobParamDefine> SINK_COLUMNS = JobParams.define("content[0].writer.parameter.column", + DataxMappingContext::getSinkColumns,DataxMappingContext.class); + + /** + * Transform list + */ + private static final JobParamDefine> TRANSFORM_LIST = JobParams.define("content[0].transformer", + DataxMappingContext::getTransformers, DataxMappingContext.class); + + @Override + public int priority() { + return 1; + } + + @Override + public boolean canBuild(SubExchangisJob inputJob) { + return "datax".equalsIgnoreCase(inputJob.getEngineType()); + } + + @Override + public DataxExchangisEngineJob buildJob(SubExchangisJob inputJob, ExchangisEngineJob expectOut, ExchangisJobBuilderContext ctx) throws ExchangisJobException { + + try { + DataxExchangisEngineJob engineJob = new DataxExchangisEngineJob(expectOut); + engineJob.setId(inputJob.getId()); + Map codeMap = buildDataxCode(inputJob, ctx); + if (Objects.nonNull(codeMap)){ + try { + if (LOG.isDebugEnabled()) { + LOG.debug("Datax-code built complete, output: " + Json.getMapper().writerWithDefaultPrettyPrinter().writeValueAsString(codeMap)); + } + info("Datax-code built complete, output: " + Json.getMapper().writerWithDefaultPrettyPrinter().writeValueAsString(codeMap)); + } catch (JsonProcessingException e) { + //Ignore + } + engineJob.setCode(codeMap); + } + // engine resources + engineJob.getResources().addAll( + getResources(inputJob.getEngineType().toLowerCase(Locale.ROOT), getResourcesPaths(inputJob))); + if (inputJob instanceof TransformExchangisJob.TransformSubExchangisJob){ + TransformExchangisJob.TransformSubExchangisJob transformJob = ((TransformExchangisJob.TransformSubExchangisJob) inputJob); + TransformTypes type = transformJob.getTransformType(); + if (type == TransformTypes.PROCESSOR){ + settingProcessorInfo(transformJob, engineJob); + } + } + engineJob.setName(inputJob.getName()); + //Unit MB + Optional.ofNullable(engineJob.getRuntimeParams().get(BYTE_SPEED_SETTING_PARAM)).ifPresent(byteLimit -> { + long limit = Long.parseLong(String.valueOf(byteLimit)); + // Convert to bytes + engineJob.getRuntimeParams().put(BYTE_SPEED_SETTING_PARAM, + MemUtils.convertToByte(limit, MemUtils.StoreUnit.MB.name())); + }); + + engineJob.setCreateUser(inputJob.getCreateUser()); + // Lock the memory unit + 
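+            // NOTE: the byte speed value was converted from MB to bytes just above; locking the memory
+            // unit is presumably meant to keep downstream steps from rescaling such values again.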
engineJob.setMemoryUnitLock(true); + return engineJob; + + } catch (Exception e) { + throw new ExchangisJobException(ExchangisJobExceptionCode.BUILDER_ENGINE_ERROR.getCode(), + "Fail to build datax engine job, message:[" + e.getMessage() + "]", e); + } + } + + /** + * Build datax code content + * @param inputJob input job + * @param ctx ctx + * @return code map + */ + private Map buildDataxCode(SubExchangisJob inputJob, ExchangisJobBuilderContext ctx){ + JsonEntity dataxJob = JsonEntity.from("{}"); + dataxJob.set(PLUGIN_SOURCE_NAME.getKey(), PLUGIN_SOURCE_NAME.getValue(inputJob)); + Optional.ofNullable(PLUGIN_SOURCE_PARAM.getValue(inputJob)).ifPresent(source -> source.forEach((key, value) ->{ + dataxJob.set(PLUGIN_SOURCE_PARAM.getKey() + "." + key, value); + })); + dataxJob.set(PLUGIN_SINK_NAME.getKey(), PLUGIN_SINK_NAME.getValue(inputJob)); + Optional.ofNullable(PLUGIN_SINK_PARAM.getValue(inputJob)).ifPresent(sink -> sink.forEach((key, value) -> { + dataxJob.set(PLUGIN_SINK_PARAM.getKey() + "." + key, value); + })); + DataxMappingContext mappingContext = COLUMN_MAPPINGS.getValue(inputJob); + if (Objects.isNull(dataxJob.get(SOURCE_COLUMNS.getKey()))) { + dataxJob.set(SOURCE_COLUMNS.getKey(), SOURCE_COLUMNS.getValue(mappingContext)); + } + if (Objects.isNull(dataxJob.get(SINK_COLUMNS.getKey()))){ + dataxJob.set(SINK_COLUMNS.getKey(), SINK_COLUMNS.getValue(mappingContext)); + } + dataxJob.set(TRANSFORM_LIST.getKey(), TRANSFORM_LIST.getValue(mappingContext)); + return dataxJob.toMap(); + } + + /** + * Setting processor info into engine job + * @param transformJob transform job + * @param engineJob engine job + */ + private void settingProcessorInfo(TransformExchangisJob.TransformSubExchangisJob transformJob, ExchangisEngineJob engineJob){ + Optional.ofNullable(transformJob.getCodeResource()).ifPresent(codeResource ->{ + engineJob.getRuntimeParams().put(PROCESSOR_SWITCH, true); + Object basePath = engineJob.getRuntimeParams().computeIfAbsent(PROCESSOR_BASE_PATH, key -> "proc/src"); + engineJob.getResources().add(new EngineBmlResource(engineJob.getEngineType(), ".", + String.valueOf(basePath) + IOUtils.DIR_SEPARATOR_UNIX + "code_" + transformJob.getId(), + codeResource.getResourceId(), codeResource.getVersion(), transformJob.getCreateUser())); + }); + } + private String[] getResourcesPaths(SubExchangisJob inputJob){ + return new String[]{ + DataxEngineResourceConf.RESOURCE_PATH_PREFIX.getValue() + IOUtils.DIR_SEPARATOR_UNIX + "reader" + IOUtils.DIR_SEPARATOR_UNIX + + PLUGIN_SOURCE_NAME.getValue(inputJob), + DataxEngineResourceConf.RESOURCE_PATH_PREFIX.getValue() + IOUtils.DIR_SEPARATOR_UNIX + "writer" + IOUtils.DIR_SEPARATOR_UNIX + + PLUGIN_SINK_NAME.getValue(inputJob) + }; + } + // core.processor.loader.plugin.sourcePath + /** + * Plugin name + * @param typeName type name + * @param suffix suffix + * @return plugin name + */ + private static String getPluginName(String typeName, String suffix){ + return Objects.nonNull(typeName) ? 
PLUGIN_NAME_MAPPER.getOrDefault(typeName.toLowerCase(Locale.ROOT),
+                typeName.toLowerCase(Locale.ROOT)) + suffix : null;
+    }
+}
diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/engine/DataxMappingContext.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/engine/DataxMappingContext.java
new file mode 100644
index 000000000..56e90c863
--- /dev/null
+++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/engine/DataxMappingContext.java
@@ -0,0 +1,199 @@
+package com.webank.wedatasphere.exchangis.job.server.builder.engine;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Datax mapping context
+ */
+public class DataxMappingContext {
+
+    /**
+     * Source columns
+     */
+    private List<Column> sourceColumns = new ArrayList<>();
+
+    /**
+     * Sink columns
+     */
+    private List<Column> sinkColumns = new ArrayList<>();
+
+    /**
+     * Transformers
+     */
+    private List<Transformer> transformers = new ArrayList<>();
+
+    public List<Column> getSourceColumns() {
+        return sourceColumns;
+    }
+
+    public void setSourceColumns(List<Column> sourceColumns) {
+        this.sourceColumns = sourceColumns;
+    }
+
+    public List<Column> getSinkColumns() {
+        return sinkColumns;
+    }
+
+    public void setSinkColumns(List<Column> sinkColumns) {
+        this.sinkColumns = sinkColumns;
+    }
+
+    public List<Transformer> getTransformers() {
+        return transformers;
+    }
+
+    public void setTransformers(List<Transformer> transformers) {
+        this.transformers = transformers;
+    }
+
+    /**
+     * Column entity
+     */
+    public static class Column{
+        /**
+         * Column name
+         */
+        private String name;
+
+        /**
+         * Column type
+         */
+        private String type;
+
+        /**
+         * Raw column type
+         */
+        private String rawType;
+
+        /**
+         * Index name
+         */
+        private String index;
+
+        public Column(){
+
+        }
+
+        public Column(String name, String type, String index){
+            this(name, type, null, index);
+        }
+
+        public Column(String name, String type, String rawType, String index){
+            this.name = name;
+            this.type = type;
+            this.rawType = rawType;
+            this.index = index;
+        }
+        public String getName() {
+            return name;
+        }
+
+        public void setName(String name) {
+            this.name = name;
+        }
+
+        public String getType() {
+            return type;
+        }
+
+        public void setType(String type) {
+            this.type = type;
+        }
+
+        public String getIndex() {
+            return index;
+        }
+
+        public void setIndex(String index) {
+            this.index = index;
+        }
+
+        public String getRawType() {
+            return rawType;
+        }
+
+        public void setRawType(String rawType) {
+            this.rawType = rawType;
+        }
+    }
+
+    /**
+     * Transformer
+     */
+    public static class Transformer {
+
+        /**
+         * Parameter context
+         */
+        private Parameter parameter = new Parameter();
+        /**
+         * Name
+         */
+        private String name;
+
+        public Transformer(){
+
+        }
+
+        public Transformer(String name, Parameter parameter){
+            this.name = name;
+            this.parameter = parameter;
+        }
+
+        public String getName() {
+            return name;
+        }
+
+        public void setName(String name) {
+            this.name = name;
+        }
+
+        public Parameter getParameter() {
+            return parameter;
+        }
+
+        public void setParameter(Parameter parameter) {
+            this.parameter = parameter;
+        }
+
+        /**
+         * Parameter
+         */
+        public static class Parameter {
+            /**
+             * Column index
+             */
+            private String columnIndex;
+
+            /**
+             * Params
+             */
+            private List<String> paras = new ArrayList<>();
+
+            public Parameter(){
+
+            }
+
+            public Parameter(List<String> paras){
+                this.paras = paras;
+            }
+
+            public String getColumnIndex() {
+                return columnIndex;
+            }
+
+            public void setColumnIndex(String columnIndex) {
+                this.columnIndex = columnIndex;
+            }
+
+            public List<String> getParas() {
+                return paras;
+            }
+
+            public void setParas(List<String> paras) {
+                this.paras = paras;
+            }
+        }
+    }
+}
diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/engine/DefaultExchangisEngineJobBuilder.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/engine/DefaultExchangisEngineJobBuilder.java
new file mode 100644
index 000000000..152e058bd
--- /dev/null
+++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/engine/DefaultExchangisEngineJobBuilder.java
@@ -0,0 +1,45 @@
+package com.webank.wedatasphere.exchangis.job.server.builder.engine;
+
+
+import com.webank.wedatasphere.exchangis.datasource.core.utils.Json;
+import com.webank.wedatasphere.exchangis.job.builder.ExchangisJobBuilderContext;
+import com.webank.wedatasphere.exchangis.job.builder.api.AbstractExchangisJobBuilder;
+import com.webank.wedatasphere.exchangis.job.domain.ExchangisEngineJob;
+import com.webank.wedatasphere.exchangis.job.domain.SubExchangisJob;
+import com.webank.wedatasphere.exchangis.job.exception.ExchangisJobException;
+import org.apache.commons.lang.StringUtils;
+
+import java.util.Map;
+import java.util.Objects;
+import java.util.Optional;
+
+/**
+ * Default implementation
+ */
+public class DefaultExchangisEngineJobBuilder extends AbstractExchangisJobBuilder<SubExchangisJob, ExchangisEngineJob> {
+
+    private static final String ENGINE_JOB_MEMORY_USED = "setting.max.memory";
+    @Override
+    public int priority() {
+        return Integer.MIN_VALUE;
+    }
+
+    @Override
+    public ExchangisEngineJob buildJob(SubExchangisJob inputJob, ExchangisEngineJob expectOut, ExchangisJobBuilderContext ctx) throws ExchangisJobException {
+        String paramsString = ctx.getOriginalJob().getJobParams();
+        ExchangisEngineJob exchangisEngineJob = new ExchangisEngineJob();
+        if (StringUtils.isNotBlank(paramsString)){
+            Map jobParams = Json.fromJson(paramsString, Map.class);
+            if (Objects.nonNull(jobParams)){
+                exchangisEngineJob.getJobContent().putAll(jobParams);
+            }
+        }
+        Map<String, Object> settings = inputJob.getParamsToMap(SubExchangisJob.REALM_JOB_SETTINGS, false);
+        Optional.ofNullable(settings.get(ENGINE_JOB_MEMORY_USED)).ifPresent(memoryUsed -> exchangisEngineJob.setMemoryUsed(Long.valueOf(String.valueOf(memoryUsed))));
+        exchangisEngineJob.setRuntimeParams(settings);
+        exchangisEngineJob.setEngineType(ctx.getOriginalJob().getEngineType());
+        exchangisEngineJob.setName(inputJob.getName());
+        exchangisEngineJob.setJobLabel(ctx.getOriginalJob().getJobLabel());
+        return exchangisEngineJob;
+    }
+}
diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/engine/SqoopExchangisEngineJob.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/engine/SqoopExchangisEngineJob.java
new file mode 100644
index 000000000..73d4f0a91
--- /dev/null
+++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/engine/SqoopExchangisEngineJob.java
@@ -0,0 +1,18 @@
+package com.webank.wedatasphere.exchangis.job.server.builder.engine;
+
+import com.webank.wedatasphere.exchangis.job.domain.ExchangisEngineJob;
+
+import java.util.Objects;
+
+/**
+ * Sqoop engine job
+ */
+public class SqoopExchangisEngineJob extends ExchangisEngineJob {
+    //Empty
+
+    public SqoopExchangisEngineJob(ExchangisEngineJob engineJob){
+        super(engineJob);
+    }
+
+
+}
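Note for reviewers: the DataX builder above does not assemble the nested job JSON by hand; it sets dotted, indexed paths (e.g. `content[0].reader.name`) on a `JsonEntity` and then flattens it to a map. A minimal sketch of that flow, assuming `JsonEntity.from`/`set`/`toMap` behave the way `buildDataxCode` uses them; the plugin names and parameter values below are made up for illustration only:

// Illustration only (not part of this patch): how dotted paths expand into the DataX job document.
JsonEntity dataxJob = JsonEntity.from("{}");
dataxJob.set("content[0].reader.name", "mysqlreader");              // hypothetical source plugin
dataxJob.set("content[0].reader.parameter.username", "test_user");  // copied from the source realm params
dataxJob.set("content[0].writer.name", "hdfswriter");               // hypothetical sink plugin
Map<String, Object> code = dataxJob.toMap();
// code => {content=[{reader={name=mysqlreader, parameter={username=test_user}}, writer={name=hdfswriter}}]}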
diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/engine/SqoopExchangisEngineJobBuilder.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/engine/SqoopExchangisEngineJobBuilder.java new file mode 100644 index 000000000..5db542a42 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/engine/SqoopExchangisEngineJobBuilder.java @@ -0,0 +1,607 @@ +package com.webank.wedatasphere.exchangis.job.server.builder.engine; + +import com.webank.wedatasphere.exchangis.datasource.core.domain.MetaColumn; +import com.webank.wedatasphere.exchangis.datasource.core.exception.ExchangisDataSourceException; +import com.webank.wedatasphere.exchangis.datasource.core.service.MetadataInfoService; +import com.webank.wedatasphere.exchangis.datasource.core.utils.Json; +import com.webank.wedatasphere.exchangis.job.builder.ExchangisJobBuilderContext; +import com.webank.wedatasphere.exchangis.job.domain.ExchangisEngineJob; +import com.webank.wedatasphere.exchangis.job.domain.ExchangisJobInfo; +import com.webank.wedatasphere.exchangis.job.domain.SubExchangisJob; +import com.webank.wedatasphere.exchangis.job.domain.params.JobParam; +import com.webank.wedatasphere.exchangis.job.domain.params.JobParamDefine; +import com.webank.wedatasphere.exchangis.job.domain.params.JobParamSet; +import com.webank.wedatasphere.exchangis.job.domain.params.JobParams; +import com.webank.wedatasphere.exchangis.job.exception.ExchangisJobException; +import com.webank.wedatasphere.exchangis.job.exception.ExchangisJobExceptionCode; +import com.webank.wedatasphere.exchangis.job.server.builder.AbstractLoggingExchangisJobBuilder; +import com.webank.wedatasphere.exchangis.job.server.builder.JobParamConstraints; +import com.webank.wedatasphere.exchangis.job.server.builder.SpringExchangisJobBuilderContext; +import org.apache.commons.lang.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.UnsupportedEncodingException; +import java.net.URLEncoder; +import java.util.*; +import java.util.concurrent.atomic.AtomicReference; +import java.util.stream.Collectors; + +import static com.webank.wedatasphere.exchangis.job.domain.SubExchangisJob.REALM_JOB_CONTENT_SINK; +import static com.webank.wedatasphere.exchangis.job.domain.SubExchangisJob.REALM_JOB_CONTENT_SOURCE; +import static com.webank.wedatasphere.exchangis.job.domain.SubExchangisJob.REALM_JOB_SETTINGS; +import static com.webank.wedatasphere.exchangis.job.server.builder.engine.SqoopExchangisEngineJobBuilder.MODE_TYPE.EXPORT; +import static com.webank.wedatasphere.exchangis.job.server.builder.engine.SqoopExchangisEngineJobBuilder.MODE_TYPE.IMPORT; + +public class SqoopExchangisEngineJobBuilder extends AbstractLoggingExchangisJobBuilder { + + private static final Logger LOG = LoggerFactory.getLogger(SqoopExchangisEngineJobBuilder.class); + + private static final List SUPPORT_BIG_DATA_TYPES = Arrays.asList("HIVE", "HBASE"); + + private static final List SUPPORT_RDBMS_TYPES = Arrays.asList("MYSQL", "ORACLE"); + + private static final String META_INPUT_FORMAT = "file.inputformat"; + + private static final String META_OUTPUT_FORMAT = "file.outputformat"; + + private static final String META_FIELD_DELIMITER = "field.delim"; + + /** + * //TODO To support different hadoop version + */ + private static final List HADOOP_TEXT_INPUT_FORMAT = 
Collections.singletonList("org.apache.hadoop.mapred.TextInputFormat"); + + private static final List HADOOP_TEXT_OUTPUT_FORMAT = Arrays.asList("org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat", + "org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat"); + + public enum MODE_TYPE { IMPORT, EXPORT} + /** + * Verbose, default null (means not open verbose) + */ + private static final JobParamDefine VERBOSE = JobParams.define("sqoop.args.verbose", job -> null, SubExchangisJob.class); + /** + * Sqoop mode + */ + private static final JobParamDefine MODE = JobParams.define("sqoop.mode", job -> SUPPORT_BIG_DATA_TYPES.contains(job.getSourceType().toUpperCase())? "export": "import", SubExchangisJob.class); + + private static final JobParamDefine MODE_ENUM = JobParams.define("sqoop.mode.enum", job -> SUPPORT_BIG_DATA_TYPES.contains(job.getSourceType().toUpperCase())? EXPORT: IMPORT, SubExchangisJob.class); + /** + * Sqoop RDBMS mode params + */ + private static final JobParamDefine MODE_RDBMS_PARAMS = JobParams.define("sqoop.mode.rdbms.params", job -> { + MODE_TYPE modeParam = MODE_ENUM.getValue(job); + return modeParam.equals(IMPORT)? job.getRealmParams(REALM_JOB_CONTENT_SOURCE) : job.getRealmParams(REALM_JOB_CONTENT_SINK); + }, SubExchangisJob.class); + + /** + * Sqoop hadoop mode params + */ + private static final JobParamDefine MODE_HADOOP_PARAMS = JobParams.define("sqoop.mode.hadoop.params", job -> { + MODE_TYPE modeParam = MODE_ENUM.getValue(job); + return modeParam.equals(IMPORT)? job.getRealmParams(REALM_JOB_CONTENT_SINK) : job.getRealmParams(REALM_JOB_CONTENT_SOURCE); + }, SubExchangisJob.class); + + /** + * Hive-partition-map + */ + @SuppressWarnings("unchecked") + private static final JobParamDefine> PARTITION_MAP = JobParams.define("sqoop.partition.map", job -> { + if ("hive".equalsIgnoreCase(job.getSinkType()) || "hive".equalsIgnoreCase(job.getSourceType())){ + JobParam partitionParam = MODE_HADOOP_PARAMS.getValue(job).get(JobParamConstraints.PARTITION); + if (Objects.nonNull(partitionParam)) { + Object partition = partitionParam.getValue(); + if (partition instanceof Map) { + return (Map) partition; + } else { + String partitionStr = String.valueOf(partition); + if (StringUtils.isNotBlank(partitionStr)){ + Map partitionMap = new HashMap<>(); + String[] partValues = partitionStr.split(","); + for (String partValue : partValues){ + String[] parts = partValue.split("="); + if (parts.length == 2){ + partitionMap.put(parts[0], parts[1]); + } + } + return partitionMap; + } + } + } + } + return null; + }, SubExchangisJob.class); + + /** + * Meta columns + */ + private static final JobParamDefine> META_COLUMNS = JobParams.define("sqoop.meta.table.columns", paramSet -> { + SpringExchangisJobBuilderContext context = getSpringBuilderContext(); + JobParam dataSourceId = paramSet.get(JobParamConstraints.DATA_SOURCE_ID); + JobParam database = paramSet.get(JobParamConstraints.DATABASE, String.class); + JobParam table = paramSet.get(JobParamConstraints.TABLE, String.class); + try { + return getBean(MetadataInfoService.class).getColumns(context.getOriginalJob().getCreateUser(), + Long.valueOf(dataSourceId.getValue()), database.getValue(), table.getValue()); + } catch (ExchangisDataSourceException e) { + throw new ExchangisJobException.Runtime(e.getErrCode(), e.getMessage(), e.getCause()); + } + }); + + /** + * Meta hadoop columns + */ + private static final JobParamDefine> META_HADOOP_COLUMNS = JobParams.define("sqoop.meta.hadoop.table.columns", job -> 
META_COLUMNS.newValue(MODE_HADOOP_PARAMS.getValue(job)), SubExchangisJob.class); + + /** + * Meta rdbms columns + */ + private static final JobParamDefine> META_RDBMS_COLUMNS = JobParams.define("sqoop.meta.rdbms.table.columns", job -> META_COLUMNS.newValue(MODE_RDBMS_PARAMS.getValue(job)), SubExchangisJob.class); + /** + * Meta table/partition props + */ + private static final JobParamDefine> META_HADOOP_TABLE_PROPS = JobParams.define("sqoop.meta.hadoop.table.props", job ->{ + SpringExchangisJobBuilderContext context = getSpringBuilderContext(); + ExchangisJobInfo jobInfo = context.getOriginalJob(); + // Use the creator as userName + String userName = jobInfo.getCreateUser(); + JobParamSet hadoopParamSet = MODE_HADOOP_PARAMS.getValue(job); + JobParam dataSourceId = hadoopParamSet.get(JobParamConstraints.DATA_SOURCE_ID); + JobParam database = hadoopParamSet.get(JobParamConstraints.DATABASE, String.class); + JobParam table = hadoopParamSet.get(JobParamConstraints.TABLE, String.class); + Map partition = PARTITION_MAP.getValue(job); + try { + if (Objects.nonNull(partition)) { + Map props = getBean(MetadataInfoService.class).getPartitionProps(userName, Long.valueOf(dataSourceId.getValue()), + database.getValue(), table.getValue(), URLEncoder.encode(partition.entrySet().stream().map(entry -> + entry.getKey() + "=" + entry.getValue() + ).collect(Collectors.joining(",")), "UTF-8")); + if (!props.isEmpty()){ + return props; + } + } + return getBean(MetadataInfoService.class).getTableProps(userName, Long.valueOf(dataSourceId.getValue()), + database.getValue(), table.getValue()); + } catch (ExchangisDataSourceException e) { + throw new ExchangisJobException.Runtime(e.getErrCode(), e.getMessage(), e.getCause()); + } catch (UnsupportedEncodingException e) { + throw new ExchangisJobException.Runtime(-1, e.getMessage(), e); + } + }, SubExchangisJob.class); + + private static final JobParamDefine IS_TEXT_FILE_TYPE = JobParams.define("sqoop.file.is.text", job -> { + Map tableProps = META_HADOOP_TABLE_PROPS.getValue(job); + return HADOOP_TEXT_INPUT_FORMAT.contains(tableProps.getOrDefault(META_INPUT_FORMAT, "")) || + HADOOP_TEXT_OUTPUT_FORMAT.contains(tableProps.getOrDefault(META_OUTPUT_FORMAT, "")); + }, SubExchangisJob.class); + /** + * + * Whether hcatalog + */ + private static final JobParamDefine IS_USE_HCATALOG = JobParams.define("sqoop.use.hcatalog", job -> MODE_ENUM.getValue(job) == EXPORT || !IS_TEXT_FILE_TYPE.getValue(job), SubExchangisJob.class); + + /** + * Driver default 'com.mysql.jdbc.Driver' + */ + private static final JobParamDefine CONNECT_DRIVER = JobParams.define("sqoop.args.driver", job -> "com.mysql.jdbc.Driver", SubExchangisJob.class); + + /** + * Protocol + */ + private static final JobParamDefine CONNECT_PROTOCOL = JobParams.define("sqoop.args.protocol", () -> "jdbc:mysql://%s:%s/%s"); + + /** + * Number of mapper + */ + private static final JobParamDefine NUM_MAPPERS = JobParams.define("sqoop.args.num.mappers", job -> { + int numMappers = 1; + JobParamSet settings = job.getRealmParams(REALM_JOB_SETTINGS); + JobParam parallel = settings.get(JobParamConstraints.SETTINGS_MAX_PARALLEL); + if (Objects.nonNull(parallel)){ + Object value = parallel.getValue(); + try { + return Integer.parseInt(String.valueOf(value)); + }catch( NumberFormatException exception){ + //Ignore + } + } + return numMappers; + }, SubExchangisJob.class); + + /** + * Connect string + */ + private static final JobParamDefine CONNECT_STRING = JobParams.define("sqoop.args.connect", job -> { + JobParamSet paramSet = 
MODE_RDBMS_PARAMS.getValue(job); + String host = paramSet.get(JobParamConstraints.HOST, String.class).getValue(); + String database = paramSet.get(JobParamConstraints.DATABASE, String.class).getValue(); + JobParam connectParams = paramSet.get(JobParamConstraints.CONNECT_PARAMS, String.class); + Map paramsMap = null; + if (Objects.nonNull(connectParams)){ + paramsMap = Json.fromJson(connectParams.getValue(), Map.class); + } + Integer port = Integer.parseInt(String.valueOf(paramSet.get(JobParamConstraints.PORT).getValue())); + String connectStr = String.format(CONNECT_PROTOCOL.getValue(job), host, port, database); + if (Objects.nonNull(paramsMap) && !paramsMap.isEmpty()){ + connectStr += "?" + paramsMap.entrySet().stream().map(entry -> { + try { + return entry.getKey() + "=" + URLEncoder.encode(entry.getValue(), "UTF-8" ); + } catch (UnsupportedEncodingException e) { + return null; + } + }).filter(StringUtils::isNotBlank).collect(Collectors.joining("&")); + } + return connectStr; + }, SubExchangisJob.class); + + /** + * Username + */ + private static final JobParamDefine USERNAME = JobParams.define("sqoop.args.username", job -> + MODE_RDBMS_PARAMS.getValue(job).get(JobParamConstraints.USERNAME, String.class).getValue(), SubExchangisJob.class); + + /** + * Password + */ + private static final JobParamDefine PASSWORD = JobParams.define("sqoop.args.password", job -> + MODE_RDBMS_PARAMS.getValue(job).get(JobParamConstraints.PASSWORD, String.class).getValue(), SubExchangisJob.class); + + /** + * Table + */ + private static final JobParamDefine TABLE = JobParams.define("sqoop.args.table", job -> + MODE_RDBMS_PARAMS.getValue(job).get(JobParamConstraints.TABLE, String.class).getValue(), SubExchangisJob.class); + + /** + * Import: Query string in params, //TODO where to use query + */ + private static final JobParamDefine QUERY_STRING = JobParams.define("sqoop.args.query", "query"); + + /** + * Import: Where + */ + private static final JobParamDefine WHERE_CLAUSE = JobParams.define("sqoop.args.where", job -> { + if (MODE_ENUM.getValue(job) == MODE_TYPE.IMPORT){ + JobParam where = MODE_RDBMS_PARAMS.getValue(job).get(JobParamConstraints.WHERE); + if (Objects.nonNull(where) && StringUtils.isNotBlank(where.getValue())){ + return where.getValue(); + } + } + return null; + }, SubExchangisJob.class); + + + /** + * Import: Hive-import + */ + private static final JobParamDefine HIVE_IMPORT = JobParams.define("sqoop.args.hive.import", job -> { + if (MODE_ENUM.getValue(job) == IMPORT && job.getSinkType().equalsIgnoreCase("hive") && !IS_USE_HCATALOG.getValue(job)){ + return ""; + } + return null; + }, SubExchangisJob.class); + + /** + * Export: Hive-export + */ + private static final JobParamDefine HIVE_EXPORT = JobParams.define("sqoop.hive.export", job -> { + if (MODE_ENUM.getValue(job) == EXPORT && job.getSourceType().equalsIgnoreCase("hive") && !IS_USE_HCATALOG.getValue(job)){ + return ""; + } + return null; + }, SubExchangisJob.class); + + /** + * Import: Hive-overwrite + */ + private static final JobParamDefine HIVE_OVERWRITE = JobParams.define("sqoop.args.hive.overwrite", job -> { + if (Objects.nonNull(HIVE_IMPORT.getValue(job))){ + JobParam writeMode = MODE_HADOOP_PARAMS.getValue(job).get(JobParamConstraints.WRITE_MODE); + if (Objects.nonNull(writeMode) && "overwrite".equalsIgnoreCase(writeMode.getValue())){ + return ""; + } + } + return null; + }, SubExchangisJob.class); + + + + /** + * Import: Hive-database + */ + private static final JobParamDefine HIVE_DATABASE = 
JobParams.define("sqoop.args.hive.database", job -> { + if (Objects.nonNull(HIVE_IMPORT.getValue(job))){ + return MODE_HADOOP_PARAMS.getValue(job).get(JobParamConstraints.DATABASE, String.class).getValue(); + } + return null; + }, SubExchangisJob.class); + + /** + * Import: Hive-table + */ + private static final JobParamDefine HIVE_TABLE = JobParams.define("sqoop.args.hive.table", job -> { + if (Objects.nonNull(HIVE_DATABASE.getValue(job))) { + return MODE_HADOOP_PARAMS.getValue(job).get(JobParamConstraints.TABLE, String.class).getValue(); + } + return null; + }, SubExchangisJob.class); + + /** + * Import: Hive-partition-key + */ + private static final JobParamDefine HIVE_PARTITION_KEY = JobParams.define("sqoop.args.hive.partition.key", job -> { + AtomicReference keys = new AtomicReference<>(null); + if (Objects.nonNull(HIVE_TABLE.getValue(job))){ + Optional.ofNullable(PARTITION_MAP.getValue(job)).ifPresent(partitionMap -> keys.set(StringUtils.join(partitionMap.keySet(), ","))); + } + return keys.get(); + }, SubExchangisJob.class); + + /** + * Import: Hive-partition-values + */ + private static final JobParamDefine HIVE_PARTITION_VALUE = JobParams.define("sqoop.args.hive.partition.value", job -> { + if (Objects.nonNull(HIVE_PARTITION_KEY.getValue(job))){ + return StringUtils.join(PARTITION_MAP.getValue(job).values(), ","); + } + return null; + }, SubExchangisJob.class); + + /** + * Import: Hive-append + */ + private static final JobParamDefine HIVE_APPEND = JobParams.define("sqoop.args.append", job -> { +// if (Objects.nonNull(HIVE_IMPORT.getValue(job))){ +// JobParam writeMode = MODE_HADOOP_PARAMS.getValue(job).get(JobParamConstraints.WRITE_MODE); +// if (Objects.nonNull(writeMode) && "append".equalsIgnoreCase(writeMode.getValue())){ +// return ""; +// } +// } + return null; + }, SubExchangisJob.class); + /** + * Import: Hive-target-dir\] + */ + private static final JobParamDefine HIVE_TARGET_DIR = JobParams.define("sqoop.args.target.dir", job -> { + if (Objects.nonNull(HIVE_IMPORT.getValue(job)) && Objects.nonNull(QUERY_STRING.getValue(job))){ + return "/user/linkis/exchangis/sqoop/" + HIVE_TABLE.getValue(job) + "/"; + } + return null; + }, SubExchangisJob.class); + + /** + * Import: Hive-delete-target-dir + */ + private static final JobParamDefine HIVE_DELETE_TARGET = JobParams.define("sqoop.args.delete.target.dir", job -> { + if (Objects.nonNull(HIVE_IMPORT.getValue(job))){ + return ""; + } + return null; + }, SubExchangisJob.class); + + /** + * Import: Hive-fields-terminated-by + */ + private static final JobParamDefine HIVE_FIELDS_TERMINATED_BY = JobParams.define("sqoop.args.fields.terminated.by", job -> { + if (MODE_ENUM.getValue(job) == IMPORT && "hive".equalsIgnoreCase(job.getSinkType())){ + return META_HADOOP_TABLE_PROPS.getValue(job).getOrDefault(META_FIELD_DELIMITER, "\u0001"); + } + return null; + }, SubExchangisJob.class); + + /** + * TODO get the properties from hive + * Import: Hive-null-string + */ + private static final JobParamDefine HIVE_NULL_STRING = JobParams.define("sqoop.args.null.string", job -> { + if (MODE_ENUM.getValue(job) == IMPORT && "hive".equalsIgnoreCase(job.getSinkType())){ + return "\\\\N"; + } + return null; + }, SubExchangisJob.class); + + /** + * TODO get the properties from hive + * Import: Hive-null-non-string + */ + private static final JobParamDefine HIVE_NULL_NON_STRING = JobParams.define("sqoop.args.null.non.string", job -> { + if (MODE_ENUM.getValue(job) == IMPORT && "hive".equalsIgnoreCase(job.getSinkType())){ + return "\\\\N"; + } + return 
null; + }, SubExchangisJob.class); + + /** + * TODO get the properties from hive to build the export directory + * Export: Export-dir + */ + private static final JobParamDefine EXPORT_DIR = JobParams.define("sqoop.args.export.dir", job -> { + if (Objects.nonNull(HIVE_EXPORT.getValue(job))){ + + } + return null; + }, SubExchangisJob.class); + + /** + * Export: Update-key + */ + private static final JobParamDefine UPDATE_KEY = JobParams.define("sqoop.args.update.key", job -> { + if (MODE_ENUM.getValue(job) == EXPORT ){ + JobParam writeMode = MODE_RDBMS_PARAMS.getValue(job).get(JobParamConstraints.WRITE_MODE, String.class); + if (Objects.nonNull(writeMode) && StringUtils.isNotBlank(writeMode.getValue()) && !"insert".equalsIgnoreCase(writeMode.getValue())){ + return META_RDBMS_COLUMNS.getValue(job).stream().filter(MetaColumn::isPrimaryKey) + .map(MetaColumn::getName).collect(Collectors.joining(",")); + } + } + return null; + }, SubExchangisJob.class); + + /** + * Export: Update mode + */ + private static final JobParamDefine UPDATE_MODE = JobParams.define("sqoop.args.update.mode", job -> { + if (StringUtils.isNotBlank(UPDATE_KEY.getValue(job))){ + JobParam writeMode = MODE_RDBMS_PARAMS.getValue(job).get(JobParamConstraints.WRITE_MODE, String.class); + return "update".equals(writeMode.getValue())? "allowinsert" : "updateonly"; + } + return null; + }, SubExchangisJob.class); + + /** + * Export: Hcatalog-database + */ + private static final JobParamDefine HCATALOG_DATABASE = JobParams.define("sqoop.args.hcatalog.database", job ->{ + if (IS_USE_HCATALOG.getValue(job)){ + return MODE_HADOOP_PARAMS.getValue(job).get(JobParamConstraints.DATABASE, String.class).getValue(); + } + return null; + }, SubExchangisJob.class); + + /** + * Export: Hcatalog-table + */ + private static final JobParamDefine HCATALOG_TABLE = JobParams.define("sqoop.args.hcatalog.table", job ->{ + if (Objects.nonNull(HCATALOG_DATABASE.getValue(job))){ + return MODE_HADOOP_PARAMS.getValue(job).get(JobParamConstraints.TABLE, String.class).getValue(); + } + return null; + }, SubExchangisJob.class); + + /** + * Export: Hcatalog-partition-key + */ + private static final JobParamDefine HCATALOG_PARTITION_KEY = JobParams.define("sqoop.args.hcatalog.partition.keys", job -> { + AtomicReference keys = new AtomicReference<>(null); + if (Objects.nonNull(HCATALOG_TABLE.getValue(job))){ + Optional.ofNullable(PARTITION_MAP.getValue(job)).ifPresent(partitionMap -> keys.set(StringUtils.join(partitionMap.keySet(), ","))); + } + return keys.get(); + }, SubExchangisJob.class); + + /** + * Export: Hcatalog-partition-values + */ + private static final JobParamDefine HCATALOG_PARTITION_VALUE = JobParams.define("sqoop.args.hcatalog.partition.values", job -> { + if (Objects.nonNull(HCATALOG_PARTITION_KEY.getValue(job))){ + return StringUtils.join(PARTITION_MAP.getValue(job).values(), ","); + } + return null; + }, SubExchangisJob.class); + + /** + * TODO get the properties from hive + * Export: Hive-input-fields-terminated-by + */ + private static final JobParamDefine HIVE_INPUT_FIELDS_TERMINATED_KEY = JobParams.define("sqoop.args.input.fields.terminated.by", job -> { + if (MODE_ENUM.getValue(job) == EXPORT && "hive".equalsIgnoreCase(job.getSourceType())){ + return META_HADOOP_TABLE_PROPS.getValue(job).getOrDefault(META_FIELD_DELIMITER, "\u0001"); + } + return null; + }, SubExchangisJob.class); + + /** + * TODO get the properties from hive + * Export: Hive-input-null-string + */ + private static final JobParamDefine HIVE_INPUT_NULL_STRING = 
JobParams.define("sqoop.args.input.null.string", job -> { + if (MODE_ENUM.getValue(job) == EXPORT && "hive".equalsIgnoreCase(job.getSourceType())){ + return "\\\\N"; + } + return null; + }, SubExchangisJob.class); + + /** + * TODO get the properties from hive + * Export: Hive-input-null-non-string + */ + private static final JobParamDefine HIVE_INPUT_NULL_NON_STRING = JobParams.define("sqoop.args.input.null.non.string", job -> { + if (MODE_ENUM.getValue(job) == EXPORT && "hive".equalsIgnoreCase(job.getSourceType())){ + return "\\\\N"; + } + return null; + }, SubExchangisJob.class); + + + /** + * Column serializer + */ + private static final JobParamDefine COLUMN_SERIAL = JobParams.define("sqoop.args.columns", job -> { + List columnSerial = new ArrayList<>(); + if (SUPPORT_RDBMS_TYPES.contains(job.getSourceType().toUpperCase())) { + job.getSourceColumns().forEach(columnDefine -> columnSerial.add(columnDefine.getName())); + } else if (SUPPORT_RDBMS_TYPES.contains(job.getSinkType().toUpperCase())) { + job.getSinkColumns().forEach(columnDefine -> columnSerial.add(columnDefine.getName())); + } + return StringUtils.join(columnSerial, ","); + }, SubExchangisJob.class); + + /** + * Inspection of the definitions above + */ + private static final JobParamDefine DEFINE_INSPECTION = JobParams.define("", job -> { + List rdbmsColumns = new ArrayList<>(Arrays.asList(COLUMN_SERIAL.getValue(job).split(","))); + List hadoopColumns = META_HADOOP_COLUMNS.getValue(job).stream().map(MetaColumn::getName) + .collect(Collectors.toList()); + if (IS_USE_HCATALOG.getValue(job)){ + rdbmsColumns.removeAll(hadoopColumns); + if (!rdbmsColumns.isEmpty()){ + warn("NOTE: task:[name:{}, id:{}] 在使用Hcatalog方式下,关系型数据库字段 [" + StringUtils.join(rdbmsColumns, ",") + "] 在hive/hbase表中未查询到对应字段", + job.getName(), job.getId()); + } + }else { + warn("NOTE: task:[name: {}, id:{}] 将使用非Hcatalog方式(原生)导数, 将顺序匹配关系型数据库字段和hive/hbase字段,否则请改变写入方式为APPEND追加", + job.getName(), job.getId()); + } + return null; + }, SubExchangisJob.class); + @Override + public int priority() { + return 1; + } + + @Override + public ExchangisEngineJob buildJob(SubExchangisJob inputJob, ExchangisEngineJob expectOut, ExchangisJobBuilderContext ctx) throws ExchangisJobException { + try { + SqoopExchangisEngineJob engineJob = new SqoopExchangisEngineJob(expectOut); + engineJob.setId(inputJob.getId()); + JobParamDefine[] definitions = getParamDefinitions(); + Map jobContent = engineJob.getJobContent(); + for (JobParamDefine definition : definitions){ + Object paramValue = definition.getValue(inputJob); + if (Objects.nonNull(paramValue)){ + jobContent.put(definition.getKey(), String.valueOf(paramValue)); + } + } + engineJob.setName(inputJob.getName()); + engineJob.setCreateUser(inputJob.getCreateUser()); + return engineJob; + } catch (Exception e) { + throw new ExchangisJobException(ExchangisJobExceptionCode.BUILDER_ENGINE_ERROR.getCode(), + "Fail to build sqoop engine job, message:[" + e.getMessage() + "]", e); + } + } + + @Override + public boolean canBuild(SubExchangisJob inputJob) { + return "sqoop".equalsIgnoreCase(inputJob.getEngineType()); + } + + /** + * Definition list + * @return list + */ + private JobParamDefine[] getParamDefinitions(){ + return new JobParamDefine[]{ + VERBOSE, MODE, CONNECT_DRIVER, CONNECT_STRING, NUM_MAPPERS, + USERNAME, PASSWORD, TABLE, WHERE_CLAUSE, HIVE_IMPORT, HIVE_OVERWRITE, + HIVE_DATABASE, HIVE_TABLE, HIVE_PARTITION_KEY, HIVE_PARTITION_VALUE, HIVE_APPEND, + HIVE_TARGET_DIR, HIVE_DELETE_TARGET, HIVE_FIELDS_TERMINATED_BY, HIVE_NULL_STRING, 
HIVE_NULL_NON_STRING, + EXPORT_DIR, UPDATE_KEY, UPDATE_MODE, + HCATALOG_DATABASE, HCATALOG_TABLE, HCATALOG_PARTITION_KEY, HCATALOG_PARTITION_VALUE, + HIVE_INPUT_FIELDS_TERMINATED_KEY, HIVE_INPUT_NULL_STRING, HIVE_INPUT_NULL_NON_STRING, + COLUMN_SERIAL,DEFINE_INSPECTION + }; + } + + +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/GenericExchangisTransformJobBuilder.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/GenericExchangisTransformJobBuilder.java new file mode 100644 index 000000000..fe06a540c --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/GenericExchangisTransformJobBuilder.java @@ -0,0 +1,219 @@ +package com.webank.wedatasphere.exchangis.job.server.builder.transform; + +import com.webank.wedatasphere.exchangis.common.linkis.bml.BmlResource; +import com.webank.wedatasphere.exchangis.datasource.core.utils.Json; +import com.webank.wedatasphere.exchangis.datasource.core.vo.ExchangisJobInfoContent; +import com.webank.wedatasphere.exchangis.job.builder.ExchangisJobBuilderContext; +import com.webank.wedatasphere.exchangis.job.domain.ExchangisJobInfo; +import com.webank.wedatasphere.exchangis.job.domain.SubExchangisJob; +import com.webank.wedatasphere.exchangis.job.exception.ExchangisJobException; +import com.webank.wedatasphere.exchangis.job.exception.ExchangisJobExceptionCode; +import com.webank.wedatasphere.exchangis.job.server.builder.AbstractLoggingExchangisJobBuilder; +import com.webank.wedatasphere.exchangis.job.server.builder.transform.handlers.SubExchangisJobHandler; +import com.webank.wedatasphere.exchangis.job.server.mapper.JobTransformProcessorDao; +import com.webank.wedatasphere.exchangis.job.server.render.transform.TransformTypes; +import com.webank.wedatasphere.exchangis.job.server.render.transform.processor.TransformProcessor; +import com.webank.wedatasphere.exchangis.job.server.utils.SpringContextHolder; +import org.apache.commons.lang.StringUtils; +import org.apache.linkis.common.exception.ErrorException; +import org.apache.linkis.common.utils.ClassUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.lang.reflect.Modifier; +import java.util.*; +import java.util.concurrent.ConcurrentHashMap; +import java.util.stream.Collectors; + +/** + * TransformJob builder + */ +public class GenericExchangisTransformJobBuilder extends AbstractLoggingExchangisJobBuilder { + + private static final Logger LOG = LoggerFactory.getLogger(GenericExchangisTransformJobBuilder.class); + + /** + * Handlers + */ + private static final Map handlerHolders = new ConcurrentHashMap<>(); + + /** + * Transform dao + */ + private JobTransformProcessorDao transformProcessorDao; + + public synchronized void initHandlers() { + //Should define wds.linkis.reflect.scan.package in properties + Set> jobHandlerSet = ClassUtils.reflections().getSubTypesOf(SubExchangisJobHandler.class); + List reflectedHandlers = jobHandlerSet.stream().map(handlerClass -> { + if (!Modifier.isAbstract(handlerClass.getModifiers()) && + !Modifier.isInterface(handlerClass.getModifiers()) && !handlerClass.equals(SubExchangisJobHandler.class)) { + try { + return handlerClass.newInstance(); + } catch (InstantiationException | IllegalAccessException e) { + LOG.warn("Cannot create the instance of handler: [{}], message: [{}]", handlerClass.getCanonicalName(), e.getMessage(), e); + } + 
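+                    // fall through: handlers that failed to instantiate become null and are
+                    // removed by the nonNull filter below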
} + return null; + }).filter(handler -> Objects.nonNull(handler) && Objects.nonNull(handler.dataSourceType())).collect(Collectors.toList()); + reflectedHandlers.forEach(reflectedHandler -> handlerHolders.compute(reflectedHandler.dataSourceType(), (type, handlerChain) -> { + if (Objects.isNull(handlerChain)){ + handlerChain = new SubExchangisJobHandlerChain(type); + } + handlerChain.addHandler(reflectedHandler); + return handlerChain; + })); + LOG.trace("Sort the handler chain"); + handlerHolders.values().forEach(SubExchangisJobHandlerChain::sort); + LOG.trace("Add the default handlerChain to the head"); + // Add the default handlerChain to the head + Optional.ofNullable(handlerHolders.get(SubExchangisJobHandler.DEFAULT_DATA_SOURCE_TYPE)).ifPresent(defaultHandlerChain -> + handlerHolders.forEach((s, handlerChain) -> { if (!Objects.equals(handlerChain, defaultHandlerChain)){ + handlerChain.addFirstHandler(defaultHandlerChain); + }})); + } + @Override + public TransformExchangisJob buildJob(ExchangisJobInfo inputJob, TransformExchangisJob expectOut, ExchangisJobBuilderContext ctx) throws ExchangisJobException { + LOG.trace("Start to build exchangis transform job, name: [{}], id: [{}], engine: [{}], content: [{}]", + inputJob.getName(), inputJob.getId(), inputJob.getEngineType(), inputJob.getJobContent()); + // First convert the content to "ExchangisJobInfoContent" + TransformExchangisJob outputJob = new TransformExchangisJob(); + outputJob.setCreateUser(Optional.ofNullable(inputJob.getExecuteUser()).orElse(String.valueOf(ctx.getEnv("USER_NAME")))); + try { + if (StringUtils.isNotBlank(inputJob.getJobContent())) { + // First convert the content to "ExchangisJobInfoContent" + List<ExchangisJobInfoContent> contents = Json.fromJson(inputJob.getJobContent(), List.class, ExchangisJobInfoContent.class); + if (Objects.nonNull(contents)) { + LOG.info("To parse content ExchangisJob: id: [{}], name: [{}], expect subJobs: [{}]", + inputJob.getId(), inputJob.getName(), contents.size()); + // Second, create the SubExchangisJob instances + List<SubExchangisJob> subExchangisJobs = contents.stream().map(job -> { + TransformExchangisJob.TransformSubExchangisJob transformSubJob = new TransformExchangisJob.TransformSubExchangisJob(job); + transformSubJob.setId(inputJob.getId()); + transformSubJob.setCreateUser(outputJob.getCreateUser()); + setTransformCodeResource(transformSubJob); + return transformSubJob; + }) + .collect(Collectors.toList()); + outputJob.setSubJobSet(subExchangisJobs); + outputJob.setId(inputJob.getId()); + outputJob.setName(inputJob.getName()); + LOG.info("Invoke job handlers to handle the subJobs, ExchangisJob: id: [{}], name: [{}]", inputJob.getId(), inputJob.getName()); + // Handle each of the sub jobs + for (SubExchangisJob subExchangisJob : subExchangisJobs){ + if (StringUtils.isBlank(subExchangisJob.getEngineType())){ + subExchangisJob.setEngineType(inputJob.getEngineType()); + } + SubExchangisJobHandler sourceHandler = handlerHolders.get(StringUtils + .isNotBlank(subExchangisJob.getSourceType()) ? subExchangisJob.getSourceType().toLowerCase() : ""); + if (Objects.isNull(sourceHandler)){ + LOG.warn("No source handler found for subJob named: [{}], sourceType: [{}], " + + "ExchangisJob: id: [{}], name: [{}], using default instead", + subExchangisJob.getName(), subExchangisJob.getSourceType(), inputJob.getId(), inputJob.getName()); + sourceHandler = handlerHolders.get(SubExchangisJobHandler.DEFAULT_DATA_SOURCE_TYPE); + } + SubExchangisJobHandler sinkHandler = handlerHolders.get(StringUtils + .isNotBlank(subExchangisJob.getSinkType()) ? subExchangisJob.getSinkType().toLowerCase() : ""); + if (Objects.isNull(sinkHandler)){ + LOG.warn("No sink handler found for subJob named: [{}], sinkType: [{}], ExchangisJob: id: [{}], name: [{}], using default instead", + subExchangisJob.getName(), subExchangisJob.getSinkType(), inputJob.getId(), inputJob.getName()); + sinkHandler = handlerHolders.get(SubExchangisJobHandler.DEFAULT_DATA_SOURCE_TYPE); + } + LOG.trace("Invoke handlers for subJob: [{}], sourceHandler: [{}], sinkHandler: [{}]", subExchangisJob.getName(), sourceHandler, sinkHandler); + // TODO: handle the subExchangisJobs in parallel + if (Objects.nonNull(sourceHandler)) { + sourceHandler.handleSource(subExchangisJob, ctx); + } + if (Objects.nonNull(sinkHandler)){ + sinkHandler.handleSink(subExchangisJob, ctx); + } + } + } else { + throw new ExchangisJobException(ExchangisJobExceptionCode.BUILDER_TRANSFORM_ERROR.getCode(), + "Illegal content string: [" + inputJob.getJobContent() + "] in job, please check", null); + } + } else { + LOG.warn("It looks like an empty job? id: [{}], name: [{}]", inputJob.getId(), inputJob.getName()); + } + } catch (Exception e){ + throw new ExchangisJobException(ExchangisJobExceptionCode.BUILDER_TRANSFORM_ERROR.getCode(), + "Failed to build transformJob from input job, message: [" + e.getMessage() + "]", e); + } + return outputJob; + } + + /** + * Set the code resource to the transform job + * @param subExchangisJob sub transform job + */ + private void setTransformCodeResource(TransformExchangisJob.TransformSubExchangisJob subExchangisJob){ + if (subExchangisJob.getTransformType() == TransformTypes.PROCESSOR){ + TransformProcessor processor = getTransformProcessorDao().getProcInfo( + Long.valueOf(subExchangisJob.getJobInfoContent().getTransforms().getCodeId())); + if (Objects.nonNull(processor)){ + // TODO: the processor content may not be stored in BML + subExchangisJob.addCodeResource(new + BmlResource(processor.getCodeBmlResourceId(), processor.getCodeBmlVersion())); + } + } + } + + /** + * Processor dao + * @return dao + */ + private JobTransformProcessorDao getTransformProcessorDao(){ + if (null == transformProcessorDao) { + this.transformProcessorDao = SpringContextHolder.getBean(JobTransformProcessorDao.class); + } + return this.transformProcessorDao; + } + /** + * Handler chain + */ + private static class SubExchangisJobHandlerChain implements SubExchangisJobHandler{ + + private String dataSourceType; + + private final LinkedList<SubExchangisJobHandler> handlers = new LinkedList<>(); + + public SubExchangisJobHandlerChain(){} + public SubExchangisJobHandlerChain(String dataSourceType){ + this.dataSourceType = dataSourceType; + } + public void addFirstHandler(SubExchangisJobHandler handler){ + handlers.addFirst(handler); + } + + public void addHandler(SubExchangisJobHandler handler){ + handlers.add(handler); + } + public void sort(){ + handlers.sort(Comparator.comparingInt(SubExchangisJobHandler::order)); + } + @Override + public String dataSourceType() { + return dataSourceType; + } + + @Override + public void handleSource(SubExchangisJob subExchangisJob, ExchangisJobBuilderContext ctx) throws ErrorException { + for (SubExchangisJobHandler handler : handlers){ + if (handler.acceptEngine(subExchangisJob.getEngineType())) { + handler.handleSource(subExchangisJob, ctx); + } + } + } + + @Override + public void handleSink(SubExchangisJob subExchangisJob, ExchangisJobBuilderContext ctx) throws ErrorException { + for (SubExchangisJobHandler handler : handlers){ + if (handler.acceptEngine(subExchangisJob.getEngineType())) { +
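+ // Handlers run in ascending order() sequence, with the default chain (if any) prepended at the head; a handler that does not accept the sub job's engine type is skipped. E.g. for a mysql sub job on datax, the chain typically resolves to [GenericSubExchangisJobHandler, MySQLAutoColumnJobHandler, MySQLDataxSubExchangisJobHandler].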
handler.handleSink(subExchangisJob, ctx); + } + } + } + } + + +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/TransformExchangisJob.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/TransformExchangisJob.java new file mode 100644 index 000000000..a9798b845 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/TransformExchangisJob.java @@ -0,0 +1,288 @@ +package com.webank.wedatasphere.exchangis.job.server.builder.transform; + +import com.webank.wedatasphere.exchangis.common.linkis.bml.BmlResource; +import com.webank.wedatasphere.exchangis.datasource.core.utils.Json; +import com.webank.wedatasphere.exchangis.datasource.core.vo.*; +import com.webank.wedatasphere.exchangis.job.domain.ExchangisJobInfo; +import com.webank.wedatasphere.exchangis.job.domain.GenericExchangisJob; +import com.webank.wedatasphere.exchangis.job.domain.SubExchangisJob; +import com.webank.wedatasphere.exchangis.job.domain.params.JobParamSet; +import com.webank.wedatasphere.exchangis.job.domain.params.JobParams; +import com.webank.wedatasphere.exchangis.job.server.builder.JobParamConstraints; +import com.webank.wedatasphere.exchangis.job.server.builder.transform.handlers.GenericSubExchangisJobHandler; +import com.webank.wedatasphere.exchangis.job.server.render.transform.TransformTypes; +import com.webank.wedatasphere.exchangis.job.server.utils.JobUtils; +import org.apache.commons.lang.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.*; +import java.util.concurrent.atomic.AtomicReference; +import java.util.function.Supplier; +import java.util.stream.Collectors; + +/** + * Transform job + */ +public class TransformExchangisJob extends GenericExchangisJob { + + private static final Logger LOG = LoggerFactory.getLogger(TransformExchangisJob.class); + + private ExchangisJobInfo exchangisJobInfo; + /** + * Set of sub jobs + */ + private List subJobSet = new ArrayList<>(); + + public ExchangisJobInfo getExchangisJobInfo() { + return exchangisJobInfo; + } + + public void setExchangisJobInfo(ExchangisJobInfo exchangisJobInfo) { + this.exchangisJobInfo = exchangisJobInfo; + } + + public List getSubJobSet() { + return subJobSet; + } + + public void setSubJobSet(List subJobSet) { + this.subJobSet = subJobSet; + } + + /** + * Wrap entity of 'ExchangisJobInfoContent' + */ + public static class TransformSubExchangisJob extends SubExchangisJob{ + + private static final String CODE_RESOURCE_NAME = ".code"; + /** + * Transform type + */ + private TransformTypes transformType; + + /** + * Content VO + */ + private ExchangisJobInfoContent jobInfoContent; + + /** + * Resource map + */ + private final Map resources = new HashMap<>(); + + public TransformSubExchangisJob(ExchangisJobInfoContent jobInfoContent){ + if(Objects.nonNull(jobInfoContent)) { + this.jobInfoContent = jobInfoContent; + this.engineType = jobInfoContent.getEngine(); + this.name = jobInfoContent.getSubJobName(); + convertContentToParams(jobInfoContent); + Optional.ofNullable(jobInfoContent.getTransforms()).ifPresent(transforms -> { + if (StringUtils.isNotBlank(transforms.getType())) { + this.transformType = TransformTypes.valueOf(transforms.getType().toUpperCase(Locale.ROOT)); + // TODO define different transform sub jobs + convertTransformToColumnDefine(transforms); + } + }); + } + } + + public 
ExchangisJobInfoContent getJobInfoContent() { + return jobInfoContent; + } + + public void setJobInfoContent(ExchangisJobInfoContent jobInfoContent) { + this.jobInfoContent = jobInfoContent; + } + + /** + * Convert content to column definitions + * @param transforms transforms content + */ + private void convertTransformToColumnDefine(ExchangisJobTransformsContent transforms){ + List<ExchangisJobTransformsItem> items = transforms.getMapping(); + if (Objects.nonNull(items)){ + for (int i = 0; i < items.size(); i++){ + final int index = i; + ExchangisJobTransformsItem item = items.get(i); + ColumnDefine srcColumn = new ColumnDefine(item.getSourceFieldName(), + item.getSourceFieldType(), item.getSourceFieldIndex()); + ColumnDefine sinkColumn = new ColumnDefine(item.getSinkFieldName(), + item.getSinkFieldType(), item.getSinkFieldIndex()); + Optional.ofNullable(item.getValidator()).ifPresent(validator -> + convertValidatorFunction(index, validator)); + Optional.ofNullable(item.getTransformer()).ifPresent(transformer -> + convertTransformFunction(index, transformer)); + getSourceColumns().add(srcColumn); + getSinkColumns().add(sinkColumn); + } + } + } + /** + * Convert to validator function + * @param index index + * @param validator validator + */ + private void convertValidatorFunction(int index, List<String> validator){ + if (!validator.isEmpty()) { + ColumnFunction function = new ColumnFunction(); + function.setIndex(index); + // TODO: abstract the function name + function.setName("dx_filter"); + function.setParams(new ArrayList<>(validator)); + getColumnFunctions().add(function); + } + } + + /** + * Convert to transform function + * @param index index + * @param transformer transformer + */ + private void convertTransformFunction(int index, ExchangisJobTransformer transformer){ + if (StringUtils.isNotBlank(transformer.getName())) { + ColumnFunction function = new ColumnFunction(); + function.setIndex(index); + function.setName(transformer.getName()); + function.setParams(transformer.getParams()); + getColumnFunctions().add(function); + } + } + /** + * Convert content to params + * @param content content + */ + private void convertContentToParams(ExchangisJobInfoContent content){ + setIntoParams(REALM_JOB_DATA_SOURCE, () -> Json.convert(content.getDataSources(), Map.class, String.class, String.class)); +// setIntoParams(REALM_JOB_COLUMN_MAPPING, () -> Json.convert(content.getTransforms(), Map.class, String.class, Object.class)); + if (Objects.nonNull(content.getParams())){ + if (Objects.nonNull(content.getParams().getSources())) { + List<ExchangisJobParamsContent.ExchangisJobParamsItem> items = content.getParams().getSources(); + timePlaceHolderConvert(items); + JobParamSet paramSet = setIntoParams(REALM_JOB_CONTENT_SOURCE, () -> + items.stream().filter(item -> StringUtils.isNotBlank(item.getConfigKey()) && Objects.nonNull(item.getConfigValue())).collect + (Collectors.toMap(ExchangisJobParamsContent.ExchangisJobParamsItem::getConfigKey, + ExchangisJobParamsContent.ExchangisJobParamsItem::getConfigValue))); + if (Objects.nonNull(paramSet)) { + String sourceId = content.getDataSources().getSourceId(); + if (StringUtils.isNotBlank(sourceId)){ + this.sourceType = resolveDataSourceId(sourceId, paramSet); + } + } + } + + if (Objects.nonNull(content.getParams().getSinks())) { + List<ExchangisJobParamsContent.ExchangisJobParamsItem> items = content.getParams().getSinks(); + timePlaceHolderConvert(items); + JobParamSet paramSet = setIntoParams(REALM_JOB_CONTENT_SINK, () -> + items.stream().filter(item -> StringUtils.isNotBlank(item.getConfigKey()) && Objects.nonNull(item.getConfigValue())).collect + (Collectors.toMap(ExchangisJobParamsContent.ExchangisJobParamsItem::getConfigKey, + ExchangisJobParamsContent.ExchangisJobParamsItem::getConfigValue))); + if (Objects.nonNull(paramSet)) { + String sinkId = content.getDataSources().getSinkId(); + if (StringUtils.isNotBlank(sinkId)){ + this.sinkType = resolveDataSourceId(sinkId, paramSet); + } + } + } + } + if (Objects.nonNull(content.getSettings())){ + setIntoParams(REALM_JOB_SETTINGS, () -> { + List<ExchangisJobParamsContent.ExchangisJobParamsItem> items = content.getSettings(); + return items.stream().filter(item -> StringUtils.isNotBlank(item.getConfigKey())).collect + (Collectors.toMap(ExchangisJobParamsContent.ExchangisJobParamsItem::getConfigKey, + ExchangisJobParamsContent.ExchangisJobParamsItem::getConfigValue)); + }); + } + } + + private JobParamSet setIntoParams(String realm, Supplier<Map<String, Object>> paramsSupplier){ + Optional<Map<String, Object>> dataSourceMap = Optional.ofNullable(paramsSupplier.get()); + dataSourceMap.ifPresent(map -> { + JobParamSet paramSet = map.entrySet().stream().map(entry -> JobParams.newOne(entry.getKey(), entry.getValue())) + .reduce(new JobParamSet(), JobParamSet::add, JobParamSet::combine); + LOG.trace("Set params into sub exchangis job, realm: [{}], paramSet: [{}]", realm, paramSet.toString()); + super.addRealmParams(realm, paramSet); + }); + return getRealmParams(realm); + } + + /** + * Resolve the data source id serial, format: {TYPE}.{ID}.{DB}.{TABLE} + * @param dataSourceId data source id serial + * @param paramSet param set + * @return data source type + */ + private String resolveDataSourceId(String dataSourceId, JobParamSet paramSet){ + AtomicReference<String[]> result = new AtomicReference<>(new String[]{}); + Optional.ofNullable(dataSourceId).ifPresent(id -> + result.set(id.split(GenericSubExchangisJobHandler.ID_SPLIT_SYMBOL))); + String[] idSerial = result.get(); + if (idSerial.length > 0){ + if (idSerial.length >= 4){ + paramSet.add(JobParams.newOne(JobParamConstraints.DATA_SOURCE_ID, idSerial[1], true)); + paramSet.add(JobParams.newOne(JobParamConstraints.DATABASE, idSerial[2], true)); + paramSet.add(JobParams.newOne(JobParamConstraints.TABLE, idSerial[3], true)); + } + return idSerial[0]; + } + return null; + } + + /** + * Replace the time partition placeholders in the config values + * @param items config items + */ + private void timePlaceHolderConvert(List<ExchangisJobParamsContent.ExchangisJobParamsItem> items) { + items.forEach(item -> { + Object value = item.getConfigValue(); + if (value instanceof String){ + item.setConfigValue(JobUtils.replaceVariable((String) value, new HashMap<>())); + } else if (value instanceof Map){ + for (Object key : ((Map) value).keySet()) { + ((Map) value).put(key, JobUtils.replaceVariable(((String) ((Map) value).get(key)), new HashMap<>())); + } + } + }); + } + + /** + * Transform type + * @return type string + */ + public TransformTypes getTransformType() { + return transformType; + } + + /** + * Add code resource + * @param bmlResource bml resource + */ + void addCodeResource(BmlResource bmlResource){ + this.resources.put(CODE_RESOURCE_NAME, bmlResource); + } + + /** + * Get code resource + * @return bml resource + */ + public BmlResource getCodeResource(){ + return this.resources.get(CODE_RESOURCE_NAME); + } + } + + +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/AbstractLoggingSubExchangisJobHandler.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/AbstractLoggingSubExchangisJobHandler.java new file mode 100644 index 000000000..568270548 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/AbstractLoggingSubExchangisJobHandler.java @@ -0,0 +1,145 @@ +package com.webank.wedatasphere.exchangis.job.server.builder.transform.handlers; + + +import com.webank.wedatasphere.exchangis.job.builder.ExchangisJobBuilderContext; +import com.webank.wedatasphere.exchangis.job.domain.SubExchangisJob; +import com.webank.wedatasphere.exchangis.job.domain.params.JobParamDefine; +import com.webank.wedatasphere.exchangis.job.domain.params.JobParamSet; +import com.webank.wedatasphere.exchangis.job.exception.ExchangisJobException; +import com.webank.wedatasphere.exchangis.job.server.builder.SpringExchangisJobBuilderContext; +import org.apache.linkis.common.exception.ErrorException; + +import java.util.List; +import java.util.Objects; +import java.util.Optional; + +/** + * Job handler that carries the job builder context + */ +public abstract class AbstractLoggingSubExchangisJobHandler implements SubExchangisJobHandler{ + + private static final ThreadLocal<SpringExchangisJobBuilderContext> springContext = new ThreadLocal<>(); + + @Override + public final void handleSource(SubExchangisJob subExchangisJob, ExchangisJobBuilderContext ctx) throws ErrorException { + wrapFuncWithContext(ctx, () -> { + try { + handleSrcColumns(subExchangisJob, ctx, subExchangisJob.getSourceColumns()); + handleJobSource(subExchangisJob, ctx); + } catch (ErrorException e){ + throw new ExchangisJobException.Runtime(-1, "Exception in handling job source parameters", e); + } + }); + } + + @Override + public final void handleSink(SubExchangisJob subExchangisJob, ExchangisJobBuilderContext ctx) throws ErrorException { + wrapFuncWithContext(ctx, () -> { + try { + handleSinkColumns(subExchangisJob, ctx, subExchangisJob.getSinkColumns()); + handleJobSink(subExchangisJob, ctx); + } catch (ErrorException e) { + throw new ExchangisJobException.Runtime(-1, "Exception in handling job sink parameters", e); + } + }); + } + + /** + * Wrap the function (runnable) with the context + * @param context context + * @param runnable function + */ + private void wrapFuncWithContext(ExchangisJobBuilderContext context, Runnable runnable){ + if (context instanceof SpringExchangisJobBuilderContext){ + springContext.set((SpringExchangisJobBuilderContext) context); + // Reset the default param set + JobParamSet storedParamSet = JobParamDefine.defaultParam.get(); + JobParamDefine.defaultParam.set(new JobParamSet()); + try { + runnable.run(); + } finally { + springContext.remove(); + // Restore the default param set + if (Objects.nonNull(storedParamSet)){ + JobParamDefine.defaultParam.set(storedParamSet); + } else { + JobParamDefine.defaultParam.remove(); + } + } + } else { + runnable.run(); + } + } + + + /** + * Handle source columns + * @param columns columns + */ + protected void handleSrcColumns(SubExchangisJob subExchangisJob, ExchangisJobBuilderContext ctx, + List<SubExchangisJob.ColumnDefine> columns) { + // Empty + } + + /** + * Handle sink columns + * @param columns columns + */ + protected void handleSinkColumns(SubExchangisJob subExchangisJob, ExchangisJobBuilderContext ctx, + List<SubExchangisJob.ColumnDefine> columns){ + // Empty + } + + + + /** + * Handle job source params + * @param subExchangisJob sub exchangis job + * @param ctx context + */ + public abstract void handleJobSource(SubExchangisJob subExchangisJob, ExchangisJobBuilderContext ctx) throws ErrorException; + + /** + * Handle job sink params + * @param subExchangisJob sub exchangis job + * @param ctx context + */ + public abstract 
void handleJobSink(SubExchangisJob subExchangisJob, ExchangisJobBuilderContext ctx) throws ErrorException; + + public void preHandleJobParamSet(JobParamSet paramSet) { + // Empty + } + + /** + * Warn message + * @param message message + */ + public static void warn(String message, Object... args){ + Optional.ofNullable(springContext.get()).ifPresent(ctx -> ctx.getLogging().warn(null, message, args)); + } + + public static void warn(String message, Throwable t){ + Optional.ofNullable(springContext.get()).ifPresent(ctx -> ctx.getLogging().warn(null, message, t)); + } + + /** + * Info message + * @param message message + */ + public static void info(String message, Object... args){ + Optional.ofNullable(springContext.get()).ifPresent(ctx -> ctx.getLogging().info(null, message, args)); + } + + public static void info(String message, Throwable t){ + Optional.ofNullable(springContext.get()).ifPresent(ctx -> ctx.getLogging().info(null, message, t)); + } + + public static <T> T getBean(Class<T> clazz){ + return Objects.nonNull(springContext.get()) ? springContext.get().getBean(clazz) : null; + } + + protected static SpringExchangisJobBuilderContext getJobBuilderContext(){ + return springContext.get(); + } + +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/AuthEnabledSubExchangisJobHandler.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/AuthEnabledSubExchangisJobHandler.java new file mode 100644 index 000000000..b290e8643 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/AuthEnabledSubExchangisJobHandler.java @@ -0,0 +1,52 @@ +package com.webank.wedatasphere.exchangis.job.server.builder.transform.handlers; + + +import com.webank.wedatasphere.exchangis.job.domain.params.JobParam; +import com.webank.wedatasphere.exchangis.job.domain.params.JobParamDefine; +import com.webank.wedatasphere.exchangis.job.domain.params.JobParams; +import com.webank.wedatasphere.exchangis.job.exception.ExchangisJobException; +import com.webank.wedatasphere.exchangis.job.server.builder.JobParamConstraints; +import org.apache.commons.codec.binary.Base64; +import org.apache.commons.lang3.StringUtils; + +import java.io.ByteArrayOutputStream; +import java.io.ObjectOutputStream; +import java.util.Objects; + +/** + * Handler with authentication (username/password) support + */ +public abstract class AuthEnabledSubExchangisJobHandler extends AbstractLoggingSubExchangisJobHandler{ + /** + * Disable encrypt + */ + protected static final JobParamDefine<Boolean> ENCRYPT_DISABLE = JobParams.define("encrypt.disable"); + + /** + * User name + */ + protected static final JobParamDefine<String> USERNAME = JobParams.define(JobParamConstraints.USERNAME); + + /** + * Password + */ + protected static final JobParamDefine<String> PASSWORD = JobParams.define(JobParamConstraints.PASSWORD, paramSet -> { + JobParam<String> password = paramSet.get(JobParamConstraints.PASSWORD); + if (Objects.nonNull(password) && StringUtils.isNotBlank(password.getValue())) { + Boolean encrypt = ENCRYPT_DISABLE.getValue(paramSet); + if (Objects.isNull(encrypt) || !encrypt) { + try (ByteArrayOutputStream bos = new ByteArrayOutputStream()) { + try (ObjectOutputStream oos = new ObjectOutputStream(bos)) { + oos.writeObject(password.getValue()); + oos.flush(); + } + return new String(new Base64().encode(bos.toByteArray())); + } catch (Exception e) { + throw new ExchangisJobException.Runtime(-1, "Failed to encrypt password", e); + } + } + return password.getValue(); + } + return null; + }); +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/GenericSubExchangisJobHandler.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/GenericSubExchangisJobHandler.java new file mode 100644 index 000000000..a3791cb80 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/GenericSubExchangisJobHandler.java @@ -0,0 +1,99 @@ +package com.webank.wedatasphere.exchangis.job.server.builder.transform.handlers; + +import com.webank.wedatasphere.exchangis.datasource.service.ExchangisDataSourceService; +import com.webank.wedatasphere.exchangis.job.builder.ExchangisJobBuilderContext; +import com.webank.wedatasphere.exchangis.job.domain.ExchangisJobInfo; +import com.webank.wedatasphere.exchangis.job.domain.SubExchangisJob; +import com.webank.wedatasphere.exchangis.job.domain.params.JobParam; +import com.webank.wedatasphere.exchangis.job.domain.params.JobParamDefine; +import com.webank.wedatasphere.exchangis.job.domain.params.JobParamSet; +import com.webank.wedatasphere.exchangis.job.domain.params.JobParams; +import com.webank.wedatasphere.exchangis.job.server.utils.SpringContextHolder; +import org.apache.linkis.common.exception.ErrorException; +import org.apache.linkis.datasource.client.response.GetConnectParamsByDataSourceIdResult; + +import java.util.Objects; +import java.util.Optional; + +/** + * Default implementation that fetches the data source params of the job + */ +public class GenericSubExchangisJobHandler extends AbstractLoggingSubExchangisJobHandler{ + + public static final String ID_SPLIT_SYMBOL = "\\."; + + private static final JobParamDefine<String> SOURCE_ID = JobParams.define("source_id"); + + private static final JobParamDefine<String> SINK_ID = JobParams.define("sink_id"); + + @Override + public String dataSourceType() { + return DEFAULT_DATA_SOURCE_TYPE; + } + + @Override + public void handleJobSource(SubExchangisJob subExchangisJob, ExchangisJobBuilderContext ctx) throws ErrorException { + ExchangisJobInfo originJob = ctx.getOriginalJob(); + JobParamSet idParamSet = subExchangisJob.getRealmParams(SubExchangisJob.REALM_JOB_DATA_SOURCE); + JobParamSet sourceParamSet = subExchangisJob.getRealmParams(SubExchangisJob.REALM_JOB_CONTENT_SOURCE); + if (Objects.nonNull(idParamSet) && Objects.nonNull(sourceParamSet)){ + info("Fetch data source parameters in [{}]", subExchangisJob.getSourceType()); + appendDataSourceParams(idParamSet.load(SOURCE_ID), sourceParamSet, originJob.getCreateUser()); + } + } + + @Override + public void handleJobSink(SubExchangisJob subExchangisJob, ExchangisJobBuilderContext ctx) throws ErrorException{ + ExchangisJobInfo originJob = ctx.getOriginalJob(); + JobParamSet idParamSet = subExchangisJob.getRealmParams(SubExchangisJob.REALM_JOB_DATA_SOURCE); + JobParamSet sinkParamSet = subExchangisJob.getRealmParams(SubExchangisJob.REALM_JOB_CONTENT_SINK); + if (Objects.nonNull(idParamSet) && Objects.nonNull(sinkParamSet)){ + info("Fetch data source parameters in [{}]", subExchangisJob.getSinkType()); + appendDataSourceParams(idParamSet.load(SINK_ID), sinkParamSet, originJob.getCreateUser()); + } + } + + /** + * Append data source params + * @param idParam param + * @param paramSet param set + * @param userName username + * 
@throws ErrorException + */ + private void appendDataSourceParams(JobParam idParam, JobParamSet paramSet, String userName) throws ErrorException { + ExchangisDataSourceService dataSourceService = DataSourceService.instance; + String sourceId = idParam.getValue(); + if(Objects.nonNull(sourceId)){ + // {TYPE}.{ID}.{DB}.{TABLE} + String[] idSerial = sourceId.split(ID_SPLIT_SYMBOL); + if (idSerial.length >= 2){ + GetConnectParamsByDataSourceIdResult infoResult = dataSourceService.getDataSourceConnectParamsById(userName, Long.valueOf(idSerial[1])); + Optional.ofNullable(infoResult.connectParams()).ifPresent(connectParams -> + connectParams.forEach((key, value) -> paramSet.add(JobParams.newOne(key, value, true)))); + } + } + } + + @Override + public int order() { + return Integer.MIN_VALUE; + } + + public static class DataSourceService{ + + /** + * Lazy load data source service + */ + public static ExchangisDataSourceService instance; + + static{ + instance = SpringContextHolder.getBean(ExchangisDataSourceService.class); + } + } + + @Override + public String toString() { + return this.getClass().getSimpleName() + "(" + dataSourceType() + ")"; + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/MongoDataxSubExchangisJobHandler.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/MongoDataxSubExchangisJobHandler.java new file mode 100644 index 000000000..ee1f2289a --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/MongoDataxSubExchangisJobHandler.java @@ -0,0 +1,82 @@ +package com.webank.wedatasphere.exchangis.job.server.builder.transform.handlers; + +import com.webank.wedatasphere.exchangis.datasource.core.utils.Json; +import com.webank.wedatasphere.exchangis.job.builder.ExchangisJobBuilderContext; +import com.webank.wedatasphere.exchangis.job.domain.SubExchangisJob; +import com.webank.wedatasphere.exchangis.job.domain.params.JobParamDefine; +import com.webank.wedatasphere.exchangis.job.domain.params.JobParamSet; +import com.webank.wedatasphere.exchangis.job.domain.params.JobParams; +import com.webank.wedatasphere.exchangis.job.server.builder.JobParamConstraints; +import org.apache.linkis.common.exception.ErrorException; + +import java.util.Arrays; +import java.util.Map; +import java.util.Objects; + +/** + * Params mapping for mongo in datax + */ +public class MongoDataxSubExchangisJobHandler extends AuthEnabledSubExchangisJobHandler { + + /** + * Database + */ + private static final JobParamDefine DATABASE = JobParams.define("dbName", JobParamConstraints.DATABASE); + + /** + * Host + */ + private static final JobParamDefine SOURCE_HOST = JobParams.define("conn_ins[0].host", JobParamConstraints.HOST); + private static final JobParamDefine SINK_HOST = JobParams.define("conn_ins[0].host", JobParamConstraints.HOST); + + /** + * Port + */ + private static final JobParamDefine SOURCE_PORT = JobParams.define("conn_ins[0].port", JobParamConstraints.PORT); + private static final JobParamDefine SINK_PORT = JobParams.define("conn_ins[0].port", JobParamConstraints.PORT); + + /** + * Connect params + */ + private static final JobParamDefine> OPTION_PARAMS = JobParams.define("optionParams", JobParamConstraints.CONNECT_PARAMS, + connectParams -> Json.fromJson(connectParams, Map.class), String.class); + /** + * Collection name(table) + */ + private static final 
JobParamDefine COLLECTION_NAME = JobParams.define("collectionName", JobParamConstraints.TABLE); + + @Override + public String dataSourceType() { + return "mongodb"; + } + + @Override + public boolean acceptEngine(String engineType) { + return "datax".equalsIgnoreCase(engineType); + } + + + @Override + public void handleJobSource(SubExchangisJob subExchangisJob, ExchangisJobBuilderContext ctx) throws ErrorException { + JobParamSet paramSet = subExchangisJob.getRealmParams(SubExchangisJob.REALM_JOB_CONTENT_SOURCE); + if (Objects.nonNull(paramSet)){ + Arrays.asList(sourceMappings()).forEach(define -> paramSet.addNonNull(define.get(paramSet))); + } + } + + @Override + public void handleJobSink(SubExchangisJob subExchangisJob, ExchangisJobBuilderContext ctx) throws ErrorException { + JobParamSet paramSet = subExchangisJob.getRealmParams(SubExchangisJob.REALM_JOB_CONTENT_SINK); + if (Objects.nonNull(paramSet)){ + Arrays.asList(sinkMappings()).forEach(define -> paramSet.addNonNull(define.get(paramSet))); + } + } + + public JobParamDefine[] sourceMappings() { + return new JobParamDefine[]{SOURCE_HOST, SOURCE_PORT, USERNAME, PASSWORD, DATABASE, COLLECTION_NAME, OPTION_PARAMS}; + } + + public JobParamDefine[] sinkMappings() { + return new JobParamDefine[]{SINK_HOST, SINK_PORT, USERNAME, PASSWORD, DATABASE, COLLECTION_NAME, OPTION_PARAMS}; + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/MySQLDataxSubExchangisJobHandler.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/MySQLDataxSubExchangisJobHandler.java new file mode 100644 index 000000000..873cdf9b9 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/MySQLDataxSubExchangisJobHandler.java @@ -0,0 +1,119 @@ +package com.webank.wedatasphere.exchangis.job.server.builder.transform.handlers; + +import com.webank.wedatasphere.exchangis.datasource.core.utils.Json; +import com.webank.wedatasphere.exchangis.job.builder.ExchangisJobBuilderContext; +import com.webank.wedatasphere.exchangis.job.domain.SubExchangisJob; +import com.webank.wedatasphere.exchangis.job.domain.params.JobParamDefine; +import com.webank.wedatasphere.exchangis.job.domain.params.JobParamSet; +import com.webank.wedatasphere.exchangis.job.domain.params.JobParams; +import com.webank.wedatasphere.exchangis.job.server.builder.JobParamConstraints; +import com.webank.wedatasphere.exchangis.job.server.utils.SQLCommandUtils; +import org.apache.linkis.common.exception.ErrorException; + +import java.util.*; +import java.util.stream.Collectors; + +/** + * Mysql in datax + */ +public class MySQLDataxSubExchangisJobHandler extends AuthEnabledSubExchangisJobHandler { + + /** + * Database + */ + private static final JobParamDefine SOURCE_DATABASE = JobParams.define("connection[0].jdbcUrl[0].database", JobParamConstraints.DATABASE); + private static final JobParamDefine SINK_DATABASE = JobParams.define("connection[0].jdbcUrl.database", JobParamConstraints.DATABASE); + + /** + * Table + */ + private static final JobParamDefine SINK_TABLE = JobParams.define("connection[0].table[0]", JobParamConstraints.TABLE); + + /** + * Host + */ + private static final JobParamDefine SOURCE_HOST = JobParams.define("connection[0].jdbcUrl[0].host", JobParamConstraints.HOST); + private static final JobParamDefine SINK_HOST = 
JobParams.define("connection[0].jdbcUrl.host", JobParamConstraints.HOST); + + /** + * Port + */ + private static final JobParamDefine SOURCE_PORT = JobParams.define("connection[0].jdbcUrl[0].port", JobParamConstraints.PORT); + private static final JobParamDefine SINK_PORT = JobParams.define("connection[0].jdbcUrl.port", JobParamConstraints.PORT); + + /** + * Connect params + */ + private static final JobParamDefine> SOURCE_PARAMS_MAP = JobParams.define("connection[0].jdbcUrl[0].connParams", JobParamConstraints.CONNECT_PARAMS, + connectParams -> Json.fromJson(connectParams, Map.class), String.class); + private static final JobParamDefine> SINK_PARAMS_MAP = JobParams.define("connection[0].jdbcUrl.connParams", JobParamConstraints.CONNECT_PARAMS, + connectParams -> Json.fromJson(connectParams, Map.class), String.class); + /** + * Where condition + */ + private static final JobParamDefine WHERE_CONDITION = JobParams.define(JobParamConstraints.WHERE); + + /** + * Query sql + */ + private static final JobParamDefine QUERY_SQL = JobParams.define("connection[0].querySql[0]", job ->{ + JobParamSet sourceParams = job.getRealmParams(SubExchangisJob.REALM_JOB_CONTENT_SOURCE); + String where = WHERE_CONDITION.getValue(sourceParams); + List columns = job.getSourceColumns().stream().map(SubExchangisJob.ColumnDefine::getName).collect(Collectors.toList()); + if (columns.isEmpty()){ + columns.add("*"); + } + return SQLCommandUtils.contactSql(Collections.singletonList(sourceParams + .get(JobParamConstraints.TABLE).getValue()), null, columns, null, where); + }, SubExchangisJob.class); + + /** + * SQL column + */ + private static final JobParamDefine> SQL_COLUMN = JobParams.define("column", job -> { + List columns = job.getSinkColumns().stream().map(SubExchangisJob.ColumnDefine::getName).collect(Collectors.toList()); + if (columns.isEmpty()){ + columns.add("*"); + } + return columns; + }, SubExchangisJob.class); + @Override + public void handleJobSource(SubExchangisJob subExchangisJob, ExchangisJobBuilderContext ctx) throws ErrorException { + JobParamSet paramSet = subExchangisJob.getRealmParams(SubExchangisJob.REALM_JOB_CONTENT_SOURCE); + preHandleJobParamSet(paramSet); + if (Objects.nonNull(paramSet)){ + Arrays.asList(sourceMappings()).forEach(define -> paramSet.addNonNull(define.get(paramSet))); + paramSet.add(QUERY_SQL.newParam(subExchangisJob)); + } + } + + @Override + public void handleJobSink(SubExchangisJob subExchangisJob, ExchangisJobBuilderContext ctx) throws ErrorException { + JobParamSet paramSet = subExchangisJob.getRealmParams(SubExchangisJob.REALM_JOB_CONTENT_SINK); + preHandleJobParamSet(paramSet); + if (Objects.nonNull(paramSet)){ + Arrays.asList(sinkMappings()).forEach(define -> paramSet.addNonNull(define.get(paramSet))); + paramSet.add(SQL_COLUMN.newParam(subExchangisJob)); + } + } + + @Override + public String dataSourceType() { + return "mysql"; + } + + @Override + public boolean acceptEngine(String engineType) { + return "datax".equalsIgnoreCase(engineType); + } + + protected JobParamDefine[] sourceMappings(){ + return new JobParamDefine[]{USERNAME, PASSWORD, SOURCE_DATABASE, + SOURCE_HOST, SOURCE_PORT, SOURCE_PARAMS_MAP}; + } + + protected JobParamDefine[] sinkMappings(){ + return new JobParamDefine[]{SINK_HOST, SINK_PORT, USERNAME, PASSWORD, + SINK_DATABASE, SINK_TABLE, SINK_PARAMS_MAP}; + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/OracleDataxSubExchangisJobHandler.java 
b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/OracleDataxSubExchangisJobHandler.java new file mode 100644 index 000000000..71cc41e69 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/OracleDataxSubExchangisJobHandler.java @@ -0,0 +1,122 @@ +package com.webank.wedatasphere.exchangis.job.server.builder.transform.handlers; + +import com.webank.wedatasphere.exchangis.datasource.core.utils.Json; +import com.webank.wedatasphere.exchangis.job.builder.ExchangisJobBuilderContext; +import com.webank.wedatasphere.exchangis.job.domain.SubExchangisJob; + +import com.webank.wedatasphere.exchangis.job.domain.params.JobParamDefine; +import com.webank.wedatasphere.exchangis.job.domain.params.JobParamSet; +import com.webank.wedatasphere.exchangis.job.domain.params.JobParams; +import com.webank.wedatasphere.exchangis.job.server.builder.JobParamConstraints; +import com.webank.wedatasphere.exchangis.job.server.utils.SQLCommandUtils; +import org.apache.linkis.common.exception.ErrorException; + +import java.util.*; +import java.util.stream.Collectors; + +/** + * Oracle in datax + */ +public class OracleDataxSubExchangisJobHandler extends AuthEnabledSubExchangisJobHandler { + + /** + * Host + */ + private static final JobParamDefine SOURCE_HOST = JobParams.define("connection[0].jdbcUrl[0].host", JobParamConstraints.HOST); + private static final JobParamDefine SINK_HOST = JobParams.define("connection[0].jdbcUrl.host", JobParamConstraints.HOST); + + /** + * Port + */ + private static final JobParamDefine SOURCE_PORT = JobParams.define("connection[0].jdbcUrl[0].port", JobParamConstraints.PORT); + private static final JobParamDefine SINK_PORT = JobParams.define("connection[0].jdbcUrl.port", JobParamConstraints.PORT); + + /** + * ServiceName + */ + private static final JobParamDefine SOURCE_SERVICE_NAME = JobParams.define("connection[0].jdbcUrl[0].serviceName", JobParamConstraints.SERVICE_NAME); + private static final JobParamDefine SINK_SERVICE_NAME = JobParams.define("connection[0].jdbcUrl.serviceName", JobParamConstraints.SERVICE_NAME); + + /** + * Table + */ + private static final JobParamDefine SOURCE_TABLE = JobParams.define("table", JobParamConstraints.TABLE); + private static final JobParamDefine SINK_TABLE = JobParams.define("connection[0].table[0]", JobParamConstraints.TABLE); + + /** + * Connect params + */ + private static final JobParamDefine> SOURCE_PARAMS_MAP = JobParams.define("connection[0].jdbcUrl[0].connParams", JobParamConstraints.CONNECT_PARAMS, + connectParams -> Json.fromJson(connectParams, Map.class), String.class); + private static final JobParamDefine> SINK_PARAMS_MAP = JobParams.define("connection[0].jdbcUrl.connParams", JobParamConstraints.CONNECT_PARAMS, + connectParams -> Json.fromJson(connectParams, Map.class), String.class); + + /** + * Where condition + */ + private static final JobParamDefine SOURCE_WHERE_CONDITION = JobParams.define(JobParamConstraints.WHERE); + + + /** + * Query sql + */ + private static final JobParamDefine QUERY_SQL = JobParams.define("connection[0].querySql[0]", job -> { + JobParamSet sourceParams = job.getRealmParams(SubExchangisJob.REALM_JOB_CONTENT_SOURCE); + String where = SOURCE_WHERE_CONDITION.getValue(sourceParams); + List columns = job.getSourceColumns().stream().map(SubExchangisJob.ColumnDefine::getName).collect(Collectors.toList()); + if (columns.isEmpty()) { + columns.add("*"); + } + 
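+ // Concatenate the SELECT statement from the resolved table, the source column list (falling back to '*') and the optional where-clause, e.g. SELECT id, name FROM my_table WHERE id > 0 (hypothetical table and columns).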
return SQLCommandUtils.contactSql(Collections.singletonList(sourceParams + .get(JobParamConstraints.TABLE).getValue()), null, columns, null, where); + }, SubExchangisJob.class); + + /** + * SQL column + */ + private static final JobParamDefine> SQL_COLUMN = JobParams.define("column", job -> { + List columns = job.getSinkColumns().stream().map(SubExchangisJob.ColumnDefine::getName).collect(Collectors.toList()); + if (columns.isEmpty()) { + columns.add("*"); + } + return columns; + }, SubExchangisJob.class); + + @Override + public void handleJobSource(SubExchangisJob subExchangisJob, ExchangisJobBuilderContext ctx) throws ErrorException { + JobParamSet paramSet = subExchangisJob.getRealmParams(SubExchangisJob.REALM_JOB_CONTENT_SOURCE); + if (Objects.nonNull(paramSet)) { + Arrays.asList(sourceMappings()).forEach(define -> paramSet.addNonNull(define.get(paramSet))); + paramSet.add(QUERY_SQL.newParam(subExchangisJob)); + } + } + + @Override + public void handleJobSink(SubExchangisJob subExchangisJob, ExchangisJobBuilderContext ctx) throws ErrorException { + JobParamSet paramSet = subExchangisJob.getRealmParams(SubExchangisJob.REALM_JOB_CONTENT_SINK); + if (Objects.nonNull(paramSet)) { + Arrays.asList(sinkMappings()).forEach(define -> paramSet.addNonNull(define.get(paramSet))); + paramSet.add(SQL_COLUMN.newParam(subExchangisJob)); + } + } + + @Override + public String dataSourceType() { + return "oracle"; + } + + @Override + public boolean acceptEngine(String engineType) { + return "datax".equalsIgnoreCase(engineType); + } + + private JobParamDefine[] sourceMappings() { + return new JobParamDefine[]{USERNAME, PASSWORD, SOURCE_TABLE, SOURCE_WHERE_CONDITION, + SOURCE_HOST, SOURCE_PORT, SOURCE_SERVICE_NAME, SOURCE_PARAMS_MAP}; + } + + public JobParamDefine[] sinkMappings() { + return new JobParamDefine[]{USERNAME, PASSWORD, SINK_TABLE, + SINK_HOST, SINK_PORT, SINK_SERVICE_NAME, SINK_PARAMS_MAP}; + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/StarRocksDataxSubExchangisJobHandler.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/StarRocksDataxSubExchangisJobHandler.java new file mode 100644 index 000000000..93e10e0f3 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/StarRocksDataxSubExchangisJobHandler.java @@ -0,0 +1,107 @@ +package com.webank.wedatasphere.exchangis.job.server.builder.transform.handlers; + +import com.webank.wedatasphere.exchangis.datasource.core.utils.Json; +import com.webank.wedatasphere.exchangis.job.builder.ExchangisJobBuilderContext; +import com.webank.wedatasphere.exchangis.job.domain.SubExchangisJob; +import com.webank.wedatasphere.exchangis.job.domain.params.JobParam; +import com.webank.wedatasphere.exchangis.job.domain.params.JobParamDefine; +import com.webank.wedatasphere.exchangis.job.domain.params.JobParamSet; +import com.webank.wedatasphere.exchangis.job.domain.params.JobParams; +import com.webank.wedatasphere.exchangis.job.server.builder.JobParamConstraints; +import org.apache.commons.lang3.StringUtils; +import org.apache.linkis.common.exception.ErrorException; + +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.stream.Collectors; + +/** + * StarRocks in datax + */ +public class StarRocksDataxSubExchangisJobHandler extends 
AuthEnabledSubExchangisJobHandler { + + /** + * Host + */ + private static final JobParamDefine SINK_HOST = JobParams.define("connection[0].host", JobParamConstraints.HOST); + + /** + * TCP_Port + */ + private static final JobParamDefine SINK_PORT = JobParams.define("connection[0].port", JobParamConstraints.PORT); + + /** + * HTTP_Port + */ + private static final JobParamDefine SINK_LOAD_URL = JobParams.define("loadUrl[0]", paramSet -> { + JobParam host = paramSet.get("connection[0].host"); + JobParam httpPort = paramSet.get(JobParamConstraints.HTTP_PORT); + if (Objects.nonNull(host) && StringUtils.isNotBlank(host.getValue()) && + Objects.nonNull(httpPort) && StringUtils.isNotBlank(httpPort.getValue())) { + return host.getValue() + ":" + httpPort.getValue(); + } + return null; + }); + + /** + * Database + */ + private static final JobParamDefine SINK_DATABASE = JobParams.define("database", JobParamConstraints.DATABASE); + + /** + * Table + */ + private static final JobParamDefine SINK_TABLE = JobParams.define("table", JobParamConstraints.TABLE); + + /** + * Connect params + */ + private static final JobParamDefine> SINK_PARAMS_MAP = JobParams.define("connection[0].connParams", JobParamConstraints.CONNECT_PARAMS, + connectParams -> Json.fromJson(connectParams, Map.class), String.class); + + /** + * SQL column + */ + private static final JobParamDefine> SQL_COLUMN = JobParams.define("column", job -> { + List columns = job.getSinkColumns().stream().map(SubExchangisJob.ColumnDefine::getName).collect(Collectors.toList()); + if (columns.isEmpty()){ + columns.add("*"); + } + return columns; + }, SubExchangisJob.class); + + @Override + public void handleJobSource(SubExchangisJob subExchangisJob, ExchangisJobBuilderContext ctx) throws ErrorException { + } + + @Override + public void handleJobSink(SubExchangisJob subExchangisJob, ExchangisJobBuilderContext ctx) throws ErrorException { + JobParamSet paramSet = subExchangisJob.getRealmParams(SubExchangisJob.REALM_JOB_CONTENT_SINK); + if (Objects.nonNull(paramSet)){ + JobParamDefine[] jobParamDefines = sinkMappings(); + Arrays.asList(jobParamDefines).forEach( + define -> paramSet.addNonNull(define.get(paramSet)) + ); + } + } + + @Override + public String dataSourceType() { + return "starrocks"; + } + + @Override + public boolean acceptEngine(String engineType) { + return "datax".equalsIgnoreCase(engineType); + } + + private JobParamDefine[] sourceMappings(){ + return null; + } + + public JobParamDefine[] sinkMappings(){ + return new JobParamDefine[]{USERNAME, PASSWORD, SINK_HOST, SINK_PORT, SINK_LOAD_URL, SINK_DATABASE, SINK_TABLE, SINK_PARAMS_MAP}; + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/SubExchangisJobHandler.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/SubExchangisJobHandler.java new file mode 100644 index 000000000..195755e78 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/SubExchangisJobHandler.java @@ -0,0 +1,49 @@ +package com.webank.wedatasphere.exchangis.job.server.builder.transform.handlers; + +import com.webank.wedatasphere.exchangis.job.builder.ExchangisJobBuilderContext; +import com.webank.wedatasphere.exchangis.job.domain.SubExchangisJob; +import org.apache.linkis.common.exception.ErrorException; + +/** + * JobHandler + */ +public interface SubExchangisJobHandler { + 
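+ /*
 * A minimal sketch of a custom handler (hypothetical example, not part of this patch), assuming the
 * datax engine and the AuthEnabledSubExchangisJobHandler base class from this module:
 *
 * public class PostgreSqlDataxSubExchangisJobHandler extends AuthEnabledSubExchangisJobHandler {
 *     @Override public String dataSourceType() { return "postgresql"; }
 *     @Override public boolean acceptEngine(String engineType) { return "datax".equalsIgnoreCase(engineType); }
 *     @Override public void handleJobSource(SubExchangisJob job, ExchangisJobBuilderContext ctx) { }
 *     @Override public void handleJobSink(SubExchangisJob job, ExchangisJobBuilderContext ctx) { }
 * }
 *
 * Concrete handlers are discovered by classpath reflection in GenericExchangisTransformJobBuilder#initHandlers,
 * so no explicit registration is required.
 */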
+ String DEFAULT_DATA_SOURCE_TYPE = "default"; + /** + * Associated data source type + * @return type string + */ + String dataSourceType(); + + /** + * Whether this handler accepts the given engine type + * @param engineType engine type + * @return boolean + */ + default boolean acceptEngine(String engineType){ + return true; + } + /** + * Handle source + * @param subExchangisJob sub job + * @param ctx context + */ + void handleSource(SubExchangisJob subExchangisJob, ExchangisJobBuilderContext ctx) throws ErrorException; + + + /** + * Handle sink + * @param subExchangisJob sub job + * @param ctx context + */ + void handleSink(SubExchangisJob subExchangisJob, ExchangisJobBuilderContext ctx) throws ErrorException; + + /** + * Order of the handler in the chain + * @return value + */ + default int order(){ + return Integer.MAX_VALUE; + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/column/AutoColumnSubExchangisJobHandler.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/column/AutoColumnSubExchangisJobHandler.java new file mode 100644 index 000000000..416886004 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/column/AutoColumnSubExchangisJobHandler.java @@ -0,0 +1,148 @@ +package com.webank.wedatasphere.exchangis.job.server.builder.transform.handlers.column; + +import com.webank.wedatasphere.exchangis.datasource.core.domain.MetaColumn; +import com.webank.wedatasphere.exchangis.datasource.core.exception.ExchangisDataSourceException; +import com.webank.wedatasphere.exchangis.datasource.core.service.MetadataInfoService; +import com.webank.wedatasphere.exchangis.job.builder.ExchangisJobBuilderContext; +import com.webank.wedatasphere.exchangis.job.domain.SubExchangisJob; +import com.webank.wedatasphere.exchangis.job.domain.params.JobParam; +import com.webank.wedatasphere.exchangis.job.domain.params.JobParamDefine; +import com.webank.wedatasphere.exchangis.job.domain.params.JobParamSet; +import com.webank.wedatasphere.exchangis.job.domain.params.JobParams; +import com.webank.wedatasphere.exchangis.job.exception.ExchangisJobException; +import com.webank.wedatasphere.exchangis.job.server.builder.JobParamConstraints; +import com.webank.wedatasphere.exchangis.job.server.builder.transform.handlers.AbstractLoggingSubExchangisJobHandler; +import com.webank.wedatasphere.exchangis.job.utils.ColumnDefineUtils; +import org.apache.commons.lang.StringUtils; +import org.apache.linkis.common.exception.ErrorException; + +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.stream.Collectors; + +/** + * Provides methods to autofill columns + */ +public abstract class AutoColumnSubExchangisJobHandler extends AbstractLoggingSubExchangisJobHandler { + /** + * Auto type name + */ + private static final String AUTO_TYPE = "[Auto]"; + + /** + * Database + */ + private static final JobParamDefine<String> DATABASE = JobParams.define(JobParamConstraints.DATABASE); + + /** + * Table + */ + private static final JobParamDefine<String> TABLE = JobParams.define(JobParamConstraints.TABLE); + + @Override + public void handleJobSource(SubExchangisJob subExchangisJob, ExchangisJobBuilderContext ctx) throws ErrorException { + // Ignore + } + + @Override + public void handleJobSink(SubExchangisJob subExchangisJob, ExchangisJobBuilderContext ctx) throws ErrorException { + // Ignore + } + + /** + * Handle source columns + * @param columns columns + */ + protected void handleSrcColumns(SubExchangisJob subExchangisJob, ExchangisJobBuilderContext ctx, + List<SubExchangisJob.ColumnDefine> columns) { + if (autoColumn()){ + boolean complete = Objects.nonNull(columns) && !columns.isEmpty() && + columns.stream().noneMatch(column -> StringUtils.isBlank(column.getType()) || column.getType().equals(AUTO_TYPE) || null == column.getIndex()); + if (!complete){ + JobParamSet paramSet = subExchangisJob.getRealmParams(SubExchangisJob.REALM_JOB_CONTENT_SOURCE); + doFillColumns(paramSet, columns); + } + } + } + + /** + * Handle sink columns + * @param columns columns + */ + protected void handleSinkColumns(SubExchangisJob subExchangisJob, ExchangisJobBuilderContext ctx, + List<SubExchangisJob.ColumnDefine> columns){ + if (autoColumn()){ + boolean complete = Objects.nonNull(columns) && !columns.isEmpty() && + columns.stream().noneMatch(column -> StringUtils.isBlank(column.getType()) || column.getType().equals(AUTO_TYPE)); + if (!complete){ + JobParamSet paramSet = subExchangisJob.getRealmParams(SubExchangisJob.REALM_JOB_CONTENT_SINK); + doFillColumns(paramSet, columns); + } + } + } + + + /** + * Fill the columns from metadata + * @param paramSet param set + * @param columns columns + */ + protected void doFillColumns(JobParamSet paramSet, List<SubExchangisJob.ColumnDefine> columns){ + List<MetaColumn> metaColumns = getMetaColumns(paramSet); + if (Objects.nonNull(metaColumns) && !metaColumns.isEmpty()){ + if (columns.isEmpty()){ + for (MetaColumn metaColumn : metaColumns){ + SubExchangisJob.ColumnDefine columnDefine = ColumnDefineUtils + .getColumn(metaColumn.getName(), metaColumn.getType()); + columnDefine.setIndex(metaColumn.getIndex()); + columns.add(columnDefine); + } + } else { + completeColumns(columns, metaColumns); + } + } + } + + /** + * Get columns from the metadata server + * @param paramSet param set + * @return columns + */ + protected List<MetaColumn> getMetaColumns(JobParamSet paramSet){ + String database = DATABASE.getValue(paramSet); + String table = TABLE.getValue(paramSet); + JobParam<String> dataSourceId = paramSet.get(JobParamConstraints.DATA_SOURCE_ID); + try { + return Objects.requireNonNull(getBean(MetadataInfoService.class)).getColumns(getJobBuilderContext().getOriginalJob().getCreateUser(), + Long.valueOf(dataSourceId.getValue()), database, table); + } catch (ExchangisDataSourceException e) { + throw new ExchangisJobException.Runtime(e.getErrCode(), e.getMessage(), e.getCause()); + } + } + /** + * Complete the column definitions with metadata + * @param columns columns + * @param metaColumns meta columns + */ + protected final void completeColumns(List<SubExchangisJob.ColumnDefine> columns, List<MetaColumn> metaColumns){ + Map<String, MetaColumn> metaColumnMap = metaColumns.stream().collect(Collectors.toMap( + MetaColumn::getName, metaColumn -> metaColumn, (left, right) -> left + )); + for (int i = 0; i < columns.size(); i++){ + SubExchangisJob.ColumnDefine column = columns.get(i); + String name = column.getName(); + MetaColumn metaColumn = metaColumnMap.get(name); + if (Objects.isNull(metaColumn)){ + throw new ExchangisJobException.Runtime(-1, "Unable to find matching column: [" + name + "] (no matching column found in the table)", null); + } + columns.set(i, ColumnDefineUtils.getColumn(name, metaColumn.getType(), metaColumn.getIndex())); + } + } + + /** + * Whether to auto fill columns + * @return bool + */ + protected abstract boolean autoColumn(); +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/column/EsAutoColumnJobHandler.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/column/EsAutoColumnJobHandler.java new file mode 100644 
index 000000000..dafab1ead --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/column/EsAutoColumnJobHandler.java @@ -0,0 +1,21 @@ +package com.webank.wedatasphere.exchangis.job.server.builder.transform.handlers.column; + +/** + * ES auto column handler + */ +public class EsAutoColumnJobHandler extends AutoColumnSubExchangisJobHandler{ + @Override + public String dataSourceType() { + return "elasticsearch"; + } + + @Override + public int order() { + return 0; + } + + @Override + protected boolean autoColumn() { + return true; + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/column/HiveAutoColumnJobHandler.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/column/HiveAutoColumnJobHandler.java new file mode 100644 index 000000000..2901fb07d --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/column/HiveAutoColumnJobHandler.java @@ -0,0 +1,26 @@ +package com.webank.wedatasphere.exchangis.job.server.builder.transform.handlers.column; + + + +/** + * Hive auto column handler + */ +public class HiveAutoColumnJobHandler extends AutoColumnSubExchangisJobHandler { + + + @Override + public String dataSourceType() { + return "hive"; + } + + @Override + public int order() { + return 0; + } + + @Override + protected boolean autoColumn() { + return true; + } + +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/column/MongoAutoColumnJobHandler.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/column/MongoAutoColumnJobHandler.java new file mode 100644 index 000000000..c3abba033 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/column/MongoAutoColumnJobHandler.java @@ -0,0 +1,22 @@ +package com.webank.wedatasphere.exchangis.job.server.builder.transform.handlers.column; + +/** + * Mongo auto column handler + */ +public class MongoAutoColumnJobHandler extends AutoColumnSubExchangisJobHandler{ + + @Override + public String dataSourceType() { + return "mongodb"; + } + + @Override + public int order() { + return 0; + } + + @Override + protected boolean autoColumn() { + return true; + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/column/MySQLAutoColumnJobHandler.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/column/MySQLAutoColumnJobHandler.java new file mode 100644 index 000000000..20ce2de16 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/column/MySQLAutoColumnJobHandler.java @@ -0,0 +1,22 @@ +package com.webank.wedatasphere.exchangis.job.server.builder.transform.handlers.column; + +/** + * Mysql auto column handler + */ +public class MySQLAutoColumnJobHandler extends AutoColumnSubExchangisJobHandler { + @Override + protected boolean autoColumn() { + return false; + } + + @Override + public String dataSourceType() { + return "mysql"; + } + + + @Override + 
public int order() { + return 0; + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/column/OracleAutoColumnJobHandler.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/column/OracleAutoColumnJobHandler.java new file mode 100644 index 000000000..22866c48e --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/column/OracleAutoColumnJobHandler.java @@ -0,0 +1,21 @@ +package com.webank.wedatasphere.exchangis.job.server.builder.transform.handlers.column; + +/** + * Oracle auto column handler + */ +public class OracleAutoColumnJobHandler extends AutoColumnSubExchangisJobHandler{ + @Override + public String dataSourceType() { + return "oracle"; + } + + @Override + public int order() { + return 0; + } + + @Override + protected boolean autoColumn() { + return true; + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/column/StarRocksAutoColumnJobHandler.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/column/StarRocksAutoColumnJobHandler.java new file mode 100644 index 000000000..43ea42989 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/column/StarRocksAutoColumnJobHandler.java @@ -0,0 +1,23 @@ +package com.webank.wedatasphere.exchangis.job.server.builder.transform.handlers.column; + +/** + * StarRocks auto column handler + */ +public class StarRocksAutoColumnJobHandler extends AutoColumnSubExchangisJobHandler{ + + @Override + public String dataSourceType() { + return "starrocks"; + } + + @Override + public int order() { + return 0; + } + + @Override + protected boolean autoColumn() { + return true; + } + +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/mappings/AbstractExchangisJobParamsMapping.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/mappings/AbstractExchangisJobParamsMapping.java new file mode 100644 index 000000000..cb9a2db1e --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/mappings/AbstractExchangisJobParamsMapping.java @@ -0,0 +1,72 @@ +package com.webank.wedatasphere.exchangis.job.server.builder.transform.mappings; + +import com.webank.wedatasphere.exchangis.job.builder.ExchangisJobBuilderContext; +import com.webank.wedatasphere.exchangis.job.domain.SubExchangisJob; +import com.webank.wedatasphere.exchangis.job.domain.params.JobParamDefine; +import com.webank.wedatasphere.exchangis.job.domain.params.JobParamSet; +import com.webank.wedatasphere.exchangis.job.server.builder.transform.handlers.AuthEnabledSubExchangisJobHandler; +import org.apache.linkis.common.exception.ErrorException; + +import java.util.Arrays; +import java.util.Objects; +import java.util.Optional; +import java.util.function.Consumer; + +/** + * Implement "SubExchangisJobHandler", only handle the params of job + */ +public abstract class AbstractExchangisJobParamsMapping extends AuthEnabledSubExchangisJobHandler { + + @Override + public void handleJobSource(SubExchangisJob 
subExchangisJob, ExchangisJobBuilderContext ctx) throws ErrorException { + subExchangisJob.getSourceColumns().forEach(srcColumnMappingFunc()); + JobParamSet paramSet = subExchangisJob.getRealmParams(SubExchangisJob.REALM_JOB_CONTENT_SOURCE); + if (Objects.nonNull(paramSet)){ +// info("SourceParamSet: {}", Json.toJson(paramSet.toList().stream().collect( +// Collectors.toMap(JobParam::getStrKey, JobParam::getValue)), null)); + Optional.ofNullable(sourceMappings()).ifPresent(jobParamDefines -> + Arrays.asList(jobParamDefines).forEach(define -> paramSet.addNonNull(define.get(paramSet)))); + } + } + + @Override + public void handleJobSink(SubExchangisJob subExchangisJob, ExchangisJobBuilderContext ctx) throws ErrorException { + subExchangisJob.getSinkColumns().forEach(sinkColumnMappingFunc()); + JobParamSet paramSet = subExchangisJob.getRealmParams(SubExchangisJob.REALM_JOB_CONTENT_SINK); + if (Objects.nonNull(paramSet)){ +// info("SinkParamSet: {}", Json.toJson(paramSet.toList().stream().collect( +// Collectors.toMap(JobParam::getStrKey, JobParam::getValue)), null)); + Optional.ofNullable(sinkMappings()).ifPresent(jobParamDefines -> + Arrays.asList(jobParamDefines).forEach(define -> paramSet.addNonNull(define.get(paramSet)))); + } + } + + /** + * Get param definition of source mapping + * @return definitions + */ + public abstract JobParamDefine[] sourceMappings(); + + + /** + * Get param definition of sink mapping + * @return + */ + public abstract JobParamDefine[] sinkMappings(); + + /** + * Source columns mapping function + * @return consumer function + */ + protected Consumer srcColumnMappingFunc(){ + return columnDefine -> {}; + } + + /** + * Sink columns mapping function + * @return consumer function + */ + protected Consumer sinkColumnMappingFunc(){ + return columnDefine -> {}; + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/mappings/EsDataxParamsMapping.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/mappings/EsDataxParamsMapping.java new file mode 100644 index 000000000..1afe54842 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/mappings/EsDataxParamsMapping.java @@ -0,0 +1,110 @@ +package com.webank.wedatasphere.exchangis.job.server.builder.transform.mappings; + +import com.webank.wedatasphere.exchangis.datasource.core.utils.Json; +import com.webank.wedatasphere.exchangis.job.domain.params.JobParamDefine; +import com.webank.wedatasphere.exchangis.job.domain.params.JobParams; +import com.webank.wedatasphere.exchangis.job.server.builder.JobParamConstraints; +import org.apache.commons.lang3.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +/** + * Elastic search datax mapping + */ +public class EsDataxParamsMapping extends AbstractExchangisJobParamsMapping{ + + private static final Logger LOG = LoggerFactory.getLogger(EsDataxParamsMapping.class); + + /** + * Elastic search urls + */ + private static final JobParamDefine ELASTIC_URLS = JobParams.define("elasticUrls", "elasticUrls", urls -> { + List elasticUrls = Json.fromJson(urls, List.class, String.class); + if (Objects.nonNull(elasticUrls)){ + return StringUtils.join(elasticUrls, ","); + } + return null; + }, String.class); + + /** + * Index name + */ + private static final 
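Each JobParamDefine returned by sourceMappings()/sinkMappings() is evaluated against the realm param set and folded back in via addNonNull, so a concrete mapping is mostly a declarative list of definitions. A minimal sketch of such a subclass follows; the "demo" type and keys are invented, and since the patch text appears to have dropped generic type arguments (e.g. JobParamDefine<String>), raw types are kept for consistency with the code above:

import com.webank.wedatasphere.exchangis.job.domain.params.JobParamDefine;
import com.webank.wedatasphere.exchangis.job.domain.params.JobParams;
import com.webank.wedatasphere.exchangis.job.server.builder.JobParamConstraints;

public class DemoParamsMapping extends AbstractExchangisJobParamsMapping {

    // Rename the job's "database" param to the engine's "dbName" key (hypothetical key)
    private static final JobParamDefine DB_NAME = JobParams.define("dbName", JobParamConstraints.DATABASE);

    // Supply a constant default value
    private static final JobParamDefine CHARSET = JobParams.define("charset", () -> "utf-8");

    @Override
    public String dataSourceType() {
        return "demo";
    }

    @Override
    public boolean acceptEngine(String engineType) {
        return "datax".equalsIgnoreCase(engineType);
    }

    @Override
    public JobParamDefine[] sourceMappings() {
        return new JobParamDefine[]{DB_NAME, CHARSET};
    }

    @Override
    public JobParamDefine[] sinkMappings() {
        return new JobParamDefine[0];
    }
}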
JobParamDefine INDEX = JobParams.define("index", JobParamConstraints.DATABASE); + + /** + * Index type + */ + private static final JobParamDefine TYPE = JobParams.define("type", JobParamConstraints.TABLE); + + /** + * If in security connection + */ + private static final JobParamDefine SECURE = JobParams.define("secure"); + + /** + * Max merge count + */ + private static final JobParamDefine> SETTINGS = JobParams.define("settings", + () -> { + Map settings = new HashMap<>(); + settings.put("index.merge.scheduler.max_merge_count", 100); + return settings; + }); + + /** + * Clean up + */ + private static final JobParamDefine CLEANUP = JobParams.define("cleanUp", () -> "false"); + + /** + * Max pool size + */ + private static final JobParamDefine CLIENT_MAX_POOL_SIZE = JobParams.define("clientConfig.maxPoolSize", () -> "1"); + + /** + * Socket time out + */ + private static final JobParamDefine CLIENT_SOCK_TIMEOUT = JobParams.define("clientConfig.sockTimeout", () -> "60000"); + + /** + * Connection timeout + */ + private static final JobParamDefine CLIENT_CONN_TIMEOUT = JobParams.define("clientConfig.connTimeout", () -> "60000"); + + /** + * Timeout + */ + private static final JobParamDefine CLIENT_TIMEOUT = JobParams.define("clientConfig.timeout", () -> "60000"); + + /** + * Compress + */ + private static final JobParamDefine CLIENT_COMPRESS = JobParams.define("clientConfig.compress", () -> "true"); + @Override + public String dataSourceType() { + return "elasticsearch"; + } + + @Override + public boolean acceptEngine(String engineType) { + return "datax".equalsIgnoreCase(engineType); + } + + @Override + public JobParamDefine[] sourceMappings() { + return new JobParamDefine[0]; + } + + @Override + public JobParamDefine[] sinkMappings() { + return new JobParamDefine[]{USERNAME, PASSWORD, ELASTIC_URLS, INDEX, TYPE, SECURE, + SETTINGS, CLEANUP, CLIENT_MAX_POOL_SIZE, CLIENT_SOCK_TIMEOUT, CLIENT_CONN_TIMEOUT, + CLIENT_TIMEOUT, CLIENT_COMPRESS + }; + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/mappings/HiveDataxParamsMapping.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/mappings/HiveDataxParamsMapping.java new file mode 100644 index 000000000..ca3e36cff --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/mappings/HiveDataxParamsMapping.java @@ -0,0 +1,349 @@ +package com.webank.wedatasphere.exchangis.job.server.builder.transform.mappings; + +import com.webank.wedatasphere.exchangis.datasource.core.exception.ExchangisDataSourceException; +import com.webank.wedatasphere.exchangis.datasource.core.service.MetadataInfoService; +import com.webank.wedatasphere.exchangis.job.domain.SubExchangisJob; +import com.webank.wedatasphere.exchangis.job.domain.params.JobParam; +import com.webank.wedatasphere.exchangis.job.domain.params.JobParamDefine; +import com.webank.wedatasphere.exchangis.job.domain.params.JobParams; +import com.webank.wedatasphere.exchangis.job.exception.ExchangisJobException; +import com.webank.wedatasphere.exchangis.job.server.builder.JobParamConstraints; +import org.apache.commons.lang3.StringUtils; + +import java.net.URI; +import java.net.URISyntaxException; +import java.util.*; +import java.util.concurrent.atomic.AtomicReference; +import java.util.function.Consumer; +import java.util.stream.Collectors; + +/** + * Hive datax mapping + */ +public class 
HiveDataxParamsMapping extends AbstractExchangisJobParamsMapping{ + + private static final Map FIELD_MAP = new HashMap<>(); + + private static final BitSet CHAR_TO_ESCAPE = new BitSet(128); + + private static final String[] SOURCE_SUPPORT_FILETYPE = new String[]{"TEXT", "ORC","RC","SEQ","CSV"}; + + private static final String[] SINK_SUPPORT_FILETYPE = new String[]{"ORC", "TEXT"}; + + private enum Type { + /** + * types that supported by DataX + */ + STRING, LONG, BOOLEAN, DOUBLE, DATE, BINARY, OBJECT + } + //hive type => dataX type + static{ + FIELD_MAP.put("TINYINT", Type.LONG); + FIELD_MAP.put("SMALLINT", Type.LONG); + FIELD_MAP.put("INT", Type.LONG); + FIELD_MAP.put("BIGINT", Type.LONG); + FIELD_MAP.put("FLOAT", Type.DOUBLE); + FIELD_MAP.put("DOUBLE", Type.DOUBLE); + FIELD_MAP.put("DECIMAL", Type.DOUBLE); + FIELD_MAP.put("STRING", Type.STRING); + FIELD_MAP.put("CHAR", Type.STRING); + FIELD_MAP.put("VARCHAR", Type.STRING); + FIELD_MAP.put("STRUCT", Type.STRING); + FIELD_MAP.put("MAP", Type.OBJECT); + FIELD_MAP.put("ARRAY", Type.OBJECT); + FIELD_MAP.put("UNION", Type.STRING); + FIELD_MAP.put("BINARY", Type.BINARY); + FIELD_MAP.put("BOOLEAN", Type.BOOLEAN); + FIELD_MAP.put("DATE", Type.DATE); + FIELD_MAP.put("TIMESTAMP", Type.DATE); + } + + /** + * Hive database + */ + private static final JobParamDefine HIVE_DATABASE = JobParams.define("hiveDatabase", JobParamConstraints.DATABASE); + + /** + * Hive table + */ + private static final JobParamDefine HIVE_TABLE = JobParams.define("hiveTable", JobParamConstraints.TABLE); + + /** + * Hive uris + */ + private static final JobParamDefine HIVE_URIS = JobParams.define("hiveMetastoreUris", "uris"); + + /** + * Data file name (prefix) + */ + private static final JobParamDefine DATA_FILE_NAME = JobParams.define("fileName", () -> "exch_hive_"); + /** + * Encoding + */ + private static final JobParamDefine ENCODING = JobParams.define("encoding", paramSet -> { + JobParam encodingParam = paramSet.get(JobParamConstraints.ENCODING); + if (Objects.nonNull(encodingParam)){ + return encodingParam.getValue(); + } + return "utf-8"; + }); + + /** + * Null format + */ + private static final JobParamDefine NULL_FORMAT = JobParams.define("nullFormat", paramSet -> { + JobParam nullFormatParam = paramSet.get(JobParamConstraints.NULL_FORMAT); + if (Objects.nonNull(nullFormatParam)){ + return nullFormatParam.getValue(); + } + return "\\N"; + }); + /** + * Table partition + */ + private static final JobParamDefine> TABLE_PARTITION = JobParams.define(JobParamConstraints.PARTITION); + + /** + * Table properties + */ + private static final JobParamDefine> HIVE_TABLE_PROPS = JobParams.define("tableProps", paramSet -> { + String database = HIVE_DATABASE.getValue(paramSet); + String table = HIVE_TABLE.getValue(paramSet); + JobParam dataSourceId = paramSet.get(JobParamConstraints.DATA_SOURCE_ID); + try { + return Objects.requireNonNull(getBean(MetadataInfoService.class)).getTableProps(getJobBuilderContext().getOriginalJob().getCreateUser(), + Long.valueOf(dataSourceId.getValue()), database, table); + } catch (ExchangisDataSourceException e) { + throw new ExchangisJobException.Runtime(e.getErrCode(), e.getMessage(), e.getCause()); + } + }); + + /** + * Partition keys + */ + private static final JobParamDefine> PARTITION_KEYS = JobParams.define("partitionKeys", paramSet -> { + JobParam dataSourceId = paramSet.get(JobParamConstraints.DATA_SOURCE_ID); + List partitionKeys = new ArrayList<>(); + String database = HIVE_DATABASE.getValue(paramSet); + String table = 
HIVE_TABLE.getValue(paramSet); + try { + partitionKeys = Objects.requireNonNull(getBean(MetadataInfoService.class)).getPartitionKeys(getJobBuilderContext().getOriginalJob().getCreateUser(), + Long.parseLong(dataSourceId.getValue()), database, table); + } catch (ExchangisDataSourceException e) { + throw new ExchangisJobException.Runtime(e.getErrCode(), e.getMessage(), e.getCause()); + } + return partitionKeys; + }); + /** + * Partition values + */ + private static final JobParamDefine PARTITION_VALUES = JobParams.define("partitionValues", paramSet -> { + Map partitions = Optional.ofNullable(TABLE_PARTITION.getValue(paramSet)).orElse(new HashMap<>()); + //Try to find actual partition from table properties + List partitionKeys = PARTITION_KEYS.getValue(paramSet); + String[] partitionColumns = Objects.isNull(partitionKeys)? new String[0]: partitionKeys.toArray(new String[0]); + if (partitionColumns.length > 0 && partitions.size() != partitionColumns.length){ + throw new ExchangisJobException.Runtime(-1, "Unmatched partition list: [" + + StringUtils.join(partitionColumns, ",") + "]", null); + } + if (partitionColumns.length > 0){ + return Arrays.stream(partitionColumns).map(partitions::get).collect(Collectors.joining(",")); + } + return null; + }); + + /** + * Field delimiter + */ + private static final JobParamDefine FIELD_DELIMITER = JobParams.define("fieldDelimiter", paramSet -> + HIVE_TABLE_PROPS.getValue(paramSet).getOrDefault("field.delim", "\u0001")); + + /** + * File type + */ + private static final JobParamDefine FILE_TYPE = JobParams.define("fileType", paramSet -> { + Map tableProps = HIVE_TABLE_PROPS.getValue(paramSet); + AtomicReference fileType = new AtomicReference<>(); + Optional.ofNullable(tableProps.get("serialization.lib")).ifPresent(serLib -> fileType + .set(HiveV2FileType.serde(serLib))); + if (Objects.nonNull(fileType.get())){ + Optional.ofNullable(tableProps.get("file.inputformat")).ifPresent(inputFormat -> fileType + .set(HiveV2FileType.input(inputFormat))); + } + if (Objects.nonNull(fileType.get())){ + Optional.ofNullable(tableProps.get("file.outputformat")).ifPresent(outputFormat -> fileType + .set(HiveV2FileType.output(outputFormat))); + } + return Objects.nonNull(fileType.get())? 
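The partition handling above is order-sensitive: the configured partition map is re-emitted in the order of the table's partition keys, and a count mismatch fails fast with "Unmatched partition list". A standalone rendering of the same join logic (keys and values invented for illustration):

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

public class PartitionValuesDemo {
    public static void main(String[] args) {
        List<String> partitionKeys = Arrays.asList("ds", "env");
        Map<String, String> partitions = new HashMap<>();
        partitions.put("env", "prod");
        partitions.put("ds", "20220301");
        // Values are joined in partition-key order, exactly as PARTITION_VALUES does
        String joined = partitionKeys.stream()
                .map(partitions::get)
                .collect(Collectors.joining(","));
        System.out.println(joined); // prints "20220301,prod"
    }
}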
fileType.get() : HiveV2FileType.TEXT; + }); + + /** + * Data location + */ + private static final JobParamDefine DATA_LOCATION = JobParams.define("location", paramSet -> { + Map tableProps = HIVE_TABLE_PROPS.getValue(paramSet); + String path = tableProps.getOrDefault("location", ""); + String partitionValues = PARTITION_VALUES.getValue(paramSet); + if (StringUtils.isNotBlank(partitionValues)){ + String[] values = partitionValues.split(","); + String[] keys = PARTITION_KEYS.getValue(paramSet).toArray(new String[0]); + // Escape the path and value of partition + StringBuilder pathBuilder = new StringBuilder(path).append("/"); + for(int i = 0; i < keys.length; i++){ + if (i > 0){ + pathBuilder.append("/"); + } + pathBuilder.append(escapeHivePathName(keys[i])); + pathBuilder.append("="); + pathBuilder.append(escapeHivePathName(values[i])); + } + path = pathBuilder.toString(); + } + return path.replaceAll(" ", "%20"); + }); + + /** + * Compress name + */ + private static final JobParamDefine COMPRESS_NAME = JobParams.define("compress", paramSet -> { + HiveV2FileType fileType = FILE_TYPE.getValue(paramSet); + if (HiveV2FileType.TEXT.equals(fileType)){ + return "GZIP"; + } else if (HiveV2FileType.ORC.equals(fileType)){ + return "SNAPPY"; + } + return null; + }); + + /** + * Data path + */ + private static final JobParamDefine DATA_PATH = JobParams.define("path", paramSet -> { + String location = DATA_LOCATION.getValue(paramSet); + if (StringUtils.isNotBlank(location)){ + try { + return new URI(location).getPath(); + } catch (URISyntaxException e) { + warn("Unrecognized location: [{}]", location, e); + } + } + return null; + }); + + /** + * Hadoop config + */ + private static final JobParamDefine<Map<String, String>> HADOOP_CONF = JobParams.define("hadoopConfig", paramSet -> { + String uri = DATA_LOCATION.getValue(paramSet); + try { + // TODO get the other hdfs cluster with tab + return Objects.requireNonNull(getBean(MetadataInfoService.class)).getLocalHdfsInfo(uri); + } catch (ExchangisDataSourceException e) { + throw new ExchangisJobException.Runtime(e.getErrCode(), e.getDesc(), e.getCause()); + } + }); + + /** + * To "defaultFS" + */ + private static final JobParamDefine DEFAULT_FS = JobParams.define("defaultFS", paramSet -> + HADOOP_CONF.getValue(paramSet).get("fs.defaultFS")); + + private static final JobParamDefine IS_SINK_FILETYPE_SUPPORT = JobParams.define("sink.fileType.support", paramSet -> { + if (!isSupport(FILE_TYPE.getValue(paramSet).name(), SINK_SUPPORT_FILETYPE)){ + throw new ExchangisJobException.Runtime(-1, "Unsupported sink file type [" + FILE_TYPE.getValue(paramSet).name() + "] of hive", null); + } + return null; + }); + + private static final JobParamDefine IS_SOURCE_FILETYPE_SUPPORT = JobParams.define("source.fileType.support", paramSet -> { + if (!isSupport(FILE_TYPE.getValue(paramSet).name(), SOURCE_SUPPORT_FILETYPE)){ + throw new ExchangisJobException.Runtime(-1, "Unsupported source file type [" + FILE_TYPE.getValue(paramSet).name() + "] of hive", null); + } + return null; + }); + // TODO kerberos params + + /** + * Escape hive path name + * @param path path name + * @return path + */ + protected static String escapeHivePathName(String path) { + if (path != null && path.length() != 0) { + StringBuilder sb = new StringBuilder(); + + for(int i = 0; i < path.length(); ++i) { + char c = path.charAt(i); + if (c < CHAR_TO_ESCAPE.size() && CHAR_TO_ESCAPE.get(c)) { + sb.append('%'); + sb.append(String.format("%1$02X", (int) c)); + } else { + sb.append(c); + } + } + + return sb.toString(); + } else {
return "__HIVE_DEFAULT_PARTITION__"; + } + } + + protected static boolean isSupport(String value, String[] array){ + boolean isSupport = false; + for(String item: array){ + if(item.equalsIgnoreCase(value)){ + isSupport = true; + break; + } + } + return isSupport; + } + + @Override + public JobParamDefine[] sourceMappings() { + return new JobParamDefine[]{HIVE_DATABASE, HIVE_TABLE, ENCODING, + NULL_FORMAT, PARTITION_VALUES, FIELD_DELIMITER, FILE_TYPE, DATA_PATH, HADOOP_CONF, DEFAULT_FS, + IS_SOURCE_FILETYPE_SUPPORT}; + } + + @Override + public JobParamDefine[] sinkMappings() { + return new JobParamDefine[]{HIVE_DATABASE, HIVE_TABLE, ENCODING, + NULL_FORMAT, PARTITION_VALUES, FIELD_DELIMITER, FILE_TYPE, DATA_PATH, HADOOP_CONF, DEFAULT_FS, + COMPRESS_NAME, IS_SINK_FILETYPE_SUPPORT, HIVE_URIS, DATA_FILE_NAME}; + } + + @Override + protected Consumer srcColumnMappingFunc() { + return columnDefine -> { + String type = columnDefine.getType(); + Type t = FIELD_MAP.get(type.toUpperCase().replaceAll("[(<(][\\s\\S]+", "")); + if (null != t){ + columnDefine.setType(t.toString()); + if (t == Type.OBJECT){ + // Set the raw column type + columnDefine.setRawType(type); + } + } else { + columnDefine.setType(Type.STRING.toString()); + } + }; + } + + @Override + protected Consumer sinkColumnMappingFunc() { + return columnDefine -> columnDefine.setType(columnDefine.getType().replaceAll("[(<(][\\s\\S]+", "")); + } + + @Override + public String dataSourceType() { + return "hive"; + } + + @Override + public boolean acceptEngine(String engineType) { + return "datax".equalsIgnoreCase(engineType); + } + +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/mappings/HiveSqoopParamsMapping.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/mappings/HiveSqoopParamsMapping.java new file mode 100644 index 000000000..366d89b82 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/mappings/HiveSqoopParamsMapping.java @@ -0,0 +1,41 @@ +package com.webank.wedatasphere.exchangis.job.server.builder.transform.mappings; + +import com.webank.wedatasphere.exchangis.job.domain.params.JobParamDefine; +import com.webank.wedatasphere.exchangis.job.domain.params.JobParamSet; +import com.webank.wedatasphere.exchangis.job.domain.params.JobParams; + +import java.util.function.BiFunction; + +/** + * Mapping of Hive in Sqoop + */ +public class HiveSqoopParamsMapping extends AbstractExchangisJobParamsMapping { + + @Override + public String dataSourceType() { + return "hive"; + } + + @Override + public boolean acceptEngine(String engineType) { + return "sqoop".equalsIgnoreCase(engineType); + } + + @Override + public JobParamDefine[] sourceMappings() { + return new JobParamDefine[]{ + //Unit test + JobParams.define("version", "source.version" ), + JobParams.define("version", () -> "1.4.7"), + JobParams.define("tab", (BiFunction)(key, paramSet)->{ + JobParams.define("version").newParam(paramSet).getValue(); + return null; + }) + }; + } + + @Override + public JobParamDefine[] sinkMappings() { + return new JobParamDefine[0]; + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/mappings/HiveV2FileType.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/mappings/HiveV2FileType.java new 
file mode 100644 index 000000000..f4479e602 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/mappings/HiveV2FileType.java @@ -0,0 +1,69 @@ +package com.webank.wedatasphere.exchangis.job.server.builder.transform.mappings; + +import java.util.HashMap; +import java.util.Map; + +/** + * Hive File type for Hive version 2.x + */ +public enum HiveV2FileType { + /** + * TYPE:TEXT + */ + TEXT, + /** + * TYPE:ORC + */ + ORC, + /** + * TYPE:AVRO + */ + AVRO, + /** + * TYPE:PARQUET + */ + PARQUET, + /** + * TYPE:RC + */ + RC, + /** + * TYPE:SEQUENCE + */ + SEQ; + + static final Map SERDE = new HashMap<>(); + static final Map INPUT = new HashMap<>(); + static final Map OUTPUT = new HashMap<>(); + static{ + SERDE.put("org.apache.hadoop.hive.ql.io.orc.OrcSerde", ORC); + SERDE.put("org.apache.hadoop.hive.serde2.avro.AvroSerDe", AVRO); + SERDE.put("org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", PARQUET); + INPUT.put("org.apache.hadoop.mapred.TextInputFormat", TEXT); + INPUT.put("org.apache.hadoop.hive.ql.io.orc.OrcInputFormat", ORC); + INPUT.put("org.apache.hadoop.hive.ql.io.avro.AvroContainerInputFormat", AVRO); + INPUT.put("org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", PARQUET); + INPUT.put("org.apache.hadoop.hive.ql.io.RCFileInputFormat", RC); + INPUT.put("org.apache.hadoop.mapred.SequenceFileInputFormat", SEQ); + OUTPUT.put("org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat", TEXT); + OUTPUT.put("org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat", ORC); + OUTPUT.put("org.apache.hadoop.hive.ql.io.avro.AvroContainerOutputFormat", AVRO); + OUTPUT.put("org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", PARQUET); + OUTPUT.put("org.apache.hadoop.hive.ql.io.RCFileOutputFormat", RC); + OUTPUT.put("org.apache.hadoop.mapred.SequenceFileOutputFormat", SEQ); + } + HiveV2FileType(){ + } + + static HiveV2FileType serde(String serializationClz){ + return SERDE.get(serializationClz); + } + + static HiveV2FileType input(String inputFormat){ + return INPUT.get(inputFormat); + } + + static HiveV2FileType output(String outputFormat){ + return OUTPUT.get(outputFormat); + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/mappings/MySQLSqoopParamsMapping.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/mappings/MySQLSqoopParamsMapping.java new file mode 100644 index 000000000..d26c88abd --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/mappings/MySQLSqoopParamsMapping.java @@ -0,0 +1,25 @@ +package com.webank.wedatasphere.exchangis.job.server.builder.transform.mappings; + +import com.webank.wedatasphere.exchangis.job.domain.params.JobParamDefine; + +public class MySQLSqoopParamsMapping extends AbstractExchangisJobParamsMapping{ + @Override + public JobParamDefine[] sourceMappings() { + return new JobParamDefine[0]; + } + + @Override + public JobParamDefine[] sinkMappings() { + return new JobParamDefine[0]; + } + + @Override + public String dataSourceType() { + return "mysql"; + } + + @Override + public boolean acceptEngine(String engineType) { + return "sqoop".equalsIgnoreCase(engineType); + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/config/ModelMapperConfig.java 
b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/config/ModelMapperConfig.java new file mode 100644 index 000000000..ada40ed13 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/config/ModelMapperConfig.java @@ -0,0 +1,18 @@ +package com.webank.wedatasphere.exchangis.job.server.config; + +import org.modelmapper.ModelMapper; +import org.modelmapper.convention.MatchingStrategies; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; + +@Configuration +public class ModelMapperConfig { + + @Bean + public ModelMapper getModelMapper() { + ModelMapper modelMapper = new ModelMapper(); + modelMapper.getConfiguration().setMatchingStrategy(MatchingStrategies.STRICT); + return modelMapper; + } + +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/dto/ExchangisDataSourceFlowMetricsDTO.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/dto/ExchangisDataSourceFlowMetricsDTO.java new file mode 100644 index 000000000..f99a7184e --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/dto/ExchangisDataSourceFlowMetricsDTO.java @@ -0,0 +1,46 @@ +package com.webank.wedatasphere.exchangis.job.server.dto; + +// Datasource flow metrics POJO +public class ExchangisDataSourceFlowMetricsDTO { + + private String dataSourceTitle; + + private Long dataSourceId; + + // Monitored dimension (second / minute / hour) + private String dimension; + + private String flow; + + public String getDataSourceTitle() { + return dataSourceTitle; + } + + public void setDataSourceTitle(String dataSourceTitle) { + this.dataSourceTitle = dataSourceTitle; + } + + public Long getDataSourceId() { + return dataSourceId; + } + + public void setDataSourceId(Long dataSourceId) { + this.dataSourceId = dataSourceId; + } + + public String getDimension() { + return dimension; + } + + public void setDimension(String dimension) { + this.dimension = dimension; + } + + public String getFlow() { + return flow; + } + + public void setFlow(String flow) { + this.flow = flow; + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/dto/ExchangisEngineResourceMetricsDTO.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/dto/ExchangisEngineResourceMetricsDTO.java new file mode 100644 index 000000000..da95594f6 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/dto/ExchangisEngineResourceMetricsDTO.java @@ -0,0 +1,34 @@ +package com.webank.wedatasphere.exchangis.job.server.dto; + +public class ExchangisEngineResourceMetricsDTO { + + private String engine; + + private String cpu; + + private String mem; + + public String getEngine() { + return engine; + } + + public void setEngine(String engine) { + this.engine = engine; + } + + public String getCpu() { + return cpu; + } + + public void setCpu(String cpu) { + this.cpu = cpu; + } + + public String getMem() { + return mem; + } + + public void setMem(String mem) { + this.mem = mem; + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/dto/ExchangisJobBasicInfoDTO.java 
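The STRICT matching strategy configured in ModelMapperConfig means properties are only copied when names match exactly. A small usage sketch against the DTO above; the EngineStat source type is invented for illustration, and the imports assume the same package as the DTO:

import org.modelmapper.ModelMapper;
import org.modelmapper.convention.MatchingStrategies;

public class MapperDemo {

    // Invented source type: getter names mirror the DTO's, as STRICT requires
    public static class EngineStat {
        private final String engine = "sqoop";
        private final String cpu = "0.5";
        private final String mem = "512Mi";
        public String getEngine() { return engine; }
        public String getCpu() { return cpu; }
        public String getMem() { return mem; }
    }

    public static void main(String[] args) {
        ModelMapper mapper = new ModelMapper();
        mapper.getConfiguration().setMatchingStrategy(MatchingStrategies.STRICT);
        ExchangisEngineResourceMetricsDTO dto =
                mapper.map(new EngineStat(), ExchangisEngineResourceMetricsDTO.class);
        System.out.println(dto.getEngine() + " " + dto.getCpu() + " " + dto.getMem());
    }
}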
b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/dto/ExchangisJobBasicInfoDTO.java new file mode 100644 index 000000000..ece45b5e8 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/dto/ExchangisJobBasicInfoDTO.java @@ -0,0 +1,121 @@ +package com.webank.wedatasphere.exchangis.job.server.dto; + +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; + +import com.webank.wedatasphere.exchangis.job.enums.JobTypeEnum; + +@JsonIgnoreProperties(ignoreUnknown = true) +public class ExchangisJobBasicInfoDTO { + + private int id; + + private Long projectId; + + private Long dssProjectId; + + private String dssProjectName; + + private String nodeId; + + private String nodeName; + + private String name; + + private JobTypeEnum jobType; + + private String engineType; + + private String jobLabels; + + private String jobDesc; + + private String jobName; + + public int getId() { + return id; + } + + public void setId(int id) { + this.id = id; + } + + public String getJobName() { + return jobName; + } + + public void setJobName(String jobName) { + this.jobName = jobName; + } + + public Long getProjectId() { + return projectId; + } + + public void setProjectId(Long projectId) { + this.projectId = projectId; + } + + public Long getDssProjectId() { return dssProjectId; } + + public void setDssProjectId(Long dssProjectId) { this.dssProjectId = dssProjectId; } + + public String getNodeId() { + return nodeId; + } + + public void setNodeId(String nodeId) { + this.nodeId = nodeId; + } + + public String getNodeName() { return nodeName; } + + public void setNodeName(String nodeName) { this.nodeName = nodeName; } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public JobTypeEnum getJobType() { + return jobType; + } + + public void setJobType(JobTypeEnum jobType) { + this.jobType = jobType; + } + + public String getEngineType() { + return engineType; + } + + public void setEngineType(String engineType) { + this.engineType = engineType; + } + + public String getJobLabels() { + return jobLabels; + } + + public void setJobLabels(String jobLabels) { + this.jobLabels = jobLabels; + } + + public String getJobDesc() { + return jobDesc; + } + + public void setJobDesc(String jobDesc) { + this.jobDesc = jobDesc; + } + + public String getDssProjectName() { + return dssProjectName; + } + + public void setDssProjectName(String dssProjectName) { + this.dssProjectName = dssProjectName; + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/dto/ExchangisJobContentDTO.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/dto/ExchangisJobContentDTO.java new file mode 100644 index 000000000..258f0de57 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/dto/ExchangisJobContentDTO.java @@ -0,0 +1,55 @@ +package com.webank.wedatasphere.exchangis.job.server.dto; + +public class ExchangisJobContentDTO { + + private String content; + + private String proxyUser; + + private String executeNode; + + private String syncType; + + private String jobParams; + + public String getContent() { + return content; + } + + public void setContent(String content) { + this.content = content; + } + + public String getProxyUser() { + return proxyUser; + } + + public void setProxyUser(String proxyUser) { + 
this.proxyUser = proxyUser; + } + + public String getExecuteNode() { + return executeNode; + } + + public void setExecuteNode(String executeNode) { + this.executeNode = executeNode; + } + + public String getSyncType() { + return syncType; + } + + public void setSyncType(String syncType) { + this.syncType = syncType; + } + + public String getJobParams() { + return jobParams; + } + + public void setJobParams(String jobParams) { + this.jobParams = jobParams; + } + +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/dto/ExchangisTaskIndicatorMetricsDTO.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/dto/ExchangisTaskIndicatorMetricsDTO.java new file mode 100644 index 000000000..f380f73de --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/dto/ExchangisTaskIndicatorMetricsDTO.java @@ -0,0 +1,38 @@ +package com.webank.wedatasphere.exchangis.job.server.dto; + +/** + * + * @Date 2022/1/8 19:48 + */ +public class ExchangisTaskIndicatorMetricsDTO { + + private Long exchangedRecords; + + private Long errorRecords; + + private Long ignoredRecords; + + public Long getExchangedRecords() { + return exchangedRecords; + } + + public void setExchangedRecords(Long exchangedRecords) { + this.exchangedRecords = exchangedRecords; + } + + public Long getErrorRecords() { + return errorRecords; + } + + public void setErrorRecords(Long errorRecords) { + this.errorRecords = errorRecords; + } + + public Long getIgnoredRecords() { + return ignoredRecords; + } + + public void setIgnoredRecords(Long ignoredRecords) { + this.ignoredRecords = ignoredRecords; + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/dto/ExchangisTaskMetricsDTO.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/dto/ExchangisTaskMetricsDTO.java new file mode 100644 index 000000000..c4aa09471 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/dto/ExchangisTaskMetricsDTO.java @@ -0,0 +1,42 @@ +package com.webank.wedatasphere.exchangis.job.server.dto; + +/** + * + * @Date 2022/1/8 19:33 + */ +public class ExchangisTaskMetricsDTO { + + private ExchangisTaskResourceUsedMetricsDTO resourceUsed; + + private ExchangisTaskTrafficMetricsDTO traffic; + + private ExchangisTaskIndicatorMetricsDTO indicator; + + public ExchangisTaskResourceUsedMetricsDTO getResourceUsed() { + return resourceUsed; + } + + public void setResourceUsed(ExchangisTaskResourceUsedMetricsDTO resourceUsed) { + this.resourceUsed = resourceUsed; + } + + public ExchangisTaskTrafficMetricsDTO getTraffic() { + return traffic; + } + + public void setTraffic(ExchangisTaskTrafficMetricsDTO traffic) { + this.traffic = traffic; + } + + public ExchangisTaskIndicatorMetricsDTO getIndicator() { + return indicator; + } + + public void setIndicator(ExchangisTaskIndicatorMetricsDTO indicator) { + this.indicator = indicator; + } + + /*public static class ExchangisTaskResourceUsedMetricsDTO{ + + }*/ +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/dto/ExchangisTaskProcessMetricsDTO.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/dto/ExchangisTaskProcessMetricsDTO.java new file mode 100644 index 000000000..f991d4368 --- /dev/null +++ 
b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/dto/ExchangisTaskProcessMetricsDTO.java @@ -0,0 +1,64 @@ +package com.webank.wedatasphere.exchangis.job.server.dto; + +public class ExchangisTaskProcessMetricsDTO { + + private String key; + + private String title; + + private Integer running; + + private Integer initialized; + + private Integer total; + + private String percentOfComplete; + + public Integer getRunning() { + return running; + } + + public void setRunning(Integer running) { + this.running = running; + } + + public Integer getInitialized() { + return initialized; + } + + public void setInitialized(Integer initialized) { + this.initialized = initialized; + } + + public Integer getTotal() { + return total; + } + + public void setTotal(Integer total) { + this.total = total; + } + + public String getPercentOfComplete() { + return percentOfComplete; + } + + public void setPercentOfComplete(String percentOfComplete) { + this.percentOfComplete = percentOfComplete; + } + + public String getKey() { + return key; + } + + public void setKey(String key) { + this.key = key; + } + + public String getTitle() { + return title; + } + + public void setTitle(String title) { + this.title = title; + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/dto/ExchangisTaskResourceUsedMetricsDTO.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/dto/ExchangisTaskResourceUsedMetricsDTO.java new file mode 100644 index 000000000..7c1e5e6bd --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/dto/ExchangisTaskResourceUsedMetricsDTO.java @@ -0,0 +1,28 @@ +package com.webank.wedatasphere.exchangis.job.server.dto; + +/** + * + * @Date 2022/1/8 19:43 + */ +public class ExchangisTaskResourceUsedMetricsDTO { + + private double cpu; + + private Long memory; + + public double getCpu() { + return cpu; + } + + public void setCpu(double cpu) { + this.cpu = cpu; + } + + public Long getMemory() { + return memory; + } + + public void setMemory(Long memory) { + this.memory = memory; + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/dto/ExchangisTaskStatusMetricsDTO.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/dto/ExchangisTaskStatusMetricsDTO.java new file mode 100644 index 000000000..b3ba60b87 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/dto/ExchangisTaskStatusMetricsDTO.java @@ -0,0 +1,64 @@ +package com.webank.wedatasphere.exchangis.job.server.dto; + +public class ExchangisTaskStatusMetricsDTO { + + private String jobName; + + private String taskName; + + private String status; + + private Integer num; + + private String createUser; + + private String proxyUser; + + public String getStatus() { + return status; + } + + public void setStatus(String status) { + this.status = status; + } + + public Integer getNum() { + return num; + } + + public void setNum(Integer num) { + this.num = num; + } + + public String getCreateUser() { + return createUser; + } + + public void setCreateUser(String createUser) { + this.createUser = createUser; + } + + public String getJobName() { + return jobName; + } + + public void setJobName(String jobName) { + this.jobName = jobName; + } + + public String getTaskName() { + return taskName; + } + 
+ public void setTaskName(String taskName) { + this.taskName = taskName; + } + + public String getProxyUser() { + return proxyUser; + } + + public void setProxyUser(String proxyUser) { + this.proxyUser = proxyUser; + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/dto/ExchangisTaskTrafficMetricsDTO.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/dto/ExchangisTaskTrafficMetricsDTO.java new file mode 100644 index 000000000..c04aad9f0 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/dto/ExchangisTaskTrafficMetricsDTO.java @@ -0,0 +1,38 @@ +package com.webank.wedatasphere.exchangis.job.server.dto; + +/** + * + * @Date 2022/1/8 19:45 + */ +public class ExchangisTaskTrafficMetricsDTO { + + private String source; + + private String sink; + + private Long flow; + + public String getSource() { + return source; + } + + public void setSource(String source) { + this.source = source; + } + + public String getSink() { + return sink; + } + + public void setSink(String sink) { + this.sink = sink; + } + + public Long getFlow() { + return flow; + } + + public void setFlow(Long flow) { + this.flow = flow; + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/dto/ExportedProject.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/dto/ExportedProject.java new file mode 100644 index 000000000..71b2fda58 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/dto/ExportedProject.java @@ -0,0 +1,39 @@ +package com.webank.wedatasphere.exchangis.job.server.dto; + +import com.webank.wedatasphere.exchangis.job.vo.ExchangisJobVo; + +import java.util.List; + +/** + * @author tikazhang + * @Date 2022/3/14 12:22 + */ +public class ExportedProject { + String name; + List sqoops; + List dataxes; + + public List getSqoops() { + return sqoops; + } + + public void setSqoops(List sqoops) { + this.sqoops = sqoops; + } + + public List getDataxes() { + return dataxes; + } + + public void setDataxes(List dataxes) { + this.dataxes = dataxes; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/dto/IdCatalog.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/dto/IdCatalog.java new file mode 100644 index 000000000..8ba12b551 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/dto/IdCatalog.java @@ -0,0 +1,32 @@ +package com.webank.wedatasphere.exchangis.job.server.dto; + +import com.google.common.collect.Maps; + +import java.util.Map; + +/** + * @author tikazhang + * @Date 2022/3/15 10:33 + */ +public class IdCatalog { + + private Map sqoop = Maps.newHashMap(); + + private Map datax = Maps.newHashMap(); + + public Map getSqoop() { + return sqoop; + } + + public void setSqoop(Map sqoop) { + this.sqoop = sqoop; + } + + public Map getDatax() { + return datax; + } + + public void setDatax(Map datax) { + this.datax = datax; + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/exception/ExchangisJobServerException.java 
b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/exception/ExchangisJobServerException.java new file mode 100644 index 000000000..bff9aa6f4 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/exception/ExchangisJobServerException.java @@ -0,0 +1,33 @@ + +package com.webank.wedatasphere.exchangis.job.server.exception; + + +import org.apache.linkis.common.exception.ErrorException; +import org.apache.linkis.common.exception.ExceptionLevel; +import org.apache.linkis.common.exception.LinkisRuntimeException; + + +public class ExchangisJobServerException extends ErrorException { + + public ExchangisJobServerException(int errCode, String desc) { + super(errCode, desc); + } + + public ExchangisJobServerException(int errorCode, String desc, Throwable throwable) { + super(errorCode, desc); + this.initCause(throwable); + } + + public static class Runtime extends LinkisRuntimeException { + + public Runtime(int errCode, String desc, Throwable t) { + super(errCode, desc); + super.initCause(t); + } + + @Override + public ExceptionLevel getLevel() { + return ExceptionLevel.ERROR; + } + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/exception/ExchangisSchedulerException.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/exception/ExchangisSchedulerException.java new file mode 100644 index 000000000..57f6bdd2d --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/exception/ExchangisSchedulerException.java @@ -0,0 +1,30 @@ +package com.webank.wedatasphere.exchangis.job.server.exception; + +import org.apache.linkis.common.exception.ErrorException; +import org.apache.linkis.common.exception.ExceptionLevel; +import org.apache.linkis.common.exception.LinkisRuntimeException; + +import static com.webank.wedatasphere.exchangis.job.exception.ExchangisJobExceptionCode.SCHEDULER_ERROR; + +/** + * Exception in scheduling + */ +public class ExchangisSchedulerException extends ErrorException { + public ExchangisSchedulerException(String desc, Throwable t) { + super(SCHEDULER_ERROR.getCode(), desc); + super.initCause(t); + } + + public static class Runtime extends LinkisRuntimeException{ + + public Runtime(String desc, Throwable t) { + super(SCHEDULER_ERROR.getCode(), desc); + super.initCause(t); + } + + @Override + public ExceptionLevel getLevel() { + return ExceptionLevel.ERROR; + } + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/exception/ExchangisSchedulerRetryException.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/exception/ExchangisSchedulerRetryException.java new file mode 100644 index 000000000..0339fbaaf --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/exception/ExchangisSchedulerRetryException.java @@ -0,0 +1,25 @@ +package com.webank.wedatasphere.exchangis.job.server.exception; + +import org.apache.linkis.common.exception.LinkisRetryException; + +import static com.webank.wedatasphere.exchangis.job.exception.ExchangisJobExceptionCode.SCHEDULER_ERROR; + +/** + * Exception in scheduling (could be retried in limit) + */ +public class ExchangisSchedulerRetryException extends LinkisRetryException { + private int retryNum = 0; + + public int getRetryNum() { 
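A recurring pattern in these exception classes is a checked ErrorException paired with a nested Runtime variant. By all appearances the runtime flavor exists so that code inside lambdas and stream pipelines, which cannot throw checked exceptions, can still fail with the same error code. A sketch of both call sites (method bodies invented for illustration, imports assumed from the package above):

import java.util.stream.Stream;

public class SchedulerExceptionDemo {

    // Ordinary methods can declare and throw the checked variant
    static void submitChecked() throws ExchangisSchedulerException {
        try {
            // ... submit something to the scheduler ...
        } catch (Exception e) {
            throw new ExchangisSchedulerException("Submit scheduler task occurred error", e);
        }
    }

    // Lambdas cannot throw checked exceptions, hence the nested Runtime class
    static void submitInLambda() {
        Stream.of("task-1", "task-2").forEach(taskId -> {
            if (taskId.isEmpty()) {
                throw new ExchangisSchedulerException.Runtime("Empty task id", null);
            }
        });
    }
}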
+ return retryNum; + } + + public void setRetryNum(int retryNum) { + this.retryNum = retryNum; + } + + public ExchangisSchedulerRetryException(String desc, Throwable t) { + super(SCHEDULER_ERROR.getCode(), desc); + super.initCause(t); + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/exception/ExchangisTaskExecuteException.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/exception/ExchangisTaskExecuteException.java new file mode 100644 index 000000000..a7cdff332 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/exception/ExchangisTaskExecuteException.java @@ -0,0 +1,26 @@ +package com.webank.wedatasphere.exchangis.job.server.exception; + +import org.apache.linkis.common.exception.ErrorException; +import org.apache.linkis.common.exception.ExceptionLevel; +import org.apache.linkis.common.exception.LinkisRuntimeException; + +import static com.webank.wedatasphere.exchangis.job.exception.ExchangisJobExceptionCode.TASK_EXECUTE_ERROR; + +public class ExchangisTaskExecuteException extends ErrorException { + public ExchangisTaskExecuteException(String desc, Throwable t) { + super(TASK_EXECUTE_ERROR.getCode(), desc); + super.initCause(t); + } + public static class Runtime extends LinkisRuntimeException { + + public Runtime(String desc, Throwable t) { + super(TASK_EXECUTE_ERROR.getCode(), desc); + super.initCause(t); + } + + @Override + public ExceptionLevel getLevel() { + return ExceptionLevel.ERROR; + } + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/exception/ExchangisTaskGenerateException.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/exception/ExchangisTaskGenerateException.java new file mode 100644 index 000000000..2cf8832a5 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/exception/ExchangisTaskGenerateException.java @@ -0,0 +1,15 @@ +package com.webank.wedatasphere.exchangis.job.server.exception; + +import com.webank.wedatasphere.exchangis.job.exception.ExchangisJobException; + +import static com.webank.wedatasphere.exchangis.job.exception.ExchangisJobExceptionCode.TASK_GENERATE_ERROR; + +/** + * Exception in generating tasks of job + */ +public class ExchangisTaskGenerateException extends ExchangisJobException { + public ExchangisTaskGenerateException(String desc, Throwable t) { + super(TASK_GENERATE_ERROR.getCode(), desc); + super.initCause(t); + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/exception/ExchangisTaskObserverException.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/exception/ExchangisTaskObserverException.java new file mode 100644 index 000000000..6a9275318 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/exception/ExchangisTaskObserverException.java @@ -0,0 +1,44 @@ +package com.webank.wedatasphere.exchangis.job.server.exception; + +import com.webank.wedatasphere.exchangis.job.exception.ExchangisJobException; +import org.apache.linkis.common.exception.ExceptionLevel; +import org.apache.linkis.common.exception.LinkisRuntimeException; + +import static 
com.webank.wedatasphere.exchangis.job.exception.ExchangisJobExceptionCode.TASK_OBSERVER_ERROR; + +/** + * Exception in subscribing tasks + */ +public class ExchangisTaskObserverException extends ExchangisJobException { + + private String methodName; + public ExchangisTaskObserverException(String methodName, String desc, Throwable t) { + this(desc, t); + this.methodName = methodName; + } + public ExchangisTaskObserverException(String desc, Throwable t) { + super(TASK_OBSERVER_ERROR.getCode(), desc); + super.initCause(t); + } + + public String getMethodName() { + return methodName; + } + + public void setMethodName(String methodName) { + this.methodName = methodName; + } + + public static class Runtime extends LinkisRuntimeException { + + public Runtime(String desc, Throwable t) { + super(TASK_OBSERVER_ERROR.getCode(), desc); + super.initCause(t); + } + + @Override + public ExceptionLevel getLevel() { + return ExceptionLevel.ERROR; + } + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/AbstractTaskExecution.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/AbstractTaskExecution.java new file mode 100644 index 000000000..0a5e3e2b4 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/AbstractTaskExecution.java @@ -0,0 +1,209 @@ +package com.webank.wedatasphere.exchangis.job.server.execution; + + +import com.webank.wedatasphere.exchangis.job.exception.ExchangisOnEventException; +import com.webank.wedatasphere.exchangis.job.launcher.ExchangisTaskLaunchManager; +import com.webank.wedatasphere.exchangis.job.launcher.domain.LaunchableExchangisTask; +import com.webank.wedatasphere.exchangis.job.launcher.domain.LaunchedExchangisTask; +import com.webank.wedatasphere.exchangis.job.server.exception.ExchangisSchedulerException; +import com.webank.wedatasphere.exchangis.job.server.exception.ExchangisTaskExecuteException; +import com.webank.wedatasphere.exchangis.job.server.execution.events.*; +import com.webank.wedatasphere.exchangis.job.server.execution.loadbalance.TaskSchedulerLoadBalancer; +import com.webank.wedatasphere.exchangis.job.server.execution.scheduler.ExchangisSchedulerTask; +import com.webank.wedatasphere.exchangis.job.server.execution.scheduler.SchedulerThread; +import com.webank.wedatasphere.exchangis.job.server.execution.scheduler.tasks.AbstractLoadBalanceSchedulerTask; +import com.webank.wedatasphere.exchangis.job.server.execution.scheduler.tasks.SubmitSchedulerTask; +import com.webank.wedatasphere.exchangis.job.server.execution.subscriber.TaskChooseRuler; +import com.webank.wedatasphere.exchangis.job.server.execution.subscriber.TaskObserver; +import com.webank.wedatasphere.exchangis.job.utils.TypeGenericUtils; +import org.apache.linkis.scheduler.Scheduler; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.ArrayList; +import java.util.List; +import java.util.Objects; +import java.util.Optional; + +/** + * Contains: + * 1) TaskManager to manage running tasks. + * 2) TaskObserver to observe initial tasks. + * 3) TaskScheduler to submit scheduler tasks. 
+ */ +public abstract class AbstractTaskExecution implements TaskExecution { + + private static final Logger LOG = LoggerFactory.getLogger(AbstractTaskExecution.class); + private boolean initial = false; + + /** + * Execution listeners + */ + private List listeners = new ArrayList<>(); + + @Override + public void submit(LaunchableExchangisTask task) throws ExchangisTaskExecuteException{ + SubmitSchedulerTask submitSchedulerTask = new SubmitSchedulerTask(task); + try { + submit(submitSchedulerTask); + } catch (ExchangisSchedulerException e) { + throw new ExchangisTaskExecuteException("Submit task [" + task.getId() + "] to schedule occurred error", e); + } + } + + @Override + @SuppressWarnings("unchecked") + public void submit(ExchangisSchedulerTask schedulerTask) throws ExchangisSchedulerException { + try{ + preSubmit(schedulerTask); + if (schedulerTask instanceof AbstractLoadBalanceSchedulerTask){ + try { + ((AbstractLoadBalanceSchedulerTask) schedulerTask) + .setSchedulerLoadBalancer(getTaskSchedulerLoadBalancer()); + }catch (Exception e){ + //Ignore the exception + LOG.warn("Load balance scheduler task [" + schedulerTask.getClass().getSimpleName() + "] doesn't match the load balancer", e); + } + } + getScheduler().submit(schedulerTask); + }catch (Exception e){ + throw new ExchangisSchedulerException("Submit scheduler task [id: " + schedulerTask.getId() + ", type: " + schedulerTask.getClass().getName() + "] occurred error", e); + } + } + + @Override + public void start() throws ExchangisTaskExecuteException { + if (!initial){ + init(); + } + // Start the scheduler + getScheduler().start(); + // Start the observers + Optional.ofNullable(getTaskObservers()).ifPresent(taskObservers -> taskObservers.forEach(TaskObserver::start)); + // Start the loadBalancer + TaskSchedulerLoadBalancer loadBalancer = getTaskSchedulerLoadBalancer(); + if (Objects.nonNull(loadBalancer) && loadBalancer instanceof SchedulerThread){ + ((SchedulerThread) loadBalancer).start(); + } + } + + @Override + public void stop() { + // Stop the observers + Optional.ofNullable(getTaskObservers()).ifPresent(taskObservers -> taskObservers.forEach(TaskObserver::stop)); + // Stop the loadBalancer + TaskSchedulerLoadBalancer loadBalancer = getTaskSchedulerLoadBalancer(); + if (Objects.nonNull(loadBalancer) && loadBalancer instanceof SchedulerThread){ + ((SchedulerThread) loadBalancer).stop(); + } + // Stop the scheduler + getScheduler().shutdown(); + } + + + @SuppressWarnings("unchecked") + protected synchronized void init() throws ExchangisTaskExecuteException{ + if (!initial){ + Scheduler scheduler = getScheduler(); + if (Objects.isNull(scheduler)){ + throw new ExchangisTaskExecuteException("Scheduler cannot be empty in task execution", null); + } + TaskManager taskManager = getTaskManager(); + if (Objects.nonNull(taskManager) && taskManager instanceof AbstractTaskManager){ + ((AbstractTaskManager) taskManager).setExecutionListener(new CombinedTaskExecutionListener()); + } + List> observers = getTaskObservers(); + Optional.ofNullable(observers).ifPresent(taskObservers -> taskObservers.forEach(observer -> { + observer.setScheduler(scheduler); + Class subType = TypeGenericUtils.getActualTypeFormGenericClass(observer.getClass(), null, 0); + if (LaunchedExchangisTask.class.equals(subType)){ + ((TaskObserver)observer).setTaskManager(taskManager); + } else if (LaunchableExchangisTask.class.equals(subType)){ + ((TaskObserver)observer).setTaskExecution(this); + ((TaskObserver)observer).setTaskChooseRuler(getTaskChooseRuler()); + } + })); 
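The lifecycle methods above are ordered deliberately: start() brings the scheduler up before the observers and the load balancer, while stop() shuts the scheduler down last, so observers never run against a dead scheduler. A usage sketch, assuming some concrete subclass has wired the abstract getters (such a subclass is not shown in this patch):

public class ExecutionLifecycleDemo {
    public static void run(AbstractTaskExecution execution, LaunchableExchangisTask task) throws Exception {
        execution.start();       // scheduler first, then observers, then load balancer
        execution.submit(task);  // wrapped into a SubmitSchedulerTask internally
        // ... tasks run on scheduler threads ...
        execution.stop();        // scheduler is shut down last
    }
}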
+ initial = true; + } + } + + @Override + public void addListener(TaskExecutionListener listener) { + this.listeners.add(listener); + } + + private class CombinedTaskExecutionListener implements TaskExecutionListener{ + + @Override + public void onEvent(TaskExecutionEvent taskExecutionEvent) throws ExchangisOnEventException { + for(TaskExecutionListener listener : listeners){ + listener.onEvent(taskExecutionEvent); + } + } + + @Override + public void onMetricsUpdate(TaskMetricsUpdateEvent metricsUpdateEvent) { + // Ignore + } + + @Override + public void onStatusUpdate(TaskStatusUpdateEvent statusUpdateEvent) { + // Ignore + } + + @Override + public void onLaunch(TaskLaunchEvent infoUpdateEvent) { + // Ignore + } + + @Override + public void onDelete(TaskDeleteEvent deleteEvent) { + // Ignore + } + + @Override + public void onDequeue(TaskDequeueEvent dequeueEvent) throws ExchangisOnEventException { + //Ignore + } + + @Override + public void onProgressUpdate(TaskProgressUpdateEvent updateEvent) { + // Ignore + } + } + + /** + * Pre hook of submitting + * @param schedulerTask scheduler task + */ + public void preSubmit(ExchangisSchedulerTask schedulerTask){ + // Do nothing + } + /** + * TaskManager of launchedExchangisTask + * @return task Manager + */ + protected abstract TaskManager getTaskManager(); + + /** + * TaskObserver + * @return list + */ + protected abstract List> getTaskObservers(); + + /** + * Scheduler + * @return Scheduler + */ + protected abstract Scheduler getScheduler(); + + protected abstract TaskChooseRuler getTaskChooseRuler(); + /** + * Launch manager + * @return launch manager + */ + protected abstract ExchangisTaskLaunchManager getExchangisLaunchManager(); + + protected abstract TaskSchedulerLoadBalancer getTaskSchedulerLoadBalancer(); + +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/AbstractTaskManager.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/AbstractTaskManager.java new file mode 100644 index 000000000..e31d3f087 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/AbstractTaskManager.java @@ -0,0 +1,314 @@ +package com.webank.wedatasphere.exchangis.job.server.execution; + +import com.webank.wedatasphere.exchangis.datasource.core.utils.Json; +import com.webank.wedatasphere.exchangis.job.launcher.domain.LaunchedExchangisTask; +import com.webank.wedatasphere.exchangis.job.launcher.domain.task.TaskProgressInfo; +import com.webank.wedatasphere.exchangis.job.launcher.domain.task.TaskStatus; +import com.webank.wedatasphere.exchangis.job.listener.events.JobLogEvent; +import com.webank.wedatasphere.exchangis.job.server.exception.ExchangisTaskExecuteException; +import com.webank.wedatasphere.exchangis.job.server.execution.events.*; +import com.webank.wedatasphere.exchangis.job.server.log.JobServerLogging; +import com.webank.wedatasphere.exchangis.job.server.log.cache.JobLogCacheUtils; + +import java.util.*; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.CopyOnWriteArrayList; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.locks.ReentrantLock; +import java.util.stream.Collectors; + +/** + * Launched task manager + */ +public abstract class AbstractTaskManager implements TaskManager { + + /** + * Execution listener + */ + private TaskExecutionListener executionListener; + + /** + * Contains the 
job_execution_id + */ + private List jobExecutionIds = new CopyOnWriteArrayList<>(); + + /** + * Task id => Running task + */ + private ConcurrentHashMap runningTasks = new ConcurrentHashMap<>(); + + /** + * job_execution_id => List(Running tasks) + */ + private ConcurrentHashMap jobWrappers = new ConcurrentHashMap<>(); + + /** + * Collect the job_execution_id from running tasks + * @return list + */ + public List getJobExecutionIds(){ + return jobExecutionIds; + } + + @Override + public List getRunningTasks() { + return runningTasks.values().stream().map(ctx -> + ctx.task).collect(Collectors.toList()); + } + + @Override + public void cancelRunningTask(String taskId) { + TaskContext context = runningTasks.get(taskId); + if (Objects.nonNull(context)){ + LaunchedExchangisTask task = context.task; + context.access(() -> { + onEvent(new TaskStatusUpdateEvent(task, TaskStatus.Cancelled)); + info(task, "Status of task: [name: {}, id: {}] change {} => {}", + task.getName(), task.getTaskId(), task.getStatus(), TaskStatus.Cancelled); + JobLogCacheUtils.flush(task.getJobExecutionId(), false); + runningTasks.remove(taskId); + }); + JobWrapper wrapper = jobWrappers.get(task.getJobExecutionId()); + if (Objects.nonNull(wrapper)){ + wrapper.removeTask(task); + } + } + } + + @Override + public void addRunningTask(LaunchedExchangisTask task) { + task.setStatus(TaskStatus.Running); + task.setRunningTime(Calendar.getInstance().getTime()); + onEvent(new TaskLaunchEvent(task)); + info(task, "Status of task: [name: {}, id: {}] change to {}, info: [{}]", task.getName(), task.getTaskId(), task.getStatus(), ""); + if (Objects.isNull(runningTasks.putIfAbsent(task.getTaskId(), new TaskContext(task)))){ + jobWrappers.compute(task.getJobExecutionId(), (jobExecutionId, jobWrapper) -> { + if (Objects.nonNull(jobWrapper) && jobWrapper.addTask(task)){ + return jobWrapper; + } + jobWrapper = new JobWrapper(jobExecutionId); + jobWrapper.addTask(task); + return jobWrapper; + }); + } + } + + + @Override + public void removeRunningTask(String taskId) { + removeRunningTaskInner(taskId, true); + } + + @Override + public boolean refreshRunningTaskMetrics(LaunchedExchangisTask task, Map metricsMap) { + TaskContext context = runningTasks.get(task.getTaskId()); + if (Objects.nonNull(context)) { + refreshRunningTaskMetrics(context, metricsMap); + return true; + } + return false; + } + + @Override + public boolean refreshRunningTaskStatus(LaunchedExchangisTask task, TaskStatus status) { + return refreshRunningTaskStatusAndMetrics(task, status, null); + } + + @Override + public boolean refreshRunningTaskStatusAndMetrics(LaunchedExchangisTask task, TaskStatus status, Map metricsMap) { + TaskStatus beforeStatus = task.getStatus(); + TaskContext context = runningTasks.get(task.getTaskId()); + if (Objects.nonNull(context)){ + task = context.task; + LaunchedExchangisTask finalTask = task; + context.access( () -> { + if (Objects.nonNull(metricsMap)){ + refreshRunningTaskMetrics(context, metricsMap); + } + if (TaskStatus.isCompleted(status)){ + info(finalTask, "Status of task: [name: {}, id: {}] change {} => {}", + finalTask.getName(), finalTask.getTaskId(), beforeStatus, status); + onEvent(new TaskStatusUpdateEvent(finalTask, status)); + removeRunningTaskInner(finalTask.getTaskId(), false); + } else { + onEvent(new TaskStatusUpdateEvent(finalTask, status)); + if (isTransition(finalTask, status)) { + info(finalTask, "Status of task: [name: {}, id: {}] change {} => {}", + finalTask.getName(), finalTask.getTaskId(), beforeStatus, status); + } + 
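addRunningTask above relies on ConcurrentHashMap.compute to attach a task to its job wrapper atomically, so two tasks of the same job cannot race on wrapper creation. A stripped-down sketch of the same idiom, with the domain types replaced by strings:

```java
import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CopyOnWriteArrayList;

public class JobGroupingSketch {

    // jobExecutionId -> task ids; the compute() callback runs atomically per key
    static final ConcurrentHashMap<String, List<String>> GROUPS = new ConcurrentHashMap<>();

    static void register(String jobExecutionId, String taskId) {
        GROUPS.compute(jobExecutionId, (id, tasks) -> {
            if (tasks == null) {
                tasks = new CopyOnWriteArrayList<>(); // first task of this job creates the group
            }
            tasks.add(taskId);
            return tasks;
        });
    }

    public static void main(String[] args) {
        register("job-1", "task-a");
        register("job-1", "task-b");
        System.out.println(GROUPS); // {job-1=[task-a, task-b]}
    }
}
```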
} + finalTask.setStatus(status); + }); + return true; + } + return false; + } + + @Override + public boolean refreshRunningTaskProgress(LaunchedExchangisTask task, TaskProgressInfo progressInfo) { + TaskContext context = runningTasks.get(task.getTaskId()); + if (Objects.nonNull(context)){ + task = context.task; + LaunchedExchangisTask finalTask = task; + context.access(() -> { + onEvent(new TaskProgressUpdateEvent(finalTask, progressInfo)); + if (finalTask.getProgress() != progressInfo.getProgress()){ + info(finalTask, "Progress of task: [{}] change {} => {}", + finalTask.getTaskId(), finalTask.getProgress(), progressInfo.getProgress()); + } + finalTask.setProgress(progressInfo.getProgress()); + }); + return true; + } + return false; + } + + @Override + public LaunchedExchangisTask getRunningTask(String taskId) { + TaskContext context = runningTasks.get(taskId); + return context != null ? context.task : null; + } + + public TaskExecutionListener getExecutionListener() { + return executionListener; + } + + public void setExecutionListener(TaskExecutionListener executionListener) { + this.executionListener = executionListener; + } + + /** + * Refresh running task metrics + * @param context context + * @param metricsMap metric map + */ + private void refreshRunningTaskMetrics(TaskContext context, Map metricsMap){ + LaunchedExchangisTask finalTask = context.task; + context.access(() -> { + if (!TaskStatus.isCompleted(finalTask.getStatus())) { + onEvent(new TaskMetricsUpdateEvent(finalTask, metricsMap)); + finalTask.setMetrics(null); + finalTask.setMetricsMap(metricsMap); + trace(finalTask, "Metrics info of task: [{}]", Json.toJson(metricsMap, null)); + } + }); + } + + /** + * Remove inner + * @param taskId task id + * @param updateStatus if update status + */ + private void removeRunningTaskInner(String taskId, boolean updateStatus){ + TaskContext context = runningTasks.get(taskId); + if (Objects.nonNull(context)){ + LaunchedExchangisTask task = context.task; + context.access(() -> { + if (updateStatus) { + onEvent(new TaskStatusUpdateEvent(task, task.getStatus())); + } + runningTasks.remove(taskId); + }); + JobWrapper wrapper = jobWrappers.get(task.getJobExecutionId()); + if (Objects.nonNull(wrapper)){ + wrapper.removeTask(task); + } + } + } + /** + * OnEvent + * @param event event entity + */ + public void onEvent(TaskExecutionEvent event){ + try { + executionListener.onEvent(event); + } catch (Exception e) { + throw new ExchangisTaskExecuteException.Runtime("Fail to call 'onEvent' event: [id: " + event.eventId() +", type:" + event.getClass().getSimpleName() +"]", e); + } + } + + private boolean isTransition(LaunchedExchangisTask task, TaskStatus status){ + if (Objects.nonNull(task)){ + return !task.getStatus().equals(status); + } + return false; + } + + @Override + public JobLogEvent getJobLogEvent(JobLogEvent.Level level, LaunchedExchangisTask task, String message, Object... 
args) { + return new JobLogEvent(level, task.getExecuteUser(), task.getJobExecutionId(), message, args); + } + + private static class TaskContext{ + /** + * Access lock + */ + private final ReentrantLock accessLock = new ReentrantLock(); + + private final LaunchedExchangisTask task; + + public TaskContext(LaunchedExchangisTask task){ + this.task = task; + } + /** + * Access the process + * @param exec exec process + */ + private void access(Runnable exec){ + accessLock.lock(); + try{ + exec.run(); + }finally { + accessLock.unlock(); + } + } + } + private class JobWrapper{ + + /** + * job_execution_id + */ + String jobExecutionId; + + boolean destroy = false; + + JobWrapper(String jobExecutionId){ + this.jobExecutionId = jobExecutionId; + jobExecutionIds.add(jobExecutionId); + } + + Map tasksInJob = new HashMap<>(); + + final AtomicInteger taskNum = new AtomicInteger(0); + + /** + * Remove task (if the task list is empty, remove self from the map) + * @param task task + */ + public void removeTask(LaunchedExchangisTask task) { + synchronized (taskNum) { + if (Objects.nonNull(tasksInJob.remove(task.getTaskId()))) { + if (taskNum.decrementAndGet() == 0) { + // Flush the job log cache + JobLogCacheUtils.flush(jobExecutionId, true); + jobWrappers.remove(jobExecutionId); + jobExecutionIds.remove(jobExecutionId); + destroy = true; + } + } + } + } + + public boolean addTask(LaunchedExchangisTask task){ + synchronized (taskNum) { + if (!destroy) { + if (Objects.isNull(tasksInJob.put(task.getTaskId(), task))) { + taskNum.getAndIncrement(); + } + return true; + } + } + return false; + } + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/DefaultTaskExecution.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/DefaultTaskExecution.java new file mode 100644 index 000000000..07ed20e1d --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/DefaultTaskExecution.java @@ -0,0 +1,124 @@ +package com.webank.wedatasphere.exchangis.job.server.execution; + + +import com.webank.wedatasphere.exchangis.job.launcher.ExchangisTaskLaunchManager; +import com.webank.wedatasphere.exchangis.job.launcher.domain.LaunchableExchangisTask; +import com.webank.wedatasphere.exchangis.job.launcher.domain.LaunchedExchangisTask; +import com.webank.wedatasphere.exchangis.job.server.exception.ExchangisTaskExecuteException; +import com.webank.wedatasphere.exchangis.job.server.execution.loadbalance.TaskSchedulerLoadBalancer; +import com.webank.wedatasphere.exchangis.job.server.execution.scheduler.ExchangisSchedulerTask; +import com.webank.wedatasphere.exchangis.job.server.execution.scheduler.tasks.MetricUpdateSchedulerTask; +import com.webank.wedatasphere.exchangis.job.server.execution.scheduler.tasks.StatusUpdateSchedulerTask; +import com.webank.wedatasphere.exchangis.job.server.execution.scheduler.tasks.SubmitSchedulerTask; +import com.webank.wedatasphere.exchangis.job.server.execution.subscriber.TaskChooseRuler; +import com.webank.wedatasphere.exchangis.job.server.execution.subscriber.TaskObserver; +import org.apache.linkis.scheduler.Scheduler; + +import java.util.List; +import java.util.Objects; +import java.util.Optional; + +/** + * Default task execution + */ +public class DefaultTaskExecution extends AbstractTaskExecution{ + + public static final String DEFAULT_LAUNCHER_NAME = "Linkis"; + /** + * Scheduler + */ + private Scheduler 
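The TaskContext and JobWrapper pair above combine a per-task ReentrantLock with a reference count, so a wrapper removes itself from the registry once its last task leaves. The sketch below isolates that counting pattern; it omits the destroy flag the real JobWrapper uses to stop a removed wrapper from being reused.

```java
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicInteger;

public class CountedGroupSketch {

    static final Map<String, CountedGroupSketch> REGISTRY = new ConcurrentHashMap<>();

    final String key;
    final AtomicInteger members = new AtomicInteger(0);

    CountedGroupSketch(String key) { this.key = key; }

    void enter() { members.incrementAndGet(); }

    /** The last member to leave removes the whole group from the registry. */
    void leave() {
        if (members.decrementAndGet() == 0) {
            REGISTRY.remove(key);
        }
    }

    public static void main(String[] args) {
        CountedGroupSketch g = REGISTRY.computeIfAbsent("job-1", CountedGroupSketch::new);
        g.enter();
        g.enter();
        g.leave();
        g.leave();
        System.out.println(REGISTRY.containsKey("job-1")); // false
    }
}
```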
scheduler; + + /** + * TaskManager + */ + private TaskManager taskManager; + + /** + * Observer list + */ + private List> taskObservers; + + private TaskChooseRuler taskChooseRuler; + /** + * load balancer + */ + private TaskSchedulerLoadBalancer taskSchedulerLoadBalancer; + + /** + * Launch manager + */ + private ExchangisTaskLaunchManager launchManager; + /** + * + * @param scheduler scheduler + * @param launchManager launch manager + * @param taskManager task manager for launched task + * @param taskObservers task observers + * @param taskSchedulerLoadBalancer load balancer + * @param taskChooseRuler choose ruler + */ + public DefaultTaskExecution(Scheduler scheduler, ExchangisTaskLaunchManager launchManager, + TaskManager taskManager, List> taskObservers, + TaskSchedulerLoadBalancer taskSchedulerLoadBalancer, + TaskChooseRuler taskChooseRuler){ + this.scheduler = scheduler; + this.taskManager = taskManager; + this.taskObservers = taskObservers; + this.launchManager = launchManager; + this.taskSchedulerLoadBalancer = taskSchedulerLoadBalancer; + this.taskChooseRuler = taskChooseRuler; + } + + @Override + protected synchronized void init() throws ExchangisTaskExecuteException { + super.init(); + Optional.ofNullable(getTaskSchedulerLoadBalancer()).ifPresent(loadBalancer -> { + loadBalancer.registerSchedulerTask(StatusUpdateSchedulerTask.class); + loadBalancer.registerSchedulerTask(MetricUpdateSchedulerTask.class); + }); + } + + @Override + public void preSubmit(ExchangisSchedulerTask schedulerTask) { + if (schedulerTask instanceof SubmitSchedulerTask){ + SubmitSchedulerTask submitSchedulerTask = ((SubmitSchedulerTask) schedulerTask); + if (Objects.nonNull(getExchangisLaunchManager())){ + submitSchedulerTask.setLauncher(getExchangisLaunchManager().getTaskLauncher(DEFAULT_LAUNCHER_NAME)); + } + submitSchedulerTask.setTaskManager(getTaskManager()); + submitSchedulerTask.setLoadBalancer(getTaskSchedulerLoadBalancer()); + } + } + + @Override + protected TaskManager getTaskManager() { + return this.taskManager; + } + + @Override + protected List> getTaskObservers() { + return this.taskObservers; + } + + @Override + protected Scheduler getScheduler() { + return this.scheduler; + } + + @Override + protected TaskChooseRuler getTaskChooseRuler() { + return taskChooseRuler; + } + + @Override + protected ExchangisTaskLaunchManager getExchangisLaunchManager() { + return launchManager; + } + + @Override + protected TaskSchedulerLoadBalancer getTaskSchedulerLoadBalancer() { + return this.taskSchedulerLoadBalancer; + } + +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/DefaultTaskManager.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/DefaultTaskManager.java new file mode 100644 index 000000000..d36b70c89 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/DefaultTaskManager.java @@ -0,0 +1,23 @@ +package com.webank.wedatasphere.exchangis.job.server.execution; + +import com.webank.wedatasphere.exchangis.job.listener.JobLogListener; + +/** + * Default implement + */ +public class DefaultTaskManager extends AbstractTaskManager{ + + /** + * Log listener + */ + private JobLogListener jobLogListener; + + public DefaultTaskManager(JobLogListener jobLogListener) { + this.jobLogListener = jobLogListener; + } + + @Override + public JobLogListener getJobLogListener() { + return this.jobLogListener; + } +} diff 
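DefaultTaskExecution customizes behavior through the preSubmit hook declared on the abstract base: every task passes through the hook before it reaches the scheduler, and the subclass uses it to inject the launcher, task manager and load balancer. A minimal sketch of that template-method shape, with stand-in types rather than the real classes:

```java
public class PreSubmitHookSketch {

    abstract static class Execution {
        /** Every submission is funneled through the subclass hook first. */
        final void submit(Runnable task) {
            preSubmit(task);
            task.run(); // stand-in for scheduler.submit(task)
        }
        protected void preSubmit(Runnable task) { /* default: no-op */ }
    }

    static class EnrichingExecution extends Execution {
        @Override protected void preSubmit(Runnable task) {
            System.out.println("enriching task before scheduling: " + task);
        }
    }

    public static void main(String[] args) {
        new EnrichingExecution().submit(() -> System.out.println("task ran"));
    }
}
```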
--git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/TaskExecution.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/TaskExecution.java new file mode 100644 index 000000000..57679fdf5 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/TaskExecution.java @@ -0,0 +1,33 @@ +package com.webank.wedatasphere.exchangis.job.server.execution; + +import com.webank.wedatasphere.exchangis.job.domain.ExchangisTask; +import com.webank.wedatasphere.exchangis.job.server.exception.ExchangisSchedulerException; +import com.webank.wedatasphere.exchangis.job.server.exception.ExchangisTaskExecuteException; +import com.webank.wedatasphere.exchangis.job.server.execution.scheduler.ExchangisSchedulerTask; +import org.apache.linkis.scheduler.Scheduler; + +/** + * Task execution + */ +public interface TaskExecution<T extends ExchangisTask> { + + /** + * Submit a launchable task + * @param task task to be executed + */ + void submit(T task) throws ExchangisTaskExecuteException; + + /** + * Submit scheduler task + * @param schedulerTask scheduler task + */ + void submit(ExchangisSchedulerTask schedulerTask) throws ExchangisSchedulerException; + + /** + * Start execution + */ + void start() throws ExchangisTaskExecuteException; + + /** + * Stop execution + */ + void stop(); + + /** + * Add an execution listener + * @param listener listener + */ + void addListener(TaskExecutionListener listener); + +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/TaskExecutionListener.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/TaskExecutionListener.java new file mode 100644 index 000000000..e050991d2 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/TaskExecutionListener.java @@ -0,0 +1,70 @@ +package com.webank.wedatasphere.exchangis.job.server.execution; + +import com.webank.wedatasphere.exchangis.job.exception.ExchangisOnEventException; +import com.webank.wedatasphere.exchangis.job.listener.ExchangisListener; +import com.webank.wedatasphere.exchangis.job.server.execution.events.*; + +/** + * Execution listener + */ +public interface TaskExecutionListener extends ExchangisListener<TaskExecutionEvent> { + /** + * Listen event during task execution + * @param event event + */ + default void onEvent(TaskExecutionEvent event) throws ExchangisOnEventException { + getLogger().trace("Event: [id: {}, type: {}] in listener [{}]", event.eventId(), event.getClass().getSimpleName(), + this.getClass().getSimpleName()); + if (event instanceof TaskMetricsUpdateEvent) { + onMetricsUpdate((TaskMetricsUpdateEvent) event); + } else if (event instanceof TaskStatusUpdateEvent) { + onStatusUpdate((TaskStatusUpdateEvent) event); + } else if (event instanceof TaskLaunchEvent) { + onLaunch((TaskLaunchEvent) event); + } else if (event instanceof TaskDeleteEvent) { + onDelete((TaskDeleteEvent) event); + } else if (event instanceof TaskProgressUpdateEvent) { + onProgressUpdate((TaskProgressUpdateEvent) event); + } else if (event instanceof TaskDequeueEvent) { + onDequeue((TaskDequeueEvent) event); + } + } + + /** + * Listen metrics update + * @param metricsUpdateEvent update event + */ + void onMetricsUpdate(TaskMetricsUpdateEvent metricsUpdateEvent) throws ExchangisOnEventException; + + /** + * Status update + * @param statusUpdateEvent update event + */ + void onStatusUpdate(TaskStatusUpdateEvent statusUpdateEvent) throws ExchangisOnEventException; + + /** + *
Info update + * @param infoUpdateEvent update event + */ + void onLaunch(TaskLaunchEvent infoUpdateEvent) throws ExchangisOnEventException; + + /** + * Delete + * @param deleteEvent delete event + */ + void onDelete(TaskDeleteEvent deleteEvent) throws ExchangisOnEventException; + + /** + * Dequeue event + * @param dequeueEvent dequeue event + * @throws ExchangisOnEventException exception + */ + void onDequeue(TaskDequeueEvent dequeueEvent) throws ExchangisOnEventException; + + /** + * Progress update + * @param updateEvent update event + */ + void onProgressUpdate(TaskProgressUpdateEvent updateEvent) throws ExchangisOnEventException; + +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/TaskManager.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/TaskManager.java new file mode 100644 index 000000000..80d4bf9de --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/TaskManager.java @@ -0,0 +1,78 @@ +package com.webank.wedatasphere.exchangis.job.server.execution; + +import com.webank.wedatasphere.exchangis.job.domain.ExchangisTask; +import com.webank.wedatasphere.exchangis.job.launcher.domain.task.TaskProgressInfo; +import com.webank.wedatasphere.exchangis.job.launcher.domain.task.TaskStatus; +import com.webank.wedatasphere.exchangis.job.listener.JobLogListener; +import com.webank.wedatasphere.exchangis.job.server.log.JobServerLogging; + +import java.util.List; +import java.util.Map; + +/** + * Task manager + */ +public interface TaskManager extends JobServerLogging { + + + List getRunningTasks(); + + /** + * Cancel running task + * @param taskId task id + */ + void cancelRunningTask(String taskId); + + /** + * Add running task to manager + * @param task running task + */ + void addRunningTask(T task); + + /** + * Remove the running task + * @param taskId task id + */ + void removeRunningTask(String taskId); + + /** + * Refresh running task metrics + * @param task + */ + boolean refreshRunningTaskMetrics(T task, Map metricsMap); + + + /** + * Refresh running task status + * @param task + * @param status + * @return + */ + boolean refreshRunningTaskStatus(T task, TaskStatus status); + + /** + * Refresh running task status and metrics + * @param task task + * @param status status + * @param metricsMap metric map + * @return + */ + boolean refreshRunningTaskStatusAndMetrics(T task, TaskStatus status, Map metricsMap); + + /** + * Refresh progress + * @param task + * @param progressInfo + * @return + */ + boolean refreshRunningTaskProgress(T task, TaskProgressInfo progressInfo); + /** + * Get running task + * @param taskId task id + * @return T + */ + T getRunningTask(String taskId); + + JobLogListener getJobLogListener(); + +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/events/TaskDeleteEvent.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/events/TaskDeleteEvent.java new file mode 100644 index 000000000..ec466f42a --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/events/TaskDeleteEvent.java @@ -0,0 +1,24 @@ +package com.webank.wedatasphere.exchangis.job.server.execution.events; + +public class TaskDeleteEvent extends TaskExecutionEvent { + + private String taskId; + + public 
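The listener interface above is designed so implementations override only the callbacks they care about: the default onEvent method routes each event to a typed handler through instanceof checks. A simplified sketch of that default-method dispatch, with invented event names:

```java
public class ListenerDispatchSketch {

    interface Event { }
    static class StatusEvent implements Event { }
    static class MetricsEvent implements Event { }

    interface Listener {
        /** Default dispatch: route the generic event to a typed callback. */
        default void onEvent(Event e) {
            if (e instanceof StatusEvent) {
                onStatus((StatusEvent) e);
            } else if (e instanceof MetricsEvent) {
                onMetrics((MetricsEvent) e);
            }
        }
        void onStatus(StatusEvent e);
        void onMetrics(MetricsEvent e);
    }

    public static void main(String[] args) {
        Listener listener = new Listener() {
            @Override public void onStatus(StatusEvent e) { System.out.println("status"); }
            @Override public void onMetrics(MetricsEvent e) { System.out.println("metrics"); }
        };
        listener.onEvent(new StatusEvent()); // prints "status"
    }
}
```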
TaskDeleteEvent(String taskId) { + super(null); + this.taskId = taskId; + } + + @Override + public String eventId() { + return "_TaskExecution_" + this.taskId; + } + + public String getTaskId() { + return taskId; + } + + public void setTaskId(String taskId) { + this.taskId = taskId; + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/events/TaskDequeueEvent.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/events/TaskDequeueEvent.java new file mode 100644 index 000000000..900bbfd1e --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/events/TaskDequeueEvent.java @@ -0,0 +1,32 @@ +package com.webank.wedatasphere.exchangis.job.server.execution.events; + + +/** + * Event that remove the launchable task from the queue(table) + */ +public class TaskDequeueEvent extends TaskExecutionEvent{ + /** + * Task id + */ + private String taskId; + /** + * @param taskId task id + */ + public TaskDequeueEvent(String taskId) { + super(null); + this.taskId = taskId; + } + + @Override + public String eventId() { + return "_TaskExecution_" + this.taskId; + } + + public String getTaskId() { + return taskId; + } + + public void setTaskId(String taskId) { + this.taskId = taskId; + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/events/TaskExecutionEvent.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/events/TaskExecutionEvent.java new file mode 100644 index 000000000..b0523a552 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/events/TaskExecutionEvent.java @@ -0,0 +1,37 @@ +package com.webank.wedatasphere.exchangis.job.server.execution.events; + +import com.webank.wedatasphere.exchangis.job.launcher.domain.LaunchedExchangisTask; +import com.webank.wedatasphere.exchangis.job.listener.ExchangisEvent; + +public class TaskExecutionEvent implements ExchangisEvent { + private long eventTime; + + private LaunchedExchangisTask launchedExchangisTask; + public TaskExecutionEvent(LaunchedExchangisTask task){ + this.eventTime = System.currentTimeMillis(); + this.launchedExchangisTask = task; + } + @Override + public String eventId() { + return "_TaskExecution_" + launchedExchangisTask.getTaskId(); + } + + @Override + public void setEventId(String eventId) { + //null + } + + public LaunchedExchangisTask getLaunchedExchangisTask() { + return launchedExchangisTask; + } + + @Override + public long getEventTime() { + return this.eventTime; + } + + @Override + public void setEventTime(long timestamp) { + this.eventTime = timestamp; + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/events/TaskLaunchEvent.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/events/TaskLaunchEvent.java new file mode 100644 index 000000000..cb4e034be --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/events/TaskLaunchEvent.java @@ -0,0 +1,13 @@ +package com.webank.wedatasphere.exchangis.job.server.execution.events; + +import com.webank.wedatasphere.exchangis.job.launcher.domain.LaunchedExchangisTask; + +/** + * Insert event + */ +public class 
TaskLaunchEvent extends TaskExecutionEvent{ + + public TaskLaunchEvent(LaunchedExchangisTask task) { + super(task); + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/events/TaskMetricsUpdateEvent.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/events/TaskMetricsUpdateEvent.java new file mode 100644 index 000000000..3d46d9fd1 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/events/TaskMetricsUpdateEvent.java @@ -0,0 +1,27 @@ +package com.webank.wedatasphere.exchangis.job.server.execution.events; + +import com.webank.wedatasphere.exchangis.job.launcher.domain.LaunchedExchangisTask; + +import java.util.HashMap; +import java.util.Map; + +/** + * Updating of task info + */ +public class TaskMetricsUpdateEvent extends TaskExecutionEvent{ + + private Map metrics = new HashMap<>(); + + public TaskMetricsUpdateEvent(LaunchedExchangisTask task, Map metrics) { + super(task); + this.metrics = metrics; + } + + public Map getMetrics() { + return metrics; + } + + public void setMetrics(Map metrics) { + this.metrics = metrics; + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/events/TaskProgressUpdateEvent.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/events/TaskProgressUpdateEvent.java new file mode 100644 index 000000000..8339c53a9 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/events/TaskProgressUpdateEvent.java @@ -0,0 +1,25 @@ +package com.webank.wedatasphere.exchangis.job.server.execution.events; + +import com.webank.wedatasphere.exchangis.job.launcher.domain.LaunchedExchangisTask; +import com.webank.wedatasphere.exchangis.job.launcher.domain.task.TaskProgressInfo; + +/** + * Update of task progress + */ +public class TaskProgressUpdateEvent extends TaskExecutionEvent{ + + private TaskProgressInfo progressInfo; + + public TaskProgressUpdateEvent(LaunchedExchangisTask task, TaskProgressInfo progressInfo){ + super(task); + this.progressInfo = progressInfo; + } + + public TaskProgressInfo getProgressInfo() { + return progressInfo; + } + + public void setProgressInfo(TaskProgressInfo progressInfo) { + this.progressInfo = progressInfo; + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/events/TaskStatusUpdateEvent.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/events/TaskStatusUpdateEvent.java new file mode 100644 index 000000000..33754ead9 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/events/TaskStatusUpdateEvent.java @@ -0,0 +1,25 @@ +package com.webank.wedatasphere.exchangis.job.server.execution.events; + +import com.webank.wedatasphere.exchangis.job.launcher.domain.LaunchedExchangisTask; +import com.webank.wedatasphere.exchangis.job.launcher.domain.task.TaskStatus; + +/** + * Updating of task status + */ +public class TaskStatusUpdateEvent extends TaskExecutionEvent{ + + private TaskStatus updateStatus; + + public TaskStatusUpdateEvent(LaunchedExchangisTask task, TaskStatus status) { + super(task); + this.updateStatus = status; + } + + public TaskStatus 
getUpdateStatus() { + return updateStatus; + } + + public void setUpdateStatus(TaskStatus updateStatus) { + this.updateStatus = updateStatus; + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/generator/AbstractTaskGenerator.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/generator/AbstractTaskGenerator.java new file mode 100644 index 000000000..f6be48332 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/generator/AbstractTaskGenerator.java @@ -0,0 +1,136 @@ +package com.webank.wedatasphere.exchangis.job.server.execution.generator; + +import com.webank.wedatasphere.exchangis.job.builder.manager.DefaultExchangisJobBuilderManager; +import com.webank.wedatasphere.exchangis.job.builder.manager.ExchangisJobBuilderManager; +import com.webank.wedatasphere.exchangis.job.domain.ExchangisJobInfo; +import com.webank.wedatasphere.exchangis.job.exception.ExchangisJobException; +import com.webank.wedatasphere.exchangis.job.launcher.domain.LaunchableExchangisJob; +import com.webank.wedatasphere.exchangis.job.listener.JobLogListener; +import com.webank.wedatasphere.exchangis.job.listener.events.JobLogEvent; +import com.webank.wedatasphere.exchangis.job.server.exception.ExchangisTaskGenerateException; +import com.webank.wedatasphere.exchangis.job.server.execution.generator.events.TaskGenerateErrorEvent; +import com.webank.wedatasphere.exchangis.job.server.execution.generator.events.TaskGenerateEvent; +import com.webank.wedatasphere.exchangis.job.server.execution.generator.events.TaskGenerateInitEvent; +import com.webank.wedatasphere.exchangis.job.server.execution.generator.events.TaskGenerateSuccessEvent; +import org.apache.linkis.common.exception.ErrorException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.*; + +/** + * Contains the main progress for generating + */ +public abstract class AbstractTaskGenerator implements TaskGenerator { + + private static final Logger LOG = LoggerFactory.getLogger(AbstractTaskGenerator.class); + + private List listeners = new ArrayList<>(); + + protected TaskGeneratorContext generatorContext; + + @Override + public void init() throws ExchangisJobException { + } + + @Override + public LaunchableExchangisJob init(ExchangisJobInfo jobInfo) throws ExchangisTaskGenerateException { + Calendar calendar = Calendar.getInstance(); + LaunchableExchangisJob launchableExchangisJob = new LaunchableExchangisJob(); + launchableExchangisJob.setExchangisJobInfo(jobInfo); + launchableExchangisJob.setName(jobInfo.getName()); + launchableExchangisJob.setEngineType(jobInfo.getEngineType()); + launchableExchangisJob.setJobLabel(jobInfo.getJobLabel()); + launchableExchangisJob.setCreateTime(calendar.getTime()); + launchableExchangisJob.setLastUpdateTime(calendar.getTime()); + launchableExchangisJob.setId(jobInfo.getId()); + launchableExchangisJob.setExecUser(jobInfo.getExecuteUser()); + launchableExchangisJob.setCreateUser(jobInfo.getCreateUser()); + // Generate launchable exchangis job id to UUID + launchableExchangisJob.setJobExecutionId(UUID.randomUUID().toString()); + LOG.info("Generate job execution id: [{}] for job: [{}]" , launchableExchangisJob.getJobExecutionId(), launchableExchangisJob.getExchangisJobInfo().getName()); + onEvent(new TaskGenerateInitEvent(launchableExchangisJob)); + return launchableExchangisJob; + } + + @Override + public 
LaunchableExchangisJob generate(LaunchableExchangisJob launchableExchangisJob, String tenancy) throws ExchangisTaskGenerateException { + if (Objects.isNull(launchableExchangisJob.getExchangisJobInfo())){ + throw new ExchangisTaskGenerateException("Job info of launchableExchangisJob cannot be empty", null); + } + launchableExchangisJob.setCreateUser(tenancy); + try { + execute(launchableExchangisJob, getTaskGeneratorContext(), tenancy); + } catch(ErrorException e){ + if (e instanceof ExchangisTaskGenerateException){ + throw (ExchangisTaskGenerateException)e; + } + throw new ExchangisTaskGenerateException("Error occurred in generating progress", e); + } + return launchableExchangisJob; + } + + @Override + public LaunchableExchangisJob generate(LaunchableExchangisJob launchableExchangisJob) throws ExchangisTaskGenerateException { + return generate(launchableExchangisJob, launchableExchangisJob.getCreateUser()); + } + + + @Override + public TaskGeneratorContext getTaskGeneratorContext() { + return generatorContext; + } + + + /** + * Use the default job builder manager + * @return default manager + */ + @Override + public ExchangisJobBuilderManager getExchangisJobBuilderManager() { + return new DefaultExchangisJobBuilderManager(); + } + + @Override + public void addListener(TaskGenerateListener taskGenerateListener) { + listeners.add(taskGenerateListener); + } + + /** + * Listeners listen generate event method + * @param taskGenerateEvent event + * @throws ExchangisTaskGenerateException + */ + protected void onEvent(TaskGenerateEvent taskGenerateEvent) throws ExchangisTaskGenerateException{ + for (TaskGenerateListener listener : listeners) { + try { + listener.onEvent(taskGenerateEvent); + } catch (ErrorException e) { + throw new ExchangisTaskGenerateException("Fail to call 'onEvent' method in generator listener: [" + listener.getClass().getSimpleName() + + "] for event: [id: " + taskGenerateEvent.eventId() +", type:" + taskGenerateEvent.getClass().getSimpleName() +"]", e); + } + } + if (taskGenerateEvent instanceof TaskGenerateInitEvent){ + info(taskGenerateEvent.getLaunchableExchangisJob(), "Init to create launched job and begin generating"); + } else if (taskGenerateEvent instanceof TaskGenerateSuccessEvent){ + info(taskGenerateEvent.getLaunchableExchangisJob(), "Success to generate launched job, output tasks [{}]", + taskGenerateEvent.getLaunchableExchangisJob().getLaunchableExchangisTasks().size()); + } else if (taskGenerateEvent instanceof TaskGenerateErrorEvent){ + error(taskGenerateEvent.getLaunchableExchangisJob(), "Error occurred in generating", + ((TaskGenerateErrorEvent)taskGenerateEvent).getException()); + } + } + + @Override + public JobLogListener getJobLogListener() { + return getTaskGeneratorContext().getJobLogListener(); + } + + @Override + public JobLogEvent getJobLogEvent(JobLogEvent.Level level, LaunchableExchangisJob job, String message, Object... 
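onEvent above fans a generate event out to every registered listener and aborts generation as soon as one listener fails, wrapping the cause in a generate exception. The sketch below reproduces just that fail-fast fan-out with plain Java types, independent of the Exchangis listener classes:

```java
import java.util.ArrayList;
import java.util.List;

public class FanOutSketch {

    interface GenerateListener { void onEvent(String event) throws Exception; }

    static final List<GenerateListener> LISTENERS = new ArrayList<>();

    /** The first failing listener aborts the fan-out with a wrapped exception. */
    static void fireEvent(String event) {
        for (GenerateListener listener : LISTENERS) {
            try {
                listener.onEvent(event);
            } catch (Exception e) {
                throw new IllegalStateException("Fail to call 'onEvent' in listener ["
                        + listener.getClass().getSimpleName() + "] for event: " + event, e);
            }
        }
    }

    public static void main(String[] args) {
        LISTENERS.add(e -> System.out.println("saw " + e));
        fireEvent("generate-success");
    }
}
```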
args) { + return new JobLogEvent(level, job.getCreateUser(), job.getJobExecutionId(), message, args); + } + + protected abstract void execute(LaunchableExchangisJob launchableExchangisJob, TaskGeneratorContext ctx, String tenancy) throws ErrorException; +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/generator/DefaultTaskGenerator.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/generator/DefaultTaskGenerator.java new file mode 100644 index 000000000..a14d22182 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/generator/DefaultTaskGenerator.java @@ -0,0 +1,128 @@ +package com.webank.wedatasphere.exchangis.job.server.execution.generator; + +import com.webank.wedatasphere.exchangis.job.builder.ExchangisJobBuilderContext; +import com.webank.wedatasphere.exchangis.job.builder.manager.ExchangisJobBuilderManager; +import com.webank.wedatasphere.exchangis.job.domain.ExchangisEngineJob; +import com.webank.wedatasphere.exchangis.job.domain.ExchangisJobInfo; +import com.webank.wedatasphere.exchangis.job.domain.SubExchangisJob; +import com.webank.wedatasphere.exchangis.job.exception.ExchangisJobException; +import com.webank.wedatasphere.exchangis.job.launcher.domain.LaunchableExchangisJob; +import com.webank.wedatasphere.exchangis.job.launcher.domain.LaunchableExchangisTask; +import com.webank.wedatasphere.exchangis.job.server.builder.SpringExchangisJobBuilderContext; +import com.webank.wedatasphere.exchangis.job.server.builder.transform.TransformExchangisJob; +import com.webank.wedatasphere.exchangis.job.server.exception.ExchangisTaskGenerateException; +import com.webank.wedatasphere.exchangis.job.server.execution.generator.events.TaskGenerateErrorEvent; +import com.webank.wedatasphere.exchangis.job.server.execution.generator.events.TaskGenerateSuccessEvent; +import com.webank.wedatasphere.exchangis.job.utils.SnowFlake; +import org.apache.linkis.common.conf.CommonVars; +import org.apache.linkis.common.exception.ErrorException; + +import java.util.ArrayList; +import java.util.List; +import java.util.Objects; +import java.util.Optional; + +/** + * Executes asynchronously: + * constructs a JobGenerationSchedulerTask and then submits it to the TaskExecution + */ +public class DefaultTaskGenerator extends AbstractTaskGenerator { + + private static class Constraints { + private static final CommonVars<Long> TASK_ID_GENERATOR_DATA_CENTER = CommonVars.apply("wds.exchangis.job.task.generator.id.data-center", 1L); + + private static final CommonVars<Long> TASK_ID_GENERATOR_WORKER = CommonVars.apply("wds.exchangis.job.task.generator.id.worker", 1L); + + private static final CommonVars<Long> TASK_ID_GENERATOR_START_TIME = CommonVars.apply("wds.exchangis.job.task.generator.id.start-time", 1238434978657L); + } + + protected TaskGeneratorContext ctx; + + private final ExchangisJobBuilderManager jobBuilderManager; + + /** + * Generates task ids + */ + private SnowFlake idGenerator; + + public DefaultTaskGenerator(TaskGeneratorContext ctx, ExchangisJobBuilderManager jobBuilderManager) { + this.ctx = ctx; + this.jobBuilderManager = jobBuilderManager; + } + + @Override + public TaskGeneratorContext getTaskGeneratorContext() { + return ctx; + } + + @Override + public void init() throws ExchangisJobException { + super.init(); + idGenerator = new SnowFlake(Constraints.TASK_ID_GENERATOR_DATA_CENTER.getValue(), Constraints.TASK_ID_GENERATOR_WORKER.getValue(),
+ Constraints.TASK_ID_GENERATOR_START_TIME.getValue()); + } + + @Override + protected void execute(LaunchableExchangisJob launchableExchangisJob, + TaskGeneratorContext generatorContext, String tenancy) throws ErrorException { + ExchangisTaskGenerateException throwable; + ExchangisJobInfo jobInfo = launchableExchangisJob.getExchangisJobInfo(); + List launchableExchangisTasks = new ArrayList<>(); + if (Objects.isNull(jobInfo)){ + throwable = new ExchangisTaskGenerateException("Job information is empty in launchable exchangis job", null); + onEvent(new TaskGenerateErrorEvent(launchableExchangisJob, throwable)); + throw throwable; + } + ExchangisJobBuilderManager jobBuilderManager = getExchangisJobBuilderManager(); + ExchangisJobBuilderContext ctx; + if (generatorContext instanceof SpringTaskGeneratorContext){ + // Spring job builder context + ctx = new SpringExchangisJobBuilderContext(jobInfo, + ((SpringTaskGeneratorContext) generatorContext).getApplicationContext(), + generatorContext.getJobLogListener()); + ((SpringExchangisJobBuilderContext)ctx).setJobExecutionId(launchableExchangisJob.getJobExecutionId()); + } else { + ctx = new ExchangisJobBuilderContext(jobInfo); + } + ctx.putEnv("USER_NAME", tenancy); + // ExchangisJobInfo -> TransformExchangisJob(SubExchangisJob) + try { + TransformExchangisJob transformJob = jobBuilderManager.doBuild(jobInfo, TransformExchangisJob.class, ctx); + List engineJobs = new ArrayList<>(); + for (SubExchangisJob subExchangisJob : transformJob.getSubJobSet()){ + // Will deal with the parameters in source/sink of job + Optional.ofNullable(jobBuilderManager.doBuild(subExchangisJob, + SubExchangisJob.class, ExchangisEngineJob.class, ctx)).ifPresent(engineJobs::add); + } + // List -> List + for (ExchangisEngineJob engineJob : engineJobs){ + Optional.ofNullable(jobBuilderManager.doBuild(engineJob, + ExchangisEngineJob.class, LaunchableExchangisTask.class, ctx)).ifPresent(launchableExchangisTasks :: add); + } + if (launchableExchangisTasks.isEmpty()){ + throw new ExchangisTaskGenerateException("The result set of launchable tasks is empty, please examine your launchable job entity," + + " content: [" + jobInfo.getJobContent() + "]", null); + } + // Create task id + launchableExchangisTasks.forEach(task -> task.setId(idGenerator.nextId())); + launchableExchangisJob.setLaunchableExchangisTasks(launchableExchangisTasks); + onEvent(new TaskGenerateSuccessEvent(launchableExchangisJob)); + } catch (Exception e) { + if (e instanceof ExchangisTaskGenerateException){ + // Just throws the generate exception + throwable = (ExchangisTaskGenerateException)e; + } else { + throwable = new ExchangisTaskGenerateException("Error in generating launchable tasks", e); + } + onEvent(new TaskGenerateErrorEvent(launchableExchangisJob, throwable)); + throw throwable; + } + } + + @Override + public ExchangisJobBuilderManager getExchangisJobBuilderManager() { + return jobBuilderManager; + } + +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/generator/SpringTaskGeneratorContext.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/generator/SpringTaskGeneratorContext.java new file mode 100644 index 000000000..c591448ed --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/generator/SpringTaskGeneratorContext.java @@ -0,0 +1,35 @@ +package 
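The execute() method above is a staged pipeline: the job info is built into a transform job, each sub job into an engine job, each engine job into a launchable task, and an empty final result is treated as an error. The schematic sketch below models those stages as plain functions over strings; it is not the builder API itself.

```java
import java.util.Arrays;
import java.util.List;
import java.util.function.Function;
import java.util.stream.Collectors;

public class StagedPipelineSketch {

    // Stand-ins for: job -> sub jobs, sub job -> engine job, engine job -> launchable task
    static final Function<String, List<String>> SPLIT = job -> Arrays.asList(job + "#sub1", job + "#sub2");
    static final Function<String, String> TO_ENGINE_JOB = sub -> "engine(" + sub + ")";
    static final Function<String, String> TO_TASK = engine -> "task(" + engine + ")";

    public static void main(String[] args) {
        List<String> tasks = SPLIT.apply("jobInfo").stream()
                .map(TO_ENGINE_JOB)
                .map(TO_TASK)
                .collect(Collectors.toList());
        if (tasks.isEmpty()) { // mirrors the empty-result check in execute()
            throw new IllegalStateException("The result set of launchable tasks is empty");
        }
        System.out.println(tasks); // [task(engine(jobInfo#sub1)), task(engine(jobInfo#sub2))]
    }
}
```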
com.webank.wedatasphere.exchangis.job.server.execution.generator; + +import com.webank.wedatasphere.exchangis.job.listener.JobLogListener; +import org.springframework.context.ApplicationContext; + +/** + * Spring generator context (with application context) + */ +public class SpringTaskGeneratorContext implements TaskGeneratorContext { + + private JobLogListener jobLogListener; + + /** + * Spring application context + */ + private ApplicationContext applicationContext; + + public SpringTaskGeneratorContext(){ + + } + public SpringTaskGeneratorContext(JobLogListener jobLogListener, + ApplicationContext applicationContext){ + this.jobLogListener = jobLogListener; + this.applicationContext = applicationContext; + } + + @Override + public JobLogListener getJobLogListener() { + return this.jobLogListener; + } + + public ApplicationContext getApplicationContext() { + return this.applicationContext; + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/generator/TaskGenerateListener.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/generator/TaskGenerateListener.java new file mode 100644 index 000000000..755499205 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/generator/TaskGenerateListener.java @@ -0,0 +1,45 @@ +package com.webank.wedatasphere.exchangis.job.server.execution.generator; + +import com.webank.wedatasphere.exchangis.job.exception.ExchangisOnEventException; +import com.webank.wedatasphere.exchangis.job.server.execution.generator.events.TaskGenerateErrorEvent; +import com.webank.wedatasphere.exchangis.job.server.execution.generator.events.TaskGenerateEvent; +import com.webank.wedatasphere.exchangis.job.listener.ExchangisListener; +import com.webank.wedatasphere.exchangis.job.server.execution.generator.events.TaskGenerateInitEvent; +import com.webank.wedatasphere.exchangis.job.server.execution.generator.events.TaskGenerateSuccessEvent; + +/** + * Listener of task generating + */ +public interface TaskGenerateListener extends ExchangisListener { + + @Override + default void onEvent(TaskGenerateEvent event) throws ExchangisOnEventException { + getLogger().trace("Event: [id: {}, type: {}] in listener [{}]", event.eventId(), event.getClass().getSimpleName(), + this.getClass().getSimpleName()); + if (event instanceof TaskGenerateErrorEvent){ + onError((TaskGenerateErrorEvent) event); + } else if (event instanceof TaskGenerateInitEvent){ + onInit((TaskGenerateInitEvent)event); + } else if (event instanceof TaskGenerateSuccessEvent){ + onSuccess((TaskGenerateSuccessEvent)event); + } + } + + /** + * Listen error + * @param errorEvent error event + */ + void onError(TaskGenerateErrorEvent errorEvent); + + /** + * Listen init + * @param initEvent init event + */ + void onInit(TaskGenerateInitEvent initEvent); + + /** + * Listen success + * @param successEvent success event + */ + void onSuccess(TaskGenerateSuccessEvent successEvent); +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/generator/TaskGenerator.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/generator/TaskGenerator.java new file mode 100644 index 000000000..107e2c51e --- /dev/null +++ 
b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/generator/TaskGenerator.java @@ -0,0 +1,54 @@ +package com.webank.wedatasphere.exchangis.job.server.execution.generator; + +import com.webank.wedatasphere.exchangis.job.builder.manager.ExchangisJobBuilderManager; +import com.webank.wedatasphere.exchangis.job.domain.ExchangisJob; +import com.webank.wedatasphere.exchangis.job.domain.ExchangisJobInfo; +import com.webank.wedatasphere.exchangis.job.exception.ExchangisJobException; +import com.webank.wedatasphere.exchangis.job.server.exception.ExchangisTaskGenerateException; +import com.webank.wedatasphere.exchangis.job.server.log.JobServerLogging; + +/** + * To generate tasks for execution + */ +public interface TaskGenerator<T extends ExchangisJob> extends JobServerLogging<T> { + /** + * Init method + * @throws ExchangisJobException error in initializing + */ + void init() throws ExchangisJobException; + + /** + * Init the job info to a suitable input + * @param jobInfo job info + * @return launchable job generated from the job info + */ + T init(ExchangisJobInfo jobInfo) throws ExchangisTaskGenerateException; + + /** + * Generate exchangis job (has tasks) + * @param exchangisJob job extends ExchangisJob + * @param tenancy act as exec user + * @return job that has been handled + * @throws ExchangisTaskGenerateException exception in generating + */ + T generate(T exchangisJob, String tenancy) throws ExchangisTaskGenerateException; + + T generate(T exchangisJob) throws ExchangisTaskGenerateException; + + /** + * Get generator context + * @return context + */ + TaskGeneratorContext getTaskGeneratorContext(); + + /** + * Get job builder manager + * @return builder manager + */ + ExchangisJobBuilderManager getExchangisJobBuilderManager(); + + /** + * Add listener + * @param taskGenerateListener listener + */ + void addListener(TaskGenerateListener taskGenerateListener); +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/generator/TaskGeneratorContext.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/generator/TaskGeneratorContext.java new file mode 100644 index 000000000..0c45979a5 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/generator/TaskGeneratorContext.java @@ -0,0 +1,16 @@ +package com.webank.wedatasphere.exchangis.job.server.execution.generator; + +import com.webank.wedatasphere.exchangis.job.listener.JobLogListener; + +/** + * Generator context + */ +public interface TaskGeneratorContext { + + /** + * Job log listener + * @return listener + */ + JobLogListener getJobLogListener(); + +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/generator/events/TaskGenerateErrorEvent.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/generator/events/TaskGenerateErrorEvent.java new file mode 100644 index 000000000..70076a9e8 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/generator/events/TaskGenerateErrorEvent.java @@ -0,0 +1,21 @@ +package com.webank.wedatasphere.exchangis.job.server.execution.generator.events; + +import com.webank.wedatasphere.exchangis.job.launcher.domain.LaunchableExchangisJob; + +/** + * Error event + */ +public class TaskGenerateErrorEvent extends TaskGenerateEvent { + + private Throwable exception; + + public
TaskGenerateErrorEvent(LaunchableExchangisJob launchableExchangisJob, Throwable e) { + super(System.currentTimeMillis(), launchableExchangisJob); + this.exception = e; + } + + public Throwable getException() { + return exception; + } + +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/generator/events/TaskGenerateEvent.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/generator/events/TaskGenerateEvent.java new file mode 100644 index 000000000..c46744af3 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/generator/events/TaskGenerateEvent.java @@ -0,0 +1,44 @@ +package com.webank.wedatasphere.exchangis.job.server.execution.generator.events; + +import com.webank.wedatasphere.exchangis.job.launcher.domain.LaunchableExchangisJob; +import com.webank.wedatasphere.exchangis.job.listener.ExchangisEvent; + +public class TaskGenerateEvent implements ExchangisEvent { + + private long eventTime; + + private LaunchableExchangisJob launchableExchangisJob; + + public TaskGenerateEvent(long eventTime, LaunchableExchangisJob launchableExchangisJob){ + this.eventTime = eventTime; + this.launchableExchangisJob = launchableExchangisJob; + } + + @Override + public String eventId() { + return "_TaskGenerate_" + launchableExchangisJob.getJobExecutionId(); + } + + @Override + public void setEventId(String eventId) { + //null + } + + @Override + public long getEventTime() { + return this.eventTime; + } + + @Override + public void setEventTime(long timestamp) { + this.eventTime = timestamp; + } + + public LaunchableExchangisJob getLaunchableExchangisJob() { + return launchableExchangisJob; + } + + public void setLaunchableExchangisJob(LaunchableExchangisJob launchableExchangisJob) { + this.launchableExchangisJob = launchableExchangisJob; + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/generator/events/TaskGenerateInitEvent.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/generator/events/TaskGenerateInitEvent.java new file mode 100644 index 000000000..a57c28e4c --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/generator/events/TaskGenerateInitEvent.java @@ -0,0 +1,12 @@ +package com.webank.wedatasphere.exchangis.job.server.execution.generator.events; + +import com.webank.wedatasphere.exchangis.job.launcher.domain.LaunchableExchangisJob; + +/** + * Init event + */ +public class TaskGenerateInitEvent extends TaskGenerateEvent{ + public TaskGenerateInitEvent(LaunchableExchangisJob launchableExchangisJob) { + super(System.currentTimeMillis(), launchableExchangisJob); + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/generator/events/TaskGenerateSuccessEvent.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/generator/events/TaskGenerateSuccessEvent.java new file mode 100644 index 000000000..7b0ee3205 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/generator/events/TaskGenerateSuccessEvent.java @@ -0,0 +1,30 @@ +package com.webank.wedatasphere.exchangis.job.server.execution.generator.events; + +import 
com.webank.wedatasphere.exchangis.job.launcher.domain.LaunchableExchangisJob; +import com.webank.wedatasphere.exchangis.job.launcher.domain.LaunchableExchangisTask; + +import java.util.List; + +/** + * Success event + */ +public class TaskGenerateSuccessEvent extends TaskGenerateEvent{ + + /** + * Generate result + */ + private List taskGenerated; + + public TaskGenerateSuccessEvent(LaunchableExchangisJob launchableExchangisJob) { + super(System.currentTimeMillis(), launchableExchangisJob); + taskGenerated = launchableExchangisJob.getLaunchableExchangisTasks(); + } + + public List getTaskGenerated() { + return taskGenerated; + } + + public void setTaskGenerated(List taskGenerated) { + this.taskGenerated = taskGenerated; + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/loadbalance/AbstractTaskSchedulerLoadBalancer.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/loadbalance/AbstractTaskSchedulerLoadBalancer.java new file mode 100644 index 000000000..9d84b93c7 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/loadbalance/AbstractTaskSchedulerLoadBalancer.java @@ -0,0 +1,96 @@ +package com.webank.wedatasphere.exchangis.job.server.execution.loadbalance; + +import com.webank.wedatasphere.exchangis.job.launcher.domain.LaunchedExchangisTask; +import com.webank.wedatasphere.exchangis.job.server.exception.ExchangisTaskExecuteException; +import com.webank.wedatasphere.exchangis.job.server.exception.ExchangisTaskObserverException; +import com.webank.wedatasphere.exchangis.job.server.execution.TaskManager; +import com.webank.wedatasphere.exchangis.job.server.execution.scheduler.tasks.LoadBalanceSchedulerTask; +import com.webank.wedatasphere.exchangis.job.utils.TypeGenericUtils; +import org.apache.linkis.common.conf.CommonVars; +import org.apache.linkis.scheduler.Scheduler; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.ArrayList; +import java.util.List; +import java.util.Objects; +import java.util.Optional; +import java.util.concurrent.Future; + +/** + * Scheduler load balancer for launched task + */ +public abstract class AbstractTaskSchedulerLoadBalancer implements TaskSchedulerLoadBalancer { + + private static final Logger LOG = LoggerFactory.getLogger(AbstractTaskSchedulerLoadBalancer.class); + + protected TaskManager taskManager; + + protected Scheduler scheduler; + + protected List> registeredTaskClasses = new ArrayList<>(); + + + + public AbstractTaskSchedulerLoadBalancer(Scheduler scheduler, TaskManager taskManager){ + this.taskManager = taskManager; + this.scheduler = scheduler; + } + @Override + public TaskManager getTaskManager() { + return this.taskManager; + } + + @Override + public void registerSchedulerTask(Class schedulerTaskClass){ + if(isSuitableClass(schedulerTaskClass)){ + LOG.info("Register the load balance scheduler class: [{}]", schedulerTaskClass.getName()); + registeredTaskClasses.add(schedulerTaskClass); + } + } + + @Override + public List> choose(LaunchedExchangisTask launchedExchangisTask) { + List> schedulerTasks = new ArrayList<>(); + registeredTaskClasses.forEach(taskClass -> { + Optional.ofNullable(choose(launchedExchangisTask, taskClass, false)).ifPresent(schedulerTasks::add); + }); + return schedulerTasks; + } + + @Override + public LoadBalanceSchedulerTask choose(LaunchedExchangisTask launchedExchangisTask, Class 
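registerSchedulerTask above only accepts classes that pass the isSuitableClass gate, logging and skipping everything else instead of failing hard. A reduced sketch of such a validated registry, with invented class names standing in for the scheduler task types:

```java
import java.util.ArrayList;
import java.util.List;

public class ValidatedRegistrySketch {

    interface BalancedTask { }
    static class StatusPoller implements BalancedTask { }
    static class NotATask { }

    static final List<Class<?>> REGISTERED = new ArrayList<>();

    /** Register only classes implementing the expected contract; warn and skip otherwise. */
    static void register(Class<?> candidate) {
        if (BalancedTask.class.isAssignableFrom(candidate)) {
            REGISTERED.add(candidate);
        } else {
            System.err.println("Not a load balance scheduler task class: " + candidate.getName());
        }
    }

    public static void main(String[] args) {
        register(StatusPoller.class);
        register(NotATask.class);
        System.out.println(REGISTERED.size()); // 1
    }
}
```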
schedulerTaskClass) { + return choose(launchedExchangisTask, schedulerTaskClass, false); + } + + + /** + * Choose entrance + * @param launchedExchangisTask task + * @param schedulerTaskClass task class + * @param unchecked if checked + * @return scheduler task + */ + protected abstract LoadBalanceSchedulerTask choose(LaunchedExchangisTask launchedExchangisTask, Class schedulerTaskClass, boolean unchecked); + + + @Override + public Scheduler getScheduler() { + return scheduler; + } + + protected boolean isSuitableClass(Class schedulerTaskClass){ + if (LoadBalanceSchedulerTask.class.isAssignableFrom(schedulerTaskClass)){ + Class subType = TypeGenericUtils.getActualTypeFormGenericClass(schedulerTaskClass, null, 0); + if (Objects.isNull(subType) || !subType.equals(LaunchedExchangisTask.class)){ + LOG.warn("Unrecognized generic sub type: [{}] in scheduler", subType); + } else { + return true; + } + } else { + LOG.warn("Not load balance scheduler task class [{}]", schedulerTaskClass.getCanonicalName()); + } + return false; + } + +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/loadbalance/FlexibleTenancyLoadBalancer.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/loadbalance/FlexibleTenancyLoadBalancer.java new file mode 100644 index 000000000..b88cc58b3 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/loadbalance/FlexibleTenancyLoadBalancer.java @@ -0,0 +1,525 @@ +package com.webank.wedatasphere.exchangis.job.server.execution.loadbalance; + +import com.webank.wedatasphere.exchangis.job.launcher.domain.LaunchedExchangisTask; +import com.webank.wedatasphere.exchangis.job.launcher.domain.task.TaskStatus; +import com.webank.wedatasphere.exchangis.job.server.exception.ExchangisTaskExecuteException; +import com.webank.wedatasphere.exchangis.job.server.execution.TaskManager; +import com.webank.wedatasphere.exchangis.job.server.execution.scheduler.*; +import com.webank.wedatasphere.exchangis.job.server.execution.scheduler.loadbalance.LoadBalancePoller; +import com.webank.wedatasphere.exchangis.job.server.execution.scheduler.tasks.AbstractLoadBalanceSchedulerTask; +import com.webank.wedatasphere.exchangis.job.server.execution.scheduler.tasks.LoadBalanceSchedulerTask; +import org.apache.commons.lang.StringUtils; +import org.apache.linkis.common.conf.CommonVars; +import org.apache.linkis.scheduler.Scheduler; +import org.apache.linkis.scheduler.SchedulerContext; +import org.apache.linkis.scheduler.queue.ConsumerManager; +import org.apache.linkis.scheduler.queue.GroupFactory; +import org.apache.linkis.scheduler.queue.SchedulerEventState; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.lang.reflect.Constructor; +import java.lang.reflect.InvocationTargetException; +import java.util.*; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Future; +import java.util.concurrent.ThreadPoolExecutor; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.locks.ReentrantReadWriteLock; + + +public class FlexibleTenancyLoadBalancer extends AbstractTaskSchedulerLoadBalancer implements SchedulerThread { + + private static final Logger LOG = LoggerFactory.getLogger(FlexibleTenancyLoadBalancer.class); + /** + * key: {tenancy}_{schedulerTask_name} + */ + private ConcurrentHashMap tenancySchedulerTasks 
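The choose() implementation that follows resolves the scheduling tenancy from the task's exec user and falls back to a shared default group when the user is not a configured tenant. A standalone sketch of that fallback rule (the tenancy set here is invented for illustration):

```java
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

public class TenancyResolveSketch {

    static final Set<String> TENANCIES = new HashSet<>(Arrays.asList("hadoop", "etl"));
    static final String DEFAULT_TENANCY = "default";

    /** Map an exec user to a configured tenancy, else fall back to the default group. */
    static String resolveTenancy(String execUser) {
        return TENANCIES.contains(execUser) ? execUser : DEFAULT_TENANCY;
    }

    public static void main(String[] args) {
        System.out.println(resolveTenancy("hadoop")); // hadoop
        System.out.println(resolveTenancy("alice"));  // default
    }
}
```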
= new ConcurrentHashMap<>(); + + static class Constraints{ + private static final CommonVars<Integer> SCHEDULE_INTERVAL = CommonVars.apply("wds.exchangis.job.task.scheduler.load-balancer.flexible.schedule-in-millisecond", 3000); + private static final CommonVars<Double> SEGMENT_MAX_OCCUPY = CommonVars.apply("wds.exchangis.job.task.scheduler.load-balancer.flexible.segments.max-occupy", 0.35d); + private static final CommonVars<Double> SEGMENT_MIN_OCCUPY = CommonVars.apply("wds.exchangis.job.task.scheduler.load-balancer.flexible.segments.min-occupy", 0.15d); + private static final CommonVars<Integer> SEGMENT_ADJUST_STEP = CommonVars.apply("wds.exchangis.job.task.scheduler.load-balancer.flexible.segments.adjust-step", 5); + } + + private boolean isShutdown = false; + + private Future<?> balanceFuture; + + public FlexibleTenancyLoadBalancer(Scheduler scheduler, TaskManager taskManager) { + super(scheduler, taskManager); + } + + + @Override + protected LoadBalanceSchedulerTask<LaunchedExchangisTask> choose(LaunchedExchangisTask launchedExchangisTask, Class<?> schedulerTaskClass, boolean unchecked) { + if (!unchecked || isSuitableClass(schedulerTaskClass)){ + // Fetch the latest info + launchedExchangisTask = getTaskManager().getRunningTask(launchedExchangisTask.getTaskId()); + // A null value means that the task has ended + if (Objects.nonNull(launchedExchangisTask) && !TaskStatus.isCompleted(launchedExchangisTask.getStatus())) { + // Use the exec user as tenancy + String tenancy = launchedExchangisTask.getExecuteUser(); + GroupFactory groupFactory = getScheduler().getSchedulerContext().getOrCreateGroupFactory(); + if (groupFactory instanceof TenancyParallelGroupFactory && + !((TenancyParallelGroupFactory) groupFactory).getTenancies().contains(tenancy)) { + // Unrecognized tenancy name + tenancy = ""; + } + if (StringUtils.isBlank(tenancy)) { + tenancy = TenancyParallelGroupFactory.DEFAULT_TENANCY; + } + // Select one + return getOrCreateSchedulerTaskContainer(tenancy, schedulerTaskClass).select(); + } + + } + return null; + } + + + @SuppressWarnings("unchecked") + private LoadBalanceSchedulerTask<LaunchedExchangisTask> createLoadBalanceSchedulerTask(Class<?> schedulerTaskClass){ + Constructor<?>[] constructors = schedulerTaskClass.getDeclaredConstructors(); + if (constructors.length <= 0){ + throw new ExchangisTaskExecuteException.Runtime("Cannot find any constructors from load balance scheduler task: [" + schedulerTaskClass.getSimpleName() + "]", null); + } + // Use the first constructor + Constructor<?> constructor = constructors[0]; + Object[] parameters = new Object[constructor.getParameterCount()]; + Class<?>[] parameterTypes = constructor.getParameterTypes(); + for (int i = 0; i < parameterTypes.length; i++){ + Class<?> parameterType = parameterTypes[i]; + if (parameterType.isAssignableFrom(TaskManager.class)){ + parameters[i] = getTaskManager(); + } else if (parameterType.isAssignableFrom(Scheduler.class)){ + parameters[i] = getScheduler(); + } else if (parameterType.isAssignableFrom(this.getClass())){ + parameters[i] = this; + } else { + parameters[i] = null; + } + } + try { + LoadBalanceSchedulerTask<LaunchedExchangisTask> loadBalanceSchedulerTask = (LoadBalanceSchedulerTask<LaunchedExchangisTask>) constructor.newInstance(parameters); + // Use the current timestamp as ID + loadBalanceSchedulerTask.setId(String.valueOf(System.currentTimeMillis())); + return loadBalanceSchedulerTask; + } catch (InstantiationException | IllegalAccessException | InvocationTargetException e) { + throw new ExchangisTaskExecuteException.Runtime("Cannot create a new instance of load balance scheduler task: [" + schedulerTaskClass.getSimpleName() + "]", e); + } + }
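+ // The balancer itself is a SchedulerThread: start() submits this runnable to the default executor service, and run() below re-balances every tenancy's segments once per SCHEDULE_INTERVAL (3000 ms by default)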
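+ // Sizing rule (illustrative numbers): given restSize spare threads, the target segment count shrinks toward restSize * SEGMENT_MIN_OCCUPY when every core thread is busy, otherwise grows toward restSize * SEGMENT_MAX_OCCUPY, moving at most SEGMENT_ADJUST_STEP segments per round; e.g. restSize = 20 with the default 0.35 max-occupy targets 7 segments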
+ // Must be more than residentThreads +// if (TenancyParallelGroupFactory.DEFAULT_TENANCY.equals(tenancy) || coreSize > residentThreads){ + int segments = loopCounter.segments.get(); + // TODO fix the problem that residentThreads always equals 1 for non-default consumers + int restSize = TenancyParallelGroupFactory.DEFAULT_TENANCY.equals(tenancy)? coreSize - residentThreads - 1: coreSize - 1; + if (restSize > 0) { + int activeThreads = pool.getActiveCount(); + if (activeThreads >= coreSize) { + // All threads are active, so reduce the number of segments + adjustSegmentNum = Math.min((int) Math.floor((double) restSize * Constraints.SEGMENT_MIN_OCCUPY.getValue()), segments); + } else { + adjustSegmentNum = Math.max((int) Math.floor((double) restSize * Constraints.SEGMENT_MAX_OCCUPY.getValue()), segments); + } + adjustSegmentNum = adjustSegmentNum > segments ? segments + Math.min(adjustStep, adjustSegmentNum - segments) + : segments - Math.min(adjustStep, segments - adjustSegmentNum); + if (segments != adjustSegmentNum) { + // Divide by the number of containers + int average = adjustSegmentNum / loopCounter.containers.get(); + LOG.info("Adjust total number of load balance scheduler task segments for tenancy: [{}] from {} to {}, average {}", + tenancy, segments, adjustSegmentNum, average); + for (int i = 0; i < loopCounter.containers.get(); i++) { + if (i == loopCounter.containers.get() - 1) { + loopCounter.taskContainers.get(i).adjustSegment(adjustSegmentNum); + } else { + loopCounter.taskContainers.get(i).adjustSegment(average); + adjustSegmentNum = adjustSegmentNum - average; + } + } + } + } +// } + } + }); + LOG.trace("End to auto scale-in/out segments of load balance scheduler task"); + } + @Override + public void stop() { + if (Objects.nonNull(this.balanceFuture)){ + this.isShutdown = true; + this.balanceFuture.cancel(true); + this.tenancySchedulerTasks.forEach((tenancy, container) -> { + container.segmentLock.writeLock().lock(); + try{ + for(SchedulerTaskSegment segment : container.segments){ + if (segment.loadBalanceSchedulerTask instanceof AbstractExchangisSchedulerTask){ + ((AbstractExchangisSchedulerTask) segment.loadBalanceSchedulerTask).kill(); + } + } + }finally { + container.segmentLock.writeLock().unlock(); + } + }); + this.tenancySchedulerTasks.clear(); + } + } + + @Override + public String getName() { + return this.getClass().getSimpleName(); + } + + /** + * Get or create scheduler task container + * @param tenancy tenancy name + * @param schedulerTaskClass scheduler task class + * @return container + */ + private SchedulerTaskContainer getOrCreateSchedulerTaskContainer(String tenancy, Class<?> schedulerTaskClass){ + String schedulerTaskName = schedulerTaskClass.getSimpleName(); + return tenancySchedulerTasks.compute(tenancy + "_" + schedulerTaskName,(key, taskContainer) -> { + if (Objects.isNull(taskContainer)){ + LoadBalanceSchedulerTask<LaunchedExchangisTask> headSchedulerTask = createLoadBalanceSchedulerTask(schedulerTaskClass); + if (headSchedulerTask instanceof AbstractLoadBalanceSchedulerTask){ + ((AbstractLoadBalanceSchedulerTask) headSchedulerTask) + .setSchedulerLoadBalancer(FlexibleTenancyLoadBalancer.this); + } + headSchedulerTask.setTenancy(tenancy); + try { + getScheduler().submit(headSchedulerTask); + } catch (Exception e){ + // Only if not enough reserved threads in scheduler + throw new ExchangisTaskExecuteException.Runtime("Not enough reserved threads in scheduler for tenancy: [" + tenancy + + "], load balance scheduler task: [" + schedulerTaskName + "]?
please invoke the setInitResidentThreads(num) method on the consumerManager", e); + } + taskContainer = new SchedulerTaskContainer(headSchedulerTask); + taskContainer.tenancy = tenancy; + LOG.info("Create scheduler task container[ tenancy: {}, load balance scheduler task: {} ]", tenancy, schedulerTaskName); + } + return taskContainer; + }); + } + + /** + * Pre-create task containers for the registered load balance scheduler tasks + */ + private void initLoadBalancerSchedulerTasks(){ + SchedulerContext schedulerContext = getScheduler().getSchedulerContext(); + if (schedulerContext instanceof ExchangisSchedulerContext){ + Optional.ofNullable(((ExchangisSchedulerContext)schedulerContext).getTenancies()).ifPresent(tenancies -> { + tenancies.forEach(tenancy -> { + // Skip the system tenancy + if (!tenancy.startsWith(".")) { + for (Class<?> registeredTaskClass : registeredTaskClasses) { + getOrCreateSchedulerTaskContainer(tenancy, registeredTaskClass); + } + } + }); + }); + // Init scheduler task container for the default tenancy + for (Class<?> registeredTaskClass : registeredTaskClasses) { + getOrCreateSchedulerTaskContainer(TenancyParallelGroupFactory.DEFAULT_TENANCY, registeredTaskClass); + } + } + } + static class LoopCounter { + + AtomicInteger containers = new AtomicInteger(0); + + AtomicInteger segments = new AtomicInteger(0); + + AtomicInteger pollerSize = new AtomicInteger(0); + + List<SchedulerTaskContainer> taskContainers = new ArrayList<>(); + } + + /** + * Scheduler task container + */ + private class SchedulerTaskContainer{ + + String tenancy; + + String taskName; + + SchedulerTaskSegment[] segments; + + ReentrantReadWriteLock segmentLock = new ReentrantReadWriteLock(); + + SchedulerTaskContainer(LoadBalanceSchedulerTask<LaunchedExchangisTask> schedulerTask){ + // TODO should create the strategy of defining 'weight' value + segments = new SchedulerTaskSegment[]{new SchedulerTaskSegment(1, schedulerTask)}; + taskName = schedulerTask.getClass().getSimpleName(); + } + LoadBalanceSchedulerTask<LaunchedExchangisTask> select(){ + segmentLock.writeLock().lock(); + try { + int segmentIndex = selectSegment(segments); + SchedulerTaskSegment segment = segments[segmentIndex]; + segment.cwt = segment.cwt - 1; + return segment.loadBalanceSchedulerTask; + }finally { + segmentLock.writeLock().unlock(); + } + } + + private void adjustSegment(int adjustNum){ + if (adjustNum != segments.length) { + segmentLock.writeLock().lock(); + try { + if (adjustNum > segments.length) { + scaleInSegment(adjustNum - segments.length); + } else { + scaleOutSegment(segments.length - adjustNum); + } + }finally { + segmentLock.writeLock().unlock(); + } + } + }
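+ // Naming note: scaleOutSegment(n) shrinks the segment array by killing n tasks and merging their pollers into the survivors, while scaleInSegment(n) appends n freshly created scheduler tasks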
+ /** + * Scale-out segment: remove 'scaleOut' segments and merge their pollers into the survivors + * @param scaleOut number of segments to remove + */ + private void scaleOutSegment(int scaleOut){ + int newSize = segments.length - scaleOut; + LOG.info("Scale-out segments for tenancy: [{}], scaleOut: [{}], newSize: [{}], scheduler_task_type: [{}]", + tenancy, scaleOut, newSize, taskName); + if (newSize <= 0){ + LOG.warn("Scale-out failed, the newSize cannot be <= 0"); + return; + } + SchedulerTaskSegment[] newSegments = new SchedulerTaskSegment[newSize]; + System.arraycopy(segments, 0, newSegments, 0, newSize); + int offset = -1; + Map<String, List<LoadBalanceSchedulerTask<LaunchedExchangisTask>>> waitForCombine = new HashMap<>(); + for(int i = newSize; i < segments.length; i ++){ + LoadBalanceSchedulerTask<LaunchedExchangisTask> schedulerTask = segments[i].loadBalanceSchedulerTask; + try { + SchedulerTaskSegment newSegment = null; + int count = 0; + do { + offset = (offset + 1) % newSize; + newSegment = newSegments[offset]; + count ++; + }while (newSegment.loadBalanceSchedulerTask.getState() != SchedulerEventState.Running() && count <= newSize); + if (offset != 0 && newSegment.loadBalanceSchedulerTask.getState() != SchedulerEventState.Running()){ + // Ignore the first load balance scheduler task + LOG.error("Unable to scale-out segments for tenancy: [{}], reason:" + + " the scheduler task is still in state [{}], scheduler_task_type: [{}], offset: [{}]", + tenancy, newSegment.loadBalanceSchedulerTask.getState(), taskName, offset); + return; + } + waitForCombine.compute(offset + "", (key, value) -> { + if (Objects.isNull(value)){ + value = new ArrayList<>(); + } + value.add(schedulerTask); + return value; + }); + } catch (Exception e){ + LOG.warn("Scale-out segments for tenancy: [{}] failed, index: [{}], scheduler_task_type: [{}]", tenancy, i, taskName, e); + } + } + // Kill the removed tasks and combine their pollers into the surviving segments + waitForCombine.forEach((key, tasks) -> { + SchedulerTaskSegment newSegment = newSegments[Integer.parseInt(key)]; + tasks.forEach(task -> { + // Kill task + if (AbstractExchangisSchedulerTask.class.isAssignableFrom(task.getClass())) { + ((AbstractExchangisSchedulerTask) task).kill(); + } + // Merge/Combine the poller + LoadBalancePoller<LaunchedExchangisTask> poller = task.getOrCreateLoadBalancePoller(); + LOG.info("Merge/combine [{}] poller from {} to {}", taskName, task.getId(), newSegment.loadBalanceSchedulerTask.getId()); + newSegment.loadBalanceSchedulerTask.getOrCreateLoadBalancePoller().combine(poller); + }); + }); + segments = newSegments; + } + /** + * Scale-in segment: append 'scaleIn' new segments with fresh scheduler tasks + * @param scaleIn number of segments to add + */ + private void scaleInSegment(int scaleIn){ + LOG.info("Scale-in segments for tenancy: [{}], scaleIn: [{}], newSize: [{}], scheduler task: [{}]", + tenancy, scaleIn, segments.length + scaleIn, taskName); + SchedulerTaskSegment[] newSegments = new SchedulerTaskSegment[segments.length + scaleIn]; + System.arraycopy(segments, 0, newSegments, 0, segments.length); + for(int i = segments.length; i < segments.length + scaleIn; i ++){ + try { + LoadBalanceSchedulerTask<LaunchedExchangisTask> schedulerTask = + createLoadBalanceSchedulerTask(segments[0].loadBalanceSchedulerTask.getClass()); + // The new segment starts with weight 0 and is raised to 1 once its task is actually scheduled + final SchedulerTaskSegment segment = new SchedulerTaskSegment(0, schedulerTask); + if (schedulerTask instanceof AbstractLoadBalanceSchedulerTask){ + ((AbstractLoadBalanceSchedulerTask) schedulerTask) + .setSchedulerLoadBalancer(FlexibleTenancyLoadBalancer.this); + ((AbstractLoadBalanceSchedulerTask) schedulerTask).setScheduleListener( task -> { + segmentLock.writeLock().lock(); + try{ + segment.setWeight(1); + LOG.info("Init the weight of segment to 1, related scheduler task: {}", task.getName()); + }finally { + segmentLock.writeLock().unlock(); + } + }); + } + schedulerTask.setTenancy(tenancy); + newSegments[i] = segment; + getScheduler().submit(schedulerTask); + } catch (Exception e){ + LOG.warn("Scale-in segments for tenancy: [{}] failed, index: [{}]", tenancy, i, e); + } + } + segments = newSegments; + } + /** + * Select segment + * @param segments segments + * @return index + */ + private int selectSegment(SchedulerTaskSegment[] segments) { + int u = 0; + int reset = -1; + while (true) { + for (int i = 0; i < segments.length; i++) { + if (null == segments[i] || segments[i].cwt <= 0) { + continue; + } + u = i; + while (i < segments.length - 1) { + i++; + if (null == segments[i] || segments[i].cwt <= 0) { + continue; + } + if ((segments[u].wt * 1000 / segments[i].wt < + segments[u].cwt * 1000 / segments[i].cwt)) { + return u; + } + u = i; + } + return u; + } + if (reset++ > 0) { + return 0; + } + // All segments have exhausted their credit, refill the current weights + for (SchedulerTaskSegment segment : segments) { + segment.cwt = segment.wt; + } + } + } + }
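+ // Weighted selection in brief: selectSegment() prefers the segment whose remaining credit (cwt) is proportionally higher than its configured weight (wt) relative to its neighbours, select() then charges one credit, and all credits are refilled from the weights once spent; e.g. two segments weighted 2 and 1 are picked in a 2:1 ratio per refill cycle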
+ /** + * Each segment has: + * wt => initial weight + * cwt => current weight + */ + private static class SchedulerTaskSegment{ + + int wt = -1; + + int cwt = -1; + + String schedulerId; + + LoadBalanceSchedulerTask<LaunchedExchangisTask> loadBalanceSchedulerTask; + + SchedulerTaskSegment(int weight, LoadBalanceSchedulerTask<LaunchedExchangisTask> task){ + this.wt = weight; + this.cwt = this.wt; + this.loadBalanceSchedulerTask = task; + this.schedulerId = task.getId(); + } + + public void setWeight(int weight){ + this.wt = weight; + this.cwt = this.wt; + } + } + +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/loadbalance/TaskSchedulerLoadBalancer.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/loadbalance/TaskSchedulerLoadBalancer.java new file mode 100644 index 000000000..46bba16fd --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/loadbalance/TaskSchedulerLoadBalancer.java @@ -0,0 +1,15 @@ +package com.webank.wedatasphere.exchangis.job.server.execution.loadbalance; + +import com.webank.wedatasphere.exchangis.job.domain.ExchangisTask; +import com.webank.wedatasphere.exchangis.job.server.execution.TaskManager; +import com.webank.wedatasphere.exchangis.job.server.execution.scheduler.loadbalance.SchedulerLoadBalancer; + +public interface TaskSchedulerLoadBalancer<T extends ExchangisTask> extends SchedulerLoadBalancer<T>{ + + /** + * Manage the running tasks + * @return task manager + */ + TaskManager getTaskManager(); + +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/AbstractExchangisSchedulerTask.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/AbstractExchangisSchedulerTask.java new file mode 100644 index 000000000..d5b78a9f4 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/AbstractExchangisSchedulerTask.java @@ -0,0 +1,119 @@ +package com.webank.wedatasphere.exchangis.job.server.execution.scheduler; + +import com.webank.wedatasphere.exchangis.job.server.exception.ExchangisSchedulerException; +import com.webank.wedatasphere.exchangis.job.server.exception.ExchangisSchedulerRetryException; +import org.apache.commons.lang.StringUtils; +import org.apache.linkis.scheduler.executer.CompletedExecuteResponse; +import org.apache.linkis.scheduler.executer.ErrorExecuteResponse; +import org.apache.linkis.scheduler.executer.ExecuteRequest; +import org.apache.linkis.scheduler.queue.Job; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; + +/** + * Inheritable scheduler task for Exchangis, different from ExchangisTask + */ +public abstract class AbstractExchangisSchedulerTask extends Job implements ExchangisSchedulerTask{ + + private static final Logger LOG = LoggerFactory.getLogger(AbstractExchangisSchedulerTask.class); + + public static final int MAX_RETRY_NUM = 3; + + private int maxRetryNum = MAX_RETRY_NUM; + + /** + * Tenancy name + */ + private String tenancy; + + protected String scheduleId; + /** + * Each schedule task should have an id + * @param scheduleId schedule id + */ + public AbstractExchangisSchedulerTask(String scheduleId){ + this.scheduleId = scheduleId; + } + + public AbstractExchangisSchedulerTask() { + + } + + @Override + public void init() throws Exception { + + }
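+ // Bridge into the linkis-scheduler executors: the job exposes itself as a DirectExecuteRequest, so an executor (see DefaultDirectExecutor in ExchangisSchedulerExecutorManager) can call directExecute(), which delegates to schedule() and re-throws retry exceptions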
+ @Override + public ExecuteRequest jobToExecuteRequest() throws Exception { + return new DirectExecuteRequest(); + } + + @Override + public int getMaxRetryNum() { + return maxRetryNum; + } + + private void setMaxRetryNum(int maxRetryNum){ + this.maxRetryNum = maxRetryNum; + } + + @Override + public void close() throws IOException { + } + + @Override + public String getId() { + if (StringUtils.isNotBlank(this.scheduleId)){ + return scheduleId; + } + return super.getId(); + } + + /** + * Schedule main method + * @throws ExchangisSchedulerException error exception + * @throws ExchangisSchedulerRetryException retry exception + */ + protected abstract void schedule() throws ExchangisSchedulerException, ExchangisSchedulerRetryException; + + public class DirectExecuteRequest implements ExecuteRequest { + + @Override + public String code() { + return null; + } + + public void directExecute() throws ExchangisSchedulerException, ExchangisSchedulerRetryException { + // Direct execute + try { + schedule(); + } catch (ExchangisSchedulerRetryException e){ + if (e.getRetryNum() > 0){ + setMaxRetryNum(e.getRetryNum()); + } + // Need to throw again + throw e; + } + } + } + + @Override + public void transitionCompleted(CompletedExecuteResponse executeCompleted) { + super.transitionCompleted(executeCompleted); + if (executeCompleted instanceof ErrorExecuteResponse){ + ErrorExecuteResponse response = ((ErrorExecuteResponse)executeCompleted); + LOG.error("Schedule Error: " + response.message(), response.t()); + } + } + + public String getTenancy() { + return tenancy; + } + + public void setTenancy(String tenancy) { + this.tenancy = tenancy; + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/ExchangisGenericScheduler.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/ExchangisGenericScheduler.java new file mode 100644 index 000000000..33edd7704 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/ExchangisGenericScheduler.java @@ -0,0 +1,87 @@ +package com.webank.wedatasphere.exchangis.job.server.execution.scheduler; + +import org.apache.commons.lang3.StringUtils; +import org.apache.linkis.common.conf.CommonVars; +import org.apache.linkis.scheduler.AbstractScheduler; +import org.apache.linkis.scheduler.SchedulerContext; +import org.apache.linkis.scheduler.executer.ExecutorManager; +import org.apache.linkis.scheduler.queue.ConsumerManager; +import org.apache.linkis.scheduler.queue.GroupFactory; +import org.apache.linkis.scheduler.queue.fifoqueue.FIFOSchedulerContextImpl; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +/** + * Inherits AbstractScheduler from linkis-scheduler + */ +public class ExchangisGenericScheduler extends AbstractScheduler { + + private static class Constraints{ + + private static final CommonVars<Integer> MAX_PARALLEL_PER_TENANCY = CommonVars.apply("wds.exchangis.job.scheduler.consumer.max.parallel.per-tenancy", 1); + + /** + * System tenancies + */ + private static final CommonVars<String> SYSTEM_TENANCY_PATTERN = CommonVars.apply("wds.exchangis.job.scheduler.consumer.tenancies-system", ".log"); + + /** + * Custom tenancies + */ + private static final CommonVars<String> CUSTOM_TENANCY_PATTERN = CommonVars.apply("wds.exchangis.job.scheduler.consumer.tenancies", "hadoop"); + + private static final CommonVars<Integer> GROUP_INIT_CAPACITY =
CommonVars.apply("wds.exchangis.job.scheduler.group.min.capacity", 1000); + + private static final CommonVars<Integer> GROUP_MAX_CAPACITY = CommonVars.apply("wds.exchangis.job.scheduler.group.max.capacity", 5000); + + private static final CommonVars<Integer> GROUP_MAX_RUNNING_JOBS = CommonVars.apply("wds.exchangis.job.scheduler.group.max.running-jobs", 30); + } + + + private SchedulerContext schedulerContext; + + private ExecutorManager executorManager; + + private ConsumerManager consumerManager; + + public ExchangisGenericScheduler(ExecutorManager executorManager, ConsumerManager consumerManager){ + this.executorManager = executorManager; + this.consumerManager = consumerManager; + } + + @Override + public void init() { + List<String> tenancies = new ArrayList<>(); + String sysTenancies = Constraints.SYSTEM_TENANCY_PATTERN.getValue(); + if (StringUtils.isNotBlank(sysTenancies)){ + tenancies.addAll(Arrays.asList(sysTenancies.split(","))); + } + String customTenancies = Constraints.CUSTOM_TENANCY_PATTERN.getValue(); + if (StringUtils.isNotBlank(customTenancies)){ + tenancies.addAll(Arrays.asList(customTenancies.split(","))); + } + this.schedulerContext = new ExchangisSchedulerContext(Constraints.MAX_PARALLEL_PER_TENANCY.getValue(), tenancies); + GroupFactory groupFactory = this.schedulerContext.getOrCreateGroupFactory(); + if (groupFactory instanceof TenancyParallelGroupFactory){ + TenancyParallelGroupFactory tenancyParallelGroupFactory = (TenancyParallelGroupFactory)groupFactory; + tenancyParallelGroupFactory.setDefaultInitCapacity(Constraints.GROUP_INIT_CAPACITY.getValue()); + tenancyParallelGroupFactory.setDefaultMaxCapacity(Constraints.GROUP_MAX_CAPACITY.getValue()); + tenancyParallelGroupFactory.setDefaultMaxRunningJobs(Constraints.GROUP_MAX_RUNNING_JOBS.getValue()); + } + ((FIFOSchedulerContextImpl) this.schedulerContext).setExecutorManager(executorManager); + ((FIFOSchedulerContextImpl) this.schedulerContext).setConsumerManager(consumerManager); + } + + @Override + public String getName() { + return "Exchangis-Multi-Tenancy-Scheduler"; + } + + @Override + public SchedulerContext getSchedulerContext() { + return schedulerContext; + } + +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/ExchangisSchedulerContext.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/ExchangisSchedulerContext.java new file mode 100644 index 000000000..929961dbb --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/ExchangisSchedulerContext.java @@ -0,0 +1,46 @@ +package com.webank.wedatasphere.exchangis.job.server.execution.scheduler; + +import com.webank.wedatasphere.exchangis.job.server.exception.ExchangisSchedulerException; +import org.apache.commons.lang.StringUtils; +import org.apache.linkis.scheduler.queue.ConsumerManager; +import org.apache.linkis.scheduler.queue.GroupFactory; +import org.apache.linkis.scheduler.queue.fifoqueue.FIFOSchedulerContextImpl; + +import java.util.Arrays; +import java.util.Collections; +import java.util.List; + +/** + * Contains the executorManager, consumerManager and groupFactory + */ +public class ExchangisSchedulerContext extends FIFOSchedulerContextImpl { + + /** + * Tenancy list + */ + private final List<String> tenancies; + + private int maxParallelismPerUser = 1; + public ExchangisSchedulerContext(int maxParallelismPerUser, List<String> tenancies) { + super(Integer.MAX_VALUE);
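+ // Presumably deliberate: the FIFO context capacity is left effectively unbounded here, since the real per-tenancy parallelism limit is enforced by the TenancyParallelGroupFactory created below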
this.maxParallelismPerUser = maxParallelismPerUser; + this.tenancies = tenancies; + } + + @Override + public GroupFactory createGroupFactory() { + TenancyParallelGroupFactory parallelGroupFactory = new TenancyParallelGroupFactory(); + parallelGroupFactory.setParallelPerTenancy(maxParallelismPerUser); + parallelGroupFactory.setTenancies(this.tenancies); + return parallelGroupFactory; + } + + @Override + public ConsumerManager createConsumerManager() { + throw new ExchangisSchedulerException.Runtime("Must set the consumer manager before scheduling", null); + } + + public List<String> getTenancies() { + return tenancies; + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/ExchangisSchedulerExecutorFactory.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/ExchangisSchedulerExecutorFactory.java new file mode 100644 index 000000000..eea78bc82 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/ExchangisSchedulerExecutorFactory.java @@ -0,0 +1,24 @@ +package com.webank.wedatasphere.exchangis.job.server.execution.scheduler; + +import org.apache.linkis.scheduler.executer.Executor; +import org.apache.linkis.scheduler.queue.SchedulerEvent; + +/** + * Create executor + */ +public interface ExchangisSchedulerExecutorFactory { + + /** + * Whether to create singleton executors + * @param singleton boolean + */ + void setIsSingleTon(boolean singleton); + + /** + * Create Executor + * @param event scheduler event + * @return executor + */ + Executor getOrCreateExecutor(SchedulerEvent event); + +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/ExchangisSchedulerExecutorManager.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/ExchangisSchedulerExecutorManager.java new file mode 100644 index 000000000..02ea96da7 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/ExchangisSchedulerExecutorManager.java @@ -0,0 +1,194 @@ +package com.webank.wedatasphere.exchangis.job.server.execution.scheduler; + +import com.webank.wedatasphere.exchangis.job.server.exception.ExchangisSchedulerException; +import com.webank.wedatasphere.exchangis.job.server.exception.ExchangisSchedulerRetryException; +import org.apache.linkis.protocol.engine.EngineState; +import org.apache.linkis.scheduler.exception.LinkisJobRetryException; +import org.apache.linkis.scheduler.executer.*; +import org.apache.linkis.scheduler.listener.ExecutorListener; +import org.apache.linkis.scheduler.queue.SchedulerEvent; +import scala.Option; +import scala.Some; +import scala.concurrent.duration.Duration; + +import java.io.IOException; +import java.lang.reflect.Constructor; +import java.lang.reflect.InvocationTargetException; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; + + +/** + * Executor manager for scheduler + */ +public class ExchangisSchedulerExecutorManager extends ExecutorManager { + + private ExchangisSchedulerExecutorFactory schedulerExecutorFactory; + + public ExchangisSchedulerExecutorManager(ExchangisSchedulerExecutorFactory schedulerExecutorFactory){ + this.schedulerExecutorFactory = schedulerExecutorFactory; + } + + public ExchangisSchedulerExecutorManager(){
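+ // No factory supplied: fall back to the default factory, which caches one singleton executor per scheduler event class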
this.schedulerExecutorFactory = new DefaultExchangisSchedulerExecutorFactory(); + } + @Override + public void setExecutorListener(ExecutorListener engineListener) { + // It is never used here + } + + @Override + public Executor createExecutor(SchedulerEvent event) { + return schedulerExecutorFactory.getOrCreateExecutor(event); + } + + @Override + public Option<Executor> askExecutor(SchedulerEvent event) { + return Some.apply(schedulerExecutorFactory.getOrCreateExecutor(event)); + } + + @Override + public Option<Executor> askExecutor(SchedulerEvent event, Duration wait) { + return askExecutor(event); + } + + @Override + public Option<Executor> getById(long id) { + return null; + } + + @Override + public Executor[] getByGroup(String groupName) { + return new Executor[0]; + } + + @Override + public void delete(Executor executor) { + + } + + @Override + public void shutdown() { + // Do nothing + } + + public ExchangisSchedulerExecutorFactory getSchedulerExecutorFactory() { + return schedulerExecutorFactory; + } + + public static class DefaultExchangisSchedulerExecutorFactory implements ExchangisSchedulerExecutorFactory{ + + private static final Class<? extends Executor> DEFAULT_DIRECT_EXECUTOR = DefaultDirectExecutor.class; + + /** + * Registered executor classes + */ + private Map<String, Class<? extends Executor>> registeredExecutorClass = new ConcurrentHashMap<>(); + /** + * Singleton instance holder + */ + private Map<String, Executor> singletonExecutorHolder = new ConcurrentHashMap<>(); + /** + * Default: true + */ + boolean isSingleton = true; + @Override + public void setIsSingleTon(boolean singleton) { + this.isSingleton = singleton; + } + + @Override + public Executor getOrCreateExecutor(SchedulerEvent event) { + String name = event.getClass().getName(); + if (isSingleton){ + return singletonExecutorHolder.computeIfAbsent(name, this::createExecutorInternal); + } + return createExecutorInternal(name); + } + + private Executor createExecutorInternal(String eventName){ + Class<? extends Executor> executorClass = registeredExecutorClass + .getOrDefault(eventName, DEFAULT_DIRECT_EXECUTOR); + try { + Constructor<? extends Executor> constructor = executorClass.getDeclaredConstructor(); + Executor executor = constructor.newInstance(); + if (executor instanceof FactoryCreateExecutor){ + ((FactoryCreateExecutor)executor).setSchedulerExecutorFactory(this); + } + return executor; + } catch (NoSuchMethodException e) { + throw new ExchangisSchedulerException.Runtime("Failed to construct the executor for" + " scheduler task: [" + eventName + "], reason: has no suitable constructor", e); + } catch (IllegalAccessException | InstantiationException | InvocationTargetException e) { + throw new ExchangisSchedulerException.Runtime("Failed to construct the executor for" + " scheduler task: [" + eventName + "], reason: access denied or instantiation error", e); + } + } + /** + * Register method + * @param schedulerTask scheduler task + * @param schedulerExecutor scheduler executor + */ + public void registerTaskExecutor(Class<?> schedulerTask, + Class<? extends Executor> schedulerExecutor){ + String schedulerTaskClass = schedulerTask.getCanonicalName(); + registeredExecutorClass.putIfAbsent(schedulerTaskClass, schedulerExecutor); + } + } + + public static abstract class FactoryCreateExecutor implements Executor{ + + /** + * Executor factory + */ + private ExchangisSchedulerExecutorFactory schedulerExecutorFactory; + + public ExchangisSchedulerExecutorFactory getSchedulerExecutorFactory() { + return schedulerExecutorFactory; + } + + public void setSchedulerExecutorFactory(ExchangisSchedulerExecutorFactory schedulerExecutorFactory) { + this.schedulerExecutorFactory =
schedulerExecutorFactory; + } + } + + public static class DefaultDirectExecutor extends FactoryCreateExecutor{ + + @Override + public long getId() { + return 0; + } + + @Override + public ExecuteResponse execute(ExecuteRequest executeRequest) { + if (executeRequest instanceof AbstractExchangisSchedulerTask.DirectExecuteRequest){ + try { + ((AbstractExchangisSchedulerTask.DirectExecuteRequest)executeRequest).directExecute(); + return new SuccessExecuteResponse(); + } catch (ExchangisSchedulerException | ExchangisSchedulerRetryException e) { + e.setErrCode(LinkisJobRetryException.JOB_RETRY_ERROR_CODE()); + return new ErrorExecuteResponse("Exception occurred in scheduling, the task will fail or be retried next time, message: [" + + e.getMessage() + "]", e); + } catch (Exception e){ + return new ErrorExecuteResponse("Unknown exception occurred in scheduling, message: [" + e.getMessage() + "]", e); + } + } + return new ErrorExecuteResponse("Unsupported execute request: code: [" + executeRequest.code() + "]", null); + } + + @Override + public EngineState state() { + return null; + } + + @Override + public ExecutorInfo getExecutorInfo() { + return null; + } + + @Override + public void close() throws IOException { + + } + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/ExchangisSchedulerTask.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/ExchangisSchedulerTask.java new file mode 100644 index 000000000..a0ec6eeb4 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/ExchangisSchedulerTask.java @@ -0,0 +1,19 @@ +package com.webank.wedatasphere.exchangis.job.server.execution.scheduler; + +import com.webank.wedatasphere.exchangis.job.server.execution.scheduler.priority.PriorityRunnable; +import org.apache.linkis.scheduler.queue.SchedulerEvent; + +/** + * Exchangis scheduler task + */ +public interface ExchangisSchedulerTask extends PriorityRunnable, SchedulerEvent { + + /** + * Tenancy + * @return tenancy name + */ + String getTenancy(); + + void setTenancy(String tenancy); + +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/ScheduleListener.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/ScheduleListener.java new file mode 100644 index 000000000..790b90d71 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/ScheduleListener.java @@ -0,0 +1,15 @@ +package com.webank.wedatasphere.exchangis.job.server.execution.scheduler; + +import com.webank.wedatasphere.exchangis.job.server.execution.scheduler.ExchangisSchedulerTask; + +/** + * Schedule listener + * @param <T> scheduler task type + */ +public interface ScheduleListener<T extends ExchangisSchedulerTask> { + /** + * On schedule event + * @param schedulerTask scheduler task + */ + void onSchedule(T schedulerTask); +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/SchedulerThread.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/SchedulerThread.java new file mode 100644 index 000000000..b9da86213 --- /dev/null +++
b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/SchedulerThread.java @@ -0,0 +1,24 @@ +package com.webank.wedatasphere.exchangis.job.server.execution.scheduler; + +import com.webank.wedatasphere.exchangis.job.server.execution.scheduler.priority.PriorityRunnable; + +/** + * Defines the basic interface of a thread in the scheduler + */ +public interface SchedulerThread extends PriorityRunnable { + /** + * Start entrance + */ + void start(); + + /** + * Stop entrance + */ + void stop(); + + /** + * Name + * @return thread name + */ + String getName(); +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/TenancyParallelConsumerManager.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/TenancyParallelConsumerManager.java new file mode 100644 index 000000000..0e2bd5caf --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/TenancyParallelConsumerManager.java @@ -0,0 +1,182 @@ +package com.webank.wedatasphere.exchangis.job.server.execution.scheduler; + +import com.webank.wedatasphere.exchangis.job.server.exception.ExchangisSchedulerException; +import com.webank.wedatasphere.exchangis.job.server.execution.scheduler.priority.PriorityOrderedQueue; +import com.webank.wedatasphere.exchangis.job.server.execution.scheduler.priority.PriorityRunnable; +import org.apache.commons.lang.StringUtils; +import org.apache.linkis.common.utils.Utils; +import org.apache.linkis.scheduler.listener.ConsumerListener; +import org.apache.linkis.scheduler.queue.*; +import org.apache.linkis.scheduler.queue.fifoqueue.FIFOGroup; +import org.apache.linkis.scheduler.queue.fifoqueue.FIFOUserConsumer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.*; +import java.util.concurrent.*; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.locks.ReentrantLock; + +/** + * Tenancy parallel consumer manager + */ +public class TenancyParallelConsumerManager extends ConsumerManager { + + private static final Logger LOG = LoggerFactory.getLogger(TenancyParallelConsumerManager.class); + + private ConsumerListener consumerListener; + + /** + * Default executor service + */ + private ExecutorService defaultExecutorService; + + private ReentrantLock defaultExecutorServiceLock = new ReentrantLock(); + + private Map<String, ExecutorService> tenancyExecutorServices = new ConcurrentHashMap<>(); + + private Map<String, Consumer> consumerGroupMap = new ConcurrentHashMap<>(); + + private int initResidentThreads = 1; + + @Override + public void setConsumerListener(ConsumerListener consumerListener) { + this.consumerListener = consumerListener; + } + + @Override + public ExecutorService getOrCreateExecutorService() { + if (Objects.isNull(defaultExecutorService)){ + defaultExecutorServiceLock.lock(); + try{ + Group group = getSchedulerContext().getOrCreateGroupFactory().getOrCreateGroup(null); + if (group instanceof FIFOGroup){ + defaultExecutorService = newPriorityThreadPool(((FIFOGroup) group).getMaxRunningJobs() + + this.initResidentThreads + 1, + TenancyParallelGroupFactory.GROUP_NAME_PREFIX + TenancyParallelGroupFactory.DEFAULT_TENANCY + "-Executor-", true); + tenancyExecutorServices.put(TenancyParallelGroupFactory.DEFAULT_TENANCY, defaultExecutorService); + } else { + throw new ExchangisSchedulerException.Runtime("Cannot construct the executor service " + "using
the default group: [" + group.getClass().getCanonicalName() + "]", null); + } + }finally{ + defaultExecutorServiceLock.unlock(); + } + } + return this.defaultExecutorService; + } + + @Override + public Consumer getOrCreateConsumer(String groupName) { + Consumer resultConsumer = consumerGroupMap.computeIfAbsent(groupName, groupName0 -> { + Consumer consumer = createConsumer(groupName); + Group group = getSchedulerContext().getOrCreateGroupFactory().getGroup(groupName); + consumer.setGroup(group); + consumer.setConsumeQueue(new LoopArrayQueue(group)); + LOG.info("Create a new consumer for group: [{}]", groupName); + Optional.ofNullable(consumerListener).ifPresent( listener -> listener.onConsumerCreated(consumer)); + consumer.start(); + return consumer; + }); + if (resultConsumer instanceof FIFOUserConsumer){ + ((FIFOUserConsumer) resultConsumer).setLastTime(System.currentTimeMillis()); + } + return resultConsumer; + } + + @Override + public Consumer createConsumer(String groupName) { + Group group = getSchedulerContext().getOrCreateGroupFactory().getGroup(groupName); + return new FIFOUserConsumer(getSchedulerContext(), getOrCreateExecutorService(groupName), group); + } + + @Override + public void destroyConsumer(String groupName) { + Optional.ofNullable(consumerGroupMap.get(groupName)).ifPresent( consumer -> { + LOG.warn("Start to shutdown the consumer for group: [{}]", groupName); + consumer.shutdown(); + consumerGroupMap.remove(groupName); + Optional.ofNullable(consumerListener).ifPresent( listener -> listener.onConsumerDestroyed(consumer)); + LOG.warn("Finished shutting down the consumer for group: [{}]", groupName); + }); + } + + /*** + * Invoked when the Spring container is shutting down + */ + @Override + public void shutdown() { + LOG.warn("Shutdown all the consumers which are working"); + consumerGroupMap.forEach((group, consumer) -> consumer.shutdown()); + LOG.warn("Shutdown all the executor services for tenancies: [{}]", StringUtils.join(tenancyExecutorServices.keySet(), ",")); + tenancyExecutorServices.forEach((tenancy, executorService) -> executorService.shutdownNow()); + } + + @Override + public Consumer[] listConsumers() { + return consumerGroupMap.values().toArray(new Consumer[]{}); + } + + protected ExecutorService getOrCreateExecutorService(String groupName){ + GroupFactory groupFactory = getSchedulerContext().getOrCreateGroupFactory(); + if (groupFactory instanceof TenancyParallelGroupFactory){ + TenancyParallelGroupFactory parallelGroupFactory = (TenancyParallelGroupFactory)groupFactory; + String tenancy = parallelGroupFactory.getTenancyByGroupName(groupName); + groupFactory.getGroup(groupName); + if (StringUtils.isNotBlank(tenancy)){ + return tenancyExecutorServices.computeIfAbsent(tenancy, tenancyName -> { + // Use the default value of max running jobs + return newPriorityThreadPool(parallelGroupFactory.getDefaultMaxRunningJobs() + parallelGroupFactory.getParallelPerTenancy(), + TenancyParallelGroupFactory.GROUP_NAME_PREFIX + tenancy + "-Executor-", true); + }); + } + } + return getOrCreateExecutorService(); + } + + public int getInitResidentThreads() { + return initResidentThreads; + } + + public void setInitResidentThreads(int initResidentThreads) { + this.initResidentThreads = initResidentThreads; + } + + /** + * Tenancy executor services + * @return map of tenancy to executor service + */ + public Map<String, ExecutorService> getTenancyExecutorServices() { + return tenancyExecutorServices; + }
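+ // The pool below orders queued tasks by PriorityRunnable.getPriority() in descending order (a task with priority 2 is taken before one with priority 1); runnables without a priority default to 0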
+ /** + * Create a thread pool with priority support for a tenancy consumer + * @param threadNum core/max thread number + * @param threadName thread name prefix + * @param isDaemon whether the threads are daemons + * @return executor service + */ + private ExecutorService newPriorityThreadPool(int threadNum, String threadName, boolean isDaemon){ + ThreadPoolExecutor threadPool = new ThreadPoolExecutor( + threadNum, + threadNum, + 120L, + TimeUnit.SECONDS, + new PriorityBlockingQueue<>(10 * threadNum, (o1, o2) -> { + int left = o1 instanceof PriorityRunnable ? ((PriorityRunnable) o1).getPriority() : 0; + int right = o2 instanceof PriorityRunnable ? ((PriorityRunnable) o2).getPriority() : 0; + return right - left; + }), + new ThreadFactory() { + final AtomicInteger num = new AtomicInteger(0); + @Override + public Thread newThread(Runnable r) { + Thread t = new Thread(r); + t.setDaemon(isDaemon); + t.setName(threadName + num.incrementAndGet()); + return t; + } + }); + threadPool.allowCoreThreadTimeOut(true); + return threadPool; + } + +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/TenancyParallelGroupFactory.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/TenancyParallelGroupFactory.java new file mode 100644 index 000000000..cf0ab4447 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/TenancyParallelGroupFactory.java @@ -0,0 +1,79 @@ +package com.webank.wedatasphere.exchangis.job.server.execution.scheduler; + +import org.apache.commons.lang.StringUtils; +import org.apache.linkis.scheduler.queue.AbstractGroup; +import org.apache.linkis.scheduler.queue.SchedulerEvent; +import org.apache.linkis.scheduler.queue.fifoqueue.FIFOGroup; +import org.apache.linkis.scheduler.queue.fifoqueue.FIFOGroupFactory; + +import java.util.ArrayList; +import java.util.List; +import java.util.Objects; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +/** + * Multi-tenancy group factory + */ +public class TenancyParallelGroupFactory extends FIFOGroupFactory { + + private static final int DEFAULT_PARALLEL_PER_TENANCY = 1; + + public static final String GROUP_NAME_PREFIX = "Multi-Tenancy-Group-"; + + public static final String DEFAULT_TENANCY = "default"; + + private static final Pattern TENANCY_IN_GROUP_PATTERN = Pattern.compile("^" + GROUP_NAME_PREFIX + "([-_\\w\\W]+)?_\\d$"); + + private int parallelPerTenancy = DEFAULT_PARALLEL_PER_TENANCY; + + private List<String> tenancies = new ArrayList<>(); + + public int getParallelPerTenancy() { + return parallelPerTenancy; + } + + public void setParallelPerTenancy(int parallelPerTenancy) { + // Only accept a positive parallelism value + if (parallelPerTenancy > 0) + this.parallelPerTenancy = parallelPerTenancy; + } + + public List<String> getTenancies() { + return tenancies; + } + + public void setTenancies(List<String> tenancies) { + this.tenancies = tenancies; + } + + @Override + public AbstractGroup createGroup(String groupName) { + // Also use the fifo group + return new FIFOGroup(groupName, getInitCapacity(groupName), getMaxCapacity(groupName)); + } + + + @Override + public String getGroupNameByEvent(SchedulerEvent event) { + String tenancy = ""; + if (Objects.nonNull(event) && (event instanceof AbstractExchangisSchedulerTask)){ + String tenancyInSchedule = ((AbstractExchangisSchedulerTask)event).getTenancy(); + if (tenancies.contains(tenancyInSchedule)){ + tenancy = tenancyInSchedule; + } + } + return StringUtils.isNotBlank(tenancy)?
GROUP_NAME_PREFIX + tenancy + "_" + (event.getId().hashCode() % parallelPerTenancy) : GROUP_NAME_PREFIX + DEFAULT_TENANCY; + } + + public String getTenancyByGroupName(String groupName){ + String tenancy = null; + if (StringUtils.isNotBlank(groupName)){ + Matcher matcher = TENANCY_IN_GROUP_PATTERN.matcher(groupName); + if (matcher.find()){ + tenancy = matcher.group(1); + } + } + return tenancy; + } + +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/loadbalance/DelayLoadBalancePoller.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/loadbalance/DelayLoadBalancePoller.java new file mode 100644 index 000000000..0f82f3066 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/loadbalance/DelayLoadBalancePoller.java @@ -0,0 +1,92 @@ +package com.webank.wedatasphere.exchangis.job.server.execution.scheduler.loadbalance; + +import java.util.ArrayList; +import java.util.List; +import java.util.Objects; +import java.util.concurrent.CopyOnWriteArrayList; +import java.util.concurrent.DelayQueue; +import java.util.concurrent.Delayed; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; + +/** + * Polls elements that carry a delay timing property + */ +public abstract class DelayLoadBalancePoller<T> implements LoadBalancePoller<T>{ + + private static final int MAX_POLL_SIZE = 50; + + /** + * Delay queue + */ + private DelayQueue<DelayElement> delayQueue = new DelayQueue<>(); + @Override + public List<T> poll() throws InterruptedException { + List<DelayElement> delayElements = new ArrayList<>(); + delayQueue.drainTo(delayElements, MAX_POLL_SIZE); + while (delayElements.isEmpty()){ + try { + DelayElement element = delayQueue.poll(3, TimeUnit.SECONDS); + if (Objects.nonNull(element)){ + delayElements.add(element); + } + } catch (InterruptedException e) { + throw new InterruptedException(); + } + } + return delayElements.stream().map(delayElement -> delayElement.element).collect(Collectors.toList()); + } + + @Override + public void push(T element) { + DelayElement delayElement = new DelayElement(element); + delayQueue.offer(delayElement); + } + + @Override + @SuppressWarnings("unchecked") + public void combine(LoadBalancePoller<T> other) { + // Only combine with another DelayLoadBalancePoller + if(other instanceof DelayLoadBalancePoller){ + DelayLoadBalancePoller<T> poller = (DelayLoadBalancePoller<T>)other; + for(Object delayElement : poller.delayQueue.toArray()){ + delayQueue.put((DelayElement) delayElement); + } + } + } + + @Override + public int size() { + return delayQueue.size(); + } + + /** + * Get the delay time from an element + * @param element element + * @return trigger timestamp in milliseconds + */ + protected abstract long getDelayTimeInMillis(T element); + + private class DelayElement implements Delayed{ + T element; + + private long triggerTime; + + DelayElement(T element){ + this.element = element; + this.triggerTime = getDelayTimeInMillis(element); + } + @Override + public long getDelay(TimeUnit unit) { + return unit.convert(this.triggerTime - System.currentTimeMillis(), TimeUnit.MILLISECONDS); + }
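+ // Note: compareTo never returns 0, so two elements with the same trigger time are still strictly (if arbitrarily) ordered inside the delay queue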
+ @Override + @SuppressWarnings("unchecked") + public int compareTo(Delayed o) { + DelayElement delayElement = (DelayElement)o; + long compare = this.triggerTime - delayElement.triggerTime; + return compare <= 0? -1 : 1; + } + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/loadbalance/LoadBalancePoller.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/loadbalance/LoadBalancePoller.java new file mode 100644 index 000000000..145b39e9a --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/loadbalance/LoadBalancePoller.java @@ -0,0 +1,26 @@ +package com.webank.wedatasphere.exchangis.job.server.execution.scheduler.loadbalance; + +import java.util.List; + +/** + * Poller for load balance + * @param <T> element type + */ +public interface LoadBalancePoller<T> { + + List<T> poll() throws InterruptedException; + + /** + * Push the element + * @param element element + */ + void push(T element); + + /** + * Combine with another poller + * @param other other poller + */ + void combine(LoadBalancePoller<T> other); + + int size(); +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/loadbalance/SchedulerLoadBalancer.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/loadbalance/SchedulerLoadBalancer.java new file mode 100644 index 000000000..d420dc229 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/loadbalance/SchedulerLoadBalancer.java @@ -0,0 +1,32 @@ +package com.webank.wedatasphere.exchangis.job.server.execution.scheduler.loadbalance; + +import com.webank.wedatasphere.exchangis.job.server.execution.scheduler.tasks.LoadBalanceSchedulerTask; +import org.apache.linkis.scheduler.Scheduler; + +import java.util.List; + + +/** + * Load balancer API of the scheduler + */ +public interface SchedulerLoadBalancer<T> { + /** + * Register the scheduler task into the balancer + * @param schedulerTaskClass class of scheduler task + */ + void registerSchedulerTask(Class<?> schedulerTaskClass); + + /** + * Choose the load balance scheduler tasks + * @param element task + * @return scheduler tasks + */ + List<LoadBalanceSchedulerTask<T>> choose(T element); + + LoadBalanceSchedulerTask<T> choose(T element, Class<?> schedulerTaskClass); + /** + * Hold the scheduler to analyze the loading condition + * @return scheduler + */ + Scheduler getScheduler(); +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/priority/PriorityOrderedQueue.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/priority/PriorityOrderedQueue.java new file mode 100644 index 000000000..4277d768a --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/priority/PriorityOrderedQueue.java @@ -0,0 +1,169 @@ +package com.webank.wedatasphere.exchangis.job.server.execution.scheduler.priority; + +import com.webank.wedatasphere.exchangis.job.utils.SnowFlake; + +import java.util.*; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.PriorityBlockingQueue; +import java.util.concurrent.TimeUnit; +import java.util.function.Consumer; +import java.util.stream.Collectors; + +/** + * Refer to 'PriorityBlockingQueue'; + * uses snowflake ids to generate the order number of elements + */ +public class PriorityOrderedQueue<E> extends AbstractQueue<E> + implements BlockingQueue<E>, java.io.Serializable {
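+ // Every offered element is wrapped into an Ordered carrying a snowflake sequence number: with a user comparator the sequence breaks ties in arrival order, without one the queue orders by the sequence alone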
+ + /** + * Priority queue + */ + private final PriorityBlockingQueue<Ordered> priorityQueue; + + /** + * Snowflake context + */ + private final SnowFlake snowFlake; + public PriorityOrderedQueue(int initialCapacity, + Comparator<? super E> comparator){ + if (Objects.isNull(comparator)){ + this.priorityQueue = new PriorityBlockingQueue<>(initialCapacity, + (left, right) -> (int) (right.seq - left.seq)); + } else { + this.priorityQueue = new PriorityBlockingQueue<>(initialCapacity, + (left, right) -> { + int result = comparator.compare(left.element, right.element); + if (result == 0){ + return (int)(left.seq - right.seq); + } + return result; + }); + } + this.snowFlake = new SnowFlake(0, 0, System.currentTimeMillis()); + } + @Override + public Iterator<E> iterator() { + return new Itr(priorityQueue.iterator()); + } + + @Override + public int size() { + return priorityQueue.size(); + } + + @Override + public void put(E e) throws InterruptedException { + offer(e); + } + + @Override + public boolean offer(E e, long timeout, TimeUnit unit) throws InterruptedException { + return offer(e); + } + + @Override + public E take() throws InterruptedException { + Ordered ordered = this.priorityQueue.take(); + return ordered.element; + } + + @Override + public E poll(long timeout, TimeUnit unit) throws InterruptedException { + Ordered ordered = this.priorityQueue.poll(timeout, unit); + if (null != ordered){ + return ordered.element; + } + return null; + } + + @Override + public int remainingCapacity() { + return this.priorityQueue.remainingCapacity(); + } + + @Override + public int drainTo(Collection<? super E> c) { + return drainTo(c, Integer.MAX_VALUE); + } + + @Override + public int drainTo(Collection<? super E> c, int maxElements) { + // Drain the wrapped elements first, then unwrap them into the target collection + List<Ordered> drained = new ArrayList<>(); + int num = this.priorityQueue.drainTo(drained, maxElements); + for (Ordered ordered : drained){ + c.add(ordered.element); + } + return num; + } + + @Override + public boolean offer(E e) { + return this.priorityQueue.offer(new Ordered(e)); + } + + @Override + public E poll() { + Ordered ordered = this.priorityQueue.poll(); + if (null != ordered){ + return ordered.element; + } + return null; + } + + @Override + public E peek() { + Ordered ordered = this.priorityQueue.peek(); + if (null != ordered){ + return ordered.element; + } + return null; + } + + private class Ordered{ + /** + * Seq number + */ + private long seq; + + /** + * Queue element + */ + private E element; + + public Ordered(E element){ + this.seq = snowFlake.nextId(); + this.element = element; + } + } + + private class Itr implements Iterator<E> { + private Iterator<Ordered> innerItr; + public Itr(Iterator<Ordered> iterator){ + innerItr = iterator; + } + + + @Override + public boolean hasNext() { + return innerItr.hasNext(); + } + + @Override + public E next() { + return innerItr.next().element; + } + + @Override + public void remove() { + innerItr.remove(); + } + + @Override + public void forEachRemaining(Consumer<? super E> action) { + innerItr.forEachRemaining(ordered -> + action.accept(ordered.element)); + } + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/priority/PriorityRunnable.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/priority/PriorityRunnable.java new file mode 100644 index 000000000..4d8ed3608 --- /dev/null +++
diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/priority/PriorityRunnable.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/priority/PriorityRunnable.java
new file mode 100644
index 000000000..4d8ed3608
--- /dev/null
+++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/priority/PriorityRunnable.java
@@ -0,0 +1,16 @@
+package com.webank.wedatasphere.exchangis.job.server.execution.scheduler.priority;
+
+/**
+ * Runnable with priority
+ */
+public interface PriorityRunnable extends Runnable {
+
+    /**
+     * Default: 1
+     * @return value
+     */
+    default int getPriority(){
+        return 1;
+    }
+
+}
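Editor's note: `PriorityRunnable` has a single abstract method, so it can be built from a lambda (default priority) or an anonymous class (overridden priority). A small sketch combining it with the `PriorityOrderedQueue` above; the `PriorityRunnableDemo` wrapper is illustrative, not part of this PR:

```java
import java.util.Comparator;

public class PriorityRunnableDemo {
    public static void main(String[] args) throws InterruptedException {
        PriorityOrderedQueue<PriorityRunnable> workQueue = new PriorityOrderedQueue<>(32,
                Comparator.comparingInt(PriorityRunnable::getPriority).reversed());

        // Default priority (1) via lambda; run() is the single abstract method
        workQueue.offer(() -> System.out.println("ordinary work"));

        // Raised priority (2) via anonymous class
        workQueue.offer(new PriorityRunnable() {
            @Override
            public void run() { System.out.println("urgent work"); }
            @Override
            public int getPriority() { return 2; }
        });

        workQueue.take().run();  // prints "urgent work"
        workQueue.take().run();  // prints "ordinary work"
    }
}
```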
diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/tasks/AbstractLoadBalanceSchedulerTask.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/tasks/AbstractLoadBalanceSchedulerTask.java
new file mode 100644
index 000000000..72a883a25
--- /dev/null
+++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/tasks/AbstractLoadBalanceSchedulerTask.java
@@ -0,0 +1,136 @@
+package com.webank.wedatasphere.exchangis.job.server.execution.scheduler.tasks;
+
+import com.webank.wedatasphere.exchangis.job.server.exception.ExchangisSchedulerException;
+import com.webank.wedatasphere.exchangis.job.server.exception.ExchangisSchedulerRetryException;
+import com.webank.wedatasphere.exchangis.job.server.execution.scheduler.AbstractExchangisSchedulerTask;
+import com.webank.wedatasphere.exchangis.job.server.execution.scheduler.ScheduleListener;
+import com.webank.wedatasphere.exchangis.job.server.execution.scheduler.loadbalance.LoadBalancePoller;
+import com.webank.wedatasphere.exchangis.job.server.execution.scheduler.loadbalance.SchedulerLoadBalancer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Objects;
+import java.util.Optional;
+
+/**
+ * Implement the load balance logic
+ * @param <T> element type
+ */
+public abstract class AbstractLoadBalanceSchedulerTask<T> extends AbstractExchangisSchedulerTask implements LoadBalanceSchedulerTask<T> {
+
+    private static final Logger LOG = LoggerFactory.getLogger(AbstractLoadBalanceSchedulerTask.class);
+
+    private LoadBalancePoller<T> loadBalancePoller;
+
+    private SchedulerLoadBalancer<T> schedulerLoadBalancer;
+
+    private boolean pollFinish = false;
+
+    /**
+     * Schedule listener
+     */
+    private ScheduleListener<LoadBalanceSchedulerTask<T>> listener;
+
+    public AbstractLoadBalanceSchedulerTask() {
+        super("");
+    }
+
+    @Override
+    protected void schedule() throws ExchangisSchedulerException, ExchangisSchedulerRetryException {
+        LoadBalancePoller<T> loadBalancePoller = getOrCreateLoadBalancePoller();
+        if (Objects.isNull(loadBalancePoller)) {
+            LOG.warn("LoadBalancePoller is empty in load balance scheduler task [{}]", getName());
+            return;
+        }
+        if (!pollFinish && Objects.nonNull(listener)){
+            // Invoke listener
+            listener.onSchedule(this);
+        }
+        List<T> pollElements = new ArrayList<>();
+        LOG.info("Start to iterate the poller in load balance scheduler task [{}]", getName());
+        while (!pollFinish && null != pollElements) {
+            try {
+                pollElements = loadBalancePoller.poll();
+                Optional.ofNullable(pollElements).ifPresent(elements -> {
+                    elements.forEach(pollElement -> {
+                        try {
+                            onPoll(pollElement);
+                        } catch (Exception e) {
+                            LOG.warn("Error occurred in onPoll in load balance scheduler task [{}]", getName(), e);
+                        }
+                    });
+                    for (T pollElement : elements){
+                        try {
+                            rePushWithBalancer(pollElement, this.schedulerLoadBalancer);
+                        } catch (Exception e) {
+                            throw new ExchangisSchedulerException.Runtime(
+                                    "Error occurred in rePush in load balance scheduler task [" + getName() + "]", e);
+                        }
+                    }
+                });
+            } catch (Exception e) {
+                if (e instanceof InterruptedException && pollFinish){
+                    LOG.trace("Poller is interrupted by shutdown, will exit gradually");
+                } else {
+                    if (e instanceof ExchangisSchedulerException.Runtime) {
+                        LOG.warn("Schedule method error", e);
+                    }
+                    LOG.warn("Error occurred in poll/onPoll/rePush in load balance scheduler task [{}]", getName(), e);
+                }
+            }
+        }
+        LOG.info("End to iterate the poller in load balance scheduler task [{}]", getName());
+    }
+
+    @Override
+    public LoadBalancePoller<T> getOrCreateLoadBalancePoller() {
+        if (null == this.loadBalancePoller){
+            synchronized (this){
+                if (null == this.loadBalancePoller) {
+                    this.loadBalancePoller = createLoadBalancePoller();
+                }
+            }
+        }
+        return this.loadBalancePoller;
+    }
+
+    public SchedulerLoadBalancer<T> getSchedulerLoadBalancer() {
+        return schedulerLoadBalancer;
+    }
+
+    public void setSchedulerLoadBalancer(SchedulerLoadBalancer<T> schedulerLoadBalancer) {
+        this.schedulerLoadBalancer = schedulerLoadBalancer;
+    }
+
+    public void setScheduleListener(ScheduleListener<LoadBalanceSchedulerTask<T>> listener){
+        this.listener = listener;
+    }
+
+    /**
+     * Re-push the element into the poller chosen by the balancer
+     * @param element element
+     * @param loadBalancer load balancer
+     */
+    private void rePushWithBalancer(T element, SchedulerLoadBalancer<T> loadBalancer) throws ExchangisSchedulerException {
+        LoadBalanceSchedulerTask<T> loadBalanceSchedulerTask = loadBalancer.choose(element, this.getClass());
+        Optional.ofNullable(loadBalanceSchedulerTask).ifPresent(schedulerTask ->
+                schedulerTask.getOrCreateLoadBalancePoller().push(element));
+    }
+
+    @Override
+    public void kill() {
+        pollFinish = true;
+        super.kill();
+    }
+
+    protected abstract void onPoll(T element) throws ExchangisSchedulerException, ExchangisSchedulerRetryException;
+
+    /**
+     * Create the load balance poller
+     * @return poller
+     */
+    protected abstract LoadBalancePoller<T> createLoadBalancePoller();
+
+}
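Editor's note: `AbstractLoadBalanceSchedulerTask` drives a `LoadBalancePoller`, but no poller implementation is included in this hunk (`DelayLoadBalancePoller`, used further down, lives elsewhere). For orientation, a minimal blocking-queue-backed poller satisfying the interface might look like the following; the class name and the batch limit of 100 are assumptions:

```java
import com.webank.wedatasphere.exchangis.job.server.execution.scheduler.loadbalance.LoadBalancePoller;

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.LinkedBlockingQueue;

/** Illustrative poller: blocks for the first element, then drains a small batch. */
public class SimpleLoadBalancePoller<T> implements LoadBalancePoller<T> {

    private final LinkedBlockingQueue<T> queue = new LinkedBlockingQueue<>();

    @Override
    public List<T> poll() throws InterruptedException {
        List<T> batch = new ArrayList<>();
        // Block until at least one element arrives, then drain up to 100 more
        batch.add(queue.take());
        queue.drainTo(batch, 100);
        return batch;
    }

    @Override
    public void push(T element) {
        queue.offer(element);
    }

    @Override
    @SuppressWarnings("unchecked")
    public void combine(LoadBalancePoller<T> other) {
        // Drain the other poller into this one (only handles this illustrative type)
        if (other instanceof SimpleLoadBalancePoller) {
            ((SimpleLoadBalancePoller<T>) other).queue.drainTo(this.queue);
        }
    }

    @Override
    public int size() {
        return queue.size();
    }
}
```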
diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/tasks/GenerationSchedulerTask.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/tasks/GenerationSchedulerTask.java
new file mode 100644
index 000000000..eeaefba14
--- /dev/null
+++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/tasks/GenerationSchedulerTask.java
@@ -0,0 +1,60 @@
+package com.webank.wedatasphere.exchangis.job.server.execution.scheduler.tasks;
+
+import com.webank.wedatasphere.exchangis.job.domain.ExchangisJobInfo;
+import com.webank.wedatasphere.exchangis.job.launcher.domain.LaunchableExchangisJob;
+import com.webank.wedatasphere.exchangis.job.server.exception.ExchangisSchedulerException;
+import com.webank.wedatasphere.exchangis.job.server.exception.ExchangisSchedulerRetryException;
+import com.webank.wedatasphere.exchangis.job.server.exception.ExchangisTaskGenerateException;
+import com.webank.wedatasphere.exchangis.job.server.execution.generator.TaskGenerator;
+import com.webank.wedatasphere.exchangis.job.server.execution.generator.TaskGeneratorContext;
+import com.webank.wedatasphere.exchangis.job.server.execution.generator.AbstractTaskGenerator;
+import com.webank.wedatasphere.exchangis.job.server.execution.scheduler.AbstractExchangisSchedulerTask;
+import org.apache.linkis.scheduler.queue.JobInfo;
+
+public class GenerationSchedulerTask extends AbstractExchangisSchedulerTask {
+
+    private LaunchableExchangisJob launchableExchangisJob;
+
+    private TaskGeneratorContext ctx;
+
+    private TaskGenerator<LaunchableExchangisJob> taskGenerator;
+
+    /**
+     * Each schedule task should have an id
+     */
+    public GenerationSchedulerTask(TaskGenerator<LaunchableExchangisJob> taskGenerator,
+                                   ExchangisJobInfo exchangisJobInfo) throws ExchangisTaskGenerateException {
+        super("");
+        this.taskGenerator = taskGenerator;
+        this.launchableExchangisJob = taskGenerator.init(exchangisJobInfo);
+        this.ctx = taskGenerator.getTaskGeneratorContext();
+        this.scheduleId = this.launchableExchangisJob.getJobExecutionId();
+    }
+
+    @Override
+    protected void schedule() throws ExchangisSchedulerException, ExchangisSchedulerRetryException {
+        try {
+            this.taskGenerator.generate(this.launchableExchangisJob, this.getTenancy());
+        } catch (Exception e) {
+            String errorMessage = "Exception in generating launchable tasks: [ name: " + launchableExchangisJob.getExchangisJobInfo().getName()
+                    + ", job_execution_id: " + launchableExchangisJob.getJobExecutionId() + "]";
+            // TODO retry the generating progress
+            if (!(e instanceof ExchangisTaskGenerateException)){
+                // Retry the generate progress
+//                throw new ExchangisSchedulerRetryException(errorMessage, e);
+            }
+            throw new ExchangisSchedulerException(errorMessage, e);
+        }
+    }
+
+    @Override
+    public String getName() {
+        return "Scheduler-GenerationTask-" + getId();
+    }
+
+
+    @Override
+    public JobInfo getJobInfo() {
+        return null;
+    }
+}
diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/tasks/LoadBalanceSchedulerTask.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/tasks/LoadBalanceSchedulerTask.java
new file mode 100644
index 000000000..f158fef1d
--- /dev/null
+++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/tasks/LoadBalanceSchedulerTask.java
@@ -0,0 +1,18 @@
+package com.webank.wedatasphere.exchangis.job.server.execution.scheduler.tasks;
+
+import com.webank.wedatasphere.exchangis.job.server.execution.scheduler.ExchangisSchedulerTask;
+import com.webank.wedatasphere.exchangis.job.server.execution.scheduler.loadbalance.LoadBalancePoller;
+
+/**
+ * Scheduler task that could be balanced,
+ * each one is a resident task with a poller
+ * @param <T> element type
+ */
+public interface LoadBalanceSchedulerTask<T> extends ExchangisSchedulerTask {
+
+    /**
+     * Get/Create a poller
+     * @return poller
+     */
+    LoadBalancePoller<T> getOrCreateLoadBalancePoller();
+}
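Editor's note: the two resident tasks that follow (metric update and status update) both specialize `AbstractLoadBalanceSchedulerTask`. Stripped to its required parts, such a subclass only supplies `onPoll` and `createLoadBalancePoller`. A hypothetical `EchoSchedulerTask`, reusing the illustrative poller sketched earlier:

```java
import com.webank.wedatasphere.exchangis.job.server.exception.ExchangisSchedulerException;
import com.webank.wedatasphere.exchangis.job.server.execution.scheduler.loadbalance.LoadBalancePoller;
import org.apache.linkis.scheduler.queue.JobInfo;

/** Hypothetical resident task that just logs each polled element. */
public class EchoSchedulerTask extends AbstractLoadBalanceSchedulerTask<String> {

    @Override
    protected void onPoll(String element) throws ExchangisSchedulerException {
        // Real tasks refresh metrics/status here; this sketch only prints
        System.out.println("polled: " + element);
    }

    @Override
    protected LoadBalancePoller<String> createLoadBalancePoller() {
        return new SimpleLoadBalancePoller<>();
    }

    @Override
    public String getName() {
        return getId() + "-Echo";
    }

    @Override
    public JobInfo getJobInfo() {
        return null;
    }
}
```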
diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/tasks/MetricUpdateSchedulerTask.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/tasks/MetricUpdateSchedulerTask.java
new file mode 100644
index 000000000..941d36461
--- /dev/null
+++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/tasks/MetricUpdateSchedulerTask.java
@@ -0,0 +1,76 @@
+package com.webank.wedatasphere.exchangis.job.server.execution.scheduler.tasks;
+
+import com.webank.wedatasphere.exchangis.job.launcher.exception.ExchangisTaskLaunchException;
+import com.webank.wedatasphere.exchangis.job.launcher.AccessibleLauncherTask;
+import com.webank.wedatasphere.exchangis.job.launcher.domain.LaunchedExchangisTask;
+import com.webank.wedatasphere.exchangis.job.server.exception.ExchangisSchedulerException;
+import com.webank.wedatasphere.exchangis.job.server.exception.ExchangisSchedulerRetryException;
+import com.webank.wedatasphere.exchangis.job.server.execution.TaskManager;
+import com.webank.wedatasphere.exchangis.job.server.execution.scheduler.loadbalance.DelayLoadBalancePoller;
+import com.webank.wedatasphere.exchangis.job.server.execution.scheduler.loadbalance.LoadBalancePoller;
+import org.apache.linkis.common.conf.CommonVars;
+import org.apache.linkis.scheduler.queue.JobInfo;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.Map;
+import java.util.Objects;
+
+/**
+ * Metric update scheduler task
+ */
+public class MetricUpdateSchedulerTask extends AbstractLoadBalanceSchedulerTask<LaunchedExchangisTask> {
+
+    private static final Logger LOG = LoggerFactory.getLogger(MetricUpdateSchedulerTask.class);
+
+    private static final CommonVars<Long> METRIC_UPDATE_INTERVAL = CommonVars.apply("wds.exchangis.job.scheduler.task.metric.update.interval-in-millis", 3000L);
+
+    private TaskManager<LaunchedExchangisTask> taskManager;
+
+    public MetricUpdateSchedulerTask(TaskManager<LaunchedExchangisTask> taskManager){
+        this.taskManager = taskManager;
+    }
+
+    /**
+     * High priority to get schedule resource
+     * @return priority
+     */
+    @Override
+    public int getPriority() {
+        return 2;
+    }
+
+    @Override
+    protected void onPoll(LaunchedExchangisTask launchedExchangisTask) throws ExchangisSchedulerException, ExchangisSchedulerRetryException {
+        LOG.trace("Metrics update task: [{}] in scheduler: [{}]", launchedExchangisTask.getTaskId(), getName());
+        AccessibleLauncherTask launcherTask = launchedExchangisTask.getLauncherTask();
+        try {
+            Map<String, Object> metricsInfo = launcherTask.getMetricsInfo();
+            if (Objects.nonNull(metricsInfo)){
+                taskManager.refreshRunningTaskMetrics(launchedExchangisTask, metricsInfo);
+            }
+        } catch (ExchangisTaskLaunchException e) {
+            throw new ExchangisSchedulerException("Fail to get metrics information for task: [" + launchedExchangisTask.getTaskId() + "]", e);
+        }
+    }
+
+    @Override
+    protected LoadBalancePoller<LaunchedExchangisTask> createLoadBalancePoller() {
+        return new DelayLoadBalancePoller<LaunchedExchangisTask>() {
+            @Override
+            protected long getDelayTimeInMillis(LaunchedExchangisTask element) {
+                // Absolute trigger time: now + configured interval
+                return System.currentTimeMillis() + METRIC_UPDATE_INTERVAL.getValue();
+            }
+        };
+    }
+
+    @Override
+    public String getName() {
+        return getId() + "-Metric";
+    }
+
+    @Override
+    public JobInfo getJobInfo() {
+        return null;
+    }
+}
diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/tasks/StatusUpdateSchedulerTask.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/tasks/StatusUpdateSchedulerTask.java
new file mode 100644
index 000000000..4772fdc22
--- /dev/null
+++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/tasks/StatusUpdateSchedulerTask.java
@@ -0,0 +1,82 @@
+package com.webank.wedatasphere.exchangis.job.server.execution.scheduler.tasks;
+
+import com.webank.wedatasphere.exchangis.job.launcher.domain.task.TaskStatus;
+import com.webank.wedatasphere.exchangis.job.launcher.exception.ExchangisTaskLaunchException;
+import com.webank.wedatasphere.exchangis.job.launcher.AccessibleLauncherTask;
+import com.webank.wedatasphere.exchangis.job.launcher.domain.LaunchedExchangisTask;
+import com.webank.wedatasphere.exchangis.job.launcher.domain.task.TaskProgressInfo;
+import
com.webank.wedatasphere.exchangis.job.server.exception.ExchangisSchedulerException; +import com.webank.wedatasphere.exchangis.job.server.exception.ExchangisSchedulerRetryException; +import com.webank.wedatasphere.exchangis.job.server.execution.TaskManager; +import com.webank.wedatasphere.exchangis.job.server.execution.scheduler.loadbalance.DelayLoadBalancePoller; +import com.webank.wedatasphere.exchangis.job.server.execution.scheduler.loadbalance.LoadBalancePoller; +import org.apache.linkis.common.conf.CommonVars; +import org.apache.linkis.scheduler.queue.JobInfo; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.Objects; + +/** + * Status update scheduler task + */ +public class StatusUpdateSchedulerTask extends AbstractLoadBalanceSchedulerTask { + + private static final Logger LOG = LoggerFactory.getLogger(StatusUpdateSchedulerTask.class); + private static final CommonVars STATUS_UPDATE_INTERVAL = CommonVars.apply("wds.exchangis.job.scheduler.task.status.update.interval-in-millis", 5000L); + + private TaskManager taskManager; + + /** + * High priority to get schedule resource + * @return priority + */ + @Override + public int getPriority() { + return 2; + } + + public StatusUpdateSchedulerTask(TaskManager taskManager){ + this.taskManager = taskManager; + } + @Override + protected void onPoll(LaunchedExchangisTask launchedExchangisTask) throws ExchangisSchedulerException, ExchangisSchedulerRetryException { + LOG.info("Status update task: [{}] in scheduler: [{}]", launchedExchangisTask.getId(), getName()); + AccessibleLauncherTask launcherTask = launchedExchangisTask.getLauncherTask(); + try{ + TaskProgressInfo progressInfo = launcherTask.getProgressInfo(); + if (Objects.nonNull(progressInfo)){ + this.taskManager.refreshRunningTaskProgress(launchedExchangisTask, progressInfo); + } + TaskStatus status = launcherTask.getLocalStatus(); + if (TaskStatus.isCompleted(status)){ + this.taskManager.refreshRunningTaskStatusAndMetrics(launchedExchangisTask, + status, launcherTask.getMetricsInfo()); + } else { + this.taskManager.refreshRunningTaskStatus(launchedExchangisTask, status); + } + } catch (ExchangisTaskLaunchException e){ + throw new ExchangisSchedulerException("Fail to update status(progress) for task: [" + launchedExchangisTask.getTaskId() + "]", e); + } + } + + @Override + protected LoadBalancePoller createLoadBalancePoller() { + return new DelayLoadBalancePoller() { + @Override + protected long getDelayTimeInMillis(LaunchedExchangisTask element) { + return System.currentTimeMillis() + STATUS_UPDATE_INTERVAL.getValue(); + } + }; + } + + @Override + public String getName() { + return getId() + "-Status"; + } + + @Override + public JobInfo getJobInfo() { + return null; + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/tasks/SubmitSchedulerTask.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/tasks/SubmitSchedulerTask.java new file mode 100644 index 000000000..1bfd20dda --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/tasks/SubmitSchedulerTask.java @@ -0,0 +1,234 @@ +package com.webank.wedatasphere.exchangis.job.server.execution.scheduler.tasks; + +import com.webank.wedatasphere.exchangis.job.launcher.exception.ExchangisTaskLaunchException; +import com.webank.wedatasphere.exchangis.job.launcher.ExchangisTaskLauncher; +import 
com.webank.wedatasphere.exchangis.job.launcher.domain.LaunchableExchangisTask; +import com.webank.wedatasphere.exchangis.job.launcher.domain.LaunchedExchangisTask; +import com.webank.wedatasphere.exchangis.job.launcher.domain.task.TaskStatus; +import com.webank.wedatasphere.exchangis.job.listener.JobLogListener; +import com.webank.wedatasphere.exchangis.job.listener.events.JobLogEvent; +import com.webank.wedatasphere.exchangis.job.server.exception.ExchangisSchedulerException; +import com.webank.wedatasphere.exchangis.job.server.exception.ExchangisSchedulerRetryException; +import com.webank.wedatasphere.exchangis.job.server.execution.AbstractTaskManager; +import com.webank.wedatasphere.exchangis.job.server.execution.TaskManager; +import com.webank.wedatasphere.exchangis.job.server.execution.events.TaskDequeueEvent; +import com.webank.wedatasphere.exchangis.job.server.execution.events.TaskExecutionEvent; +import com.webank.wedatasphere.exchangis.job.server.execution.events.TaskDeleteEvent; +import com.webank.wedatasphere.exchangis.job.server.execution.events.TaskStatusUpdateEvent; +import com.webank.wedatasphere.exchangis.job.server.execution.loadbalance.TaskSchedulerLoadBalancer; +import com.webank.wedatasphere.exchangis.job.server.execution.scheduler.AbstractExchangisSchedulerTask; +import com.webank.wedatasphere.exchangis.job.server.log.JobServerLogging; +import org.apache.linkis.scheduler.queue.JobInfo; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.*; +import java.util.concurrent.Callable; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicInteger; + +/** + * Submit scheduler task + */ +public class SubmitSchedulerTask extends AbstractExchangisSchedulerTask implements JobServerLogging { + + private static final Logger LOG = LoggerFactory.getLogger(SubmitSchedulerTask.class); + + /** + * Submit parallel limit + */ + private static final AtomicInteger SUBMIT_PARALLEL = new AtomicInteger(0); + + private LaunchableExchangisTask launchableExchangisTask; + + private TaskManager taskManager; + + private ExchangisTaskLauncher launcher; + + private TaskSchedulerLoadBalancer loadBalancer; + + private Callable submitCondition; + + private AtomicInteger retryCnt = new AtomicInteger(0); + + /** + * Submittable + */ + private AtomicBoolean submitAble = new AtomicBoolean(false); + /** + * Each schedule task should has an id + * + */ + public SubmitSchedulerTask(LaunchableExchangisTask task, Callable submitCondition) { + this(task, submitCondition, false); + } + + public SubmitSchedulerTask(LaunchableExchangisTask task){ + this(task, null, false); + } + public SubmitSchedulerTask(LaunchableExchangisTask task, Callable submitCondition, boolean checkCondition) { + super(String.valueOf(task.getId())); + this.launchableExchangisTask = task; + this.submitCondition = submitCondition; + if (checkCondition) { + try { + submitAble.set(submitCondition.call()); + } catch (Exception e) { + // Ignore + } + } + // Set max retry + } + @Override + protected void schedule() throws ExchangisSchedulerException, ExchangisSchedulerRetryException { + String jobExecutionId = this.launchableExchangisTask.getJobExecutionId(); + if (!submitAble.get()) { + try { + submitAble.set(submitCondition.call()); + } catch (Exception e) { + throw new ExchangisSchedulerRetryException("Error occurred in examining submit condition for task: [" + launchableExchangisTask.getId() + "]", e); + } + } + if (submitAble.get()) { + info(jobExecutionId, "Submit the launchable 
task: [name:{} ,id:{} ] to launcher: [{}], retry_count: {}", + launchableExchangisTask.getName(), launchableExchangisTask.getId(), launcher.name(), retryCnt.get()); + LaunchedExchangisTask launchedExchangisTask; + try { + // Invoke launcher + Date launchTime = Calendar.getInstance().getTime(); + launchedExchangisTask = launcher.launch(this.launchableExchangisTask); +// launchedExchangisTask = new LaunchedExchangisTask(launchableExchangisTask); + launchedExchangisTask.setLaunchTime(launchTime); + info(jobExecutionId, "Success to submit task:[name:{}, id:{}] to Linkis [linkis_id: {}, info: {}]", + launchedExchangisTask.getName(), launchedExchangisTask.getId(), launchedExchangisTask.getLinkisJobId(), launchedExchangisTask.getLinkisJobInfo()); + } catch (Exception e) { + info(jobExecutionId, "Launch task:[name:{} ,id:{}] fail, possible reason is: [{}]", + launchableExchangisTask.getName(), launchableExchangisTask.getId(), getActualCause(e).getMessage()); + if (retryCnt.incrementAndGet() < getMaxRetryNum()) { + // Remove the launched task stored +// onEvent(new TaskDeleteEvent(String.valueOf(launchableExchangisTask.getId()))); + throw new ExchangisSchedulerRetryException("Error occurred in invoking launching method for task: [" + launchableExchangisTask.getId() +"]", e); + }else { + // Update the launched task status to fail + // New be failed + // Remove the launched task stored + onEvent(new TaskDeleteEvent(String.valueOf(launchableExchangisTask.getId()))); +// launchedExchangisTask = new LaunchedExchangisTask(); +// launchedExchangisTask.setTaskId(String.valueOf(launchableExchangisTask.getId())); +// launchedExchangisTask.setJobExecutionId(launchableExchangisTask.getJobExecutionId()); +// onEvent(new TaskStatusUpdateEvent(launchedExchangisTask, TaskStatus.Failed)); + } + throw new ExchangisSchedulerException("Error occurred in invoking launching method for task: [" + launchableExchangisTask.getId() +"]", e); + } + // Add the success/launched job into taskManager + if (Objects.nonNull(this.taskManager)){ + boolean successAdd = true; + try { + this.taskManager.addRunningTask(launchedExchangisTask); + } catch (Exception e){ + successAdd = false; + error(jobExecutionId, "Error occurred in adding running task: [{}] to taskManager, linkis_id: [{}], should kill the job in linkis!", + launchedExchangisTask.getId(), launchedExchangisTask.getLinkisJobId(), e); + Optional.ofNullable(launchedExchangisTask.getLauncherTask()).ifPresent(launcherTask -> { + try { + launcherTask.kill(); + } catch (ExchangisTaskLaunchException ex){ + LOG.error("Kill linkis_id: [{}] fail", launchedExchangisTask.getLinkisJobId(), e); + } + }); + } + if (successAdd){ + try { + onEvent(new TaskDequeueEvent(launchableExchangisTask.getId() + "")); + }catch (Exception e){ + // Ignore the exception + LOG.warn("Fail to dequeue the launchable task [{}]", launchableExchangisTask.getId(), e); + } + if (Objects.nonNull(this.loadBalancer)){ + // Add the launchedExchangisTask to the load balance poller + List> loadBalanceSchedulerTasks = this.loadBalancer.choose(launchedExchangisTask); + Optional.ofNullable(loadBalanceSchedulerTasks).ifPresent(tasks -> tasks.forEach(loadBalanceSchedulerTask -> { + loadBalanceSchedulerTask.getOrCreateLoadBalancePoller().push(launchedExchangisTask); + })); + } + } + } + } + } + + /** + * Check if it can be submitted + * @return boolean + */ + public boolean isSubmitAble(){ + return submitAble.get(); + } + /** + * Listen the execution event + * @param event + */ + private void onEvent(TaskExecutionEvent 
event){ + if (this.taskManager instanceof AbstractTaskManager) { + ((AbstractTaskManager) this.taskManager).onEvent(event); + } + } + + @Override + public JobLogEvent getJobLogEvent(JobLogEvent.Level level, String executionId, String message, Object... args) { + return new JobLogEvent(level, this.getTenancy(), executionId, message, args); + } + + /** + * Get actual cause + * @param throwable throwable + * @return Throwable + */ + private Throwable getActualCause(Throwable throwable){ + Throwable t = throwable; + while (Objects.nonNull(t.getCause())){ + t = t.getCause(); + } + return t; + } + @Override + public JobLogListener getJobLogListener() { + if (Objects.nonNull(this.taskManager)){ + return this.taskManager.getJobLogListener(); + } + return null; + } + + @Override + public String getName() { + return "Scheduler-SubmitTask-" + getId(); + } + + @Override + public JobInfo getJobInfo() { + return null; + } + + public TaskManager getTaskManager() { + return taskManager; + } + + public void setTaskManager(TaskManager taskManager) { + this.taskManager = taskManager; + } + + public ExchangisTaskLauncher getLauncher() { + return launcher; + } + + public void setLauncher(ExchangisTaskLauncher launcher) { + this.launcher = launcher; + } + + public TaskSchedulerLoadBalancer getLoadBalancer() { + return loadBalancer; + } + + public void setLoadBalancer(TaskSchedulerLoadBalancer loadBalancer) { + this.loadBalancer = loadBalancer; + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/subscriber/AbstractTaskObserver.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/subscriber/AbstractTaskObserver.java new file mode 100644 index 000000000..2229dd1f0 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/subscriber/AbstractTaskObserver.java @@ -0,0 +1,237 @@ +package com.webank.wedatasphere.exchangis.job.server.execution.subscriber; + +import com.webank.wedatasphere.exchangis.job.domain.ExchangisTask; +import com.webank.wedatasphere.exchangis.job.server.exception.ExchangisTaskObserverException; +import com.webank.wedatasphere.exchangis.job.server.execution.TaskExecution; +import com.webank.wedatasphere.exchangis.job.server.execution.TaskManager; +import org.apache.linkis.common.conf.CommonVars; +import org.apache.linkis.scheduler.Scheduler; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.List; +import java.util.Objects; +import java.util.concurrent.Future; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.locks.Condition; +import java.util.concurrent.locks.ReentrantLock; + +/** + * Contains the schedule and publish strategies + */ +public abstract class AbstractTaskObserver implements TaskObserver { + + private static final Logger LOG = LoggerFactory.getLogger(AbstractTaskObserver.class); + + private static final int DEFAULT_TASK_OBSERVER_PUBLISH_INTERVAL = 10000; + + private static final int DEFAULT_TASK_OBSERVER_PUBLISH_BATCH = 50; + + private static final CommonVars TASK_OBSERVER_PUBLISH_INTERVAL = CommonVars.apply("wds.exchangis.job.task.observer.publish.interval-in-millisecond", DEFAULT_TASK_OBSERVER_PUBLISH_INTERVAL); + + private static final CommonVars TASK_OBSERVER_PUBLISH_BATCH = CommonVars.apply("wds.exchangis.job.task.observer.publish.batch", DEFAULT_TASK_OBSERVER_PUBLISH_BATCH); + + private 
Scheduler scheduler; + + private TaskChooseRuler taskChooseRuler; + + /** + * Task manager + */ + private TaskManager taskManager; + + private TaskExecution taskExecution; + + private ReentrantLock observerLock = new ReentrantLock(); + + private Condition emptyCondition = observerLock.newCondition(); + + private AtomicBoolean waitStatus = new AtomicBoolean(false); + + private Future observerFuture; + + protected int publishBatch; + + protected int publishInterval; + + protected long lastPublishTime = -1; + + private boolean isShutdown = false; + + public AbstractTaskObserver(int publishBatch, int publishInterval){ + if (publishBatch <= 0){ + throw new IllegalArgumentException("Batch size of task subscribed cannot be less than(<) 0"); + } + this.publishBatch = publishBatch; + this.publishInterval = publishInterval; + } + + public AbstractTaskObserver(){ + this.publishBatch = TASK_OBSERVER_PUBLISH_BATCH.getValue(); + this.publishInterval = TASK_OBSERVER_PUBLISH_INTERVAL.getValue(); + } + + @Override + public void run() { + Thread.currentThread().setName("Observe-Thread-" + getName()); + LOG.info("Thread: [ {} ] is started. ", Thread.currentThread().getName()); + this.lastPublishTime = System.currentTimeMillis(); + while (!isShutdown) { + try { + List publishedTasks; + try { + publishedTasks = onPublish(publishBatch); + // If list of publish tasks is not empty + if (publishedTasks.size() > 0) { + this.lastPublishTime = System.currentTimeMillis(); + } + } catch (ExchangisTaskObserverException e){ + e.setMethodName("call_on_publish"); + throw e; + } + if (!publishedTasks.isEmpty()) { + List chooseTasks; + try { + chooseTasks = choose(publishedTasks, getTaskChooseRuler(), getScheduler()); + } catch (Exception e){ + throw new ExchangisTaskObserverException("call_choose_rule", "Fail to choose candidate tasks", e); + } + if (!chooseTasks.isEmpty()) { + try { + subscribe(chooseTasks); + } catch (ExchangisTaskObserverException e){ + e.setMethodName("call_subscribe"); + throw e; + } + } + } + sleepOrWaitIfNeed(publishedTasks); + } catch (Exception e){ + if(e instanceof ExchangisTaskObserverException){ + LOG.warn("Observer exception in progress paragraph: [{}]",((ExchangisTaskObserverException)e).getMethodName(), e); + } + LOG.warn("Unknown exception happened in observing thread", e); + // Enforce to sleep + try { + Thread.sleep(publishInterval); + } catch (InterruptedException ex) { + //Ignore + } + } + } + LOG.info("Thread: [ {} ] is stopped. 
", Thread.currentThread().getName()); + } + + @Override + public synchronized void start() { + if (Objects.isNull(this.scheduler)){ + throw new ExchangisTaskObserverException.Runtime("TaskScheduler cannot be empty, please set it before starting the ["+ getName() +"]!", null); + } + if (Objects.nonNull(this.observerFuture)){ + throw new ExchangisTaskObserverException.Runtime("The observer: [" + getName() +"] has been started before", null); + } + // Submit self to default executor service + this.observerFuture = this.scheduler.getSchedulerContext() + .getOrCreateConsumerManager().getOrCreateExecutorService().submit(this); + } + + @Override + public synchronized void stop() { + if (Objects.nonNull(this.observerFuture)) { + this.isShutdown = true; + this.observerFuture.cancel(true); + } + } + + /** + * Sleep or wait during the publish and subscribe + * @param publishedTasks published tasks + */ + private void sleepOrWaitIfNeed(List publishedTasks){ + long observerWait = this.lastPublishTime + publishInterval - System.currentTimeMillis(); + if (publishedTasks.isEmpty() || observerWait > 0) { + observerWait = observerWait > 0? observerWait : publishInterval; + boolean hasLock = observerLock.tryLock(); + if (hasLock) { + try { + LOG.trace("TaskObserver wait in {} ms to ", observerWait); + waitStatus.set(true); + emptyCondition.await(observerWait, TimeUnit.MILLISECONDS); + } catch (InterruptedException e) { + if (isShutdown){ + LOG.warn("TaskObserver wait is interrupted by shutdown"); + } else { + LOG.warn("TaskObserver wait is interrupted", e); + } + } finally { + waitStatus.set(false); + observerLock.unlock(); + } + } + } + } + protected abstract List onPublish(int batchSize) throws ExchangisTaskObserverException; + + /** + * Call publish + */ + protected void publish(){ + if (waitStatus.get()) { + observerLock.lock(); + try { + emptyCondition.signalAll(); + } finally { + observerLock.unlock(); + } + } + } + protected List choose(List candidateTasks, TaskChooseRuler chooseRuler, Scheduler scheduler){ + return chooseRuler.choose(candidateTasks, scheduler); + } + + @Override + public TaskChooseRuler getTaskChooseRuler() { + return this.taskChooseRuler; + } + + @Override + public void setTaskChooseRuler(TaskChooseRuler ruler) { + this.taskChooseRuler = ruler; + } + + + @Override + public Scheduler getScheduler() { + return scheduler; + } + + public void setScheduler(Scheduler scheduler) { + this.scheduler = scheduler; + } + + @Override + public TaskManager getTaskManager() { + return this.taskManager; + } + + @Override + public void setTaskManager(TaskManager taskManager) { + this.taskManager = taskManager; + } + + @Override + public void setTaskExecution(TaskExecution taskExecution) { + this.taskExecution = taskExecution; + } + + @Override + public TaskExecution getTaskExecution() { + return taskExecution; + } + + @Override + public String getName() { + return getClass().getSimpleName(); + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/subscriber/CacheInTaskObserver.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/subscriber/CacheInTaskObserver.java new file mode 100644 index 000000000..92236d133 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/subscriber/CacheInTaskObserver.java @@ -0,0 +1,114 @@ +package com.webank.wedatasphere.exchangis.job.server.execution.subscriber; + + +import 
com.webank.wedatasphere.exchangis.job.domain.ExchangisTask; +import com.webank.wedatasphere.exchangis.job.server.exception.ExchangisTaskObserverException; +import org.apache.linkis.common.conf.CommonVars; +import org.apache.linkis.scheduler.Scheduler; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.*; +import java.util.concurrent.ArrayBlockingQueue; + +/** + * Subscribe the task cached in memory(queue) + */ +public abstract class CacheInTaskObserver extends AbstractTaskObserver { + + private static final Logger LOG = LoggerFactory.getLogger(CacheInTaskObserver.class); + + protected Queue queue; + + private static final CommonVars TASK_OBSERVER_CACHE_SIZE = CommonVars.apply("wds.exchangis.job.task.observer.cache.size", 3000); + + public CacheInTaskObserver(int cacheSize){ + this.queue = new ArrayBlockingQueue<>(cacheSize); + } + + public CacheInTaskObserver(){ + this.queue = new ArrayBlockingQueue<>(TASK_OBSERVER_CACHE_SIZE.getValue()); + } + @Override + public List onPublish(int batchSize) throws ExchangisTaskObserverException { + List cacheTasks = new ArrayList<>(batchSize); + T polledTask; + while (cacheTasks.size() < batchSize && (polledTask = queue.poll()) != null){ + cacheTasks.add(polledTask); + } + int fetchTaskSize = cacheTasks.size(); + int restBatchSize = batchSize - fetchTaskSize; + if (restBatchSize > 0 && (this.lastPublishTime + this.publishInterval <= System.currentTimeMillis())) { + Optional.ofNullable(onPublishNext(restBatchSize)).ifPresent(cacheTasks::addAll); + } + return cacheTasks; + } + + @Override + protected List choose(List candidateTasks, + TaskChooseRuler chooseRuler, Scheduler scheduler) { + List chooseTasks = chooseRuler.choose(candidateTasks, scheduler); + // Left the rejected tasks + candidateTasks.removeAll(chooseTasks); + // Update the lastUpdateTime + Date currentTime = Calendar.getInstance().getTime(); + candidateTasks.forEach(task -> task.setLastUpdateTime(currentTime)); + // Requeue into + this.queue.addAll(candidateTasks); + return chooseTasks; + } + + protected abstract List onPublishNext(int batchSize) throws ExchangisTaskObserverException; + /** + * Offer operation for service to add/offer queue + * @return queue + */ + public Queue getCacheQueue(){ + return new OperateLimitQueue(this.queue); + } + + /** + * Limit the operation + */ + private class OperateLimitQueue extends AbstractQueue{ + + private Queue innerQueue; + + public OperateLimitQueue(Queue queue){ + this.innerQueue = queue; + } + + @Override + public Iterator iterator() { + throw new ExchangisTaskObserverException.Runtime("Unsupported operation 'iterator'", null); + } + + @Override + public int size() { + return this.innerQueue.size(); + } + + @Override + public boolean offer(T launchableExchangisTask) { + boolean offer = this.innerQueue.offer(launchableExchangisTask); + if(offer){ + try { + publish(); + } catch (Exception e){ + LOG.warn("Publish the launchable task: {} has occurred an exception", launchableExchangisTask.getId(), e); + } + } + return offer; + } + + @Override + public T poll() { + throw new ExchangisTaskObserverException.Runtime("Unsupported operation 'poll'", null); + } + + @Override + public T peek() { + throw new ExchangisTaskObserverException.Runtime("Unsupported operation 'peek'", null); + } + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/subscriber/MaxParallelChooseRuler.java 
b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/subscriber/MaxParallelChooseRuler.java
new file mode 100644
index 000000000..4da588b73
--- /dev/null
+++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/subscriber/MaxParallelChooseRuler.java
@@ -0,0 +1,18 @@
+package com.webank.wedatasphere.exchangis.job.server.execution.subscriber;
+
+import com.webank.wedatasphere.exchangis.job.launcher.domain.LaunchableExchangisTask;
+import org.apache.linkis.scheduler.Scheduler;
+
+import java.util.List;
+
+/**
+ * Max parallel number of tenancy in choose ruler
+ */
+public class MaxParallelChooseRuler extends MaxUsageTaskChooseRuler {
+
+    @Override
+    public List<LaunchableExchangisTask> choose(List<LaunchableExchangisTask> candidates, Scheduler scheduler) {
+        List<LaunchableExchangisTask> usageChosen = super.choose(candidates, scheduler);
+        // TODO apply the tenancy max-parallel limit; for now return the usage-based choice
+        return usageChosen;
+    }
+}
diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/subscriber/MaxUsageTaskChooseRuler.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/subscriber/MaxUsageTaskChooseRuler.java
new file mode 100644
index 000000000..4ccbeb3d5
--- /dev/null
+++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/subscriber/MaxUsageTaskChooseRuler.java
@@ -0,0 +1,44 @@
+package com.webank.wedatasphere.exchangis.job.server.execution.subscriber;
+
+import com.webank.wedatasphere.exchangis.job.launcher.domain.LaunchableExchangisTask;
+import com.webank.wedatasphere.exchangis.job.server.execution.scheduler.TenancyParallelConsumerManager;
+import com.webank.wedatasphere.exchangis.job.server.execution.scheduler.TenancyParallelGroupFactory;
+import org.apache.commons.lang.StringUtils;
+import org.apache.linkis.scheduler.Scheduler;
+import org.apache.linkis.scheduler.queue.ConsumerManager;
+import org.springframework.stereotype.Component;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+import java.util.concurrent.ArrayBlockingQueue;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.ThreadPoolExecutor;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.stream.Collectors;
+
+/**
+ * Max usage of queue in executor service
+ */
+public class MaxUsageTaskChooseRuler implements TaskChooseRuler<LaunchableExchangisTask> {
+
+    @Override
+    public List<LaunchableExchangisTask> choose(List<LaunchableExchangisTask> candidates, Scheduler scheduler) {
+        ConsumerManager consumerManager = scheduler.getSchedulerContext().getOrCreateConsumerManager();
+        if (consumerManager instanceof TenancyParallelConsumerManager){
+            TenancyParallelConsumerManager tenancyConsumerManager = (TenancyParallelConsumerManager) consumerManager;
+            Map<String, ExecutorService> tenancyExecutorServices = tenancyConsumerManager.getTenancyExecutorServices();
+            Map<String, AtomicInteger> candidateCounter = new HashMap<>();
+            return candidates.stream().filter(task -> {
+                String tenancy = StringUtils.isNotBlank(task.getExecuteUser())?
+ task.getExecuteUser(): TenancyParallelGroupFactory.DEFAULT_TENANCY; + ExecutorService executorService = tenancyExecutorServices.get(tenancy); + AtomicInteger counter = candidateCounter.computeIfAbsent(tenancy, (key) -> new AtomicInteger(0)); + // TODO complete the choose rule + return Objects.isNull(executorService) || ((ThreadPoolExecutor)executorService).getQueue().remainingCapacity() >= counter.incrementAndGet(); + }).collect(Collectors.toList()); + } + return candidates; + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/subscriber/NewInTaskObserver.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/subscriber/NewInTaskObserver.java new file mode 100644 index 000000000..e81824fe0 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/subscriber/NewInTaskObserver.java @@ -0,0 +1,79 @@ +package com.webank.wedatasphere.exchangis.job.server.execution.subscriber; + +import com.webank.wedatasphere.exchangis.job.launcher.domain.LaunchableExchangisTask; +import com.webank.wedatasphere.exchangis.job.launcher.domain.LaunchedExchangisTask; +import com.webank.wedatasphere.exchangis.job.server.exception.ExchangisTaskObserverException; +import com.webank.wedatasphere.exchangis.job.server.execution.TaskExecution; +import com.webank.wedatasphere.exchangis.job.server.execution.scheduler.tasks.SubmitSchedulerTask; +import com.webank.wedatasphere.exchangis.job.server.service.TaskObserverService; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.stereotype.Component; + +import javax.annotation.Resource; +import java.util.Iterator; +import java.util.List; +import java.util.Objects; + +/** + * Subscribe the new task from database and then submit to scheduler + */ +@Component +public class NewInTaskObserver extends CacheInTaskObserver { + + private static final Logger LOG = LoggerFactory.getLogger(NewInTaskObserver.class); + + @Resource + private TaskObserverService taskObserverService; + + @Override + protected List onPublishNext(int batchSize){ + // Get the launchable task from launchable task inner join launched task + List tasks = taskObserverService.onPublishLaunchableTask(batchSize); + if (!tasks.isEmpty()) { + LOG.info("Get the launchable task from database, size: [{}]", tasks.size()); + } + return tasks; + } + + + @Override + public void subscribe(List publishedTasks) throws ExchangisTaskObserverException { + Iterator iterator = publishedTasks.iterator(); + TaskExecution taskExecution = getTaskExecution(); + if (Objects.isNull(taskExecution)){ + throw new ExchangisTaskObserverException("TaskExecution cannot be null, please set it before subscribing!", null); + } + while(iterator.hasNext()){ + LaunchableExchangisTask launchableExchangisTask = iterator.next(); + if (Objects.nonNull(launchableExchangisTask)){ + try { + // Check the submittable condition first in order to avoid the duplicate scheduler tasks + SubmitSchedulerTask submitSchedulerTask = new SubmitSchedulerTask(launchableExchangisTask, + () -> { + // check the status of launchedTask + // insert or update launched task, status as TaskStatus.Scheduler + return taskObserverService.subscribe(launchableExchangisTask); + }, true); + if (submitSchedulerTask.isSubmitAble()) { + submitSchedulerTask.setTenancy(launchableExchangisTask.getExecuteUser()); + try { + taskExecution.submit(submitSchedulerTask); + } catch 
(Exception e) { + LOG.warn("Fail to async submit launchable task: [ id: {}, name: {}, job_execution_id: {} ]" + , launchableExchangisTask.getId(), launchableExchangisTask.getName(), launchableExchangisTask.getJobExecutionId(), e); + } + } + } catch (Exception e){ + LOG.error("Exception in subscribing launchable tasks, please check your status of database and network", e); + } + } + iterator.remove(); + } + } + + @Override + public String getName() { + return "NewInTaskObserver"; + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/subscriber/ReceiveTaskSubscriber.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/subscriber/ReceiveTaskSubscriber.java new file mode 100644 index 000000000..f0748660b --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/subscriber/ReceiveTaskSubscriber.java @@ -0,0 +1,27 @@ +package com.webank.wedatasphere.exchangis.job.server.execution.subscriber; + +import com.webank.wedatasphere.exchangis.job.launcher.domain.LaunchedExchangisTask; +import com.webank.wedatasphere.exchangis.job.server.exception.ExchangisTaskObserverException; +import org.springframework.stereotype.Component; + +import java.util.ArrayList; +import java.util.List; + +/** + * Default implements + */ +@Component +public class ReceiveTaskSubscriber extends AbstractTaskObserver { + + + @Override + protected List onPublish(int batchSize) throws ExchangisTaskObserverException { + return new ArrayList<>(); + } + + + @Override + public void subscribe(List publishedTasks) throws ExchangisTaskObserverException { + + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/subscriber/TaskChooseRuler.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/subscriber/TaskChooseRuler.java new file mode 100644 index 000000000..9b0f08762 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/subscriber/TaskChooseRuler.java @@ -0,0 +1,20 @@ +package com.webank.wedatasphere.exchangis.job.server.execution.subscriber; + +import com.webank.wedatasphere.exchangis.job.domain.ExchangisTask; +import org.apache.linkis.scheduler.Scheduler; + +import java.util.List; + +/** + * Choose rule + */ +public interface TaskChooseRuler { + + /** + * Choose the tasks from candidates with scheduler + * @param candidates candidate task + * @param scheduler scheduler + * @return + */ + List choose(List candidates, Scheduler scheduler); +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/subscriber/TaskObserver.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/subscriber/TaskObserver.java new file mode 100644 index 000000000..d0a7b582f --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/subscriber/TaskObserver.java @@ -0,0 +1,54 @@ +package com.webank.wedatasphere.exchangis.job.server.execution.subscriber; + +import com.webank.wedatasphere.exchangis.job.domain.ExchangisTask; +import com.webank.wedatasphere.exchangis.job.server.exception.ExchangisTaskObserverException; +import com.webank.wedatasphere.exchangis.job.server.execution.TaskExecution; +import 
com.webank.wedatasphere.exchangis.job.server.execution.TaskManager; +import com.webank.wedatasphere.exchangis.job.server.execution.scheduler.SchedulerThread; +import org.apache.linkis.scheduler.Scheduler; + +import java.util.List; + + +/** + * Subscribe the ExchangisTask + * @param extends ExchangisTask + */ +public interface TaskObserver extends SchedulerThread { + + + /** + * Subscribe method + * @param publishedTasks + */ + void subscribe(List publishedTasks) throws ExchangisTaskObserverException; + + /** + * Task choose ruler + * @return ruler + */ + TaskChooseRuler getTaskChooseRuler(); + + void setTaskChooseRuler(TaskChooseRuler ruler); + + /** + * Scheduler + * @return + */ + Scheduler getScheduler(); + + void setScheduler(Scheduler scheduler); + + /** + * Task manager + * @return + */ + TaskManager getTaskManager(); + + void setTaskManager(TaskManager taskManager); + + void setTaskExecution(TaskExecution execution); + + TaskExecution getTaskExecution(); + +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/subscriber/UpdateInTaskObserver.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/subscriber/UpdateInTaskObserver.java new file mode 100644 index 000000000..004a8d53a --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/subscriber/UpdateInTaskObserver.java @@ -0,0 +1,20 @@ +package com.webank.wedatasphere.exchangis.job.server.execution.subscriber; + +import com.webank.wedatasphere.exchangis.job.launcher.domain.LaunchedExchangisTask; +import com.webank.wedatasphere.exchangis.job.server.exception.ExchangisTaskObserverException; + +import java.util.List; + +public class UpdateInTaskObserver extends AbstractTaskObserver { + + @Override + protected List onPublish(int batchSize) throws ExchangisTaskObserverException { + return null; + } + + + @Override + public void subscribe(List publishedTasks) throws ExchangisTaskObserverException { + + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/log/JobServerLogging.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/log/JobServerLogging.java new file mode 100644 index 000000000..2b4e11dba --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/log/JobServerLogging.java @@ -0,0 +1,89 @@ +package com.webank.wedatasphere.exchangis.job.server.log; + +import com.webank.wedatasphere.exchangis.job.listener.JobLogListener; +import com.webank.wedatasphere.exchangis.job.listener.events.JobLogEvent; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.Objects; +import java.util.Optional; + +public interface JobServerLogging { + default Logger getLogger(){ + return LoggerFactory.getLogger(this.getClass()); + } + + default void trace(T entity, String message, Object... 
args) { + Logger logger = getLogger(); + if (Objects.nonNull(logger) && logger.isTraceEnabled()){ + logger.trace(message, args); + } + Optional.ofNullable(getJobLogListener()).ifPresent(listener -> + listener.onAsyncEvent(getJobLogEvent(JobLogEvent.Level.INFO, entity, message, args))); + } + + default void debug(T entity, String message){ + Logger logger = getLogger(); + if (Objects.nonNull(logger) && logger.isDebugEnabled()){ + logger.debug(message); + } + } + + default void info(T entity, String message, Object... args){ + Logger logger = getLogger(); + if (Objects.nonNull(logger) && logger.isInfoEnabled()){ + logger.info(message, args); + } + Optional.ofNullable(getJobLogListener()).ifPresent(listener -> + listener.onAsyncEvent(getJobLogEvent(JobLogEvent.Level.INFO, entity, message, args))); + } + + default void info(T entity, String message, Throwable t){ + Logger logger = getLogger(); + if (Objects.nonNull(logger) && logger.isInfoEnabled()){ + logger.info(message, t); + } + Optional.ofNullable(getJobLogListener()).ifPresent(listener -> + listener.onAsyncEvent(getJobLogEvent(JobLogEvent.Level.INFO, entity, message, t))); + } + + default void warn(T entity, String message, Object... args){ + Logger logger = getLogger(); + if (Objects.nonNull(logger) && logger.isWarnEnabled()){ + logger.warn(message, args); + } + Optional.ofNullable(getJobLogListener()).ifPresent(listener -> + listener.onAsyncEvent(getJobLogEvent(JobLogEvent.Level.WARN, entity, message, args))); + } + + default void warn(T entity, String message, Throwable t){ + Logger logger = getLogger(); + if (Objects.nonNull(logger) && logger.isWarnEnabled()){ + logger.warn(message, t); + } + Optional.ofNullable(getJobLogListener()).ifPresent(listener -> + listener.onAsyncEvent(getJobLogEvent(JobLogEvent.Level.WARN, entity, message, t))); + } + + default void error(T entity, String message, Object... args){ + Optional.ofNullable(getLogger()).ifPresent(logger -> logger.error(message, args)); + Optional.ofNullable(getJobLogListener()).ifPresent(listener -> + listener.onAsyncEvent(getJobLogEvent(JobLogEvent.Level.ERROR, entity, message, args))); + } + + default JobLogListener getJobLogListener() { + return null; + } + + /** + * Job log event + * @param level level + * @param entity entity + * @param message message + * @param args args + * @return + */ + default JobLogEvent getJobLogEvent(JobLogEvent.Level level, T entity, String message, Object... 
args){ + return new JobLogEvent(level, null, null, message, args); + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/log/appender/RpcJobLogAppender.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/log/appender/RpcJobLogAppender.java new file mode 100644 index 000000000..9073fda75 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/log/appender/RpcJobLogAppender.java @@ -0,0 +1,104 @@ +package com.webank.wedatasphere.exchangis.job.server.log.appender; + +import com.webank.wedatasphere.exchangis.job.server.log.JobLogService; +import com.webank.wedatasphere.exchangis.job.server.utils.SpringContextHolder; +import org.apache.logging.log4j.core.Filter; +import org.apache.logging.log4j.core.Layout; +import org.apache.logging.log4j.core.LogEvent; +import org.apache.logging.log4j.core.appender.AbstractAppender; +import org.apache.logging.log4j.core.config.Property; +import org.apache.logging.log4j.core.config.plugins.Plugin; +import org.apache.logging.log4j.core.config.plugins.PluginAttribute; +import org.apache.logging.log4j.core.config.plugins.PluginElement; +import org.apache.logging.log4j.core.config.plugins.PluginFactory; +import org.apache.logging.log4j.core.layout.PatternLayout; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.Serializable; +import java.util.Collections; +import java.util.Objects; +import java.util.Optional; +import java.util.concurrent.atomic.AtomicReference; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +/** + * Custom appender + */ +@Plugin(name = "RpcLog", category = "Core", elementType = "appender", printObject = true) +public class RpcJobLogAppender extends AbstractAppender { + + private static final Logger LOG = LoggerFactory.getLogger(RpcJobLogAppender.class); + + private static final Pattern JOB_LOG_PATTERN = Pattern.compile("^[\\s\\S]+?\\[([\\w\\d-]+):([\\w\\d-]+)]"); + + protected RpcJobLogAppender(String name, Filter filter, Layout layout, boolean ignoreExceptions, Property[] properties) { + super(name, filter, layout, ignoreExceptions, properties); + } + + @Override + public void append(LogEvent logEvent) { + String logContent = new String(getLayout().toByteArray(logEvent)); + + if (Objects.nonNull(JobLogServiceInstance.jobLogService)){ + String[] resolved = resolveJobLogLine(logContent); + if (Objects.nonNull(resolved)){ + JobLogServiceInstance.jobLogService.appendLog(resolved[0], resolved[1], Collections.singletonList(logContent)); + } else { + LOG.error("JobLogService instance is ignored ! missing log => [" + logContent + "]"); + } + } else { + LOG.error("JobLogService instance cannot be found ! 
missing log => [" + logContent + "]"); + } + } + + /** + * Resolve job log line + * @param line log line + * @return String[0] => tenancy, String[1] => job_execution_id + */ + private String[] resolveJobLogLine(String line){ + Matcher matcher = JOB_LOG_PATTERN.matcher(line); + if (matcher.find()){ + return new String[]{matcher.group(1), matcher.group(2)}; + } + return null; + } + + private static class JobLogServiceInstance{ + static JobLogService jobLogService; + static { + try { + jobLogService = SpringContextHolder.getBean(JobLogService.class); + }catch(Exception e){ + LOG.warn("Cannot get the bean of JobLogService from spring context !", e); + } + } + } + + @PluginFactory + public static RpcJobLogAppender createAppender(@PluginAttribute("name")String name, + @PluginElement("Filter")final Filter filter, + @PluginElement("Layout") Layout layout, + @PluginAttribute("ignoreExceptions")boolean ignoreExceptions){ + AtomicReference jobLogAppender = new AtomicReference<>(); + Optional.ofNullable(name).ifPresent(appenderName -> { + Layout layoutDef = layout; + if (Objects.isNull(layout)){ + layoutDef = PatternLayout.createDefaultLayout(); + } + jobLogAppender.set(new RpcJobLogAppender(name, filter, layoutDef, ignoreExceptions, Property.EMPTY_ARRAY)); + }); + return jobLogAppender.get(); + } + + public static void main(String[] args){ + String testLog = "2022-01-27 19:36:26.028 INFO - [davidhua:68f42422-a4d8-4065-9810-86013d93f153] Init to create launched job and begin generating"; + Matcher matcher = JOB_LOG_PATTERN.matcher(testLog); + if (matcher.find()){ + System.out.println(matcher.group(1)); + System.out.println(matcher.group(2)); + } + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/log/rpc/FetchLogRequest.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/log/rpc/FetchLogRequest.java new file mode 100644 index 000000000..18d9a5f8b --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/log/rpc/FetchLogRequest.java @@ -0,0 +1,31 @@ +package com.webank.wedatasphere.exchangis.job.server.log.rpc; + +import com.webank.wedatasphere.exchangis.job.log.LogQuery; +import org.apache.linkis.protocol.message.RequestProtocol; + +/** + * Fetch log request + */ +public class FetchLogRequest extends LogQuery implements RequestProtocol { + + /** + * Log path + */ + private String logPath; + + public FetchLogRequest(LogQuery logQuery, String logPath){ + super(logQuery.getFromLine(), logQuery.getPageSize(), + logQuery.getIgnoreKeywords(), logQuery.getOnlyKeywords(), + logQuery.getLastRows()); + setEnableTail(logQuery.isEnableTail()); + this.logPath = logPath; + } + + public String getLogPath() { + return logPath; + } + + public void setLogPath(String logPath) { + this.logPath = logPath; + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/log/rpc/FetchLogResponse.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/log/rpc/FetchLogResponse.java new file mode 100644 index 000000000..b9dd399b4 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/log/rpc/FetchLogResponse.java @@ -0,0 +1,20 @@ +package com.webank.wedatasphere.exchangis.job.server.log.rpc; + +import com.webank.wedatasphere.exchangis.job.log.LogResult; +import org.apache.linkis.protocol.message.RequestProtocol; 
+
+import java.util.List;
+
+/**
+ * Extended log result used as the fetch-log RPC response
+ */
+public class FetchLogResponse extends LogResult implements RequestProtocol {
+
+    public FetchLogResponse(LogResult logResult){
+        super(logResult.getEndLine(), logResult.isEnd(), logResult.getLogs());
+    }
+
+    public FetchLogResponse(int endLine, boolean isEnd, List logs) {
+        super(endLine, isEnd, logs);
+    }
+}
diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/log/rpc/SendLogRequest.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/log/rpc/SendLogRequest.java
new file mode 100644
index 000000000..8310dfef0
--- /dev/null
+++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/log/rpc/SendLogRequest.java
@@ -0,0 +1,57 @@
+package com.webank.wedatasphere.exchangis.job.server.log.rpc;
+
+import org.apache.linkis.protocol.message.RequestProtocol;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Send log request
+ */
+public class SendLogRequest implements RequestProtocol {
+    /**
+     * Job execution id
+     */
+    private String jobExecId;
+
+    /**
+     * Whether the end of the log has been reached
+     */
+    private boolean isEnd;
+    /**
+     * Log lines
+     */
+    private List logLines = new ArrayList<>();
+
+    public SendLogRequest(String jobExecId,
+                          boolean isEnd,
+                          List logLines){
+        this.jobExecId = jobExecId;
+        this.isEnd = isEnd;
+        this.logLines = logLines;
+    }
+
+    public String getJobExecId() {
+        return jobExecId;
+    }
+
+    public void setJobExecId(String jobExecId) {
+        this.jobExecId = jobExecId;
+    }
+
+    public List getLogLines() {
+        return logLines;
+    }
+
+    public void setLogLines(List logLines) {
+        this.logLines = logLines;
+    }
+
+    public boolean isEnd() {
+        return isEnd;
+    }
+
+    public void setEnd(boolean end) {
+        isEnd = end;
+    }
+}
diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/log/service/AbstractJobLogService.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/log/service/AbstractJobLogService.java
new file mode 100644
index 000000000..cb47f18c4
--- /dev/null
+++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/log/service/AbstractJobLogService.java
@@ -0,0 +1,159 @@
+package com.webank.wedatasphere.exchangis.job.server.log.service;
+
+import com.google.common.cache.*;
+import com.webank.wedatasphere.exchangis.job.launcher.entity.LaunchedExchangisJobEntity;
+import com.webank.wedatasphere.exchangis.job.log.LogQuery;
+import com.webank.wedatasphere.exchangis.job.log.LogResult;
+import com.webank.wedatasphere.exchangis.job.server.mapper.LaunchedJobDao;
+import com.webank.wedatasphere.exchangis.job.server.exception.ExchangisJobServerException;
+import com.webank.wedatasphere.exchangis.job.server.execution.scheduler.AbstractExchangisSchedulerTask;
+import com.webank.wedatasphere.exchangis.job.server.log.JobLogService;
+import com.webank.wedatasphere.exchangis.job.server.log.cache.AbstractJobLogCache;
+import com.webank.wedatasphere.exchangis.job.server.log.cache.JobLogCache;
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.io.IOUtils;
+import org.apache.commons.lang.StringUtils;
+import org.apache.linkis.common.conf.CommonVars;
+import org.apache.linkis.common.utils.Utils;
+import org.apache.linkis.scheduler.Scheduler;
+import org.apache.linkis.scheduler.queue.JobInfo;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.annotation.PostConstruct;
+import javax.annotation.PreDestroy;
+import javax.annotation.Resource;
+import java.io.File;
+import java.io.IOException;
+import java.io.RandomAccessFile;
+import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
+import java.util.*;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.TimeUnit;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import static com.webank.wedatasphere.exchangis.job.exception.ExchangisJobExceptionCode.LOG_OP_ERROR;
+
+/**
+ * Abstract Job log service
+ */
+public abstract class AbstractJobLogService implements JobLogService {
+
+    private static final Logger LOG = LoggerFactory.getLogger(AbstractJobLogService.class);
+
+    protected Cache> cacheHolder;
+
+    private AbstractExchangisSchedulerTask cleaner;
+
+    private volatile boolean cleanerOn;
+
+    protected static class Constraints{
+        public static final CommonVars LOG_LOCAL_PATH = CommonVars.apply("wds.exchangis.job.log.local.path", "/data/bdp/dss/exchangis/main/logs");
+
+        public static final CommonVars LOG_CACHE_SIZE = CommonVars.apply("wds.exchangis.job.log.cache.size", 15);
+
+        public static final CommonVars LOG_CACHE_EXPIRE_TIME_IN_SECONDS = CommonVars.apply("wds.exchangis.job.log.cache.expire.time-in-seconds", 5);
+
+        public static final CommonVars LOG_MULTILINE_PATTERN = CommonVars.apply("wds.exchangis.log.multiline.pattern", "^\\d{4}-\\d{2}-\\d{2}\\s+\\d{2}:\\d{2}:\\d{2}\\.\\d{3}");
+    }
+
+    @Resource
+    protected Scheduler scheduler;
+
+    @Resource
+    private LaunchedJobDao launchedJobDao;
+
+    @PostConstruct
+    public void init(){
+        cleanerOn = true;
+        cacheHolder = CacheBuilder.newBuilder().maximumSize(Constraints.LOG_CACHE_SIZE.getValue())
+                .expireAfterAccess(Constraints.LOG_CACHE_EXPIRE_TIME_IN_SECONDS.getValue(), TimeUnit.SECONDS)
+                .removalListener((RemovalListener>) removalNotification -> {
+                    // Flush for expired
+                    if (removalNotification.getCause() == RemovalCause.EXPIRED){
+                        removalNotification.getValue().flushCache(true);
+                    }
+                })
+                .build();
+        cleaner = new AbstractExchangisSchedulerTask("Job-Log-Cache-Cleaner") {
+            @Override
+            public String getTenancy() {
+                return "log";
+            }
+
+            @Override
+            public String getName() {
+                return getId();
+            }
+
+            @Override
+            public JobInfo getJobInfo() {
+                return null;
+            }
+
+            @Override
+            protected void schedule() {
+                while(cleanerOn){
+                    try {
+                        // Sleep in seconds; Thread.sleep would treat this value as milliseconds
+                        TimeUnit.SECONDS.sleep(Constraints.LOG_CACHE_EXPIRE_TIME_IN_SECONDS.getValue());
+                        //Just invoke the auto cleaner
+                        cacheHolder.get("log", () -> null);
+                    } catch (Exception e){
+                        //Ignore
+                    }
+                }
+            }
+        };
+        scheduler.submit(cleaner);
+    }
+
+    @PreDestroy
+    public void destroy(){
+        this.cleanerOn = false;
+        if (Objects.nonNull(this.cleaner.future())){
+            this.cleaner.future().cancel(true);
+        }
+    }
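    /*
     * Reviewer note (not part of the patch): buffered log lines survive cache
     * eviction because the removal listener flushes an expired cache before it
     * is dropped, and the cleaner task above only has to touch the cache
     * periodically, since Guava runs eviction maintenance during accesses
     * rather than on its own thread. A standalone sketch of the same pattern
     * (flush(...) is a hypothetical callback):
     *
     *   Cache<String, StringBuilder> buffers = CacheBuilder.newBuilder()
     *       .expireAfterAccess(5, TimeUnit.SECONDS)
     *       .removalListener((RemovalNotification<String, StringBuilder> n) -> {
     *           if (n.getCause() == RemovalCause.EXPIRED) {
     *               flush(n.getValue()); // persist whatever is left in the buffer
     *           }
     *       })
     *       .build();
     */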
+
+    @Override
+    public LogResult logsFromPage(String jobExecId, LogQuery logQuery) {
+        LaunchedExchangisJobEntity launchedExchangisJob = launchedJobDao.searchLogPathInfo(jobExecId);
+        return logsFromPageAndPath(launchedExchangisJob.getLogPath(), logQuery);
+    }
+
+    @Override
+    public void appendLog(String tenancy, String jobExecId, List logs) {
+        appendLog(jobExecId, logs);
+    }
+
+    @Override
+    public void appendLog(String jobExecId, List logs) {
+        JobLogCache cache = getOrCreateLogCache(jobExecId);
+        logs.forEach(cache::cacheLog);
+    }
+
+    @Override
+    public JobLogCache getOrCreateLogCache(String jobExecId){
+        try {
+            return cacheHolder.get(jobExecId, () -> {
+                LaunchedExchangisJobEntity launchedExchangisJob = launchedJobDao.searchLogPathInfo(jobExecId);
+                if (Objects.nonNull(launchedExchangisJob)) {
+                    return loadJobLogCache(jobExecId, launchedExchangisJob);
+                }
+                return null;
+            });
+        } catch (ExecutionException e) {
+            throw new ExchangisJobServerException.Runtime(LOG_OP_ERROR.getCode(), "Fail to create the job log cache of [" + jobExecId + "]", e);
+        }
+    }
+
+    /**
+     * Load job log cache
+     * @param jobExecId job execution id
+     * @param launchedExchangisJob launched job
+     * @return log cache
+     */
+    protected abstract AbstractJobLogCache loadJobLogCache(String jobExecId, LaunchedExchangisJobEntity launchedExchangisJob)
+            throws Exception;
+}
diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/log/service/RpcJobLogService.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/log/service/RpcJobLogService.java
new file mode 100644
index 000000000..a466d692e
--- /dev/null
+++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/log/service/RpcJobLogService.java
@@ -0,0 +1,289 @@
+package com.webank.wedatasphere.exchangis.job.server.log.service;
+
+import com.webank.wedatasphere.exchangis.common.EnvironmentUtils;
+import com.webank.wedatasphere.exchangis.job.launcher.entity.LaunchedExchangisJobEntity;
+import com.webank.wedatasphere.exchangis.job.log.LogQuery;
+import com.webank.wedatasphere.exchangis.job.log.LogResult;
+import com.webank.wedatasphere.exchangis.job.server.exception.ExchangisJobServerException;
+import com.webank.wedatasphere.exchangis.job.server.log.cache.AbstractJobLogCache;
+import com.webank.wedatasphere.exchangis.job.server.log.cache.JobLogCache;
+import com.webank.wedatasphere.exchangis.job.server.log.rpc.FetchLogRequest;
+import com.webank.wedatasphere.exchangis.job.server.log.rpc.FetchLogResponse;
+import com.webank.wedatasphere.exchangis.job.server.log.rpc.SendLogRequest;
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.io.IOUtils;
+import org.apache.commons.io.input.ReversedLinesFileReader;
+import org.apache.commons.lang.StringUtils;
+import org.apache.linkis.common.ServiceInstance;
+import org.apache.linkis.common.utils.Utils;
+import org.apache.linkis.rpc.Sender;
+import org.apache.linkis.rpc.message.annotation.Receiver;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.RandomAccessFile;
+import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
+import java.util.*;
+import java.util.function.Supplier;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import static com.webank.wedatasphere.exchangis.job.exception.ExchangisJobExceptionCode.LOG_OP_ERROR;
+
+/**
+ * Rpc job log service
+ */
+public class RpcJobLogService extends AbstractJobLogService {
+
+    private static final Logger LOG = LoggerFactory.getLogger(RpcJobLogService.class);
+
+    @Receiver
+    public void appendLog(SendLogRequest sendLogRequest){
+        String jobExecId = sendLogRequest.getJobExecId();
+        List logLines = sendLogRequest.getLogLines();
+        if (!logLines.isEmpty()) {
+            // Two level cache
+            JobLogCache cache = getOrCreateLogCache(jobExecId);
+            logLines.forEach(cache::cacheLog);
+            if (sendLogRequest.isEnd()){
+                cache.flushCache(true);
+            }
+        } else if (sendLogRequest.isEnd()){
+            Optional.ofNullable(cacheHolder.getIfPresent(jobExecId)).ifPresent(cache -> {
+                cache.flushCache(true);
+            });
+        }
+    }
+
+    @Receiver
+    public FetchLogResponse logsFromPage(FetchLogRequest fetchLogRequest){
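        /*
         * Reviewer note (not part of the patch): this @Receiver is the remote
         * half of logsFromPageAndPath below. When a server does not own the
         * log file, it forwards the query to the owning instance roughly like
         * this (sketch; instance is a ServiceInstance resolved from the path
         * prefix):
         *
         *   FetchLogRequest req = new FetchLogRequest(logQuery, logPath);
         *   Object resp = Sender.getSender(instance).ask(req);
         *   if (resp instanceof FetchLogResponse) {
         *       LogResult result = (FetchLogResponse) resp;
         *   }
         */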
return new FetchLogResponse( + logsFromPageAndPath(fetchLogRequest.getLogPath(), fetchLogRequest)); + } + @Override + protected AbstractJobLogCache loadJobLogCache(String jobExecId, + LaunchedExchangisJobEntity launchedExchangisJob) throws Exception{ + String logPath = launchedExchangisJob.getLogPath(); + int splitPos = logPath.indexOf("@"); + if (splitPos > 0){ + String logAddress = logPath.substring(0, splitPos); + if (!logAddress.equals(EnvironmentUtils.getServerAddress())){ + ServiceInstance instance = ServiceInstance.apply(EnvironmentUtils.getServerName(), logAddress); + return new AbstractJobLogCache(scheduler, 100, 2000) { + @Override + public void flushCache(boolean isEnd) { + // Send rpc + if (!cacheQueue().isEmpty()) { + try { + List logLines = new ArrayList<>(); + cacheQueue().drainTo(logLines); + Sender.getSender(instance).send(new SendLogRequest(jobExecId, isEnd, logLines)); + } catch (Exception ex) { + LOG.error("Fail to send the log cache of [" + launchedExchangisJob.getJobExecutionId() + + "] to remote rpc [" + logAddress + "]", ex); + } + } + if (isEnd) { + cacheHolder.invalidate(jobExecId); + } + } + }; + } + logPath = logPath.substring(splitPos + 1); + } + File logFile = new File(Constraints.LOG_LOCAL_PATH.getValue() + IOUtils.DIR_SEPARATOR_UNIX + + logPath); + + if (!logFile.exists()){ + // Write empty string to create new file + FileUtils.writeStringToFile(logFile, ""); + LOG.info("Create the new job log file: {}", logFile.getAbsolutePath()); + } + RandomAccessFile file = new RandomAccessFile(logFile, "rw"); + // Seek to the end of file + file.seek(file.length()); + return new AbstractJobLogCache(scheduler, 100, 2000) { + @Override + public synchronized void flushCache(boolean isEnd) { + // Store into local path + if (!cacheQueue().isEmpty()) { + try { + List logLines = new ArrayList<>(); + cacheQueue().drainTo(logLines); + for (Object line : logLines) { + file.write(String.valueOf(line).getBytes(Charset.defaultCharset())); + } + } catch (IOException ex) { + LOG.error("Fail to flush the log cache of [" + launchedExchangisJob.getJobExecutionId() + "]", ex); + } + } + if (isEnd) { + cacheHolder.invalidate(jobExecId); + try { + file.close(); + } catch (IOException e) { + //Ignore + } + } + } + }; + } + + @Override + public LogResult logsFromPageAndPath(String logPath, LogQuery logQuery) { + int splitPos = logPath.indexOf("@"); + if (splitPos > 0) { + String logAddress = logPath.substring(0, splitPos); + if (!logAddress.equals(EnvironmentUtils.getServerAddress())) { + Object response; + try { + response = Sender.getSender(ServiceInstance.apply(EnvironmentUtils.getServerName(), logAddress)) + .ask(new FetchLogRequest(logQuery, logPath)); + } catch (Exception e){ + throw new ExchangisJobServerException.Runtime(LOG_OP_ERROR.getCode(), + "Remote exception in fetching log from: [" + logPath + "]", e); + } + if (response instanceof FetchLogResponse){ + return (LogResult) response; + } + throw new ExchangisJobServerException.Runtime(LOG_OP_ERROR.getCode(),"Unable to fetch log from: [" + logPath + + "], unknown request protocol: [" + response + "]", null); + } + logPath = logPath.substring(splitPos + 1); + } + String fullPath = Constraints.LOG_LOCAL_PATH.getValue() + IOUtils.DIR_SEPARATOR_UNIX + logPath; + LogResult result = new LogResult(0, false, Collections.emptyList()); + if (!new File(fullPath).exists()){ + return result; + } + if (logQuery.getLastRows() != null && logQuery.getLastRows() > 0){ + return getLastRows(fullPath, logQuery.getLastRows()); + } + RandomAccessFile 
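/*
 * Reviewer note (not part of the patch): log paths are stored with the owning
 * server as a prefix, i.e. "serverAddress@relativePath". The branches above
 * strip that prefix; whatever remains is resolved under LOG_LOCAL_PATH. With
 * hypothetical values:
 *
 *   String logPath = "127.0.0.1:9321@exchangis/2022-01-27/job.log";
 *   int pos = logPath.indexOf("@");
 *   String address  = logPath.substring(0, pos);  // "127.0.0.1:9321" -> route over RPC
 *   String relative = logPath.substring(pos + 1); // read under LOG_LOCAL_PATH
 */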
logReader = null;
+        ReversedLinesFileReader reverseReader = null;
+        try {
+            String patternValue = Constraints.LOG_MULTILINE_PATTERN.getValue();
+            Pattern linePattern = StringUtils.isNotBlank(patternValue)? Pattern.compile(patternValue) : null;
+            int readLine = 0;
+            int lineNum = 0;
+            int skippedLine = 0;
+            int ignoreLine = 0;
+            int pageSize = logQuery.getPageSize();
+            int fromLine = logQuery.getFromLine();
+            List ignoreKeywords = logQuery.getIgnoreKeywordsList();
+            List onlyKeywords = logQuery.getOnlyKeywordsList();
+            boolean rowIgnore = false;
+            Supplier lineSupplier = null;
+            if (logQuery.isEnableTail()){
+                reverseReader = new ReversedLinesFileReader(new File(fullPath), Charset.defaultCharset());
+                LOG.trace("Enable reverse read the log: {}, fromLine: {}, pageSize: {}", fullPath, fromLine, pageSize);
+                ReversedLinesFileReader finalReverseReader = reverseReader;
+                lineSupplier = () -> {
+                    try {
+                        return finalReverseReader.readLine();
+                    } catch (IOException e) {
+                        throw new ExchangisJobServerException.Runtime(LOG_OP_ERROR.getCode(), e.getMessage(), e);
+                    }
+                };
+            } else {
+                logReader = new RandomAccessFile(fullPath, "rw");
+                RandomAccessFile finalLogReader = logReader;
+                lineSupplier = () -> {
+                    try {
+                        String line = finalLogReader.readLine();
+                        if (null != line){
+                            return new String(line.getBytes(StandardCharsets.ISO_8859_1), Charset.defaultCharset());
+                        }
+                        return null;
+                    } catch (IOException e) {
+                        throw new ExchangisJobServerException.Runtime(LOG_OP_ERROR.getCode(), e.getMessage(), e);
+                    }
+                };
+            }
+            String line = lineSupplier.get();
+            List logs = new ArrayList<>();
+            while (readLine < pageSize && line != null){
+                lineNum += 1;
+                if (skippedLine < fromLine - 1){
+                    skippedLine += 1;
+                } else {
+                    if (rowIgnore) {
+                        if (Objects.nonNull(linePattern)){
+                            Matcher matcher = linePattern.matcher(line);
+                            if (matcher.find()){
+                                ignoreLine = 0;
+                                rowIgnore = !isIncludeLine(line, onlyKeywords, ignoreKeywords);
+                            } else {
+                                ignoreLine += 1;
+                                // TODO limit the value of ignoreLine
+                            }
+                        } else {
+                            rowIgnore = !isIncludeLine(line, onlyKeywords, ignoreKeywords);
+                        }
+                    } else {
+                        rowIgnore = !isIncludeLine(line, onlyKeywords, ignoreKeywords);
+                    }
+                    if (!rowIgnore) {
+                        if (line.contains("password")) {
+                            // Drop lines that may expose credentials
+                            LOG.info("Ignore the log line that contains sensitive information");
+                        } else {
+                            logs.add(line);
+                        }
+                        readLine += 1;
+                    }
+                }
+                line = lineSupplier.get();
+            }
+            if (logQuery.isEnableTail()){
+                Collections.reverse(logs);
+            }
+            result = new LogResult(lineNum, false, logs);
+        } catch (IOException e) {
+            throw new ExchangisJobServerException.Runtime(LOG_OP_ERROR.getCode(), "Unable to query the logs from path: [" + logPath + "]", e);
+        } finally {
+            if (Objects.nonNull(logReader)) {
+                try {
+                    logReader.close();
+                } catch (IOException e) {
+                    //Ignore
+                }
+            }
+            if (Objects.nonNull(reverseReader)) {
+                try {
+                    reverseReader.close();
+                } catch (IOException e) {
+                    //Ignore
+                }
+            }
+        }
+        return result;
+    }
+
+    /**
+     * Get last rows
+     * @param fullPath full path
+     * @param lastRows last rows
+     * @return log result of the last rows
+     */
+    private LogResult getLastRows(String fullPath, int lastRows){
+        try {
+            List logs = Arrays.asList(Utils.exec(new String[]{"tail", "-n", lastRows + "", fullPath}, 5000L).split("\n"));
+            return new LogResult(0, true, logs);
+        } catch (Exception e){
+            throw new ExchangisJobServerException.Runtime(LOG_OP_ERROR.getCode(), "Fail to get last rows from path: [" + fullPath + "]", e);
+        }
+    }
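    /*
     * Reviewer note (not part of the patch): the keyword filter below keeps a
     * line only if it contains none of the "ignore" keywords and, when the
     * "only" list is non-empty, at least one "only" keyword. Sketch of a
     * query that pages 100 lines, keeps ERROR/WARN lines and drops DEBUG
     * noise (constructor arguments follow the order used by FetchLogRequest):
     *
     *   LogQuery query = new LogQuery(1, 100, "DEBUG", "ERROR,WARN", null);
     *   LogResult result = jobLogService.logsFromPage(jobExecId, query);
     */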
+
+    private boolean isIncludeLine(String line, List onlyKeywordList, List ignoreKeywordList){
+        boolean accept = ignoreKeywordList.isEmpty() || ignoreKeywordList.stream().noneMatch(line::contains);
+        if (accept){
+            accept = onlyKeywordList.isEmpty() || onlyKeywordList.stream().anyMatch(line::contains);
+        }
+        return accept;
+    }
+}
diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/mapper/ExchangisJobEntityDao.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/mapper/ExchangisJobEntityDao.java
new file mode 100644
index 000000000..d2ec8ba6a
--- /dev/null
+++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/mapper/ExchangisJobEntityDao.java
@@ -0,0 +1,75 @@
+package com.webank.wedatasphere.exchangis.job.server.mapper;
+
+import com.webank.wedatasphere.exchangis.job.domain.ExchangisJobEntity;
+import com.webank.wedatasphere.exchangis.job.vo.ExchangisJobQueryVo;
+import org.apache.ibatis.annotations.Param;
+
+import java.util.List;
+
+/**
+ * Dao of the exchangis job entity
+ */
+public interface ExchangisJobEntityDao {
+
+    /**
+     * Add new job entity
+     * @param exchangisJobEntity job entity
+     */
+    void addJobEntity(ExchangisJobEntity exchangisJobEntity);
+
+    /**
+     * Delete job entity
+     * @param jobId job id
+     */
+    void deleteJobEntity(@Param("jobId") Long jobId);
+
+    /**
+     * Upgrade the basic info of job entity
+     * @param jobEntity job entity
+     */
+    void upgradeBasicInfo(ExchangisJobEntity jobEntity);
+
+    /**
+     * Upgrade config
+     * @param jobEntity job entity
+     */
+    void upgradeConfig(ExchangisJobEntity jobEntity);
+
+    /**
+     * Upgrade content
+     * @param jobEntity job entity
+     */
+    void upgradeContent(ExchangisJobEntity jobEntity);
+
+    /**
+     * Get job entity detail
+     * @param jobId job id
+     * @return job entity
+     */
+    ExchangisJobEntity getDetail(@Param("jobId") Long jobId);
+
+    /**
+     * Get job entity detail list
+     * @param projectId project id
+     * @return job entity list
+     */
+    List getDetailList(@Param("projectId") Long projectId);
+
+    ExchangisJobEntity getBasicInfo(@Param("jobId") Long jobId);
+
+    /**
+     * Query page list
+     * @param queryVo query vo
+     * @return list
+     */
+    List queryPageList(ExchangisJobQueryVo queryVo);
+
+    /**
+     * Delete batch
+     * @param ids id list
+     */
+    void deleteBatch(@Param("ids") List ids);
+
+    List getByNameAndProjectId(@Param("jobName") String jobName, @Param("projectId") Long projectId);
+
+    List getByNameWithProjectId(@Param("jobName") String jobName, @Param("projectId") Long projectId);
+}
diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/mapper/ExchangisLaunchTaskMapper.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/mapper/ExchangisLaunchTaskMapper.java
new file mode 100644
index 000000000..ed0e39666
--- /dev/null
+++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/mapper/ExchangisLaunchTaskMapper.java
@@ -0,0 +1,31 @@
+package com.webank.wedatasphere.exchangis.job.server.mapper;
+
+import java.util.Date;
+import java.util.List;
+
+import com.webank.wedatasphere.exchangis.job.server.dto.ExchangisTaskStatusMetricsDTO;
+import org.apache.ibatis.annotations.Mapper;
+import org.apache.ibatis.annotations.Param;
+
+import com.baomidou.mybatisplus.core.mapper.BaseMapper;
+import com.webank.wedatasphere.exchangis.job.launcher.entity.ExchangisLaunchTask;
+
+/**
+ * The interface Exchangis launch task mapper.
+ *
+ * @author yuxin.yuan
+ * @since 2021-09-08
+ */
+@Mapper
+public interface ExchangisLaunchTaskMapper extends BaseMapper {
+
+    List listTasks(@Param("taskId") Long taskId, @Param("taskName") String taskName,
+        @Param("status") String status, @Param("launchStartTime") Date launchStartTime,
+        @Param("launchEndTime") Date launchEndTime, @Param("start") int start, @Param("size") int size);
+
+    int count(@Param("taskId") Long taskId, @Param("taskName") String taskName, @Param("status") String status,
+        @Param("launchStartTime") Date launchStartTime, @Param("launchEndTime") Date launchEndTime);
+
+    ExchangisTaskStatusMetricsDTO getTaskMetricsByStatus(String status);
+
+}
diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/mapper/JobFunctionDao.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/mapper/JobFunctionDao.java
new file mode 100644
index 000000000..63ea32c53
--- /dev/null
+++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/mapper/JobFunctionDao.java
@@ -0,0 +1,27 @@
+package com.webank.wedatasphere.exchangis.job.server.mapper;
+
+import com.webank.wedatasphere.exchangis.job.server.vo.JobFunction;
+import org.apache.ibatis.annotations.Param;
+
+import java.util.List;
+
+/**
+ * @author davidhua
+ * 2020/4/23
+ */
+public interface JobFunctionDao {
+    /**
+     * List referenced names
+     * @param tabName table name
+     * @param type function type
+     * @return name list
+     */
+    List listRefNames(@Param("tabName") String tabName, @Param("type") String type);
+
+    /**
+     * List function entities
+     * @param tabName table name
+     * @param type function type
+     * @return function list
+     */
+    List listFunctions(@Param("tabName") String tabName, @Param("type") String type);
+}
diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/mapper/JobTransformProcessorDao.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/mapper/JobTransformProcessorDao.java
new file mode 100644
index 000000000..72e26fd85
--- /dev/null
+++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/mapper/JobTransformProcessorDao.java
@@ -0,0 +1,31 @@
+package com.webank.wedatasphere.exchangis.job.server.mapper;
+
+import com.webank.wedatasphere.exchangis.job.server.render.transform.processor.TransformProcessor;
+
+/**
+ * Job transform processor dao
+ */
+public interface JobTransformProcessorDao {
+    /**
+     * Save one entity
+     * @param processor processor entity
+     * @return id
+     */
+    Long saveOne(TransformProcessor processor);
+
+    /**
+     * Get the processor detail (with code content)
+     * @param id id
+     * @return processor
+     */
+    TransformProcessor getProcDetail(Long id);
+
+    /**
+     * Get the processor basic information
+     * @param id id
+     * @return processor
+     */
+    TransformProcessor getProcInfo(Long id);
+
+    /**
+     * Update one entity
+     * @param processor processor entity
+     */
+    void updateOne(TransformProcessor processor);
+}
diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/mapper/JobTransformRuleDao.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/mapper/JobTransformRuleDao.java
new file mode 100644
index 000000000..438b5eebb
--- /dev/null
+++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/mapper/JobTransformRuleDao.java
@@ -0,0 +1,22 @@
+package com.webank.wedatasphere.exchangis.job.server.mapper;
+
+import
com.webank.wedatasphere.exchangis.job.server.render.transform.TransformRule; +import org.apache.ibatis.annotations.Param; + +import java.util.List; + +/** + * Transform dao in rendering job + */ +public interface JobTransformRuleDao { + + /** + * Get transform rule list + * @param ruleType rule type + * @param dataSourceType data source type + * @return rule list + */ + List getTransformRules(@Param("ruleType") String ruleType, + @Param("dataSourceType") String dataSourceType); + +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/mapper/LaunchableTaskDao.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/mapper/LaunchableTaskDao.java new file mode 100644 index 000000000..4002a6e41 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/mapper/LaunchableTaskDao.java @@ -0,0 +1,44 @@ +package com.webank.wedatasphere.exchangis.job.server.mapper; + +import com.webank.wedatasphere.exchangis.job.launcher.domain.LaunchableExchangisTask; +import org.apache.ibatis.annotations.Param; + +import java.util.List; + +/** + * + * @Date 2022/1/17 21:58 + */ +public interface LaunchableTaskDao { + + /** + * Add new launchableTask + * @param tasks + */ + void addLaunchableTask(@Param("tasks")List tasks); + + /** + * Delete launchableTask + * @param taskId + */ + void deleteLaunchableTask(@Param("taskId") String taskId); + + /** + * upgradeLaunchableTask + * @param launchableExchangisTask + */ + void upgradeLaunchableTask(LaunchableExchangisTask launchableExchangisTask); + + /** + * Get launchableTask + * @param taskId + */ + LaunchableExchangisTask getLaunchableTask(@Param("taskId") String taskId); + + /** + * Get Tasks need to execute + * @param + */ + + List getTaskToLaunch(@Param("limitSize") Integer limitSize); +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/mapper/LaunchedJobDao.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/mapper/LaunchedJobDao.java new file mode 100644 index 000000000..53baaef33 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/mapper/LaunchedJobDao.java @@ -0,0 +1,97 @@ +package com.webank.wedatasphere.exchangis.job.server.mapper; + +import com.webank.wedatasphere.exchangis.job.launcher.entity.LaunchedExchangisJobEntity; +import org.apache.ibatis.annotations.Param; + +import java.util.Date; + +import java.util.List; + +/** + * + */ +public interface LaunchedJobDao { + /** + * insert launchedJob + * @param launchedExchangisJobEntity entity + */ + + void insertLaunchedJob(LaunchedExchangisJobEntity launchedExchangisJobEntity); + + /** + * delete launchedJob + * @param jobExecutionId execution id + */ + + void deleteLaunchedJob(@Param("jobExecutionId")String jobExecutionId); + + /** + * upgrade launchedJob + * @param launchedExchangisJobEntity entity + */ + void upgradeLaunchedJob(LaunchedExchangisJobEntity launchedExchangisJobEntity); + + /** + * Update launch info + * @param launchedExchangisJobEntity entity + */ + void updateLaunchInfo(LaunchedExchangisJobEntity launchedExchangisJobEntity); + /** + * search launchJob + * @param jobExecutionId execution id + * @return entity + */ + LaunchedExchangisJobEntity searchLaunchedJob(@Param("jobExecutionId")String jobExecutionId); + + /** + * Search log path and status info + * @param jobExecutionId 
execution id + * @return + */ + LaunchedExchangisJobEntity searchLogPathInfo(@Param("jobExecutionId")String jobExecutionId); + /** + * upgrade launchedJob status + * @param jobExecutionId execution id + * @param status status + */ + + void upgradeLaunchedJobStatus(@Param("jobExecutionId")String jobExecutionId, @Param("status") String status, @Param("updateTime")Date updateTime); + + /** + * Try to upgrade launchedJob status in version control + * @param jobExecutionId execution id + * @param status update status + * @param launchableTaskNum expected launchable task number + * @param updateTime updateTime + */ + int upgradeLaunchedJobStatusInVersion(@Param("jobExecutionId")String jobExecutionId, @Param("status") String status, + @Param("launchableTaskNum") Integer launchableTaskNum, + @Param("updateTime")Date updateTime); + + /** + * To upgrade launchedJob progress + * @param jobExecutionId execution id + * @param totalTaskProgress progress of total related task + * @param updateTime updateTime + */ + int upgradeLaunchedJobProgress(@Param("jobExecutionId")String jobExecutionId, @Param("totalTaskProgress") Float totalTaskProgress, + @Param("updateTime")Date updateTime); + + /** + * get All launchJob + * @return job entity list + */ + List getAllLaunchedJob(@Param("jobExecutionId") String jobExecutionId, @Param("jobName") String jobName, @Param("status") String status, @Param("launchStartTime") Date launchStartTime, @Param("launchEndTime") Date launchEndTime, @Param("start")int start, @Param("size") int size, @Param("loginUser") String loginUser); + + /** + * get launchJob count + * return job entity number + */ + int count(@Param("jobExecutionId") String jobExecutionId, @Param("jobName") String jobName, @Param("status") String status, @Param("launchStartTime") Date launchStartTime, @Param("launchEndTime") Date launchEndTime, @Param("loginUser") String loginUser); + + /** + * delete launchJob + */ + void deleteJob(@Param("jobExecutionId")String jobExecutionId); + +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/mapper/LaunchedTaskDao.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/mapper/LaunchedTaskDao.java new file mode 100644 index 000000000..b78acee21 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/mapper/LaunchedTaskDao.java @@ -0,0 +1,136 @@ +package com.webank.wedatasphere.exchangis.job.server.mapper; + +import com.webank.wedatasphere.exchangis.job.launcher.domain.LaunchableExchangisTask; +import com.webank.wedatasphere.exchangis.job.launcher.entity.LaunchedExchangisJobEntity; +import com.webank.wedatasphere.exchangis.job.launcher.entity.LaunchedExchangisTaskEntity; +import org.apache.ibatis.annotations.Mapper; +import org.apache.ibatis.annotations.Param; +import org.springframework.jmx.export.annotation.ManagedOperationParameter; +import org.springframework.stereotype.Service; + +import java.util.Date; +import java.util.List; + +/** + * + */ +public interface LaunchedTaskDao { + + /** + * judge task whether launch(insert) or update + * @param launchedExchangisTaskEntity taskEntity + * @return insert/update result (1:insert 2:update) + */ + int insertLaunchedTaskOrUpdate(LaunchedExchangisTaskEntity launchedExchangisTaskEntity); + + /** + * insert launchedTask + * @param launchedExchangisTaskEntity + */ + void insertLaunchedTask(LaunchedExchangisTaskEntity launchedExchangisTaskEntity); + + /** + * delete 
launchedTask + * @param taskId + */ + void deleteLaunchedTask(@Param("taskId")String taskId); + + /** + * upgrade launchedTask + * @param launchedExchangisTaskEntity + */ + void upgradeLaunchedTask(LaunchedExchangisTaskEntity launchedExchangisTaskEntity); + + /** + * search launchedTask + * @param taskId + */ + + LaunchedExchangisTaskEntity getLaunchedTaskEntity(@Param("taskId") String taskId); + + /** + * upgrade launchedTask metrics + * @param metrics + * @param taskId + */ + + void upgradeLaunchedTaskMetrics(@Param("taskId") String taskId, @Param("metrics") String metrics, @Param("updateTime")Date updateTime); + + /** + * upgrade launchedTask status + * @param status + * @param taskId + */ + + void upgradeLaunchedTaskStatus(@Param("taskId") String taskId, @Param("status") String status, @Param("updateTime")Date updateTime); + + /** + * upgrade launchedTask progress + * @param taskId + * @param progress + * @param updateTime + */ + void upgradeLaunchedTaskProgress(@Param("taskId") String taskId, @Param("progress") Float progress, @Param("updateTime")Date updateTime); + + + /** + * Sum the progress value + * @param jobExecutionId job execution id + * @return sum result + */ + float sumProgressByJobExecutionId(@Param("jobExecutionId") String jobExecutionId); + /** + * Update the launch information + * @param launchedExchangisTaskEntity entity + */ + void updateLaunchInfo(LaunchedExchangisTaskEntity launchedExchangisTaskEntity); + + /** + * search launchedTaskList + * @param jobExecutionId + */ + + List selectTaskListByJobExecutionId(@Param("jobExecutionId") String jobExecutionId); + + /** + * Select status list + * @param jobExecutionId job execution id + * @return + */ + List selectTaskStatusByJobExecutionId(@Param("jobExecutionId")String jobExecutionId); + /** + * search getTaskMetrics + * @param jobExecutionId + */ + + List getTaskMetricsByJobExecutionId(@Param("jobExecutionId") String jobExecutionId); + + /** + * search launchedTaskList by taskId and jobExecutionId + * @param jobExecutionId + */ + + LaunchedExchangisTaskEntity getLaunchedTaskMetrics(@Param("jobExecutionId") String jobExecutionId, @Param("taskId") String taskId); + + /** + * Get launched task status + * @param taskId + * @return + */ + String getLaunchedTaskStatus(@Param("taskId") String taskId); + + /** + * search TaskStatusList + * @param + */ + + List getTaskStatusList(@Param("jobExecutionId") String jobExecutionId); + + /** + * delete task + * @param + */ + + void deleteTask(@Param("jobExecutionId") String jobExecutionId); + +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/mapper/impl/ExchangisJobEntityMapper.xml b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/mapper/impl/ExchangisJobEntityMapper.xml new file mode 100644 index 000000000..dbac7c66b --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/mapper/impl/ExchangisJobEntityMapper.xml @@ -0,0 +1,199 @@ + + + + + + + + + + + + + + + + + + + + + + + + + exchangis_job_entity + + + + id, + name, create_time, last_update_time, engine_type, job_labels, create_user, job_content, execute_user, + job_params, job_desc, job_type, project_id, source, modify_user + + + + id, + name, create_time, last_update_time, engine_type, job_labels, create_user, execute_user, + job_params, job_desc, job_type, project_id, source, modify_user + + + + insert into + + (name, create_time, last_update_time, engine_type, 
job_labels, create_user, job_content, execute_user, job_params, + job_desc, job_type, project_id, source, modify_user) + values( + #{name}, + #{createTime}, + #{lastUpdateTime}, + #{engineType}, + #{jobLabel}, + #{createUser}, + #{jobContent}, + #{executeUser}, + #{jobParams}, + #{jobDesc}, + #{jobType}, + #{projectId}, + #{source}, + #{modifyUser} + ); + + + + DELETE FROM WHERE + id = #{jobId} + + + + UPDATE + SET + name = #{name}, + last_update_time = #{lastUpdateTime}, + engine_type = #{engineType}, + job_labels = #{jobLabel}, + job_type = #{jobType}, + job_desc = #{jobDesc}, + modify_user = #{modifyUser} + WHERE id = #{id} + + + + UPDATE + SET + execute_user = #{executeUser}, + job_params = #{jobParams}, + source = #{source}, + last_update_time = #{lastUpdateTime}, + modify_user = #{modifyUser} + WHERE id = #{id} + + + + UPDATE + SET + job_content = #{jobContent}, + last_update_time = #{lastUpdateTime}, + modify_user = #{modifyUser} + WHERE id = #{id} + + + + + + + + + + + + + + + + + + + + + DELETE FROM + + WHERE id in + + #{item} + + + diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/mapper/impl/ExchangisLaunchTaskMapper.xml b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/mapper/impl/ExchangisLaunchTaskMapper.xml new file mode 100644 index 000000000..c65797ef7 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/mapper/impl/ExchangisLaunchTaskMapper.xml @@ -0,0 +1,90 @@ + + + + + + + + + + + + + + + + + + + + + + + + exchangis_launch_task + + + + + id,task_name, job_id, job_name, content, execute_node, create_time, create_user, launch_time, proxy_user, params_json, status, complete_time, engine_type + + + + + + + + + + + diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/mapper/impl/JobFunctionDao.xml b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/mapper/impl/JobFunctionDao.xml new file mode 100644 index 000000000..f35747653 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/mapper/impl/JobFunctionDao.xml @@ -0,0 +1,47 @@ + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/mapper/impl/JobTransformProcessorMapper.xml b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/mapper/impl/JobTransformProcessorMapper.xml new file mode 100644 index 000000000..0ec314310 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/mapper/impl/JobTransformProcessorMapper.xml @@ -0,0 +1,61 @@ + + + + + + + + + + + + + + exchangis_job_transform_processor + + + + INSERT INTO + (`job_id`, `code_content`, `code_language`, `code_bml_resourceId`, `code_bml_version`, `creator`) + VALUES(#{jobId}, #{codeContent}, #{codeLanguage}, #{codeBmlResourceId,jdbcType=VARCHAR}, #{codeBmlVersion,jdbcType=VARCHAR}, #{creator}); + + + + + + + + UPDATE + SET + `code_content` = #{codeContent}, + `code_language` = #{codeLanguage}, + `code_bml_resourceId` = #{codeBmlResourceId}, + `code_bml_version` = #{codeBmlVersion} + WHERE `id` = #{id}; + + \ No newline at end of file diff --git 
a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/mapper/impl/JobTransformRuleMapper.xml b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/mapper/impl/JobTransformRuleMapper.xml new file mode 100644 index 000000000..197188db5 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/mapper/impl/JobTransformRuleMapper.xml @@ -0,0 +1,40 @@ + + + + + + + + + + + + + + + + exchangis_job_transform_rule + + + + \ No newline at end of file diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/mapper/impl/LaunchableTaskMapper.xml b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/mapper/impl/LaunchableTaskMapper.xml new file mode 100644 index 000000000..ac29d52cc --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/mapper/impl/LaunchableTaskMapper.xml @@ -0,0 +1,89 @@ + + + + + + + + + + + + + + + + + + + + + + exchangis_launchable_task + + + + + id, name, create_time, last_update_time, engine_type, execute_user, linkis_job_name, linkis_job_content, linkis_params, linkis_source, labels + + + + insert into + + (id, name, job_execution_id, create_time, last_update_time, engine_type, execute_user, linkis_job_name, linkis_job_content, linkis_params, linkis_source, labels) + values( + #{task.id}, + #{task.name}, + #{task.jobExecutionId}, + #{task.createTime}, + #{task.lastUpdateTime}, + #{task.engineType}, + #{task.executeUser}, + #{task.linkisJobName}, + #{task.linkisJobContent}, + #{task.linkisParams}, + #{task.linkisSource}, + #{task.labels} + ); + + + + + delete from + + where id = #{taskId} + + + + UPDATE + SET + id = #{id}, + name = #{name}, + create_time = #{createTime}, + last_update_time = #{lastUpdateTime}, + engine_type = #{engineType}, + execute_user = #{execute_user}, + linkis_job_name = #{linkisJobName}, + linkis_job_content = #{linkisJobContent}, + linkis_params = #{linkisParams}, + linkis_source = #{linkisSource}, + labels = #{labels} + + + + + + + + \ No newline at end of file diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/mapper/impl/LaunchedJobMapper.xml b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/mapper/impl/LaunchedJobMapper.xml new file mode 100644 index 000000000..46afcce62 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/mapper/impl/LaunchedJobMapper.xml @@ -0,0 +1,215 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + exchangis_launched_job_entity + + + + + id, name, create_time, last_update_time, job_id, job_execution_id, engine_type, execute_user, job_name, status, progress, error_code, error_msg, retry_num, log_path, create_user + + + insert into + + (name, create_time, last_update_time, job_id, launchable_task_num, job_execution_id, engine_type, execute_user, create_user, job_name, status, progress, error_code, error_msg, retry_num, log_path) + values( + #{name}, + #{createTime}, + #{lastUpdateTime}, + #{jobId}, + #{launchableTaskNum}, + #{jobExecutionId}, + #{engineType}, + #{executeUser}, + #{createUser}, + #{jobName}, + #{status}, + #{progress}, + #{errorCode}, + #{errorMessage}, + #{retryNum}, + #{logPath} + ); + + + + DELETE FROM WHERE + job_execution_id = #{jobExecutionId} + + + 
+ UPDATE + SET + name = #{name}, + create_time = #{createTime}, + last_update_time = #{lastUpdateTime}, + job_id = #{jobId}, + launchable_task_num = #{launchableTaskNum}, + job_execution_id = #{jobExecutionId}, + engine_type = #{engineType}, + execute_user = #{execute_user}, + job_name = #{jobName}, + status = #{status}, + progress = #{progress}, + error_code = #{errorCode}, + error_msg = #{errorMessage}, + retry_num = #{retryNum}, + log_path = #{logPath} + where id = #{id} + + + UPDATE + SET + last_update_time = #{lastUpdateTime}, + launchable_task_num = #{launchableTaskNum}, + status = #{status} + WHERE job_execution_id = #{jobExecutionId} + + + + + + + UPDATE + SET + status = #{status}, + last_update_time = #{updateTime} + WHERE + job_execution_id = #{jobExecutionId} + + + + + + + + + AND job_execution_id = #{jobExecutionId} + + + AND #{updateTime} >= last_update_time + + + AND launchable_task_num = #{launchableTaskNum}; + + + + + + + + + + + + + + + DELETE FROM WHERE + job_execution_id = #{jobExecutionId} + + + diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/mapper/impl/LaunchedTaskMapper.xml b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/mapper/impl/LaunchedTaskMapper.xml new file mode 100644 index 000000000..adbf2cd17 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/mapper/impl/LaunchedTaskMapper.xml @@ -0,0 +1,226 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + exchangis_launched_task_entity + + + + + id, name, create_time, last_update_time, job_id, engine_type, execute_user, job_name, progress, error_code, error_msg, retry_num, task_id, linkis_job_id, linkis_job_info, job_execution_id, launch_time, running_time, metrics, status + + + insert into + + (id, name, create_time, last_update_time, job_id, engine_type, execute_user, job_name, progress, error_code, error_msg, retry_num, task_id, linkis_job_id, linkis_job_info, job_execution_id, launch_time, running_time, metrics, status) + values( + #{id}, + #{name}, + #{createTime}, + #{lastUpdateTime}, + #{jobId}, + #{engineType}, + #{executeUser}, + #{jobName}, + #{progress}, + #{errorCode}, + #{errorMessage}, + #{retryNum}, + #{taskId}, + #{linkisJobId}, + #{linkisJobInfo}, + #{jobExecutionId}, + #{launchTime}, + #{runningTime}, + #{metrics}, + #{status} + ) ON DUPLICATE KEY UPDATE + name = #{name}, `status` = #{status}; + + + + insert into + + (id, name, create_time, last_update_time, job_id, engine_type, execute_user, job_name, progress, error_code, error_msg, retry_num, task_id, linkis_job_id, linkis_job_info, job_execution_id, launch_time, running_time, metrics, status) + values( + #{id}, + #{name}, + #{createTime}, + #{lastUpdateTime}, + #{jobId}, + #{engineType}, + #{executeUser}, + #{jobName}, + #{progress}, + #{errorCode}, + #{errorMessage}, + #{retryNum}, + #{taskId}, + #{linkisJobId}, + #{linkisJobInfo}, + #{jobExecutionId}, + #{launchTime}, + #{runningTime}, + #{metrics}, + #{status} + ); + + + + DELETE FROM WHERE + task_id = #{taskId} + + + + UPDATE + SET + id = #{id}, + name = #{name}, + create_time = #{createTime}, + last_update_time = #{lastUpdateTime}, + job_id = #{jobId}, + engine_type = #{engineType}, + execute_user = #{executeUser}, + job_name = #{jobName}, + progress = #{progress}, + error_code = #{errorCode}, + error_msg = #{errorMessage}, + retry_num = #{retryNum}, + task_id = #{taskId}, + linkis_job_id = #{linkisJobId}, + 
linkis_job_info = #{linkisJobInfo}, + job_execution_id = #{jobExecutionId}, + launch_time = #{launchTime}, + running_time = #{runningTime}, + metrics = #{metrics}, + status = #{status} + + + + UPDATE + SET + linkis_job_id = #{linkisJobId}, + linkis_job_info = #{linkisJobInfo}, + last_update_time = #{lastUpdateTime}, + error_code = #{errorCode}, + error_msg = #{errorMessage}, + retry_num = #{retryNum}, + launch_time = #{launchTime}, + running_time = #{runningTime}, + status = #{status} + WHERE + task_id = #{taskId} + + + + + UPDATE + SET + metrics = #{metrics}, + last_update_time = #{updateTime} + WHERE + task_id = #{taskId} + + + + UPDATE + SET + status = #{status}, + last_update_time = #{updateTime} + WHERE + task_id= #{taskId} + + + + UPDATE + SET + progress = #{progress}, + last_update_time = #{updateTime} + WHERE + task_id = #{taskId} + + + + + + + + + + + + + + + + + DELETE FROM WHERE + job_execution_id = #{jobExecutionId} + + diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/metrics/ExchangisMetricConverterFactory.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/metrics/ExchangisMetricConverterFactory.java new file mode 100644 index 000000000..d3dcf6cd6 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/metrics/ExchangisMetricConverterFactory.java @@ -0,0 +1,42 @@ +package com.webank.wedatasphere.exchangis.job.server.metrics; + +import com.webank.wedatasphere.exchangis.job.enums.EngineTypeEnum; +import com.webank.wedatasphere.exchangis.job.server.metrics.converter.DataxMetricConverter; +import com.webank.wedatasphere.exchangis.job.server.metrics.converter.MetricsConverter; +import com.webank.wedatasphere.exchangis.job.server.metrics.converter.RegisterMetricConverterFactory; +import com.webank.wedatasphere.exchangis.job.server.metrics.converter.SqoopMetricConverter; +import org.apache.commons.lang.StringUtils; +import org.springframework.stereotype.Component; + +import javax.annotation.PostConstruct; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; + +/** + * Default metric converters factory + */ +@Component +public class ExchangisMetricConverterFactory implements RegisterMetricConverterFactory { + + private Map> registers = new ConcurrentHashMap<>(); + + @PostConstruct + public void init(){ + register(EngineTypeEnum.SQOOP.name(), new SqoopMetricConverter()); + register(EngineTypeEnum.DATAX.name(), new DataxMetricConverter()); + } + @Override + public void register(String engineType, MetricsConverter converter) { + if (StringUtils.isNotBlank(engineType)) { + registers.put(engineType.toLowerCase(), converter); + } + } + + @Override + public MetricsConverter getOrCreateMetricsConverter(String engineType) { + return registers.getOrDefault(StringUtils.isNotBlank(engineType)? 
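/*
 * Reviewer note (not part of the patch): engine lookups are lower-cased, and
 * on a registry miss the factory falls back to a converter that returns an
 * empty ExchangisMetricsVo, so callers never receive null. Wiring in support
 * for another engine is one call (FlinkMetricConverter is hypothetical):
 *
 *   factory.register("FLINK", new FlinkMetricConverter());
 *   MetricsConverter converter = factory.getOrCreateMetricsConverter("flink");
 */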
engineType.toLowerCase() : "null", metricMap -> { + // Return the empty vo + return new ExchangisMetricsVo(); + }); + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/metrics/ExchangisMetricsVo.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/metrics/ExchangisMetricsVo.java new file mode 100644 index 000000000..1169a1d44 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/metrics/ExchangisMetricsVo.java @@ -0,0 +1,170 @@ +package com.webank.wedatasphere.exchangis.job.server.metrics; + + +/** + */ +public class ExchangisMetricsVo implements MetricsVo{ + + /** + * Resource used + */ + private ResourceUsed resourceUsed; + + /** + * Traffic + */ + private Traffic traffic; + + /** + * Indicator + */ + private Indicator indicator; + + public ExchangisMetricsVo(){ + + } + + public ExchangisMetricsVo(ResourceUsed resourceUsed, Traffic traffic, Indicator indicator) { + this.resourceUsed = resourceUsed; + this.traffic = traffic; + this.indicator = indicator; + } + + public ResourceUsed getResourceUsed() { + return resourceUsed; + } + + public void setResourceUsed(ResourceUsed resourceUsed) { + this.resourceUsed = resourceUsed; + } + + public Traffic getTraffic() { + return traffic; + } + + public void setTraffic(Traffic traffic) { + this.traffic = traffic; + } + + public Indicator getIndicator() { + return indicator; + } + + public void setIndicator(Indicator indicator) { + this.indicator = indicator; + } + + public static class ResourceUsed { + private double cpu = 0.0; + + private long memory = 0; + + public ResourceUsed(double cpu, long memory){ + this.cpu = cpu; + this.memory = memory; + } + + public ResourceUsed(){ + + } + + public double getCpu() { + return cpu; + } + + public void setCpu(double cpu) { + this.cpu = cpu; + } + + public long getMemory() { + return memory; + } + + public void setMemory(long memory) { + this.memory = memory; + } + } + + public static class Traffic{ + private String source = "source"; + + private String sink = "sink"; + + private double flow; + + public Traffic(String source, String sink, double flow){ + this.source = source; + this.sink = sink; + this.flow = flow; + } + + public Traffic(){ + + } + + public String getSource() { + return source; + } + + public void setSource(String source) { + this.source = source; + } + + public String getSink() { + return sink; + } + + public void setSink(String sink) { + this.sink = sink; + } + + public double getFlow() { + return flow; + } + + public void setFlow(double flow) { + this.flow = flow; + } + } + + public static class Indicator{ + private long exchangedRecords = 0; + + private long errorRecords = 0; + + private long ignoredRecords = 0; + + public Indicator(long exchangedRecords, long errorRecords, long ignoredRecords){ + this.exchangedRecords = exchangedRecords; + this.errorRecords = errorRecords; + this.ignoredRecords = ignoredRecords; + } + + public Indicator(){ + + } + public long getExchangedRecords() { + return exchangedRecords; + } + + public void setExchangedRecords(Long exchangedRecords) { + this.exchangedRecords = exchangedRecords; + } + + public long getErrorRecords() { + return errorRecords; + } + + public void setErrorRecords(Long errorRecords) { + this.errorRecords = errorRecords; + } + + public long getIgnoredRecords() { + return ignoredRecords; + } + + public void setIgnoredRecords(Long ignoredRecords) { + this.ignoredRecords = 
ignoredRecords; + } + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/metrics/MetricConverterDemo.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/metrics/MetricConverterDemo.java new file mode 100644 index 000000000..01d2eb864 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/metrics/MetricConverterDemo.java @@ -0,0 +1,67 @@ +/* +package com.webank.wedatasphere.exchangis.job.server.metrics; + +import com.webank.wedatasphere.exchangis.job.launcher.entity.LaunchedExchangisJobEntity; +import com.webank.wedatasphere.exchangis.job.launcher.entity.LaunchedExchangisTaskEntity; +import com.webank.wedatasphere.exchangis.job.server.metrics.converter.MetricConverterFactory; +import com.webank.wedatasphere.exchangis.job.server.metrics.converter.MetricsConverter; + +import java.util.HashMap; +import java.util.Map; + +*/ +/** + * + * @Date 2022/1/10 20:56 + *//* + +public class MetricConverterDemo { + private static MetricConverterFactory factory = new MetricConverterFactory() { + @Override + public MetricsConverter getOrCreateMetricsConverter(Class metricsVoClass, String engineType) { + return null; + } + }; + + public static void main(String[] args){ + + Map metricsMap = new HashMap<>(); + LaunchedTaskMetricVo launchedTaskMetricVo = factory.getOrCreateMetricsConverter(LaunchedTaskMetricVo.class, "sqoop").convert(metricsMap); + + } + + public static class LaunchedExchangisTaskVo { + public LaunchedExchangisTaskVo(LaunchedExchangisTaskEntity taskEntity){ + + } + } + + public static class LaunchedExchangisTaskProgressVo extends LaunchedExchangisTaskVo { + + public LaunchedExchangisTaskProgressVo(LaunchedExchangisTaskEntity taskEntity) { + super(taskEntity); + } + } + + public static class LaunchedExchangisTaskMetricsVo extends LaunchedExchangisTaskVo { + private LaunchedTaskMetricVo metrics; + private LaunchedTaskRescouceMetricVo resourceMetrics; + private LaunchedTaskTrafficeMetricVo trafficMetrics; + private LaunchedTaskIndicatorMetricVo IndicatorMetrics; + + public LaunchedExchangisTaskMetricsVo(LaunchedExchangisTaskEntity taskEntity) { + super(taskEntity); + this.metrics = factory.getOrCreateMetricsConverter(LaunchedTaskMetricVo.class, taskEntity.getEngineType()).convert(taskEntity.getMetricsMap()); + this.resourceMetrics = factory.getOrCreateMetricsConverter(LaunchedresourcTaskMetricVo.class, taskEntity.getEngineType(), taskEntity.getMetricsMap()); + this.trafficMetrics = factory.getOrCreateMetricsConverter(LaunchedtrafficTaskMetricVo.class, taskEntity.getEngineType(), taskEntity.getMetricsMap()); + this.IndicatorMetrics = factory.getOrCreateMetricsConverter(LaunchedIndicatorTaskMetricVo.class, taskEntity.getEngineType(), taskEntity.getMetricsMap()); + + } + } + + public static class LaunchedTaskMetricVo implements MetricsVo{ + + + } +} +*/ diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/metrics/MetricsVo.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/metrics/MetricsVo.java new file mode 100644 index 000000000..cc3d01f6d --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/metrics/MetricsVo.java @@ -0,0 +1,7 @@ +package com.webank.wedatasphere.exchangis.job.server.metrics; + +/** + * Metric Vo + */ +public interface MetricsVo { +} diff --git 
a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/metrics/converter/AbstractMetricConverter.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/metrics/converter/AbstractMetricConverter.java new file mode 100644 index 000000000..b39b0f6ff --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/metrics/converter/AbstractMetricConverter.java @@ -0,0 +1,91 @@ +package com.webank.wedatasphere.exchangis.job.server.metrics.converter; + +import com.webank.wedatasphere.exchangis.job.server.exception.ExchangisJobServerException; +import com.webank.wedatasphere.exchangis.job.server.metrics.ExchangisMetricsVo; +import com.webank.wedatasphere.exchangis.job.server.utils.JsonEntity; + +import java.util.Map; +import java.util.Objects; + +import static com.webank.wedatasphere.exchangis.job.exception.ExchangisJobExceptionCode.METRICS_OP_ERROR; + +/** + * Abstract converter + */ +public abstract class AbstractMetricConverter implements MetricsConverter { + /** + * Convert method + * @param metricMap map value + * @return + */ + @Override + public ExchangisMetricsVo convert(Map metricMap) throws ExchangisJobServerException { + ExchangisMetricsVo metricsVo = new ExchangisMetricsVo(); + MetricsParser metricsParser = getParser(); + if (Objects.nonNull(metricsParser) && + Objects.nonNull(metricMap) && !metricMap.isEmpty()){ + JsonEntity metric = JsonEntity.from(metricMap); + try { + // ResourceUsed + JsonEntity resourceUsedEntity = metric.getConfiguration(metricsParser.resourceUsedKey()); + ExchangisMetricsVo.ResourceUsed resourceUsed = Objects.nonNull(resourceUsedEntity) ? + metricsParser.parseResourceUsed(metricsParser.resourceUsedKey(), resourceUsedEntity) : null; + metricsVo.setResourceUsed(Objects.nonNull(resourceUsed) ? resourceUsed : new ExchangisMetricsVo.ResourceUsed()); + } catch (Exception e){ + throw new ExchangisJobServerException(METRICS_OP_ERROR.getCode(), "Exception in parsing \"resourceUsed\" info", e); + } + try { + // Traffic + JsonEntity trafficEntity = metric.getConfiguration(metricsParser.trafficKey()); + ExchangisMetricsVo.Traffic traffic = Objects.nonNull(trafficEntity) ? + metricsParser.parseTraffic(metricsParser.trafficKey(), trafficEntity) : null; + metricsVo.setTraffic(Objects.nonNull(traffic) ? traffic : new ExchangisMetricsVo.Traffic()); + } catch (Exception e){ + throw new ExchangisJobServerException(METRICS_OP_ERROR.getCode(), "Exception in parsing \"traffic\" info", e); + } + try { + // Indicator + JsonEntity indicatorEntity = metric.getConfiguration(metricsParser.indicatorKey()); + ExchangisMetricsVo.Indicator indicator = Objects.nonNull(indicatorEntity) ? + metricsParser.parseIndicator(metricsParser.indicatorKey(), indicatorEntity) : null; + metricsVo.setIndicator(Objects.nonNull(indicator) ? 
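/*
 * Reviewer note (not part of the patch): AbstractMetricConverter is a small
 * template method; an engine converter only names the JSON sub-keys and maps
 * each sub-tree. A minimal hypothetical implementation (key names invented):
 *
 *   public class MyEngineMetricConverter extends AbstractMetricConverter
 *           implements AbstractMetricConverter.MetricsParser {
 *       @Override protected MetricsParser getParser() { return this; }
 *       @Override public String trafficKey() { return "traffic"; }
 *       @Override public ExchangisMetricsVo.Traffic parseTraffic(String key, JsonEntity raw) {
 *           ExchangisMetricsVo.Traffic traffic = new ExchangisMetricsVo.Traffic();
 *           traffic.setFlow(Optional.ofNullable(raw.getDouble("flow")).orElse(0.0));
 *           return traffic;
 *       }
 *       @Override public ExchangisMetricsVo.ResourceUsed parseResourceUsed(String key, JsonEntity raw) {
 *           return new ExchangisMetricsVo.ResourceUsed(); // engine exposes no resource metrics
 *       }
 *       @Override public ExchangisMetricsVo.Indicator parseIndicator(String key, JsonEntity raw) {
 *           return new ExchangisMetricsVo.Indicator(); // engine exposes no record counters
 *       }
 *   }
 */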
indicator : new ExchangisMetricsVo.Indicator()); + }catch (Exception e){ + throw new ExchangisJobServerException(METRICS_OP_ERROR.getCode(), "Exception in parsing \"indicator\" info", e); + } + } + return metricsVo; + } + + /** + * Get parser + * @return parser + */ + protected abstract MetricsParser getParser(); + + protected interface MetricsParser{ + /** + * ResourceUsed key + * @return key + */ + default String resourceUsedKey(){ return "-"; }; + + /** + * Traffic key + * @return key + */ + default String trafficKey(){ return "-"; }; + + /** + * Indicator key + * @return key + */ + default String indicatorKey(){ return "-"; }; + + ExchangisMetricsVo.ResourceUsed parseResourceUsed(String key, JsonEntity rawValue); + + ExchangisMetricsVo.Traffic parseTraffic(String key, JsonEntity rawValue); + + ExchangisMetricsVo.Indicator parseIndicator(String key, JsonEntity rawValue); + } + +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/metrics/converter/DataxMetricConverter.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/metrics/converter/DataxMetricConverter.java new file mode 100644 index 000000000..59c38f5ac --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/metrics/converter/DataxMetricConverter.java @@ -0,0 +1,83 @@ +package com.webank.wedatasphere.exchangis.job.server.metrics.converter; + +import com.webank.wedatasphere.exchangis.job.server.metrics.ExchangisMetricsVo; +import com.webank.wedatasphere.exchangis.job.server.utils.JsonEntity; +import com.webank.wedatasphere.exchangis.job.utils.MemUtils; +import org.apache.commons.lang.StringUtils; + +import java.math.BigDecimal; +import java.math.RoundingMode; +import java.util.Arrays; +import java.util.Objects; +import java.util.Optional; + + +/** + * Convert the metrics in datax engine + */ +public class DataxMetricConverter extends AbstractMetricConverter implements AbstractMetricConverter.MetricsParser{ + + @Override + public String resourceUsedKey() { + return ""; + } + + @Override + public String trafficKey() { + return ""; + } + + @Override + public String indicatorKey() { + return ""; + } + + @Override + protected AbstractMetricConverter.MetricsParser getParser() { + return this; + } + + @Override + public ExchangisMetricsVo.ResourceUsed parseResourceUsed(String key, JsonEntity rawValue) { + ExchangisMetricsVo.ResourceUsed resourceUsed = new ExchangisMetricsVo.ResourceUsed(); + String nodeResource = rawValue.getString("NodeResourceJson"); + if (StringUtils.isNotBlank(nodeResource)){ + JsonEntity nodeResourceJson = JsonEntity.from(nodeResource); + String memoryUnit = nodeResourceJson.getString("memory"); + if (StringUtils.isNotBlank(memoryUnit)){ + String[] memory = memoryUnit.split(" "); + resourceUsed.setMemory(memory.length >= 2 ? 
+ MemUtils.convertToMB((long) Double.parseDouble(memory[0]), memory[1]) : (long) Double.parseDouble(memory[0])); + } + String cpuVCores = nodeResourceJson.getString("cpu"); + if (StringUtils.isNotBlank(cpuVCores)){ + resourceUsed.setCpu(Integer.parseInt(cpuVCores)); + } + } + return resourceUsed; + } + + @Override + public ExchangisMetricsVo.Traffic parseTraffic(String key, JsonEntity rawValue) { + ExchangisMetricsVo.Traffic traffic = new ExchangisMetricsVo.Traffic(); + Double speed = Optional.ofNullable(rawValue.getDouble("recordSpeedPerSecond")).orElse( + rawValue.getDouble("recordSpeed") + ); + if (Objects.nonNull(speed)){ + traffic.setFlow(new BigDecimal(speed).setScale(2, RoundingMode.HALF_UP).doubleValue()); + } + return traffic; + } + + @Override + public ExchangisMetricsVo.Indicator parseIndicator(String key, JsonEntity rawValue) { + ExchangisMetricsVo.Indicator indicator = new ExchangisMetricsVo.Indicator(); + long readSuccess = Optional.ofNullable(rawValue.getLong("readSucceedRecords")).orElse(0L); + long readFail = Optional.ofNullable(rawValue.getLong("readFailedRecords")).orElse(0L); + indicator.setExchangedRecords(readSuccess + readFail); + long writeFail = Optional.ofNullable(rawValue.getLong("writeFailedRecords")).orElse(0L); + indicator.setErrorRecords(readFail + writeFail); + return indicator; + } + +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/metrics/converter/MetricConverterFactory.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/metrics/converter/MetricConverterFactory.java new file mode 100644 index 000000000..f1ef689ef --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/metrics/converter/MetricConverterFactory.java @@ -0,0 +1,18 @@ +package com.webank.wedatasphere.exchangis.job.server.metrics.converter; + + +import com.webank.wedatasphere.exchangis.job.server.metrics.MetricsVo; + +/** + * Metric ConverterFactory + * Metrics vo + */ +public interface MetricConverterFactory { + + /** + * getConverter + * @param engineType engine type + * @return converter + */ + MetricsConverter getOrCreateMetricsConverter(String engineType); +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/metrics/converter/MetricsConverter.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/metrics/converter/MetricsConverter.java new file mode 100644 index 000000000..85283bae9 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/metrics/converter/MetricsConverter.java @@ -0,0 +1,20 @@ +package com.webank.wedatasphere.exchangis.job.server.metrics.converter; + + +import com.webank.wedatasphere.exchangis.job.server.exception.ExchangisJobServerException; +import com.webank.wedatasphere.exchangis.job.server.metrics.MetricsVo; + +import java.util.Map; + +/** + * Converter the metricMap to MetricVo + */ +public interface MetricsConverter { + + /** + * Convert method + * @param metricMap map value + * @return T entity extends MetricsVo + */ + T convert(Map metricMap) throws ExchangisJobServerException; +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/metrics/converter/RegisterMetricConverterFactory.java 
b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/metrics/converter/RegisterMetricConverterFactory.java new file mode 100644 index 000000000..e4648958c --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/metrics/converter/RegisterMetricConverterFactory.java @@ -0,0 +1,17 @@ +package com.webank.wedatasphere.exchangis.job.server.metrics.converter; + +import com.webank.wedatasphere.exchangis.job.server.metrics.MetricsVo; + +/** + * Registrable Converter factory + * @param + */ +public interface RegisterMetricConverterFactory extends MetricConverterFactory { + + /** + * Register method + * @param engineType engine type + * @param converter converter + */ + void register(String engineType, MetricsConverter converter); +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/metrics/converter/SqoopMetricConverter.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/metrics/converter/SqoopMetricConverter.java new file mode 100644 index 000000000..041e0feed --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/metrics/converter/SqoopMetricConverter.java @@ -0,0 +1,78 @@ +package com.webank.wedatasphere.exchangis.job.server.metrics.converter; + +import com.webank.wedatasphere.exchangis.job.server.metrics.ExchangisMetricsVo; +import com.webank.wedatasphere.exchangis.job.server.utils.JsonEntity; +import com.webank.wedatasphere.exchangis.job.utils.MemUtils; +import org.apache.commons.lang.StringUtils; + +import java.math.BigDecimal; +import java.math.RoundingMode; +import java.util.Objects; + +/** + * Convert the metrics in sqoop engine + */ +public class SqoopMetricConverter extends AbstractMetricConverter implements AbstractMetricConverter.MetricsParser { + + @Override + public String resourceUsedKey() { + return ""; + } + + @Override + public String trafficKey() { + return ""; + } + + @Override + public String indicatorKey() { + return JsonEntity.encodePath("org.apache.hadoop.mapreduce.TaskCounter"); + } + + @Override + protected AbstractMetricConverter.MetricsParser getParser() { + return this; + } + + @Override + public ExchangisMetricsVo.ResourceUsed parseResourceUsed(String key, JsonEntity rawValue) { + ExchangisMetricsVo.ResourceUsed resourceUsed = new ExchangisMetricsVo.ResourceUsed(); + String nodeResource = rawValue.getString("NodeResourceJson"); + if (StringUtils.isNotBlank(nodeResource)){ + JsonEntity nodeResourceJson = JsonEntity.from(nodeResource); + String memoryUnit = nodeResourceJson.getString("driver.memory"); + if (StringUtils.isNotBlank(memoryUnit)){ + String[] memory = memoryUnit.split(" "); + resourceUsed.setMemory(memory.length >= 2 ? 
+ MemUtils.convertToMB((long) Double.parseDouble(memory[0]), memory[1]) : (long) Double.parseDouble(memory[0])); + } + String cpuVCores = nodeResourceJson.getString("driver.cpu"); + if (StringUtils.isNotBlank(cpuVCores)){ + resourceUsed.setCpu(Integer.parseInt(cpuVCores)); + } + } + return resourceUsed; + } + + @Override + public ExchangisMetricsVo.Traffic parseTraffic(String key, JsonEntity rawValue) { + ExchangisMetricsVo.Traffic traffic = new ExchangisMetricsVo.Traffic(); + Double records = rawValue.getDouble(JsonEntity.encodePath("org.apache.hadoop.mapreduce.TaskCounter") + ".MAP_OUTPUT_RECORDS"); + Double runTime = rawValue.getDouble("MetricsRunTime"); + if (Objects.nonNull(records) && Objects.nonNull(runTime)){ + traffic.setFlow(new BigDecimal(records / runTime * 1000).setScale(2, RoundingMode.HALF_UP).doubleValue()); + } + return traffic; + } + + @Override + public ExchangisMetricsVo.Indicator parseIndicator(String key, JsonEntity rawValue) { + ExchangisMetricsVo.Indicator indicator = new ExchangisMetricsVo.Indicator(); + Long records = rawValue.getLong("MAP_OUTPUT_RECORDS"); + if (Objects.nonNull(records)){ + indicator.setExchangedRecords(records); + } + return indicator; + } + +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/DefaultTransformContainer.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/DefaultTransformContainer.java new file mode 100644 index 000000000..bc47453df --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/DefaultTransformContainer.java @@ -0,0 +1,24 @@ +package com.webank.wedatasphere.exchangis.job.server.render.transform; + +import java.util.concurrent.ConcurrentHashMap; + +/** + * Default implement + */ +public class DefaultTransformContainer implements TransformerContainer { + + /** + * Transforms + */ + private final ConcurrentHashMap transforms = new ConcurrentHashMap<>(); + + @Override + public void registerTransformer(String type, Transformer transform) { + this.transforms.put(type, transform); + } + + @Override + public Transformer getTransformer(String type) { + return this.transforms.get(type); + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/TransformDefine.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/TransformDefine.java new file mode 100644 index 000000000..4db39d05d --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/TransformDefine.java @@ -0,0 +1,43 @@ +package com.webank.wedatasphere.exchangis.job.server.render.transform; + +import com.webank.wedatasphere.exchangis.datasource.core.utils.Json; +import com.webank.wedatasphere.exchangis.job.server.render.transform.field.mapping.FieldMappingRuleEntity; +import org.apache.commons.lang3.StringUtils; + +import java.util.ArrayList; +import java.util.List; +import java.util.Optional; + +/** + * Transform definition + */ +public class TransformDefine extends TransformRule{ + + static{ + TransformRule.typeClasses.put(TransformRule.Types.DEF.name(), TransformDefine.class); + } + + private List types = new ArrayList<>(); + + public TransformDefine(){ + + } + public TransformDefine(TransformRule.Types type, String ruleSource) { + super(type, ruleSource); 
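+ // When present, the rule source is this definition serialized as JSON: parse it back so the persisted "types" list is inherited (a failed parse yields null and is simply skipped).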
+ if (StringUtils.isNotBlank(ruleSource)){ + TransformDefine definition = Json.fromJson(ruleSource, TransformDefine.class); + Optional.ofNullable(definition).ifPresent(def -> { + this.types = def.types; + }); + } + } + + public List getTypes() { + return types; + } + + public void setTypes(List types) { + this.types = types; + } + +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/TransformRequestVo.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/TransformRequestVo.java new file mode 100644 index 000000000..2bbc689f9 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/TransformRequestVo.java @@ -0,0 +1,178 @@ +package com.webank.wedatasphere.exchangis.job.server.render.transform; + + +import javax.validation.constraints.NotNull; +import java.util.HashMap; +import java.util.Map; + +public class TransformRequestVo { + /** + * Engine type + */ + private String engine; + + /** + * Source type + */ + @NotNull(message = "source type cannot be null (来源类型不能为空)") + private String sourceTypeId; + + /** + * Data source id (source direction) + */ + @NotNull(message = "source id cannot be null (来源数据源ID不能为空)") + private Long sourceDataSourceId; + /** + * Database (source direction) + */ + private String sourceDataBase; + + /** + * Table (source direction) + */ + private String sourceTable; + + /** + * Table (source) not exist + */ + private boolean srcTblNotExist = false; + + /** + * Sink type id + */ + @NotNull(message = "sink type cannot be null (目的类型不能为空)") + private String sinkTypeId; + + /** + * Sink data source id + */ + @NotNull(message = "sink id cannot be null (目的数据源ID不能为空)") + private Long sinkDataSourceId; + + /** + * Database (sink direction) + */ + private String sinkDataBase; + + /** + * Table (sink direction) + */ + private String sinkTable; + + /** + * Table (sink) not exist + */ + private boolean sinkTblNotExist = false; + /** + * Labels + */ + private Map labels = new HashMap<>(); + + /** + * Operate user + */ + private String operator; + public String getEngine() { + return engine; + } + + public void setEngine(String engine) { + this.engine = engine; + } + + public String getSourceTypeId() { + return sourceTypeId; + } + + public void setSourceTypeId(String sourceTypeId) { + this.sourceTypeId = sourceTypeId; + } + + public Long getSourceDataSourceId() { + return sourceDataSourceId; + } + + public void setSourceDataSourceId(Long sourceDataSourceId) { + this.sourceDataSourceId = sourceDataSourceId; + } + + public String getSourceDataBase() { + return sourceDataBase; + } + + public void setSourceDataBase(String sourceDataBase) { + this.sourceDataBase = sourceDataBase; + } + + public String getSourceTable() { + return sourceTable; + } + + public void setSourceTable(String sourceTable) { + this.sourceTable = sourceTable; + } + + public String getSinkTypeId() { + return sinkTypeId; + } + + public void setSinkTypeId(String sinkTypeId) { + this.sinkTypeId = sinkTypeId; + } + + public Long getSinkDataSourceId() { + return sinkDataSourceId; + } + + public void setSinkDataSourceId(Long sinkDataSourceId) { + this.sinkDataSourceId = sinkDataSourceId; + } + + public String getSinkDataBase() { + return sinkDataBase; + } + + public void setSinkDataBase(String sinkDataBase) { + this.sinkDataBase = sinkDataBase; + } + + public String getSinkTable() { + return sinkTable; + } + + 
public void setSinkTable(String sinkTable) { + this.sinkTable = sinkTable; + } + + public Map getLabels() { + return labels; + } + + public void setLabels(Map labels) { + this.labels = labels; + } + + public String getOperator() { + return operator; + } + + public void setOperator(String operator) { + this.operator = operator; + } + + public void setSrcTblNotExist(boolean srcTblNotExist) { + this.srcTblNotExist = srcTblNotExist; + } + + public void setSinkTblNotExist(boolean sinkTblNotExist) { + this.sinkTblNotExist = sinkTblNotExist; + } + + public boolean isSrcTblNotExist() { + return srcTblNotExist; + } + + public boolean isSinkTblNotExist() { + return sinkTblNotExist; + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/TransformRule.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/TransformRule.java new file mode 100644 index 000000000..5da6d0c8e --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/TransformRule.java @@ -0,0 +1,197 @@ +package com.webank.wedatasphere.exchangis.job.server.render.transform; + + +import com.webank.wedatasphere.exchangis.datasource.core.utils.Json; +import org.apache.commons.lang3.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.lang.reflect.Constructor; +import java.lang.reflect.InvocationTargetException; +import java.util.*; + +public class TransformRule { + + private static final Logger LOG = LoggerFactory.getLogger(TransformRule.class); + /** + * Holds the registered rule type classes + */ + protected static final Map<String, Class<?>> typeClasses = new HashMap<>(); + + public enum Types { + DEF(TransformTypes.NONE), MAPPING(TransformTypes.MAPPING), PROCESSOR(TransformTypes.PROCESSOR); + final TransformTypes type; + Types(TransformTypes type){ + this.type = type; + } + + public TransformTypes getType() { + return type; + } + } + + public enum Direction { + NONE, SOURCE, SINK + } + + /** + * Rule id + */ + private Long id; + + /** + * Rule name + */ + private String ruleName; + + /** + * Rule type + */ + protected String ruleType; + + /** + * Data source type + */ + protected String dataSourceType; + + /** + * Engine type + */ + protected String engineType; + + protected String direction = Direction.NONE.name(); + /** + * Rule source + */ + protected String ruleSource; + + protected Date createTime; + + public TransformRule(){ + + } + + public TransformRule(Types type, String ruleSource){ + this.ruleType = type.name(); + this.ruleSource = ruleSource; + } + + @SuppressWarnings("unchecked") + public <T extends TransformRule> T toRule(Class<T> ruleType){ + Class<?> ruleClass = typeClasses.get(this.ruleType); + if (Objects.isNull(ruleClass) || !ruleType.isAssignableFrom(ruleClass)){ + LOG.warn("Cannot convert to rule: [{}] with rule type: [{}]", + ruleType.getCanonicalName(), this.ruleType); + } else { + try { + Constructor<?> constructor = ruleClass.getConstructor(Types.class, String.class); + return (T) constructor.newInstance(Types.valueOf(this.ruleType), this.ruleSource); + } catch (NoSuchMethodException e) { + LOG.warn("Cannot find a suitable (Types, String) constructor for rule class {}", ruleClass.getSimpleName(), e); + } catch (InvocationTargetException | InstantiationException | IllegalAccessException e) { + LOG.warn("Failed to construct rule class {} with: [{}, {}]", ruleClass.getSimpleName(), + this.ruleType, this.ruleSource, e); + } + } + return null; + }
+ + /** + * Convert the rule source (a JSON string) to a map + * @return map (empty when the source is blank or unparsable) + */ + @SuppressWarnings("unchecked") + public Map<String, Object> getRuleSourceAsMap(){ + Map<String, Object> mapResult = null; + if (StringUtils.isNotBlank(this.ruleSource)){ + mapResult = Json.fromJson(this.ruleSource, Map.class); + } + return Objects.nonNull(mapResult) ? mapResult : Collections.emptyMap(); + } + + /** + * Score how well this rule matches the given context: the data source type is mandatory (1 point), + * a matching engine adds 2, a matching direction adds 1, and any explicit mismatch disqualifies the rule (0) + */ + public int matchInFraction(String dataSourceType, String engineType, String direction){ + int fraction = Objects.nonNull(dataSourceType) && dataSourceType.equals(this.dataSourceType) ? 1 : 0; + if (fraction > 0){ + if (Objects.nonNull(this.engineType)){ + if (this.engineType.equals(engineType)) { + fraction += 2; + } else { + fraction = 0; + } + } + } + if (fraction > 0){ + if (Objects.nonNull(this.direction) && !this.direction.equals(Direction.NONE.name())){ + if (this.direction.equals(direction)) { + fraction++; + } else { + fraction = 0; + } + } + } + return fraction; + } + public String getDirection() { + return direction; + } + + public void setDirection(String direction) { + this.direction = direction; + } + + public String getRuleType() { + return ruleType; + } + + public void setRuleType(String ruleType) { + this.ruleType = ruleType; + } + + public String getEngineType() { + return engineType; + } + + public void setEngineType(String engineType) { + this.engineType = engineType; + } + + public String getRuleSource() { + return ruleSource; + } + + public void setRuleSource(String ruleSource) { + this.ruleSource = ruleSource; + } + + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } + + public String getRuleName() { + return ruleName; + } + + public void setRuleName(String ruleName) { + this.ruleName = ruleName; + } + + public String getDataSourceType() { + return dataSourceType; + } + + public void setDataSourceType(String dataSourceType) { + this.dataSourceType = dataSourceType; + } + + public Date getCreateTime() { + return createTime; + } + + public void setCreateTime(Date createTime) { + this.createTime = createTime; + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/TransformRulesFusion.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/TransformRulesFusion.java new file mode 100644 index 000000000..8de4bdfad --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/TransformRulesFusion.java @@ -0,0 +1,17 @@ +package com.webank.wedatasphere.exchangis.job.server.render.transform; + + +/** + * Transform rule fusion + * @param <T> rule type + */ +public interface TransformRulesFusion<T extends TransformRule> { + /** + * Fuse entrance + * @param sourceRule source rule + * @param sinkRule sink rule + * @return fused rule + */ + T fuse(T sourceRule, T sinkRule); + +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/TransformSettings.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/TransformSettings.java new file mode 100644 index 000000000..4c05afc95 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/TransformSettings.java @@ -0,0 +1,8 @@ +package com.webank.wedatasphere.exchangis.job.server.render.transform; + +/** + * Transform settings + */ +public class TransformSettings { + +}
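A worked example of the scoring (the rule values are illustrative; the arithmetic follows `matchInFraction` above):

```java
TransformRule rule = new TransformRule();
rule.setDataSourceType("HIVE");
rule.setEngineType("sqoop");
rule.setDirection("SINK");

rule.matchInFraction("HIVE", "sqoop", "SINK");  // 1 (type) + 2 (engine) + 1 (direction) = 4
rule.matchInFraction("HIVE", "datax", "SINK");  // engine mismatch disqualifies: 0
rule.matchInFraction("MYSQL", "sqoop", "SINK"); // data source type is mandatory: 0
```

`FieldMappingTransformer` later in this diff uses exactly this score to pick the best registered rule for a request.

diff --git 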
a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/TransformTypes.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/TransformTypes.java new file mode 100644 index 000000000..d6bd4bd2a --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/TransformTypes.java @@ -0,0 +1,5 @@ +package com.webank.wedatasphere.exchangis.job.server.render.transform; + +public enum TransformTypes { + NONE, MAPPING, PROCESSOR +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/Transformer.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/Transformer.java new file mode 100644 index 000000000..f0734ce56 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/Transformer.java @@ -0,0 +1,15 @@ +package com.webank.wedatasphere.exchangis.job.server.render.transform; + +/** + * Transform basic interface in rendering + */ +public interface Transformer { + + String name(); + /** + * Get the settings by request params + * @param requestVo request params + * @return settings + */ + TransformSettings getSettings(TransformRequestVo requestVo); +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/TransformerContainer.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/TransformerContainer.java new file mode 100644 index 000000000..377887e03 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/TransformerContainer.java @@ -0,0 +1,21 @@ +package com.webank.wedatasphere.exchangis.job.server.render.transform; + +/** + * Transform container + */ +public interface TransformerContainer { + + /** + * Register transform + * @param type type + * @param transform transform + */ + void registerTransformer(String type, Transformer transform); + + /** + * Get transform + * @param type type + * @return transform + */ + Transformer getTransformer(String type); +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/def/DefaultTransformDefineRulesFusion.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/def/DefaultTransformDefineRulesFusion.java new file mode 100644 index 000000000..2c3eff2d9 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/def/DefaultTransformDefineRulesFusion.java @@ -0,0 +1,39 @@ +package com.webank.wedatasphere.exchangis.job.server.render.transform.def; + +import com.webank.wedatasphere.exchangis.job.server.render.transform.TransformDefine; +import com.webank.wedatasphere.exchangis.job.server.render.transform.TransformRule; +import com.webank.wedatasphere.exchangis.job.server.render.transform.TransformRulesFusion; +import com.webank.wedatasphere.exchangis.job.server.render.transform.TransformTypes; + +import java.util.ArrayList; +import java.util.HashSet; +import java.util.Set; +import java.util.stream.Collectors; + +/** + * Default transform define rules fusion + */ +public 
class DefaultTransformDefineRulesFusion implements TransformRulesFusion { + @Override + public TransformDefine fuse(TransformDefine sourceRule, TransformDefine sinkRule) { + Set typeSet = new HashSet<>(); + typeSet.addAll(sourceRule.getTypes()); + typeSet.addAll(sinkRule.getTypes()); + // Filter the unrecognized value + typeSet = typeSet.stream().filter( type -> { + try { + TransformTypes.valueOf(type); + return true; + }catch (Exception e){ + //Ignore + return false; + } + }).collect(Collectors.toSet()); + if (typeSet.contains(TransformTypes.NONE.name())){ + typeSet.clear(); + } + TransformDefine fusedDefine = new TransformDefine(TransformRule.Types.DEF, null); + fusedDefine.setTypes(new ArrayList<>(typeSet)); + return fusedDefine; + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/field/FieldColumn.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/field/FieldColumn.java new file mode 100644 index 000000000..61022482d --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/field/FieldColumn.java @@ -0,0 +1,57 @@ +package com.webank.wedatasphere.exchangis.job.server.render.transform.field; + +/** + * Field column + */ +public class FieldColumn { + + /** + * Field name + */ + protected String name; + + /** + * Field type + */ + protected String type; + + /** + * Field index + */ + protected int fieldIndex; + + public FieldColumn(){ + + } + + public FieldColumn(String name, String type, int fieldIndex){ + this.name = name; + this.type = type; + this.fieldIndex = fieldIndex; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public String getType() { + return type; + } + + public void setType(String type) { + this.type = type; + } + + public int getFieldIndex() { + return fieldIndex; + } + + public void setFieldIndex(int fieldIndex) { + this.fieldIndex = fieldIndex; + } + +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/field/mapping/DefaultFieldMappingRulesFusion.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/field/mapping/DefaultFieldMappingRulesFusion.java new file mode 100644 index 000000000..dbe51e465 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/field/mapping/DefaultFieldMappingRulesFusion.java @@ -0,0 +1,54 @@ +package com.webank.wedatasphere.exchangis.job.server.render.transform.field.mapping; + +import com.webank.wedatasphere.exchangis.job.server.render.transform.TransformRule; +import com.webank.wedatasphere.exchangis.job.server.render.transform.field.mapping.match.FieldMatchStrategyFactory; +import org.apache.commons.lang.StringUtils; + +/** + * Rules fusion + */ +public class DefaultFieldMappingRulesFusion implements FieldMappingRulesFusion{ + + /** + * Strategy factory + */ + private final FieldMatchStrategyFactory matchStrategyFactory; + + public DefaultFieldMappingRulesFusion(FieldMatchStrategyFactory matchStrategyFactory){ + this.matchStrategyFactory = matchStrategyFactory; + } + + @Override + public FieldMappingRule fuse(FieldMappingRule sourceRule, FieldMappingRule sinkRule) { + FieldMappingRule fusedRule = new FieldMappingRuleEntity(); + // And calculate + 
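// (a boolean AND across both directions: a capability stays enabled only when the source and sink rules both allow it) + 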
fusedRule.setFieldAddEnable(sourceRule.isFieldAddEnable() && sinkRule.isFieldAddEnable()); + fusedRule.setFieldDeleteEnable(sourceRule.isFieldDeleteEnable() && sinkRule.isFieldDeleteEnable()); + fusedRule.setFieldTransformEnable(sourceRule.isFieldTransformEnable() && sinkRule.isFieldTransformEnable()); + // Combine the per-direction edit-enable items + fusedRule.getFieldEditEnableRuleItem().putAll(sourceRule.getFieldEditEnableRuleItem()); + fusedRule.getFieldEditEnableRuleItem().putAll(sinkRule.getFieldEditEnableRuleItem()); + // Prefer the sink-side match strategy; fall back to the source side + String matchStrategy = sinkRule.getFieldMatchStrategyName(); + TransformRule.Direction matchDirection = TransformRule.Direction.SINK; + if (StringUtils.isBlank(matchStrategy)){ + matchStrategy = sourceRule.getFieldMatchStrategyName(); + if (StringUtils.isNotBlank(matchStrategy)){ + matchDirection = TransformRule.Direction.SOURCE; + } + } + fusedRule.setFieldMatchStrategyName(matchStrategy); + if (StringUtils.isNotBlank(matchStrategy)){ + fusedRule.setFieldMatchStrategy(getFieldMatchStrategyFactory().getOrCreateStrategy(matchStrategy)); + } + fusedRule.setDirection(matchDirection.name()); + fusedRule.setFieldUnMatchIgnore(matchDirection == TransformRule.Direction.SOURCE ? + sourceRule.isFieldUnMatchIgnore() : sinkRule.isFieldUnMatchIgnore()); + return fusedRule; + } + + @Override + public FieldMatchStrategyFactory getFieldMatchStrategyFactory() { + return this.matchStrategyFactory; + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/field/mapping/FieldMappingColumn.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/field/mapping/FieldMappingColumn.java new file mode 100644 index 000000000..df0832910 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/field/mapping/FieldMappingColumn.java @@ -0,0 +1,58 @@ +package com.webank.wedatasphere.exchangis.job.server.render.transform.field.mapping; + +import com.webank.wedatasphere.exchangis.job.server.render.transform.field.FieldColumn; + +/** + * Mapping column + */ +public class FieldMappingColumn { + + /** + * Source column + */ + private FieldColumn source; + + /** + * Sink column + */ + private FieldColumn sink; + + /** + * Delete enable switch + */ + private boolean deleteEnable; + + public FieldMappingColumn(){ + + } + + public FieldMappingColumn(FieldColumn source, FieldColumn sink, boolean deleteEnable){ + this.source = source; + this.sink = sink; + this.deleteEnable = deleteEnable; + } + + public FieldColumn getSource() { + return source; + } + + public void setSource(FieldColumn source) { + this.source = source; + } + + public FieldColumn getSink() { + return sink; + } + + public void setSink(FieldColumn sink) { + this.sink = sink; + } + + public boolean isDeleteEnable() { + return deleteEnable; + } + + public void setDeleteEnable(boolean deleteEnable) { + this.deleteEnable = deleteEnable; + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/field/mapping/FieldMappingRule.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/field/mapping/FieldMappingRule.java new file mode 100644 index 000000000..17edaf5ae --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/field/mapping/FieldMappingRule.java @@ -0,0 
+1,112 @@ +package com.webank.wedatasphere.exchangis.job.server.render.transform.field.mapping; + +import com.webank.wedatasphere.exchangis.job.server.render.transform.TransformRule; +import com.webank.wedatasphere.exchangis.job.server.render.transform.field.mapping.match.FieldMatchStrategy; + +import java.util.HashMap; +import java.util.Map; + +/** + * Field mapping rule + */ +public abstract class FieldMappingRule extends TransformRule { + /** + * Field add enable + */ + protected boolean fieldAddEnable = true; + + /** + * Field delete enable + */ + protected boolean fieldDeleteEnable = true; + + /** + * Field transform enable + */ + protected boolean fieldTransformEnable = true; + + /** + * Rule item of field edit enable + */ + protected Map fieldEditEnableRuleItem = new HashMap<>(); + + /** + * Strategy name of field matching + */ + protected String fieldMatchStrategyName; + + /** + * Strategy of field matching + */ + protected FieldMatchStrategy fieldMatchStrategy; + + /** + * If ignore the unMatch field + */ + protected boolean fieldUnMatchIgnore; + + public FieldMappingRule(){ + this.ruleType = TransformRule.Types.MAPPING.name(); + } + + public FieldMappingRule(TransformRule.Types type, String ruleSource) { + super(type, ruleSource); + } + + + public boolean isFieldAddEnable() { + return fieldAddEnable; + } + + public void setFieldAddEnable(boolean fieldAddEnable) { + this.fieldAddEnable = fieldAddEnable; + } + + public boolean isFieldDeleteEnable() { + return fieldDeleteEnable; + } + + public void setFieldDeleteEnable(boolean fieldDeleteEnable) { + this.fieldDeleteEnable = fieldDeleteEnable; + } + + public boolean isFieldTransformEnable() { + return fieldTransformEnable; + } + + public void setFieldTransformEnable(boolean fieldTransformEnable) { + this.fieldTransformEnable = fieldTransformEnable; + } + + public Map getFieldEditEnableRuleItem() { + return fieldEditEnableRuleItem; + } + + public void setFieldEditEnableRuleItem(Map fieldEditEnableRuleItem) { + this.fieldEditEnableRuleItem = fieldEditEnableRuleItem; + } + + public String getFieldMatchStrategyName() { + return fieldMatchStrategyName; + } + + public void setFieldMatchStrategyName(String fieldMatchStrategyName) { + this.fieldMatchStrategyName = fieldMatchStrategyName; + } + + public FieldMatchStrategy getFieldMatchStrategy() { + return fieldMatchStrategy; + } + + public void setFieldMatchStrategy(FieldMatchStrategy fieldMatchStrategy) { + this.fieldMatchStrategy = fieldMatchStrategy; + } + + public boolean isFieldUnMatchIgnore() { + return fieldUnMatchIgnore; + } + + public void setFieldUnMatchIgnore(boolean fieldUnMatchIgnore) { + this.fieldUnMatchIgnore = fieldUnMatchIgnore; + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/field/mapping/FieldMappingRuleEntity.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/field/mapping/FieldMappingRuleEntity.java new file mode 100644 index 000000000..90bbab04b --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/field/mapping/FieldMappingRuleEntity.java @@ -0,0 +1,66 @@ +package com.webank.wedatasphere.exchangis.job.server.render.transform.field.mapping; + +import com.fasterxml.jackson.annotation.JsonInclude; +import com.webank.wedatasphere.exchangis.datasource.core.utils.Json; +import 
com.webank.wedatasphere.exchangis.job.server.render.transform.TransformRule; +import org.apache.commons.lang3.StringUtils; + +import java.util.Map; +import java.util.Optional; + +/** + * Rule entity + */ +@JsonInclude(JsonInclude.Include.NON_EMPTY) +public class FieldMappingRuleEntity extends FieldMappingRule{ + + static { + // Register type + TransformRule.typeClasses.put(TransformRule.Types.MAPPING.name(), FieldMappingRuleEntity.class); + } + + private String id; + + /** + * Field edit enable + */ + private boolean fieldEditEnable = true; + + @Override + public Map getFieldEditEnableRuleItem() { + Map ruleItem = super.getFieldEditEnableRuleItem(); + Optional.ofNullable(ruleItem).ifPresent(item -> { + if (StringUtils.isNotBlank(direction)){ + item.computeIfAbsent(direction, + name -> fieldEditEnable); + } + }); + return ruleItem; + } + + public FieldMappingRuleEntity(){ + super(); + } + public FieldMappingRuleEntity(TransformRule.Types type, String ruleSource) { + super(type, ruleSource); + if (StringUtils.isNotBlank(ruleSource)){ + FieldMappingRuleEntity ruleEntity = Json.fromJson(ruleSource, FieldMappingRuleEntity.class); + Optional.ofNullable(ruleEntity).ifPresent(rule -> { + this.fieldAddEnable = rule.fieldAddEnable; + this.fieldDeleteEnable = rule.fieldDeleteEnable; + this.fieldTransformEnable = rule.fieldTransformEnable; + this.fieldMatchStrategyName = rule.fieldMatchStrategyName; + this.fieldEditEnable = rule.fieldEditEnable; + }); + } + } + + public boolean isFieldEditEnable() { + return fieldEditEnable; + } + + public void setFieldEditEnable(boolean fieldEditEnable) { + this.fieldEditEnable = fieldEditEnable; + } + +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/field/mapping/FieldMappingRulesFusion.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/field/mapping/FieldMappingRulesFusion.java new file mode 100644 index 000000000..bf95cbb17 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/field/mapping/FieldMappingRulesFusion.java @@ -0,0 +1,16 @@ +package com.webank.wedatasphere.exchangis.job.server.render.transform.field.mapping; + +import com.webank.wedatasphere.exchangis.job.server.render.transform.TransformRulesFusion; +import com.webank.wedatasphere.exchangis.job.server.render.transform.field.mapping.match.FieldMatchStrategyFactory; + +/** + * Rules fusion + */ +public interface FieldMappingRulesFusion extends TransformRulesFusion { + + /** + * Strategy factory + * @return factory + */ + FieldMatchStrategyFactory getFieldMatchStrategyFactory(); +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/field/mapping/FieldMappingSettings.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/field/mapping/FieldMappingSettings.java new file mode 100644 index 000000000..6297687a3 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/field/mapping/FieldMappingSettings.java @@ -0,0 +1,73 @@ +package com.webank.wedatasphere.exchangis.job.server.render.transform.field.mapping; + +import com.webank.wedatasphere.exchangis.job.server.render.transform.TransformSettings; +import com.webank.wedatasphere.exchangis.job.server.render.transform.field.FieldColumn; + 
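+// Illustrative example (assumption) of the JSON a MAPPING rule carries in its rule source, as bound by FieldMappingRuleEntity(Types, String) above: +// {"fieldAddEnable": false, "fieldDeleteEnable": true, "fieldTransformEnable": true, "fieldMatchStrategyName": "CAMEL_CASE_MATCH", "fieldEditEnable": false}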
+import java.util.ArrayList; +import java.util.List; + +public class FieldMappingSettings extends TransformSettings { + /** + * Add enable switch + */ + private boolean addEnable; + + /** + * Transform function enable switch + */ + private boolean transformEnable; + /** + * Source fields + */ + private List sourceFields = new ArrayList<>(); + + /** + * Sink fields + */ + private List sinkFields = new ArrayList<>(); + + /** + * Mapping column + */ + private List deductions = new ArrayList<>(); + + public boolean isAddEnable() { + return addEnable; + } + + public void setAddEnable(boolean addEnable) { + this.addEnable = addEnable; + } + + public boolean isTransformEnable() { + return transformEnable; + } + + public void setTransformEnable(boolean transformEnable) { + this.transformEnable = transformEnable; + } + + public List getSourceFields() { + return sourceFields; + } + + public void setSourceFields(List sourceFields) { + this.sourceFields = sourceFields; + } + + public List getSinkFields() { + return sinkFields; + } + + public void setSinkFields(List sinkFields) { + this.sinkFields = sinkFields; + } + + public List getDeductions() { + return deductions; + } + + public void setDeductions(List deductions) { + this.deductions = deductions; + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/field/mapping/FieldMappingTransformer.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/field/mapping/FieldMappingTransformer.java new file mode 100644 index 000000000..19454a4f2 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/field/mapping/FieldMappingTransformer.java @@ -0,0 +1,187 @@ +package com.webank.wedatasphere.exchangis.job.server.render.transform.field.mapping; + +import com.webank.wedatasphere.exchangis.datasource.core.domain.MetaColumn; +import com.webank.wedatasphere.exchangis.datasource.core.exception.ExchangisDataSourceException; +import com.webank.wedatasphere.exchangis.datasource.core.service.MetadataInfoService; +import com.webank.wedatasphere.exchangis.job.exception.ExchangisJobException; +import com.webank.wedatasphere.exchangis.job.exception.ExchangisJobExceptionCode; +import com.webank.wedatasphere.exchangis.job.server.mapper.JobTransformRuleDao; +import com.webank.wedatasphere.exchangis.job.server.render.transform.TransformRule; +import com.webank.wedatasphere.exchangis.job.server.render.transform.Transformer; +import com.webank.wedatasphere.exchangis.job.server.render.transform.TransformRequestVo; +import com.webank.wedatasphere.exchangis.job.server.render.transform.TransformSettings; +import com.webank.wedatasphere.exchangis.job.server.render.transform.field.FieldColumn; +import com.webank.wedatasphere.exchangis.job.server.render.transform.field.mapping.match.FieldAllMatchIgnoreCaseStrategy; +import com.webank.wedatasphere.exchangis.job.server.render.transform.field.mapping.match.FieldAllMatchStrategy; +import com.webank.wedatasphere.exchangis.job.server.render.transform.field.mapping.match.FieldColumnMatch; +import com.webank.wedatasphere.exchangis.job.server.render.transform.field.mapping.match.FieldMatchStrategy; +import com.webank.wedatasphere.exchangis.job.server.utils.SpringContextHolder; + +import java.util.ArrayList; +import java.util.List; +import java.util.Objects; +import java.util.concurrent.atomic.AtomicInteger; +import 
java.util.concurrent.atomic.AtomicReference; + +/** + * Transform of field mapping + */ + +public class FieldMappingTransformer implements Transformer { + + /** + * Metadata info service + */ + private MetadataInfoService metadataInfoService; + + /** + * Rules fusion + */ + private final FieldMappingRulesFusion rulesFusion; + + /** + * Rule dao + */ + private final JobTransformRuleDao transformRuleDao; + + public FieldMappingTransformer(FieldMappingRulesFusion rulesFusion, JobTransformRuleDao transformRuleDao){ + this.rulesFusion = rulesFusion; + this.transformRuleDao = transformRuleDao; + } + + @Override + public String name() { + return TransformRule.Types.MAPPING.name(); + } + + @Override + public TransformSettings getSettings(TransformRequestVo requestVo) { + // Get the mapping rules + FieldMappingRule sourceRule = getFieldMappingRule(requestVo.getSourceTypeId(), requestVo.getEngine(), TransformRule.Direction.SOURCE.name()); + if (Objects.isNull(sourceRule)){ + sourceRule = new FieldMappingRuleEntity(TransformRule.Types.MAPPING, null); + } + sourceRule.setDirection(TransformRule.Direction.SOURCE.name()); + FieldMappingRule sinkRule = getFieldMappingRule(requestVo.getSinkTypeId(), requestVo.getEngine(), TransformRule.Direction.SINK.name()); + if (Objects.isNull(sinkRule)){ + sinkRule = new FieldMappingRuleEntity(TransformRule.Types.MAPPING, null); + sinkRule.setFieldMatchStrategyName(FieldAllMatchIgnoreCaseStrategy.ALL_MATCH_IGNORE_CASE); + } + sinkRule.setDirection(TransformRule.Direction.SINK.name()); + FieldMappingRule fusedRule = this.rulesFusion.fuse(sourceRule, sinkRule); + return getFieldMappingSettings(fusedRule, requestVo); + } + + /** + * Get field mapping settings + * @param rule rule + * @param requestVo request Vo + * @return field mapping settings + */ + private FieldMappingSettings getFieldMappingSettings(FieldMappingRule rule, TransformRequestVo requestVo) { + FieldMappingSettings settings = new FieldMappingSettings(); + settings.setAddEnable(rule.isFieldAddEnable()); + settings.setTransformEnable(rule.isFieldTransformEnable()); + // Get raw meta columns + List sourceColumns = new ArrayList<>(); + if (!requestVo.isSrcTblNotExist()) { + try { + List metaColumns = getOrLoadMetadataInfoService(). + getColumns(requestVo.getOperator(), requestVo.getSourceDataSourceId(), + requestVo.getSourceDataBase(), requestVo.getSourceTable()); + boolean editable = rule.getFieldEditEnableRuleItem().getOrDefault(TransformRule.Direction.SOURCE.name(), true); + for (int i = 0; i < metaColumns.size(); i++) { + MetaColumn metaColumn = metaColumns.get(i); + sourceColumns.add(new FieldColumnWrapper(metaColumn.getName(), metaColumn.getType(), i, editable)); + } + } catch (ExchangisDataSourceException e) { + throw new ExchangisJobException.Runtime(ExchangisJobExceptionCode.RENDER_TRANSFORM_ERROR.getCode(), "Fail to get source meta columns in generating field mapping settings", e); + } + } + settings.setSourceFields(sourceColumns); + List sinkColumns = new ArrayList<>(); + if (!requestVo.isSinkTblNotExist()) { + try { + List metaColumns = getOrLoadMetadataInfoService(). 
+ getColumns(requestVo.getOperator(), requestVo.getSinkDataSourceId(), + requestVo.getSinkDataBase(), requestVo.getSinkTable()); + boolean editable = rule.getFieldEditEnableRuleItem().getOrDefault(TransformRule.Direction.SINK.name(), true); + for (int i = 0; i < metaColumns.size(); i++) { + MetaColumn metaColumn = metaColumns.get(i); + sinkColumns.add(new FieldColumnWrapper(metaColumn.getName(), metaColumn.getType(), i, editable)); + } + } catch (ExchangisDataSourceException e) { + throw new ExchangisJobException.Runtime(ExchangisJobExceptionCode.RENDER_TRANSFORM_ERROR.getCode(), "Failed to get sink meta columns in generating field mapping settings", e); + } + } + settings.setSinkFields(sinkColumns); + FieldMatchStrategy matchStrategy = rule.getFieldMatchStrategy(); + if (Objects.isNull(matchStrategy)) { + // Just use the all match strategy + matchStrategy = this.rulesFusion.getFieldMatchStrategyFactory().getOrCreateStrategy(FieldAllMatchStrategy.ALL_MATCH); + } + boolean positive = TransformRule.Direction.SOURCE.name().equals(rule.getDirection()); + List<FieldColumn> dependColumns = positive ? sourceColumns : sinkColumns; + List<FieldColumn> searchColumns = positive ? sinkColumns : sourceColumns; + List<FieldColumnMatch> fieldColumnMatches = matchStrategy.match(dependColumns, searchColumns, rule.isFieldUnMatchIgnore()); + // Convert to field mappings + if (positive) { + fieldColumnMatches.forEach(fieldColumnMatch -> settings.getDeductions() + .add(new FieldMappingColumn(fieldColumnMatch.getLeftMatch(), fieldColumnMatch.getRightMatch(), rule.isFieldDeleteEnable()))); + } else { + fieldColumnMatches.forEach(fieldColumnMatch -> settings.getDeductions() + .add(new FieldMappingColumn(fieldColumnMatch.getRightMatch(), fieldColumnMatch.getLeftMatch(), rule.isFieldDeleteEnable()))); + } + return settings; + } + /** + * Get field mapping rule + * @param dataSourceType data source type + * @param engine engine type + * @param direction direction + * @return rule + */ + private FieldMappingRule getFieldMappingRule(String dataSourceType, String engine, String direction){ + AtomicReference<TransformRule> fieldMappingRule = new AtomicReference<>(); + AtomicInteger maxFraction = new AtomicInteger(0); + this.transformRuleDao.getTransformRules(TransformRule.Types.MAPPING.name(), dataSourceType).forEach(rule -> { + int fraction = rule.matchInFraction(dataSourceType, engine, direction); + if (fraction > maxFraction.get()){ + fieldMappingRule.set(rule); + maxFraction.set(fraction); + } + }); + return Objects.nonNull(fieldMappingRule.get()) ? fieldMappingRule.get().toRule(FieldMappingRuleEntity.class) : null; + }
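+ + // MetadataInfoService is resolved lazily from the Spring context (via SpringContextHolder) on first use, presumably so the transformer can be constructed before the service bean is ready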
+ private MetadataInfoService getOrLoadMetadataInfoService(){ + if (Objects.isNull(this.metadataInfoService)){ + this.metadataInfoService = SpringContextHolder.getBean(MetadataInfoService.class); + } + return this.metadataInfoService; + } + public static class FieldColumnWrapper extends FieldColumn{ + /** + * Edit enable switch + */ + private boolean fieldEditable; + + public FieldColumnWrapper(){ + + } + + public FieldColumnWrapper(String name, String type, int fieldIndex, boolean fieldEditable){ + super(name, type, fieldIndex); + this.fieldEditable = fieldEditable; + } + + public boolean isFieldEditable() { + return fieldEditable; + } + + public void setFieldEditable(boolean fieldEditable) { + this.fieldEditable = fieldEditable; + } + } + +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/field/mapping/match/AbstractFieldMatchStrategy.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/field/mapping/match/AbstractFieldMatchStrategy.java new file mode 100644 index 000000000..883936bb3 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/field/mapping/match/AbstractFieldMatchStrategy.java @@ -0,0 +1,53 @@ +package com.webank.wedatasphere.exchangis.job.server.render.transform.field.mapping.match; + +import com.webank.wedatasphere.exchangis.job.server.render.transform.field.FieldColumn; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.stream.Collectors; + +/** + * Abstract field match strategy + */ +public abstract class AbstractFieldMatchStrategy implements FieldMatchStrategy { + @Override + public List<FieldColumnMatch> match(List<FieldColumn> dependColumns, List<FieldColumn> searchColumns, boolean ignoreUnMatch) { + List<FieldColumnMatch> fieldColumnMatches = new ArrayList<>(); + if (Objects.nonNull(dependColumns) && Objects.nonNull(searchColumns)){ + // Convert the search column list to a map; subclasses may normalize the keys (e.g. lower-case them) + Map<String, FieldColumn> searchColumnMap = searchColumnListToMap(searchColumns); + for (int i = 0; i < dependColumns.size(); i++){ + FieldColumn dependColumn = dependColumns.get(i); + FieldColumn matchColumn = match(dependColumn, searchColumnMap); + if (Objects.nonNull(matchColumn)){ + fieldColumnMatches.add(new FieldColumnMatch(dependColumn, matchColumn)); + } else if (!ignoreUnMatch){ + // Fall back to pairing by position when no name match is found + if (!searchColumns.isEmpty()) { + fieldColumnMatches.add(new FieldColumnMatch(dependColumn, searchColumns.get(i % searchColumns.size()))); + } + } + } + } + return fieldColumnMatches; + } + + /** + * Convert search column list to map + * @param searchColumns search columns + * @return map + */ + protected Map<String, FieldColumn> searchColumnListToMap(List<FieldColumn> searchColumns){ + return searchColumns.stream() + .collect(Collectors.toMap(FieldColumn::getName, fieldColumn -> fieldColumn, (left, right) -> left)); + } + /** + * Match a single depend column against the search map + * @param dependColumn depend column + * @param searchColumns search column map + * @return matched field column (null when none) + */ + protected abstract FieldColumn match(FieldColumn dependColumn, Map<String, FieldColumn> searchColumns); +}
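With the lookup map now built through `searchColumnListToMap`, the key-normalizing subclasses below take effect. A small usage sketch (the column definitions are illustrative):

```java
import com.webank.wedatasphere.exchangis.job.server.render.transform.field.FieldColumn;
import com.webank.wedatasphere.exchangis.job.server.render.transform.field.mapping.match.FieldAllMatchIgnoreCaseStrategy;
import com.webank.wedatasphere.exchangis.job.server.render.transform.field.mapping.match.FieldColumnMatch;
import com.webank.wedatasphere.exchangis.job.server.render.transform.field.mapping.match.FieldMatchStrategy;

import java.util.Arrays;
import java.util.List;

public class FieldMatchSketch {
    public static void main(String[] args) {
        // Pair a source column with a sink column whose name differs only in case
        FieldMatchStrategy strategy = new FieldAllMatchIgnoreCaseStrategy();
        List<FieldColumn> source = Arrays.asList(new FieldColumn("USER_ID", "string", 0));
        List<FieldColumn> sink = Arrays.asList(new FieldColumn("user_id", "string", 0));
        List<FieldColumnMatch> matches = strategy.match(source, sink, true);
        // One match pairing USER_ID with user_id; with ignoreUnMatch=false, an unmatched
        // depend column falls back to the search column at the same index modulo list size
    }
}
```

diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/field/mapping/match/FieldAllMatchIgnoreCaseStrategy.java 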
b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/field/mapping/match/FieldAllMatchIgnoreCaseStrategy.java new file mode 100644 index 000000000..3cceb0911 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/field/mapping/match/FieldAllMatchIgnoreCaseStrategy.java @@ -0,0 +1,17 @@ +package com.webank.wedatasphere.exchangis.job.server.render.transform.field.mapping.match; + +/** + * Field all match ignore case strategy + */ +public class FieldAllMatchIgnoreCaseStrategy extends FieldAllMatchStrategy{ + public static final String ALL_MATCH_IGNORE_CASE = "ALL_MATCH_IGNORE_CASE"; + @Override + public String name() { + return ALL_MATCH_IGNORE_CASE; + } + + @Override + protected boolean ignoreCase() { + return true; + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/field/mapping/match/FieldAllMatchStrategy.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/field/mapping/match/FieldAllMatchStrategy.java new file mode 100644 index 000000000..68d781975 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/field/mapping/match/FieldAllMatchStrategy.java @@ -0,0 +1,43 @@ +package com.webank.wedatasphere.exchangis.job.server.render.transform.field.mapping.match; + +import com.webank.wedatasphere.exchangis.job.server.render.transform.field.FieldColumn; + +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.stream.Collectors; + +/** + * Field all match strategy + */ +public class FieldAllMatchStrategy extends AbstractFieldMatchStrategy{ + + public static final String ALL_MATCH = "ALL_MATCH"; + + @Override + protected Map searchColumnListToMap(List searchColumns) { + if (ignoreCase()){ + // Convert the column name to lower case + return searchColumns.stream() + .collect(Collectors.toMap(column -> column.getName().toLowerCase(Locale.ROOT), fieldColumn -> fieldColumn, (left, right) -> left)); + } else { + return super.searchColumnListToMap(searchColumns); + } + } + + @Override + protected FieldColumn match(FieldColumn dependColumn, Map searchColumns) { + String columnName = dependColumn.getName(); + return ignoreCase()? 
searchColumns.get(columnName.toLowerCase(Locale.ROOT)) + : searchColumns.get(columnName); + } + + @Override + public String name() { + return ALL_MATCH; + } + + protected boolean ignoreCase(){ + return false; + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/field/mapping/match/FieldCamelCaseMatchStrategy.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/field/mapping/match/FieldCamelCaseMatchStrategy.java new file mode 100644 index 000000000..d627469dd --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/field/mapping/match/FieldCamelCaseMatchStrategy.java @@ -0,0 +1,80 @@ +package com.webank.wedatasphere.exchangis.job.server.render.transform.field.mapping.match; + +import com.baomidou.mybatisplus.core.toolkit.StringUtils; +import com.webank.wedatasphere.exchangis.job.server.render.transform.field.FieldColumn; + +import java.util.Locale; +import java.util.Map; +import java.util.Objects; + +/** + * Field camelCase match strategy + */ +public class FieldCamelCaseMatchStrategy extends AbstractFieldMatchStrategy { + + public static final String CAMEL_CASE_MATCH = "CAMEL_CASE_MATCH"; + @Override + protected FieldColumn match(FieldColumn dependColumn, Map<String, FieldColumn> searchColumns) { + String columnName = dependColumn.getName(); + // First search by the full name + FieldColumn matchColumn = searchColumns.get(columnName); + if (Objects.isNull(matchColumn)){ + matchColumn = searchColumns.get(camelToUnderLine(columnName)); + if (Objects.isNull(matchColumn)){ + matchColumn = searchColumns.get(underLineToCamel(columnName)); + } + } + return matchColumn; + } + + @Override + public String name() { + return CAMEL_CASE_MATCH; + } + + /** + * Camel string to underline string, e.g. "userName" to "user_name" + * @param param param + * @return underline string + */ + private String camelToUnderLine(String param){ + if (StringUtils.isNotBlank(param)){ + int len = param.length(); + StringBuilder sb = new StringBuilder(len); + for (int i = 0; i < len; i++){ + if (Character.isUpperCase(param.charAt(i)) && i > 0){ + sb.append("_"); + } + sb.append(Character.toLowerCase(param.charAt(i))); + } + return sb.toString(); + } + return ""; + } + + /** + * Underline string to camel string, e.g. "user_name" to "userName" + * @param param param + * @return camel string + */ + private String underLineToCamel(String param){ + if (StringUtils.isNotBlank(param)){ + String temp = param.toLowerCase(Locale.ROOT); + int len = temp.length(); + StringBuilder sb = new StringBuilder(len); + for(int i = 0; i < len; ++i) { + char c = temp.charAt(i); + if (c == '_') { + ++i; + if (i < len) { + sb.append(Character.toUpperCase(temp.charAt(i))); + } + } else { + sb.append(c); + } + } + return sb.toString(); + } + return ""; + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/field/mapping/match/FieldColumnMatch.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/field/mapping/match/FieldColumnMatch.java new file mode 100644 index 000000000..c6b1a0475 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/field/mapping/match/FieldColumnMatch.java @@ -0,0 +1,39 @@ +package com.webank.wedatasphere.exchangis.job.server.render.transform.field.mapping.match; + +import 
diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/field/mapping/match/FieldMatchNamedStrategyFactory.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/field/mapping/match/FieldMatchNamedStrategyFactory.java
new file mode 100644
index 000000000..2f6c3b2ff
--- /dev/null
+++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/field/mapping/match/FieldMatchNamedStrategyFactory.java
@@ -0,0 +1,26 @@
+package com.webank.wedatasphere.exchangis.job.server.render.transform.field.mapping.match;
+
+import java.util.concurrent.ConcurrentHashMap;
+
+/**
+ * Default strategy factory, keyed by strategy name
+ */
+public class FieldMatchNamedStrategyFactory implements FieldMatchStrategyFactory {
+
+    private final ConcurrentHashMap<String, FieldMatchStrategy> strategies = new ConcurrentHashMap<>();
+
+    @Override
+    public FieldMatchStrategy getOrCreateStrategy(String strategyName) {
+        return strategies.get(strategyName);
+    }
+
+    /**
+     * Register a strategy entity
+     * @param strategyName strategy name
+     * @param strategy strategy
+     */
+    public void registerStrategy(String strategyName, FieldMatchStrategy strategy) {
+        this.strategies.putIfAbsent(strategyName, strategy);
+    }
+}
diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/field/mapping/match/FieldMatchStrategy.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/field/mapping/match/FieldMatchStrategy.java
new file mode 100644
index 000000000..baf2b799f
--- /dev/null
+++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/field/mapping/match/FieldMatchStrategy.java
@@ -0,0 +1,29 @@
+package com.webank.wedatasphere.exchangis.job.server.render.transform.field.mapping.match;
+
+import com.webank.wedatasphere.exchangis.job.server.render.transform.field.FieldColumn;
+import com.webank.wedatasphere.exchangis.job.server.render.transform.field.mapping.FieldMappingColumn;
+
+import java.util.List;
+
+/**
+ * Field match strategy
+ */
+public interface FieldMatchStrategy {
+    /**
+     * Strategy name
+     * @return string
+     */
+    String name();
+
+    /**
+     * Match entrance
+     * @param dependColumns depend columns
+     * @param searchColumns search columns
+     * @param ignoreUnMatch whether to ignore unmatched elements
+     * @return list of matched column pairs
+     */
+    List<FieldColumnMatch> match(List<FieldColumn> dependColumns, List<FieldColumn> searchColumns, boolean ignoreUnMatch);
+
+    default int getPriority() {
+        return 1;
+    }
+}
diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/field/mapping/match/FieldMatchStrategyFactory.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/field/mapping/match/FieldMatchStrategyFactory.java
new file mode 100644
index 000000000..8750485cd
--- /dev/null
+++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/field/mapping/match/FieldMatchStrategyFactory.java
@@ -0,0 +1,13 @@
+package com.webank.wedatasphere.exchangis.job.server.render.transform.field.mapping.match;
+
+/**
+ * Field match strategy factory
+ */
+public interface FieldMatchStrategyFactory {
+    /**
+     * Get (or lazily create) a strategy by name
+     * @param strategyName strategy name
+     * @return strategy instance, or null if unknown
+     */
+    FieldMatchStrategy getOrCreateStrategy(String strategyName);
+}
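A wiring sketch for the strategy factory (the registration site is assumed; this patch only defines the container):

    FieldMatchNamedStrategyFactory factory = new FieldMatchNamedStrategyFactory();
    factory.registerStrategy(FieldAllMatchStrategy.ALL_MATCH, new FieldAllMatchStrategy());
    factory.registerStrategy(FieldAllMatchIgnoreCaseStrategy.ALL_MATCH_IGNORE_CASE, new FieldAllMatchIgnoreCaseStrategy());
    factory.registerStrategy(FieldCamelCaseMatchStrategy.CAMEL_CASE_MATCH, new FieldCamelCaseMatchStrategy());

    // Despite its name, getOrCreateStrategy only looks up registered entries
    // and returns null for unknown names.
    FieldMatchStrategy strategy = factory.getOrCreateStrategy(FieldCamelCaseMatchStrategy.CAMEL_CASE_MATCH);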
diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/processor/ProcessorRequestVo.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/processor/ProcessorRequestVo.java
new file mode 100644
index 000000000..531e3a596
--- /dev/null
+++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/processor/ProcessorRequestVo.java
@@ -0,0 +1,42 @@
+package com.webank.wedatasphere.exchangis.job.server.render.transform.processor;
+
+import com.fasterxml.jackson.annotation.JsonInclude;
+import com.webank.wedatasphere.exchangis.common.validator.groups.InsertGroup;
+
+import javax.validation.constraints.NotNull;
+
+/**
+ * Request object of processor
+ */
+@JsonInclude(JsonInclude.Include.NON_EMPTY)
+public class ProcessorRequestVo {
+
+    /**
+     * Code content
+     */
+    private String code;
+
+    /**
+     * Job id
+     */
+    @NotNull(groups = {InsertGroup.class}, message = "Job id cannot be null (任务ID不能为空)")
+    private String jobId;
+
+    public String getCode() {
+        return code;
+    }
+
+    public void setCode(String code) {
+        this.code = code;
+    }
+
+    public String getJobId() {
+        return jobId;
+    }
+
+    public void setJobId(String jobId) {
+        this.jobId = jobId;
+    }
+}
diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/processor/ProcessorTransformer.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/processor/ProcessorTransformer.java
new file mode 100644
index 000000000..45e62d112
--- /dev/null
+++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/processor/ProcessorTransformer.java
@@ -0,0 +1,68 @@
+package com.webank.wedatasphere.exchangis.job.server.render.transform.processor;
+
+import com.webank.wedatasphere.exchangis.job.server.render.transform.TransformRule;
+import com.webank.wedatasphere.exchangis.job.server.render.transform.Transformer;
+import com.webank.wedatasphere.exchangis.job.server.render.transform.TransformRequestVo;
+import com.webank.wedatasphere.exchangis.job.server.render.transform.TransformSettings;
+import org.apache.commons.io.IOUtils;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.linkis.common.conf.CommonVars;
+import org.apache.linkis.datasourcemanager.common.exception.JsonErrorException;
+import org.apache.linkis.datasourcemanager.common.util.PatternInjectUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.net.URL;
+import java.nio.charset.StandardCharsets;
+import java.util.Objects;
+
+/**
+ * Transformer of processor type
+ */
+public class ProcessorTransformer implements Transformer {
+
+    private static final Logger LOG = LoggerFactory.getLogger(ProcessorTransformer.class);
+
+    public static final CommonVars<String> CODE_TEMPLATE_PATH = CommonVars.apply("wds.exchangis.job.render.transform.processor.code-template.path", "transform-processor-templates");
+
+    public static final CommonVars<String> CODE_TEMPLATE_NAME = CommonVars.apply("wds.exchangis.job.render.transform.processor.code-template.name", "${engine}-processor.${type}");
+
+    @Override
+    public String name() {
+        return TransformRule.Types.PROCESSOR.name();
+    }
+
+    @Override
+    public TransformSettings getSettings(TransformRequestVo requestVo) {
+        return null;
+    }
+
+    /**
+     * Fetch the code template for an engine
+     * @param engine engine name
+     * @param codeType code type suffix
+     * @return template content, or null when the template cannot be resolved
+     */
+    public String getCodeTemplate(String engine, String codeType) {
+        ClassLoader currentClassLoader = Thread.currentThread().getContextClassLoader();
+        String templateName = null;
+        try {
+            templateName = PatternInjectUtils.inject(CODE_TEMPLATE_NAME.getValue(), new String[]{engine, codeType});
+        } catch (JsonErrorException e) {
+            LOG.warn("Unable to generate the template name", e);
+        }
+        if (StringUtils.isNotBlank(templateName)) {
+            URL resource = currentClassLoader.getResource(CODE_TEMPLATE_PATH.getValue() + IOUtils.DIR_SEPARATOR_UNIX + templateName);
+            if (Objects.nonNull(resource)) {
+                try {
+                    // NOTE: reads through the file system path, so the template has to live
+                    // in an exploded directory rather than inside a packed jar
+                    return IOUtils.toString(new FileInputStream(resource.getPath()), StandardCharsets.UTF_8);
+                } catch (IOException e) {
+                    LOG.warn("Unable to load code template from: {}", resource.getPath(), e);
+                }
+            }
+        }
+        return null;
+    }
+}
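How the template lookup above resolves with the default configuration, assuming PatternInjectUtils.inject fills the placeholders positionally (engine first, then code type):

    // getCodeTemplate("datax", "java") searches the context class loader for
    //   transform-processor-templates/datax-processor.java
    // and returns the file content as a UTF-8 string, or null when either the
    // name injection or the resource lookup fails.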
diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/processor/TransformProcessor.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/processor/TransformProcessor.java
new file mode 100644
index 000000000..7a009be49
--- /dev/null
+++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/processor/TransformProcessor.java
@@ -0,0 +1,131 @@
+package com.webank.wedatasphere.exchangis.job.server.render.transform.processor;
+
+import com.webank.wedatasphere.exchangis.common.linkis.bml.BmlResource;
+
+import java.util.Objects;
+
+/**
+ * Processor entity
+ */
+public class TransformProcessor {
+    /**
+     * Id
+     */
+    private Long id;
+
+    /**
+     * Job id
+     */
+    private Long jobId;
+
+    /**
+     * Code content
+     */
+    private String codeContent;
+
+    /**
+     * Code language
+     */
+    private String codeLanguage = "java";
+
+    /**
+     * BML resource id
+     */
+    private String codeBmlResourceId;
+
+    /**
+     * BML version
+     */
+    private String codeBmlVersion;
+
+    /**
+     * BML resource
+     */
+    private BmlResource bmlResource;
+
+    /**
+     * Creator
+     */
+    private String creator;
+
+    public TransformProcessor() {
+
+    }
+
+    public TransformProcessor(ProcessorRequestVo requestVo) {
+        this.jobId = Long.valueOf(requestVo.getJobId());
+        this.codeContent = requestVo.getCode();
+    }
+
+    public Long getId() {
+        return id;
+    }
+
+    public void setId(Long id) {
+        this.id = id;
+    }
+
+    public Long getJobId() {
+        return jobId;
+    }
+
+    public void setJobId(Long
jobId) { + this.jobId = jobId; + } + + public String getCodeContent() { + return codeContent; + } + + public void setCodeContent(String codeContent) { + this.codeContent = codeContent; + } + + public String getCodeLanguage() { + return codeLanguage; + } + + public void setCodeLanguage(String codeLanguage) { + this.codeLanguage = codeLanguage; + } + + public BmlResource getBmlResource() { + if (Objects.isNull(bmlResource) && Objects.nonNull(this.codeBmlResourceId) + && Objects.nonNull(this.codeBmlVersion)){ + this.bmlResource = new BmlResource(this.codeBmlResourceId, this.codeBmlVersion); + } + return bmlResource; + } + + public void setBmlResource(BmlResource bmlResource) { + this.bmlResource = bmlResource; + if (Objects.nonNull(bmlResource)){ + this.codeBmlResourceId = bmlResource.getResourceId(); + this.codeBmlVersion = bmlResource.getVersion(); + } + } + + public String getCreator() { + return creator; + } + + public void setCreator(String creator) { + this.creator = creator; + } + + public String getCodeBmlResourceId() { + return codeBmlResourceId; + } + + public void setCodeBmlResourceId(String codeBmlResourceId) { + this.codeBmlResourceId = codeBmlResourceId; + } + + public String getCodeBmlVersion() { + return codeBmlVersion; + } + + public void setCodeBmlVersion(String codeBmlVersion) { + this.codeBmlVersion = codeBmlVersion; + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/restful/ExchangisJobRestfulApi.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/restful/ExchangisJobRestfulApi.java new file mode 100644 index 000000000..73929af81 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/restful/ExchangisJobRestfulApi.java @@ -0,0 +1,456 @@ +package com.webank.wedatasphere.exchangis.job.server.restful; + +import com.webank.wedatasphere.exchangis.common.AuditLogUtils; +import com.webank.wedatasphere.exchangis.common.UserUtils; +import com.webank.wedatasphere.exchangis.common.enums.OperateTypeEnum; +import com.webank.wedatasphere.exchangis.common.enums.TargetTypeEnum; +import com.webank.wedatasphere.exchangis.common.pager.PageResult; +import com.webank.wedatasphere.exchangis.common.validator.groups.InsertGroup; +import com.webank.wedatasphere.exchangis.datasource.core.exception.ExchangisDataSourceException; +import com.webank.wedatasphere.exchangis.job.domain.OperationType; +import com.webank.wedatasphere.exchangis.job.enums.EngineTypeEnum; +import com.webank.wedatasphere.exchangis.job.launcher.ExchangisLauncherConfiguration; +import com.webank.wedatasphere.exchangis.job.server.exception.ExchangisJobServerException; +import com.webank.wedatasphere.exchangis.job.server.service.JobFuncService; +import com.webank.wedatasphere.exchangis.job.server.service.JobInfoService; +import com.webank.wedatasphere.exchangis.job.server.utils.JobAuthorityUtils; +import com.webank.wedatasphere.exchangis.job.server.vo.JobFunction; +import com.webank.wedatasphere.exchangis.job.vo.ExchangisJobQueryVo; +import com.webank.wedatasphere.exchangis.job.vo.ExchangisJobVo; +import org.apache.linkis.server.Message; +import org.apache.linkis.server.security.ProxyUserSSOUtils; +import org.apache.linkis.server.security.SecurityFilter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.validation.BindingResult; +import org.springframework.validation.annotation.Validated; +import 
org.springframework.web.bind.annotation.*; +import scala.Option; + +import javax.annotation.Resource; +import javax.servlet.http.HttpServletRequest; +import javax.validation.groups.Default; +import java.util.ArrayList; +import java.util.List; +import java.util.Objects; + +/** + * The basic controller of Exchangis job + */ +@RestController +@RequestMapping(value = "dss/exchangis/main/job", produces = {"application/json;charset=utf-8"}) +public class ExchangisJobRestfulApi { + + private static final Logger LOG = LoggerFactory.getLogger(ExchangisJobRestfulApi.class); + + /** + * Job service + */ + @Resource + private JobInfoService jobInfoService; + + @Resource + private JobFuncService jobFuncService; + + /** + * Query job in page + * + * @param projectId project id + * @param jobType job type + * @param name name + * @param current current + * @param size size + * @param request request + * @return message + */ + @RequestMapping(value = "", method = RequestMethod.GET) + public Message getJobList(@RequestParam(value = "projectId") Long projectId, + @RequestParam(value = "jobType", required = false) String jobType, + @RequestParam(value = "name", required = false) String name, + @RequestParam(value = "current", required = false) int current, + @RequestParam(value = "size", required = false) int size, + HttpServletRequest request) { + String newName = name.replaceAll("_", "/_"); + ExchangisJobQueryVo queryVo = new ExchangisJobQueryVo( + projectId, jobType, newName, current, size + ); + String loginUser = UserUtils.getLoginUser(request); + try { + if (!JobAuthorityUtils.hasProjectAuthority(loginUser, projectId, OperationType.JOB_QUERY)) { + return Message.error("You have no permission to create Job (没有查询任务权限)"); + } + + queryVo.setCreateUser(loginUser); + PageResult pageResult = jobInfoService.queryJobList(queryVo); + return Message.ok().data("total", pageResult.getTotal()).data("result", pageResult.getList()); + } catch (Exception e) { + LOG.error("Fail to query job list for user {}", loginUser, e); + return Message.error("Failed to query job list (获取任务列表失败)"); + } + } + + /** + * Engine list + * + * @return message + */ + @RequestMapping(value = "/engineType", method = RequestMethod.GET) + public Message getEngineList() { + // TODO limit the engine type in exchangis +// return Message.ok().data("result", EngineTypeEnum.values()); + return Message.ok().data("result", new EngineTypeEnum[]{EngineTypeEnum.SQOOP, EngineTypeEnum.DATAX}); + } + + /** + * Executor + * + * @return message + */ + @RequestMapping(value = "/Executor", method = RequestMethod.GET) + public Message getExecutor(HttpServletRequest request) { + Option proxyUserUsername = + ProxyUserSSOUtils.getProxyUserUsername(request); + String loginUser = UserUtils.getLoginUser(request); + List executor = new ArrayList<>(); + if (proxyUserUsername.isDefined()) { + executor.add(proxyUserUsername.get()); + } else { + executor.add(loginUser); + } + executor.add("hadoop"); + return Message.ok().data("result", executor); + } + + + /** + * function + * + * @return message + */ + @RequestMapping(value = "/func/{funcType:\\w+}", method = RequestMethod.GET) + public Message vjobFuncList(@PathVariable("funcType") String funcType, + HttpServletRequest request) { + return jobFuncList("DATAX", funcType, request); + } + + @RequestMapping(value = "/func/{tabName:\\w+}/{funcType:\\w+}", method = RequestMethod.GET) + public Message jobFuncList(@PathVariable("tabName") String tabName, + @PathVariable("funcType") String funcType, + HttpServletRequest request) 
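+    // tabName must name an engine: the valueOf check below rejects anything that is
+    // not an EngineTypeEnum, and the single-segment variant above defaults the tab
+    // to "DATAX".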
{ + + Message response = Message.ok(); + try { + //Limit that the tab should be an engine tab + EngineTypeEnum.valueOf(tabName.toUpperCase()); + JobFunction.FunctionType funcTypeEnum = JobFunction.FunctionType.valueOf(funcType.toUpperCase()); + List functionList = jobFuncService.getFunctions(tabName, funcTypeEnum); + return Message.ok().data("data", functionList); + } catch (Exception e) { + String message = "Fail to get function (获取函数失败)"; + LOG.error(message, e); + response = Message.error(message); + } + return response; + } + + /** + * Create job + * + * @param request http request + * @param exchangisJobVo exchangis job vo + * @return message + */ + @RequestMapping(value = "", method = RequestMethod.POST) + public Message createJob( + @Validated({InsertGroup.class, Default.class}) @RequestBody ExchangisJobVo exchangisJobVo, + BindingResult result, + HttpServletRequest request) { + if (ExchangisLauncherConfiguration.LIMIT_INTERFACE.getValue()) { + return Message.error("You have no permission to create Job (没有创建任务权限)"); + } + if (result.hasErrors()) { + return Message.error(result.getFieldErrors().get(0).getDefaultMessage()); + } + String loginUser = UserUtils.getLoginUser(request); + String oringinUser = SecurityFilter.getLoginUsername(request); + exchangisJobVo.setCreateUser(loginUser); + Message response = Message.ok(); + + try { + if (!JobAuthorityUtils.hasProjectAuthority(loginUser, exchangisJobVo.getProjectId(), OperationType.JOB_ALTER)) { + return Message.error("You have no permission to create Job (没有创建任务权限)"); + } + + //Check whether the job with the same name exists in current project + List jobs = jobInfoService.getByNameAndProjectId(exchangisJobVo.getJobName(), exchangisJobVo.getProjectId()); + if (!Objects.isNull(jobs) && jobs.size() > 0) { + return Message.error("A task with the same name exists under the current project (当前项目下存在同名任务)"); + } + + response.data("result", jobInfoService.createJob(exchangisJobVo)); + } catch (Exception e) { + String message = "Fail to create job: " + exchangisJobVo.getJobName() + " (创建任务失败)"; + LOG.error(message, e); + response = Message.error(message); + } + LOG.info("start to print audit log"); + AuditLogUtils.printLog(oringinUser, loginUser, TargetTypeEnum.JOB,"0", "Job name is: " + exchangisJobVo.getJobName(), OperateTypeEnum.CREATE,request); + return response; + } + + /** + * Copy job + * + * @param sourceJobId source job id + * @param exchangisJobVo job vo + * @return message + */ + @RequestMapping(value = "/{sourceJobId:\\d+}/copy", method = RequestMethod.POST) + public Message copyJob(@PathVariable("sourceJobId") Long sourceJobId, + @Validated @RequestBody ExchangisJobVo exchangisJobVo, + BindingResult result, HttpServletRequest request) { + if (ExchangisLauncherConfiguration.LIMIT_INTERFACE.getValue()) { + return Message.error("You have no permission to update (没有复制权限)"); + } + if (result.hasErrors()) { + return Message.error(result.getFieldErrors().get(0).getDefaultMessage()); + } + String loginUser = UserUtils.getLoginUser(request); + exchangisJobVo.setId(sourceJobId); + exchangisJobVo.setModifyUser(loginUser); + Message response = Message.ok(); + try { + if (!JobAuthorityUtils.hasProjectAuthority(loginUser, exchangisJobVo.getProjectId(), OperationType.JOB_ALTER)) { + return Message.error("You have no permission to update (没有作业复制权限)"); + } + response.data("result", jobInfoService.copyJob(exchangisJobVo)); + } catch (Exception e) { + String message = "Fail to update job: " + exchangisJobVo.getJobName() + " (复制任务失败)"; + 
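+            // The recurring controller pattern: build an ok() Message up front, swap it
+            // for Message.error(...) on failure, and log the bilingual reason before returning.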
LOG.error(message, e);
+            response = Message.error(message);
+        }
+        return response;
+        // TODO return Message.error("Function will be supported in next version (该功能将在下版本支持)");
+    }
+
+    /**
+     * Update job
+     *
+     * @param id job id
+     * @param exchangisJobVo job vo
+     * @return message
+     */
+    @RequestMapping(value = "/{id:\\d+}", method = RequestMethod.PUT)
+    public Message updateJob(@PathVariable("id") Long id,
+                             @Validated @RequestBody ExchangisJobVo exchangisJobVo,
+                             BindingResult result, HttpServletRequest request) {
+        if (ExchangisLauncherConfiguration.LIMIT_INTERFACE.getValue()) {
+            return Message.error("You have no permission to update (没有更新权限)");
+        }
+        if (result.hasErrors()) {
+            return Message.error(result.getFieldErrors().get(0).getDefaultMessage());
+        }
+        String loginUser = UserUtils.getLoginUser(request);
+        String originUser = SecurityFilter.getLoginUsername(request);
+        exchangisJobVo.setId(id);
+        exchangisJobVo.setModifyUser(loginUser);
+        Message response = Message.ok();
+        try {
+            if (!JobAuthorityUtils.hasJobAuthority(loginUser, id, OperationType.JOB_ALTER)) {
+                return Message.error("You have no permission to update (没有更新任务权限)");
+            }
+            response.data("result", jobInfoService.updateJob(exchangisJobVo));
+        } catch (Exception e) {
+            String message = "Fail to update job: " + exchangisJobVo.getJobName() + " (更新任务失败)";
+            LOG.error(message, e);
+            response = Message.error(message);
+        }
+        AuditLogUtils.printLog(originUser, loginUser, TargetTypeEnum.JOB, exchangisJobVo.getId().toString(), "Job name is: " + exchangisJobVo.getJobName(), OperateTypeEnum.UPDATE, request);
+        return response;
+    }
+
+    /**
+     * Delete job
+     *
+     * @param id id
+     * @param request http request
+     * @return message
+     */
+    @RequestMapping(value = "/{id}", method = RequestMethod.DELETE)
+    public Message deleteJob(@PathVariable("id") Long id, HttpServletRequest request) {
+        if (ExchangisLauncherConfiguration.LIMIT_INTERFACE.getValue()) {
+            return Message.error("You have no permission to delete (没有删除权限)");
+        }
+        String loginUser = UserUtils.getLoginUser(request);
+        Message response = Message.ok("job deleted");
+        ExchangisJobVo jobVo = jobInfoService.getJob(id, true);
+        try {
+            if (!JobAuthorityUtils.hasJobAuthority(loginUser, id, OperationType.JOB_ALTER)) {
+                return Message.error("You have no permission to delete (没有删除任务权限)");
+            }
+            jobInfoService.deleteJob(id);
+        } catch (Exception e) {
+            String message = "Fail to delete job [ id: " + id + "] (删除任务失败)";
+            LOG.error(message, e);
+            response = Message.error(message);
+        }
+        String originUser = SecurityFilter.getLoginUsername(request);
+        AuditLogUtils.printLog(originUser, loginUser, TargetTypeEnum.JOB, id.toString(), "Job name is: " + jobVo.getJobName(), OperateTypeEnum.DELETE, request);
+        return response;
+    }
+
+    /**
+     * Get job
+     *
+     * @param request http request
+     * @param id id
+     * @return message
+     */
+    @RequestMapping(value = "/{id}", method = RequestMethod.GET)
+    public Message getJob(HttpServletRequest request, @PathVariable("id") Long id) {
+        Message response = Message.ok();
+        try {
+            LOG.info("Request: {}", request);
+
+            String loginUser = UserUtils.getLoginUser(request);
+
+            ExchangisJobVo job = jobInfoService.getJob(id, true);
+            if (!JobAuthorityUtils.hasJobAuthority(loginUser, id, OperationType.JOB_QUERY)) {
+                return Message.error("You have no permission to get job (没有获取任务权限)");
+            }
+            job = jobInfoService.getDecoratedJob(request, id);
+            response.data("result", job);
+        } catch (Exception e) {
+            String message = "Fail to get job detail (查询任务失败)";
+            if
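+            // unwrap the wrapped server exception so its reason reaches the caller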
(e.getCause() instanceof ExchangisJobServerException) { + message += ", reason: " + e.getCause().getMessage(); + } + LOG.error(message, e); + response = Message.error(message); + } + return response; + } + + /** + * Get all sub job list + * @param request + * @param projectId + * @return + */ +// @Deprecated +// @RequestMapping(value = "/subJob/list", method = RequestMethod.GET) +// public Message getSubJobList(HttpServletRequest request, @RequestParam(value = "projectId") Long projectId) { +// Message response = Message.ok(); +// String loginUser = UserUtils.getLoginUser(request); +// try { +// List jobList = jobInfoService.getSubJobList(request, projectId); +// if (!JobAuthorityUtils.hasAuthority(loginUser, projectId, OperationType.JOB_QUERY)) { +// return Message.error("You have no permission to create Job (没有查询任务权限)"); +// } +// response.data("result", jobList); +// } catch (Exception e) { +// String message = "Fail to get job detail (查询所有子任务列表失败)"; +// if (e.getCause() instanceof ExchangisJobServerException) { +// message += ", reason: " + e.getCause().getMessage(); +// } +// LOG.error(message, e); +// response = Message.error(message); +// } +// return response; +// } + + /** + * Get job list + * + * @param projectId + * @param jobName + * @return + */ + @RequestMapping(value = "/getJob/list", method = RequestMethod.GET) + public Message getByNameWithProjectId(HttpServletRequest request, + @RequestParam(value = "projectId") Long projectId, + @RequestParam(value = "jobName", required = false) String jobName) { + Message response = Message.ok(); + String loginUser = UserUtils.getLoginUser(request); + try { + if (!JobAuthorityUtils.hasProjectAuthority(loginUser, projectId, OperationType.JOB_QUERY)) { + return Message.error("You have no permission to create Job (没有查询任务权限)"); + } + + List jobs = jobInfoService.getByNameWithProjectId(jobName, projectId); + response.data("result", jobs); + } catch (Exception e) { + String message = "Fail to get job detail (查询任务失败)"; + if (e.getCause() instanceof ExchangisJobServerException) { + message += ", reason: " + e.getCause().getMessage(); + } + LOG.error(message, e); + response = Message.error(message); + } + return response; + } + + /** + * Save the job configuration + * + * @param id id + * @param jobVo job vo + * @return message + */ + @RequestMapping(value = "/{id}/config", method = RequestMethod.PUT) + public Message saveJobConfig(@PathVariable("id") Long id, + @RequestBody ExchangisJobVo jobVo, HttpServletRequest request) { + if (ExchangisLauncherConfiguration.LIMIT_INTERFACE.getValue()) { + return Message.error("You have no permission to save content (没有保存任务权限)"); + } + jobVo.setId(id); + jobVo.setModifyUser(UserUtils.getLoginUser(request)); + Message response = Message.ok(); + String loginUser = UserUtils.getLoginUser(request); + String oringinUser = SecurityFilter.getLoginUsername(request); + try { + if (!JobAuthorityUtils.hasJobAuthority(loginUser, id, OperationType.JOB_ALTER)) { + return Message.error("You have no permission to update (没有更新任务权限)"); + } + ExchangisJobVo exchangisJob = jobInfoService.updateJobConfig(jobVo); + response.data("id", exchangisJob.getId()); + } catch (Exception e) { + String message = "Fail to save the job configuration (保存任务配置失败)"; + LOG.error(message, e); + response = Message.error(message); + } + AuditLogUtils.printLog(oringinUser, loginUser, TargetTypeEnum.JOB,id.toString(), "Job id is: " + id.toString(), OperateTypeEnum.UPDATE,request); + return response; + } + + @RequestMapping(value = "/{id}/content", 
method = RequestMethod.PUT) + public Message saveSubJobs(@PathVariable("id") Long id, + @RequestBody ExchangisJobVo jobVo, HttpServletRequest request) { + if (ExchangisLauncherConfiguration.LIMIT_INTERFACE.getValue()) { + return Message.error("You have no permission to save content (没有保存任务权限)"); + } + jobVo.setId(id); + jobVo.setModifyUser(UserUtils.getLoginUser(request)); + String loginUser = UserUtils.getLoginUser(request); + String oringinUser = SecurityFilter.getLoginUsername(request); + Message response = Message.ok(); + try { + if (!JobAuthorityUtils.hasJobAuthority(loginUser, id, OperationType.JOB_ALTER)) { + return Message.error("You have no permission to save content (没有保存任务权限)"); + } + ExchangisJobVo exchangisJob = jobInfoService.updateJobContent(jobVo); + response.data("id", exchangisJob.getId()); + } catch (Exception e) { + String message = "Fail to save the job content (保存任务内容失败)"; + if (e.getCause() instanceof ExchangisJobServerException + || e.getCause() instanceof ExchangisDataSourceException) { + message += ", reason: " + e.getCause().getMessage(); + } + LOG.error(message, e); + response = Message.error(message); + } + AuditLogUtils.printLog(oringinUser, loginUser, TargetTypeEnum.JOB,id.toString(), "Job id is: " + id.toString(), OperateTypeEnum.UPDATE,request); + return response; + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/restful/ExchangisMetricsRestfulApi.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/restful/ExchangisMetricsRestfulApi.java new file mode 100644 index 000000000..dd1698aab --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/restful/ExchangisMetricsRestfulApi.java @@ -0,0 +1,57 @@ +package com.webank.wedatasphere.exchangis.job.server.restful; + +import com.webank.wedatasphere.exchangis.job.server.service.ExchangisMetricsService; +import org.apache.linkis.server.Message; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestMethod; +import org.springframework.web.bind.annotation.RestController; + +import javax.servlet.http.HttpServletRequest; + +@RestController +@RequestMapping(value = "dss/exchangis/main", produces = {"application/json;charset=utf-8"}) +public class ExchangisMetricsRestfulApi { + + private final ExchangisMetricsService exchangisMetricsService; + + @Autowired + public ExchangisMetricsRestfulApi(ExchangisMetricsService exchangisMetricsService) { + this.exchangisMetricsService = exchangisMetricsService; + } + + // get task state metrics + @RequestMapping( value = "metrics/taskstate", method = RequestMethod.GET) + public Message getTaskStateMetrics(HttpServletRequest request) throws Exception { + return this.exchangisMetricsService.getTaskStateMetrics(request); + } + + // get task process metrics + @RequestMapping( value = "metrics/taskprocess", method = RequestMethod.GET) + public Message getTaskProcessMetrics(HttpServletRequest request) throws Exception { + return this.exchangisMetricsService.getTaskProcessMetrics(request); + } + + // get datasource flow metrics + @RequestMapping( value = "metrics/datasourceflow", method = RequestMethod.GET) + public Message getDataSourceFlowMetrics(HttpServletRequest request) throws Exception { + return this.exchangisMetricsService.getDataSourceFlowMetrics(request); + } + + // get engine (sqoop 
datax linkis etc.) resource metrics + @RequestMapping( value = "metrics/engineresource", method = RequestMethod.GET) + public Message getEngineResourceMetrics(HttpServletRequest request) throws Exception { + return this.exchangisMetricsService.getEngineResourceMetrics(request); + } + + @RequestMapping( value = "metrics/engineresourcecpu", method = RequestMethod.GET) + public Message getEngineResourceCpuMetrics(HttpServletRequest request) throws Exception { + return this.exchangisMetricsService.getEngineResourceCpuMetrics(request); + } + + @RequestMapping( value = "metrics/engineresourcemem", method = RequestMethod.GET) + public Message getEngineResourceMemMetrics(HttpServletRequest request) throws Exception { + return this.exchangisMetricsService.getEngineResourceMemMetrics(request); + } + +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/restful/ExchangisTaskRestfulApi.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/restful/ExchangisTaskRestfulApi.java new file mode 100644 index 000000000..7c4aab099 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/restful/ExchangisTaskRestfulApi.java @@ -0,0 +1,16 @@ +package com.webank.wedatasphere.exchangis.job.server.restful; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.web.bind.annotation.*; + +/** + * The type Exchangis task controller. + * + * @date 2021/10/13 + */ +@RestController +@RequestMapping(value = "dss/exchangis/main/tasks", produces = {"application/json;charset=utf-8"}) +public class ExchangisTaskRestfulApi { + private static final Logger LOG = LoggerFactory.getLogger(ExchangisTaskRestfulApi.class); + +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/restful/configuration/ExchangisJobTransformRestfulApi.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/restful/configuration/ExchangisJobTransformRestfulApi.java new file mode 100644 index 000000000..0778adc4a --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/restful/configuration/ExchangisJobTransformRestfulApi.java @@ -0,0 +1,166 @@ +package com.webank.wedatasphere.exchangis.job.server.restful.configuration; + +import com.webank.wedatasphere.exchangis.common.UserUtils; +import com.webank.wedatasphere.exchangis.common.validator.groups.InsertGroup; +import com.webank.wedatasphere.exchangis.common.validator.groups.UpdateGroup; +import com.webank.wedatasphere.exchangis.datasource.core.utils.Json; +import com.webank.wedatasphere.exchangis.job.enums.EngineTypeEnum; +import com.webank.wedatasphere.exchangis.job.server.render.transform.*; +import com.webank.wedatasphere.exchangis.job.server.render.transform.processor.ProcessorRequestVo; +import com.webank.wedatasphere.exchangis.job.server.render.transform.processor.ProcessorTransformer; +import com.webank.wedatasphere.exchangis.job.server.render.transform.processor.TransformProcessor; +import com.webank.wedatasphere.exchangis.job.server.service.JobTransformService; +import org.apache.linkis.server.Message; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.validation.BindingResult; +import org.springframework.validation.annotation.Validated; +import org.springframework.web.bind.annotation.*; + +import 
javax.annotation.Resource; +import javax.servlet.http.HttpServletRequest; +import javax.validation.groups.Default; +import java.util.Locale; +import java.util.Map; +import java.util.Objects; + +@RestController +@RequestMapping(value = "dss/exchangis/main/job/transform", produces = {"application/json;charset=utf-8"}) +public class ExchangisJobTransformRestfulApi { + private static final Logger LOG = LoggerFactory.getLogger(ExchangisJobTransformRestfulApi.class); + + @Resource + private JobTransformService transformService; + + @Resource + private TransformerContainer transformerContainer; + + @RequestMapping(value = "/settings", method = RequestMethod.POST) + public Message settings(@Validated @RequestBody TransformRequestVo params, + BindingResult result, HttpServletRequest request){ + if (result.hasErrors()){ + return Message.error(result.getFieldErrors().get(0).getDefaultMessage()); + } + String userName = UserUtils.getLoginUser(request); + params.setOperator(userName); + Message response = Message.ok(); + try{ + Map settingsMap = this.transformService.getSettings(params); + if (Objects.nonNull(settingsMap) && !settingsMap.isEmpty()){ + response.data("types", settingsMap.keySet()); + Message finalResponse = response; + settingsMap.values().forEach(settings -> { + // Inject the settings' params + Map settingMap = Json.convert(settings, Map.class, String.class, Object.class); + if (Objects.nonNull(settingMap)) { + settingMap.forEach(finalResponse::data); + } + }); + } + + } catch (Exception e){ + String message = "Fail to get transformer settings (加载转换器(映射/处理器)配置失败)"; + LOG.error(message, e); + response = Message.error(message); + } + return response; + } + + /** + * Create processor + * @param requestVo request vo + * @param result result + * @param request request + * @return + */ + @RequestMapping(value = "/processor/code_content", method = RequestMethod.POST) + public Message createProcessor(@Validated({InsertGroup.class, Default.class}) @RequestBody ProcessorRequestVo requestVo, + BindingResult result, HttpServletRequest request){ + if (result.hasErrors()){ + return Message.error(result.getFieldErrors().get(0).getDefaultMessage()); + } + String userName = UserUtils.getLoginUser(request); + // TODO validate the authority + TransformProcessor processor = new TransformProcessor(); + processor.setJobId(Long.valueOf(requestVo.getJobId())); + processor.setCodeContent(requestVo.getCode()); + processor.setCreator(userName); + Message response = Message.ok(); + try{ + response.data("proc_code_id", this.transformService.saveProcessor(processor)); + } catch (Exception e){ + String message = "Fail to create transform processor (创建处理器内容失败)"; + LOG.error(message, e); + response = Message.error(message); + } + return response; + } + + /** + * Update processor + * @param requestVo request vo + * @param result result + * @param procCodeId proc code id + * @param request + * @return + */ + @RequestMapping(value = "/processor/code_content/{proc_code_id:\\w+}", method = RequestMethod.PUT) + public Message updateProcessor(@Validated({UpdateGroup.class, Default.class}) @RequestBody ProcessorRequestVo requestVo + , BindingResult result, @PathVariable("proc_code_id")String procCodeId, HttpServletRequest request){ + if (result.hasErrors()){ + return Message.error(result.getFieldErrors().get(0).getDefaultMessage()); + } + String userName = UserUtils.getLoginUser(request); + // TODO validate the authority + TransformProcessor processor = this.transformService.getProcessorInfo(procCodeId); + if 
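+        // a missing processor means no code was ever saved under this code id,
+        // so reply with the bilingual error instead of updating blindly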
(Objects.isNull(processor)){ + return Message.error("Unable find the transform processor with code_id: [" + procCodeId + "] (处理器不存在)"); + } + processor.setCodeContent(requestVo.getCode()); + Message response = Message.ok(); + try{ + response.data("proc_code_id", this.transformService.updateProcessor(processor)); + } catch (Exception e){ + String message = "Fail to update transform processor (更新处理器内容失败)"; + LOG.error(message, e); + response = Message.error(message); + } + return response; + } + + @RequestMapping(value = "/processor/code_content/{proc_code_id:\\w+}", method = RequestMethod.GET) + public Message getProcessor(@PathVariable("proc_code_id")String procCodeId, HttpServletRequest request){ + String userName = UserUtils.getLoginUser(request); + // TODO validate the authority + Message response = Message.ok(); + try { + TransformProcessor processor = this.transformService.getProcessorWithCode(procCodeId); + response.data("code", Objects.nonNull(processor)? processor.getCodeContent() : null); + } catch (Exception e){ + String message = "Fail to fetch code content of transform processor (获取处理器内容失败)"; + LOG.error(message, e); + response = Message.error(message); + } + return response; + } + + @RequestMapping(value = "/processor/code_template", method = RequestMethod.GET) + public Message getProcTemplate(@RequestParam(value = "engine", required = false)String engine){ + // Accept all users to request + Message response = Message.ok(); + try{ + Transformer transformer = this.transformerContainer.getTransformer(TransformRule.Types.PROCESSOR.name()); + String template = null; + if (Objects.nonNull(transformer) && transformer instanceof ProcessorTransformer){ + template = ((ProcessorTransformer) transformer).getCodeTemplate(Objects.nonNull(engine)? engine : + EngineTypeEnum.DATAX.name().toLowerCase(Locale.ROOT), "java"); + } + response.data("code", template); + } catch (Exception e){ + String message = "Fail to obtain template code content (获得处理器内容模版失败)"; + LOG.error(message, e); + response = Message.error(message); + } + return response; + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/restful/execute/ExchangisJobExecuteRestfulApi.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/restful/execute/ExchangisJobExecuteRestfulApi.java new file mode 100644 index 000000000..62d74e884 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/restful/execute/ExchangisJobExecuteRestfulApi.java @@ -0,0 +1,283 @@ +package com.webank.wedatasphere.exchangis.job.server.restful.execute; + +import com.webank.wedatasphere.exchangis.common.AuditLogUtils; +import com.webank.wedatasphere.exchangis.common.UserUtils; +import com.webank.wedatasphere.exchangis.common.enums.OperateTypeEnum; +import com.webank.wedatasphere.exchangis.common.enums.TargetTypeEnum; +import com.webank.wedatasphere.exchangis.datasource.core.utils.Json; +import com.webank.wedatasphere.exchangis.job.domain.ExchangisJobInfo; +import com.webank.wedatasphere.exchangis.job.domain.OperationType; +import com.webank.wedatasphere.exchangis.job.launcher.ExchangisLauncherConfiguration; +import com.webank.wedatasphere.exchangis.job.launcher.domain.task.TaskStatus; +import com.webank.wedatasphere.exchangis.job.log.LogQuery; +import com.webank.wedatasphere.exchangis.job.server.exception.ExchangisJobServerException; +import 
com.webank.wedatasphere.exchangis.job.server.service.JobInfoService; +import com.webank.wedatasphere.exchangis.job.server.service.impl.DefaultJobExecuteService; +import com.webank.wedatasphere.exchangis.job.server.utils.JobAuthorityUtils; +import com.webank.wedatasphere.exchangis.job.server.vo.ExchangisCategoryLogVo; +import com.webank.wedatasphere.exchangis.job.server.vo.ExchangisJobProgressVo; +import com.webank.wedatasphere.exchangis.job.server.vo.ExchangisJobTaskVo; +import com.webank.wedatasphere.exchangis.job.server.vo.ExchangisLaunchedJobListVo; +import com.webank.wedatasphere.exchangis.job.vo.ExchangisJobVo; +import org.apache.commons.lang.StringUtils; +import org.apache.linkis.server.Message; +import org.apache.linkis.server.security.SecurityFilter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.web.bind.annotation.*; + +import javax.annotation.Resource; +import javax.servlet.http.HttpServletRequest; +import java.util.*; + +/** + * + * @Date 2022/1/8 15:25 + */ +@RestController +@RequestMapping(value = "dss/exchangis/main/job", produces = {"application/json;charset=utf-8"}) +public class ExchangisJobExecuteRestfulApi { + private static final Logger LOG = LoggerFactory.getLogger(ExchangisJobExecuteRestfulApi.class); + @Autowired + private JobInfoService jobInfoService; + + @Resource + private DefaultJobExecuteService executeService; + + /** + * Execute job + * @param permitPartialFailures permit + * @param id id + * @return message + */ + @RequestMapping( value = "/{id}/execute", method = RequestMethod.POST) + public Message executeJob(@RequestBody(required = false) Map permitPartialFailures, + @PathVariable("id") Long id, HttpServletRequest request) { + String loginUser = UserUtils.getLoginUser(request); + String oringinUser = SecurityFilter.getLoginUsername(request); + Message result = Message.ok("Submitted succeed(提交成功)!"); + ExchangisJobInfo jobInfo = null; + try { + // First to find the job from the old table. + ExchangisJobVo jobVo = jobInfoService.getJob(id, false); + if (Objects.isNull(jobVo)){ + return Message.error("Job related the id: [" + id + "] is Empty(关联的任务不存在)"); + } + // Convert to the job info + jobInfo = new ExchangisJobInfo(jobVo); + + if (!JobAuthorityUtils.hasJobAuthority(loginUser, id, OperationType.JOB_EXECUTE)){ + return Message.error("You have no permission to execute job (没有执行任务权限)"); + } + + // Send to execute service + String jobExecutionId = executeService.executeJob(jobInfo, StringUtils.isNotBlank(jobInfo.getExecuteUser()) ? 
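+                    // prefer the execute user configured on the job; fall back to the login user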
+ jobInfo.getExecuteUser() : loginUser); + result.data("jobExecutionId", jobExecutionId); + } catch (Exception e) { + String message; + if (Objects.nonNull(jobInfo)) { + message = "Error occur while executing job: [id: " + jobInfo.getId() + " name: " + jobInfo.getName() + "]"; + result = Message.error(message + "(执行任务出错), reason: " + e.getMessage()); + } else { + message = "Error to get the job detail (获取任务信息出错)"; + result = Message.error(message); + } + LOG.error(message, e); + } + result.setMethod("/api/rest_j/v1/dss/exchangis/main/job/{id}/execute"); + assert jobInfo != null; + AuditLogUtils.printLog(oringinUser, loginUser, TargetTypeEnum.JOB, id.toString(), "Execute task is: " + jobInfo.getName(), OperateTypeEnum.EXECUTE, request); + return result; + } + + @RequestMapping( value = "/execution/{jobExecutionId}/taskList", method = RequestMethod.GET) + public Message getExecutedJobTaskList(@PathVariable(value = "jobExecutionId") String jobExecutionId, HttpServletRequest request) { + Message message = Message.ok("Submitted succeed(提交成功)!"); + String loginUser = UserUtils.getLoginUser(request); + try { + if(!JobAuthorityUtils.hasJobExecuteSituationAuthority(loginUser, jobExecutionId, OperationType.JOB_QUERY)) { + return Message.error("You have no permission to get taskList (没有获取任务列表权限)"); + } + List jobTaskList = executeService.getExecutedJobTaskList(jobExecutionId); + message.data("tasks", jobTaskList); + } catch (ExchangisJobServerException e) { + String errorMessage = "Error occur while get taskList: [jobExecutionId: " + jobExecutionId + "]"; + LOG.error(errorMessage, e); + message = Message.error(message + "(执行任务出错), reason: " + e.getMessage()); + } + message.setMethod("/api/rest_j/v1/" + jobExecutionId + "/taskList"); + return message; + } + + @RequestMapping( value = "/execution/{jobExecutionId}/progress", method = RequestMethod.GET) + public Message getExecutedJobAndTaskStatus(@PathVariable(value = "jobExecutionId") String jobExecutionId, HttpServletRequest request) { + ExchangisJobProgressVo jobAndTaskStatus; + String loginUser = UserUtils.getLoginUser(request); + try { + if(!JobAuthorityUtils.hasJobExecuteSituationAuthority(loginUser, jobExecutionId, OperationType.JOB_QUERY)) { + return Message.error("You have no permission to get task progress (没有获取任务进度权限)"); + } + jobAndTaskStatus = executeService.getExecutedJobProgressInfo(jobExecutionId); + } catch (ExchangisJobServerException e) { + // TODO Log exception + return Message.error("Fail to get progress info (获取任务执行状态失败), reason: [" + e.getMessage() + "]"); + } + Message message = Message.ok("Submitted succeed(提交成功)!"); + message.setMethod("/api/rest_j/v1/dss/exchangis/main/job/execution/" +jobExecutionId +"/progress"); + message.data("job", jobAndTaskStatus); + return message; + } + + @RequestMapping( value = "/execution/{jobExecutionId}/status", method = RequestMethod.GET) + public Message getExecutedJobStatus(@PathVariable(value = "jobExecutionId") String jobExecutionId, HttpServletRequest request) { + Message message = Message.ok("Submitted succeed(提交成功)!"); + String loginUser = UserUtils.getLoginUser(request); + try { + if(!JobAuthorityUtils.hasJobExecuteSituationAuthority(loginUser, jobExecutionId, OperationType.JOB_QUERY)) { + return Message.error("You have no permission to get tastStatus (没有权限去获取任务状态)"); + } + ExchangisJobProgressVo jobStatus = executeService.getJobStatus(jobExecutionId); + message.setMethod("/api/rest_j/v1/dss/exchangis/main/job/execution/" + jobExecutionId + "/status"); + message.data("status", 
jobStatus.getStatus()); + message.data("progress", jobStatus.getProgress()); + message.data("allTaskStatus", jobStatus.getAllTaskStatus()); + } catch (ExchangisJobServerException e) { + String errorMessage = "Error occur while getting job status: [job_execution_id: " + jobExecutionId +"]"; + LOG.error(errorMessage, e); + message = Message.error(message + ", reason: " + e.getMessage()); + } + return message; + } + + @RequestMapping(value = "/execution/{jobExecutionId}/log", method = RequestMethod.GET) + public Message getJobExecutionLogs(@PathVariable(value = "jobExecutionId") String jobExecutionId, + @RequestParam(value = "fromLine", required = false) Integer fromLine, + @RequestParam(value = "pageSize", required = false) Integer pageSize, + @RequestParam(value = "ignoreKeywords", required = false) String ignoreKeywords, + @RequestParam(value = "onlyKeywords", required = false) String onlyKeywords, + @RequestParam(value = "enableTail", required = false) Boolean enableTail, + @RequestParam(value = "lastRows", required = false) Integer lastRows, HttpServletRequest request) { + + Message result = Message.ok("Submitted succeed(提交成功)!"); + LogQuery logQuery = new LogQuery(fromLine, pageSize, + ignoreKeywords, onlyKeywords, lastRows); + if (null != enableTail) { + logQuery.setEnableTail(enableTail); + } + String loginUser = UserUtils.getLoginUser(request); + try { + if(!JobAuthorityUtils.hasJobExecuteSituationAuthority(loginUser, jobExecutionId, OperationType.JOB_QUERY)) { + return Message.error("You have no permission to get logs (没有获取任务日志权限)"); + } + + ExchangisCategoryLogVo categoryLogVo = this.executeService + .getJobLogInfo(jobExecutionId, logQuery); + result.setData(Json.convert(categoryLogVo, Map.class, String.class, Object.class)); + } catch (ExchangisJobServerException e) { + String message = "Error occur while querying job log: [job_execution_id: " + jobExecutionId +"]"; + LOG.error(message, e); + result = Message.error(message + ", reason: " + e.getMessage()); + } + result.setMethod("/api/rest_j/v1/dss/exchangis/main/job/execution/{jobExecutionId}/log"); + return result; + } + + @RequestMapping( value = "/execution/{jobExecutionId}/kill", method = RequestMethod.POST) + public Message ExecutedJobKill(@PathVariable(value = "jobExecutionId") String jobExecutionId, HttpServletRequest request) throws ExchangisJobServerException { + ExchangisJobProgressVo jobStatus = executeService.getJobStatus(jobExecutionId); + Message message = null; + String loginUser = SecurityFilter.getLoginUsername(request); + String oringinUser = SecurityFilter.getLoginUsername(request); + if(!JobAuthorityUtils.hasJobExecuteSituationAuthority(loginUser, jobExecutionId, OperationType.JOB_EXECUTE)) { + return Message.error("You have no permission to get kill job (没有权限去杀死任务)"); + } + if (!TaskStatus.isCompleted(jobStatus.getStatus())) + { + message = Message.ok("Kill succeed(停止成功)!"); + try { + executeService.killJob(jobExecutionId); + } catch (ExchangisJobServerException e) { + String errorMessage = "Error occur while killing job: [job_execution_id: " + jobExecutionId + "]"; + LOG.error(errorMessage, e); + message = Message.error(message + ", reason: " + e.getMessage()); + } + } + else { + message = Message.error("Kill failed(停止失败)!,job 已经到终态"); + } + message.setMethod("/api/rest_j/v1/dss/exchangis/main/job/execution/" + jobExecutionId + "/kill"); + AuditLogUtils.printLog(oringinUser, loginUser, TargetTypeEnum.JOB, jobExecutionId, "Kill job: ", OperateTypeEnum.KILL, request); + return message; + } + + 
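+    /*
+     * A request sketch for the listing endpoint below (hypothetical values):
+     *
+     *   GET /api/rest_j/v1/dss/exchangis/main/job/listJobs?jobName=my_job&current=1&size=10
+     *
+     * Underscores in jobName are escaped to "\_" so they match literally in the SQL
+     * LIKE clause; note the parameter is declared optional but is dereferenced
+     * unconditionally on entry.
+     */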
@RequestMapping(value = "/listJobs", method = RequestMethod.GET) + public Message listJobs(@RequestParam(value = "jobExecutionId", required = false) String jobExecutionId, + @RequestParam(value = "jobName", required = false) String jobName, + @RequestParam(value = "status", required = false) String status, + @RequestParam(value = "launchStartTime", required = false) Long launchStartTime, + @RequestParam(value = "launchEndTime", required = false) Long launchEndTime, + @RequestParam(value = "current", required = false) int current, + @RequestParam(value = "size", required = false) int size, + HttpServletRequest request) { + Message message = Message.ok("Submitted succeed(提交成功)!"); + jobName = jobName.replace("_", "\\_"); + try { + List jobList = executeService.getExecutedJobList(jobExecutionId, jobName, status, + launchStartTime, launchEndTime, current, size, request); + int total = executeService.count(jobExecutionId, jobName, status, launchStartTime, launchEndTime, request); + message.data("jobList", jobList); + message.data("total", total); + } catch (ExchangisJobServerException e) { + String errorMessage = "Error occur while getting job list: [job_execution_id: " + jobExecutionId + "jobName: " + jobName + "status: " + status + "]"; + LOG.error(errorMessage, e); + message = Message.error(message + ", reason: " + e.getMessage()); + } + message.setMethod("/api/rest_j/v1/dss/exchangis/main/job/execution/listJobs"); + return message; + } + + @RequestMapping( value = "/{jobExecutionId}/deleteJob", method = RequestMethod.POST) + public Message ExecutedJobDelete(@PathVariable(value = "jobExecutionId") String jobExecutionId, HttpServletRequest request) throws ExchangisJobServerException { + if (ExchangisLauncherConfiguration.LIMIT_INTERFACE.getValue()) { + return Message.error("You have no permission to delete this record (没有删除历史记录权限)"); + } + Message message = Message.ok("Kill succeed(停止成功)!"); + String loginUser = UserUtils.getLoginUser(request); + try { + if(!JobAuthorityUtils.hasJobExecuteSituationAuthority(loginUser, jobExecutionId, OperationType.JOB_EXECUTE)) { + return Message.error("You have no permission to delete this record (没有删除历史记录权限)"); + } + executeService.deleteJob(jobExecutionId); + message.data("jobExecutionId", jobExecutionId); + } catch (ExchangisJobServerException e){ + String errorMessage = "Error occur while delete job: [job_execution_id: " + jobExecutionId + "]"; + LOG.error(errorMessage, e); + message = Message.error(message + ", reason: " + e.getMessage()); + } + message.setMethod("/api/rest_j/v1/dss/exchangis/main/job/" + jobExecutionId + "/deleteJob"); + return message; + } + + @RequestMapping( value = "/{jobExecutionId}/allTaskStatus", method = RequestMethod.GET) + public Message allTaskStatus(@PathVariable(value = "jobExecutionId") String jobExecutionId, HttpServletRequest request) throws ExchangisJobServerException { + //ExchangisLaunchedJobEntity jobAndTaskStatus = exchangisExecutionService.getExecutedJobAndTaskStatus(jobExecutionId); + Message message = Message.ok("所有任务状态"); + String loginUser = UserUtils.getLoginUser(request); + try { + if(!JobAuthorityUtils.hasJobExecuteSituationAuthority(loginUser, jobExecutionId, OperationType.JOB_QUERY)) { + return Message.error("You have no permission to get tastStatus (没有权限去获取任务状态)"); + } + List allStatus = executeService.allTaskStatus(jobExecutionId); + message.data("allStatus", allStatus); + message.data("jobExecutionId", jobExecutionId); + } catch (ExchangisJobServerException e) { + String errorMessage = "Error occur 
while judge whether all task complete: [job_execution_id: " + jobExecutionId + "]"; + LOG.error(errorMessage, e); + message = Message.error(message + ", reason: " + e.getMessage()); + } + message.setMethod("/api/rest_j/v1/dss/exchangis/main/job/" + jobExecutionId + "/allTaskStatus"); + return message; + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/restful/execute/ExchangisTaskExecuteRestfulApi.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/restful/execute/ExchangisTaskExecuteRestfulApi.java new file mode 100644 index 000000000..aa2a45e1b --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/restful/execute/ExchangisTaskExecuteRestfulApi.java @@ -0,0 +1,104 @@ +package com.webank.wedatasphere.exchangis.job.server.restful.execute; + +import com.webank.wedatasphere.exchangis.common.UserUtils; +import com.webank.wedatasphere.exchangis.datasource.core.utils.Json; +import com.webank.wedatasphere.exchangis.job.domain.OperationType; +import com.webank.wedatasphere.exchangis.job.log.LogQuery; +import com.webank.wedatasphere.exchangis.job.server.exception.ExchangisJobServerException; +import com.webank.wedatasphere.exchangis.job.server.service.JobExecuteService; +import com.webank.wedatasphere.exchangis.job.server.service.JobInfoService; +import com.webank.wedatasphere.exchangis.job.server.service.impl.DefaultJobExecuteService; +import com.webank.wedatasphere.exchangis.job.server.utils.JobAuthorityUtils; +import com.webank.wedatasphere.exchangis.job.server.vo.ExchangisCategoryLogVo; +import com.webank.wedatasphere.exchangis.job.server.vo.ExchangisLaunchedTaskMetricsVo; +import org.apache.commons.lang.StringUtils; +import org.apache.linkis.server.Message; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.web.bind.annotation.*; + +import javax.annotation.Resource; +import javax.servlet.http.HttpServletRequest; +import java.util.Map; + +import static com.webank.wedatasphere.exchangis.job.exception.ExchangisJobExceptionCode.METRICS_OP_ERROR; + +/** + * @Date 2022/1/8 17:23 + */ + +@RestController +@RequestMapping(value = "dss/exchangis/main/task", produces = {"application/json;charset=utf-8"}) +public class ExchangisTaskExecuteRestfulApi { + private static final Logger LOG = LoggerFactory.getLogger(ExchangisTaskExecuteRestfulApi.class); + @Autowired + private JobInfoService jobInfoService; + + @Resource + private JobExecuteService jobExecuteService; + + @Resource + private DefaultJobExecuteService executeService; + + @RequestMapping(value = "/execution/{taskId}/metrics", method = RequestMethod.POST) + public Message getTaskMetrics(@PathVariable("taskId") String taskId, + @RequestBody Map json, HttpServletRequest request) throws ExchangisJobServerException { + Message result = Message.ok("Submitted succeed(提交成功)!"); + String jobExecutionId = null; + + if (null != json.get("jobExecutionId")) { + jobExecutionId = (String) json.get("jobExecutionId"); + } + if (StringUtils.isBlank(jobExecutionId)) { + return Message.error("Required params 'jobExecutionId' is missing"); + } + try { + + if (!JobAuthorityUtils.hasJobExecuteSituationAuthority(UserUtils.getLoginUser(request), jobExecutionId, OperationType.JOB_EXECUTE)) { + + throw new ExchangisJobServerException(METRICS_OP_ERROR.getCode(), "Unable to find the launched job by [" + 
diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/restful/execute/ExchangisTaskExecuteRestfulApi.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/restful/execute/ExchangisTaskExecuteRestfulApi.java new file mode 100644 index 000000000..aa2a45e1b --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/restful/execute/ExchangisTaskExecuteRestfulApi.java @@ -0,0 +1,104 @@ +package com.webank.wedatasphere.exchangis.job.server.restful.execute; + +import com.webank.wedatasphere.exchangis.common.UserUtils; +import com.webank.wedatasphere.exchangis.datasource.core.utils.Json; +import com.webank.wedatasphere.exchangis.job.domain.OperationType; +import com.webank.wedatasphere.exchangis.job.log.LogQuery; +import com.webank.wedatasphere.exchangis.job.server.exception.ExchangisJobServerException; +import com.webank.wedatasphere.exchangis.job.server.service.JobExecuteService; +import com.webank.wedatasphere.exchangis.job.server.service.JobInfoService; +import com.webank.wedatasphere.exchangis.job.server.service.impl.DefaultJobExecuteService; +import com.webank.wedatasphere.exchangis.job.server.utils.JobAuthorityUtils; +import com.webank.wedatasphere.exchangis.job.server.vo.ExchangisCategoryLogVo; +import com.webank.wedatasphere.exchangis.job.server.vo.ExchangisLaunchedTaskMetricsVo; +import org.apache.commons.lang.StringUtils; +import org.apache.linkis.server.Message; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.web.bind.annotation.*; + +import javax.annotation.Resource; +import javax.servlet.http.HttpServletRequest; +import java.util.Map; + +import static com.webank.wedatasphere.exchangis.job.exception.ExchangisJobExceptionCode.METRICS_OP_ERROR; + +/** + * @Date 2022/1/8 17:23 + */ + +@RestController +@RequestMapping(value = "dss/exchangis/main/task", produces = {"application/json;charset=utf-8"}) +public class ExchangisTaskExecuteRestfulApi { + private static final Logger LOG = LoggerFactory.getLogger(ExchangisTaskExecuteRestfulApi.class); + @Autowired + private JobInfoService jobInfoService; + + @Resource + private JobExecuteService jobExecuteService; + + @Resource + private DefaultJobExecuteService executeService; + + @RequestMapping(value = "/execution/{taskId}/metrics", method = RequestMethod.POST) + public Message getTaskMetrics(@PathVariable("taskId") String taskId, + @RequestBody Map<String, Object> json, HttpServletRequest request) throws ExchangisJobServerException { + Message result = Message.ok("Query succeed(查询成功)!"); + String jobExecutionId = null; + + if (null != json.get("jobExecutionId")) { + jobExecutionId = (String) json.get("jobExecutionId"); + } + if (StringUtils.isBlank(jobExecutionId)) { + return Message.error("Required parameter 'jobExecutionId' is missing"); + } + try { + + if (!JobAuthorityUtils.hasJobExecuteSituationAuthority(UserUtils.getLoginUser(request), jobExecutionId, OperationType.JOB_EXECUTE)) { + + throw new ExchangisJobServerException(METRICS_OP_ERROR.getCode(), "Unable to find the launched job by [" + jobExecutionId + "]", null); + } + ExchangisLaunchedTaskMetricsVo taskMetrics = this.jobExecuteService.getLaunchedTaskMetrics(taskId, jobExecutionId); + result.data("task", taskMetrics); + } catch (Exception e) { + String message = "Error occurred while fetching metrics: [task_id: " + taskId + ", job_execution_id: " + jobExecutionId + "]"; + LOG.error(message, e); + result = Message.error(message + ", reason: " + e.getMessage()); + } + result.setMethod("/api/rest_j/v1/dss/exchangis/main/task/execution/{taskId}/metrics"); + return result; + } + + @RequestMapping(value = "/execution/{taskId}/log", method = RequestMethod.GET) + public Message getTaskExecutionLogs(@PathVariable(value = "taskId") String taskId, + @RequestParam(value = "jobExecutionId", required = false) String jobExecutionId, + @RequestParam(value = "fromLine", required = false) Integer fromLine, + @RequestParam(value = "pageSize", required = false) Integer pageSize, + @RequestParam(value = "ignoreKeywords", required = false) String ignoreKeywords, + @RequestParam(value = "onlyKeywords", required = false) String onlyKeywords, + @RequestParam(value = "enableTail", required = false) Boolean enableTail, + @RequestParam(value = "lastRows", required = false) Integer lastRows, HttpServletRequest request) { + Message result = Message.ok("Query succeed(查询成功)!"); + LogQuery logQuery = new LogQuery(fromLine, pageSize, + ignoreKeywords, onlyKeywords, lastRows); + if (null != enableTail) { + logQuery.setEnableTail(enableTail); + } + String userName = UserUtils.getLoginUser(request); + try { + if (!JobAuthorityUtils.hasJobExecuteSituationAuthority(userName, jobExecutionId, OperationType.JOB_QUERY)) { + return Message.error("You have no permission to get logs(没有查看日志权限)"); + } + + ExchangisCategoryLogVo categoryLogVo = this.jobExecuteService.getTaskLogInfo(taskId, jobExecutionId, logQuery); + result.setData(Json.convert(categoryLogVo, Map.class, String.class, Object.class)); + } catch (Exception e) { + String message = "Error occurred while querying task log: [task_id: " + taskId + ", job_execution_id: " + jobExecutionId + "]"; + LOG.error(message, e); + result = Message.error(message + ", reason: " + e.getMessage()); + } + result.setMethod("/api/rest_j/v1/dss/exchangis/main/task/execution/{taskId}/log"); + return result; + } +}
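`getTaskExecutionLogs` above exposes cursor-style paging over task logs via `fromLine`/`pageSize`, with the response expected to carry an end flag so the client knows when to stop. A hedged client-side sketch of that loop over an in-memory stand-in (the 1-based `fromLine` convention is an assumption; the HTTP layer is stubbed out):

```java
import java.util.ArrayList;
import java.util.List;

/** Sketch of fromLine/pageSize paging over a log source (in-memory stand-in for the REST endpoint). */
public class LogPagingSketch {
    static List<String> fetchPage(List<String> allLogs, int fromLine, int pageSize) {
        int from = Math.max(fromLine, 1);                       // lines are 1-based in this sketch (assumption)
        int to = Math.min(from - 1 + pageSize, allLogs.size());
        return from - 1 >= to ? new ArrayList<>() : new ArrayList<>(allLogs.subList(from - 1, to));
    }

    public static void main(String[] args) {
        List<String> logs = List.of("INFO start", "WARN slow", "ERROR boom", "INFO done");
        int fromLine = 1, pageSize = 2;
        List<String> page;
        while (!(page = fetchPage(logs, fromLine, pageSize)).isEmpty()) {
            page.forEach(System.out::println);
            fromLine += page.size();   // advance the cursor by the number of lines returned
        }
    }
}
```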
diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/restful/external/ExchangisJobDssAppConnRestfulApi.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/restful/external/ExchangisJobDssAppConnRestfulApi.java new file mode 100644 index 000000000..72ae0dc09 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/restful/external/ExchangisJobDssAppConnRestfulApi.java @@ -0,0 +1,238 @@ +package com.webank.wedatasphere.exchangis.job.server.restful.external; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.webank.wedatasphere.exchangis.common.AuditLogUtils; +import com.webank.wedatasphere.exchangis.common.UserUtils; +import com.webank.wedatasphere.exchangis.common.enums.OperateTypeEnum; +import com.webank.wedatasphere.exchangis.common.enums.TargetTypeEnum; +import com.webank.wedatasphere.exchangis.common.validator.groups.InsertGroup; +import com.webank.wedatasphere.exchangis.job.domain.ExchangisJobInfo; +import com.webank.wedatasphere.exchangis.job.domain.OperationType; +import com.webank.wedatasphere.exchangis.job.launcher.ExchangisLauncherConfiguration; +import com.webank.wedatasphere.exchangis.job.server.service.JobInfoService; +import com.webank.wedatasphere.exchangis.job.server.service.impl.DefaultJobExecuteService; +import com.webank.wedatasphere.exchangis.job.server.utils.JobAuthorityUtils; +import com.webank.wedatasphere.exchangis.job.vo.ExchangisJobVo; +import org.apache.commons.lang.StringUtils; +import org.apache.linkis.server.BDPJettyServerHelper; +import org.apache.linkis.server.Message; +import org.apache.linkis.server.security.SecurityFilter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.validation.BindingResult; +import org.springframework.validation.annotation.Validated; +import org.springframework.web.bind.annotation.*; + +import javax.annotation.Resource; +import javax.servlet.http.HttpServletRequest; +import javax.validation.groups.Default; +import java.util.Map; +import java.util.Objects; + +/** + * Defined to support the DSS AppConn, to distinguish it from the inner API + */ +@RestController +@RequestMapping(value = "/dss/exchangis/main/appJob", produces = {"application/json;charset=utf-8"}) +public class ExchangisJobDssAppConnRestfulApi { + + private static final Logger LOG = LoggerFactory.getLogger(ExchangisJobDssAppConnRestfulApi.class); + /** + * Job service + */ + @Resource + private JobInfoService jobInfoService; + + /** + * Job execute service + */ + @Resource + private DefaultJobExecuteService executeService; + + /** + * Create job + * @param exchangisJobVo exchangis job vo + * @param result binding result + * @param request http request + * @return message + */ + @RequestMapping( value = "/create", method = RequestMethod.POST) + public Message createJob( + @Validated({InsertGroup.class, Default.class}) @RequestBody ExchangisJobVo exchangisJobVo, + BindingResult result, + HttpServletRequest request) { + if (ExchangisLauncherConfiguration.LIMIT_INTERFACE.getValue()) { + return Message.error("You have no permission to create (没有创建任务权限)"); + } + if (result.hasErrors()){ + return Message.error(result.getFieldErrors().get(0).getDefaultMessage()); + } + String originUser = SecurityFilter.getLoginUsername(request); + String loginUser = UserUtils.getLoginUser(request); + exchangisJobVo.setCreateUser(loginUser); + Message response = Message.ok(); + + Long id = null; + try{ + if (!JobAuthorityUtils.hasProjectAuthority(loginUser, exchangisJobVo.getProjectId(), OperationType.JOB_ALTER)) { + return Message.error("You have no permission to create Job (没有创建任务权限)"); + } + id = jobInfoService.createJob(exchangisJobVo).getId(); + response.data("id", id); + LOG.info("job id is: {}", id); + } catch (Exception e){ + String message = "Failed to create dss job: " + exchangisJobVo.getJobName() + " (创建DSS任务失败)"; + LOG.error(message, e); + return Message.error(message); + } + assert id != null; + AuditLogUtils.printLog(originUser, loginUser, TargetTypeEnum.JOB, String.valueOf(id), "Job name is: " + exchangisJobVo.getJobName(), OperateTypeEnum.CREATE, request); + return response; + }
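`createJob` above validates the incoming VO with JSR-303 validation groups (`InsertGroup` plus `Default`) and surfaces the first field error through `BindingResult`. A minimal standalone sketch of the same grouping pattern (class and field names here are illustrative, not from this PR; assumes a Bean Validation implementation such as hibernate-validator on the classpath):

```java
import javax.validation.Validation;
import javax.validation.Validator;
import javax.validation.constraints.NotBlank;
import javax.validation.constraints.NotNull;
import javax.validation.groups.Default;

public class ValidationGroupSketch {
    /** Marker group: rules that only apply on insert (plays the role of InsertGroup in this PR). */
    interface Insert {}

    static class JobVo {
        @NotNull(groups = Insert.class, message = "projectId is required on create")
        Long projectId;
        @NotBlank(message = "jobName must not be blank")
        String jobName;
    }

    public static void main(String[] args) {
        Validator validator = Validation.buildDefaultValidatorFactory().getValidator();
        JobVo vo = new JobVo();  // both fields missing
        // Validating with {Insert, Default} activates both the insert-only and the default rules
        validator.validate(vo, Insert.class, Default.class)
                .forEach(v -> System.out.println(v.getPropertyPath() + ": " + v.getMessage()));
    }
}
```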
/** + * Delete job + * @param id id + * @param request http request + * @return message + */ + @RequestMapping( value = "/{id:\\d+}", method = RequestMethod.POST) + public Message deleteJob(@PathVariable("id") Long id, HttpServletRequest request) { + if (ExchangisLauncherConfiguration.LIMIT_INTERFACE.getValue()) { + return Message.error("You have no permission to delete (没有删除任务权限)"); + } + String loginUser = UserUtils.getLoginUser(request); + String originUser = SecurityFilter.getLoginUsername(request); + Message response = Message.ok("dss job deleted"); + try { + ExchangisJobVo exchangisJob = jobInfoService.getJob(id, true); + if (Objects.isNull(exchangisJob)){ + return response; + } + if (!JobAuthorityUtils.hasProjectAuthority(loginUser, exchangisJob.getProjectId(), OperationType.JOB_ALTER)) { + return Message.error("You have no permission to delete (没有删除任务权限)"); + } + jobInfoService.deleteJob(id); + } catch (Exception e){ + String message = "Failed to delete dss job [ id: " + id + "] (删除DSS任务失败)"; + LOG.error(message, e); + return Message.error(message); + } + AuditLogUtils.printLog(originUser, loginUser, TargetTypeEnum.JOB, String.valueOf(id), "Job", OperateTypeEnum.DELETE, request); + return response; + } + + /** + * Update job + * @param id job id + * @param exchangisJobVo job vo + * @return message + */ + @RequestMapping( value = "/{id:\\d+}", method = RequestMethod.PUT) + public Message updateJob(@PathVariable("id") Long id, + @Validated @RequestBody ExchangisJobVo exchangisJobVo, + BindingResult result, HttpServletRequest request) { + if (ExchangisLauncherConfiguration.LIMIT_INTERFACE.getValue()) { + return Message.error("You have no permission to update (没有更新任务权限)"); + } + if (result.hasErrors()){ + return Message.error(result.getFieldErrors().get(0).getDefaultMessage()); + } + String originUser = SecurityFilter.getLoginUsername(request); + String loginUser = UserUtils.getLoginUser(request); + exchangisJobVo.setId(id); + exchangisJobVo.setModifyUser(loginUser); + Message response = Message.ok(); + try{ + // Fetch the job once, and only dereference it after the null check + ExchangisJobVo exchangisJob = jobInfoService.getJob(id, true); + if (Objects.isNull(exchangisJob)){ + return Message.error("You have no job in exchangis, please delete this job (该节点在exchangis端不存在,请删除该节点)"); + } + LOG.info("update job bean: {}, jobId: {}", exchangisJob, exchangisJob.getId()); + if (!JobAuthorityUtils.hasJobAuthority(loginUser, id, OperationType.JOB_ALTER)) { + return Message.error("You have no permission to update (没有更新任务权限)"); + } + response.data("id", jobInfoService.updateJob(exchangisJobVo).getId()); + } catch (Exception e){ + String message = "Failed to update dss job: " + exchangisJobVo.getJobName() + " (更新DSS任务失败)"; + LOG.error(message, e); + return Message.error(message); + } + AuditLogUtils.printLog(originUser, loginUser, TargetTypeEnum.JOB, String.valueOf(id), "Job name is: " + exchangisJobVo.getJobName(), OperateTypeEnum.UPDATE, request); + return response; + }
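The three handlers above repeat the same guard sequence: global limit switch, validation errors, per-resource authority, then the action plus an audit record. If this grows, the sequence could be factored into one template; a compact, purely illustrative sketch with stand-in types (nothing here is code from this PR):

```java
import java.util.Optional;
import java.util.function.BooleanSupplier;
import java.util.function.Supplier;

/** Sketch: the guard chain shared by the create/update/delete handlers, with stand-in types. */
public class GuardChainSketch {
    record Result(boolean ok, String message) {}

    static Result guarded(boolean limitSwitch, Optional<String> bindError,
                          BooleanSupplier hasAuthority, Supplier<Result> action, Runnable audit) {
        if (limitSwitch) return new Result(false, "interface is limited");
        if (bindError.isPresent()) return new Result(false, bindError.get());
        if (!hasAuthority.getAsBoolean()) return new Result(false, "no permission");
        Result r = action.get();
        if (r.ok()) audit.run();  // audit only successful mutations
        return r;
    }

    public static void main(String[] args) {
        Result r = guarded(false, Optional.empty(), () -> true,
                () -> new Result(true, "job created"), () -> System.out.println("audit: CREATE"));
        System.out.println(r.message());
    }
}
```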
/** + * Execute job + * @param id id + * @return message + */ + @RequestMapping( value = "/execute/{id}", method = RequestMethod.POST) + public Message executeJob(@PathVariable("id") Long id, HttpServletRequest request, @RequestBody Map<String, Object> params) { + try { + LOG.info("start to parse params"); + String paramString = BDPJettyServerHelper.jacksonJson().writeValueAsString(params); + LOG.info("paramString: {}", paramString); + } catch (JsonProcessingException e) { + LOG.error("parse execute content error: {}", e.getMessage()); + } + // Null-safe: 'submitUser' may be absent from the request body + String submitUser = Objects.toString(params.get("submitUser"), null); + String originUser = SecurityFilter.getLoginUsername(request); + String loginUser = UserUtils.getLoginUser(request); + Message response = Message.ok(); + ExchangisJobInfo jobInfo = null; + LOG.info("wds execute user: {}", loginUser); + try { + // First to find the job from the old table. + ExchangisJobVo jobVo = jobInfoService.getJob(id, false); + /* if (!AuthorityUtils.hasOwnAuthority(jobVo.getProjectId(), loginUser) && !AuthorityUtils.hasExecAuthority(jobVo.getProjectId(), loginUser)) { + return Message.error("You have no permission to execute job (没有执行任务权限)"); + }*/ + if (Objects.isNull(jobVo)){ + return Message.error("Job related to the id: [" + id + "] is empty (关联的DSS任务不存在)"); + } + // Convert to the job info + jobInfo = new ExchangisJobInfo(jobVo); + jobInfo.setName(jobVo.getJobName()); + jobInfo.setId(jobVo.getId()); + LOG.info("jobInfo: name: {}, executeUser: {}, createUser: {}, id: {}", jobInfo.getName(), jobInfo.getExecuteUser(), jobInfo.getCreateUser(), jobInfo.getId()); + LOG.info("loginUser: {}, jobVo: {}", loginUser, jobVo); + //find project user authority + /*if (!hasAuthority(submitUser, jobVo)){ + return Message.error("You have no permission to execute job (没有执行DSS任务权限)"); + }*/ + // Send to execute service + String jobExecutionId = executeService.executeJob(jobInfo, StringUtils.isNotBlank(jobInfo.getExecuteUser()) ? + jobInfo.getExecuteUser() : loginUser); + response.data("jobExecutionId", jobExecutionId); + + LOG.info("Prepare to get job status"); + /*while (true) { + TaskStatus jobStatus = executeService.getJobStatus(jobExecutionId).getStatus(); + LOG.info("Taskstatus is: {}", jobStatus.name()); + if (jobStatus == TaskStatus.Success ) { + result.data("jobStatus", jobStatus.name()); + LOG.info("Execute task success"); + break; + } else if (jobStatus == TaskStatus.Cancelled || jobStatus == TaskStatus.Failed || jobStatus == TaskStatus.Undefined || jobStatus == TaskStatus.Timeout) { + result.data("jobStatus", jobStatus.name()); + LOG.info("Execute task failed"); + throw new Exception(); + } + }*/ + } catch (Exception e) { + String message; + if (Objects.nonNull(jobInfo)) { + message = "Error occurred while executing job: [id: " + jobInfo.getId() + " name: " + jobInfo.getName() + "]"; + response = Message.error(message + "(执行任务出错), reason: " + e.getMessage()); + } else { + message = "Error to get the job detail (获取任务信息出错)"; + response = Message.error(message); + } + LOG.error(message, e); + return response; + } + assert jobInfo != null; + AuditLogUtils.printLog(originUser, loginUser, TargetTypeEnum.JOB, String.valueOf(id), "Execute task is: " + jobInfo.getName(), OperateTypeEnum.EXECUTE, request); + return response; + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/restful/external/ModuleEnum.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/restful/external/ModuleEnum.java new file mode 100644 index 000000000..3ca1766b1 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/restful/external/ModuleEnum.java @@ -0,0 +1,30 @@ +package com.webank.wedatasphere.exchangis.job.server.restful.external; + +import java.util.Arrays; + +/** + * @author tikazhang + * @Date 2022/3/14 20:37 + */ +public enum ModuleEnum { + + SQOOP_IDS("sqoopIds", "SQOOP ids"), + + DATAX_IDS("dataXIds", "DATAX ids"); + + private String name; + private String desc; + + ModuleEnum(String name, String desc) { + this.name = name; + this.desc = desc; + } + + public static ModuleEnum getEnum(String name) { + return Arrays.stream(ModuleEnum.values()).filter(e -> e.getName().equals(name)).findFirst().orElseThrow(() -> new IllegalArgumentException("Unknown module name: " + name)); + } + + public String getName() { + return name; + } +}
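`ModuleEnum.getEnum` above resolves a constant by its `name` field rather than the constant's own identifier, so `Enum.valueOf` cannot be used directly. The same lookup generalizes to any key extractor; a standalone sketch (not part of this PR):

```java
import java.util.Arrays;
import java.util.Optional;
import java.util.function.Function;

public class EnumLookupSketch {
    enum Module { SQOOP_IDS("sqoopIds"), DATAX_IDS("dataXIds");
        final String name;
        Module(String name) { this.name = name; }
    }

    /** Find an enum constant by an arbitrary key extracted from it. */
    static <E extends Enum<E>, K> Optional<E> byKey(Class<E> type, Function<E, K> key, K wanted) {
        return Arrays.stream(type.getEnumConstants()).filter(e -> key.apply(e).equals(wanted)).findFirst();
    }

    public static void main(String[] args) {
        System.out.println(byKey(Module.class, m -> m.name, "dataXIds").orElseThrow()); // DATAX_IDS
    }
}
```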
diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/service/ExchangisJobDsBindService.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/service/ExchangisJobDsBindService.java new file mode 100644 index 000000000..69a599369 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/service/ExchangisJobDsBindService.java @@ -0,0 +1,13 @@ +package com.webank.wedatasphere.exchangis.job.server.service; + +import com.webank.wedatasphere.exchangis.dao.domain.ExchangisJobDsBind; + +import java.util.List; + +public interface ExchangisJobDsBindService { + + void updateJobDsBind(Long jobId, List<ExchangisJobDsBind> dsBinds); + + boolean inUse(Long datasourceId); + +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/service/ExchangisMetricsService.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/service/ExchangisMetricsService.java new file mode 100644 index 000000000..fa544c34f --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/service/ExchangisMetricsService.java @@ -0,0 +1,20 @@ +package com.webank.wedatasphere.exchangis.job.server.service; + + +import org.apache.linkis.server.Message; + +import javax.servlet.http.HttpServletRequest; + +public interface ExchangisMetricsService { + Message getTaskStateMetrics(HttpServletRequest request); + + Message getTaskProcessMetrics(HttpServletRequest request); + + Message getDataSourceFlowMetrics(HttpServletRequest request); + + Message getEngineResourceCpuMetrics(HttpServletRequest request); + + Message getEngineResourceMemMetrics(HttpServletRequest request); + + Message getEngineResourceMetrics(HttpServletRequest request); +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/service/JobExecuteService.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/service/JobExecuteService.java new file mode 100644 index 000000000..ef5293e04 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/service/JobExecuteService.java @@ -0,0 +1,93 @@ +package com.webank.wedatasphere.exchangis.job.server.service; + + +import com.webank.wedatasphere.exchangis.job.domain.ExchangisJobInfo; +import com.webank.wedatasphere.exchangis.job.launcher.exception.ExchangisTaskLaunchException; +import com.webank.wedatasphere.exchangis.job.log.LogQuery; +import com.webank.wedatasphere.exchangis.job.server.exception.ExchangisJobServerException; +import com.webank.wedatasphere.exchangis.job.server.vo.*; + +import javax.servlet.http.HttpServletRequest; +import java.util.List; + +public interface JobExecuteService { + + /** + * Gets the category log of a job execution + * @param jobExecutionId job execution id + * @param logQuery log query + * @return the category log vo + */ + ExchangisCategoryLogVo getJobLogInfo(String jobExecutionId, LogQuery logQuery) throws ExchangisJobServerException; + + ExchangisCategoryLogVo getTaskLogInfo(String taskId, String jobExecutionId, LogQuery logQuery) throws ExchangisJobServerException, ExchangisTaskLaunchException; + + /** + * Gets task metrics + * + * @param taskId the task id + * @param jobExecutionId the job ExecutionId + * @return the task launched metrics + */ + ExchangisLaunchedTaskMetricsVo getLaunchedTaskMetrics(String taskId, 
String jobExecutionId) throws ExchangisJobServerException; + + /** + * Gets job progress info + * @param jobExecutionId the job ExecutionId + * @return the job tasks status + */ + ExchangisJobProgressVo getExecutedJobProgressInfo(String jobExecutionId) throws ExchangisJobServerException; + + /** + * Gets job status info + * @param jobExecutionId the job ExecutionId + * @return the job status + */ + ExchangisJobProgressVo getJobStatus(String jobExecutionId) throws ExchangisJobServerException; + + /** + * Gets executed task list + * @param jobExecutionId the job ExecutionId + * @return the launched taskList + */ + List<ExchangisJobTaskVo> getExecutedJobTaskList(String jobExecutionId) throws ExchangisJobServerException; + + /** + * Gets executed job list + * @return the launched jobList + */ + List<ExchangisLaunchedJobListVo> getExecutedJobList(String jobExecutionId, String jobName, String status, + Long launchStartTime, Long launchEndTime, int current, int size, HttpServletRequest request) throws ExchangisJobServerException; + + /** + * Count the executed jobs. + * + * @param jobExecutionId the job execution id + * @param jobName the job name + * @param status the status + * @param launchStartTime the launch start time + * @param launchEndTime the launch end time + * @return the count + */ + int count(String jobExecutionId, String jobName, String status, Long launchStartTime, Long launchEndTime, HttpServletRequest request); + + /** + * Execute job + * @param jobInfo job info + * @param execUser execute user + * @return job execution id + * @throws ExchangisJobServerException error in generating or submitting the job + */ + String executeJob(ExchangisJobInfo jobInfo, String execUser) throws ExchangisJobServerException; + /** + * Kill job. + * + * @param jobExecutionId the job ExecutionId + */ + void killJob(String jobExecutionId) throws ExchangisJobServerException; + + /** + * Delete job. + * @param jobExecutionId the job ExecutionId + */ + void deleteJob(String jobExecutionId) throws ExchangisJobServerException; + + List<String> allTaskStatus(String jobExecutionId) throws ExchangisJobServerException; +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/service/JobFuncService.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/service/JobFuncService.java new file mode 100644 index 000000000..d2bc7da8d --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/service/JobFuncService.java @@ -0,0 +1,29 @@ +package com.webank.wedatasphere.exchangis.job.server.service; + +import com.webank.wedatasphere.exchangis.job.server.vo.JobFunction; + +import java.util.List; +import java.util.Map; + +/** + * @author davidhua + * 2020/4/21 + */ +public interface JobFuncService { + + /** + * Fetch map: function -> refer name + * @param tabName tab name + * @param functionType type + * @return function name map + */ + Map getFuncRefName(String tabName, JobFunction.FunctionType functionType); + + /** + * Fetch function list + * @param tabName tab name + * @param functionType type + * @return function list + */ + List getFunctions(String tabName, JobFunction.FunctionType functionType); +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/service/JobInfoService.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/service/JobInfoService.java new file mode 100644 index 000000000..aee170717 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/service/JobInfoService.java @@ -0,0 +1,110 @@ +package 
com.webank.wedatasphere.exchangis.job.server.service; + +import com.webank.wedatasphere.exchangis.common.pager.PageResult; +import com.webank.wedatasphere.exchangis.datasource.core.exception.ExchangisDataSourceException; +import com.webank.wedatasphere.exchangis.job.vo.ExchangisJobQueryVo; +import com.webank.wedatasphere.exchangis.job.vo.ExchangisJobVo; +import com.webank.wedatasphere.exchangis.job.server.exception.ExchangisJobServerException; + +import javax.servlet.http.HttpServletRequest; + +import java.util.List; + +/** + * The interface Exchangis job service. + * + * @author yuxin.yuan + * @since 2021-08-10 + */ +public interface JobInfoService { + + /** + * Create job. + * + * @param jobVo the exchangis job basic info + * @return the exchangis job basic info vo + */ + ExchangisJobVo createJob(ExchangisJobVo jobVo); + + /** + * Update job exchangis job basic info. + * + * @param jobVo the exchangis job basic info dto + * @return the exchangis job basic info vo + */ + ExchangisJobVo updateJob(ExchangisJobVo jobVo); + + /** + * Gets job list(return job basic info). + * + * @param queryVo query vo + * @return the job page result + */ + PageResult queryJobList(ExchangisJobQueryVo queryVo); + + /** + * Delete job + * @param id job id + */ + void deleteJob(Long id); + + /** + * Get exchangis job by id. + * TODO remove the request + * @param id the id + * @throws ExchangisJobServerException the exchangis job error exception + */ + ExchangisJobVo getJob(Long id, boolean basic); + + /** + * Get job by name and projectId + * @param jobName + * @param projectId + * @return + */ + List getByNameAndProjectId(String jobName, Long projectId); + + /** + * Get job by name and projectId + * @param jobName + * @param projectId + * @return + */ + List getByNameWithProjectId(String jobName, Long projectId); + + ExchangisJobVo getDecoratedJob(HttpServletRequest request, Long id) throws ExchangisJobServerException; + + /** + * Get all subJob list + * @param request + * @param projectId + * @return + * @throws ExchangisJobServerException + */ + List getSubJobList(HttpServletRequest request, Long projectId) throws ExchangisJobServerException; + + /** + * Update exchangis job config. + * + * @param jobVo the exchangis job config + * @return the exchangis job + */ + ExchangisJobVo updateJobConfig(ExchangisJobVo jobVo) throws ExchangisJobServerException; + + /** + * Update exchangis job content. + * + * @param jobVo the exchangis job content + * @return the exchangis job + */ + ExchangisJobVo updateJobContent(ExchangisJobVo jobVo) throws ExchangisJobServerException, ExchangisDataSourceException; + + /** + * Copy job exchangis job basic info. 
+ * + * @param jobVo the exchangis job basic info dto + * @return the exchangis job basic info vo + */ + ExchangisJobVo copyJob(ExchangisJobVo jobVo); + +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/service/JobTransformService.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/service/JobTransformService.java new file mode 100644 index 000000000..d24f4b371 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/service/JobTransformService.java @@ -0,0 +1,61 @@ +package com.webank.wedatasphere.exchangis.job.server.service; + +import com.webank.wedatasphere.exchangis.job.server.render.transform.TransformRequestVo; +import com.webank.wedatasphere.exchangis.job.server.render.transform.TransformSettings; +import com.webank.wedatasphere.exchangis.job.server.render.transform.processor.TransformProcessor; + +import java.util.Map; + +/** + * Transform service + */ +public interface JobTransformService { + + /** + * Get transform settings + * @param requestVo request vo + * @return type => settings + */ + Map getSettings(TransformRequestVo requestVo); + /** + * Save processor + * @param processor processor + * @return process code id + */ + Long saveProcessor(TransformProcessor processor); + + /** + * Save processor information + * @param processor processor + * @return process code id + */ + Long saveProcessorInfo(TransformProcessor processor); + /** + * Get processor with code content + * @param procCodeId process code id + * @return processor + */ + TransformProcessor getProcessorWithCode(String procCodeId); + + /** + * Get processor with code content + * @param procCodeId process code id + * @return processor + */ + TransformProcessor getProcessorInfo(String procCodeId); + + /** + * Update processor + * @param processor processor + * @return process code id + */ + Long updateProcessor(TransformProcessor processor); + + /** + * Update processor information + * @param processor processor + * @return process code id + */ + Long updateProcessorInfo(TransformProcessor processor); + +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/service/TaskExecuteService.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/service/TaskExecuteService.java new file mode 100644 index 000000000..7ac422a75 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/service/TaskExecuteService.java @@ -0,0 +1,35 @@ +package com.webank.wedatasphere.exchangis.job.server.service; + +import com.webank.wedatasphere.exchangis.job.exception.ExchangisOnEventException; +import com.webank.wedatasphere.exchangis.job.launcher.domain.LaunchedExchangisTask; +import com.webank.wedatasphere.exchangis.job.launcher.domain.task.TaskStatus; +import com.webank.wedatasphere.exchangis.job.server.execution.TaskExecutionListener; + +import java.util.Date; + +/** + * Task execute service + */ +public interface TaskExecuteService extends TaskExecutionListener { + + /** + * Update the task and its related job status + * @param task task + * @param status status + */ + void updateTaskStatus(LaunchedExchangisTask task, TaskStatus status, boolean updateJob) throws ExchangisOnEventException; + + void updateTaskProgress(LaunchedExchangisTask task, float progress) throws ExchangisOnEventException; + + /** + * Try to update the 
job progress by executionId + * @param jobExecutionId job execution id + */ + void updateJobProgress(String jobExecutionId, Date updateTime); + /** + * Try to update the job status by executionId + * @param jobExecutionId job execution id + * @param status status + */ + void updateJobStatus(String jobExecutionId, TaskStatus status, Date updateTime); +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/service/TaskGenerateService.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/service/TaskGenerateService.java new file mode 100644 index 000000000..5c9672cd7 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/service/TaskGenerateService.java @@ -0,0 +1,9 @@ +package com.webank.wedatasphere.exchangis.job.server.service; + +import com.webank.wedatasphere.exchangis.job.server.execution.generator.TaskGenerateListener; + +/** + * TaskGenerate Service + */ +public interface TaskGenerateService extends TaskGenerateListener { +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/service/TaskObserverService.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/service/TaskObserverService.java new file mode 100644 index 000000000..1aaa35dc1 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/service/TaskObserverService.java @@ -0,0 +1,25 @@ +package com.webank.wedatasphere.exchangis.job.server.service; + +import com.webank.wedatasphere.exchangis.job.launcher.domain.LaunchableExchangisTask; + +import java.util.List; + +/** + * Task observer service + */ +public interface TaskObserverService { + + /** + * Get the launchable task + * @param limitSize limit size + * @return list + */ + List onPublishLaunchableTask(int limitSize); + + /** + * Subscribe the launchable task + * @param task task + * @return boolean + */ + boolean subscribe(LaunchableExchangisTask task); +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/service/impl/DefaultJobExecuteService.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/service/impl/DefaultJobExecuteService.java new file mode 100644 index 000000000..6d02407a0 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/service/impl/DefaultJobExecuteService.java @@ -0,0 +1,387 @@ +package com.webank.wedatasphere.exchangis.job.server.service.impl; + +import com.webank.wedatasphere.exchangis.common.UserUtils; +import com.webank.wedatasphere.exchangis.datasource.core.utils.Json; +import com.webank.wedatasphere.exchangis.job.domain.ExchangisJobInfo; +import com.webank.wedatasphere.exchangis.job.launcher.AccessibleLauncherTask; +import com.webank.wedatasphere.exchangis.job.launcher.ExchangisTaskLaunchManager; +import com.webank.wedatasphere.exchangis.job.launcher.ExchangisTaskLauncher; +import com.webank.wedatasphere.exchangis.job.launcher.domain.LaunchableExchangisJob; +import com.webank.wedatasphere.exchangis.job.launcher.domain.LaunchableExchangisTask; +import com.webank.wedatasphere.exchangis.job.launcher.domain.LaunchedExchangisTask; +import com.webank.wedatasphere.exchangis.job.launcher.domain.task.TaskStatus; +import 
com.webank.wedatasphere.exchangis.job.launcher.entity.LaunchedExchangisJobEntity; +import com.webank.wedatasphere.exchangis.job.launcher.entity.LaunchedExchangisTaskEntity; +import com.webank.wedatasphere.exchangis.job.launcher.exception.ExchangisTaskLaunchException; +import com.webank.wedatasphere.exchangis.job.log.LogQuery; +import com.webank.wedatasphere.exchangis.job.log.LogResult; +import com.webank.wedatasphere.exchangis.job.server.mapper.LaunchableTaskDao; +import com.webank.wedatasphere.exchangis.job.server.mapper.LaunchedJobDao; +import com.webank.wedatasphere.exchangis.job.server.mapper.LaunchedTaskDao; +import com.webank.wedatasphere.exchangis.job.server.exception.ExchangisJobServerException; +import com.webank.wedatasphere.exchangis.job.server.exception.ExchangisSchedulerException; +import com.webank.wedatasphere.exchangis.job.server.exception.ExchangisTaskGenerateException; +import com.webank.wedatasphere.exchangis.job.server.execution.DefaultTaskExecution; +import com.webank.wedatasphere.exchangis.job.server.execution.TaskExecution; +import com.webank.wedatasphere.exchangis.job.server.execution.generator.TaskGenerator; +import com.webank.wedatasphere.exchangis.job.server.execution.scheduler.tasks.GenerationSchedulerTask; +import com.webank.wedatasphere.exchangis.job.server.log.JobLogService; +import com.webank.wedatasphere.exchangis.job.server.metrics.ExchangisMetricsVo; +import com.webank.wedatasphere.exchangis.job.server.metrics.converter.MetricConverterFactory; +import com.webank.wedatasphere.exchangis.job.server.metrics.converter.MetricsConverter; +import com.webank.wedatasphere.exchangis.job.server.service.JobExecuteService; +import com.webank.wedatasphere.exchangis.job.server.vo.*; +import org.apache.commons.lang.StringUtils; +import org.apache.linkis.common.conf.CommonVars; +import org.modelmapper.ModelMapper; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; +import org.springframework.transaction.annotation.Transactional; + +import javax.annotation.PostConstruct; +import javax.annotation.Resource; +import javax.servlet.http.HttpServletRequest; +import java.util.*; + +import static com.webank.wedatasphere.exchangis.job.exception.ExchangisJobExceptionCode.*; + +@Service +public class DefaultJobExecuteService implements JobExecuteService { + private final static Logger LOG = LoggerFactory.getLogger(DefaultJobExecuteService.class); + + private static final CommonVars TASK_LOG_IGNORE_KEYS = CommonVars.apply( + "wds.exchangis.job.task.log.ignore-keys", + "service.DefaultManagerService,info.DefaultNodeHealthyInfoManager"); + + @Autowired + private LaunchedTaskDao launchedTaskDao; + + @Autowired + private LaunchedJobDao launchedJobDao; + + @Autowired + private LaunchableTaskDao launchableTaskDao; + + @Autowired + private ModelMapper modelMapper; + + /** + * Task generator + */ + @Resource + private TaskGenerator taskGenerator; + + /** + * Task execution + */ + @Resource + private TaskExecution taskExecution; + + @Autowired + private JobLogService jobLogService; + + + /** + * Launch manager + */ + @Resource + private ExchangisTaskLaunchManager launchManager; + + /** + * Metrics converter factory + */ + @Resource + private MetricConverterFactory metricConverterFactory; + + /** + * Log ignore key set + */ + private final Set logIgnoreKeySet = new HashSet<>(); + + @PostConstruct + public void init(){ + String defaultIgnoreKeys = 
TASK_LOG_IGNORE_KEYS.getValue(); + if (StringUtils.isNotBlank(defaultIgnoreKeys)){ + logIgnoreKeySet.addAll(Arrays.asList(defaultIgnoreKeys.split(","))); + } + } + @Override + public List<ExchangisJobTaskVo> getExecutedJobTaskList(String jobExecutionId) throws ExchangisJobServerException{ + List<LaunchedExchangisTaskEntity> launchedExchangisTaskEntities = launchedTaskDao.selectTaskListByJobExecutionId(jobExecutionId); + List<ExchangisJobTaskVo> jobTaskList = new ArrayList<>(); + if(launchedExchangisTaskEntities != null) { + try { + launchedExchangisTaskEntities.forEach(taskEntity -> { + ExchangisJobTaskVo exchangisTaskVO = modelMapper.map(taskEntity, ExchangisJobTaskVo.class); + jobTaskList.add(exchangisTaskVO); + } + ); + } catch (Exception e) { + LOG.error("Exception happened while mapping task list to vo (获取task列表映射至页面时出错,请校验任务信息), " + "message: " + e.getMessage(), e); + } + } + + return jobTaskList; + } + + @Override + public ExchangisJobProgressVo getExecutedJobProgressInfo(String jobExecutionId) throws ExchangisJobServerException { + LaunchedExchangisJobEntity launchedExchangisJobEntity = launchedJobDao.searchLaunchedJob(jobExecutionId); + if (launchedExchangisJobEntity == null) { + throw new ExchangisJobServerException(31100, "Get jobProgress information is null(获取job进度信息为空), " + "jobExecutionId = " + jobExecutionId); + } + ExchangisJobProgressVo jobProgressVo = null; + try { + jobProgressVo = modelMapper.map(launchedExchangisJobEntity, ExchangisJobProgressVo.class); + List<LaunchedExchangisTaskEntity> launchedExchangisTaskEntity = launchedTaskDao.selectTaskListByJobExecutionId(jobExecutionId); + ExchangisJobProgressVo finalJobProgressVo = jobProgressVo; + launchedExchangisTaskEntity.forEach(taskEntity -> { + finalJobProgressVo.addTaskProgress(new ExchangisJobProgressVo.ExchangisTaskProgressVo(taskEntity.getTaskId(), taskEntity.getName(), taskEntity.getStatus(), taskEntity.getProgress())); + }); + } catch (Exception e){ + LOG.error("Exception happened while getting job and task progress, " + "[jobExecutionId = " + jobExecutionId + "]", e); + } + return jobProgressVo; + } + + @Override + public ExchangisJobProgressVo getJobStatus(String jobExecutionId) throws ExchangisJobServerException{ + LaunchedExchangisJobEntity launchedExchangisJobEntity = launchedJobDao.searchLaunchedJob(jobExecutionId); + ExchangisJobProgressVo jobProgressVo = null; + try { + jobProgressVo = modelMapper.map(launchedExchangisJobEntity, ExchangisJobProgressVo.class); + } catch (Exception e) { + LOG.error("Exception happened while getting job status, " + "[jobExecutionId = " + jobExecutionId + "](获取作业状态错误)", e); + } + + boolean allTaskStatus = false; + + // Fail fast instead of relying on an assert that is disabled at runtime + if (jobProgressVo == null) { + throw new ExchangisJobServerException(31100, "Job progress vo is null (作业进度信息为空), jobExecutionId = " + jobExecutionId); + } + if(TaskStatus.isCompleted(jobProgressVo.getStatus())) { + List<String> taskStatusList = launchedTaskDao.getTaskStatusList(jobExecutionId); + allTaskStatus = taskStatusList.isEmpty(); + if (!allTaskStatus){ + allTaskStatus = taskStatusList.stream().allMatch( status -> + StringUtils.isNotBlank(status) && TaskStatus.isCompleted(TaskStatus.valueOf(status))); + } + } + jobProgressVo.setAllTaskStatus(allTaskStatus); + return jobProgressVo; + } + + @Override + public void killJob(String jobExecutionId) throws ExchangisJobServerException{ + Calendar calendar = Calendar.getInstance(); + launchedJobDao.upgradeLaunchedJobStatus(jobExecutionId, TaskStatus.Cancelled.name(), calendar.getTime()); + }
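The queries above lean on ModelMapper to copy entity fields onto VOs by matching names. A tiny standalone illustration of that mapping style (the ModelMapper `map(Object, Class)` call is the library's published API; the `TaskEntity`/`TaskVo` classes are made up for the example):

```java
import org.modelmapper.ModelMapper;

public class ModelMapperSketch {
    static class TaskEntity { // hypothetical source entity
        private final String name = "task-1";
        private final double progress = 0.5d;
        public String getName() { return name; }
        public double getProgress() { return progress; }
    }
    static class TaskVo {     // hypothetical target vo
        private String name;
        private double progress;
        public void setName(String name) { this.name = name; }
        public void setProgress(double progress) { this.progress = progress; }
        @Override public String toString() { return name + " @ " + progress; }
    }

    public static void main(String[] args) {
        // Fields are matched by name and copied onto a freshly instantiated target
        TaskVo vo = new ModelMapper().map(new TaskEntity(), TaskVo.class);
        System.out.println(vo); // task-1 @ 0.5
    }
}
```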
@Override + public ExchangisLaunchedTaskMetricsVo getLaunchedTaskMetrics(String taskId, String jobExecutionId) throws ExchangisJobServerException { + LaunchedExchangisTaskEntity launchedExchangisTaskEntity = launchedTaskDao.getLaunchedTaskMetrics(jobExecutionId, taskId); + ExchangisLaunchedTaskMetricsVo exchangisLaunchedTaskVo = new ExchangisLaunchedTaskMetricsVo(); + exchangisLaunchedTaskVo.setTaskId(launchedExchangisTaskEntity.getTaskId()); + exchangisLaunchedTaskVo.setName(launchedExchangisTaskEntity.getName()); + exchangisLaunchedTaskVo.setStatus(launchedExchangisTaskEntity.getStatus().name()); + MetricsConverter<ExchangisMetricsVo> metricsConverter = metricConverterFactory.getOrCreateMetricsConverter(launchedExchangisTaskEntity.getEngineType()); + if (Objects.nonNull(metricsConverter)){ + try { + exchangisLaunchedTaskVo.setMetrics(metricsConverter.convert(launchedExchangisTaskEntity.getMetricsMap())); + }catch (ExchangisJobServerException e){ + // Log the problem in converting the metrics vo + LOG.warn("Problem occurred in convert of metrics vo", e); + } + } + return exchangisLaunchedTaskVo; + } + + @Override + public ExchangisCategoryLogVo getJobLogInfo(String jobExecutionId, LogQuery logQuery) throws ExchangisJobServerException { + LaunchedExchangisJobEntity launchedExchangisJob = this.launchedJobDao.searchLogPathInfo(jobExecutionId); + LogResult logResult = jobLogService.logsFromPageAndPath(launchedExchangisJob.getLogPath(), logQuery); + return resultToCategoryLog(logQuery, logResult, launchedExchangisJob.getStatus()); + } + + @Override + public ExchangisCategoryLogVo getTaskLogInfo(String taskId, String jobExecutionId, LogQuery logQuery) + throws ExchangisJobServerException, ExchangisTaskLaunchException { + LaunchedExchangisTaskEntity launchedTaskEntity = this.launchedTaskDao.getLaunchedTaskEntity(taskId); + if (!logIgnoreKeySet.isEmpty()){ + String ignoreKeys = logQuery.getIgnoreKeywords(); + if (StringUtils.isNotBlank(ignoreKeys)){ + Set<String> ignores = new HashSet<>(Arrays.asList(ignoreKeys.split(","))); + ignores.addAll(logIgnoreKeySet); + logQuery.setIgnoreKeywords(StringUtils.join(ignores, ",")); + } else { + logQuery.setIgnoreKeywords(StringUtils.join(logIgnoreKeySet, ",")); + } + } + if (Objects.isNull(launchedTaskEntity)){ + return resultToCategoryLog(logQuery, new LogResult(0, false, new ArrayList<>()), TaskStatus.Inited); + } + if (StringUtils.isBlank(launchedTaskEntity.getLinkisJobId())){ + TaskStatus status = launchedTaskEntity.getStatus(); + // Means that the task is not ready or the task submission failed + return resultToCategoryLog(logQuery, new LogResult(0, TaskStatus.isCompleted(status), new ArrayList<>()), status); + } + + // Construct the launchedExchangisTask + LaunchedExchangisTask launchedTask = new LaunchedExchangisTask(); + launchedTask.setLinkisJobId(launchedTaskEntity.getLinkisJobId()); + launchedTask.setLinkisJobInfo(launchedTaskEntity.getLinkisJobInfo()); + launchedTask.setTaskId(launchedTaskEntity.getTaskId()); + launchedTask.setExecuteUser(launchedTaskEntity.getExecuteUser()); + launchedTask.setEngineType(launchedTaskEntity.getEngineType()); + ExchangisTaskLauncher taskLauncher = + this.launchManager.getTaskLauncher(DefaultTaskExecution.DEFAULT_LAUNCHER_NAME); + if (Objects.isNull(taskLauncher)){ + throw new ExchangisJobServerException(LOG_OP_ERROR.getCode(), "Unable to find the suitable launcher for [task: " + taskId + ", engine type: " + + launchedTask.getEngineType() +"]", null); + } + AccessibleLauncherTask accessibleLauncherTask = taskLauncher.launcherTask(launchedTask); + return resultToCategoryLog(logQuery, accessibleLauncherTask.queryLogs(logQuery), launchedTaskEntity.getStatus()); + }
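`getTaskLogInfo` above merges the configured ignore keys into whatever the caller passed, deduplicating via a set before re-joining. The same idea in isolation (plain JDK, no Exchangis types; the default keywords are taken from the `wds.exchangis.job.task.log.ignore-keys` default in this PR):

```java
import java.util.Arrays;
import java.util.LinkedHashSet;
import java.util.Set;

public class IgnoreKeywordsMergeSketch {
    /** Union of the caller's comma-separated keywords and the server-side defaults, without duplicates. */
    static String mergeIgnoreKeywords(String callerKeywords, Set<String> defaults) {
        Set<String> merged = new LinkedHashSet<>(defaults);
        if (callerKeywords != null && !callerKeywords.trim().isEmpty()) {
            merged.addAll(Arrays.asList(callerKeywords.split(",")));
        }
        return String.join(",", merged);
    }

    public static void main(String[] args) {
        Set<String> defaults = new LinkedHashSet<>(
                Arrays.asList("service.DefaultManagerService", "info.DefaultNodeHealthyInfoManager"));
        System.out.println(mergeIgnoreKeywords("myNoise,service.DefaultManagerService", defaults));
        // -> service.DefaultManagerService,info.DefaultNodeHealthyInfoManager,myNoise
    }
}
```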
@Override + public List<ExchangisLaunchedJobListVo> getExecutedJobList(String jobExecutionId, String jobName, String status, + Long launchStartTime, Long launchEndTime, int current, int size, HttpServletRequest request) throws ExchangisJobServerException{ + if (current <= 0) { + current = 1; + } + if (size <= 0) { + size = 10; + } + int start = (current - 1) * size; + List<ExchangisLaunchedJobListVo> jobList = new ArrayList<>(); + Date startTime = launchStartTime == null ? null : new Date(launchStartTime); + Date endTime = launchEndTime == null ? null : new Date(launchEndTime); + List<LaunchedExchangisJobEntity> jobEntitylist = launchedJobDao.getAllLaunchedJob(jobExecutionId, jobName, status, startTime, endTime, start, size, UserUtils.getLoginUser(request)); + //LOG.info("ExecutedJobList information: " + jobExecutionId + jobName + status + launchStartTime + launchEndTime + current + size); + if(jobEntitylist != null) { + try { + for (LaunchedExchangisJobEntity launchedExchangisJobEntity : jobEntitylist) { + ExchangisLaunchedJobListVo exchangisJobVo = modelMapper.map(launchedExchangisJobEntity, ExchangisLaunchedJobListVo.class); + if (launchedExchangisJobEntity.getExchangisJobEntity() == null || launchedExchangisJobEntity.getExchangisJobEntity().getSource() == null) { + exchangisJobVo.setExecuteNode("-"); + } else { + Map sourceObject = Json.fromJson(launchedExchangisJobEntity.getExchangisJobEntity().getSource(), Map.class); + if (Objects.nonNull(sourceObject)){ + exchangisJobVo.setExecuteNode(String.valueOf(sourceObject + .getOrDefault("executeNode", "-"))); + } + } + List<LaunchedExchangisTaskEntity> launchedExchangisTaskEntities = launchedTaskDao.selectTaskListByJobExecutionId(launchedExchangisJobEntity.getJobExecutionId()); + if (launchedExchangisTaskEntities == null) { + exchangisJobVo.setFlow((long) 0); + } else { + double flows = 0; + int taskNum = launchedExchangisTaskEntities.size(); + for (LaunchedExchangisTaskEntity launchedExchangisTaskEntity : launchedExchangisTaskEntities) { + MetricsConverter<ExchangisMetricsVo> metricsConverter = metricConverterFactory.getOrCreateMetricsConverter(launchedExchangisTaskEntity.getEngineType()); + ExchangisLaunchedTaskMetricsVo exchangisLaunchedTaskVo = new ExchangisLaunchedTaskMetricsVo(); + // Skip tasks without metrics or without a converter for their engine type + if (metricsConverter == null || launchedExchangisTaskEntity.getMetricsMap() == null) { + continue; + } + exchangisLaunchedTaskVo.setMetrics(metricsConverter.convert(launchedExchangisTaskEntity.getMetricsMap())); + //Map flowMap = (Map) launchedExchangisTaskEntity.getMetricsMap().get("traffic"); + //flows += flowMap == null ? 0 : Integer.parseInt(flowMap.get("flow").toString()); + flows += exchangisLaunchedTaskVo.getMetrics().getTraffic().getFlow(); + } + exchangisJobVo.setFlow(taskNum == 0 ? 0 : (long) (flows / taskNum)); + } + jobList.add(exchangisJobVo); + } + } catch (Exception e) { + LOG.error("Exception happened while mapping job list to vo (获取job列表映射至页面时出错,请校验任务信息), " + "message: " + e.getMessage(), e); + } + } + return jobList; + }
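The job list above reports a single flow figure per job by averaging per-task traffic. The aggregation distilled on its own (the `TaskTraffic` record is a hypothetical stand-in for the metrics VO; real values come from the task metrics map):

```java
import java.util.Arrays;
import java.util.List;

public class FlowAverageSketch {
    /** Hypothetical per-task traffic reading, standing in for the metrics VO. */
    record TaskTraffic(long flow) {}

    /** Average flow across tasks, 0 when the job has no tasks (matches the server-side fallback). */
    static long averageFlow(List<TaskTraffic> tasks) {
        return tasks.isEmpty() ? 0L
                : (long) (tasks.stream().mapToLong(TaskTraffic::flow).sum() / (double) tasks.size());
    }

    public static void main(String[] args) {
        System.out.println(averageFlow(Arrays.asList(new TaskTraffic(100), new TaskTraffic(50)))); // 75
        System.out.println(averageFlow(List.of()))); // 0
    }
}
```

(Note the server divides by the total task count even when some tasks contributed no metrics, so jobs with partially missing metrics are averaged down; whether that is intended is worth confirming.)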
@Override + public int count(String jobExecutionId, String jobName, String status, Long launchStartTime, Long launchEndTime, HttpServletRequest request) { + Date startTime = launchStartTime == null ? null : new Date(launchStartTime); + Date endTime = launchEndTime == null ? null : new Date(launchEndTime); + + return launchedJobDao.count(jobExecutionId, jobName, status, startTime, endTime, UserUtils.getLoginUser(request)); + } + + @Override + public String executeJob(ExchangisJobInfo jobInfo, String execUser) throws ExchangisJobServerException { + // Build generator scheduler task + GenerationSchedulerTask schedulerTask = null; + try { + schedulerTask = new GenerationSchedulerTask(taskGenerator, jobInfo); + } catch (ExchangisTaskGenerateException e) { + throw new ExchangisJobServerException(JOB_EXCEPTION_CODE.getCode(), "Exception in initializing the launchable job", e); + } + // The scheduler task id is execution id + String jobExecutionId = schedulerTask.getId(); + // Use exec user as tenancy + schedulerTask.setTenancy(execUser); + LOG.info("Submit the generation scheduler task: [{}] for job: [{}], tenancy: [{}] to TaskExecution", jobExecutionId, jobInfo.getId(), execUser); + try { + taskExecution.submit(schedulerTask); + } catch (ExchangisSchedulerException e) { + throw new ExchangisJobServerException(JOB_EXCEPTION_CODE.getCode(), "Exception in submitting to taskExecution", e); + } + return jobExecutionId; + } + + /** + * Convert the log result to category log + * @param logQuery log query + * @param logResult log result + * @param status status + * @return category log + */ + private ExchangisCategoryLogVo resultToCategoryLog(LogQuery logQuery, LogResult logResult, TaskStatus status){ + ExchangisCategoryLogVo categoryLogVo = new ExchangisCategoryLogVo(); + boolean noLogs = logResult.getLogs().isEmpty(); + for (int i = 0; i < logResult.getLogs().size(); i++) { + if (logResult.getLogs().get(i).contains("password")) { + // Mask the line; do not print the sensitive content itself into the server log + LOG.info("Sensitive information found in log line {}, masked", i); + logResult.getLogs().set(i, "----"); + } + } + if (Objects.nonNull(logQuery.getLastRows())){ + logResult.setEnd(true); + }else if (noLogs || logQuery.isEnableTail()){ +// logResult.getLogs().add("<>"); + if (TaskStatus.isCompleted(status)){ + logResult.setEnd(true); +// categoryLogVo.setIsEnd(true); + } + } + categoryLogVo.newCategory("error", log -> log.contains("ERROR") || noLogs); + categoryLogVo.newCategory("warn", log -> log.contains("WARN") || noLogs); + categoryLogVo.newCategory("info", log -> log.contains("INFO") || noLogs); + categoryLogVo.processLogResult(logResult, false); + if (!noLogs) { + categoryLogVo.getLogs().put("all", StringUtils.join(logResult.getLogs(), "\n")); + } + return categoryLogVo; + } + + @Transactional + @Override + public void deleteJob(String jobExecutionId) throws ExchangisJobServerException { + List<String> taskStatusList = launchedTaskDao.getTaskStatusList(jobExecutionId); + if(taskStatusList.contains("Inited") || taskStatusList.contains("Scheduled") || taskStatusList.contains("Running") || taskStatusList.contains("WaitForRetry")){ + throw new ExchangisJobServerException(JOB_EXCEPTION_CODE.getCode(), "Cannot delete a job that is not finished (不能删除该作业)"); + }else { + launchedTaskDao.deleteTask(jobExecutionId); + launchedJobDao.deleteJob(jobExecutionId); + } + } + + @Override + public List<String> allTaskStatus(String jobExecutionId) throws ExchangisJobServerException { + return launchedTaskDao.getTaskStatusList(jobExecutionId); + } + +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/service/impl/DefaultJobInfoService.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/service/impl/DefaultJobInfoService.java new file 
mode 100644 index 000000000..3acfbf5e1 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/service/impl/DefaultJobInfoService.java @@ -0,0 +1,293 @@ +package com.webank.wedatasphere.exchangis.job.server.service.impl; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ObjectNode; +import com.github.pagehelper.PageHelper; +import com.github.pagehelper.PageInfo; +import com.webank.wedatasphere.exchangis.common.pager.PageResult; +import com.webank.wedatasphere.exchangis.dao.domain.ExchangisJobDsBind; +import com.webank.wedatasphere.exchangis.datasource.core.exception.ExchangisDataSourceException; +import com.webank.wedatasphere.exchangis.datasource.core.ui.viewer.ExchangisDataSourceUIViewer; +import com.webank.wedatasphere.exchangis.datasource.core.utils.Json; +import com.webank.wedatasphere.exchangis.datasource.core.vo.ExchangisJobInfoContent; +import com.webank.wedatasphere.exchangis.datasource.service.ExchangisDataSourceService; +import com.webank.wedatasphere.exchangis.job.domain.ExchangisJobEntity; +import com.webank.wedatasphere.exchangis.job.server.exception.ExchangisJobServerException; +import com.webank.wedatasphere.exchangis.job.server.mapper.ExchangisJobEntityDao; +import com.webank.wedatasphere.exchangis.job.server.service.JobInfoService; +import com.webank.wedatasphere.exchangis.job.vo.ExchangisJobQueryVo; +import com.webank.wedatasphere.exchangis.job.vo.ExchangisJobVo; +import org.apache.commons.lang.StringUtils; +import org.apache.linkis.common.utils.JsonUtils; +import org.apache.linkis.manager.label.utils.LabelUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; +import org.springframework.transaction.annotation.Transactional; + +import javax.annotation.Resource; +import javax.servlet.http.HttpServletRequest; +import java.util.*; +import java.util.stream.Collectors; + +/** + * Default implement + */ +@Service +public class DefaultJobInfoService implements JobInfoService { + + private static final Logger LOG = LoggerFactory.getLogger(DefaultJobInfoService.class); + + @Autowired + private ExchangisJobDsBindServiceImpl exchangisJobDsBindService; + + @Autowired + private ExchangisDataSourceService exchangisDataSourceService; + + @Resource + private ExchangisJobEntityDao jobEntityDao; + + @Resource + private JobInfoService jobInfoService; + + @Override + @Transactional(rollbackFor = Exception.class) + public ExchangisJobVo createJob(ExchangisJobVo jobVo) { + LOG.info("Sqoop job labels is: {}", jobVo.getJobLabels()); + ExchangisJobEntity jobEntity = new ExchangisJobEntity(); + jobEntity.setProjectId(jobVo.getProjectId()); + jobEntity.setJobType(jobVo.getJobType()); + jobEntity.setEngineType(jobVo.getEngineType()); + jobEntity.setJobLabel(jobVo.getJobLabels()); + jobEntity.setJobLabels(jobVo.getJobLabels()); + jobEntity.setName(jobVo.getJobName()); + jobEntity.setJobDesc(jobVo.getJobDesc()); + jobEntity.setExecuteUser(jobVo.getProxyUser()); + jobEntity.setJobParams(jobVo.getJobParams()); + jobEntity.setCreateUser(jobVo.getCreateUser()); + jobEntity.setCreateTime(Calendar.getInstance().getTime()); + jobEntity.setSource(Json.toJson(jobVo.getSource(), null)); + //jobEntity.setJobContent(jobVo.getContent()); + 
jobEntity.setModifyUser(jobVo.getModifyUser()); + LOG.info("Sqoop job Entity labels is: {}", jobEntity.getJobLabel()); + //Map contentVo = BDPJettyServerHelper.gson().fromJson(jobVo.getContent(), Map.class); + LOG.info("Sqoop job content is: {}, Modify user is: {}, jobType is: {}", jobVo.getContent(), jobEntity.getExecuteUser(), jobEntity.getJobType()); + if(jobVo.getContent() != null) { + jobEntity.setJobContent(jobVo.getContent()); + LOG.info("Sqoop job content is: {}, executor: {}", jobEntity.getJobContent(), jobEntity.getExecuteUser()); + } + jobEntityDao.addJobEntity(jobEntity); + jobVo.setId(jobEntity.getId()); + jobVo.setCreateTime(jobEntity.getCreateTime()); + return jobVo; + } + + @Override + @Transactional(rollbackFor = Exception.class) + public ExchangisJobVo updateJob(ExchangisJobVo jobVo) { + ExchangisJobEntity jobEntity = new ExchangisJobEntity(); + jobEntity.setId(jobVo.getId()); + jobEntity.setJobType(jobVo.getJobType()); + jobEntity.setEngineType(jobVo.getEngineType()); + jobEntity.setJobLabel(jobVo.getJobLabels()); + jobEntity.setName(jobVo.getJobName()); + jobEntity.setJobLabels(jobVo.getJobLabels()); + jobEntity.setJobDesc(jobVo.getJobDesc()); + jobEntity.setLastUpdateTime(Calendar.getInstance().getTime()); + jobEntity.setModifyUser(jobVo.getModifyUser()); + jobEntityDao.upgradeBasicInfo(jobEntity); + return jobVo; + } + + @Override + public PageResult queryJobList(ExchangisJobQueryVo queryVo){ + PageHelper.startPage(queryVo.getPage(), queryVo.getPageSize()); + try{ + List jobEntities = this.jobEntityDao.queryPageList(queryVo); + PageInfo pageInfo = new PageInfo<>(jobEntities); + List infoList = jobEntities + .stream().map(ExchangisJobVo::new).collect(Collectors.toList()); + PageResult pageResult = new PageResult<>(); + pageResult.setList(infoList); + pageResult.setTotal(pageInfo.getTotal()); + return pageResult; + }finally { + PageHelper.clearPage(); + } + } + + @Override + @Transactional(rollbackFor = Exception.class) + public void deleteJob(Long id) { + this.jobEntityDao.deleteJobEntity(id); + this.exchangisJobDsBindService.updateJobDsBind(id, new ArrayList<>()); + } + + @Override + public ExchangisJobVo getJob(Long id, boolean basic) { + ExchangisJobEntity exchangisJob = basic ? this.jobEntityDao.getBasicInfo(id) : this.jobEntityDao.getDetail(id); + if (Objects.isNull(exchangisJob)) { + return null; + } + ExchangisJobVo jobVo = new ExchangisJobVo(exchangisJob); + jobVo.setProjectId(exchangisJob.getProjectId()); + if (exchangisJob != null && StringUtils.isNotBlank(exchangisJob.getJobContent())) { + jobVo.setContent(exchangisJob.getJobContent()); + jobVo.setSource(Objects.nonNull(exchangisJob.getSource())? + Json.fromJson(exchangisJob.getSource(), Map.class, String.class, Object.class) : new HashMap<>()); + } + return jobVo; + } + + @Override + public List getByNameAndProjectId(String jobName, Long projectId) { + List exchangisJobs = this.jobEntityDao.getByNameAndProjectId(jobName, projectId); + List exchangisJobVos = new ArrayList<>(); + for(ExchangisJobEntity exchangisJob : exchangisJobs){ + ExchangisJobVo jobVo = new ExchangisJobVo(exchangisJob); + if (exchangisJob != null && StringUtils.isNotBlank(exchangisJob.getJobContent())) { + jobVo.setContent(exchangisJob.getJobContent()); + jobVo.setSource(Objects.nonNull(exchangisJob.getSource())? 
+ Json.fromJson(exchangisJob.getSource(), Map.class, String.class, Object.class) : new HashMap<>()); + } + exchangisJobVos.add(jobVo); + } + + return exchangisJobVos; + } + + @Override + public List getByNameWithProjectId(String jobName, Long projectId) { + List exchangisJobs = this.jobEntityDao.getByNameWithProjectId(jobName, projectId); + List exchangisJobVos = new ArrayList<>(); + for(ExchangisJobEntity exchangisJob : exchangisJobs){ + ExchangisJobVo jobVo = new ExchangisJobVo(exchangisJob); + if (exchangisJob != null && StringUtils.isNotBlank(exchangisJob.getJobContent())) { + jobVo.setContent(exchangisJob.getJobContent()); + jobVo.setSource(Objects.nonNull(exchangisJob.getSource())? + Json.fromJson(exchangisJob.getSource(), Map.class, String.class, Object.class) : new HashMap<>()); + } + exchangisJobVos.add(jobVo); + } + + return exchangisJobVos; + } + + @Override + public ExchangisJobVo getDecoratedJob(HttpServletRequest request, Long id) throws ExchangisJobServerException { + ExchangisJobEntity exchangisJob = this.jobEntityDao.getDetail(id); + ExchangisJobVo jobVo = new ExchangisJobVo(exchangisJob); + if (exchangisJob != null && StringUtils.isNotBlank(exchangisJob.getJobContent())) { + // Rebuild the job content with ui configuration + List jobDataSourceUIs = exchangisDataSourceService.getJobDataSourceUIs(request, id); + ObjectMapper objectMapper = JsonUtils.jackson(); + try { + String content = objectMapper.writeValueAsString(jobDataSourceUIs); + JsonNode contentJsonNode = objectMapper.readTree(content); + ObjectNode objectNode = objectMapper.createObjectNode(); + objectNode.set("subJobs", contentJsonNode); + jobVo.setContent(objectNode.toString()); + jobVo.setSource(Objects.nonNull(exchangisJob.getSource())? + Json.fromJson(exchangisJob.getSource(), Map.class, String.class, Object.class) : new HashMap<>()); + } catch (JsonProcessingException e) { + throw new ExchangisJobServerException(31100, + "Fail to rebuild the job content with ui (渲染任务内容失败)", e); + } + } + return jobVo; + } + + @Override + public List getSubJobList(HttpServletRequest request, Long projectId) throws ExchangisJobServerException{ + List exchangisJobList = this.jobEntityDao.getDetailList(projectId); + List exchangisJobVos = new ArrayList<>(); + if(!exchangisJobList.isEmpty()) { + for(ExchangisJobEntity exchangisJob : exchangisJobList){ + ExchangisJobVo jobVo = new ExchangisJobVo(exchangisJob); + if(StringUtils.isNotBlank(exchangisJob.getJobContent())){ + List jobDataSourceUIs = exchangisDataSourceService.getJobDataSourceUIs(request, exchangisJob.getId()); + ObjectMapper objectMapper = JsonUtils.jackson(); + try { + String content = objectMapper.writeValueAsString(jobDataSourceUIs); + JsonNode contentJsonNode = objectMapper.readTree(content); + ObjectNode objectNode = objectMapper.createObjectNode(); + objectNode.set("subJobs", contentJsonNode); + jobVo.setContent(objectNode.toString()); + jobVo.setSource(Objects.nonNull(exchangisJob.getSource())? 
+ Json.fromJson(exchangisJob.getSource(), Map.class, String.class, Object.class) : new HashMap<>()); + } catch (JsonProcessingException e) { + throw new ExchangisJobServerException(31100, + "Fail to rebuild the job content with ui (渲染任务内容失败)", e); + } + } + exchangisJobVos.add(jobVo); + } + } + return exchangisJobVos; + } + + @Override + @Transactional(rollbackFor = Exception.class) + public ExchangisJobVo updateJobConfig(ExchangisJobVo jobVo) { + ExchangisJobEntity jobEntity = this.jobEntityDao.getBasicInfo(jobVo.getId()); + Map<String, Object> sourceMap = StringUtils.isNotBlank(jobEntity.getSource())? + Json.fromJson(jobEntity.getSource(), Map.class, String.class, Object.class) : null; + jobEntity.setExecuteUser(jobVo.getProxyUser()); + jobEntity.setJobParams(jobVo.getJobParams()); + if (Objects.isNull(sourceMap)){ + sourceMap = new HashMap<>(); + } + sourceMap.putAll(jobVo.getSource()); + jobEntity.setSource(Json.toJson(sourceMap, null)); + jobEntity.setModifyUser(jobVo.getModifyUser()); + jobEntity.setLastUpdateTime(Calendar.getInstance().getTime()); + this.jobEntityDao.upgradeConfig(jobEntity); + return jobVo; + } + + @Override + @Transactional(rollbackFor = Exception.class) + public ExchangisJobVo updateJobContent(ExchangisJobVo jobVo) throws ExchangisJobServerException, ExchangisDataSourceException { + Long jobId = jobVo.getId(); + ExchangisJobEntity exchangisJob = this.jobEntityDao.getDetail(jobId); + exchangisJob.setJobContent(jobVo.getContent()); + final String engine = exchangisJob.getEngineType(); + // Check for duplicated sub job names + List<ExchangisJobInfoContent> content = LabelUtils.Jackson.fromJson(exchangisJob.getJobContent(), + List.class, ExchangisJobInfoContent.class); + long count = content.stream().map(ExchangisJobInfoContent::getSubJobName).distinct().count(); + if (count < content.size()) { + throw new ExchangisJobServerException(31101, "Duplicate sub job name already exists(存在重复子任务名)"); + } + List<ExchangisJobDsBind> dsBinds = new ArrayList<>(content.size()); + // Check whether the engine supports this data channel + for (int i = 0; i < content.size(); i++) { + ExchangisJobInfoContent task = content.get(i); + String sourceType = task.getDataSources().getSourceId().split("\\.")[0]; + String sinkType = task.getDataSources().getSinkId().split("\\.")[0]; + this.exchangisDataSourceService.checkDSSupportDegree(engine, sourceType, sinkType); + ExchangisJobDsBind dsBind = new ExchangisJobDsBind(); + dsBind.setJobId(jobVo.getId()); + dsBind.setTaskIndex(i); + dsBind.setSourceDsId(Long.parseLong(task.getDataSources().getSourceId().split("\\.")[1])); + dsBind.setSinkDsId(Long.parseLong(task.getDataSources().getSinkId().split("\\.")[1])); + dsBinds.add(dsBind); + } + exchangisJob.setModifyUser(jobVo.getModifyUser()); + exchangisJob.setLastUpdateTime(jobVo.getModifyTime()); + this.exchangisJobDsBindService.updateJobDsBind(jobId, dsBinds); + this.jobEntityDao.upgradeContent(exchangisJob); + return jobVo; + } + + @Override + @Transactional(rollbackFor = Exception.class) + public ExchangisJobVo copyJob(ExchangisJobVo jobVo) { + ExchangisJobVo job = jobInfoService.getJob(jobVo.getId(), false); + ExchangisJobVo newJob = jobInfoService.createJob(job); + return newJob; + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/service/impl/DefaultJobTransformService.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/service/impl/DefaultJobTransformService.java new file mode 100644 index 000000000..40cbbf68d --- /dev/null +++
b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/service/impl/DefaultJobTransformService.java @@ -0,0 +1,216 @@ +package com.webank.wedatasphere.exchangis.job.server.service.impl; + + +import com.webank.wedatasphere.exchangis.common.linkis.bml.BmlResource; +import com.webank.wedatasphere.exchangis.engine.resource.bml.BmlClients; +import com.webank.wedatasphere.exchangis.job.exception.ExchangisJobException; +import com.webank.wedatasphere.exchangis.job.exception.ExchangisJobExceptionCode; +import com.webank.wedatasphere.exchangis.job.server.mapper.JobTransformProcessorDao; +import com.webank.wedatasphere.exchangis.job.server.mapper.JobTransformRuleDao; +import com.webank.wedatasphere.exchangis.job.server.render.transform.*; +import com.webank.wedatasphere.exchangis.job.server.render.transform.processor.TransformProcessor; +import com.webank.wedatasphere.exchangis.job.server.service.JobTransformService; +import com.webank.wedatasphere.exchangis.job.server.utils.SpringContextHolder; +import org.apache.commons.io.IOUtils; +import org.apache.commons.lang3.StringUtils; +import org.apache.linkis.bml.protocol.BmlDownloadResponse; +import org.apache.linkis.bml.protocol.BmlUpdateResponse; +import org.apache.linkis.bml.protocol.BmlUploadResponse; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.stereotype.Service; +import org.springframework.transaction.annotation.Transactional; + +import javax.annotation.Resource; +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.util.*; + +@Service +public class DefaultJobTransformService implements JobTransformService { + + private static final Logger LOG = LoggerFactory.getLogger(DefaultJobTransformService.class); + + @Resource + private JobTransformProcessorDao processorDao; + + @Resource + private JobTransformRuleDao transformRuleDao; + + @Resource + private TransformRulesFusion transformDefineRulesFusion; + + @Resource + private TransformerContainer transformerContainer; + + private JobTransformService selfService; + + + @Override + public Map<String, Object> getSettings(TransformRequestVo requestVo) { + Map<String, Object> settingsMap = new HashMap<>(); + // First get the definition rules used to select the transformers + TransformDefine sourceDefine = getTransformDefineRule(requestVo.getSourceTypeId(), requestVo.getEngine(), TransformRule.Direction.SOURCE.name()); + TransformDefine sinkDefine = getTransformDefineRule(requestVo.getSinkTypeId(), requestVo.getEngine(), TransformRule.Direction.SINK.name()); + TransformDefine transformDefine = transformDefineRulesFusion.fuse(sourceDefine, sinkDefine); + for(String type : transformDefine.getTypes()){ + Transformer transformer = this.transformerContainer.getTransformer(type); + if (Objects.nonNull(transformer)){ + settingsMap.put(type, transformer.getSettings(requestVo)); + } + } + return settingsMap; + } + + @Override + public Long saveProcessor(TransformProcessor processor) { + String content = processor.getCodeContent(); + if (StringUtils.isNotBlank(content)){ + try { + BmlResource bmlResource = saveCodeToBml(processor.getCreator(), content); + Optional.ofNullable(bmlResource).ifPresent(resource -> { + processor.setBmlResource(resource); + // Clear the code content + processor.setCodeContent(null); + }); + }catch (Exception e){ + LOG.warn("Unable to save the code content of processor to the bml server: [{}]", e.getMessage()); + } + } + return getSelfService().saveProcessorInfo(processor); + } + + @Override +
@Transactional(rollbackFor = Exception.class) + public Long saveProcessorInfo(TransformProcessor processor) { + processorDao.saveOne(processor); + return processor.getId(); + } + + @Override + public TransformProcessor getProcessorWithCode(String procCodeId) { + TransformProcessor processor = processorDao.getProcDetail(Long.valueOf(procCodeId)); + if (Objects.nonNull(processor)) { + if (StringUtils.isBlank(processor.getCodeContent()) && + Objects.nonNull(processor.getBmlResource())) { + // The content has been stored in the BML server + try { + String codeContent = downloadCodeFromBml(processor.getCreator(), processor.getBmlResource()); + processor.setCodeContent(codeContent); + } catch (Exception e) { + LOG.warn("Unable to fetch the code content of processor from bml server: [{}]", e.getMessage()); + } + } + } + return processor; + } + + @Override + public TransformProcessor getProcessorInfo(String procCodeId) { + return processorDao.getProcInfo(Long.valueOf(procCodeId)); + } + + @Override + public Long updateProcessor(TransformProcessor processor) { + BmlResource bmlResource = processor.getBmlResource(); + try { + if (Objects.nonNull(bmlResource)) { + bmlResource = updateCodeToBml(processor.getCreator(), bmlResource, processor.getCodeContent()); + } else { + bmlResource = saveCodeToBml(processor.getCreator(), processor.getCodeContent()); + } + Optional.ofNullable(bmlResource).ifPresent(resource -> { + processor.setBmlResource(resource); + // Clear the code content + processor.setCodeContent(null); + }); + } catch (Exception e){ + LOG.warn("Unable to save/update the code content of processor to the bml server: [{}]", e.getMessage()); + } + return getSelfService().updateProcessorInfo(processor); + } + + @Override + @Transactional(rollbackFor = Exception.class) + public Long updateProcessorInfo(TransformProcessor processor) { + processorDao.updateOne(processor); + return processor.getId(); + } + + /** + * Save code content to bml server + * @param operator operator + * @param codeContent code content + * @return bml resource + */ + private BmlResource saveCodeToBml(String operator, String codeContent){ + // TODO abstract the bml client in common module + BmlUploadResponse uploadResponse = BmlClients.getInstance().uploadResource(operator, "code", + IOUtils.toInputStream(codeContent, StandardCharsets.UTF_8)); + return Objects.nonNull(uploadResponse) ? + new BmlResource(uploadResponse.resourceId(), uploadResponse.version()) : null; + } + + + /** + * Update code content to bml server + * @param operator operator + * @param resource resource + * @param codeContent code content + * @return bml resource + */ + private BmlResource updateCodeToBml(String operator, BmlResource resource, String codeContent){ + // TODO abstract the bml client in common module + BmlUpdateResponse updateResponse = BmlClients.getInstance().updateResource(operator, resource.getResourceId(), + "code", IOUtils.toInputStream(codeContent, StandardCharsets.UTF_8)); + return Objects.nonNull(updateResponse) ?
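+ /* + * Save vs update, in short: the first save creates a new BML resource (a resourceId plus an + * initial version), and each later update bumps the version of that same resourceId. An + * illustrative flow only, built on the helpers defined in this class: + * + * BmlResource first = saveCodeToBml("hadoop", "def transform(line): ..."); // new resourceId + * BmlResource next = updateCodeToBml("hadoop", first, "def transform(line): pass"); // same id, next version + */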
+ new BmlResource(updateResponse.resourceId(), updateResponse.version()) : null; + } + + /** + * Download code content from bml server + * @param operator operator + * @param resource resource + * @return code content + */ + private String downloadCodeFromBml(String operator, BmlResource resource) throws IOException { + // TODO abstract the bml client in common module + BmlDownloadResponse downloadResponse = BmlClients.getInstance() + .downloadResource(operator, resource.getResourceId(), resource.getVersion()); + return Objects.nonNull(downloadResponse) && Objects.nonNull(downloadResponse.inputStream())? + IOUtils.toString(downloadResponse.inputStream(), StandardCharsets.UTF_8) : null; + } + + /** + * Get transform define rule + * @param dataSourceType data source type + * @param engine engine + * @param direction direction + * @return transform define + */ + private TransformDefine getTransformDefineRule(String dataSourceType, String engine, String direction){ + TransformDefine resultDefine = new TransformDefine(TransformRule.Types.DEF, null); + this.transformRuleDao.getTransformRules(TransformRule.Types.DEF.name(), dataSourceType) + .stream().filter(rule -> rule.matchInFraction(dataSourceType, engine, direction) > 0) + .forEach(rule -> Optional.ofNullable(rule.toRule(TransformDefine.class)).ifPresent(define -> resultDefine.getTypes().addAll(define.getTypes()))); + if (resultDefine.getTypes().isEmpty()){ + // Add the MAPPING type by default + resultDefine.getTypes().add(TransformTypes.MAPPING.name()); + } + return resultDefine; + } + /** + * Get the self-service + * @return transform service + */ + private JobTransformService getSelfService(){ + if (Objects.isNull(selfService)){ + this.selfService = SpringContextHolder.getBean(JobTransformService.class); + if (Objects.isNull(this.selfService)){ + throw new ExchangisJobException.Runtime(ExchangisJobExceptionCode.RENDER_TRANSFORM_ERROR.getCode(), + "JobTransformService cannot be found in spring context", null); + } + } + return this.selfService; + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/service/impl/DefaultTaskExecuteService.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/service/impl/DefaultTaskExecuteService.java new file mode 100644 index 000000000..0a35fafcf --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/service/impl/DefaultTaskExecuteService.java @@ -0,0 +1,174 @@ +package com.webank.wedatasphere.exchangis.job.server.service.impl; + +import com.webank.wedatasphere.exchangis.datasource.core.utils.Json; +import com.webank.wedatasphere.exchangis.job.exception.ExchangisOnEventException; +import com.webank.wedatasphere.exchangis.job.launcher.exception.ExchangisTaskLaunchException; +import com.webank.wedatasphere.exchangis.job.launcher.domain.LaunchedExchangisTask; +import com.webank.wedatasphere.exchangis.job.launcher.domain.task.TaskStatus; +import com.webank.wedatasphere.exchangis.job.launcher.entity.LaunchedExchangisJobEntity; +import com.webank.wedatasphere.exchangis.job.server.mapper.LaunchableTaskDao; +import com.webank.wedatasphere.exchangis.job.server.mapper.LaunchedJobDao; +import com.webank.wedatasphere.exchangis.job.server.mapper.LaunchedTaskDao; +import com.webank.wedatasphere.exchangis.job.server.execution.events.*; +import com.webank.wedatasphere.exchangis.job.server.log.cache.JobLogCacheUtils; +import
com.webank.wedatasphere.exchangis.job.server.service.TaskExecuteService; +import com.webank.wedatasphere.exchangis.job.server.utils.SpringContextHolder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.stereotype.Service; +import org.springframework.transaction.annotation.Transactional; + +import javax.annotation.Resource; +import java.util.Calendar; +import java.util.Date; +import java.util.List; +import java.util.Objects; + +@Service +public class DefaultTaskExecuteService implements TaskExecuteService { + + private static final Logger LOG = LoggerFactory.getLogger(DefaultTaskExecuteService.class); + @Resource + private LaunchedTaskDao launchedTaskDao; + + @Resource + private LaunchedJobDao launchedJobDao; + + @Resource + private LaunchableTaskDao launchableTaskDao; + + private TaskExecuteService selfService; + + @Override + public void onMetricsUpdate(TaskMetricsUpdateEvent metricsUpdateEvent) { + LaunchedExchangisTask task = metricsUpdateEvent.getLaunchedExchangisTask(); + task.setLastUpdateTime(Calendar.getInstance().getTime()); + launchedTaskDao.upgradeLaunchedTaskMetrics(task.getTaskId(), Json.toJson(metricsUpdateEvent.getMetrics(), null), + task.getLastUpdateTime()); + } + + @Override + public void onStatusUpdate(TaskStatusUpdateEvent statusUpdateEvent) throws ExchangisOnEventException { + LaunchedExchangisTask task = statusUpdateEvent.getLaunchedExchangisTask(); + TaskStatus status = statusUpdateEvent.getUpdateStatus(); + LaunchedExchangisJobEntity launchedJob = null; + if (!TaskStatus.isCompleted(status)){ + launchedJob = launchedJobDao.searchLaunchedJob(task.getJobExecutionId()); + TaskStatus jobStatus = launchedJob.getStatus(); + if (TaskStatus.isCompleted(jobStatus) && Objects.nonNull(task.getLauncherTask())){ + // Kill the remote task + try { + task.getLauncherTask().kill(); + } catch (ExchangisTaskLaunchException e) { + throw new ExchangisOnEventException("Failed to kill linkis_id: [" + task.getLinkisJobId() + "]", e); + } + }else if (jobStatus == TaskStatus.Scheduled || jobStatus == TaskStatus.Inited){ + launchedJobDao.upgradeLaunchedJobStatusInVersion(launchedJob.getJobExecutionId(), TaskStatus.Running.name(), 0, launchedJob.getLastUpdateTime()); + } + } + // The status differs from the stored one, so update it + if (!task.getStatus().equals(status)){ + launchedJob = Objects.isNull(launchedJob) ?
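+ /* + * Note on getSelfService(): updateTaskStatus below is @Transactional, and Spring only applies + * transaction advice when the call goes through the bean's proxy. A plain this.updateTaskStatus(...) + * would be a self-invocation that bypasses the proxy, so the service looks itself up in the + * application context and calls through that reference instead. + */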
+ launchedJobDao.searchLaunchedJob(task.getJobExecutionId()) : launchedJob; + getSelfService().updateTaskStatus(task, status, !TaskStatus.isCompleted(launchedJob.getStatus())); + } + } + + @Override + @Transactional(rollbackFor = Exception.class) + public void onLaunch(TaskLaunchEvent taskLaunchEvent) { + LaunchedExchangisTask task = taskLaunchEvent.getLaunchedExchangisTask(); + task.setLastUpdateTime(Calendar.getInstance().getTime()); + this.launchedTaskDao.updateLaunchInfo(task); + // Search the related job info + LaunchedExchangisJobEntity launchedJob = launchedJobDao.searchLaunchedJob(task.getJobExecutionId()); + TaskStatus jobStatus = launchedJob.getStatus(); + if (jobStatus == TaskStatus.Scheduled || jobStatus == TaskStatus.Inited) { + // Also update the job status, changing it to Running + this.launchedJobDao.upgradeLaunchedJobStatusInVersion(task.getJobExecutionId(), + TaskStatus.Running.name(), 0, launchedJob.getLastUpdateTime()); + } + JobLogCacheUtils.flush(task.getJobExecutionId(), false); + } + + @Override + public void onDelete(TaskDeleteEvent deleteEvent) { + this.launchedTaskDao.deleteLaunchedTask(deleteEvent.getTaskId()); + } + + @Override + public void onDequeue(TaskDequeueEvent dequeueEvent) throws ExchangisOnEventException { + // Delete the task record from the table + this.launchableTaskDao.deleteLaunchableTask(dequeueEvent.getTaskId()); + } + + @Override + public void onProgressUpdate(TaskProgressUpdateEvent updateEvent) throws ExchangisOnEventException { + LaunchedExchangisTask task = updateEvent.getLaunchedExchangisTask(); + if (task.getProgress() != updateEvent.getProgressInfo().getProgress()) { + getSelfService().updateTaskProgress(task, updateEvent.getProgressInfo().getProgress()); + } + } + + /** + * First update the task status, then update the job + * @param task task + * @param status status + * @param updateJob whether the job should be updated as well + */ + @Override + @Transactional(rollbackFor = Exception.class) + public void updateTaskStatus(LaunchedExchangisTask task, TaskStatus status, boolean updateJob) throws ExchangisOnEventException { + JobLogCacheUtils.flush(task.getJobExecutionId(), false); + task.setLastUpdateTime(Calendar.getInstance().getTime()); + launchedTaskDao.upgradeLaunchedTaskStatus(task.getTaskId(), status.name(), task.getLastUpdateTime()); + if (updateJob) { + if (status == TaskStatus.Failed || status == TaskStatus.Cancelled) { + // Update directly, without opening another transaction + launchedJobDao.upgradeLaunchedJobStatus(task.getJobExecutionId(), status.name(), task.getLastUpdateTime()); + } else if (status == TaskStatus.Success) { + getSelfService().updateJobStatus(task.getJobExecutionId(), TaskStatus.Success, task.getLastUpdateTime()); + } + } + } + + @Override + public void updateTaskProgress(LaunchedExchangisTask task, float progress) throws ExchangisOnEventException { + task.setLastUpdateTime(Calendar.getInstance().getTime()); + this.launchedTaskDao.upgradeLaunchedTaskProgress(task.getTaskId(), progress, task.getLastUpdateTime()); + getSelfService().updateJobProgress(task.getJobExecutionId(), task.getLastUpdateTime()); + } + + @Override + @Transactional(rollbackFor = Exception.class) + public void updateJobProgress(String jobExecutionId, Date updateTime) { + // Sum up the progress of all tasks + float totalTaskProgress = this.launchedTaskDao.sumProgressByJobExecutionId(jobExecutionId); + if (totalTaskProgress > 0){ + this.launchedJobDao.upgradeLaunchedJobProgress(jobExecutionId, totalTaskProgress, updateTime); + } + } + + @Override + @Transactional(rollbackFor = Exception.class) + public void updateJobStatus(String
jobExecutionId, TaskStatus status, Date updateTime) { + List<String> statusList = launchedTaskDao.selectTaskStatusByJobExecutionId(jobExecutionId); + if (statusList.stream().allMatch(taskStatus -> taskStatus.equalsIgnoreCase(TaskStatus.Success.name()))){ + launchedJobDao.upgradeLaunchedJobStatusInVersion(jobExecutionId, + TaskStatus.Success.name(), statusList.size(), updateTime); + } + } + + /** + * Get the self service + * @return service + */ + private TaskExecuteService getSelfService() throws ExchangisOnEventException { + if (Objects.isNull(selfService)){ + this.selfService = SpringContextHolder.getBean(TaskExecuteService.class); + if (Objects.isNull(this.selfService)){ + throw new ExchangisOnEventException("TaskExecuteService cannot be found in spring context", null); + } + } + return this.selfService; + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/service/impl/DefaultTaskGenerateService.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/service/impl/DefaultTaskGenerateService.java new file mode 100644 index 000000000..f4a6aa44d --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/service/impl/DefaultTaskGenerateService.java @@ -0,0 +1,72 @@ +package com.webank.wedatasphere.exchangis.job.server.service.impl; + +import com.webank.wedatasphere.exchangis.job.launcher.domain.LaunchableExchangisJob; +import com.webank.wedatasphere.exchangis.job.launcher.domain.LaunchableExchangisTask; +import com.webank.wedatasphere.exchangis.job.launcher.domain.task.TaskStatus; +import com.webank.wedatasphere.exchangis.job.launcher.entity.LaunchedExchangisJobEntity; +import com.webank.wedatasphere.exchangis.job.server.mapper.LaunchableTaskDao; +import com.webank.wedatasphere.exchangis.job.server.mapper.LaunchedJobDao; +import com.webank.wedatasphere.exchangis.job.server.execution.generator.events.TaskGenerateErrorEvent; +import com.webank.wedatasphere.exchangis.job.server.execution.generator.events.TaskGenerateInitEvent; +import com.webank.wedatasphere.exchangis.job.server.execution.generator.events.TaskGenerateSuccessEvent; +import com.webank.wedatasphere.exchangis.job.server.execution.subscriber.NewInTaskObserver; +import com.webank.wedatasphere.exchangis.job.server.log.cache.JobLogCacheUtils; +import com.webank.wedatasphere.exchangis.job.server.service.TaskGenerateService; +import org.springframework.stereotype.Service; +import org.springframework.transaction.annotation.Transactional; + +import javax.annotation.Resource; +import java.util.Calendar; +import java.util.List; + +/** + * Task generate service + */ +@Service +public class DefaultTaskGenerateService implements TaskGenerateService { + @Resource + private LaunchedJobDao launchedJobDao; + + @Resource + private LaunchableTaskDao launchableTaskDao; + + @Resource + private NewInTaskObserver newInTaskObserver; + @Override + public void onError(TaskGenerateErrorEvent errorEvent) { + JobLogCacheUtils.flush(errorEvent.getLaunchableExchangisJob().getJobExecutionId(), true); + this.launchedJobDao.upgradeLaunchedJobStatus(errorEvent.getLaunchableExchangisJob().getJobExecutionId() + , TaskStatus.Failed.name(), Calendar.getInstance().getTime()); + } + + @Override + @Transactional(rollbackFor = Exception.class) + public void onInit(TaskGenerateInitEvent initEvent) { + LaunchableExchangisJob job = initEvent.getLaunchableExchangisJob(); + LaunchedExchangisJobEntity launchedJob = new
LaunchedExchangisJobEntity(job); + launchedJobDao.insertLaunchedJob(launchedJob); + JobLogCacheUtils.flush(job.getJobExecutionId(), false); + } + + @Override + @Transactional(rollbackFor = Exception.class) + public void onSuccess(TaskGenerateSuccessEvent successEvent) { + LaunchableExchangisJob launchableExchangisJob = successEvent.getLaunchableExchangisJob(); + List<LaunchableExchangisTask> tasks = successEvent.getTaskGenerated(); + Calendar calendar = Calendar.getInstance(); + tasks.forEach(task -> { + task.setJobExecutionId(launchableExchangisJob.getJobExecutionId()); + task.setCreateTime(calendar.getTime()); + task.setLastUpdateTime(task.getCreateTime()); + }); + this.launchableTaskDao.addLaunchableTask(tasks); + LaunchedExchangisJobEntity launchedJob = new LaunchedExchangisJobEntity(launchableExchangisJob); + launchedJob.setStatus(TaskStatus.Scheduled); + launchedJob.setLaunchableTaskNum(tasks.size()); + launchedJob.setLastUpdateTime(calendar.getTime()); + this.launchedJobDao.updateLaunchInfo(launchedJob); + // Offer to the observer + tasks.forEach(task -> this.newInTaskObserver.getCacheQueue().offer(task)); + JobLogCacheUtils.flush(launchableExchangisJob.getJobExecutionId(), false); + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/service/impl/DefaultTaskObserverService.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/service/impl/DefaultTaskObserverService.java new file mode 100644 index 000000000..6a4e58822 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/service/impl/DefaultTaskObserverService.java @@ -0,0 +1,55 @@ +package com.webank.wedatasphere.exchangis.job.server.service.impl; + +import com.webank.wedatasphere.exchangis.job.launcher.domain.LaunchableExchangisTask; +import com.webank.wedatasphere.exchangis.job.launcher.domain.task.TaskStatus; +import com.webank.wedatasphere.exchangis.job.launcher.entity.LaunchedExchangisJobEntity; +import com.webank.wedatasphere.exchangis.job.launcher.entity.LaunchedExchangisTaskEntity; +import com.webank.wedatasphere.exchangis.job.server.mapper.LaunchableTaskDao; +import com.webank.wedatasphere.exchangis.job.server.mapper.LaunchedJobDao; +import com.webank.wedatasphere.exchangis.job.server.mapper.LaunchedTaskDao; +import com.webank.wedatasphere.exchangis.job.server.service.TaskObserverService; +import org.springframework.stereotype.Service; +import org.springframework.transaction.annotation.Transactional; + +import javax.annotation.Resource; +import java.util.List; +import java.util.Objects; + +/** + * Task observer service + */ +@Service +public class DefaultTaskObserverService implements TaskObserverService { + + /** + * Launchable task dao + */ + @Resource + private LaunchableTaskDao launchableTaskDao; + + @Resource + private LaunchedTaskDao launchedTaskDao; + + @Resource + private LaunchedJobDao launchedJobDao; + @Override + public List<LaunchableExchangisTask> onPublishLaunchableTask(int limitSize) { + return launchableTaskDao.getTaskToLaunch(limitSize); + } + + @Override + @Transactional(rollbackFor = Exception.class) + public boolean subscribe(LaunchableExchangisTask task) { + LaunchedExchangisTaskEntity taskEntity = new LaunchedExchangisTaskEntity(task); + LaunchedExchangisJobEntity jobEntity = launchedJobDao.searchLaunchedJob(task.getJobExecutionId()); + if (Objects.isNull(jobEntity) || TaskStatus.isCompleted(jobEntity.getStatus())){ + // Guard: the job record may already be gone; fall back to Cancelled to avoid an NPE + taskEntity.setStatus(Objects.isNull(jobEntity) ? TaskStatus.Cancelled : jobEntity.getStatus());
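+ /* + * The job has already finished or its record is gone, so the task is persisted in a terminal + * state and subscribe(...) returns false: the caller should not launch it. In the normal path + * below, insertLaunchedTaskOrUpdate(...) == 1 appears to act as the claim that lets exactly one + * caller launch the task. + */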
this.launchedTaskDao.insertLaunchedTaskOrUpdate(taskEntity); + // TODO delete the launchable task + + return false; + } else { + return this.launchedTaskDao.insertLaunchedTaskOrUpdate(taskEntity) == 1; + } + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/service/impl/ExchangisJobDsBindServiceImpl.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/service/impl/ExchangisJobDsBindServiceImpl.java new file mode 100644 index 000000000..7ec485afd --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/service/impl/ExchangisJobDsBindServiceImpl.java @@ -0,0 +1,38 @@ +package com.webank.wedatasphere.exchangis.job.server.service.impl; + +import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper; +import com.webank.wedatasphere.exchangis.dao.domain.ExchangisJobDsBind; +import com.webank.wedatasphere.exchangis.dao.mapper.ExchangisJobDsBindMapper; +import com.webank.wedatasphere.exchangis.job.server.service.ExchangisJobDsBindService; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; + +import java.util.List; +import java.util.Optional; + +@Service +public class ExchangisJobDsBindServiceImpl implements ExchangisJobDsBindService { + + @Autowired + private ExchangisJobDsBindMapper dsBindMapper; + + @Override + public void updateJobDsBind(Long jobId, List<ExchangisJobDsBind> dsBinds) { + + QueryWrapper<ExchangisJobDsBind> deleteCondition = new QueryWrapper<>(); + deleteCondition.eq("job_id", jobId); + this.dsBindMapper.delete(deleteCondition); + + for (ExchangisJobDsBind dsBind : dsBinds) { + this.dsBindMapper.insert(dsBind); + } + } + + @Override + public boolean inUse(Long datasourceId) { + QueryWrapper<ExchangisJobDsBind> condition = new QueryWrapper<>(); + condition.eq("source_ds_id", datasourceId).or().eq("sink_ds_id", datasourceId); + Long count = Optional.ofNullable(this.dsBindMapper.selectCount(condition)).orElse(0L); + return count > 0; + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/service/impl/ExchangisMetricsServiceImpl.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/service/impl/ExchangisMetricsServiceImpl.java new file mode 100644 index 000000000..a29c9bc71 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/service/impl/ExchangisMetricsServiceImpl.java @@ -0,0 +1,348 @@ +package com.webank.wedatasphere.exchangis.job.server.service.impl; + +import com.webank.wedatasphere.exchangis.job.server.dto.ExchangisEngineResourceMetricsDTO; +import com.webank.wedatasphere.exchangis.job.server.dto.ExchangisTaskProcessMetricsDTO; +import com.webank.wedatasphere.exchangis.job.server.dto.ExchangisTaskStatusMetricsDTO; +import com.webank.wedatasphere.exchangis.job.server.mapper.ExchangisLaunchTaskMapper; +import com.webank.wedatasphere.exchangis.job.server.service.ExchangisMetricsService; +import org.apache.linkis.server.Message; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; + +import javax.servlet.http.HttpServletRequest; +import java.text.SimpleDateFormat; +import java.util.*; + +@Service +public class ExchangisMetricsServiceImpl implements ExchangisMetricsService { + + + private final ExchangisLaunchTaskMapper exchangisLaunchTaskMapper; + + @Autowired + public
ExchangisMetricsServiceImpl(ExchangisLaunchTaskMapper exchangisLaunchTaskMapper) { + this.exchangisLaunchTaskMapper = exchangisLaunchTaskMapper; + } + + @Override + public Message getTaskStateMetrics(HttpServletRequest request) { + List<ExchangisTaskStatusMetricsDTO> metrics = new ArrayList<>(); + // TODO hard-coded status values + ExchangisTaskStatusMetricsDTO success = exchangisLaunchTaskMapper.getTaskMetricsByStatus("SUCCESS"); + ExchangisTaskStatusMetricsDTO failed = exchangisLaunchTaskMapper.getTaskMetricsByStatus("FAILED"); + ExchangisTaskStatusMetricsDTO running = exchangisLaunchTaskMapper.getTaskMetricsByStatus("RUNNING"); + ExchangisTaskStatusMetricsDTO busy = exchangisLaunchTaskMapper.getTaskMetricsByStatus("BUSY"); + ExchangisTaskStatusMetricsDTO idle = exchangisLaunchTaskMapper.getTaskMetricsByStatus("IDLE"); + ExchangisTaskStatusMetricsDTO unlock = exchangisLaunchTaskMapper.getTaskMetricsByStatus("UNLOCK"); + + Optional.ofNullable(success).ifPresent(metrics::add); + Optional.ofNullable(failed).ifPresent(metrics::add); + Optional.ofNullable(running).ifPresent(metrics::add); + Optional.ofNullable(busy).ifPresent(metrics::add); + Optional.ofNullable(idle).ifPresent(metrics::add); + Optional.ofNullable(unlock).ifPresent(metrics::add); + + Message message = Message.ok(); + message.setMethod("/dss/exchangis/main/metrics/taskstate"); + // NOTE: the response key is kept as "metrices" for front-end compatibility + message.data("metrices", metrics); + return message; + } + + @Override + public Message getTaskProcessMetrics(HttpServletRequest request) { + // TODO mock data for process metrics + List<ExchangisTaskProcessMetricsDTO> list = new ArrayList<>(); + + // total + ExchangisTaskProcessMetricsDTO total = new ExchangisTaskProcessMetricsDTO(); + total.setKey("total"); + total.setTitle("总进度"); + total.setRunning(50); + total.setInitialized(10); + total.setTotal(120); + total.setPercentOfComplete("48%"); + list.add(total); + + // bdp + ExchangisTaskProcessMetricsDTO bdp = new ExchangisTaskProcessMetricsDTO(); + bdp.setKey("bdp"); + bdp.setTitle("BDP"); + bdp.setRunning(20); + bdp.setInitialized(10); + bdp.setTotal(60); + bdp.setPercentOfComplete("33%"); + list.add(bdp); + + // es + ExchangisTaskProcessMetricsDTO es = new ExchangisTaskProcessMetricsDTO(); + es.setKey("es"); + es.setTitle("ES"); + es.setRunning(20); + es.setInitialized(0); + es.setTotal(40); + es.setPercentOfComplete("50%"); + list.add(es); + + // fps + ExchangisTaskProcessMetricsDTO fps = new ExchangisTaskProcessMetricsDTO(); + fps.setKey("fps"); + fps.setTitle("FPS"); + fps.setRunning(10); + fps.setInitialized(0); + fps.setTotal(20); + fps.setPercentOfComplete("50%"); + list.add(fps); + + Message message = Message.ok(); + message.setMethod("/dss/exchangis/main/metrics/taskprocess"); + message.data("list", list); + return message; + } + + // Mock data for echarts + + /** + * + * dataset: { + * source: [ + * ['datasource', '2021-10-25 15:00', '2021-10-25 15:01', '2021-10-25 15:02', '2021-10-25 15:03', '2021-10-25 15:04'], + * ['ds1', 41.1, 30.4, 65.1, 53.3, 44.2], + * ['ds2', 86.5, 92.1, 85.7, 83.1, 93.2], + * ['ds3', 24.1, 67.2, 79.5, 86.4, 76.2] + * ] + * }, + */ + @Override + public Message getDataSourceFlowMetrics(HttpServletRequest request) { + // TODO + // Mock the last 4 hours of data, at minute granularity + String fromDateTime = "2021-10-25 15:00"; + SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm"); + Date parsedFrom; + try { + parsedFrom = sdf.parse(fromDateTime); + } catch (Exception e) { + parsedFrom = new Date(); + } + Calendar calendar = Calendar.getInstance(); + calendar.setTime(parsedFrom); + + List<List<Object>> dataset = new ArrayList<>(); + List<Object> header
= new ArrayList<>(); + int loopNum = 4 * 60; + + // Add the header row + header.add("时间"); + header.add("ds1"); + header.add("ds2"); + header.add("ds3"); + dataset.add(header); + + // Add the data rows + List<Object> realData; + int max = 10240; + int min = 512; + for (int i = 1; i <= loopNum; i++) { + realData = new ArrayList<>(); + realData.add(sdf.format(calendar.getTime())); + calendar.add(Calendar.MINUTE, 1); + realData.add(Math.random() * (max - min) + min); + realData.add(Math.random() * (max - min) + min); + realData.add(Math.random() * (max - min) + min); + dataset.add(realData); + } + + Message message = Message.ok(); + message.setMethod("/dss/exchangis/main/metrics/datasourceflow"); + message.data("dataset", dataset); + return message; + } + + @Override + public Message getEngineResourceCpuMetrics(HttpServletRequest request) { + // Mock the last 4 hours of data, at minute granularity + String fromDateTime = "2021-10-25 15:00"; + SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm"); + Date parsedFrom; + try { + parsedFrom = sdf.parse(fromDateTime); + } catch (Exception e) { + parsedFrom = new Date(); + } + Calendar calendar = Calendar.getInstance(); + calendar.setTime(parsedFrom); + + List<List<Object>> dataset = new ArrayList<>(); + List<Object> header = new ArrayList<>(); + int loopNum = 4 * 60; + + // Add the header row + header.add("时间"); + header.add("datax"); + header.add("sqoop"); + header.add("linkis"); + dataset.add(header); + + // Add the data rows + List<Object> realData; + int min = 1; + int max = 8; + for (int i = 1; i <= loopNum; i++) { + realData = new ArrayList<>(); + realData.add(sdf.format(calendar.getTime())); + calendar.add(Calendar.MINUTE, 1); + realData.add(Math.random() * (max - min) + min); + realData.add(Math.random() * (max - min) + min); + realData.add(Math.random() * (max - min) + min); + dataset.add(realData); + } + Message message = Message.ok(); + message.setMethod("/dss/exchangis/main/metrics/engineresourcecpu"); + message.data("dataset", dataset); + return message; + } + + @Override + public Message getEngineResourceMemMetrics(HttpServletRequest request) { + // Mock the last 4 hours of data, at minute granularity + String fromDateTime = "2021-10-25 15:00"; + SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm"); + Date parsedFrom; + try { + parsedFrom = sdf.parse(fromDateTime); + } catch (Exception e) { + parsedFrom = new Date(); + } + Calendar calendar = Calendar.getInstance(); + calendar.setTime(parsedFrom); + + List<List<Object>> dataset = new ArrayList<>(); + List<Object> header = new ArrayList<>(); + int loopNum = 4 * 60;
+ + // Add the header row + header.add("时间"); + header.add("datax"); + header.add("sqoop"); + header.add("linkis"); + dataset.add(header); + + // Add the data rows + List<Object> realData; + int max = 8192; + int min = 1024; + for (int i = 1; i <= loopNum; i++) { + realData = new ArrayList<>(); + realData.add(sdf.format(calendar.getTime())); + calendar.add(Calendar.MINUTE, 1); + realData.add(Math.random() * (max - min) + min); + realData.add(Math.random() * (max - min) + min); + realData.add(Math.random() * (max - min) + min); + dataset.add(realData); + } + Message message = Message.ok(); + message.setMethod("/dss/exchangis/main/metrics/engineresourcemem"); + message.data("dataset", dataset); + return message; + } + + @Override + public Message getEngineResourceMetrics(HttpServletRequest request) { + List<ExchangisEngineResourceMetricsDTO> list = new ArrayList<>(); + ExchangisEngineResourceMetricsDTO sqoop = new ExchangisEngineResourceMetricsDTO(); + sqoop.setEngine("sqoop"); + sqoop.setCpu("45%"); + sqoop.setMem("1782Mi"); + list.add(sqoop); + + ExchangisEngineResourceMetricsDTO datax = new ExchangisEngineResourceMetricsDTO(); + datax.setEngine("datax"); + datax.setCpu("32%"); + datax.setMem("512Mi"); + list.add(datax); + + ExchangisEngineResourceMetricsDTO linkis = new ExchangisEngineResourceMetricsDTO(); + linkis.setEngine("linkis"); + linkis.setCpu("78%"); + linkis.setMem("4196Mi"); + list.add(linkis); + + Message message = Message.ok(); + message.setMethod("/dss/exchangis/main/metrics/engineresource"); + message.data("list", list); + return message; + } + + +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/service/impl/JobFunctionServiceImpl.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/service/impl/JobFunctionServiceImpl.java new file mode 100644 index 000000000..04d28ffea --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/service/impl/JobFunctionServiceImpl.java @@ -0,0 +1,42 @@ +package com.webank.wedatasphere.exchangis.job.server.service.impl; + +import com.webank.wedatasphere.exchangis.job.server.mapper.JobFunctionDao; +import com.webank.wedatasphere.exchangis.job.server.service.JobFuncService; +import com.webank.wedatasphere.exchangis.job.server.vo.JobFunction; +import org.apache.commons.lang.StringUtils; +import org.springframework.stereotype.Service; + +import javax.annotation.Resource; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +/** + * @author davidhua + * 2020/4/23 + */ +@Service +public class
JobFunctionServiceImpl implements JobFuncService { + @Resource + private JobFunctionDao functionDao; + + + @Override + public Map<String, String> getFuncRefName(String tabName, JobFunction.FunctionType functionType) { + Map<String, String> funcRefNameMap = new HashMap<>(); + List<JobFunction> jobFunctions = functionDao.listFunctions(tabName, functionType.name()); + jobFunctions.forEach(jobFunction -> { + String funcName = jobFunction.getFuncName(); + String refName = jobFunction.getRefName(); + if(StringUtils.isNotBlank(funcName) && StringUtils.isNotBlank(refName)){ + funcRefNameMap.put(funcName, refName); + } + }); + return funcRefNameMap; + } + + @Override + public List<JobFunction> getFunctions(String tabName, JobFunction.FunctionType functionType) { + return functionDao.listFunctions(tabName, functionType.name()); + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/service/open/DefaultJobOpenService.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/service/open/DefaultJobOpenService.java new file mode 100644 index 000000000..81ec15408 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/service/open/DefaultJobOpenService.java @@ -0,0 +1,66 @@ +package com.webank.wedatasphere.exchangis.job.server.service.open; + +import com.github.pagehelper.PageHelper; +import com.webank.wedatasphere.exchangis.job.api.ExchangisJobOpenService; +import com.webank.wedatasphere.exchangis.job.domain.ExchangisJobEntity; +import com.webank.wedatasphere.exchangis.job.exception.ExchangisJobException; +import com.webank.wedatasphere.exchangis.job.server.mapper.ExchangisJobEntityDao; +import com.webank.wedatasphere.exchangis.job.vo.ExchangisJobQueryVo; +import org.apache.commons.lang.StringUtils; +import org.springframework.stereotype.Service; +import org.springframework.transaction.annotation.Transactional; + +import javax.annotation.Resource; +import java.util.List; + +import static com.webank.wedatasphere.exchangis.job.exception.ExchangisJobExceptionCode.JOB_EXCEPTION_CODE; + + +/** + * Default implementation + */ +@Service +public class DefaultJobOpenService implements ExchangisJobOpenService { + + @Resource + private ExchangisJobEntityDao jobEntityDao; + + @Override + public ExchangisJobEntity getJobById(Long id, boolean basic) throws ExchangisJobException { + try { + return basic ?
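+ /* + * basic = true delegates to getBasicInfo(id) (the lightweight row, without the serialized job + * content), while basic = false delegates to getDetail(id) (the full record). Callers that only + * need metadata should pass true, e.g. getJobById(12L, true). + */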
this.jobEntityDao.getBasicInfo(id) : this.jobEntityDao.getDetail(id); + } catch (Exception e){ + throw new ExchangisJobException(JOB_EXCEPTION_CODE.getCode(), + "Fail to get the information of job [id: " + id + "]", e); + } + } + + @Override + public List<ExchangisJobEntity> queryJobs(ExchangisJobQueryVo queryVo, boolean inPage) throws ExchangisJobException { + try { + if (inPage) { + PageHelper.startPage(queryVo.getPage(), queryVo.getPageSize()); + try { + return this.jobEntityDao.queryPageList(queryVo); + } finally { + PageHelper.clearPage(); + } + } + return this.jobEntityDao.queryPageList(queryVo); + } catch (Exception e){ + throw new ExchangisJobException(JOB_EXCEPTION_CODE.getCode(), + "Fail to query job list", e); + } + } + + @Override + @Transactional(rollbackFor = Exception.class) + public void deleteJobBatch(List<Long> idList) throws ExchangisJobException { + try { + this.jobEntityDao.deleteBatch(idList); + } catch (Exception e){ + throw new ExchangisJobException(JOB_EXCEPTION_CODE.getCode(), + "Fail to delete job batch, id list: [" + StringUtils.join(idList,",") + "]", e); + } + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/utils/DateTool.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/utils/DateTool.java new file mode 100644 index 000000000..4e2b20ce6 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/utils/DateTool.java @@ -0,0 +1,225 @@ +package com.webank.wedatasphere.exchangis.job.server.utils; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.text.DateFormat; +import java.text.ParseException; +import java.text.SimpleDateFormat; +import java.util.Calendar; +import java.util.Date; +import java.util.TimeZone; +import java.util.regex.Pattern; + +/** + * Created by devendeng on 2018/10/22.
+ */ +public class DateTool { + private static final String TIME_PLACEHOLDER_DATE_0 = "${yyyyMMdd}"; + private static final String TIME_PLACEHOLDER_DATE_1 = "${yyyy-MM-dd}"; + public static final String TIME_PLACEHOLDER_DATE_TIME = "${yyyy-MM-dd HH:mm:ss}"; + /** + * yyyyMMddHH + * yyyy-MM-dd-HH + * HH + */ + public static final String TIME_PLACEHOLDER_DATE_HOUR_0 = "${yyyyMMddHH}"; + public static final String TIME_PLACEHOLDER_DATE_HOUR_1 = "${yyyy-MM-dd-HH}"; + public static final String TIME_PLACEHOLDER_DATE_HOUR_2 = "${HH}"; + + static final String TIME_PLACEHOLDER_TIMESTAMP = "timestamp"; + static final String MONTH_BEGIN_SYMBOL = "run_month_begin"; + static final String MONTH_END_SYMBOL = "run_month_end"; + static final String[] HOUR_SPEC_SYMBOLS = new String[]{"yyyyMMdd", "yyyy-MM-dd", "HH"}; + static final String FORMAT_STD_SYMBOL = "_std"; + static final String FORMAT_UTC_SYMBOL = "_utc"; + public static final String[] TIME_PLACEHOLDER = new String[]{ + TIME_PLACEHOLDER_DATE_0, TIME_PLACEHOLDER_DATE_1, TIME_PLACEHOLDER_DATE_TIME, TIME_PLACEHOLDER_TIMESTAMP, + TIME_PLACEHOLDER_DATE_HOUR_0, TIME_PLACEHOLDER_DATE_HOUR_1, TIME_PLACEHOLDER_DATE_HOUR_2}; + + private static final String TIME_REGULAR_EXPRESSION = "\\$\\{(run_date|run_month_begin|run_month_end|HH|yyyyMMddHH|yyyy-MM-dd-HH|yyyyMMdd|yyyy-MM-dd|timestamp)(_std|_utc|_y|_M|_d)?\\s*([+-])?\\s*([0-9])?\\}"; + public static final Pattern TIME_REGULAR_PATTERN = Pattern.compile(TIME_REGULAR_EXPRESSION); + + private static Logger log = LoggerFactory.getLogger(DateTool.class); + private Calendar calendar=Calendar.getInstance(); + + public DateTool(Date date){ + this.calendar.setTime(date); + } + + public DateTool(long timeInMillis){ + this.calendar.setTimeInMillis(timeInMillis); + } + + public DateTool(){ + } + + public DateTool set(int field, int value) { + calendar.set(field, value); + return this; + } + + public DateTool getDate(String date) { + SimpleDateFormat format = new SimpleDateFormat("yyyyMMdd"); + try { + calendar.setTime(format.parse(date)); + } catch (ParseException e) { + log.error("Parse exception.",e); + } + return this; + } + + /** + * + * @return + */ + public DateTool getHalfBeg(int amount) { + calendar.add(Calendar.MONTH, amount * 6); + int currentMonth = calendar.get(Calendar.MONTH) + 1; + + if (currentMonth >= 1 && currentMonth <= 6) { + calendar.set(Calendar.MONTH, 0); + } else if (currentMonth >= 7 && currentMonth <= 12) { + calendar.set(Calendar.MONTH, 6); + } + calendar.set(Calendar.DATE, 1); + return this; + } + + /** + * + * @return + */ + public DateTool getHalfEnd(int amount) { + calendar.add(Calendar.MONTH, amount * 6); + int currentMonth = calendar.get(Calendar.MONTH) + 1; + + if (currentMonth >= 1 && currentMonth <= 6) { + calendar.set(Calendar.MONTH, 5); + calendar.set(Calendar.DATE, 30); + } else if (currentMonth >= 7 && currentMonth <= 12) { + calendar.set(Calendar.MONTH, 11); + calendar.set(Calendar.DATE, 31); + } + return this; + } + + /** + * Begin of quarter + * + * @return + */ + public DateTool getQuarterBeg(int amount) { + calendar.add(Calendar.MONTH, amount * 3); + + int currentMonth = calendar.get(Calendar.MONTH) + 1; + + if (currentMonth >= 1 && currentMonth <= 3) { + calendar.set(Calendar.MONTH, 0); + } else if (currentMonth >= 4 && currentMonth <= 6) { + calendar.set(Calendar.MONTH, 3); + } else if (currentMonth >= 7 && currentMonth <= 9) { + calendar.set(Calendar.MONTH, 6); + } else if (currentMonth >= 10 && currentMonth <= 12) { + calendar.set(Calendar.MONTH, 9); + } + 
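+ /* + * Worked example: for 2021-08-15 with amount 0, currentMonth is 8, so MONTH is set to 6 (July) + * above, and the DATE set below yields 2021-07-01, the first day of the third quarter. + */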
calendar.set(Calendar.DATE, 1); + return this; + } + + /** + * End of quarter + * + * @return + */ + public DateTool getQuarterEnd(int amount) { + calendar.add(Calendar.MONTH, amount * 3); + int currentMonth = calendar.get(Calendar.MONTH) + 1; + if (currentMonth >= 1 && currentMonth <= 3) { + calendar.set(Calendar.MONTH, 2); + calendar.set(Calendar.DATE, 31); + } else if (currentMonth >= 4 && currentMonth <= 6) { + calendar.set(Calendar.MONTH, 5); + calendar.set(Calendar.DATE, 30); + } else if (currentMonth >= 7 && currentMonth <= 9) { + calendar.set(Calendar.MONTH, 8); + calendar.set(Calendar.DATE, 30); + } else if (currentMonth >= 10 && currentMonth <= 12) { + calendar.set(Calendar.MONTH, 11); + calendar.set(Calendar.DATE, 31); + } + return this; + } + + public DateTool addDay(int amount){ + calendar.add(Calendar.DAY_OF_YEAR, amount); + return this; + } + + public DateTool addHour(int amount){ + calendar.add(Calendar.HOUR_OF_DAY, amount); + return this; + } + public DateTool add(int field, int amount){ + calendar.add(field, amount); + return this; + } + + public DateTool addMonth(int amount){ + calendar.add(Calendar.MONTH, amount); + return this; + } + + public DateTool addYesterdayMonth(int amount){ + calendar.add(Calendar.DAY_OF_MONTH, -1); + calendar.add(Calendar.MONTH, amount); + return this; + } + public DateTool getMonthEnd(int amount){ + calendar.set(Calendar.DATE, 1); + calendar.add(Calendar.MONTH, amount+1); + calendar.add(Calendar.DAY_OF_MONTH, -1); + return this; + } + + public DateTool getMonthBegin(int amount){ + calendar.set(Calendar.DATE, 1); + calendar.add(Calendar.MONTH, amount); + return this; + } + + public String format(String pattern){ + SimpleDateFormat format=new SimpleDateFormat(pattern); + return format.format(calendar.getTime()); + } + public String format(String pattern, String timeZone){ + SimpleDateFormat format=new SimpleDateFormat(pattern); + format.setTimeZone(TimeZone.getTimeZone(timeZone)); + return format.format(calendar.getTime()); + } + public String format(String pattern, long time){ + SimpleDateFormat format = new SimpleDateFormat(pattern); + return format.format(new Date(time)); + } + public String currentTimestamp(){ + return String.valueOf(System.currentTimeMillis()/1000); + } + + + public static Date stringToDate(String dateStr, String formatStr){ + DateFormat sdf=new SimpleDateFormat(formatStr); + Date date=null; + try { + date = sdf.parse(dateStr); + } catch (ParseException e) { + log.error("Parse exception.",e); + } + return date; + } + + public DateTool truncate(long mills) { + calendar.setTimeInMillis(mills * (calendar.getTimeInMillis() / mills)); + return this; + } + +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/utils/JobAuthorityUtils.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/utils/JobAuthorityUtils.java new file mode 100644 index 000000000..29fd31713 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/utils/JobAuthorityUtils.java @@ -0,0 +1,119 @@ +package com.webank.wedatasphere.exchangis.job.server.utils; + +import com.webank.wedatasphere.exchangis.job.domain.ExchangisJobEntity; +import com.webank.wedatasphere.exchangis.job.domain.OperationType; +import com.webank.wedatasphere.exchangis.job.exception.ExchangisJobExceptionCode; +import com.webank.wedatasphere.exchangis.job.launcher.entity.LaunchedExchangisJobEntity; +import 
com.webank.wedatasphere.exchangis.job.server.exception.ExchangisJobServerException; +import com.webank.wedatasphere.exchangis.job.server.mapper.ExchangisJobEntityDao; +import com.webank.wedatasphere.exchangis.job.server.mapper.LaunchedJobDao; +import com.webank.wedatasphere.exchangis.project.entity.domain.ExchangisProjectUser; +import com.webank.wedatasphere.exchangis.project.provider.mapper.ProjectUserMapper; +import org.apache.commons.lang3.StringUtils; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; + +import java.util.Objects; + +/** + * @author jefftlin + * @Date 2022-09-15 + */ +@Component +public class JobAuthorityUtils { + + static ProjectUserMapper projectUserMapper; + static LaunchedJobDao launchedJobDao; + static ExchangisJobEntityDao exchangisJobEntityDao; + + @Autowired + public void setProjectUserMapper(ProjectUserMapper projectUserMapper) { + JobAuthorityUtils.projectUserMapper = projectUserMapper; + } + + @Autowired + public void setLaunchedJobDao(LaunchedJobDao launchedJobDao) { + JobAuthorityUtils.launchedJobDao = launchedJobDao; + } + + @Autowired + public void setExchangisJobEntityDao(ExchangisJobEntityDao exchangisJobEntityDao) { + JobAuthorityUtils.exchangisJobEntityDao = exchangisJobEntityDao; + } + + /** + * @param privUser privUser + * @param projectId project id + * @param operationType enum("JOB_QUERY","JOB_ALTER","JOB_EXECUTE","JOB_RELEASE") + * @return + */ + public static boolean hasProjectAuthority(String privUser, Long projectId, OperationType operationType) throws ExchangisJobServerException { + ExchangisProjectUser exchangisProjectUser = new ExchangisProjectUser(); + exchangisProjectUser.setProjectId(projectId); + exchangisProjectUser.setPrivUser(privUser); + exchangisProjectUser = projectUserMapper.queryProjectUser(exchangisProjectUser); + if (Objects.isNull(exchangisProjectUser)) { + String errorMsg = String.format("Project may be deleted, please check it with project_id [%s] and priv_user [%s] in table exchangis_project_user", + projectId, privUser); + throw new ExchangisJobServerException(ExchangisJobExceptionCode.JOB_EXCEPTION_CODE.getCode(), errorMsg); + } + if (StringUtils.isNotEmpty(privUser) && + Objects.nonNull(operationType)) { + Integer privValue = exchangisProjectUser.getPriv(); + + /** + * view 4 + * edit 4+2=6 + * release 4+2+1=7 + */ + switch (operationType) { + case JOB_QUERY: + return true; + case JOB_ALTER: + case JOB_EXECUTE: + return privValue >= 6; + case JOB_RELEASE: + return privValue == 7; + default: + throw new ExchangisJobServerException(ExchangisJobExceptionCode.UNSUPPORTED_OPERATION.getCode(), "Unsupported operationType"); + } + } + return false; + } + + /** + * @param privUser privUser + * @param jobId job id + * @param operationType enum("JOB_QUERY","JOB_ALTER","JOB_EXECUTE","JOB_RELEASE") + * @return + */ + public static boolean hasJobAuthority(String privUser, Long jobId, OperationType operationType) throws ExchangisJobServerException { + ExchangisJobEntity exchangisBasicJob = exchangisJobEntityDao.getBasicInfo(jobId); + if (Objects.isNull(exchangisBasicJob)) { + String errorMsg = String.format("Job may be deleted, please check it with job_id [%s] in table exchangis_job_entity", jobId); + throw new ExchangisJobServerException(ExchangisJobExceptionCode.JOB_EXCEPTION_CODE.getCode(), errorMsg); + } + + ExchangisProjectUser exchangisProjectUser = new ExchangisProjectUser(); + exchangisProjectUser.setProjectId(exchangisBasicJob.getProjectId()); + 
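+ /* + * Privilege recap (see hasProjectAuthority above): priv 4 (view) always allows JOB_QUERY; + * priv 6 (view + edit) additionally allows JOB_ALTER and JOB_EXECUTE; only priv 7 + * (view + edit + release) allows JOB_RELEASE. For example, a user with priv 6 can execute a + * job in the project, but hasJobAuthority(user, jobId, OperationType.JOB_RELEASE) returns false. + */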
exchangisProjectUser.setPrivUser(privUser); + exchangisProjectUser = projectUserMapper.queryProjectUser(exchangisProjectUser); + if (Objects.isNull(exchangisProjectUser)) { + String errorMsg = String.format("Project may be deleted, please check it with project_id [%s] and priv_user [%s] in table exchangis_project_user", + exchangisBasicJob.getProjectId(), privUser); + throw new ExchangisJobServerException(ExchangisJobExceptionCode.JOB_EXCEPTION_CODE.getCode(), errorMsg); + } + return hasProjectAuthority(privUser, exchangisProjectUser.getProjectId(), operationType); + } + + public static boolean hasJobExecuteSituationAuthority(String privUser, String jobExecutionId, OperationType operationType) throws ExchangisJobServerException { + LaunchedExchangisJobEntity launchedExchangisJob = launchedJobDao.searchLaunchedJob(jobExecutionId); + if (Objects.isNull(launchedExchangisJob)) { + String errorMsg = String.format("Launched job may be deleted, please check it with job_execution_id [%s] in table exchangis_launched_job_entity", + jobExecutionId); + throw new ExchangisJobServerException(ExchangisJobExceptionCode.JOB_EXCEPTION_CODE.getCode(), errorMsg); + } + + return hasJobAuthority(privUser, launchedExchangisJob.getJobId(), operationType); + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/utils/JobUtils.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/utils/JobUtils.java new file mode 100644 index 000000000..0afd52848 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/utils/JobUtils.java @@ -0,0 +1,181 @@ +package com.webank.wedatasphere.exchangis.job.server.utils; + +import org.apache.commons.lang.StringUtils; +import org.apache.linkis.common.utils.VariableUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.StringWriter; +import java.text.SimpleDateFormat; +import java.util.*; +import java.util.regex.Matcher; + + +public class JobUtils { + + private static final String MARKER_HEAD = "r"; + + private static Logger logger = LoggerFactory.getLogger(JobUtils.class); + + /** + * Replace the parameters in the given expression with values from the variable map + * @param map variable map + * @param key expression to resolve + * @param subValue output buffer + */ + private static void replaceParameters(Map<String, String> map, String key, + StringBuffer subValue) { + StringBuilder subKey = new StringBuilder(); + char[] chars = key.toCharArray(); + int i = 0; + boolean isParameter = false; + int count = 0; + while(i < chars.length){ + if(chars[i] == '$' && i + 1 < chars.length && chars[i + 1] == '{'){ + isParameter = true; + if(count > 0){ + subKey.append(chars[i]).append(chars[i + 1]); + } + count++; + i = i + 2; + continue; + }else if(chars[i] == '}'){ + count--; + if(count == 0){ + String parameter = subKey.toString(); + if(parameter.contains("${") && parameter.contains("}")){ + StringBuffer sb = new StringBuffer(); + replaceParameters(map, parameter, sb); + parameter = sb.toString(); + } + String v = map.get(parameter); + isParameter = false; + subKey.delete(0, subKey.length()); + if(null != v && !"".equals(v)){ + if(v.contains("${") && v.contains("}")){ + replaceParameters(map, v, subValue); + }else{ + subValue.append(v); + } + }else{ + subValue.append("${").append(parameter).append("}"); + } + i = i + 1; + continue; + } + + } + if(isParameter){ + subKey.append(chars[i]); + }else{ + subValue.append(chars[i]); + } + i = i + 1; + } + } + + + public static String renderDt(String template, Calendar calendar){ + long time = calendar.getTimeInMillis(); + if(template == null){ + return null; + } + Date date = new Date(); + + Matcher matcher =
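+ /* + * Rendering examples (illustrative, taking "now" as 2021-10-25): "${run_date}" -> "20211024" + * (the default branch moves one day back and formats as yyyyMMdd); "${run_date_std}" -> + * "2021-10-24"; "${yyyyMMdd}" -> "20211025" (hour-spec symbols format the current time directly). + */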
DateTool.TIME_REGULAR_PATTERN.matcher(template); + while(matcher.find()){ + try { + String m = template.substring(matcher.start(), matcher.end()); + StringWriter sw = new StringWriter(); + DateTool dataTool = new DateTool(time); + String symbol = matcher.group(1); + boolean spec = false; + if (null != symbol) { + String startTime = null; + String tempTime = null; + for(String specSymbol : DateTool.HOUR_SPEC_SYMBOLS){ + if(specSymbol.equals(symbol)){ + tempTime = dataTool.format(specSymbol); + startTime = template.replace(m, tempTime); + return startTime; + } + } + if(!spec) { + if (DateTool.MONTH_BEGIN_SYMBOL.equals(symbol)) { + dataTool.getMonthBegin(0); + } else if (DateTool.MONTH_END_SYMBOL.equals(symbol)) { + dataTool.getMonthEnd(0); + } else if (DateTool.TIME_PLACEHOLDER_TIMESTAMP.equals(symbol)){ + calendar.setTime(date); + calendar.add(Calendar.DAY_OF_MONTH, 0); + tempTime = String.valueOf(calendar.getTimeInMillis()); + startTime = template.replace(m, tempTime); + return startTime; + } + else { + dataTool.addDay(-1); + } + } + + } + String calculate = matcher.group(3); + String number = matcher.group(4); + if (null != calculate && null != number) { + if ("+".equals(calculate)) { + if(spec){ + dataTool.addHour(Integer.parseInt(number)); + }else { + dataTool.addDay(Integer.parseInt(number)); + } + } else if ("-".equals(calculate)) { + if(spec){ + dataTool.addHour(-Integer.parseInt(number)); + }else { + dataTool.addDay(-Integer.parseInt(number)); + } + } + } + String formatSymbol = matcher.group(2); + if(spec){ + sw.append(dataTool.format(symbol)); + }else if(DateTool.FORMAT_STD_SYMBOL.equals(formatSymbol)){ + sw.append(dataTool.format("yyyy-MM-dd")); + }else if(DateTool.FORMAT_UTC_SYMBOL.equals(formatSymbol)) { + // Set the hour as the beginning of day + sw.append(dataTool.format("yyyy-MM-dd'T'HH:00:00.000'Z'", "UTC")); +// sw.append(dataTool.format("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'", "UTC")); + } else if(StringUtils.isNotBlank(formatSymbol) && formatSymbol.startsWith("_")){ + String format = formatSymbol.substring(1); + sw.append(dataTool.format(format)); + } else{ + sw.append(dataTool.format("yyyyMMdd")); + } + template=template.replace(m, sw.toString()); + matcher= DateTool.TIME_REGULAR_PATTERN.matcher(template); + }catch(Exception e){ + logger.error("TASK_ERROR, cannot render job's configuration, message: {}", e.getMessage(), e); + break; + } + } + //${yesterday} + return template.replace("${yesterday}",new DateTool(time).addDay(-1).format("yyyyMMdd")); + } + + /** + * Replace source string with variable (use Linkis common module) + * @param source source + * @return string + */ + public static String replaceVariable(String source, Map variables){ + String result = source; + if (StringUtils.isNotBlank(result)){ + result = VariableUtils.replace(MARKER_HEAD + source, variables).substring(MARKER_HEAD.length()); + if (StringUtils.isNotBlank(result)){ + // Render again + result = renderDt(result, Calendar.getInstance()); + } + } + return result; + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/utils/JsonEntity.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/utils/JsonEntity.java new file mode 100644 index 000000000..448262822 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/utils/JsonEntity.java @@ -0,0 +1,400 @@ +package com.webank.wedatasphere.exchangis.job.server.utils; + +import 
com.webank.wedatasphere.exchangis.datasource.core.utils.Json;
+import org.apache.commons.lang.StringUtils;
+
+import java.util.*;
+import java.util.stream.Collectors;
+
+/**
+ * Copy and simplify from 'Configuration' in 'DataX'
+ */
+public class JsonEntity {
+    public static final String SPLIT_CHAR = ".";
+    private Object root;
+
+    public static JsonEntity from(String json){
+        return new JsonEntity(json);
+    }
+
+    public static JsonEntity from(Map<String, Object> map){
+        return new JsonEntity(map);
+    }
+
+
+    public static boolean searchKeyToInsertValue(JsonEntity configuration, String path,
+                                                 String key, Object value){
+        JsonEntity subConf = configuration.getConfiguration(path);
+        Set<String> keys = subConf.getKeys();
+        //search key
+        for(String key0 : keys){
+            if(key0.endsWith(SPLIT_CHAR + key)){
+                configuration.set(StringUtils.join(new String[]{path, key0}, SPLIT_CHAR), value);
+                return true;
+            }
+        }
+
+        return false;
+    }
+
+
+    public static List<String> searchKeyPaths(JsonEntity configuration, String path,
+                                              String key){
+        return searchKeyPaths(configuration, path, key, Integer.MAX_VALUE);
+    }
+
+    public static List<String> searchKeyPaths(JsonEntity configuration, String path,
+                                              String key, int depth){
+        List<String> result = new ArrayList<>();
+        JsonEntity subConf = configuration.getConfiguration(path);
+        Set<String> keys = subConf.getKeys(depth);
+        keys.forEach(key0 -> {
+            if(key0.equals(key) || key0.endsWith(SPLIT_CHAR + key)){
+                result.add(key0);
+            }
+        });
+        return result;
+    }
+    /**
+     * Serialize the configuration
+     * @return json string
+     */
+    public String toJson(){
+        return Json.toJson(root, null);
+    }
+
+    @SuppressWarnings("unchecked")
+    public Map<String, Object> toMap(){
+        if (this.root instanceof Map){
+            return new HashMap<>((Map<String, Object>) root);
+        }
+        return null;
+    }
+
+    /**
+     * Set value to path
+     * @param path path
+     * @param object object value
+     * @return the previous value on the path (null if absent)
+     */
+    public Object set(final String path, final Object object) {
+        if(!checkPath(path)){
+            return null;
+        }
+        Object result = this.get(path);
+        setObject(path, extract(object));
+        return result;
+    }
+
+    /**
+     * Get value by path
+     * @param path path
+     * @return value on the path (null if absent)
+     */
+    public Object get(final String path){
+        this.checkPath(path);
+        return this.findObject(path);
+    }
+
+    /**
+     * Get string value by path;
+     * if the result is null, use the default value
+     * @param path path
+     * @param defaultValue default value
+     * @return string value
+     */
+    public String getString(final String path, String defaultValue){
+        Object result = this.get(path);
+        if(null == result){
+            return defaultValue;
+        }
+        if(result instanceof String){
+            return (String)result;
+        }else if(result.getClass().isPrimitive() || isWrapClass(result.getClass())){
+            return String.valueOf(result);
+        }else{
+            return Json.toJson(result, null);
+        }
+    }
+
+    /**
+     * Get integer value by path
+     * @param path path
+     * @return integer value (null if absent)
+     */
+    public Integer getInt(final String path){
+        String result = this.getString(path);
+        if(null == result){
+            return null;
+        }
+        return Integer.valueOf(result);
+    }
+
+    /**
+     * Get double value by path
+     * @param path path
+     * @return double value (null if absent)
+     */
+    public Double getDouble(final String path){
+        String result = this.getString(path);
+        if(null == result){
+            return null;
+        }
+        return Double.valueOf(result);
+    }
+
+    /**
+     * Get long value by path
+     * @param path path
+     * @return long value (null if absent)
+     */
+    public Long getLong(final String path){
+        String result = this.getString(path);
+        if(null == result){
+            return null;
+        }
+        return Long.valueOf(result);
+    }
+    /**
+     * Get string value by path
+     * @param path path
+     * @return string value
+     */
+    public String getString(final String path){
+        return
getString(path, null); + } + + /** + * Get keys + * @return + */ + public Set getKeys(){ + return getKeys(Integer.MAX_VALUE); + } + + public Set getKeys(int maxDepth){ + Set collect = new HashSet<>(); + this.getKeysRecursive(this.root, "", collect, maxDepth); + return collect; + } + + public JsonEntity getConfiguration(final String path){ + Object object = this.get(path); + if(null == object){ + return null; + } + return from(Json.toJson(object, null)); + } + + private Object findObject(final String path){ + if(StringUtils.isBlank(path)){ + return this.root; + } + Object target = this.root; + for(final String each : split2List(path)){ + if(isPathMap(each) && target instanceof Map){ + target = ((Map)target).get(each); + }else if (isPathList(each) && target instanceof List){ + String index = each.replace("[", "").replace("]", ""); + if(!StringUtils.isNumeric(index)){ + throw new IllegalArgumentException("index value must be numeric, value: " + index); + } + target = ((List)target).get(Integer.valueOf(index)); + }else{ + target = null; + break; + } + } + return target; + } + + private void setObject(final String path, final Object object){ + Object newRoot = setObjectRecursive(this.root, split2List(path), 0, object); + boolean isSuit = null != newRoot && (newRoot instanceof List || object instanceof Map); + if(isSuit){ + this.root = newRoot; + } + } + + private Object setObjectRecursive(Object current, final List paths, + int index, final Object value){ + if(index >= paths.size()){ + return value; + } + String path = paths.get(index).trim(); + if(isPathMap(path)){ + //current object is not map + Map mapping; + if(!(current instanceof Map)){ + mapping = new HashMap<>(1); + mapping.put(path, buildObject(paths.subList(index + 1, paths.size()), value)); + return mapping; + } + mapping = (Map)current; + //current map does not have key + if(!mapping.containsKey(path)){ + mapping.put(path, buildObject(paths.subList(index + 1, paths.size()), value)); + return mapping; + } + mapping.put(path, setObjectRecursive(mapping.get(path), + paths, index + 1, value)); + return mapping; + } + if(isPathList(path)){ + List lists; + int listIndex = getIndex(path); + //current object is not list + if(!(current instanceof List)){ + lists = expand(new ArrayList<>(listIndex + 1), listIndex + 1); + lists.set(listIndex, buildObject(paths.subList(index + 1, paths.size()), value)); + return lists; + } + lists = (List) current; + lists = expand(lists, listIndex + 1); + //current list does not have the index + if(null == lists.get(listIndex)){ + lists.set(listIndex, buildObject(paths.subList(index + 1, paths.size()), value)); + return lists; + } + lists.set(listIndex, setObjectRecursive(lists.get(listIndex), + paths, index + 1, value)); + return lists; + } + throw new RuntimeException("system error"); + } + + private Object buildObject(final List paths, final Object object){ + if(null == paths ){ + throw new IllegalArgumentException("paths cannot be null"); + } + if(1 == paths.size() && StringUtils.isBlank(paths.get(0))){ + return object; + } + Object child = object; + for(int i = paths.size() - 1; i >= 0; i--){ + String path = paths.get(i); + if(isPathMap(path)){ + Map mapping = new HashMap<>(1); + mapping.put(path, child); + child = mapping; + continue; + } + if(isPathList(path)){ + int index = getIndex(path); + List lists = new ArrayList<>(index + 1); + expand(lists, index + 1); + lists.set(index, child); + child = lists; + continue; + } + throw new IllegalArgumentException("illegal path"); + } + return child; + } + + 
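/*
+     * Illustrative path semantics (a sketch, not part of the original class):
+     *   JsonEntity conf = JsonEntity.from("{}");
+     *   conf.set("job.content[0].reader", "txtfilereader");
+     *   conf.getString("job.content[0].reader");   // -> "txtfilereader"
+     * Map segments are plain keys, list segments use the "[index]" form, and a
+     * literal dot inside a key must first be escaped with encodePath ("." -> "0x2e").
+     */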
private Object extract(final Object object){
+        if(object instanceof JsonEntity){
+            return ((JsonEntity)object).root;
+        }
+        if(object instanceof List){
+            List<Object> result = new ArrayList<>();
+            for(final Object each : (List<?>)object){
+                result.add(extract(each));
+            }
+            return result;
+        }
+        if(object instanceof Map){
+            Map<?, ?> map = (Map<?, ?>)object;
+            Map<String, Object> result = new HashMap<>(map.size());
+            for(final Object key : map.keySet()){
+                result.put(String.valueOf(key), extract(map.get(key)));
+            }
+            return result;
+        }
+        return object;
+    }
+
+    private boolean isPathList(final String path){
+        return path.contains("[") && path.contains("]");
+    }
+
+    private boolean isPathMap(final String path){
+        return StringUtils.isNotBlank(path) && !isPathList(path);
+    }
+
+    private List<String> split2List(final String path){
+        return Arrays.asList(StringUtils.split(path.replace("[", ".["), ".")).stream()
+                .map(value -> value.replace("0x2e", ".")).collect(Collectors.toList());
+    }
+
+    private int getIndex(final String index) {
+        return Integer.parseInt(index.replace("[", "").replace("]", ""));
+    }
+
+    private boolean checkPath(final String path){
+        if(null == path){
+            return false;
+        }
+        for(String each : StringUtils.split(path, SPLIT_CHAR)){
+            if(StringUtils.isWhitespace(each)){
+                throw new IllegalArgumentException("cannot contain white space in : " + path);
+            }
+        }
+        return true;
+    }
+
+    private List<Object> expand(List<Object> list, int size) {
+        int expand = size - list.size();
+        while (expand-- > 0) {
+            list.add(null);
+        }
+        return list;
+    }
+
+    private void getKeysRecursive(final Object current, String path, Set<String> collect, int depth){
+        if(depth-- <= 0){
+            collect.add(path);
+            return;
+        }
+        if(current instanceof Map){
+            Map<?, ?> mapping = (Map<?, ?>)current;
+            for(final Object key : mapping.keySet()){
+                String keyStr = String.valueOf(key).trim();
+                if(StringUtils.isBlank(path)){
+                    getKeysRecursive(mapping.get(key), keyStr, collect, depth);
+                }else{
+                    getKeysRecursive(mapping.get(key), path + SPLIT_CHAR + keyStr, collect, depth);
+                }
+            }
+        }else if(current instanceof List){
+            List<?> lists = (List<?>) current;
+            for(int i = 0; i < lists.size(); i++){
+                getKeysRecursive(lists.get(i), path + String.format("[%d]", i), collect, depth);
+            }
+        }else{
+            collect.add(path);
+        }
+    }
+
+    private static boolean isWrapClass(Class<?> clz){
+        try{
+            return ((Class<?>)clz.getField("TYPE").get(null)).isPrimitive();
+        }catch(Exception e){
+            return false;
+        }
+    }
+
+    public static String encodePath(String path){
+        if (StringUtils.isNotBlank(path)){
+            return path.replace(".", "0x2e");
+        }
+        return path;
+    }
+
+    private JsonEntity(final String json){
+        this.root = Json.fromJson(json, Map.class);
+    }
+
+    private JsonEntity(final Map<String, Object> jsonMap){
+        this.root = new HashMap<>(jsonMap);
+    }
+}
diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/utils/RSAUtil.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/utils/RSAUtil.java
new file mode 100644
index 000000000..6f8ef4966
--- /dev/null
+++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/utils/RSAUtil.java
@@ -0,0 +1,80 @@
+package com.webank.wedatasphere.exchangis.job.server.utils;
+
+import javax.crypto.Cipher;
+import java.io.IOException;
+import java.security.*;
+import java.security.spec.PKCS8EncodedKeySpec;
+import java.security.spec.X509EncodedKeySpec;
+import java.util.Base64;
+
+/**
+ * @author tikazhang
+ * @Date 2022/8/4 10:35
+ */
+public class RSAUtil {
+    // Generate an RSA key pair
+    public static KeyPair
getKeyPair() throws Exception {
+        KeyPairGenerator keyPairGenerator = KeyPairGenerator.getInstance("RSA");
+        keyPairGenerator.initialize(2048);
+        KeyPair keyPair = keyPairGenerator.generateKeyPair();
+        return keyPair;
+    }
+
+    // Get the public key (Base64 encoded)
+    public static String getPublicKey(KeyPair keyPair){
+        PublicKey publicKey = keyPair.getPublic();
+        byte[] bytes = publicKey.getEncoded();
+        return byte2Base64(bytes);
+    }
+
+    // Get the private key (Base64 encoded)
+    public static String getPrivateKey(KeyPair keyPair){
+        PrivateKey privateKey = keyPair.getPrivate();
+        byte[] bytes = privateKey.getEncoded();
+        return byte2Base64(bytes);
+    }
+
+    // Convert a Base64-encoded public key into a PublicKey object
+    public static PublicKey string2PublicKey(String pubStr) throws Exception{
+        byte[] keyBytes = base642Byte(pubStr);
+        X509EncodedKeySpec keySpec = new X509EncodedKeySpec(keyBytes);
+        KeyFactory keyFactory = KeyFactory.getInstance("RSA");
+        PublicKey publicKey = keyFactory.generatePublic(keySpec);
+        return publicKey;
+    }
+
+    // Convert a Base64-encoded private key into a PrivateKey object
+    public static PrivateKey string2PrivateKey(String priStr) throws Exception{
+        byte[] keyBytes = base642Byte(priStr);
+        PKCS8EncodedKeySpec keySpec = new PKCS8EncodedKeySpec(keyBytes);
+        KeyFactory keyFactory = KeyFactory.getInstance("RSA");
+        PrivateKey privateKey = keyFactory.generatePrivate(keySpec);
+        return privateKey;
+    }
+
+    // Encrypt content with the public key
+    public static byte[] publicEncrypt(byte[] content, PublicKey publicKey) throws Exception{
+        Cipher cipher = Cipher.getInstance("RSA");
+        cipher.init(Cipher.ENCRYPT_MODE, publicKey);
+        byte[] bytes = cipher.doFinal(content);
+        return bytes;
+    }
+
+    // Decrypt content with the private key
+    public static byte[] privateDecrypt(byte[] content, PrivateKey privateKey) throws Exception{
+        Cipher cipher = Cipher.getInstance("RSA");
+        cipher.init(Cipher.DECRYPT_MODE, privateKey);
+        byte[] bytes = cipher.doFinal(content);
+        return bytes;
+    }
+
+    // Encode a byte array to Base64
+    public static String byte2Base64(byte[] bytes){
+        return Base64.getEncoder().encodeToString(bytes);
+    }
+
+    // Decode a Base64 string to a byte array
+    public static byte[] base642Byte(String base64Key) throws IOException {
+        return Base64.getDecoder().decode(base64Key);
+    }
+}
diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/utils/SQLCommandUtils.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/utils/SQLCommandUtils.java
new file mode 100644
index 000000000..821b4087b
--- /dev/null
+++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/utils/SQLCommandUtils.java
@@ -0,0 +1,128 @@
+package com.webank.wedatasphere.exchangis.job.server.utils;
+
+import com.webank.wedatasphere.exchangis.datasource.core.utils.Json;
+import org.apache.commons.lang3.StringUtils;
+
+import java.util.Arrays;
+import java.util.List;
+import java.util.Objects;
+
+/**
+ * Command utils for SQL
+ */
+public class SQLCommandUtils {
+    private static final String DEFAULT_COLUMN_SEPARATOR = ",";
+    private static final String DEFAULT_TABLE_COLUMN_SEPARATOR = ".";
+    /**
+     * Command list
+     */
+    private static final String SQL_WHERE_CONDITION = " WHERE ";
+    private static final String SQL_INNER_JOIN = " INNER JOIN ";
+    private static final String SQL_SELECT_CONDITION = " SELECT ";
+    private static final String SQL_FROM_CONDITION = " FROM ";
+    private static final String SQL_ON_CONDITION = " ON ";
+    private static final String SQL_AND_CONDITION = " AND ";
+
+    public static String contactSql(List<String> tables, List<String> alias,
+                                    List<String> columns, List<Object> joinInfo, String
whereClause){
+        StringBuilder builder = new StringBuilder(SQL_SELECT_CONDITION)
+                .append(columnListSql(columns))
+                .append(SQL_FROM_CONDITION)
+                .append(tableOnSql(tables, alias, joinInfo));
+        if(StringUtils.isNotBlank(whereClause)){
+            builder.append(SQL_WHERE_CONDITION).append(whereClause);
+        }
+        return builder.toString();
+    }
+
+    private static String columnListSql(List<String> columns){
+        StringBuilder builder = new StringBuilder();
+        for(int i = 0; i < columns.size(); i++){
+            builder.append(columns.get(i));
+            if(i < columns.size() - 1){
+                builder.append(DEFAULT_COLUMN_SEPARATOR);
+            }
+            builder.append(" ");
+        }
+        return builder.toString();
+    }
+
+    private static String tableOnSql(List<String> tables, List<String> alias, List<Object> joinInfo){
+        StringBuilder builder = new StringBuilder();
+        boolean onJoin = false;
+        for(int i = 0; i < tables.size(); i ++){
+            builder.append(tables.get(i));
+            if(alias != null) {
+                builder.append(" ").append(alias.get(i));
+            }
+            if(onJoin && null != joinInfo){
+                Object joinConditions = joinInfo.get(i - 1);
+                builder.append(SQL_ON_CONDITION).append(joinCondition(
+                        //alias left, alias right
+                        String.valueOf(alias.get(i - 1)), String.valueOf(alias.get(i)),
+                        Objects.requireNonNull(Json.fromJson(Json.toJson(joinConditions, null), SqlJoinCondition.class))));
+                onJoin = false;
+            }
+            if(i + 1 < tables.size() && null != alias){
+                builder.append(SQL_INNER_JOIN);
+                onJoin = true;
+            }
+        }
+        return builder.toString();
+    }
+
+    private static String joinCondition(String aliasLeft, String aliasRight,
+                                        List<SqlJoinCondition> joinConditions){
+        //For example: t1.column1
+        joinConditions.forEach( joinCondition ->{
+            String left = joinCondition.getLeft();
+            if(!left.contains(DEFAULT_TABLE_COLUMN_SEPARATOR)){
+                joinCondition.setLeft(StringUtils.isNotBlank(aliasLeft)? aliasLeft + DEFAULT_TABLE_COLUMN_SEPARATOR + joinCondition.getLeft() : joinCondition.getLeft());
+            }
+            String right = joinCondition.getRight();
+            if(!right.contains(DEFAULT_TABLE_COLUMN_SEPARATOR)){
+                joinCondition.setRight(StringUtils.isNotBlank(aliasRight)?
aliasRight + DEFAULT_TABLE_COLUMN_SEPARATOR + joinCondition.getRight() : joinCondition.getRight());
+            }
+        });
+        SqlJoinCondition[] conditions = new SqlJoinCondition[joinConditions.size()];
+        joinConditions.toArray(conditions);
+        return StringUtils.join(conditions, SQL_AND_CONDITION);
+    }
+
+    /**
+     * Sql join condition
+     */
+    public static class SqlJoinCondition {
+        private String left;
+        private String right;
+        private String condition;
+        public String getLeft() {
+            return left;
+        }
+
+        public void setLeft(String left) {
+            this.left = left;
+        }
+
+        public String getRight() {
+            return right;
+        }
+
+        public void setRight(String right) {
+            this.right = right;
+        }
+
+        public String getCondition() {
+            return condition;
+        }
+
+        public void setCondition(String condition) {
+            this.condition = condition;
+        }
+
+        @Override
+        public String toString(){
+            return StringUtils.join(new String[]{left, condition, right}, " ");
+        }
+    }
+}
diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/utils/SpringContextHolder.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/utils/SpringContextHolder.java
new file mode 100644
index 000000000..44d6bb7e5
--- /dev/null
+++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/utils/SpringContextHolder.java
@@ -0,0 +1,43 @@
+/**
+ *
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ */
+package com.webank.wedatasphere.exchangis.job.server.utils;
+
+import org.springframework.beans.factory.DisposableBean;
+import org.springframework.context.ApplicationContext;
+import org.springframework.context.ApplicationContextAware;
+import org.springframework.stereotype.Component;
+
+import java.util.Objects;
+
+@Component
+public class SpringContextHolder implements ApplicationContextAware, DisposableBean {
+
+    private static ApplicationContext applicationContext = null;
+
+    public static ApplicationContext getApplicationContext() {
+        return applicationContext;
+    }
+
+    public static <T> T getBean(Class<T> requiredType) {
+        if (Objects.nonNull(applicationContext)) {
+            return applicationContext.getBean(requiredType);
+        }
+        return null;
+    }
+
+    @Override
+    public void setApplicationContext(ApplicationContext applicationContext) {
+        if (SpringContextHolder.applicationContext == null) {
+            SpringContextHolder.applicationContext = applicationContext;
+        }
+    }
+
+    @Override
+    public void destroy() throws Exception {
+        applicationContext = null;
+    }
+
+}
diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/vo/ExchangisCategoryLogVo.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/vo/ExchangisCategoryLogVo.java
new file mode 100644
index 000000000..e044d4be9
--- /dev/null
+++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/vo/ExchangisCategoryLogVo.java
@@ -0,0 +1,83 @@
+package com.webank.wedatasphere.exchangis.job.server.vo;
+
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonInclude;
+import com.webank.wedatasphere.exchangis.job.log.LogResult;
+import org.apache.commons.lang.StringUtils;
+
+import java.util.*;
+import java.util.function.Function;
+
+/**
+ * Category log
+ */
+@JsonInclude(JsonInclude.Include.NON_NULL)
+public class ExchangisCategoryLogVo {
+
+    @JsonIgnore
+    private Map<String, CategoryLog> categoryStrategy = new HashMap<>();
+
+    private
int endLine;
+
+    private boolean isEnd = false;
+
+    private Map<String, String> logs = new HashMap<>();
+
+    public void newCategory(String name, Function<String, Boolean> logAcceptable){
+        this.categoryStrategy.put(name, new CategoryLog(logAcceptable));
+    }
+
+    public void processLogResult(LogResult logResult, boolean acceptEmpty){
+        this.endLine = logResult.getEndLine();
+        this.isEnd = logResult.isEnd();
+        List<CategoryLog> categoryLogs = new ArrayList<>(this.categoryStrategy.values());
+        Optional.ofNullable(logResult.getLogs()).ifPresent(logs -> logs.forEach(log -> {
+            for (CategoryLog categoryLog : categoryLogs){
+                if (categoryLog.logAcceptable.apply(log)) {
+                    categoryLog.logs.add(log);
+                }
+            }
+        }));
+        this.categoryStrategy.forEach((category, categoryLog) -> {
+            if (!categoryLog.logs.isEmpty() || acceptEmpty) {
+                this.logs.put(category, StringUtils.join(categoryLog.logs, "\n"));
+                categoryLog.logs.clear();
+            }
+        });
+    }
+
+    public int getEndLine() {
+        return endLine;
+    }
+
+    public void setEndLine(int endLine) {
+        this.endLine = endLine;
+    }
+
+    public boolean getIsEnd() {
+        return isEnd;
+    }
+
+    public void setIsEnd(boolean end) {
+        isEnd = end;
+    }
+
+    public Map<String, String> getLogs() {
+        return logs;
+    }
+
+    public void setLogs(Map<String, String> logs) {
+        this.logs = logs;
+    }
+
+    private static class CategoryLog{
+
+        private List<String> logs = new ArrayList<>();
+
+        private Function<String, Boolean> logAcceptable;
+
+        public CategoryLog(Function<String, Boolean> logAcceptable){
+            this.logAcceptable = logAcceptable;
+        }
+    }
+}
diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/vo/ExchangisJobBasicInfoVO.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/vo/ExchangisJobBasicInfoVO.java
new file mode 100644
index 000000000..40751f3b7
--- /dev/null
+++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/vo/ExchangisJobBasicInfoVO.java
@@ -0,0 +1,114 @@
+package com.webank.wedatasphere.exchangis.job.server.vo;
+
+public class ExchangisJobBasicInfoVO {
+
+    private Long id;
+
+    private Long projectId;
+
+    private Long dssProjectId;
+
+    private String dssProjectName;
+
+    private String nodeId;
+
+    private String nodeName;
+
+    private String jobName;
+
+    private String jobType;
+
+    private String engineType;
+
+    private String jobLabels;
+
+    private String jobDesc;
+
+    public Long getId() {
+        return id;
+    }
+
+    public void setId(Long id) {
+        this.id = id;
+    }
+
+    public Long getProjectId() {
+        return projectId;
+    }
+
+    public void setProjectId(Long projectId) {
+        this.projectId = projectId;
+    }
+
+    public String getJobName() {
+        return jobName;
+    }
+
+    public void setJobName(String jobName) {
+        this.jobName = jobName;
+    }
+
+    public String getJobType() {
+        return jobType;
+    }
+
+    public void setJobType(String jobType) {
+        this.jobType = jobType;
+    }
+
+    public String getEngineType() {
+        return engineType;
+    }
+
+    public void setEngineType(String engineType) {
+        this.engineType = engineType;
+    }
+
+    public String getJobLabels() {
+        return jobLabels;
+    }
+
+    public void setJobLabels(String jobLabels) {
+        this.jobLabels = jobLabels;
+    }
+
+    public String getJobDesc() {
+        return jobDesc;
+    }
+
+    public void setJobDesc(String jobDesc) {
+        this.jobDesc = jobDesc;
+    }
+
+    public Long getDssProjectId() {
+        return dssProjectId;
+    }
+
+    public void setDssProjectId(Long dssProjectId) {
+        this.dssProjectId = dssProjectId;
+    }
+
+    public String getDssProjectName() {
+        return dssProjectName;
+    }
+
+    public void setDssProjectName(String dssProjectName) {
this.dssProjectName = dssProjectName;
+    }
+
+    public String getNodeId() {
+        return nodeId;
+    }
+
+    public void setNodeId(String nodeId) {
+        this.nodeId = nodeId;
+    }
+
+    public String getNodeName() {
+        return nodeName;
+    }
+
+    public void setNodeName(String nodeName) {
+        this.nodeName = nodeName;
+    }
+}
diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/vo/ExchangisJobExportVo.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/vo/ExchangisJobExportVo.java
new file mode 100644
index 000000000..030e71808
--- /dev/null
+++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/vo/ExchangisJobExportVo.java
@@ -0,0 +1,4 @@
+package com.webank.wedatasphere.exchangis.job.server.vo;
+
+public class ExchangisJobExportVo {
+}
diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/vo/ExchangisJobImportVo.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/vo/ExchangisJobImportVo.java
new file mode 100644
index 000000000..ed0433750
--- /dev/null
+++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/vo/ExchangisJobImportVo.java
@@ -0,0 +1,4 @@
+package com.webank.wedatasphere.exchangis.job.server.vo;
+
+public class ExchangisJobImportVo {
+}
diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/vo/ExchangisJobProgressVo.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/vo/ExchangisJobProgressVo.java
new file mode 100644
index 000000000..80a56e0b8
--- /dev/null
+++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/vo/ExchangisJobProgressVo.java
@@ -0,0 +1,73 @@
+package com.webank.wedatasphere.exchangis.job.server.vo;
+
+import com.webank.wedatasphere.exchangis.job.launcher.domain.task.TaskStatus;
+
+import java.util.*;
+
+
+public class ExchangisJobProgressVo extends ExchangisProgressVo{
+
+    private Map<TaskStatus, List<ExchangisProgressVo>> tasks = new HashMap<>();
+
+    public ExchangisJobProgressVo(TaskStatus status, Double progress) {
+        super(status, progress);
+    }
+
+    public ExchangisJobProgressVo(){
+        super();
+    }
+
+    /**
+     * Add task progress
+     * @param progressVo progress vo
+     */
+    public void addTaskProgress(ExchangisProgressVo progressVo){
+        TaskStatus status = progressVo.getStatus();
+        if (Objects.nonNull(status)){
+            tasks.compute(status, (keyStatus, statusTasks) -> {
+                if (Objects.isNull(statusTasks)){
+                    statusTasks = new ArrayList<>();
+                }
+                statusTasks.add(progressVo);
+                return statusTasks;
+            });
+        }
+    }
+
+    public Map<TaskStatus, List<ExchangisProgressVo>> getTasks() {
+        return tasks;
+    }
+
+    public void setTasks(Map<TaskStatus, List<ExchangisProgressVo>> tasks) {
+        this.tasks = tasks;
+    }
+
+    public static class ExchangisTaskProgressVo extends ExchangisProgressVo{
+
+        private String taskId;
+
+        private String name;
+
+        public ExchangisTaskProgressVo(String taskId, String name, TaskStatus status, Double progress) {
+            super(status, progress);
+            this.taskId = taskId;
+            this.name = name;
+        }
+
+        public String getName() {
+            return name;
+        }
+
+        public void setName(String name) {
+            this.name = name;
+        }
+
+        public String getTaskId() {
+            return taskId;
+        }
+
+        public void setTaskId(String taskId) {
+            this.taskId = taskId;
+        }
+    }
+}
diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/vo/ExchangisJobTaskVo.java
b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/vo/ExchangisJobTaskVo.java new file mode 100644 index 000000000..ae6a94c12 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/vo/ExchangisJobTaskVo.java @@ -0,0 +1,128 @@ +package com.webank.wedatasphere.exchangis.job.server.vo; + +import java.util.Date; + +/** + * + * @Date 2022/1/12 22:34 + */ + +public class ExchangisJobTaskVo { + + private String name; + + private Date createTime; + + private String status; + + private Date lastUpdateTime; + + private String engineType; + + private String executeUser; + + private String taskId; + + private String linkisJobId; + + private String linkisJobInfo; + + private Date launchTime; + + public ExchangisJobTaskVo(){ + + } + + public ExchangisJobTaskVo(String taskId, String name, String status, Date createTime, Date launchTime, Date lastUpdateTime, String engineType, String linkisJobId, String linkisJobInfo, String executeUser){ + this.taskId = taskId; + this.name = name; + this.status = status; + this.createTime = createTime; + this.launchTime = launchTime; + this.lastUpdateTime = lastUpdateTime; + this.engineType = engineType; + this.linkisJobId = linkisJobId; + this.linkisJobInfo = linkisJobInfo; + this.executeUser = executeUser; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public Date getCreateTime() { + return createTime; + } + + public void setCreateTime(Date createTime) { + this.createTime = createTime; + } + + public String getStatus() { + return status; + } + + public void setStatus(String status) { + this.status = status; + } + + public Date getLastUpdateTime() { + return lastUpdateTime; + } + + public void setLastUpdateTime(Date lastUpdateTime) { + this.lastUpdateTime = lastUpdateTime; + } + + public String getEngineType() { + return engineType; + } + + public void setEngineType(String engineType) { + this.engineType = engineType; + } + + public String getExecuteUser() { + return executeUser; + } + + public void setExecuteUser(String executeUser) { + this.executeUser = executeUser; + } + + public String getTaskId() { + return taskId; + } + + public void setTaskId(String taskId) { + this.taskId = taskId; + } + + public String getLinkisJobId() { + return linkisJobId; + } + + public void setLinkisJobId(String linkisJobId) { + this.linkisJobId = linkisJobId; + } + + public String getLinkisJobInfo() { + return linkisJobInfo; + } + + public void setLinkisJobInfo(String linkisJobInfo) { + this.linkisJobInfo = linkisJobInfo; + } + + public Date getLaunchTime() { + return launchTime; + } + + public void setLaunchTime(Date launchTime) { + this.launchTime = launchTime; + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/vo/ExchangisLaunchedJobListVo.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/vo/ExchangisLaunchedJobListVo.java new file mode 100644 index 000000000..f48c9f8b2 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/vo/ExchangisLaunchedJobListVo.java @@ -0,0 +1,128 @@ +package com.webank.wedatasphere.exchangis.job.server.vo; + +import java.util.Date; + +/** + * + * @Date 2022/1/16 10:21 + */ +public class ExchangisLaunchedJobListVo { + private String jobExecutionId; + + private String executeNode; + + private String name; + + private 
Date createTime; + + private Long flow; + + private String createUser; + + private String executeUser; + + private String status; + + private double progress; + + private Date lastUpdateTime; + + public ExchangisLaunchedJobListVo(){ + + } + + public ExchangisLaunchedJobListVo(String jobExecutionId, String executeNode, String name, Date createTime, Long flow, String createUser, String executeUser, String status, double progress, Date lastUpdateTime){ + this.jobExecutionId = jobExecutionId; + this.executeNode = executeNode; + this.name = name; + this.status = status; + this.createTime = createTime; + this.flow = flow; + this.createUser = createUser; + this.lastUpdateTime = lastUpdateTime; + this.progress = progress; + this.lastUpdateTime = lastUpdateTime; + this.executeUser = executeUser; + } + + public String getCreateUser() { + return createUser; + } + + public void setCreateUser(String createUser) { + this.createUser = createUser; + } + + public String getJobExecutionId() { + return jobExecutionId; + } + + public void setJobExecutionId(String jobExecutionId) { + this.jobExecutionId = jobExecutionId; + } + + public String getExecuteNode() { + return executeNode; + } + + public void setExecuteNode(String executeNode) { + this.executeNode = executeNode; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public Date getCreateTime() { + return createTime; + } + + public void setCreateTime(Date createTime) { + this.createTime = createTime; + } + + public Long getFlow() { + return flow; + } + + public void setFlow(Long flow) { + this.flow = flow; + } + + public String getExecuteUser() { + return executeUser; + } + + public void setExecuteUser(String executeUser) { + this.executeUser = executeUser; + } + + public String getStatus() { + return status; + } + + public void setStatus(String status) { + this.status = status; + } + + public double getProgress() { + return progress; + } + + public void setProgress(double progress) { + this.progress = progress; + } + + public Date getLastUpdateTime() { + return lastUpdateTime; + } + + public void setLastUpdateTime(Date lastUpdateTime) { + this.lastUpdateTime = lastUpdateTime; + } + +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/vo/ExchangisLaunchedTaskMetricsVo.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/vo/ExchangisLaunchedTaskMetricsVo.java new file mode 100644 index 000000000..006863f20 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/vo/ExchangisLaunchedTaskMetricsVo.java @@ -0,0 +1,61 @@ +package com.webank.wedatasphere.exchangis.job.server.vo; + +import com.webank.wedatasphere.exchangis.job.server.metrics.ExchangisMetricsVo; + +/** + * + * @Date 2022/1/12 23:00 + */ +public class ExchangisLaunchedTaskMetricsVo { + private String taskId; + + private String name; + + private String status; + + private ExchangisMetricsVo metrics; + //private Map metrics; + + public ExchangisLaunchedTaskMetricsVo(){ + + } + + public ExchangisLaunchedTaskMetricsVo(String taskId, String name, String status, ExchangisMetricsVo metrics){ + this.taskId = taskId; + this.name = name; + this.status = status; + this.metrics = metrics; + } + + public String getTaskId() { + return taskId; + } + + public void setTaskId(String taskId) { + this.taskId = taskId; + } + + public String getName() { + return name; + } + + public void 
setName(String name) { + this.name = name; + } + + public String getStatus() { + return status; + } + + public void setStatus(String status) { + this.status = status; + } + + public ExchangisMetricsVo getMetrics() { + return metrics; + } + + public void setMetrics(ExchangisMetricsVo metrics) { + this.metrics = metrics; + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/vo/ExchangisProgressVo.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/vo/ExchangisProgressVo.java new file mode 100644 index 000000000..4db843806 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/vo/ExchangisProgressVo.java @@ -0,0 +1,51 @@ +package com.webank.wedatasphere.exchangis.job.server.vo; + +import com.webank.wedatasphere.exchangis.job.launcher.domain.task.TaskStatus; + + +public class ExchangisProgressVo { + protected TaskStatus status; + + protected Double progress = 0.0d; + + protected Boolean allTaskStatus; + + public ExchangisProgressVo(){ + + } + + public ExchangisProgressVo(TaskStatus status, Double progress){ + this.status = status; + this.progress = progress; + } + + public ExchangisProgressVo(TaskStatus status, Double progress, Boolean allTaskStatus){ + this.status = status; + this.progress = progress; + this.allTaskStatus = allTaskStatus; + } + + public TaskStatus getStatus() { + return status; + } + + public void setStatus(TaskStatus status) { + this.status = status; + } + + public Double getProgress() { + return progress; + } + + public void setProgress(Double progress) { + this.progress = progress; + } + + public Boolean getAllTaskStatus() { + return allTaskStatus; + } + + public void setAllTaskStatus(Boolean allTaskStatus) { + this.allTaskStatus = allTaskStatus; + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/vo/JobFunction.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/vo/JobFunction.java new file mode 100644 index 000000000..64bebe6c9 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/vo/JobFunction.java @@ -0,0 +1,129 @@ +package com.webank.wedatasphere.exchangis.job.server.vo; + + +import com.fasterxml.jackson.annotation.JsonInclude; + +import java.util.ArrayList; +import java.util.Date; +import java.util.List; + +/** + * Job function entity + * @author davidhua + * 2020/4/21 + */ +@JsonInclude(JsonInclude.Include.NON_EMPTY) +public class JobFunction { + public enum FunctionType{ + VERIFY, TRANSFORM + } + private Integer id; + + private FunctionType funcType; + + private String funcName; + + private String tabName; + + private String nameDisplay; + + private Integer paramNum = 0; + + private String refName; + + private String description; + + private Date modifyTime; + + private Date createTime; + + private List paramNames = new ArrayList<>(); + + public Integer getId() { + return id; + } + + public void setId(Integer id) { + this.id = id; + } + + public FunctionType getFuncType() { + return funcType; + } + + public void setFuncType(FunctionType funcType) { + this.funcType = funcType; + } + + public String getTabName() { + return tabName; + } + + public void setTabName(String tabName) { + this.tabName = tabName; + } + + public String getNameDisplay() { + return nameDisplay; + } + + public void setNameDisplay(String nameDisplay) { + 
this.nameDisplay = nameDisplay; + } + + public Integer getParamNum() { + return paramNum; + } + + public void setParamNum(Integer paramNum) { + this.paramNum = paramNum; + } + + public String getRefName() { + return refName; + } + + public void setRefName(String refName) { + this.refName = refName; + } + + public String getDescription() { + return description; + } + + public void setDescription(String description) { + this.description = description; + } + + public Date getModifyTime() { + return modifyTime; + } + + public void setModifyTime(Date modifyTime) { + this.modifyTime = modifyTime; + } + + public Date getCreateTime() { + return createTime; + } + + public void setCreateTime(Date createTime) { + this.createTime = createTime; + } + + public String getFuncName() { + return funcName; + } + + public void setFuncName(String funcName) { + this.funcName = funcName; + } + + public List getParamNames() { + return paramNames; + } + + public void setParamNames(List paramNames) { + this.paramNames = paramNames; + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/scala/com/webank/wedatasphere/exchangis/job/server/log/DefaultRpcJobLogger.scala b/exchangis-job/exchangis-job-server/src/main/scala/com/webank/wedatasphere/exchangis/job/server/log/DefaultRpcJobLogger.scala new file mode 100644 index 000000000..62cc242d3 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/scala/com/webank/wedatasphere/exchangis/job/server/log/DefaultRpcJobLogger.scala @@ -0,0 +1,33 @@ +package com.webank.wedatasphere.exchangis.job.server.log + +import com.webank.wedatasphere.exchangis.job.listener.JobLogListener +import com.webank.wedatasphere.exchangis.job.listener.events.JobLogEvent +import com.webank.wedatasphere.exchangis.job.listener.events.JobLogEvent.Level +import org.slf4j.{Logger, LoggerFactory} + +/** + * Custom job logger, use log4j to record job logs + */ +class DefaultRpcJobLogger extends JobLogListener{ + + override def getLogger: Logger = DefaultRpcJobLogger.LOG + + /** + * Listen the event + * + * @param event event + */ + override def onEvent(event: JobLogEvent): Unit = { + val message = s"[${event.getTenancy}:${event.getJobExecutionId}] ${event.getMessage}" + event.getLevel match { + case Level.INFO => getLogger.info(message, event.getArgs: _*) + case Level.ERROR => getLogger.error(message, event.getArgs: _*) + case Level.WARN => getLogger.warn(message, event.getArgs: _*) + case _ => getLogger.trace(message, event.getArgs) + } + } +} + +object DefaultRpcJobLogger{ + private final val LOG: Logger = LoggerFactory.getLogger(this.getClass) +} diff --git a/exchangis-job/exchangis-job-server/src/main/scala/com/webank/wedatasphere/exchangis/job/server/log/JobLogService.scala b/exchangis-job/exchangis-job-server/src/main/scala/com/webank/wedatasphere/exchangis/job/server/log/JobLogService.scala new file mode 100644 index 000000000..f581f400c --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/scala/com/webank/wedatasphere/exchangis/job/server/log/JobLogService.scala @@ -0,0 +1,22 @@ +package com.webank.wedatasphere.exchangis.job.server.log +import java.util + +import com.webank.wedatasphere.exchangis.job.log.{LogQuery, LogResult} +import com.webank.wedatasphere.exchangis.job.server.log.cache.JobLogCache + + +/** + * Job Log service + */ +trait JobLogService{ + + def getOrCreateLogCache(jobExecId: String): JobLogCache[String] + + def logsFromPage(jobExecId: String, logQuery: LogQuery): LogResult + + def logsFromPageAndPath(logPath: String, logQuery: LogQuery): LogResult + + 
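/**
+   * Append raw log lines for the given job execution
+   * (the first overload qualifies the write with a tenancy; the second omits it)
+   */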
def appendLog(tenancy: String, jobExecId: String, logs: util.List[String]): Unit + + def appendLog(jobExecId: String, logs: util.List[String]): Unit +} diff --git a/exchangis-job/exchangis-job-server/src/main/scala/com/webank/wedatasphere/exchangis/job/server/log/cache/FileHandleCache.scala b/exchangis-job/exchangis-job-server/src/main/scala/com/webank/wedatasphere/exchangis/job/server/log/cache/FileHandleCache.scala new file mode 100644 index 000000000..415393b29 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/scala/com/webank/wedatasphere/exchangis/job/server/log/cache/FileHandleCache.scala @@ -0,0 +1,5 @@ +package com.webank.wedatasphere.exchangis.job.server.log.cache + +trait FileHandleCache { + +} diff --git a/exchangis-job/exchangis-job-server/src/main/scala/com/webank/wedatasphere/exchangis/job/server/log/cache/FileHandleLocalCache.scala b/exchangis-job/exchangis-job-server/src/main/scala/com/webank/wedatasphere/exchangis/job/server/log/cache/FileHandleLocalCache.scala new file mode 100644 index 000000000..f7ec6298d --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/scala/com/webank/wedatasphere/exchangis/job/server/log/cache/FileHandleLocalCache.scala @@ -0,0 +1,5 @@ +package com.webank.wedatasphere.exchangis.job.server.log.cache + +class FileHandleLocalCache { + +} diff --git a/exchangis-job/exchangis-job-server/src/main/scala/com/webank/wedatasphere/exchangis/job/server/log/cache/JobLogCache.scala b/exchangis-job/exchangis-job-server/src/main/scala/com/webank/wedatasphere/exchangis/job/server/log/cache/JobLogCache.scala new file mode 100644 index 000000000..75721f8ee --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/scala/com/webank/wedatasphere/exchangis/job/server/log/cache/JobLogCache.scala @@ -0,0 +1,100 @@ +package com.webank.wedatasphere.exchangis.job.server.log.cache + +import java.util +import java.util.concurrent.{ArrayBlockingQueue, Future} + +import com.webank.wedatasphere.exchangis.job.server.exception.ExchangisJobServerException +import com.webank.wedatasphere.exchangis.job.server.execution.scheduler.SchedulerThread +import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.scheduler.Scheduler +import com.webank.wedatasphere.exchangis.job.exception.ExchangisJobExceptionCode.LOG_OP_ERROR +import com.webank.wedatasphere.exchangis.job.server.log.JobLogService +import com.webank.wedatasphere.exchangis.job.server.utils.SpringContextHolder +import org.slf4j.{Logger, LoggerFactory} +trait JobLogCache[V] extends Logging { + def cacheLog(log: V) + + def flushCache(isEnd: Boolean): Unit +} + +object JobLogCacheUtils{ + lazy val jobLogService: JobLogService = SpringContextHolder.getBean(classOf[JobLogService]) + val LOG: Logger = LoggerFactory.getLogger(classOf[JobLogCache[String]]) + def flush(jobExecId: String, isEnd: Boolean = false): Unit ={ + Utils.tryAndWarn{ + jobLogService match { + case service: JobLogService => service.getOrCreateLogCache(jobExecId) match { + case cache: JobLogCache[String] => cache.flushCache(isEnd) + } + case _ => + } + }(LOG) + } +} +abstract class AbstractJobLogCache[V](scheduler: Scheduler, maxSize: Int = 100, flushInterval: Int = 2000) extends JobLogCache[V] with SchedulerThread{ + + var lastFlush: Long = -1L + + var cacheQueue: util.concurrent.ArrayBlockingQueue[V] = new ArrayBlockingQueue[V](maxSize) + + var isShutdown: Boolean = false + + var flushFuture: Future[_] = _ + /** + * Start entrance + */ + override def start(): Unit = { + this.flushFuture = scheduler match { + case 
scheduler: Scheduler => scheduler.getSchedulerContext.getOrCreateConsumerManager.getOrCreateExecutorService.submit(this) + case _ => throw new ExchangisJobServerException(LOG_OP_ERROR.getCode, s"TaskScheduler cannot be empty, please set it before starting the [$getName]") + } + } + + override def run(): Unit = { + Thread.currentThread.setName(s"JobLogCache-Refresher-$getName") + info(s"Thread: [ ${Thread.currentThread.getName} ] is started.") + while (!isShutdown){ + Utils.tryAndError{ + flushCache(false) + lastFlush = System.currentTimeMillis + } + Utils.tryAndError(Thread.sleep(flushInterval)) + } + info(s"Thread: [ ${Thread.currentThread.getName} ] is stopped.") + } + + override def cacheLog(log: V): Unit = { + val element: Any = getCacheQueueElement(log) + if (!cacheQueue.offer(element.asInstanceOf[V])) { + warn("The cache queue is full, should flush the cache immediately") + flushCache(false) + } else if (lastFlush + flushInterval < System.currentTimeMillis){ + trace("The cache has reached the time to be flush") + flushCache(false) + } else onCache(log) + } + + protected def onCache(log: V): Unit = { + // Do nothing + } + /** + * Stop entrance + */ + override def stop(): Unit = { + Option(this.flushFuture).foreach( future => { + this.isShutdown = true + future.cancel(true) + }) + } + + /** + * Name + * + * @return + */ + override def getName: String = "Default" + + def getCacheQueueElement(log: V): Any = { + log + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/scala/com/webank/wedatasphere/exchangis/job/server/log/cache/JobLogRpcCache.scala b/exchangis-job/exchangis-job-server/src/main/scala/com/webank/wedatasphere/exchangis/job/server/log/cache/JobLogRpcCache.scala new file mode 100644 index 000000000..1e5a3e072 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/scala/com/webank/wedatasphere/exchangis/job/server/log/cache/JobLogRpcCache.scala @@ -0,0 +1,50 @@ +//package com.webank.wedatasphere.exchangis.job.server.log.cache +// +//import java.util +// +//import org.apache.linkis.scheduler.Scheduler +//import java.util.concurrent.{ArrayBlockingQueue, ConcurrentHashMap} +//import java.util.concurrent.locks.ReentrantLock +// +//abstract class JobLogRpcCache(scheduler: Scheduler, maxSize: Int = 1000, flushInterval: Int = 100) extends AbstractJobLogCache[RpcCacheId, String](scheduler,maxSize,flushInterval) { +// val rpcCacheSplits: ConcurrentHashMap[String, ServerLogSplit] = new ConcurrentHashMap[String, ServerLogSplit] +// /** +// * Sync to flush special cache +// * @param cacheId id +// */ +// override def flushCache(cacheId: RpcCacheId): Unit = { +// } +// +// override def flushCache(): Unit = { +// +// } +// +// override def onCache(cacheId: RpcCacheId, log: String): Unit = { +// rpcCacheSplits.computeIfAbsent(s"${cacheId.protocol}://${cacheId.address}:${cacheId.port}", (serverUrl: String) => { +// new ServerLogSplit() +// }) match { +// case split: ServerLogSplit => split.queue.add(log) +// case _ => +// } +// } +// override def getCacheQueueElement(cacheId: RpcCacheId, log: String): Any = { +// // Just use the Int token as element +// 1 +// } +// +// def rpcCall(serverUrl: String, jobExecId: String, tenancy: String, logs: util.List[String]) +// +// class ServerLogSplit{ +// val splitLock: ReentrantLock = new ReentrantLock +// val queue: util.Queue[String] = new ArrayBlockingQueue[String](maxSize) +// } +//} +//object JobLogRpcCache{ +// implicit def scalaFunctionToJava[From, To](function: (From) => To): java.util.function.Function[From, To] = { +// new 
java.util.function.Function[From, To] { +// override def apply(input: From): To = function(input) +// } +// } +//} +//case class RpcCacheId(address: String, port: String, jobExecId: String, tenancy: String = "default", protocol: String = "http") +// diff --git a/exchangis-job/exchangis-job-server/src/main/scala/com/webank/wedatasphere/exchangis/job/server/log/client/JobLogHttpClient.scala b/exchangis-job/exchangis-job-server/src/main/scala/com/webank/wedatasphere/exchangis/job/server/log/client/JobLogHttpClient.scala new file mode 100644 index 000000000..f63f1fdb3 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/scala/com/webank/wedatasphere/exchangis/job/server/log/client/JobLogHttpClient.scala @@ -0,0 +1,17 @@ +package com.webank.wedatasphere.exchangis.job.server.log.client + +import com.webank.wedatasphere.exchangis.job.server.utils.SpringContextHolder +import org.springframework.context.ApplicationContext + +class JobLogHttpClient { + +} +object JobLogHttpClient{ + def main(args: Array[String]): Unit = { + val spring: ApplicationContext = SpringContextHolder.getApplicationContext + spring match { + case e: ApplicationContext => print("hello world") + case _ => print("none") + } + } +} \ No newline at end of file diff --git a/exchangis-job/exchangis-job-server/src/main/scala/com/webank/wedatasphere/exchangis/job/server/log/serivce/DefaultJobLogService.scala b/exchangis-job/exchangis-job-server/src/main/scala/com/webank/wedatasphere/exchangis/job/server/log/serivce/DefaultJobLogService.scala new file mode 100644 index 000000000..b5baa50df --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/scala/com/webank/wedatasphere/exchangis/job/server/log/serivce/DefaultJobLogService.scala @@ -0,0 +1,19 @@ +package com.webank.wedatasphere.exchangis.job.server.log.serivce + +import java.util + +import com.webank.wedatasphere.exchangis.job.log.{LogQuery, LogResult} +import com.webank.wedatasphere.exchangis.job.server.log.JobLogService +import com.webank.wedatasphere.exchangis.job.server.log.cache.JobLogCache + +class DefaultJobLogService extends JobLogService{ + override def getOrCreateLogCache(jobExecId: String): JobLogCache[String] = ??? + + override def logsFromPage(jobExecId: String, logQuery: LogQuery): LogResult = ??? + + override def appendLog(tenancy: String, jobExecId: String, logs: util.List[String]): Unit = ??? + + override def appendLog(jobExecId: String, logs: util.List[String]): Unit = ??? + + override def logsFromPageAndPath(logPath: String, logQuery: LogQuery): LogResult = ??? 
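+
+  // NOTE: all members above are unimplemented stubs; `???` throws scala.NotImplementedError when invoked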
+} diff --git a/exchangis-job/exchangis-job-server/src/main/test/com/webank/wedatasphere/exchangis/job/server/builder/JobBuilderMainProgress.java b/exchangis-job/exchangis-job-server/src/main/test/com/webank/wedatasphere/exchangis/job/server/builder/JobBuilderMainProgress.java new file mode 100644 index 000000000..0d83c2cde --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/test/com/webank/wedatasphere/exchangis/job/server/builder/JobBuilderMainProgress.java @@ -0,0 +1,61 @@ +package com.webank.wedatasphere.exchangis.job.server.builder; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.webank.wedatasphere.exchangis.datasource.core.utils.Json; +import com.webank.wedatasphere.exchangis.job.builder.ExchangisJobBuilderContext; +import com.webank.wedatasphere.exchangis.job.domain.ExchangisEngineJob; +import com.webank.wedatasphere.exchangis.job.domain.ExchangisJobInfo; +import com.webank.wedatasphere.exchangis.job.vo.ExchangisJobVO; +import com.webank.wedatasphere.exchangis.job.domain.SubExchangisJob; +import com.webank.wedatasphere.exchangis.job.server.builder.engine.DataxExchangisEngineJob; +import com.webank.wedatasphere.exchangis.job.server.builder.engine.SqoopExchangisEngineJob; +import com.webank.wedatasphere.exchangis.job.server.builder.transform.TransformExchangisJob; + +import java.security.SecureRandom; +import java.util.*; + +public class JobBuilderMainProgress { + + public static void main(String[] args) throws Exception{ +// System.setProperty("log4j.configurationFile", "C:\\Users\\hadoop\\IdeaProjects\\Exchangis\\assembly-package\\config\\log4j2.xml"); + System.setProperty("log4j.configurationFile", "C:\\Users\\davidhua\\IdeaProjects\\Exchangis\\assembly-package\\config\\log4j2.xml"); + SpringExchangisJobBuilderManager jobBuilderManager = new SpringExchangisJobBuilderManager(); + jobBuilderManager.init(); + ExchangisJobInfo jobInfo = getDemoSqoopJobInfo(); + ExchangisJobBuilderContext ctx = new ExchangisJobBuilderContext(); + ctx.setOriginalJob(jobInfo); + // ExchangisJob -> ExchangisTransformJob(SubExchangisJob) + TransformExchangisJob transformJob = jobBuilderManager.doBuild(jobInfo, TransformExchangisJob.class, ctx); + List engineJobs = new ArrayList<>(); + // ExchangisTransformJob(SubExchangisJob) -> List + for(SubExchangisJob subExchangisJob : transformJob.getSubJobSet()){ + Optional.ofNullable(jobBuilderManager.doBuild(subExchangisJob, + SubExchangisJob.class, ExchangisEngineJob.class, ctx)).ifPresent(engineJobs::add); + } + engineJobs.forEach(engineJob -> { + if(engineJob instanceof DataxExchangisEngineJob){ + Map code = Json.fromJson(((DataxExchangisEngineJob)engineJob).getCode(), Map.class); + try { + System.out.println(Json.getMapper().writerWithDefaultPrettyPrinter().writeValueAsString(code)); + } catch (JsonProcessingException e) { + e.printStackTrace(); + } + }else if(engineJob instanceof SqoopExchangisEngineJob){ + try { + System.out.println(Json.getMapper().writerWithDefaultPrettyPrinter().writeValueAsString(engineJobs)); + } catch (JsonProcessingException e) { + e.printStackTrace(); + } + } + }); + } + + public static ExchangisJobInfo getDemoSqoopJobInfo(){ + ExchangisJobVO job = new ExchangisJobVO(); + job.setId((long) new SecureRandom().nextInt(100)); + job.setJobName("Job-Builder-Main"); + 
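// Demo content: one MYSQL-to-MYSQL subjob with field mappings, validators and transformers, as consumed by the job builder chain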
job.setContent("[{\"subjobName\":\"subjob1\",\"dataSources\":{\"source_id\":\"MYSQL.10002.db_mask.table_source\",\"sink_id\":\"MYSQL.10002.db_mask.table_sink\"},\"params\":{\"sources\":[{\"config_key\":\"exchangis.job.mysql.write_type\",\"config_name\":\"写入方式\",\"config_value\":\"insert\",\"sort\":1},{\"config_key\":\"exchangis.job.mysql.batch_size\",\"config_name\":\"批量大小\",\"config_value\":1000,\"sort\":2}],\"sinks\":[{\"config_key\":\"exchangis.job.mysql.write_type\",\"config_name\":\"写入方式\",\"config_value\":\"insert\",\"sort\":1},{\"config_key\":\"exchangis.job.mysql.batch_size\",\"config_name\":\"批量大小\",\"config_value\":1000,\"sort\":2}]},\"transforms\":{\"type\":\"MAPPING\",\"sql\":\"\",\"mapping\":[{\"source_field_name\":\"field1\",\"source_field_type\":\"varchar\",\"sink_field_name\":\"field2\",\"sink_field_type\":\"varchar\",\"validator\":[\">100\",\"<200\"],\"transformer\":{\"name\":\"ex_substr\",\"params\":[\"1\",\"3\"]}},{\"source_field_name\":\"field3\",\"source_field_type\":\"varchar\",\"sink_field_name\":\"field4\",\"sink_field_type\":\"varchar\",\"validator\":[\"like'%example'\"],\"transformer\":{\"name\":\"ex_replace\",\"params\":[\"1\",\"3\",\"***\"]}}]},\"settings\":[{\"config_key\":\"errorlimit_percentage\",\"config_name\":\"脏数据占比阈值\",\"config_value\":\"insert\",\"sort\":1},{\"config_key\":\"errorlimit_record\",\"config_name\":\"脏数据最大记录数\",\"config_value\":\"10\",\"sort\":2}]}]"); + job.setEngineType("sqoop"); + return new ExchangisJobInfo(job); + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/test/com/webank/wedatasphere/exchangis/job/server/builder/JobBuilderTest.java b/exchangis-job/exchangis-job-server/src/main/test/com/webank/wedatasphere/exchangis/job/server/builder/JobBuilderTest.java new file mode 100644 index 000000000..5a30a1231 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/test/com/webank/wedatasphere/exchangis/job/server/builder/JobBuilderTest.java @@ -0,0 +1,219 @@ +package com.webank.wedatasphere.exchangis.job.server.builder; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.webank.wedatasphere.exchangis.job.builder.ExchangisJobBuilderContext; +import com.webank.wedatasphere.exchangis.job.domain.ExchangisEngineJob; +import com.webank.wedatasphere.exchangis.job.domain.ExchangisJobInfo; +import com.webank.wedatasphere.exchangis.job.launcher.domain.LaunchableExchangisTask; +import com.webank.wedatasphere.exchangis.job.vo.ExchangisJobVO; +import com.webank.wedatasphere.exchangis.job.domain.SubExchangisJob; +import com.webank.wedatasphere.exchangis.job.exception.ExchangisJobException; +import com.webank.wedatasphere.exchangis.job.exception.ExchangisJobExceptionCode; +import com.webank.wedatasphere.exchangis.job.server.builder.transform.TransformExchangisJob; + +import java.util.*; + +public class JobBuilderTest { + + private static SpringExchangisJobBuilderManager jobBuilderManager = new SpringExchangisJobBuilderManager(); + + static { + jobBuilderManager.init(); + } + + public static void main(String[] args) throws ExchangisJobException, JsonProcessingException { + + String code = "{\n" + + " \"job\": {\n" + + " \"content\":[\n" + + " {\n" + + " \"reader\": {\n" + + " \"name\": \"txtfilereader\", \n" + + " \"parameter\": {\n" + + " \"path\":[\"/opt/install/datax/data/test1.csv\"],\n" + + " \"encoding\":\"gbk\",\n" + + " \"column\": [\n" + + " {\n" + + " \"index\":0,\n" + + " \"type\":\"string\"\n" + + " },\n" + + " {\n" + + " \"index\":1,\n" + + " \"type\":\"string\"\n" + + " }\n" + + " ], \n" + + " 
\"fileldDelimiter\":\",\"\n" + + " }\n" + + " }, \n" + + " \"writer\": {\n" + + " \"name\": \"mysqlwriter\", \n" + + " \"parameter\": {\n" + + " \"username\": \"root\",\n" + + " \"password\": \"MTIzNDU2\", \n" + + " \"column\": [\n" + + " \"i\",\n" + + " \"j\"\n" + + " ],\n" + + " \"preSql\": [], \n" + + " \"connection\": [\n" + + " {\n" + + " \"jdbcUrl\":\"jdbc:mysql://127.0.0.1:3306/test\", \n" + + " \"table\": [\"testtab\"]\n" + + " }\n" + + " ]\n" + + " }\n" + + " }\n" + + " }\n" + + " ], \n" + + " \"setting\": {\n" + + " \"speed\": {\n" + + " \"channel\": \"4\"\n" + + " }\n" + + " }\n" + + " }\n" + + "}"; +// System.out.println(code); + + ExchangisJobInfo job = getSqoopJob(); + System.out.println(job.getName()); + ExchangisJobBuilderContext ctx = new ExchangisJobBuilderContext(); + ctx.putEnv("USER_NAME", "xxxxyyyyzzzz"); + ctx.setOriginalJob(job); + TransformExchangisJob transformJob = jobBuilderManager.doBuild(job, TransformExchangisJob.class, ctx); + List engineJobs = new ArrayList<>(); + + + for (SubExchangisJob subExchangisJob : transformJob.getSubJobSet()) { + String sourceDsId = subExchangisJob.getRealmParams(SubExchangisJob.REALM_JOB_CONTENT_SOURCE).get("datasource").getValue().toString(); + String sinkDsId = subExchangisJob.getRealmParams(SubExchangisJob.REALM_JOB_CONTENT_SINK).get("datasource").getValue().toString(); +// if (!ctx.containsDatasourceParam(sourceDsId)) { +// Map sourceDsParam = getDsParam(sourceDsId); +// ctx.putDatasourceParam(sourceDsId, sourceDsParam); +// } +// if (!ctx.containsDatasourceParam(sinkDsId)) { +// Map sinkDsParam = getDsParam(sinkDsId); +// ctx.putDatasourceParam(sinkDsId, sinkDsParam); +// } + // connectParams + Optional.ofNullable(jobBuilderManager.doBuild(subExchangisJob, + SubExchangisJob.class, ExchangisEngineJob.class, ctx)).ifPresent(engineJobs::add); + } + + // List -> List + List launchableTasks = new ArrayList<>(); + for (ExchangisEngineJob engineJob : engineJobs) { + Optional.ofNullable(jobBuilderManager.doBuild(engineJob, + ExchangisEngineJob.class, LaunchableExchangisTask.class, ctx)).ifPresent(launchableTasks::add); + } + if (launchableTasks.isEmpty()) { + throw new ExchangisJobException(ExchangisJobExceptionCode.TASK_BUILDER_ERROR.getCode(), + "The result set of launcher job is empty, please examine your job entity, [ 生成LauncherJob为空 ]", null); + } + + for (LaunchableExchangisTask launchableTask : launchableTasks) { + System.out.println(launchableTask.getName()); + } + + } + + public static ExchangisJobInfo getSqoopJob() { + ExchangisJobVO job = new ExchangisJobVO(); + job.setId(22L); + job.setProjectId(1456173825011081218L); + job.setJobName("T_SQOOP"); + job.setJobType("OFFLINE"); + job.setEngineType("DATAX"); + job.setJobLabels(""); + job.setJobDesc(""); + job.setContent("[{\n" + + " \"subJobName\": \"new\",\n" + + " \"dataSources\": {\n" + + " \"source_id\": \"HIVE.34.test.psn\",\n" + + " \"sink_id\": \"MYSQL.29.test.t_psn\"\n" + + " },\n" + + " \"params\": {\n" + + " \"sources\": [{\n" + + " \"config_key\": \"exchangis.job.ds.params.sqoop.hive.r.trans_proto\",\n" + + " \"config_name\": \"传输方式\",\n" + + " \"config_value\": \"记录\",\n" + + " \"sort\": 1\n" + + " }, {\n" + + " \"config_key\": \"exchangis.job.ds.params.sqoop.hive.r.partition\",\n" + + " \"config_name\": \"分区信息\",\n" + + " \"config_value\": \"year,month=2018,09\",\n" + + " \"sort\": 2\n" + + " }, {\n" + + " \"config_key\": \"exchangis.job.ds.params.sqoop.hive.r.row_format\",\n" + + " \"config_name\": \"字段格式\",\n" + + " \"config_value\": \"1\",\n" + + " \"sort\": 
3\n" + + " }],\n" + + " \"sinks\": [{\n" + + " \"config_key\": \"exchangis.job.ds.params.sqoop.mysql.w.write_type\",\n" + + " \"config_name\": \"写入方式\",\n" + + " \"config_value\": \"UPDATEONLY\",\n" + + " \"sort\": 1\n" + + " }, {\n" + + " \"config_key\": \"exchangis.job.ds.params.sqoop.mysql.w.batch_size\",\n" + + " \"config_name\": \"批量大小\",\n" + + " \"config_value\": \"1\",\n" + + " \"sort\": 2\n" + + " }]\n" + + " },\n" + + " \"transforms\": {\n" + + " \"addEnable\": false,\n" + + " \"type\": \"MAPPING\",\n" + + " \"sql\": null,\n" + + " \"mapping\": [\n" + + " {\n" + + " \"sink_field_name\": \"id\",\n" + + " \"sink_field_type\": \"INT\",\n" + + " \"validator\": [],\n" + + " \"transformer\": {},\n" + + " \"source_field_name\": \"id\",\n" + + " \"source_field_type\": \"INT\"\n" + + " }, {\n" + + " \"sink_field_name\": \"age\",\n" + + " \"sink_field_type\": \"INT\",\n" + + " \"validator\": [],\n" + + " \"transformer\": {},\n" + + " \"source_field_name\": \"age\",\n" + + " \"source_field_type\": \"INT\"\n" + + " }\n" + + " ]\n" + + " },\n" + + " \"settings\": [{\n" + + " \"config_key\": \"exchangis.sqoop.setting.max.parallelism\",\n" + + " \"config_name\": \"作业最大并行数\",\n" + + " \"config_value\": \"1\",\n" + + " \"sort\": 1\n" + + " }, {\n" + + " \"config_key\": \"exchangis.sqoop.setting.max.memory\",\n" + + " \"config_name\": \"作业最大内存\",\n" + + " \"config_value\": \"1\",\n" + + " \"sort\": 2\n" + + " }]\n" + + "}]"); + job.setProxyUser("hdfs"); + job.setSyncType("FULL"); + job.setJobParams("{}"); + return new ExchangisJobInfo(job); + } + + public static Map getDsParam(String id) { + Map params = new HashMap<>(); + if (id.equals("29")) { + params.put("host", "192.168.0.66"); + params.put("port", "3306"); + params.put("username", "scm"); + params.put("password", "scm_@casc2f"); + } + if (id.equals("34")) { + params.put("host", "192.168.0.111"); + params.put("port", "9083"); + params.put("uris", "thrift://192.168.0.111:9083"); + } + return params; + } + +} diff --git a/exchangis-job/exchangis-job-server/src/main/test/com/webank/wedatasphere/exchangis/job/server/builder/TestDataXJobBuilder.java b/exchangis-job/exchangis-job-server/src/main/test/com/webank/wedatasphere/exchangis/job/server/builder/TestDataXJobBuilder.java new file mode 100644 index 000000000..7d2121294 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/test/com/webank/wedatasphere/exchangis/job/server/builder/TestDataXJobBuilder.java @@ -0,0 +1,82 @@ +package com.webank.wedatasphere.exchangis.job.server.builder; + +import com.webank.wedatasphere.exchangis.job.builder.ExchangisJobBuilderContext; +import com.webank.wedatasphere.exchangis.job.domain.ExchangisEngineJob; +import com.webank.wedatasphere.exchangis.job.domain.ExchangisJobInfo; +import com.webank.wedatasphere.exchangis.job.launcher.domain.LaunchableExchangisTask; +import com.webank.wedatasphere.exchangis.job.vo.ExchangisJobVO; +import com.webank.wedatasphere.exchangis.job.domain.SubExchangisJob; +import com.webank.wedatasphere.exchangis.job.exception.ExchangisJobException; +import com.webank.wedatasphere.exchangis.job.exception.ExchangisJobExceptionCode; +import com.webank.wedatasphere.exchangis.job.server.builder.transform.TransformExchangisJob; + +import java.util.ArrayList; +import java.util.List; +import java.util.Optional; + +public class TestDataXJobBuilder { + + private static SpringExchangisJobBuilderManager jobBuilderManager = new SpringExchangisJobBuilderManager(); + + static { + jobBuilderManager.init(); + } + + public static void main(String[] 
args) throws ExchangisJobException { + ExchangisJobVO job = getDataxJob(); + ExchangisJobBuilderContext ctx = new ExchangisJobBuilderContext(); + ctx.putEnv("USER_NAME", "xxxxyyyyzzzz"); + ExchangisJobInfo jobInfo = new ExchangisJobInfo(job); + ctx.setOriginalJob(jobInfo); + TransformExchangisJob transformJob = jobBuilderManager.doBuild(jobInfo, TransformExchangisJob.class, ctx); + List engineJobs = new ArrayList<>(); + + + for (SubExchangisJob subExchangisJob : transformJob.getSubJobSet()) { + String sourceDsId = subExchangisJob.getRealmParams(SubExchangisJob.REALM_JOB_CONTENT_SOURCE).get("datasource").getValue().toString(); + String sinkDsId = subExchangisJob.getRealmParams(SubExchangisJob.REALM_JOB_CONTENT_SINK).get("datasource").getValue().toString(); + // if (!ctx.containsDatasourceParam(sourceDsId)) { + // Map sourceDsParam = getDsParam(sourceDsId); + // ctx.putDatasourceParam(sourceDsId, sourceDsParam); + // } + // if (!ctx.containsDatasourceParam(sinkDsId)) { + // Map sinkDsParam = getDsParam(sinkDsId); + // ctx.putDatasourceParam(sinkDsId, sinkDsParam); + // } + // connectParams + Optional.ofNullable(jobBuilderManager.doBuild(subExchangisJob, + SubExchangisJob.class, ExchangisEngineJob.class, ctx)).ifPresent(engineJobs::add); + } + + // List -> List + List launchableTasks = new ArrayList<>(); + for (ExchangisEngineJob engineJob : engineJobs) { + Optional.ofNullable(jobBuilderManager.doBuild(engineJob, + ExchangisEngineJob.class, LaunchableExchangisTask.class, ctx)).ifPresent(launchableTasks::add); + } + if (launchableTasks.isEmpty()) { + throw new ExchangisJobException(ExchangisJobExceptionCode.TASK_BUILDER_ERROR.getCode(), + "The result set of launcher job is empty, please examine your job entity, [ 生成LauncherJob为空 ]", null); + } + + for (LaunchableExchangisTask launchableTask : launchableTasks) { + System.out.println(launchableTask.getName()); + } + } + + public static ExchangisJobVO getDataxJob() { + ExchangisJobVO job = new ExchangisJobVO(); + job.setId(22L); + job.setProjectId(1456173825011081218L); +// job.setName("T_DATAX"); + job.setJobType("OFFLINE"); + job.setEngineType("DATAX"); + job.setJobLabels(""); + job.setJobDesc(""); + job.setProxyUser("hdfs"); + job.setSyncType("FULL"); + job.setJobParams("{}"); + job.setContent("[{\"subJobName\":\"TjpQkiAeGfTe\",\"dataSources\":{\"source_id\":\"HIVE.22.test.psn\",\"sink_id\":\"MYSQL.29.test.t_psn\"},\"params\":{\"sources\":[{\"config_key\":\"exchangis.job.ds.params.datax.hive.r.trans_proto\",\"config_name\":\"传输方式\",\"config_value\":\"记录\",\"sort\":1},{\"config_key\":\"exchangis.job.ds.params.datax.hive.r.partition\",\"config_name\":\"分区信息\",\"config_value\":\"\",\"sort\":2},{\"config_key\":\"exchangis.job.ds.params.datax.hive.r.row_format\",\"config_name\":\"字段格式\",\"config_value\":\"\",\"sort\":3}],\"sinks\":[{\"config_key\":\"exchangis.job.ds.params.datax.mysql.w.write_type\",\"config_name\":\"写入方式\",\"config_value\":\"INSERT\",\"sort\":1},{\"config_key\":\"exchangis.job.ds.params.datax.mysql.w.batch_size\",\"config_name\":\"批量大小\",\"config_value\":\"1000\",\"sort\":2}]},\"transforms\":{\"type\":\"MAPPING\",\"mapping\":[{\"sink_field_name\":\"id\",\"sink_field_type\":\"INT\",\"sink_field_index\":0,\"sink_field_editable\":true,\"validator\":[\"> 
1\"],\"transformer\":{\"name\":\"ex_pad\",\"params\":[\"r\",\"3\",\"0\"]},\"deleteEnable\":false,\"source_field_name\":\"id\",\"source_field_type\":\"int\",\"source_field_index\":0,\"source_field_editable\":false},{\"sink_field_name\":\"age\",\"sink_field_type\":\"INT\",\"sink_field_index\":1,\"sink_field_editable\":true,\"validator\":[\"> 0\"],\"transformer\":{\"name\":\"ex_pad\",\"params\":[\"l\",\"3\",\"0\"]},\"deleteEnable\":false,\"source_field_name\":\"age\",\"source_field_type\":\"int\",\"source_field_index\":1,\"source_field_editable\":false}],\"addEnable\":false},\"settings\":[{\"config_key\":\"exchangis.datax.setting.speed.bytes\",\"config_name\":\"作业速率限制\",\"config_value\":\"1000\",\"sort\":1},{\"config_key\":\"exchangis.datax.setting.speed.records\",\"config_name\":\"作业记录数限制\",\"config_value\":\"1000\",\"sort\":2},{\"config_key\":\"exchangis.datax.setting.max.parallelism\",\"config_name\":\"作业最大并行度\",\"config_value\":\"1\",\"sort\":3},{\"config_key\":\"exchangis.datax.setting.max.memory\",\"config_name\":\"作业最大使用内存\",\"config_value\":\"2000\",\"sort\":4},{\"config_key\":\"exchangis.datax.setting.errorlimit.record\",\"config_name\":\"最多错误记录数\",\"config_value\":\"1000\",\"sort\":5}]}]"); + return job; + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/test/com/webank/wedatasphere/exchangis/job/server/execution/JobExecutionUnitTest.java b/exchangis-job/exchangis-job-server/src/main/test/com/webank/wedatasphere/exchangis/job/server/execution/JobExecutionUnitTest.java new file mode 100644 index 000000000..ed992d87f --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/test/com/webank/wedatasphere/exchangis/job/server/execution/JobExecutionUnitTest.java @@ -0,0 +1,152 @@ +package com.webank.wedatasphere.exchangis.job.server.execution; + +import com.webank.wedatasphere.exchangis.job.exception.ExchangisOnEventException; +import com.webank.wedatasphere.exchangis.job.launcher.domain.LaunchableExchangisJob; +import com.webank.wedatasphere.exchangis.job.launcher.domain.LaunchableExchangisTask; +import com.webank.wedatasphere.exchangis.job.launcher.domain.LaunchedExchangisTask; +import com.webank.wedatasphere.exchangis.job.launcher.manager.LinkisExchangisTaskLaunchManager; +import com.webank.wedatasphere.exchangis.job.listener.events.JobLogEvent; +import com.webank.wedatasphere.exchangis.job.listener.JobLogListener; +import com.webank.wedatasphere.exchangis.job.server.builder.JobBuilderMainProgress; +import com.webank.wedatasphere.exchangis.job.server.builder.SpringExchangisJobBuilderManager; +import com.webank.wedatasphere.exchangis.job.server.exception.ExchangisSchedulerException; +import com.webank.wedatasphere.exchangis.job.server.exception.ExchangisSchedulerRetryException; +import com.webank.wedatasphere.exchangis.job.server.exception.ExchangisTaskExecuteException; +import com.webank.wedatasphere.exchangis.job.server.exception.ExchangisTaskGenerateException; +import com.webank.wedatasphere.exchangis.job.server.execution.generator.DefaultTaskGenerator; +import com.webank.wedatasphere.exchangis.job.server.execution.generator.DefaultTaskGeneratorContext; +import com.webank.wedatasphere.exchangis.job.server.execution.generator.TaskGenerator; +import com.webank.wedatasphere.exchangis.job.server.execution.loadbalance.FlexibleTenancyLoadBalancer; +import com.webank.wedatasphere.exchangis.job.server.execution.scheduler.*; +import com.webank.wedatasphere.exchangis.job.server.execution.scheduler.tasks.GenerationSchedulerTask; +import 
com.webank.wedatasphere.exchangis.job.server.execution.subscriber.MaxUsageTaskChooseRuler;
+import com.webank.wedatasphere.exchangis.job.server.execution.subscriber.NewInTaskObserver;
+import com.webank.wedatasphere.exchangis.job.server.execution.subscriber.TaskObserver;
+import com.webank.wedatasphere.exchangis.job.server.log.DefaultRpcJobLogger;
+import org.apache.linkis.scheduler.Scheduler;
+import org.apache.linkis.scheduler.executer.ExecutorManager;
+import org.apache.linkis.scheduler.queue.JobInfo;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.UUID;
+
+/**
+ * Unit test of execution module
+ */
+public class JobExecutionUnitTest {
+//    private static final Logger LOG = LoggerFactory.getLogger(JobExecutionUnitTest.class);
+
+    public static void main(String[] args) throws ExchangisTaskExecuteException {
+//        System.setProperty("log4j.configurationFile", "C:\\Users\\davidhua\\IdeaProjects\\Exchangis\\assembly-package\\config\\log4j2.xml");
+        System.setProperty("log4j.configurationFile", "C:\\Users\\hadoop\\IdeaProjects\\Exchangis\\assembly-package\\config\\log4j2.xml");
+        System.setProperty("wds.exchangis.job.scheduler.consumer.tenancies", "hadoop");
+        final Logger LOG = LoggerFactory.getLogger(JobExecutionUnitTest.class);
+        LOG.info("Job Execution Unit Test begin to launch");
+        // Logger
+        DefaultRpcJobLogger jobLogger = new DefaultRpcJobLogger();
+        jobLogger.onEvent(new JobLogEvent(UUID.randomUUID().toString(), "That is just a test"));
+        // Start an endless thread to keep the program running
+        new Thread(new EndlessThread()).start();
+        try {
+//            JobLogListener logListener = getLogListener();
+            // Task Generator
+            SpringExchangisJobBuilderManager jobBuilderManager = new SpringExchangisJobBuilderManager();
+            jobBuilderManager.init();
+            TaskGenerator taskGenerator = new DefaultTaskGenerator(new DefaultTaskGeneratorContext(jobLogger), jobBuilderManager);
+            // Executor manager
+            ExecutorManager executorManager = new ExchangisSchedulerExecutorManager();
+            // Tenancy consumer manager
+            TenancyParallelConsumerManager consumerManager = new TenancyParallelConsumerManager();
+            consumerManager.setInitResidentThreads(4);
+            // Task manager
+            TaskManager taskManager = new DefaultTaskManager(jobLogger);
+            Scheduler scheduler = new ExchangisGenericScheduler(executorManager, consumerManager);
+            scheduler.init();
+            // Load balancer
+            FlexibleTenancyLoadBalancer loadBalancer = new FlexibleTenancyLoadBalancer(scheduler, taskManager);
+            // Task observers
+            List<TaskObserver<?>> observers = new ArrayList<>();
+            NewInTaskObserver newInObserver = new NewInTaskObserver();
+            observers.add(newInObserver);
+            // Launcher manager
+            LinkisExchangisTaskLaunchManager launchManager = new LinkisExchangisTaskLaunchManager();
+            launchManager.init();
+            // Task execution
+            TaskExecution execution =
+                    new DefaultTaskExecution(scheduler, launchManager, taskManager, observers, loadBalancer, new MaxUsageTaskChooseRuler());
+            execution.start();
+            // Test submit
+            execution.submit(getTestUnitTask("hadoop"));
+            // Submit LaunchableExchangisTask
+            submitTest(execution, newInObserver);
+            // Generate
+            generateTest(execution, taskGenerator, "davidhua");
+        } catch (Exception e){
+            LOG.error("Job Execution Unit Test shutdown", e);
+        }
+    }
+
+    private static void generateTest(TaskExecution execution,
+                                     TaskGenerator taskGenerator,
+                                     String tenancy) throws ExchangisSchedulerException, ExchangisTaskGenerateException {
+        GenerationSchedulerTask task = new 
GenerationSchedulerTask(taskGenerator, JobBuilderMainProgress.getDemoSqoopJobInfo()); + task.setTenancy(tenancy); + execution.submit(task); + } + private static void submitTest(TaskExecution execution, NewInTaskObserver newInTaskObserver){ + LaunchableExchangisTask task = new LaunchableExchangisTask(); + task.setId(1694451505815490560L); + task.setJobExecutionId(UUID.randomUUID().toString()); + newInTaskObserver.getCacheQueue().offer(task); + } + + private static JobLogListener getLogListener(){ + return new JobLogListener() { + @Override + public void onEvent(JobLogEvent event) throws ExchangisOnEventException { + + } + }; + } + + private static AbstractExchangisSchedulerTask getTestUnitTask(String user){ + return new AbstractExchangisSchedulerTask("execution-test") { + final Logger LOG = LoggerFactory.getLogger(this.getClass()); + @Override + public String getName() { + return null; + } + + @Override + public JobInfo getJobInfo() { + return null; + } + + @Override + public String getTenancy() { + return user; + } + + @Override + protected void schedule() throws ExchangisSchedulerException, ExchangisSchedulerRetryException { + LOG.info("id: [{}] schedule success", getId()); + } + }; + } + private static class EndlessThread implements Runnable{ + + @Override + public void run() { + while(true){ + try { + Thread.sleep(5000); + } catch (InterruptedException e) { + //Ignore + } + } + } + } +} diff --git a/exchangis-job/exchangis-job-service/pom.xml b/exchangis-job/exchangis-job-service/pom.xml new file mode 100644 index 000000000..10c0afb92 --- /dev/null +++ b/exchangis-job/exchangis-job-service/pom.xml @@ -0,0 +1,33 @@ + + + + exchangis + com.webank.wedatasphere.exchangis + ${revision} + ../../pom.xml + + 4.0.0 + + exchangis-job-service + + + 8 + 8 + + + + + com.webank.wedatasphere.exchangis + exchangis-job-common + ${revision} + + + com.webank.wedatasphere.exchangis + exchangis-job-launcher + ${revision} + + + + \ No newline at end of file diff --git a/exchangis-job/pom.xml b/exchangis-job/pom.xml new file mode 100644 index 000000000..2ccba4766 --- /dev/null +++ b/exchangis-job/pom.xml @@ -0,0 +1,30 @@ + + + + exchangis + com.webank.wedatasphere.exchangis + ${revision} + ../pom.xml + + 4.0.0 + + exchangis-job + pom + ${revision} + + + exchangis-job-common + exchangis-job-server + exchangis-job-builder + exchangis-job-launcher + exchangis-job-metrics + + + + 8 + 8 + + + \ No newline at end of file diff --git a/exchangis-plugins/exchangis-appconn/pom.xml b/exchangis-plugins/exchangis-appconn/pom.xml new file mode 100644 index 000000000..aa84a4261 --- /dev/null +++ b/exchangis-plugins/exchangis-appconn/pom.xml @@ -0,0 +1,165 @@ + + + + exchangis + com.webank.wedatasphere.exchangis + ${revision} + ../../pom.xml + + 4.0.0 + + exchangis-appconn + + + 8 + 8 + + + + + org.apache.linkis + linkis-module + ${linkis.version} + provided + + + org.apache.httpcomponents + httpclient + + + org.springframework + spring-core + + + org.springframework.boot + spring-boot + + + org.springframework.boot + spring-boot-starter-cache + + + org.springframework.cloud + spring-cloud-starter-netflix-eureka-client + + + true + + + com.webank.wedatasphere.dss + dss-project-plugin + ${dss.version} + + + com.webank.wedatasphere.dss + dss-appconn-core + ${dss.version} + + + org.apache.linkis + linkis-storage + ${linkis.version} + provided + + + org.apache.linkis + linkis-common + + + + + com.webank.wedatasphere.dss + dss-development-process-standard + ${dss.version} + + + com.webank.wedatasphere.dss + 
dss-structure-integration-standard + ${dss.version} + + + com.webank.wedatasphere.dss + dss-development-process-standard-execution + ${dss.version} + + + com.webank.wedatasphere.dss + dss-common + ${dss.version} + provided + + + org.springframework + spring-aop + + + org.springframework + spring-context-support + + + org.springframework + spring-webmvc + + + org.springframework + spring-jdbc + + + org.springframework + spring-tx + + + + + org.reflections + reflections + 0.9.10 + + + + + + + org.apache.maven.plugins + maven-deploy-plugin + ${maven-deploy-plugin.version} + + + org.apache.maven.plugins + maven-jar-plugin + ${maven-jar-plugin.version} + + + org.apache.maven.plugins + maven-assembly-plugin + 2.3 + false + + + make-assembly + package + + single + + + + src/main/assembly/distribution.xml + + + + + + false + exchangis-appconn + false + false + + src/main/assembly/distribution.xml + + + + + + \ No newline at end of file diff --git a/exchangis-plugins/exchangis-appconn/src/main/assembly/distribution.xml b/exchangis-plugins/exchangis-appconn/src/main/assembly/distribution.xml new file mode 100644 index 000000000..8a2f5187d --- /dev/null +++ b/exchangis-plugins/exchangis-appconn/src/main/assembly/distribution.xml @@ -0,0 +1,86 @@ + + + + exchangis-appconn + + zip + + true + exchangis + + + + + lib + true + true + false + true + true + + + + + + ${basedir}/src/main/resources + + appconn.properties + + 0777 + / + unix + + + + ${basedir}/src/main/resources + + log4j.properties + log4j2.xml + + 0777 + conf + unix + + + ${basedir}/src/main/resources + + init.sql + + 0777 + db + unix + + + ${basedir}/src/main/resources + + datax.icon + sqoop.icon + + 0777 + icons + unix + + + + + + diff --git a/exchangis-plugins/exchangis-appconn/src/main/java/com/webank/wedatasphere/exchangis/dss/appconn/ExchangisAppConn.java b/exchangis-plugins/exchangis-appconn/src/main/java/com/webank/wedatasphere/exchangis/dss/appconn/ExchangisAppConn.java new file mode 100644 index 000000000..59d2bc3bd --- /dev/null +++ b/exchangis-plugins/exchangis-appconn/src/main/java/com/webank/wedatasphere/exchangis/dss/appconn/ExchangisAppConn.java @@ -0,0 +1,38 @@ +package com.webank.wedatasphere.exchangis.dss.appconn; + +import com.webank.wedatasphere.dss.appconn.core.ext.ThirdlyAppConn; +import com.webank.wedatasphere.dss.appconn.core.impl.AbstractOnlySSOAppConn; +import com.webank.wedatasphere.dss.standard.app.development.standard.DevelopmentIntegrationStandard; +import com.webank.wedatasphere.dss.standard.app.structure.StructureIntegrationStandard; + +/** + * Exchangis AppConn top implement + */ +public class ExchangisAppConn extends AbstractOnlySSOAppConn implements ThirdlyAppConn { + + /** + * Project service operation + */ + private ExchangisStructureIntegrationStandard exchangisStructureIntegrationStandard; + + /** + * Operation for flow node + */ + private ExchangisDevelopmentIntegrationStandard exchangisDevelopmentIntegrationStandard; + + @Override + public DevelopmentIntegrationStandard getOrCreateDevelopmentStandard() { + return exchangisDevelopmentIntegrationStandard; + } + + @Override + public StructureIntegrationStandard getOrCreateStructureStandard() { + return exchangisStructureIntegrationStandard; + } + + @Override + protected void initialize() { + exchangisStructureIntegrationStandard = new ExchangisStructureIntegrationStandard(); + exchangisDevelopmentIntegrationStandard = new ExchangisDevelopmentIntegrationStandard(); + } +} diff --git 
a/exchangis-plugins/exchangis-appconn/src/main/java/com/webank/wedatasphere/exchangis/dss/appconn/ExchangisDevelopmentIntegrationStandard.java b/exchangis-plugins/exchangis-appconn/src/main/java/com/webank/wedatasphere/exchangis/dss/appconn/ExchangisDevelopmentIntegrationStandard.java new file mode 100644 index 000000000..9db38afc4 --- /dev/null +++ b/exchangis-plugins/exchangis-appconn/src/main/java/com/webank/wedatasphere/exchangis/dss/appconn/ExchangisDevelopmentIntegrationStandard.java @@ -0,0 +1,36 @@ +package com.webank.wedatasphere.exchangis.dss.appconn; + +import com.webank.wedatasphere.dss.standard.app.development.service.*; +import com.webank.wedatasphere.dss.standard.app.development.standard.AbstractDevelopmentIntegrationStandard; +import com.webank.wedatasphere.exchangis.dss.appconn.service.*; + +/** + * Develop integration + */ +public class ExchangisDevelopmentIntegrationStandard extends AbstractDevelopmentIntegrationStandard { + + @Override + protected RefCRUDService createRefCRUDService() { + return new ExchangisRefCRUDService(); + } + + @Override + protected RefExecutionService createRefExecutionService() { + return new ExchangisRefExecutionService(); + } + + @Override + protected RefExportService createRefExportService() { + return new ExchangisRefExportService(); + } + + @Override + protected RefImportService createRefImportService() { + return new ExchangisRefImportService(); + } + + @Override + protected RefQueryService createRefQueryService() { + return new ExchangisRefQueryService(); + } +} diff --git a/exchangis-plugins/exchangis-appconn/src/main/java/com/webank/wedatasphere/exchangis/dss/appconn/ExchangisStructureIntegrationStandard.java b/exchangis-plugins/exchangis-appconn/src/main/java/com/webank/wedatasphere/exchangis/dss/appconn/ExchangisStructureIntegrationStandard.java new file mode 100644 index 000000000..0c331e22f --- /dev/null +++ b/exchangis-plugins/exchangis-appconn/src/main/java/com/webank/wedatasphere/exchangis/dss/appconn/ExchangisStructureIntegrationStandard.java @@ -0,0 +1,17 @@ +package com.webank.wedatasphere.exchangis.dss.appconn; + +import com.webank.wedatasphere.dss.standard.app.structure.AbstractStructureIntegrationStandard; +import com.webank.wedatasphere.dss.standard.app.structure.project.ProjectService; +import com.webank.wedatasphere.exchangis.dss.appconn.service.ExchangisProjectService; + +/** + * Structure(Project) service implement + */ +public class ExchangisStructureIntegrationStandard extends AbstractStructureIntegrationStandard { + + @Override + protected ProjectService createProjectService() { + return new ExchangisProjectService(); + } + +} diff --git a/exchangis-plugins/exchangis-appconn/src/main/java/com/webank/wedatasphere/exchangis/dss/appconn/constraints/Constraints.java b/exchangis-plugins/exchangis-appconn/src/main/java/com/webank/wedatasphere/exchangis/dss/appconn/constraints/Constraints.java new file mode 100644 index 000000000..22f942fe5 --- /dev/null +++ b/exchangis-plugins/exchangis-appconn/src/main/java/com/webank/wedatasphere/exchangis/dss/appconn/constraints/Constraints.java @@ -0,0 +1,37 @@ +package com.webank.wedatasphere.exchangis.dss.appconn.constraints; + + +import org.apache.linkis.common.conf.CommonVars; + +/** + * Constraints + */ +public class Constraints { + + // AppConn name + public final static String EXCHANGIS_APPCONN_NAME = CommonVars.apply("wds.dss.appconn.exchangis.name", "Exchangis").getValue(); + + public final static String API_REQUEST_PREFIX = 
CommonVars.apply("wds.dss.appconn.exchangis.api.request-prefix", "api/rest_j/v1/dss/exchangis/main").getValue(); + + public final static String DOMAIN_NAME = CommonVars.apply("wds.dss.appconn.exchangis.domain.name", "DSS").getValue(); + + // Constraint in Project operation + public final static String PROJECT_ID = "projectId"; + + // Node type + public final static String NODE_TYPE_SQOOP = CommonVars.apply("wds.dss.appconn.exchangis.node-type.sqoop", "linkis.appconn.exchangis.sqoop").getValue(); + public final static String NODE_TYPE_DATAX = CommonVars.apply("wds.dss.appconn.exchangis.node-type.datax", "linkis.appconn.exchangis.datax").getValue(); + + // Engine type + public final static String ENGINE_TYPE_DATAX_NAME = CommonVars.apply("wds.dss.appconn.exchangis.engine.datax.name", "DATAX").getValue(); + public final static String ENGINE_TYPE_SQOOP_NAME = CommonVars.apply("wds.dss.appconn.exchangis.engine.sqoop.name", "SQOOP").getValue(); + + // Job type + public final static String JOB_TYPE_OFFLINE = CommonVars.apply("wds.dss.appconn.exchangis.job-type.offline", "OFFLINE").getValue(); + + public final static String REF_JOB_ID = "id"; + public final static String REF_JUMP_URL_FORMAT = CommonVars.apply("wds.dss.appconn.exchangis.ref.jump","#/childJobManagement").getValue(); + + +} + diff --git a/exchangis-plugins/exchangis-appconn/src/main/java/com/webank/wedatasphere/exchangis/dss/appconn/operation/development/ExchangisExportOperation.java b/exchangis-plugins/exchangis-appconn/src/main/java/com/webank/wedatasphere/exchangis/dss/appconn/operation/development/ExchangisExportOperation.java new file mode 100644 index 000000000..f6dae2729 --- /dev/null +++ b/exchangis-plugins/exchangis-appconn/src/main/java/com/webank/wedatasphere/exchangis/dss/appconn/operation/development/ExchangisExportOperation.java @@ -0,0 +1,57 @@ +package com.webank.wedatasphere.exchangis.dss.appconn.operation.development; + +import com.webank.wedatasphere.dss.standard.app.development.operation.AbstractDevelopmentOperation; +import com.webank.wedatasphere.dss.standard.app.development.operation.RefExportOperation; +import com.webank.wedatasphere.dss.standard.app.development.ref.ExportResponseRef; +import com.webank.wedatasphere.dss.standard.app.development.ref.impl.ThirdlyRequestRef; +import com.webank.wedatasphere.dss.standard.app.sso.origin.request.action.DSSPostAction; +import com.webank.wedatasphere.dss.standard.common.entity.ref.InternalResponseRef; +import com.webank.wedatasphere.dss.standard.common.exception.operation.ExternalOperationFailedException; +import com.webank.wedatasphere.exchangis.dss.appconn.constraints.Constraints; +import com.webank.wedatasphere.exchangis.dss.appconn.utils.ExchangisHttpUtils; + +import static com.webank.wedatasphere.exchangis.dss.appconn.constraints.Constraints.API_REQUEST_PREFIX; + +/** + * Ref export operation + */ +public class ExchangisExportOperation + extends AbstractDevelopmentOperation + implements RefExportOperation { + + private String exportUrl; + + @Override + protected String getAppConnName() { + return Constraints.EXCHANGIS_APPCONN_NAME; + } + + @Override + public void init() { + super.init(); + exportUrl = mergeBaseUrl(mergeUrl(API_REQUEST_PREFIX, "appJob/export")); + } + + @Override + public ExportResponseRef exportRef(ThirdlyRequestRef.RefJobContentRequestRefImpl exportRequestRef) throws ExternalOperationFailedException { + logger.info("User {} try to export Exchangis job {} with jobContent: {}, refProjectId: {}, projectName: {}, nodeType: {}.", + 
exportRequestRef.getUserName(), exportRequestRef.getName(), exportRequestRef.getRefJobContent(), + exportRequestRef.getRefProjectId(), exportRequestRef.getProjectName(), exportRequestRef.getType()); + DSSPostAction postAction = new DSSPostAction(); + postAction.setUser(exportRequestRef.getUserName()); + postAction.addRequestPayload("projectId", exportRequestRef.getRefProjectId()); + postAction.addRequestPayload("partial", true); + String nodeType = exportRequestRef.getType(); + Long id = ((Double) exportRequestRef.getRefJobContent().get(Constraints.REF_JOB_ID)).longValue(); + if(Constraints.NODE_TYPE_SQOOP.equalsIgnoreCase(nodeType)) { + postAction.addRequestPayload("sqoopIds", id); + } else if(Constraints.NODE_TYPE_DATAX.equalsIgnoreCase(nodeType)) { + postAction.addRequestPayload("dataXIds", id); + } else { + throw new ExternalOperationFailedException(90177, "Unknown Exchangis jobType " + exportRequestRef.getType()); + } + InternalResponseRef responseRef = ExchangisHttpUtils.getResponseRef(exportRequestRef, exportUrl, postAction, ssoRequestOperation); + return ExportResponseRef.newBuilder().setResourceMap(responseRef.getData()).success(); + } + +} diff --git a/exchangis-plugins/exchangis-appconn/src/main/java/com/webank/wedatasphere/exchangis/dss/appconn/operation/development/ExchangisImportOperation.java b/exchangis-plugins/exchangis-appconn/src/main/java/com/webank/wedatasphere/exchangis/dss/appconn/operation/development/ExchangisImportOperation.java new file mode 100644 index 000000000..11ac4e48e --- /dev/null +++ b/exchangis-plugins/exchangis-appconn/src/main/java/com/webank/wedatasphere/exchangis/dss/appconn/operation/development/ExchangisImportOperation.java @@ -0,0 +1,66 @@ +package com.webank.wedatasphere.exchangis.dss.appconn.operation.development; + +import com.webank.wedatasphere.dss.common.utils.MapUtils; +import com.webank.wedatasphere.dss.standard.app.development.operation.AbstractDevelopmentOperation; +import com.webank.wedatasphere.dss.standard.app.development.operation.RefImportOperation; +import com.webank.wedatasphere.dss.standard.app.development.ref.ImportRequestRef; +import com.webank.wedatasphere.dss.standard.app.development.ref.RefJobContentResponseRef; +import com.webank.wedatasphere.dss.standard.app.development.ref.impl.ThirdlyRequestRef; +import com.webank.wedatasphere.dss.standard.app.sso.origin.request.action.DSSPostAction; +import com.webank.wedatasphere.dss.standard.common.entity.ref.InternalResponseRef; +import com.webank.wedatasphere.dss.standard.common.exception.operation.ExternalOperationFailedException; +import com.webank.wedatasphere.exchangis.dss.appconn.constraints.Constraints; +import com.webank.wedatasphere.exchangis.dss.appconn.utils.ExchangisHttpUtils; + +import java.util.Map; + +import static com.webank.wedatasphere.exchangis.dss.appconn.constraints.Constraints.API_REQUEST_PREFIX; +import static com.webank.wedatasphere.exchangis.dss.appconn.constraints.Constraints.REF_JOB_ID; + + +/** + * Ref import operation + */ +public class ExchangisImportOperation extends AbstractDevelopmentOperation + implements RefImportOperation { + + private String importUrl; + + @Override + protected String getAppConnName() { + return Constraints.EXCHANGIS_APPCONN_NAME; + } + + @Override + public void init() { + super.init(); + importUrl = mergeBaseUrl(mergeUrl(API_REQUEST_PREFIX, "appJob/import")); + } + + @Override + public RefJobContentResponseRef importRef(ThirdlyRequestRef.ImportRequestRefImpl importRequestRef) throws ExternalOperationFailedException { + 
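// Import flow (as the payload below shows): the operation forwards the exported
+        // resource's resourceId/version plus the target project id to "appJob/import";
+        // Exchangis re-creates the job from that resource and returns the new job id
+        // in the response data, keyed per engine under "sqoop".
+ 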
logger.info("User {} try to import Exchangis job {} with jobContent: {}, refProjectId: {}, projectName: {}, nodeType: {}.", + importRequestRef.getUserName(), importRequestRef.getName(), importRequestRef.getRefJobContent(), + importRequestRef.getRefProjectId(), importRequestRef.getProjectName(), importRequestRef.getType()); + DSSPostAction postAction = new DSSPostAction(); + postAction.setUser(importRequestRef.getUserName()); + postAction.addRequestPayload("projectId", importRequestRef.getRefProjectId()); + postAction.addRequestPayload("projectVersion", "v1"); + postAction.addRequestPayload("flowVersion", importRequestRef.getNewVersion()); + postAction.addRequestPayload("resourceId", importRequestRef.getResourceMap().get(ImportRequestRef.RESOURCE_ID_KEY)); + postAction.addRequestPayload("version", importRequestRef.getResourceMap().get(ImportRequestRef.RESOURCE_VERSION_KEY)); + postAction.addRequestPayload("user", importRequestRef.getUserName()); + InternalResponseRef responseRef = ExchangisHttpUtils.getResponseRef(importRequestRef, importUrl, postAction, ssoRequestOperation); + Map realNode = (Map) responseRef.getData().get("sqoop"); + long newId = 0L; + for (Map.Entry entry : realNode.entrySet()) { + newId = ((Double) Double.parseDouble(entry.getValue().toString())).longValue(); + if (newId != 0) { + break; + } + } + logger.info("New job id is {}", newId); + return RefJobContentResponseRef.newBuilder().setRefJobContent(MapUtils.newCommonMap(REF_JOB_ID, newId)).success(); + } + +} diff --git a/exchangis-plugins/exchangis-appconn/src/main/java/com/webank/wedatasphere/exchangis/dss/appconn/operation/development/ExchangisRefCopyOperation.java b/exchangis-plugins/exchangis-appconn/src/main/java/com/webank/wedatasphere/exchangis/dss/appconn/operation/development/ExchangisRefCopyOperation.java new file mode 100644 index 000000000..ed22275a2 --- /dev/null +++ b/exchangis-plugins/exchangis-appconn/src/main/java/com/webank/wedatasphere/exchangis/dss/appconn/operation/development/ExchangisRefCopyOperation.java @@ -0,0 +1,74 @@ +package com.webank.wedatasphere.exchangis.dss.appconn.operation.development; + +import com.webank.wedatasphere.dss.common.utils.MapUtils; +import com.webank.wedatasphere.dss.standard.app.development.operation.AbstractDevelopmentOperation; +import com.webank.wedatasphere.dss.standard.app.development.operation.RefCopyOperation; +import com.webank.wedatasphere.dss.standard.app.development.ref.RefJobContentResponseRef; +import com.webank.wedatasphere.dss.standard.app.development.ref.impl.ThirdlyRequestRef; +import com.webank.wedatasphere.dss.standard.app.sso.origin.request.action.DSSPostAction; +import com.webank.wedatasphere.dss.standard.common.entity.ref.InternalResponseRef; +import com.webank.wedatasphere.dss.standard.common.exception.operation.ExternalOperationFailedException; +import com.webank.wedatasphere.exchangis.dss.appconn.constraints.Constraints; +import com.webank.wedatasphere.exchangis.dss.appconn.utils.ExchangisHttpUtils; + +import java.util.Map; + +import static com.webank.wedatasphere.exchangis.dss.appconn.constraints.Constraints.API_REQUEST_PREFIX; +import static com.webank.wedatasphere.exchangis.dss.appconn.constraints.Constraints.REF_JOB_ID; + + +public class ExchangisRefCopyOperation extends + AbstractDevelopmentOperation + implements RefCopyOperation { + + private String copyUrl; + + @Override + public void init() { + super.init(); + copyUrl = mergeBaseUrl(mergeUrl(API_REQUEST_PREFIX, "appJob/copy")); + } + + @Override + protected String getAppConnName() { + 
return Constraints.EXCHANGIS_APPCONN_NAME; + } + + @Override + public RefJobContentResponseRef copyRef(ThirdlyRequestRef.CopyRequestRefImpl copyRequestRef) throws ExternalOperationFailedException { + logger.info("User {} try to copy Exchangis job {} with jobContent: {}, refProjectId: {}, projectName: {}, nodeType: {}.", + copyRequestRef.getUserName(), copyRequestRef.getName(), copyRequestRef.getRefJobContent(), + copyRequestRef.getRefProjectId(), copyRequestRef.getProjectName(), copyRequestRef.getType()); + DSSPostAction postAction = new DSSPostAction(); + postAction.setUser(copyRequestRef.getUserName()); + postAction.addRequestPayload("projectId", copyRequestRef.getRefJobContent().get("refProjectId")); + postAction.addRequestPayload("partial", true); + postAction.addRequestPayload("projectVersion", "v1"); + postAction.addRequestPayload("flowVersion", copyRequestRef.getNewVersion()); + String nodeType = copyRequestRef.getType(); + Long id = ((Integer) copyRequestRef.getRefJobContent().get(REF_JOB_ID)).longValue(); + if(Constraints.NODE_TYPE_SQOOP.equals(nodeType)) { + postAction.addRequestPayload("sqoopIds", id); + } else if(Constraints.NODE_TYPE_DATAX.equalsIgnoreCase(nodeType)) { + postAction.addRequestPayload("dataXIds", id); + } else { + throw new ExternalOperationFailedException(90177, "Unknown Exchangis jobType " + nodeType); + } + InternalResponseRef responseRef = ExchangisHttpUtils.getResponseRef(copyRequestRef, copyUrl, postAction, ssoRequestOperation); + Map sqoops = (Map) responseRef.getData().get("sqoop"); + //Map sqoops = (Map) responseRef.getData().get("sqoop"); + /*long newId = 0L; + newId = ((Double) sqoops.get(id)).longValue();*/ + + long newId = 0L; + for (Map.Entry entry : sqoops.entrySet()) { + newId = ((Double) Double.parseDouble(entry.getValue().toString())).longValue(); + if (newId != 0) { + break; + } + } + logger.info("New job id is {}", newId); + return RefJobContentResponseRef.newBuilder().setRefJobContent(MapUtils.newCommonMap(REF_JOB_ID, newId)).success(); + } + +} diff --git a/exchangis-plugins/exchangis-appconn/src/main/java/com/webank/wedatasphere/exchangis/dss/appconn/operation/development/ExchangisRefCreationOperation.java b/exchangis-plugins/exchangis-appconn/src/main/java/com/webank/wedatasphere/exchangis/dss/appconn/operation/development/ExchangisRefCreationOperation.java new file mode 100644 index 000000000..f6ad859f0 --- /dev/null +++ b/exchangis-plugins/exchangis-appconn/src/main/java/com/webank/wedatasphere/exchangis/dss/appconn/operation/development/ExchangisRefCreationOperation.java @@ -0,0 +1,70 @@ +package com.webank.wedatasphere.exchangis.dss.appconn.operation.development; + +import com.webank.wedatasphere.dss.common.utils.MapUtils; +import com.webank.wedatasphere.dss.standard.app.development.operation.AbstractDevelopmentOperation; +import com.webank.wedatasphere.dss.standard.app.development.operation.RefCreationOperation; +import com.webank.wedatasphere.dss.standard.app.development.ref.DSSJobContentRequestRef; +import com.webank.wedatasphere.dss.standard.app.development.ref.RefJobContentResponseRef; +import com.webank.wedatasphere.dss.standard.app.development.ref.impl.ThirdlyRequestRef; +import com.webank.wedatasphere.dss.standard.app.development.utils.DSSJobContentConstant; +import com.webank.wedatasphere.dss.standard.app.sso.origin.request.action.DSSPostAction; +import com.webank.wedatasphere.dss.standard.common.entity.ref.InternalResponseRef; +import 
com.webank.wedatasphere.dss.standard.common.exception.operation.ExternalOperationFailedException;
+import com.webank.wedatasphere.exchangis.dss.appconn.constraints.Constraints;
+import com.webank.wedatasphere.exchangis.dss.appconn.utils.ExchangisHttpUtils;
+import org.apache.linkis.httpclient.request.POSTAction;
+
+import static com.webank.wedatasphere.exchangis.dss.appconn.constraints.Constraints.API_REQUEST_PREFIX;
+
+
+/**
+ * Ref creation operation
+ */
+public class ExchangisRefCreationOperation
+        extends AbstractDevelopmentOperation
+        implements RefCreationOperation {
+
+    private String createUrl;
+
+    @Override
+    protected String getAppConnName() {
+        return Constraints.EXCHANGIS_APPCONN_NAME;
+    }
+
+    @Override
+    public void init() {
+        super.init();
+        createUrl = mergeBaseUrl(mergeUrl(API_REQUEST_PREFIX, "appJob/create"));
+    }
+
+    @Override
+    public RefJobContentResponseRef createRef(ThirdlyRequestRef.DSSJobContentRequestRefImpl createRequestRef) throws ExternalOperationFailedException {
+        logger.info("User {} try to create Exchangis job {} with jobContent: {}, refProjectId: {}, projectName: {}, nodeType:{}.",
+            createRequestRef.getUserName(), createRequestRef.getName(), createRequestRef.getDSSJobContent(),
+            createRequestRef.getRefProjectId(), createRequestRef.getProjectName(), createRequestRef.getType());
+        DSSPostAction postAction = new DSSPostAction();
+        postAction.setUser(createRequestRef.getUserName());
+        // TODO The projectId returned when creating a workflow node is incorrect
+        addExchangisJobInfo(postAction, createRequestRef, createRequestRef.getRefProjectId());
+        InternalResponseRef responseRef = ExchangisHttpUtils.getResponseRef(createRequestRef, createUrl, postAction, ssoRequestOperation);
+        return RefJobContentResponseRef.newBuilder().setRefJobContent(responseRef.getData()).success();
+    }
+
+    static void addExchangisJobInfo(POSTAction postAction, DSSJobContentRequestRef requestRef, Long refProjectId) {
+        String desc = String.valueOf(requestRef.getDSSJobContent().get(DSSJobContentConstant.NODE_DESC_KEY));
+        postAction.addRequestPayload("projectId", refProjectId);
+        postAction.addRequestPayload("jobType", Constraints.JOB_TYPE_OFFLINE);
+        postAction.addRequestPayload("jobDesc", desc);
+        postAction.addRequestPayload("jobName", requestRef.getName());
+        postAction.addRequestPayload("source", MapUtils.newCommonMap("version",
+                requestRef.getDSSJobContent().get(DSSJobContentConstant.ORC_VERSION_KEY),
+                "workflowName", requestRef.getDSSJobContent().get(DSSJobContentConstant.ORCHESTRATION_NAME)));
+        if(Constraints.NODE_TYPE_SQOOP.equalsIgnoreCase(requestRef.getType())){
+            postAction.addRequestPayload("engineType", Constraints.ENGINE_TYPE_SQOOP_NAME);
+        } else if(Constraints.NODE_TYPE_DATAX.equalsIgnoreCase(requestRef.getType())){
+            postAction.addRequestPayload("engineType", Constraints.ENGINE_TYPE_DATAX_NAME);
+        } else {
+            throw new ExternalOperationFailedException(90512, "Unsupported Exchangis jobType " + requestRef.getType());
+        }
+    }
+} diff --git a/exchangis-plugins/exchangis-appconn/src/main/java/com/webank/wedatasphere/exchangis/dss/appconn/operation/development/ExchangisRefDeletionOperation.java b/exchangis-plugins/exchangis-appconn/src/main/java/com/webank/wedatasphere/exchangis/dss/appconn/operation/development/ExchangisRefDeletionOperation.java new file mode 100644 index 000000000..44be762ba --- /dev/null +++ b/exchangis-plugins/exchangis-appconn/src/main/java/com/webank/wedatasphere/exchangis/dss/appconn/operation/development/ExchangisRefDeletionOperation.java @@ -0,0 +1,39 @@ +package 
com.webank.wedatasphere.exchangis.dss.appconn.operation.development; + +import com.webank.wedatasphere.dss.standard.app.development.operation.AbstractDevelopmentOperation; +import com.webank.wedatasphere.dss.standard.app.development.operation.RefDeletionOperation; +import com.webank.wedatasphere.dss.standard.app.development.ref.impl.ThirdlyRequestRef; +import com.webank.wedatasphere.dss.standard.app.sso.origin.request.action.DSSPostAction; +import com.webank.wedatasphere.dss.standard.common.entity.ref.ResponseRef; +import com.webank.wedatasphere.dss.standard.common.exception.operation.ExternalOperationFailedException; +import com.webank.wedatasphere.exchangis.dss.appconn.constraints.Constraints; +import com.webank.wedatasphere.exchangis.dss.appconn.utils.ExchangisHttpUtils; + +import static com.webank.wedatasphere.exchangis.dss.appconn.constraints.Constraints.API_REQUEST_PREFIX; + +/** + * Ref delete operation + */ +public class ExchangisRefDeletionOperation extends AbstractDevelopmentOperation + implements RefDeletionOperation { + + @Override + protected String getAppConnName() { + return Constraints.EXCHANGIS_APPCONN_NAME; + } + + @Override + public ResponseRef deleteRef(ThirdlyRequestRef.RefJobContentRequestRefImpl deleteRequestRef) throws ExternalOperationFailedException { + Integer id = (Integer) deleteRequestRef.getRefJobContent().get(Constraints.REF_JOB_ID); + + + + String url = mergeBaseUrl(mergeUrl(API_REQUEST_PREFIX, "appJob/" + id)); + logger.info("User {} try to delete Exchangis job {} in project {}, the url is {}.", deleteRequestRef.getUserName(), + deleteRequestRef.getName(), deleteRequestRef.getProjectName(), url); + DSSPostAction postAction = new DSSPostAction(); + postAction.setUser(deleteRequestRef.getUserName()); + return ExchangisHttpUtils.getResponseRef(deleteRequestRef, url, postAction, ssoRequestOperation); + } + +} diff --git a/exchangis-plugins/exchangis-appconn/src/main/java/com/webank/wedatasphere/exchangis/dss/appconn/operation/development/ExchangisRefExecutionOperation.java b/exchangis-plugins/exchangis-appconn/src/main/java/com/webank/wedatasphere/exchangis/dss/appconn/operation/development/ExchangisRefExecutionOperation.java new file mode 100644 index 000000000..cd74b8c97 --- /dev/null +++ b/exchangis-plugins/exchangis-appconn/src/main/java/com/webank/wedatasphere/exchangis/dss/appconn/operation/development/ExchangisRefExecutionOperation.java @@ -0,0 +1,117 @@ +package com.webank.wedatasphere.exchangis.dss.appconn.operation.development; + +import com.webank.wedatasphere.dss.standard.app.development.listener.common.AbstractRefExecutionAction; +import com.webank.wedatasphere.dss.standard.app.development.listener.common.RefExecutionAction; +import com.webank.wedatasphere.dss.standard.app.development.listener.common.RefExecutionState; +import com.webank.wedatasphere.dss.standard.app.development.listener.core.Killable; +import com.webank.wedatasphere.dss.standard.app.development.listener.core.LongTermRefExecutionOperation; +import com.webank.wedatasphere.dss.standard.app.development.listener.ref.ExecutionResponseRef; +import com.webank.wedatasphere.dss.standard.app.development.listener.ref.RefExecutionRequestRef; +import com.webank.wedatasphere.dss.standard.app.development.operation.RefExecutionOperation; +import com.webank.wedatasphere.dss.standard.app.sso.origin.request.action.DSSGetAction; +import com.webank.wedatasphere.dss.standard.app.sso.origin.request.action.DSSPostAction; +import com.webank.wedatasphere.dss.standard.common.entity.ref.InternalResponseRef; 
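+// Execution protocol implemented by the class below: submit() POSTs
+// appJob/execute/{id} and keeps the returned jobExecutionId inside an
+// ExchangisExecutionAction; state() polls job/execution/{execId}/status and maps
+// Failed/Timeout, Success and Cancelled onto the corresponding DSS
+// RefExecutionState; kill() POSTs job/execution/{execId}/kill.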
+import com.webank.wedatasphere.exchangis.dss.appconn.constraints.Constraints;
+import com.webank.wedatasphere.exchangis.dss.appconn.utils.ExchangisHttpUtils;
+
+import static com.webank.wedatasphere.exchangis.dss.appconn.constraints.Constraints.API_REQUEST_PREFIX;
+
+/**
+ * Ref execute operation
+ */
+public class ExchangisRefExecutionOperation
+        extends LongTermRefExecutionOperation implements Killable {
+
+    @Override
+    protected String getAppConnName() {
+        return Constraints.EXCHANGIS_APPCONN_NAME;
+    }
+
+    @Override
+    protected RefExecutionAction submit(RefExecutionRequestRef.RefExecutionProjectRequestRef executionRequestRef) {
+        String user = executionRequestRef.getExecutionRequestRefContext().getUser();
+        logger.info("User {} try to execute Exchangis job {} with jobContent: {}, refProjectId: {}, projectName: {}, nodeType:{}.",
+            user, executionRequestRef.getName(), executionRequestRef.getRefJobContent(),
+            executionRequestRef.getRefProjectId(), executionRequestRef.getProjectName(), executionRequestRef.getType());
+        Long id = ((Double) executionRequestRef.getRefJobContent().get(Constraints.REF_JOB_ID)).longValue();
+        String url = mergeBaseUrl(mergeUrl(API_REQUEST_PREFIX, "appJob/execute/" + id));
+        executionRequestRef.getExecutionRequestRefContext().appendLog("try to execute " + executionRequestRef.getType() + " node, ready to request to " + url);
+        DSSPostAction postAction = new DSSPostAction();
+        postAction.setUser(user);
+        postAction.addRequestPayload("submitUser", executionRequestRef.getExecutionRequestRefContext().getSubmitUser());
+        InternalResponseRef responseRef = ExchangisHttpUtils.getResponseRef(executionRequestRef, url, postAction, ssoRequestOperation);
+        ExchangisExecutionAction action = new ExchangisExecutionAction();
+        action.setExecId((String) responseRef.getData().get("jobExecutionId"));
+        action.setRequestRef(executionRequestRef);
+        executionRequestRef.getExecutionRequestRefContext().appendLog("submitted to Exchangis with execId: " + action.getExecId());
+        return action;
+    }
+
+    @Override
+    public RefExecutionState state(RefExecutionAction refExecutionAction) {
+        ExchangisExecutionAction action = (ExchangisExecutionAction) refExecutionAction;
+        String url = mergeBaseUrl(mergeUrl(API_REQUEST_PREFIX, "job/execution/" + action.getExecId() + "/status"));
+        DSSGetAction getAction = new DSSGetAction();
+        getAction.setUser(action.getExecutionRequestRefContext().getUser());
+        InternalResponseRef responseRef = ExchangisHttpUtils.getResponseRef(action.getRequestRef(), url, getAction, ssoRequestOperation);
+        String status = (String) responseRef.getData().get("status");
+        action.getExecutionRequestRefContext().appendLog("ExchangisJob(execId: " + action.getExecId() + ") is in state " + status);
+        switch (status) {
+            case "Failed":
+            case "Timeout":
+                return RefExecutionState.Failed;
+            case "Success":
+                return RefExecutionState.Success;
+            case "Cancelled":
+                return RefExecutionState.Killed;
+            default:
+                return RefExecutionState.Running;
+        }
+    }
+
+    @Override
+    public ExecutionResponseRef result(RefExecutionAction refExecutionAction) {
+        RefExecutionState state = state(refExecutionAction);
+        if(state.isSuccess()) {
+            return ExecutionResponseRef.newBuilder().success();
+        } else {
+            // TODO Supplement the response with detailed error information
+            return ExecutionResponseRef.newBuilder().error("Please jump into Exchangis for more detailed errors.");
+        }
+    }
+
+    @Override
+    public boolean kill(RefExecutionAction refExecutionAction) {
+        // TODO The kill method is never invoked
+        ExchangisExecutionAction action = (ExchangisExecutionAction) 
refExecutionAction; + action.getExecutionRequestRefContext().appendLog("try to kill ExchangisJob with execId: " + action.getExecId()); + String url = mergeBaseUrl(mergeUrl(API_REQUEST_PREFIX, "job/execution/" + action.getExecId() +"/kill")); + DSSPostAction postAction = new DSSPostAction(); + postAction.setUser(action.getExecutionRequestRefContext().getUser()); + ExchangisHttpUtils.getResponseRef(action.getRequestRef(), url, postAction, ssoRequestOperation); + return true; + } + + static class ExchangisExecutionAction extends AbstractRefExecutionAction { + + private String execId; + private RefExecutionRequestRef.RefExecutionProjectRequestRef requestRef; + + public String getExecId() { + return execId; + } + + public void setExecId(String _execId) { + this.execId = _execId; + } + + public RefExecutionRequestRef.RefExecutionProjectRequestRef getRequestRef() { + return requestRef; + } + + public void setRequestRef(RefExecutionRequestRef.RefExecutionProjectRequestRef requestRef) { + this.requestRef = requestRef; + } + + } +} diff --git a/exchangis-plugins/exchangis-appconn/src/main/java/com/webank/wedatasphere/exchangis/dss/appconn/operation/development/ExchangisRefQueryOperation.java b/exchangis-plugins/exchangis-appconn/src/main/java/com/webank/wedatasphere/exchangis/dss/appconn/operation/development/ExchangisRefQueryOperation.java new file mode 100644 index 000000000..63557c0e4 --- /dev/null +++ b/exchangis-plugins/exchangis-appconn/src/main/java/com/webank/wedatasphere/exchangis/dss/appconn/operation/development/ExchangisRefQueryOperation.java @@ -0,0 +1,32 @@ +package com.webank.wedatasphere.exchangis.dss.appconn.operation.development; + +import com.webank.wedatasphere.dss.standard.app.development.operation.AbstractDevelopmentOperation; +import com.webank.wedatasphere.dss.standard.app.development.operation.RefQueryJumpUrlOperation; +import com.webank.wedatasphere.dss.standard.app.development.ref.QueryJumpUrlResponseRef; +import com.webank.wedatasphere.dss.standard.app.development.ref.impl.ThirdlyRequestRef; +import com.webank.wedatasphere.dss.standard.common.exception.operation.ExternalOperationFailedException; +import com.webank.wedatasphere.exchangis.dss.appconn.constraints.Constraints; +import com.webank.wedatasphere.exchangis.dss.appconn.utils.ExchangisHttpUtils; + +/** + * Ref query operation + */ +public class ExchangisRefQueryOperation extends + AbstractDevelopmentOperation + implements RefQueryJumpUrlOperation { + + @Override + protected String getAppConnName() { + return Constraints.EXCHANGIS_APPCONN_NAME; + } + + @Override + public QueryJumpUrlResponseRef query(ThirdlyRequestRef.QueryJumpUrlRequestRefImpl openRequestRef) throws ExternalOperationFailedException { + Integer id = (Integer) openRequestRef.getRefJobContent().get(Constraints.REF_JOB_ID); + //String labels = ExchangisHttpUtils.serializeDssLabel(openRequestRef.getDSSLabels()); + String labels = openRequestRef.getDSSLabels().get(0).getValue().get("DSSEnv"); + String jumpUrl = mergeBaseUrl(Constraints.REF_JUMP_URL_FORMAT + "?id=" + id + "&labels=" + labels); + return QueryJumpUrlResponseRef.newBuilder().setJumpUrl(jumpUrl).success(); + } + +} diff --git a/exchangis-plugins/exchangis-appconn/src/main/java/com/webank/wedatasphere/exchangis/dss/appconn/operation/development/ExchangisRefUpdateOperation.java b/exchangis-plugins/exchangis-appconn/src/main/java/com/webank/wedatasphere/exchangis/dss/appconn/operation/development/ExchangisRefUpdateOperation.java new file mode 100644 index 000000000..2e8909804 --- /dev/null +++ 
b/exchangis-plugins/exchangis-appconn/src/main/java/com/webank/wedatasphere/exchangis/dss/appconn/operation/development/ExchangisRefUpdateOperation.java @@ -0,0 +1,41 @@ +package com.webank.wedatasphere.exchangis.dss.appconn.operation.development; + +import com.webank.wedatasphere.dss.standard.app.development.operation.AbstractDevelopmentOperation; +import com.webank.wedatasphere.dss.standard.app.development.operation.RefUpdateOperation; +import com.webank.wedatasphere.dss.standard.app.development.ref.RefJobContentResponseRef; +import com.webank.wedatasphere.dss.standard.app.development.ref.impl.ThirdlyRequestRef; +import com.webank.wedatasphere.dss.standard.app.sso.origin.request.action.DSSPutAction; +import com.webank.wedatasphere.dss.standard.common.entity.ref.InternalResponseRef; +import com.webank.wedatasphere.dss.standard.common.entity.ref.ResponseRef; +import com.webank.wedatasphere.dss.standard.common.exception.operation.ExternalOperationFailedException; +import com.webank.wedatasphere.exchangis.dss.appconn.constraints.Constraints; +import com.webank.wedatasphere.exchangis.dss.appconn.utils.ExchangisHttpUtils; + +import static com.webank.wedatasphere.exchangis.dss.appconn.constraints.Constraints.API_REQUEST_PREFIX; + +/** + * Ref update operation + */ +public class ExchangisRefUpdateOperation extends AbstractDevelopmentOperation + implements RefUpdateOperation { + + @Override + public ResponseRef updateRef(ThirdlyRequestRef.UpdateRequestRefImpl updateRequestRef) throws ExternalOperationFailedException { + logger.info("User {} try to update Exchangis job {} with jobContent: {}, refProjectId: {}, projectName: {}, nodeType: {}.", + updateRequestRef.getUserName(), updateRequestRef.getName(), updateRequestRef.getDSSJobContent(), + updateRequestRef.getRefProjectId(), updateRequestRef.getProjectName(), updateRequestRef.getType()); + Integer id = (Integer) updateRequestRef.getDSSJobContent().get(Constraints.REF_JOB_ID); + String url = mergeBaseUrl(mergeUrl(API_REQUEST_PREFIX, "/appJob/" + id)); + DSSPutAction postAction = new DSSPutAction(); + postAction.setUser(updateRequestRef.getUserName()); + ExchangisRefCreationOperation.addExchangisJobInfo(postAction, updateRequestRef, updateRequestRef.getRefProjectId()); + postAction.addRequestPayload("id", id); + InternalResponseRef responseRef = ExchangisHttpUtils.getResponseRef(updateRequestRef, url, postAction, ssoRequestOperation); + return RefJobContentResponseRef.newBuilder().setRefJobContent(responseRef.getData()).success(); + } + + @Override + protected String getAppConnName() { + return Constraints.EXCHANGIS_APPCONN_NAME; + } +} diff --git a/exchangis-plugins/exchangis-appconn/src/main/java/com/webank/wedatasphere/exchangis/dss/appconn/operation/project/ExchangisProjectCreationOperation.java b/exchangis-plugins/exchangis-appconn/src/main/java/com/webank/wedatasphere/exchangis/dss/appconn/operation/project/ExchangisProjectCreationOperation.java new file mode 100644 index 000000000..c80a7fc56 --- /dev/null +++ b/exchangis-plugins/exchangis-appconn/src/main/java/com/webank/wedatasphere/exchangis/dss/appconn/operation/project/ExchangisProjectCreationOperation.java @@ -0,0 +1,116 @@ +package com.webank.wedatasphere.exchangis.dss.appconn.operation.project; + +import com.webank.wedatasphere.dss.common.utils.MapUtils; +import com.webank.wedatasphere.dss.standard.app.sso.origin.request.action.DSSPostAction; +import com.webank.wedatasphere.dss.standard.app.structure.AbstractStructureOperation; +import 
com.webank.wedatasphere.dss.standard.app.structure.project.ProjectCreationOperation; +import com.webank.wedatasphere.dss.standard.app.structure.project.ref.DSSProjectContentRequestRef; +import com.webank.wedatasphere.dss.standard.app.structure.project.ref.ProjectResponseRef; +import com.webank.wedatasphere.dss.standard.common.entity.ref.InternalResponseRef; +import com.webank.wedatasphere.dss.standard.common.exception.operation.ExternalOperationFailedException; +import com.webank.wedatasphere.exchangis.dss.appconn.constraints.Constraints; +import com.webank.wedatasphere.exchangis.dss.appconn.utils.ExchangisHttpUtils; +import org.apache.commons.lang.StringUtils; +import org.apache.linkis.httpclient.request.POSTAction; +import org.apache.linkis.server.BDPJettyServerHelper; + +import java.util.Map; + +import static com.webank.wedatasphere.exchangis.dss.appconn.constraints.Constraints.API_REQUEST_PREFIX; + +/** + * Project create operation + */ +public class ExchangisProjectCreationOperation extends AbstractStructureOperation + implements ProjectCreationOperation { + + private String projectUrl; + + + @Override + protected String getAppConnName() { + return Constraints.EXCHANGIS_APPCONN_NAME; + } + + @Override + public ProjectResponseRef createProject(DSSProjectContentRequestRef.DSSProjectContentRequestRefImpl projectRequestRef) throws ExternalOperationFailedException { + logger.info("User {} want to create a Exchangis project with dssProjectName:{}, createUser:{}, parameters:{}, workspaceName:{}", + projectRequestRef.getUserName(), projectRequestRef.getDSSProject().getName(), + projectRequestRef.getDSSProject().getCreateBy(), projectRequestRef.getParameters().toString(), + projectRequestRef.getWorkspace().getWorkspaceName()); + DSSPostAction postAction = new DSSPostAction(); + postAction.setUser(projectRequestRef.getUserName()); + addProjectInfo(postAction, projectRequestRef); + InternalResponseRef responseRef = ExchangisHttpUtils.getResponseRef(projectRequestRef, projectUrl, postAction, ssoRequestOperation); + logger.info("User {} created a Exchangis project {} with response {}.", projectRequestRef.getUserName(), projectRequestRef.getDSSProject().getName(), responseRef.getResponseBody()); + long projectId; + try { + ResponseMessage data = BDPJettyServerHelper.gson().fromJson(responseRef.getResponseBody(), ResponseMessage.class); + //responseRef.getResponseBody(); + //projectId = Long.parseLong(String.valueOf(responseRef.getData().get(Constraints.PROJECT_ID))); + projectId = Long.parseLong(String.valueOf(data.getData().get("projectId"))); + logger.info("Exchangis projectId is {}", projectId); + } catch (Exception e){ + throw new ExternalOperationFailedException(31020, "Fail to resolve the project id from response entity", e); + } + return ProjectResponseRef.newInternalBuilder().setRefProjectId(projectId).success(); + } + + @Override + public void init() { + super.init(); + projectUrl = mergeBaseUrl(mergeUrl(API_REQUEST_PREFIX, "appProject")); + } + + public static void addProjectInfo(POSTAction postAction, DSSProjectContentRequestRef requestRef) { + postAction.addRequestPayload("projectName", requestRef.getDSSProject().getName()); + postAction.addRequestPayload("description", requestRef.getDSSProject().getDescription()); + postAction.addRequestPayload("domain", Constraints.DOMAIN_NAME); + postAction.addRequestPayload("source", MapUtils.newCommonMap("workspace", requestRef.getWorkspace().getWorkspaceName())); + postAction.addRequestPayload("editUsers", 
StringUtils.join(requestRef.getDSSProjectPrivilege().getEditUsers(),",")); + postAction.addRequestPayload("viewUsers", StringUtils.join(requestRef.getDSSProjectPrivilege().getAccessUsers(),",")); + postAction.addRequestPayload("execUsers", StringUtils.join(requestRef.getDSSProjectPrivilege().getReleaseUsers(),",")); + } + + public static class ResponseMessage { + private String method; + private Double status; + private String message; + private Map data; + + public ResponseMessage() { + } + + public String getMethod() { + return method; + } + + public void setMethod(String method) { + this.method = method; + } + + public Double getStatus() { + return status; + } + + public void setStatus(Double status) { + this.status = status; + } + + public String getMessage() { + return message; + } + + public void setMessage(String message) { + this.message = message; + } + + public Map getData() { + return data; + } + + public void setData(Map data) { + this.data = data; + } + } +} diff --git a/exchangis-plugins/exchangis-appconn/src/main/java/com/webank/wedatasphere/exchangis/dss/appconn/operation/project/ExchangisProjectDeletionOperation.java b/exchangis-plugins/exchangis-appconn/src/main/java/com/webank/wedatasphere/exchangis/dss/appconn/operation/project/ExchangisProjectDeletionOperation.java new file mode 100644 index 000000000..2cad3064a --- /dev/null +++ b/exchangis-plugins/exchangis-appconn/src/main/java/com/webank/wedatasphere/exchangis/dss/appconn/operation/project/ExchangisProjectDeletionOperation.java @@ -0,0 +1,35 @@ +package com.webank.wedatasphere.exchangis.dss.appconn.operation.project; + +import com.webank.wedatasphere.dss.standard.app.sso.origin.request.action.DSSPostAction; +import com.webank.wedatasphere.dss.standard.app.structure.AbstractStructureOperation; +import com.webank.wedatasphere.dss.standard.app.structure.project.ProjectDeletionOperation; +import com.webank.wedatasphere.dss.standard.app.structure.project.ref.RefProjectContentRequestRef; +import com.webank.wedatasphere.dss.standard.common.entity.ref.ResponseRef; +import com.webank.wedatasphere.dss.standard.common.exception.operation.ExternalOperationFailedException; +import com.webank.wedatasphere.exchangis.dss.appconn.constraints.Constraints; +import com.webank.wedatasphere.exchangis.dss.appconn.utils.ExchangisHttpUtils; + +import static com.webank.wedatasphere.exchangis.dss.appconn.constraints.Constraints.API_REQUEST_PREFIX; + + +/** + * Project delete operation + */ +public class ExchangisProjectDeletionOperation extends AbstractStructureOperation + implements ProjectDeletionOperation { + + @Override + public ResponseRef deleteProject(RefProjectContentRequestRef.RefProjectContentRequestRefImpl projectRequestRef) throws ExternalOperationFailedException { + String url = mergeBaseUrl(mergeUrl(API_REQUEST_PREFIX,"appProject/" + projectRequestRef.getProjectName())); + logger.info("User {} try to delete Exchangis project with refProjectId: {}, name: {}, the url is {}.", projectRequestRef.getUserName(), + projectRequestRef.getRefProjectId(), projectRequestRef.getProjectName(), url); + DSSPostAction postAction = new DSSPostAction(); + postAction.setUser(projectRequestRef.getUserName()); + return ExchangisHttpUtils.getResponseRef(projectRequestRef, url, postAction, ssoRequestOperation); + } + + @Override + protected String getAppConnName() { + return Constraints.EXCHANGIS_APPCONN_NAME; + } +} diff --git 
a/exchangis-plugins/exchangis-appconn/src/main/java/com/webank/wedatasphere/exchangis/dss/appconn/operation/project/ExchangisProjectGetOperation.java b/exchangis-plugins/exchangis-appconn/src/main/java/com/webank/wedatasphere/exchangis/dss/appconn/operation/project/ExchangisProjectGetOperation.java new file mode 100644 index 000000000..95023d69c --- /dev/null +++ b/exchangis-plugins/exchangis-appconn/src/main/java/com/webank/wedatasphere/exchangis/dss/appconn/operation/project/ExchangisProjectGetOperation.java @@ -0,0 +1,47 @@ +package com.webank.wedatasphere.exchangis.dss.appconn.operation.project; + +import com.webank.wedatasphere.dss.standard.app.sso.origin.request.action.DSSPostAction; +import com.webank.wedatasphere.dss.standard.app.structure.AbstractStructureOperation; +import com.webank.wedatasphere.dss.standard.app.structure.project.ProjectSearchOperation; +import com.webank.wedatasphere.dss.standard.app.structure.project.ref.ProjectResponseRef; +import com.webank.wedatasphere.dss.standard.app.structure.project.ref.RefProjectContentRequestRef; +import com.webank.wedatasphere.dss.standard.common.entity.ref.InternalResponseRef; +import com.webank.wedatasphere.dss.standard.common.exception.operation.ExternalOperationFailedException; +import com.webank.wedatasphere.exchangis.dss.appconn.constraints.Constraints; +import com.webank.wedatasphere.exchangis.dss.appconn.utils.ExchangisHttpUtils; + +import java.util.Map; + +import static com.webank.wedatasphere.exchangis.dss.appconn.constraints.Constraints.API_REQUEST_PREFIX; + +/** + * @author tikazhang + * @Date 2022/3/22 0:48 + */ +public class ExchangisProjectGetOperation extends AbstractStructureOperation + implements ProjectSearchOperation { + + @Override + public ProjectResponseRef searchProject(RefProjectContentRequestRef.RefProjectContentRequestRefImpl projectRequestRef) throws ExternalOperationFailedException { + + String url = mergeBaseUrl(mergeUrl(API_REQUEST_PREFIX, "appProject/check/" + projectRequestRef.getProjectName())); + logger.info("User {} try to search Exchangis project with name: {}, the url is {}.", projectRequestRef.getUserName(), + projectRequestRef.getProjectName(), url); + DSSPostAction postAction = new DSSPostAction(); + postAction.setUser(projectRequestRef.getUserName()); + InternalResponseRef responseRef = ExchangisHttpUtils.getResponseRef(projectRequestRef, url, postAction, ssoRequestOperation); + Map projectInfo = (Map) responseRef.getData().get("projectInfo"); + // If no project is found, there is no duplicate project and we can return directly + if (projectInfo == null) { + return ProjectResponseRef.newInternalBuilder().success(); + } else { + return ProjectResponseRef.newInternalBuilder().setRefProjectId(Long.parseLong(projectInfo.get("id").toString())).success(); + } + } + + @Override + protected String getAppConnName() { + return Constraints.EXCHANGIS_APPCONN_NAME; + } + +} \ No newline at end of file diff --git a/exchangis-plugins/exchangis-appconn/src/main/java/com/webank/wedatasphere/exchangis/dss/appconn/operation/project/ExchangisProjectUpdateOperation.java b/exchangis-plugins/exchangis-appconn/src/main/java/com/webank/wedatasphere/exchangis/dss/appconn/operation/project/ExchangisProjectUpdateOperation.java new file mode 100644 index 000000000..a90cebdea --- /dev/null +++ b/exchangis-plugins/exchangis-appconn/src/main/java/com/webank/wedatasphere/exchangis/dss/appconn/operation/project/ExchangisProjectUpdateOperation.java @@ -0,0 +1,60 @@ +package com.webank.wedatasphere.exchangis.dss.appconn.operation.project; + +import
com.webank.wedatasphere.dss.common.utils.MapUtils; +import com.webank.wedatasphere.dss.standard.app.sso.origin.request.action.DSSPutAction; +import com.webank.wedatasphere.dss.standard.app.structure.AbstractStructureOperation; +import com.webank.wedatasphere.dss.standard.app.structure.project.ProjectUpdateOperation; +import com.webank.wedatasphere.dss.standard.app.structure.project.ref.ProjectUpdateRequestRef; +import com.webank.wedatasphere.dss.standard.common.entity.ref.InternalResponseRef; +import com.webank.wedatasphere.dss.standard.common.entity.ref.ResponseRef; +import com.webank.wedatasphere.dss.standard.common.exception.operation.ExternalOperationFailedException; +import com.webank.wedatasphere.exchangis.dss.appconn.constraints.Constraints; +import com.webank.wedatasphere.exchangis.dss.appconn.utils.ExchangisHttpUtils; + +import static com.webank.wedatasphere.exchangis.dss.appconn.constraints.Constraints.API_REQUEST_PREFIX; + +/** + * Project update operation + */ +public class ExchangisProjectUpdateOperation + extends AbstractStructureOperation + implements ProjectUpdateOperation { + + private String projectUpdateUrl; + + @Override + public ResponseRef updateProject(ProjectUpdateRequestRef.ProjectUpdateRequestRefImpl projectRequestRef) throws ExternalOperationFailedException { + String url = mergeUrl(projectUpdateUrl, String.valueOf(projectRequestRef.getRefProjectId())); + logger.info("User {} try to update Exchangis project with dssProjectName: {}, refProjectId: {}, url is {}.", + projectRequestRef.getUserName(), projectRequestRef.getDSSProject().getName(), + projectRequestRef.getRefProjectId(), url); + DSSPutAction putAction = new DSSPutAction(); + putAction.setUser(projectRequestRef.getUserName()); + addProjectInfo(putAction, projectRequestRef); + logger.info("project payload is: {}", putAction.getRequestPayload()); + ExchangisProjectCreationOperation.addProjectInfo(putAction, projectRequestRef); + InternalResponseRef responseRef = ExchangisHttpUtils.getResponseRef(projectRequestRef, url, putAction, ssoRequestOperation); + logger.info("User {} updated Exchangis project {} with response {}.", projectRequestRef.getUserName(), projectRequestRef.getRefProjectId(), responseRef.getResponseBody()); + return responseRef; + } + + @Override + protected String getAppConnName() { + return Constraints.EXCHANGIS_APPCONN_NAME; + } + + @Override + public void init() { + super.init(); + projectUpdateUrl = mergeBaseUrl(mergeUrl(API_REQUEST_PREFIX, "appProject")); + } + + public static void addProjectInfo(DSSPutAction putAction, ProjectUpdateRequestRef requestRef) { + putAction.addRequestPayload("id", requestRef.getRefProjectId()); + putAction.addRequestPayload("projectName", requestRef.getDSSProject().getName()); + putAction.addRequestPayload("description", requestRef.getDSSProject().getDescription()); + putAction.addRequestPayload("domain", Constraints.DOMAIN_NAME); + putAction.addRequestPayload("source", MapUtils.newCommonMap("workspace", requestRef.getWorkspace().getWorkspaceName())); + } + +} diff --git a/exchangis-plugins/exchangis-appconn/src/main/java/com/webank/wedatasphere/exchangis/dss/appconn/service/ExchangisProjectService.java b/exchangis-plugins/exchangis-appconn/src/main/java/com/webank/wedatasphere/exchangis/dss/appconn/service/ExchangisProjectService.java new file mode 100644 index 000000000..848d10e77 --- /dev/null +++ b/exchangis-plugins/exchangis-appconn/src/main/java/com/webank/wedatasphere/exchangis/dss/appconn/service/ExchangisProjectService.java @@ -0,0 +1,43 @@ +package 
com.webank.wedatasphere.exchangis.dss.appconn.service; + +import com.webank.wedatasphere.dss.standard.app.structure.project.*; +import com.webank.wedatasphere.exchangis.dss.appconn.operation.project.ExchangisProjectCreationOperation; +import com.webank.wedatasphere.exchangis.dss.appconn.operation.project.ExchangisProjectDeletionOperation; +import com.webank.wedatasphere.exchangis.dss.appconn.operation.project.ExchangisProjectGetOperation; +import com.webank.wedatasphere.exchangis.dss.appconn.operation.project.ExchangisProjectUpdateOperation; + +/** + * Project service implement + */ +public class ExchangisProjectService extends ProjectService { + + @Override + public boolean isCooperationSupported() { + return true; + } + + @Override + public boolean isProjectNameUnique() { + return true; + } + + @Override + protected ProjectCreationOperation createProjectCreationOperation() { + return new ExchangisProjectCreationOperation(); + } + + @Override + protected ProjectUpdateOperation createProjectUpdateOperation() { + return new ExchangisProjectUpdateOperation(); + } + + @Override + protected ProjectDeletionOperation createProjectDeletionOperation() { + return new ExchangisProjectDeletionOperation(); + } + + @Override + protected ProjectSearchOperation createProjectSearchOperation() { + return new ExchangisProjectGetOperation(); + } +} diff --git a/exchangis-plugins/exchangis-appconn/src/main/java/com/webank/wedatasphere/exchangis/dss/appconn/service/ExchangisRefCRUDService.java b/exchangis-plugins/exchangis-appconn/src/main/java/com/webank/wedatasphere/exchangis/dss/appconn/service/ExchangisRefCRUDService.java new file mode 100644 index 000000000..5e70b7db6 --- /dev/null +++ b/exchangis-plugins/exchangis-appconn/src/main/java/com/webank/wedatasphere/exchangis/dss/appconn/service/ExchangisRefCRUDService.java @@ -0,0 +1,34 @@ +package com.webank.wedatasphere.exchangis.dss.appconn.service; + +import com.webank.wedatasphere.dss.standard.app.development.service.AbstractRefCRUDService; +import com.webank.wedatasphere.exchangis.dss.appconn.operation.development.ExchangisRefCopyOperation; +import com.webank.wedatasphere.exchangis.dss.appconn.operation.development.ExchangisRefCreationOperation; +import com.webank.wedatasphere.exchangis.dss.appconn.operation.development.ExchangisRefDeletionOperation; +import com.webank.wedatasphere.exchangis.dss.appconn.operation.development.ExchangisRefUpdateOperation; + +/** + * Ref CRUD service + */ +public class ExchangisRefCRUDService extends AbstractRefCRUDService { + + @Override + protected ExchangisRefCreationOperation createRefCreationOperation() { + return new ExchangisRefCreationOperation(); + } + + @Override + protected ExchangisRefCopyOperation createRefCopyOperation() { + return new ExchangisRefCopyOperation(); + } + + @Override + protected ExchangisRefUpdateOperation createRefUpdateOperation() { + return new ExchangisRefUpdateOperation(); + } + + @Override + @SuppressWarnings("unchecked") + protected ExchangisRefDeletionOperation createRefDeletionOperation() { + return new ExchangisRefDeletionOperation(); + } +} diff --git a/exchangis-plugins/exchangis-appconn/src/main/java/com/webank/wedatasphere/exchangis/dss/appconn/service/ExchangisRefExecutionService.java b/exchangis-plugins/exchangis-appconn/src/main/java/com/webank/wedatasphere/exchangis/dss/appconn/service/ExchangisRefExecutionService.java new file mode 100644 index 000000000..bb501d62b --- /dev/null +++ 
b/exchangis-plugins/exchangis-appconn/src/main/java/com/webank/wedatasphere/exchangis/dss/appconn/service/ExchangisRefExecutionService.java @@ -0,0 +1,14 @@ +package com.webank.wedatasphere.exchangis.dss.appconn.service; + +import com.webank.wedatasphere.dss.standard.app.development.operation.RefExecutionOperation; +import com.webank.wedatasphere.dss.standard.app.development.service.AbstractRefExecutionService; +import com.webank.wedatasphere.exchangis.dss.appconn.operation.development.ExchangisRefExecutionOperation; + +public class ExchangisRefExecutionService extends AbstractRefExecutionService { + + @Override + protected RefExecutionOperation createRefExecutionOperation() { + return new ExchangisRefExecutionOperation(); + } + +} diff --git a/exchangis-plugins/exchangis-appconn/src/main/java/com/webank/wedatasphere/exchangis/dss/appconn/service/ExchangisRefExportService.java b/exchangis-plugins/exchangis-appconn/src/main/java/com/webank/wedatasphere/exchangis/dss/appconn/service/ExchangisRefExportService.java new file mode 100644 index 000000000..221c1df70 --- /dev/null +++ b/exchangis-plugins/exchangis-appconn/src/main/java/com/webank/wedatasphere/exchangis/dss/appconn/service/ExchangisRefExportService.java @@ -0,0 +1,16 @@ +package com.webank.wedatasphere.exchangis.dss.appconn.service; + +import com.webank.wedatasphere.dss.standard.app.development.service.AbstractRefExportService; +import com.webank.wedatasphere.exchangis.dss.appconn.operation.development.ExchangisExportOperation; + +/** + * Ref export service + */ +public class ExchangisRefExportService extends AbstractRefExportService { + + @Override + protected ExchangisExportOperation createRefExportOperation() { + return new ExchangisExportOperation(); + } + +} diff --git a/exchangis-plugins/exchangis-appconn/src/main/java/com/webank/wedatasphere/exchangis/dss/appconn/service/ExchangisRefImportService.java b/exchangis-plugins/exchangis-appconn/src/main/java/com/webank/wedatasphere/exchangis/dss/appconn/service/ExchangisRefImportService.java new file mode 100644 index 000000000..72bc0bc97 --- /dev/null +++ b/exchangis-plugins/exchangis-appconn/src/main/java/com/webank/wedatasphere/exchangis/dss/appconn/service/ExchangisRefImportService.java @@ -0,0 +1,17 @@ +package com.webank.wedatasphere.exchangis.dss.appconn.service; + + +import com.webank.wedatasphere.dss.standard.app.development.service.AbstractRefImportService; +import com.webank.wedatasphere.exchangis.dss.appconn.operation.development.ExchangisImportOperation; + +/** + * Ref import service + */ +public class ExchangisRefImportService extends AbstractRefImportService { + + @Override + protected ExchangisImportOperation createRefImportOperation() { + return new ExchangisImportOperation(); + } + +} diff --git a/exchangis-plugins/exchangis-appconn/src/main/java/com/webank/wedatasphere/exchangis/dss/appconn/service/ExchangisRefQueryService.java b/exchangis-plugins/exchangis-appconn/src/main/java/com/webank/wedatasphere/exchangis/dss/appconn/service/ExchangisRefQueryService.java new file mode 100644 index 000000000..061e7c615 --- /dev/null +++ b/exchangis-plugins/exchangis-appconn/src/main/java/com/webank/wedatasphere/exchangis/dss/appconn/service/ExchangisRefQueryService.java @@ -0,0 +1,15 @@ +package com.webank.wedatasphere.exchangis.dss.appconn.service; + +import com.webank.wedatasphere.dss.standard.app.development.service.AbstractRefQueryService; +import com.webank.wedatasphere.exchangis.dss.appconn.operation.development.ExchangisRefQueryOperation; + +/** + * Ref query service 
+ */ +public class ExchangisRefQueryService extends AbstractRefQueryService { + + @Override + protected ExchangisRefQueryOperation createRefQueryOperation() { + return new ExchangisRefQueryOperation(); + } +} diff --git a/exchangis-plugins/exchangis-appconn/src/main/java/com/webank/wedatasphere/exchangis/dss/appconn/utils/ExchangisHttpUtils.java b/exchangis-plugins/exchangis-appconn/src/main/java/com/webank/wedatasphere/exchangis/dss/appconn/utils/ExchangisHttpUtils.java new file mode 100644 index 000000000..c45f40e12 --- /dev/null +++ b/exchangis-plugins/exchangis-appconn/src/main/java/com/webank/wedatasphere/exchangis/dss/appconn/utils/ExchangisHttpUtils.java @@ -0,0 +1,75 @@ +package com.webank.wedatasphere.exchangis.dss.appconn.utils; + +import com.webank.wedatasphere.dss.common.label.DSSLabel; +import com.webank.wedatasphere.dss.standard.app.sso.builder.SSOUrlBuilderOperation; +import com.webank.wedatasphere.dss.standard.app.sso.origin.request.action.DSSHttpAction; +import com.webank.wedatasphere.dss.standard.app.sso.ref.WorkspaceRequestRef; +import com.webank.wedatasphere.dss.standard.app.sso.request.SSORequestOperation; +import com.webank.wedatasphere.dss.standard.common.entity.ref.InternalResponseRef; +import com.webank.wedatasphere.dss.standard.common.entity.ref.ResponseRef; +import com.webank.wedatasphere.dss.standard.common.exception.operation.ExternalOperationFailedException; +import com.webank.wedatasphere.dss.standard.sso.utils.SSOHelper; +import com.webank.wedatasphere.exchangis.dss.appconn.constraints.Constraints; +import org.apache.linkis.httpclient.request.GetAction; +import org.apache.linkis.httpclient.request.HttpAction; +import org.apache.linkis.httpclient.request.POSTAction; +import org.apache.linkis.httpclient.response.HttpResult; +import org.apache.linkis.manager.label.entity.SerializableLabel; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.HashMap; +import java.util.List; +import java.util.stream.Collectors; + +/** + * @author enjoyyin + * @date 2022-05-09 + * @since 0.5.0 + */ +public class ExchangisHttpUtils { + + private static final Logger LOG = LoggerFactory.getLogger(ExchangisHttpUtils.class); + + public static InternalResponseRef getResponseRef(WorkspaceRequestRef requestRef, String url, + DSSHttpAction httpAction, + SSORequestOperation ssoRequestOperation) { + + SSOUrlBuilderOperation ssoUrlBuilderOperation = SSOHelper.createSSOUrlBuilderOperation(requestRef.getWorkspace()); + ssoUrlBuilderOperation.setAppName(Constraints.EXCHANGIS_APPCONN_NAME); + ssoUrlBuilderOperation.setReqUrl(url); + httpAction.setUrl(ssoUrlBuilderOperation.getBuiltUrl()); + //String labels = serializeDssLabel(requestRef.getDSSLabels()); + if(httpAction instanceof POSTAction) { + HashMap labels = new HashMap<>(); + labels.put("route", requestRef.getDSSLabels().get(0).getValue().get("DSSEnv")); + //exchangisEntityPostAction.addRequestPayload("labels", labels); + ((POSTAction) httpAction).addRequestPayload("labels", labels); + } else if(httpAction instanceof GetAction) { + String labels = requestRef.getDSSLabels().get(0).getValue().get("DSSEnv"); + ((GetAction) httpAction).setParameter("labels", labels); + } + LOG.info("User {} try to request Exchangis with url {} and labels {}.", httpAction.getUser(), httpAction.getURL(), requestRef.getDSSLabels().get(0).getValue().get("DSSEnv")); + HttpResult httpResult = ssoRequestOperation.requestWithSSO(ssoUrlBuilderOperation, httpAction); + LOG.info("responseBody:{}", httpResult.getResponseBody()); + 
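+ // Response-envelope assumption: Exchangis replies are taken to have the shape
+ // {"method": ..., "status": 0, "message": "OK", "data": {...}} (compare the sample payload in
+ // TestAppConn and the ResponseMessage mapping in ExchangisProjectCreationOperation).
+ // newInternalBuilder() parses that body, and isFailed() below should flag any non-zero status,
+ // so callers get an ExternalOperationFailedException instead of a half-parsed ResponseRef.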
InternalResponseRef responseRef = ResponseRef.newInternalBuilder().setResponseBody(httpResult.getResponseBody()).build(); + if (responseRef.isFailed()){ + throw new ExternalOperationFailedException(95011, responseRef.getErrorMsg()); + } + return responseRef; + } + + /** + * Invoke the "getStringValue" method in label entity and then concat each one + * @param list label list + * @return serialized string value + */ + public static String serializeDssLabel(List list){ + String dssLabelStr = ""; + if(list != null && !list.isEmpty()){ + dssLabelStr = list.stream().map(SerializableLabel::getStringValue).collect(Collectors.joining(",")); + } + return dssLabelStr; + } + +} diff --git a/exchangis-plugins/exchangis-appconn/src/main/resources/appconn.properties b/exchangis-plugins/exchangis-appconn/src/main/resources/appconn.properties new file mode 100644 index 000000000..51a424d00 --- /dev/null +++ b/exchangis-plugins/exchangis-appconn/src/main/resources/appconn.properties @@ -0,0 +1,22 @@ +# +# /* +# * Copyright 2019 WeBank +# * +# * Licensed under the Apache License, Version 2.0 (the "License"); +# * you may not use this file except in compliance with the License. +# * You may obtain a copy of the License at +# * +# * http://www.apache.org/licenses/LICENSE-2.0 +# * +# * Unless required by applicable law or agreed to in writing, software +# * distributed under the License is distributed on an "AS IS" BASIS, +# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# * See the License for the specific language governing permissions and +# * limitations under the License. +# */ +# + + + + + diff --git a/exchangis-plugins/exchangis-appconn/src/main/resources/datax.icon b/exchangis-plugins/exchangis-appconn/src/main/resources/datax.icon new file mode 100644 index 000000000..bf047fb7b --- /dev/null +++ b/exchangis-plugins/exchangis-appconn/src/main/resources/datax.icon @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/exchangis-plugins/exchangis-appconn/src/main/resources/init.sql b/exchangis-plugins/exchangis-appconn/src/main/resources/init.sql new file mode 100644 index 000000000..0b98053c0 --- /dev/null +++ b/exchangis-plugins/exchangis-appconn/src/main/resources/init.sql @@ -0,0 +1,42 @@ +-- Delete the data associated with exchangis -- +delete from `dss_appconn_instance` where `appconn_id` in (select `id` from `dss_appconn` where `appconn_name` = 'exchangis'); +delete from `dss_workspace_menu_appconn` where `appconn_id` in (select `id` from `dss_appconn` where `appconn_name` = 'exchangis'); +delete from `dss_appconn` where `appconn_name`='exchangis'; + +INSERT INTO `dss_appconn` (`appconn_name`, `is_user_need_init`, `level`, `if_iframe`, `is_external`, `reference`, `class_name`, `appconn_class_path`, `resource`) +VALUES ('exchangis', 0, 1, 1, 1, NULL, 'com.webank.wedatasphere.exchangis.dss.appconn.ExchangisAppConn', '/appcom/Install/dss/dss-appconns/exchangis', ''); + +INSERT INTO `dss_appconn_instance` (`appconn_id`, `label`, `url`, `enhance_json`, `homepage_uri`) +VALUES ((select id from `dss_appconn` where `appconn_name` = "exchangis" limit 1), 'DEV', 'http://APPCONN_INSTALL_IP:APPCONN_INSTALL_PORT/', '', '#/projectManage'); + +-- Decide which menu the appconn component should belong to +INSERT INTO `dss_workspace_menu_appconn` (`appconn_id`, `menu_id`, `title_en`, `title_cn`, `desc_en`, `desc_cn`, `labels_en`, `labels_cn`, `is_active`, `access_button_en`, `access_button_cn`, `manual_button_en`, `manual_button_cn`, `manual_button_url`, `icon`, `order`, `create_by`, `create_time`, `last_update_time`, `last_update_user`,
`image`) +VALUES((select id from `dss_appconn` where `appconn_name` = "exchangis" limit 1), (select id from `dss_workspace_menu` where `name` = "数据交换") +,'Exchangis','Exchangis','Exchangis','Exchangis数据交换平台','exchangis, statement','数据交换,数据源','1','enter Exchangis','进入Exchangis','user manual','用户手册','http://APPCONN_INSTALL_IP:APPCONN_INSTALL_PORT/#/projectManage','shujujiaohuan-logo',NULL,NULL,NULL,NULL,NULL,'shujujiaohuan-icon'); + +-- Uninstall the workflow nodes +delete from `dss_workflow_node_to_group` where `node_id` in (select id from `dss_workflow_node` where `node_type` = 'linkis.appconn.exchangis.sqoop' or `node_type` = 'linkis.appconn.exchangis.datax'); +delete from `dss_workflow_node_to_ui` where `workflow_node_id` in (select id from `dss_workflow_node` where `node_type` = 'linkis.appconn.exchangis.sqoop' or `node_type` = 'linkis.appconn.exchangis.datax'); +delete from `dss_workflow_node` where `node_type` like '%exchangis%'; + +-- Node table dss_workflow_node +insert into `dss_workflow_node` (`name`, `appconn_name`, `node_type`, `jump_type`, `support_jump`, `submit_to_scheduler`, `enable_copy`, `should_creation_before_node`, `icon_path`) +values('sqoop','exchangis','linkis.appconn.exchangis.sqoop',1,'1','1','0','1','icons/sqoop.icon'); +insert into `dss_workflow_node` (`name`, `appconn_name`, `node_type`, `jump_type`, `support_jump`, `submit_to_scheduler`, `enable_copy`, `should_creation_before_node`, `icon_path`) +values('datax','exchangis','linkis.appconn.exchangis.datax',1,'1','1','0','1','icons/datax.icon'); + +-- Node group table dss_workflow_node_to_group +INSERT INTO `dss_workflow_node_to_group`(`node_id`,`group_id`) values ((select id from `dss_workflow_node` where `node_type` = 'linkis.appconn.exchangis.sqoop' limit 1), (select id from `dss_workflow_node_group` where `name` = '数据交换')); +INSERT INTO `dss_workflow_node_to_group`(`node_id`,`group_id`) values ((select id from `dss_workflow_node` where `node_type` = 'linkis.appconn.exchangis.datax' limit 1), (select id from `dss_workflow_node_group` where `name` = '数据交换')); + +-- Node UI table dss_workflow_node_to_ui +INSERT INTO `dss_workflow_node_to_ui`(`workflow_node_id`,`ui_id`) values ((select id from `dss_workflow_node` where `node_type` = 'linkis.appconn.exchangis.sqoop' limit 1), 1); +INSERT INTO `dss_workflow_node_to_ui`(`workflow_node_id`,`ui_id`) values ((select id from `dss_workflow_node` where `node_type` = 'linkis.appconn.exchangis.sqoop' limit 1), 2); +INSERT INTO `dss_workflow_node_to_ui`(`workflow_node_id`,`ui_id`) values ((select id from `dss_workflow_node` where `node_type` = 'linkis.appconn.exchangis.sqoop' limit 1), 3); +INSERT INTO `dss_workflow_node_to_ui`(`workflow_node_id`,`ui_id`) values ((select id from `dss_workflow_node` where `node_type` = 'linkis.appconn.exchangis.sqoop' limit 1), 4); +INSERT INTO `dss_workflow_node_to_ui`(`workflow_node_id`,`ui_id`) values ((select id from `dss_workflow_node` where `node_type` = 'linkis.appconn.exchangis.sqoop' limit 1), 5); +INSERT INTO `dss_workflow_node_to_ui`(`workflow_node_id`,`ui_id`) values ((select id from `dss_workflow_node` where `node_type` = 'linkis.appconn.exchangis.datax' limit 1), 1); +INSERT INTO `dss_workflow_node_to_ui`(`workflow_node_id`,`ui_id`) values ((select id from `dss_workflow_node` where `node_type` = 'linkis.appconn.exchangis.datax' limit 1), 2); +INSERT INTO `dss_workflow_node_to_ui`(`workflow_node_id`,`ui_id`) values ((select id from `dss_workflow_node` where `node_type` = 'linkis.appconn.exchangis.datax' limit 1), 3); +INSERT INTO `dss_workflow_node_to_ui`(`workflow_node_id`,`ui_id`)
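+-- Note (assumption): the ui_id values 1-5 bound to each node reference pre-existing rows of the
+-- DSS node UI property table; exactly which form fields they denote depends on the DSS release,
+-- so the same five ids are attached to both the sqoop and datax nodes.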
values ((select id from `dss_workflow_node` where `node_type` = 'linkis.appconn.exchangis.datax' limit 1), 4); +INSERT INTO `dss_workflow_node_to_ui`(`workflow_node_id`,`ui_id`) values ((select id from `dss_workflow_node` where `node_type` = 'linkis.appconn.exchangis.datax' limit 1), 5); diff --git a/exchangis-plugins/exchangis-appconn/src/main/resources/init_real.sql b/exchangis-plugins/exchangis-appconn/src/main/resources/init_real.sql new file mode 100644 index 000000000..e07e699df --- /dev/null +++ b/exchangis-plugins/exchangis-appconn/src/main/resources/init_real.sql @@ -0,0 +1,99 @@ + +select @dss_appconn_mlssId:=id from `dss_appconn` where `appconn_name` = 'mlss'; +delete from `dss_appconn_instance` where `appconn_id`=@dss_appconn_mlssId; + +delete from `dss_appconn` where `appconn_name`='mlss'; +INSERT INTO `dss_appconn` ( + `appconn_name`, + `is_user_need_init`, + `level`, + `if_iframe`, + `is_external`, + `reference`, + `class_name`, + `appconn_class_path`, + `resource`) +VALUES ( + 'mlss', + 0, + 1, + NULL, + 0, + NULL, + 'com.webank.wedatasphere.dss.appconn.mlss.MLSSAppConn', + '/appcom/Install/DSSInstall/dss-1.1.3/dss-appconns/mlss', + ''); + + +select @dss_appconn_mlssId:=id from `dss_appconn` where `appconn_name` = 'mlss'; + + + + +INSERT INTO `dss_appconn_instance`( + `appconn_id`, + `label`, + `url`, + `enhance_json`, + `homepage_uri`) +VALUES ( + @dss_appconn_mlssId, + 'DEV', + 'http://10.107.127.19:30793/', + '{"MLSS-SecretKey":"MLFLOW","MLSS-Auth-Type":"SYSTEM","MLSS-APPSignature":"MLFLOW","MLSS-BaseUrl":"http://10.107.127.19:30793","baseUrl":"http://10.107.127.19:30793","MLSS-ModelMonitoring-JAR":"/appcom/Install/quickml/qml_algo/hwenzan/qml_algo.jar"}', + 'http://10.107.127.19:30793/#/mlFlow'); + + +INSERT INTO `dss_appconn_instance`( + `appconn_id`, + `label`, + `url`, + `enhance_json`, + `homepage_uri`) +VALUES ( + @dss_appconn_mlssId, + 'PROD', + 'http://10.107.127.19:30793/', + '{"MLSS-SecretKey":"MLFLOW","MLSS-Auth-Type":"SYSTEM","MLSS-APPSignature":"MLFLOW","MLSS-BaseUrl":"http://10.107.127.19:30793","baseUrl":"http://10.107.127.19:30793","MLSS-ModelMonitoring-JAR":"/appcom/Install/quickml/qml_algo/hwenzan/qml_algo.jar"}', + 'http://10.107.127.19:30793/#/mlFlow'); + +select @dss_mlssId:=name from `dss_workflow_node` where `node_type` = 'linkis.appconn.mlss'; +delete from `dss_workflow_node_to_group` where `node_id`=@dss_mlssId; +-- +delete from `dss_workflow_node` where `node_type`='linkis.appconn.mlss'; +INSERT INTO `dss_workflow_node` ( + `icon_path`, + `node_type`, + `appconn_name`, + `submit_to_scheduler`, + `enable_copy`, + `should_creation_before_node`, + `support_jump`, + `jump_type`, + `name`) +VALUES ( + 'icons/mlss.icon', + 'linkis.appconn.mlss', + 'mlss', + 1, + 0, + 0, + 1, + 1, + 'mlss'); + +-- +select @dss_mlssId:=id from `dss_workflow_node` where `node_type` = 'linkis.appconn.mlss'; +insert into `dss_workflow_node_to_group` (`node_id`, `group_id`) values (@dss_mlssId, 8); + + +select @dss_workflow_node_id:=id from `dss_workflow_node` where `node_type` = 'linkis.appconn.mlss'; +INSERT INTO `dss_workflow_node_to_ui` (`workflow_node_id`, `ui_id`) VALUES + (@dss_mlssId, 1), + (@dss_mlssId, 4), + (@dss_mlssId, 5), + (@dss_mlssId, 6), + (@dss_mlssId, 35), + (@dss_mlssId, 36), + (@dss_mlssId, 3); + diff --git a/exchangis-plugins/exchangis-appconn/src/main/resources/log4j.properties b/exchangis-plugins/exchangis-appconn/src/main/resources/log4j.properties new file mode 100644 index 000000000..55970acab --- /dev/null +++ 
b/exchangis-plugins/exchangis-appconn/src/main/resources/log4j.properties @@ -0,0 +1,38 @@ +# +# /* +# * Copyright 2019 WeBank +# * +# * Licensed under the Apache License, Version 2.0 (the "License"); +# * you may not use this file except in compliance with the License. +# * You may obtain a copy of the License at +# * +# * http://www.apache.org/licenses/LICENSE-2.0 +# * +# * Unless required by applicable law or agreed to in writing, software +# * distributed under the License is distributed on an "AS IS" BASIS, +# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# * See the License for the specific language governing permissions and +# * limitations under the License. +# */ +# + +### set log levels ### + +log4j.rootCategory=INFO,console + +log4j.appender.console=org.apache.log4j.ConsoleAppender +log4j.appender.console.Threshold=INFO +log4j.appender.console.layout=org.apache.log4j.PatternLayout +#log4j.appender.console.layout.ConversionPattern= %d{ISO8601} %-5p (%t) [%F:%M(%L)] - %m%n +log4j.appender.console.layout.ConversionPattern= %d{ISO8601} %-5p (%t) %p %c{1} - %m%n + + +log4j.appender.com.webank.bdp.ide.core=org.apache.log4j.DailyRollingFileAppender +log4j.appender.com.webank.bdp.ide.core.Threshold=INFO +log4j.additivity.com.webank.bdp.ide.core=false +log4j.appender.com.webank.bdp.ide.core.layout=org.apache.log4j.PatternLayout +log4j.appender.com.webank.bdp.ide.core.Append=true +log4j.appender.com.webank.bdp.ide.core.File=logs/linkis.log +log4j.appender.com.webank.bdp.ide.core.layout.ConversionPattern= %d{ISO8601} %-5p (%t) [%F:%M(%L)] - %m%n + +log4j.logger.org.springframework=INFO diff --git a/exchangis-plugins/exchangis-appconn/src/main/resources/log4j2.xml b/exchangis-plugins/exchangis-appconn/src/main/resources/log4j2.xml new file mode 100644 index 000000000..6d1dd9a1f --- /dev/null +++ b/exchangis-plugins/exchangis-appconn/src/main/resources/log4j2.xml @@ -0,0 +1,40 @@ + + + + + + + + + + + + + + + + + + + + + + + diff --git a/exchangis-plugins/exchangis-appconn/src/main/resources/sqoop.icon b/exchangis-plugins/exchangis-appconn/src/main/resources/sqoop.icon new file mode 100644 index 000000000..bf047fb7b --- /dev/null +++ b/exchangis-plugins/exchangis-appconn/src/main/resources/sqoop.icon @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/exchangis-plugins/exchangis-appconn/src/test/java/com/webank/wedatasphere/exchangis/appconn/TestAppConn.java b/exchangis-plugins/exchangis-appconn/src/test/java/com/webank/wedatasphere/exchangis/appconn/TestAppConn.java new file mode 100644 index 000000000..4347f6ea9 --- /dev/null +++ b/exchangis-plugins/exchangis-appconn/src/test/java/com/webank/wedatasphere/exchangis/appconn/TestAppConn.java @@ -0,0 +1,24 @@ +package com.webank.wedatasphere.exchangis.appconn; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.google.gson.Gson; +import org.apache.linkis.server.BDPJettyServerHelper; + +import java.util.HashMap; +import java.util.Map; + +public class TestAppConn { + private final static Gson gson = new Gson(); + public static void main(String[] args) throws JsonProcessingException { + //String str="{\"method\":null,\"status\":0,\"message\":\"OK\",\"data\":{\"item\":{\"id\":\"1469200683600183298\",\"dssProjectId\":null,\"name\":\"DWExchangis06\",\"workspaceName\":\"DWExchangis06\",\"description\":\"测试不要删除\",\"tags\":\"\",\"editUsers\":\"\",\"viewUsers\":\"\",\"execUsers\":\"\",\"domain\":\"STANDALONE\"}}}"; + String str = "{\"route\":\"prod\"}"; + Map labels = new HashMap<>(); + Map 
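+ /*
+ * Parsing sketch: the sample string is deserialized into a raw Map via Linkis' shared Jackson
+ * mapper. Only the commented-out payload above carries the nested "data.item" object; the
+ * active {"route":"prod"} string has no "data" key, hence the null guard on the extraction below.
+ */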
responseMap = BDPJettyServerHelper.jacksonJson().readValue(str, Map.class); + System.out.println(responseMap); + labels.put("labels", responseMap); + System.out.println(labels); + // Guard against payloads without a "data" wrapper to avoid a NullPointerException + Map data = (Map) responseMap.get("data"); + if (data != null) { + Map item = (Map) data.get("item"); + System.out.println(item.get("id")); + } + + } +} diff --git a/exchangis-plugins/pom.xml b/exchangis-plugins/pom.xml new file mode 100644 index 000000000..2a93dfb19 --- /dev/null +++ b/exchangis-plugins/pom.xml @@ -0,0 +1,39 @@ +<?xml version="1.0" encoding="UTF-8"?> +<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> + <parent> + <artifactId>exchangis</artifactId> + <groupId>com.webank.wedatasphere.exchangis</groupId> + <version>${revision}</version> + <relativePath>../pom.xml</relativePath> + </parent> + <modelVersion>4.0.0</modelVersion> + + <artifactId>exchangis-plugins</artifactId> + <packaging>pom</packaging> + <version>${revision}</version> + <modules> + <module>exchangis-appconn</module> + </modules> + + <properties> + <maven.compiler.source>8</maven.compiler.source> + <maven.compiler.target>8</maven.compiler.target> + </properties> + + <build> + <plugins> + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-assembly-plugin</artifactId> + <version>2.2.1</version> + <executions> + <execution> + <id>assemble</id> + <phase>none</phase> + </execution> + </executions> + </plugin> + </plugins> + </build> + +</project> \ No newline at end of file diff --git a/exchangis-project/exchangis-project-entity/pom.xml b/exchangis-project/exchangis-project-entity/pom.xml new file mode 100644 index 000000000..44bba43b6 --- /dev/null +++ b/exchangis-project/exchangis-project-entity/pom.xml @@ -0,0 +1,33 @@ +<?xml version="1.0" encoding="UTF-8"?> +<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> + <parent> + <artifactId>exchangis</artifactId> + <groupId>com.webank.wedatasphere.exchangis</groupId> + <version>${revision}</version> + <relativePath>../../pom.xml</relativePath> + </parent> + <modelVersion>4.0.0</modelVersion> + + <artifactId>exchangis-project-entity</artifactId> + + <properties> + <maven.compiler.source>8</maven.compiler.source> + <maven.compiler.target>8</maven.compiler.target> + </properties> + + <dependencies> + <dependency> + <groupId>com.webank.wedatasphere.exchangis</groupId> + <artifactId>exchangis-dao</artifactId> + <version>${project.version}</version> + </dependency> + <dependency> + <groupId>org.apache.commons</groupId> + <artifactId>commons-math3</artifactId> + <version>3.6.1</version> + </dependency> + </dependencies> + +</project> \ No newline at end of file diff --git a/exchangis-project/exchangis-project-entity/src/main/java/com/webank/wedatasphere/exchangis/project/entity/domain/ExchangisProjectUser.java b/exchangis-project/exchangis-project-entity/src/main/java/com/webank/wedatasphere/exchangis/project/entity/domain/ExchangisProjectUser.java new file mode 100644 index 000000000..195741fa6 --- /dev/null +++ b/exchangis-project/exchangis-project-entity/src/main/java/com/webank/wedatasphere/exchangis/project/entity/domain/ExchangisProjectUser.java @@ -0,0 +1,76 @@ +package com.webank.wedatasphere.exchangis.project.entity.domain; + +import java.util.Date; + +/** + * @author tikazhang + * @Date 2022/5/11 10:45 + */ +public class ExchangisProjectUser { + + private Long id; + + private Long projectId; + + private String privUser; + + private int priv; + + private Date updateTime; + + public ExchangisProjectUser() { + } + + public ExchangisProjectUser(Long projectId, String privUser) { + this.projectId = projectId; + this.privUser = privUser; + } + + public ExchangisProjectUser(Long id, Long projectId, String privUser, int priv, Date updateTime) { + this.id = id; + this.projectId = projectId; + this.privUser = privUser; + this.priv = priv; + this.updateTime = updateTime; + } + + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } + + public Long getProjectId() { + return projectId; + } + + public void setProjectId(Long projectId) { + this.projectId = projectId; + } + + public String getPrivUser() { + return privUser; + } + + public void setPrivUser(String privUser) { + this.privUser = privUser; + } + + public int getPriv() { + return priv; + } + + public void setPriv(int priv) { + this.priv = priv; + } + + public Date getUpdateTime() { + return updateTime; + } + + public void setUpdateTime(Date updateTime) { + this.updateTime = updateTime; + } +} diff --git a/exchangis-project/exchangis-project-entity/src/main/java/com/webank/wedatasphere/exchangis/project/entity/domain/OperationType.java b/exchangis-project/exchangis-project-entity/src/main/java/com/webank/wedatasphere/exchangis/project/entity/domain/OperationType.java new file mode 100644 index 000000000..ab8f2ccf4 --- /dev/null +++ 
b/exchangis-project/exchangis-project-entity/src/main/java/com/webank/wedatasphere/exchangis/project/entity/domain/OperationType.java @@ -0,0 +1,27 @@ +package com.webank.wedatasphere.exchangis.project.entity.domain; + +/** + * @author jefftlin + * @create 2022-09-13 + **/ +public enum OperationType { + + /** + * project operation: + * query project + */ + PROJECT_QUERY("PROJECT_QUERY"), + + /** + * project operation: + * update project + * delete project + */ + PROJECT_ALTER("PROJECT_ALTER"); + + private String type; + + OperationType(String type) { + this.type = type; + } +} diff --git a/exchangis-project/exchangis-project-entity/src/main/java/com/webank/wedatasphere/exchangis/project/entity/domain/ProjectPageQuery.java b/exchangis-project/exchangis-project-entity/src/main/java/com/webank/wedatasphere/exchangis/project/entity/domain/ProjectPageQuery.java new file mode 100644 index 000000000..db42b536c --- /dev/null +++ b/exchangis-project/exchangis-project-entity/src/main/java/com/webank/wedatasphere/exchangis/project/entity/domain/ProjectPageQuery.java @@ -0,0 +1,42 @@ +package com.webank.wedatasphere.exchangis.project.entity.domain; + +import com.webank.wedatasphere.exchangis.common.pager.PageQuery; + +/** + * For querying page + */ +public class ProjectPageQuery extends PageQuery { + + /** + * Project name + */ + protected String name; + + protected String domain; + + protected String createUser; + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public String getDomain() { + return domain; + } + + public void setDomain(String domain) { + this.domain = domain; + } + + public String getCreateUser() { + return createUser; + } + + public void setCreateUser(String createUser) { + this.createUser = createUser; + } +} diff --git a/exchangis-project/exchangis-project-entity/src/main/java/com/webank/wedatasphere/exchangis/project/entity/entity/ExchangisProject.java b/exchangis-project/exchangis-project-entity/src/main/java/com/webank/wedatasphere/exchangis/project/entity/entity/ExchangisProject.java new file mode 100644 index 000000000..286c2ceb1 --- /dev/null +++ b/exchangis-project/exchangis-project-entity/src/main/java/com/webank/wedatasphere/exchangis/project/entity/entity/ExchangisProject.java @@ -0,0 +1,220 @@ +package com.webank.wedatasphere.exchangis.project.entity.entity; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.linkis.common.utils.JsonUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.Date; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +public class ExchangisProject { + + private static final Logger LOG = LoggerFactory.getLogger(ExchangisProject.class); + + public enum Domain { + DSS, STANDALONE + } + + /** + * Id (Long value) + */ + private Long id; + /** + * Project name + */ + private String name; + /** + * Description + */ + private String description; + + /** + * Create user + */ + private String createUser; + + /** + * Create time + */ + private Date createTime; + + /** + * Last update user + */ + private String lastUpdateUser; + + /** + * Last update time + */ + private Date lastUpdateTime; + + /** + * Labels + */ + private String labels; + + /** + * User has editing permission + */ + private String editUsers; + + /** + * User has viewing permission + */ + private String viewUsers; + + /** + * User has executing permission + */ + private String 
execUsers; + + /** + * Domain + */ + private String domain; + + /** + * Source map + */ + private Map sourceMap = new HashMap<>(); + + private String source; + + private String privUser; + + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public String getDescription() { + return description; + } + + public void setDescription(String description) { + this.description = description; + } + + public Date getCreateTime() { + return createTime; + } + + public void setCreateTime(Date createTime) { + this.createTime = createTime; + } + + public Date getLastUpdateTime() { + return lastUpdateTime; + } + + public void setLastUpdateTime(Date lastUpdateTime) { + this.lastUpdateTime = lastUpdateTime; + } + + public String getEditUsers() { + return editUsers; + } + + public void setEditUsers(String editUsers) { + this.editUsers = editUsers; + } + + public String getViewUsers() { + return viewUsers; + } + + public void setViewUsers(String viewUsers) { + this.viewUsers = viewUsers; + } + + public String getExecUsers() { + return execUsers; + } + + public void setExecUsers(String execUsers) { + this.execUsers = execUsers; + } + + public String getDomain() { return domain; } + + public void setDomain(String domain) { this.domain = domain; } + + public String getCreateUser() { + return createUser; + } + + public void setCreateUser(String createUser) { + this.createUser = createUser; + } + + public String getLastUpdateUser() { + return lastUpdateUser; + } + + public void setLastUpdateUser(String lastUpdateUser) { + this.lastUpdateUser = lastUpdateUser; + } + + public String getLabels() { + return labels; + } + + public void setLabels(String labels) { + this.labels = labels; + } + + public String getPrivUser() { + return privUser; + } + + public void setPrivUser(String privUser) { + this.privUser = privUser; + } + + // TODO use the common Json util + public Map getSourceMap() { + if (Objects.isNull(this.sourceMap) && Objects.nonNull(this.source)){ + try { + ObjectMapper mapper = JsonUtils.jackson(); + this.sourceMap = mapper.readValue(this.source, mapper.getTypeFactory() + .constructParametricType(Map.class, String.class, Object.class)); + } catch (JsonProcessingException e) { + //Ignore + LOG.warn("Cannot deserialize the source string: {}", this.source, e); + } + } + return sourceMap; + } + + public void setSourceMap(Map sourceMap) { + this.sourceMap = sourceMap; + } + + // TODO use the common Json util + public String getSource() { + if (Objects.isNull(this.source) && Objects.nonNull(this.sourceMap)){ + try { + this.source = JsonUtils.jackson().writeValueAsString(this.sourceMap); + } catch (JsonProcessingException e) { + // Ignore + LOG.warn("Cannot serialize the source map", e); + } + } + return source; + } +} diff --git a/exchangis-project/exchangis-project-entity/src/main/java/com/webank/wedatasphere/exchangis/project/entity/vo/ExchangisProjectAppVo.java b/exchangis-project/exchangis-project-entity/src/main/java/com/webank/wedatasphere/exchangis/project/entity/vo/ExchangisProjectAppVo.java new file mode 100644 index 000000000..d6ca525c5 --- /dev/null +++ b/exchangis-project/exchangis-project-entity/src/main/java/com/webank/wedatasphere/exchangis/project/entity/vo/ExchangisProjectAppVo.java @@ -0,0 +1,196 @@ +package com.webank.wedatasphere.exchangis.project.entity.vo; + +import com.fasterxml.jackson.annotation.JsonAlias; +import 
com.webank.wedatasphere.exchangis.common.validator.groups.UpdateGroup; +import com.webank.wedatasphere.exchangis.project.entity.entity.ExchangisProject; +import org.hibernate.validator.constraints.NotBlank; + +import javax.validation.constraints.NotNull; +import javax.validation.constraints.Size; +import java.util.Date; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +/** + * @author tikazhang + * @Date 2022/3/26 15:36 + */ +public class ExchangisProjectAppVo { + /** + * ID + */ + @NotNull(message = "Project id cannot be null (工程ID不能为空)", groups = UpdateGroup.class) + private Long id; + + /** + * Project name + */ + @NotBlank(message = "Project name cannot be null (工程名不能为空)") + @Size(max = 64, message = "Length of project name should be less than 64 (工程名长度不超过64)") + @JsonAlias("projectName") + private String name; + + /** + * Description + */ + @Size(max = 200, message = "Length of desc should be less than 200 (描述长度不超过200)") + private String description; + + /** + * Request domain + */ + private String domain; + + /** + * Information from the dss + */ + private Map source = new HashMap<>(); + + /** + * User has the edit permission + */ + private String editUsers; + + /** + * User has the view permission + */ + private String viewUsers; + + /** + * User has the execute permission + */ + private String execUsers; + + /** + * labels + */ + @JsonAlias("tags") + private String label; + + /** + * Create user + */ + private String createUser; + + /** + * Create time + */ + private Date createTime; + + private HashMap labels; + + public ExchangisProjectAppVo(){ + + } + + public ExchangisProjectAppVo(ExchangisProject project){ + this.setId(project.getId()); + this.setName(project.getName()); + this.setDescription(project.getDescription()); + this.setDomain(project.getDomain()); + this.setLabel(project.getLabels()); + this.setCreateUser(project.getCreateUser()); + this.setCreateTime(project.getCreateTime()); + } + public String getId() { + return id + ""; + } + + public void setId(Long id) { + this.id = id; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public String getDescription() { + return description; + } + + public void setDescription(String description) { + this.description = description; + } + + public String getDomain() { + return domain; + } + + public void setDomain(String domain) { + this.domain = domain; + } + + public Map getSource() { + return source; + } + + public void setSource(Map source) { + this.source = source; + } + + public String getEditUsers() { + return editUsers; + } + + public void setEditUsers(String editUsers) { + this.editUsers = editUsers; + } + + public String getViewUsers() { + return viewUsers; + } + + public void setViewUsers(String viewUsers) { + this.viewUsers = viewUsers; + } + + public String getExecUsers() { + return execUsers; + } + + public void setExecUsers(String execUsers) { + this.execUsers = execUsers; + } + + public String getLabel() { + return label; + } + + public void setLabel(String label) { + this.label = label; + } + + public String getCreateUser() { + return createUser; + } + + public void setCreateUser(String createUser) { + this.createUser = createUser; + } + + public Date getCreateTime() { + return createTime; + } + + public void setCreateTime(Date createTime) { + this.createTime = createTime; + } + + @Deprecated + public String getTags(){ + return Objects.nonNull(label)? 
label : ""; + } + + public HashMap getLabels() { + return labels; + } + + public void setLabels(HashMap labels) { + this.labels = labels; + } +} diff --git a/exchangis-project/exchangis-project-entity/src/main/java/com/webank/wedatasphere/exchangis/project/entity/vo/ExchangisProjectInfo.java b/exchangis-project/exchangis-project-entity/src/main/java/com/webank/wedatasphere/exchangis/project/entity/vo/ExchangisProjectInfo.java new file mode 100644 index 000000000..dc356715b --- /dev/null +++ b/exchangis-project/exchangis-project-entity/src/main/java/com/webank/wedatasphere/exchangis/project/entity/vo/ExchangisProjectInfo.java @@ -0,0 +1,275 @@ +package com.webank.wedatasphere.exchangis.project.entity.vo; + +import com.fasterxml.jackson.annotation.JsonAlias; +import com.webank.wedatasphere.exchangis.common.validator.groups.UpdateGroup; +import com.webank.wedatasphere.exchangis.project.entity.entity.ExchangisProject; +import org.hibernate.validator.constraints.NotBlank; + +import javax.validation.constraints.NotNull; +import javax.validation.constraints.Size; +import java.util.Date; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +/** + * View object + * TODO @JsonInclude(JsonInclude.Include.NON_EMPTY) + */ +//@JsonInclude(JsonInclude.Include.NON_EMPTY) +public class ExchangisProjectInfo { + /** + * ID + */ + @NotNull(message = "Project id cannot be null (工程ID不能为空)", groups = UpdateGroup.class) + private Long id; + + /** + * Project name + */ + @NotBlank(message = "Project name cannot be null (工程名不能为空)") + @Size(max = 64, message = "Length of project name should be less than 64 (工程名长度不超过64)") + @JsonAlias("projectName") + private String name; + + /** + * Description + */ + @Size(max = 200, message = "Length of desc should be less than 200 (描述长度不超过200)") + private String description; + + /** + * Request domain + */ + private String domain; + + /** + * Information from the dss + */ + private Map source = new HashMap<>(); + + /** + * User has the edit permission + */ + private String editUsers; + + /** + * User has the view permission + */ + private String viewUsers; + + /** + * User has the execute permission + */ + private String execUsers; + + /** + * labels + */ + @JsonAlias("tags") + private String label; + + /** + * Create user + */ + private String createUser; + + /** + * Create time + */ + private Date createTime; + + private Map labels; + + private String privilege; + + private String privUser; + + public ExchangisProjectInfo(){ + + } + + public ExchangisProjectInfo(ExchangisProject project){ + this.setId(project.getId()); + this.setName(project.getName()); + this.setDescription(project.getDescription()); + this.setDomain(project.getDomain()); + this.setLabel(project.getLabels()); + this.setPrivilege(""); + this.setPrivUser(project.getPrivUser()); + this.setExecUsers(project.getExecUsers()); + this.setViewUsers(project.getViewUsers()); + this.setEditUsers(project.getEditUsers()); + this.setCreateUser(project.getCreateUser()); + this.setCreateTime(project.getCreateTime()); + } + + public ExchangisProjectInfo(ExchangisProjectAppVo project){ + Map labels = new HashMap<>(); + //labels.put("route", project.getLabels()); + this.setName(project.getName()); + this.setDescription(project.getDescription()); + this.setDomain(project.getDomain()); + this.setSource(project.getSource()); + this.setPrivUser(""); + this.setEditUsers(project.getEditUsers()); + this.setViewUsers(project.getViewUsers()); + this.setExecUsers(project.getExecUsers()); + 
this.setLabel(project.getLabel()); + this.setCreateUser(project.getCreateUser()); + this.setCreateTime(project.getCreateTime()); + this.setLabels(project.getLabels()); + } + public String getId() { + return id + ""; + } + + public void setId(Long id) { + this.id = id; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public String getDescription() { + return description; + } + + public void setDescription(String description) { + this.description = description; + } + + public String getDomain() { + return domain; + } + + public void setDomain(String domain) { + this.domain = domain; + } + + public Map getSource() { + return source; + } + + public void setSource(Map source) { + this.source = source; + } + + public String getEditUsers() { + return editUsers; + } + + public void setEditUsers(String editUsers) { + this.editUsers = editUsers; + /*if(!privUser.equals(null)) { + if (this.editUsers.contains(privUser)) { + if (privilege.length() != 0) { + privilege += ",3"; + } else { + privilege += "3"; + } + } + }*/ + } + + public String getViewUsers() { + return viewUsers; + } + + public void setViewUsers(String viewUsers) { + this.viewUsers = viewUsers; + /*if(!privUser.equals(null)) { + if (this.viewUsers.contains(privUser)) { + if (privilege.length() != 0) { + privilege += ",1"; + } else { + privilege += "1"; + } + } + }*/ + } + + public String getExecUsers() { + return execUsers; + } + + public void setExecUsers(String execUsers) { + this.execUsers = execUsers; + /*if(!privUser.equals(null)) { + if (this.execUsers.contains(privUser)) { + if (privilege.length() != 0) { + privilege += ",2"; + } else { + privilege += "2"; + } + } + }*/ + } + + public String getLabel() { + return label; + } + + public void setLabel(String label) { + this.label = label; + } + + public String getCreateUser() { + return createUser; + } + + public void setCreateUser(String createUser) { + this.createUser = createUser; + /*if(!privUser.equals(null)) { + if (this.createUser.contains(privUser)) { + if (privilege.length() != 0) { + privilege += ",0"; + } else { + privilege += "0"; + } + } + }*/ + } + + public Date getCreateTime() { + return createTime; + } + + public void setCreateTime(Date createTime) { + this.createTime = createTime; + } + + @Deprecated + public String getTags(){ + return Objects.nonNull(label)? 
label : ""; + } + + public Map getLabels() { + return labels; + } + + public void setLabels(Map labels) { + this.labels = labels; + } + + public String getPrivilege() { + return privilege; + } + + public void setPrivilege(String privilege) { + this.privilege = privilege; + } + + public String getPrivUser() { + return privUser; + } + + public void setPrivUser(String privUser) { + this.privUser = privUser; + } +} diff --git a/exchangis-project/exchangis-project-entity/src/main/java/com/webank/wedatasphere/exchangis/project/entity/vo/ExchangisProjectUserVo.java b/exchangis-project/exchangis-project-entity/src/main/java/com/webank/wedatasphere/exchangis/project/entity/vo/ExchangisProjectUserVo.java new file mode 100644 index 000000000..fef892115 --- /dev/null +++ b/exchangis-project/exchangis-project-entity/src/main/java/com/webank/wedatasphere/exchangis/project/entity/vo/ExchangisProjectUserVo.java @@ -0,0 +1,56 @@ +package com.webank.wedatasphere.exchangis.project.entity.vo; + +import com.webank.wedatasphere.exchangis.project.entity.domain.ExchangisProjectUser; + +import javax.validation.constraints.NotNull; + +/** + * @author tikazhang + * @Date 2022/3/26 15:36 + */ +public class ExchangisProjectUserVo { + /** + * ID + */ + @NotNull(message = "Project id cannot be null (项目ID不能为空)") + private String projectId; + + /** + * Project name + */ + @NotNull(message = "PrivUser cannot be null (用户名不能为空)") + private String privUser; + + @NotNull(message = "Priv cannot be null (用户项目权限不能为空)") + private int priv; + + public ExchangisProjectUserVo() { + } + + public ExchangisProjectUserVo(Long projectId, String privUser) { + this.projectId = String.valueOf(projectId); + this.privUser = privUser; + } + + public ExchangisProjectUserVo(ExchangisProjectUser exchangisProjectUser) { + this.projectId = String.valueOf(exchangisProjectUser.getProjectId()); + this.privUser = exchangisProjectUser.getPrivUser(); + this.priv = exchangisProjectUser.getPriv(); + } + + public String getProjectId() { + return projectId; + } + + public void setProjectId(Long projectId) { + this.projectId = String.valueOf(projectId); + } + + public String getPrivUser() { + return privUser; + } + + public void setPrivUser(String privUser) { + this.privUser = privUser; + } +} diff --git a/exchangis-project/exchangis-project-entity/src/main/java/com/webank/wedatasphere/exchangis/project/entity/vo/ProjectQueryVo.java b/exchangis-project/exchangis-project-entity/src/main/java/com/webank/wedatasphere/exchangis/project/entity/vo/ProjectQueryVo.java new file mode 100644 index 000000000..42498c95f --- /dev/null +++ b/exchangis-project/exchangis-project-entity/src/main/java/com/webank/wedatasphere/exchangis/project/entity/vo/ProjectQueryVo.java @@ -0,0 +1,40 @@ +package com.webank.wedatasphere.exchangis.project.entity.vo; + +import com.fasterxml.jackson.annotation.JsonInclude; +import com.webank.wedatasphere.exchangis.project.entity.domain.ProjectPageQuery; + +import java.util.Map; +import java.util.Optional; + + +/** + * Query vo + */ +@JsonInclude(JsonInclude.Include.NON_EMPTY) +public class ProjectQueryVo extends ProjectPageQuery { + + public ProjectQueryVo(){ + } + + private Map labels; + + public Map getLabels() { + return labels; + } + + public void setLabels(Map labels) { + this.labels = labels; + } + + public ProjectQueryVo(String name, Integer current, Integer size){ + this.name = name; + this.current = Optional.ofNullable(current).orElse(1); + this.size = Optional.ofNullable(size).orElse(10); + } + + public ProjectQueryVo(Map labels, String 
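+    // NOTE: the paging constructor above falls back to current = 1 and size = 10 when
+    // the caller passes null, e.g. new ProjectQueryVo("demo", null, null) queries
+    // page 1 with a page size of 10 (example values illustrative).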
name){ + this.labels = labels; + this.name = name; + } + +} diff --git a/exchangis-project/exchangis-project-provider/pom.xml b/exchangis-project/exchangis-project-provider/pom.xml new file mode 100644 index 000000000..d87a4d29a --- /dev/null +++ b/exchangis-project/exchangis-project-provider/pom.xml @@ -0,0 +1,39 @@ + + + + exchangis + com.webank.wedatasphere.exchangis + ${revision} + ../../pom.xml + + 4.0.0 + + exchangis-project-provider + + + 8 + 8 + + + + + com.webank.wedatasphere.exchangis + exchangis-project-entity + ${project.version} + + + + + + + src/main/java + + **/*.xml + + + + + + \ No newline at end of file diff --git a/exchangis-project/exchangis-project-provider/src/main/java/com/webank/wedatasphere/exchangis/project/provider/mapper/ProjectMapper.java b/exchangis-project/exchangis-project-provider/src/main/java/com/webank/wedatasphere/exchangis/project/provider/mapper/ProjectMapper.java new file mode 100644 index 000000000..4c7b7f41a --- /dev/null +++ b/exchangis-project/exchangis-project-provider/src/main/java/com/webank/wedatasphere/exchangis/project/provider/mapper/ProjectMapper.java @@ -0,0 +1,83 @@ +package com.webank.wedatasphere.exchangis.project.provider.mapper; + +import com.webank.wedatasphere.exchangis.project.entity.domain.ProjectPageQuery; +import com.webank.wedatasphere.exchangis.project.entity.entity.ExchangisProject; +import org.apache.ibatis.annotations.Param; + +import java.util.List; + +/** + * Project mapper + */ +public interface ProjectMapper { + + /** + * Query the page list + * @param pageQuery page query + * @return page list + */ + List queryPageList(ProjectPageQuery pageQuery); + + /** + * Get detail by id + * @param projectId project id + * @return project entity + */ + ExchangisProject getDetailById(Long projectId); + + /** + * Get basic info by id + * @param projectId project id + * @return project entity + */ + ExchangisProject getBasicById(Long projectId); + /** + * Insert project + * @param project project entity + * @return project id + */ + long insertOne(ExchangisProject project); + + /** + * If exists + * @param projectId project id + * @param projectName project name + * @return int + */ + Integer existsOne(@Param("projectId")Long projectId, @Param("projectName")String projectName); + + /** + * Update one + * @param project project entity + */ + void updateOne(ExchangisProject project); + + /** + * Delete project + * @param projectId + */ + void deleteOne(Long projectId); + + /** + * Delete project by name + * @param name + */ + void deleteByName(String name); + + /** + * get projects by name + * @param projectName + * @return List + */ + List getDetailByName(@Param("projectName") String projectName); + + ExchangisProject selectByName(String name); + + /** + * get projects authoritis + * @param projectId + * @param loginUser + * @return List + */ + List getAuthoritis(@Param("projectId") Long projectId, @Param("loginUser") String loginUser); +} diff --git a/exchangis-project/exchangis-project-provider/src/main/java/com/webank/wedatasphere/exchangis/project/provider/mapper/ProjectUserMapper.java b/exchangis-project/exchangis-project-provider/src/main/java/com/webank/wedatasphere/exchangis/project/provider/mapper/ProjectUserMapper.java new file mode 100644 index 000000000..33cac2930 --- /dev/null +++ b/exchangis-project/exchangis-project-provider/src/main/java/com/webank/wedatasphere/exchangis/project/provider/mapper/ProjectUserMapper.java @@ -0,0 +1,37 @@ +package com.webank.wedatasphere.exchangis.project.provider.mapper; + +import 
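+// NOTE: each method on ProjectMapper is resolved by MyBatis against an XML statement
+// with the same id in ProjectMapper.xml in the same package. A hedged sketch of what
+// the existsOne statement could look like (the statement body is assumed; only the
+// table name exchangis_project_info is visible elsewhere in this patch):
+//   <select id="existsOne" resultType="java.lang.Integer">
+//     SELECT 1 FROM exchangis_project_info
+//     WHERE (#{projectId} IS NULL OR id = #{projectId}) AND name = #{projectName}
+//     LIMIT 1
+//   </select>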
com.webank.wedatasphere.exchangis.project.entity.domain.ExchangisProjectUser; +import org.apache.ibatis.annotations.Param; + +import java.util.List; + +/** + * @author tikazhang + * @Date 2022/5/11 11:17 + */ +public interface ProjectUserMapper { + + /** + * query projectUser + * @param projectUser + */ + ExchangisProjectUser queryProjectUser(ExchangisProjectUser projectUser); + + /** + * add projectUsers + * @param projectUsers + */ + void addProjectUser(@Param("projectUsers") List projectUsers); + + /** + * update projectUsers + * @param projectUsers + */ + void updateProjectUser(@Param("projectUsers") List projectUsers); + + /** + * delete projectUsers + * @param projectId + */ + void deleteProjectUser(Long projectId); +} diff --git a/exchangis-project/exchangis-project-provider/src/main/java/com/webank/wedatasphere/exchangis/project/provider/mapper/impl/ProjectMapper.xml b/exchangis-project/exchangis-project-provider/src/main/java/com/webank/wedatasphere/exchangis/project/provider/mapper/impl/ProjectMapper.xml new file mode 100644 index 000000000..dbc90c879 --- /dev/null +++ b/exchangis-project/exchangis-project-provider/src/main/java/com/webank/wedatasphere/exchangis/project/provider/mapper/impl/ProjectMapper.xml @@ -0,0 +1,137 @@ + + + + + + + + + + + + + + + + + + + + + + + + + id, name, description, create_user, create_time, last_update_user, + last_update_time, project_labels, edit_users, view_users, exec_users, domain, source + + + id, name, description, create_user, create_time, last_update_user, + last_update_time, project_labels, domain + + + + + + AND dp.name like concat('%', #{name}, '%') + + + AND dp.domain = #{domain} + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + `id` = #{id}; + + + + + + + + + + + + DELETE FROM exchangis_project_info WHERE name = #{name} + + + + + + + + + + diff --git a/exchangis-project/exchangis-project-provider/src/main/java/com/webank/wedatasphere/exchangis/project/provider/mapper/impl/ProjectUserMapper.xml b/exchangis-project/exchangis-project-provider/src/main/java/com/webank/wedatasphere/exchangis/project/provider/mapper/impl/ProjectUserMapper.xml new file mode 100644 index 000000000..f4b3709a6 --- /dev/null +++ b/exchangis-project/exchangis-project-provider/src/main/java/com/webank/wedatasphere/exchangis/project/provider/mapper/impl/ProjectUserMapper.xml @@ -0,0 +1,60 @@ + + + + + + + + + + + + + + + + id, project_id, priv_user, priv, last_update_time + + + + exchangis_project_user + + + delete from + + where project_id = #{projectId} + + + + + + + insert into + + (project_id, priv_user, priv, last_update_time) + values( + #{projectUser.projectId}, + #{projectUser.privUser}, + #{projectUser.priv}, + #{projectUser.updateTime} + ); + + + + + + UPDATE + SET + project_id = #{projectUser.projectId}, + priv_user = #{projectUser.privUser}, + priv = #{projectUser.priv}, + last_update_time = #{projectUser.updateTime} + + + diff --git a/exchangis-project/exchangis-project-server/pom.xml b/exchangis-project/exchangis-project-server/pom.xml new file mode 100644 index 000000000..d19247918 --- /dev/null +++ b/exchangis-project/exchangis-project-server/pom.xml @@ -0,0 +1,68 @@ + + + + exchangis + com.webank.wedatasphere.exchangis + ${revision} + ../../pom.xml + + 4.0.0 + + exchangis-project-server + + + 8 + 8 + + + + + com.webank.wedatasphere.exchangis + exchangis-project-provider + ${project.version} + + + com.webank.wedatasphere.exchangis + exchangis-job-server + ${project.version} + + + com.webank.wedatasphere.exchangis + exchangis-dao + 
${project.version} + + + com.webank.wedatasphere.exchangis + exchangis-job-common + ${project.version} + + + + + + + org.apache.maven.plugins + maven-deploy-plugin + + + + net.alchim31.maven + scala-maven-plugin + + + org.apache.maven.plugins + maven-jar-plugin + + + + + src/main/java + + **/*.xml + + + + + \ No newline at end of file diff --git a/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/exception/ExchangisProjectErrorException.java b/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/exception/ExchangisProjectErrorException.java new file mode 100644 index 000000000..951df9b88 --- /dev/null +++ b/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/exception/ExchangisProjectErrorException.java @@ -0,0 +1,17 @@ +package com.webank.wedatasphere.exchangis.project.server.exception; + + +import org.apache.linkis.common.exception.ErrorException; + +public class ExchangisProjectErrorException extends ErrorException { + + public ExchangisProjectErrorException(int errCode, String desc) { + super(errCode, desc); + } + + public ExchangisProjectErrorException(int errorCode, String desc, Throwable throwable){ + super(errorCode, desc); + this.initCause(throwable); + } + +} diff --git a/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/exception/ExchangisProjectExceptionCode.java b/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/exception/ExchangisProjectExceptionCode.java new file mode 100644 index 000000000..e1874cd63 --- /dev/null +++ b/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/exception/ExchangisProjectExceptionCode.java @@ -0,0 +1,20 @@ +package com.webank.wedatasphere.exchangis.project.server.exception; + +/** + * @author jefftlin + * @create 2022-09-13 + **/ +public enum ExchangisProjectExceptionCode { + + UNSUPPORTED_OPERATION(32001); + + private int code; + + public int getCode() { + return code; + } + + ExchangisProjectExceptionCode(int code) { + this.code = code; + } +} diff --git a/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/restful/ExchangisProjectRestfulApi.java b/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/restful/ExchangisProjectRestfulApi.java new file mode 100644 index 000000000..fd4c49a77 --- /dev/null +++ b/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/restful/ExchangisProjectRestfulApi.java @@ -0,0 +1,297 @@ +package com.webank.wedatasphere.exchangis.project.server.restful; + + +import com.webank.wedatasphere.exchangis.common.AuditLogUtils; +import com.webank.wedatasphere.exchangis.common.UserUtils; +import com.webank.wedatasphere.exchangis.common.enums.OperateTypeEnum; +import com.webank.wedatasphere.exchangis.common.enums.TargetTypeEnum; +import com.webank.wedatasphere.exchangis.common.pager.PageResult; +import com.webank.wedatasphere.exchangis.common.validator.groups.UpdateGroup; +import com.webank.wedatasphere.exchangis.job.server.service.JobInfoService; +import com.webank.wedatasphere.exchangis.job.vo.ExchangisJobQueryVo; +import com.webank.wedatasphere.exchangis.job.vo.ExchangisJobVo; +import 
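+// NOTE: ExchangisProjectErrorException above is a thin wrapper over Linkis's
+// ErrorException, paired with the 32xxx codes in ExchangisProjectExceptionCode.
+// Typical usage might look like this (illustrative, not part of this patch):
+//   throw new ExchangisProjectErrorException(
+//           ExchangisProjectExceptionCode.UNSUPPORTED_OPERATION.getCode(),
+//           "Unsupported operation on this project");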
com.webank.wedatasphere.exchangis.project.entity.domain.OperationType; +import com.webank.wedatasphere.exchangis.project.entity.entity.ExchangisProject; +import com.webank.wedatasphere.exchangis.project.entity.domain.ExchangisProjectUser; +import com.webank.wedatasphere.exchangis.project.server.service.ProjectService; +import com.webank.wedatasphere.exchangis.project.server.utils.ProjectAuthorityUtils; +import com.webank.wedatasphere.exchangis.project.server.utils.ExchangisProjectConfiguration; +import com.webank.wedatasphere.exchangis.project.server.utils.ExchangisProjectRestfulUtils; +import com.webank.wedatasphere.exchangis.project.entity.vo.ExchangisProjectInfo; +import com.webank.wedatasphere.exchangis.project.entity.vo.ExchangisProjectUserVo; +import com.webank.wedatasphere.exchangis.project.entity.vo.ProjectQueryVo; +import org.apache.commons.lang3.StringUtils; +import org.apache.commons.math3.util.Pair; +import org.apache.linkis.common.utils.JsonUtils; +import org.apache.linkis.server.Message; +import org.apache.linkis.server.security.SecurityFilter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.validation.BindingResult; +import org.springframework.validation.annotation.Validated; +import org.springframework.web.bind.annotation.*; + +import javax.annotation.Resource; +import javax.servlet.http.HttpServletRequest; +import javax.validation.groups.Default; +import java.util.Objects; +import java.util.Optional; + + +/** + * This is the restful class for exchangis project + */ +@RestController +@RequestMapping(value = "/dss/exchangis/main", produces = {"application/json;charset=utf-8"}) +public class ExchangisProjectRestfulApi { + private static final Logger LOG = LoggerFactory.getLogger(ExchangisProjectRestfulApi.class); + + /** + * Project service + */ + @Resource + private ProjectService projectService; + + /** + * JobInfo service + */ + @Resource + private JobInfoService jobInfoService; + + /** + * Project query + * @param request http request + * @param queryVo query vo + * @param current current page + * @param size size + * @param name name + * @return message + */ + @RequestMapping( value = "projects", method = RequestMethod.POST) + public Message queryProjects(HttpServletRequest request, + @RequestBody ProjectQueryVo queryVo, + @RequestParam(value = "current", required = false) Integer current, + @RequestParam(value = "size", required = false) Integer size, + @RequestParam(value = "name", required = false) String name) { + String username = UserUtils.getLoginUser(request); + if (StringUtils.isNotBlank(name)) { + name = name.replaceAll("_", "/_"); + } + Optional.ofNullable(current).ifPresent(queryVo::setCurrent); + Optional.ofNullable(size).ifPresent(queryVo::setSize); + Optional.ofNullable(name).ifPresent(queryVo::setName); + queryVo.setCreateUser(username); + try { + PageResult pageResult = projectService.queryProjects(queryVo); + return pageResult.toMessage(); + } catch (Exception t) { + LOG.error("Failed to query project list for user {}", username, t); + return Message.error("Failed to query project list (获取项目列表失败)"); + } + } + + /** + * Project query detail by id + * @param request http request + * @param projectId project id + * @return + */ + @RequestMapping( value = "projects/{projectId:\\d+}", method = RequestMethod.GET) + public Message queryProjectDetail(HttpServletRequest request, + @PathVariable("projectId") Long projectId) { + String username = SecurityFilter.getLoginUsername(request); + try { + ExchangisProjectInfo project 
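+            // NOTE: queryProjects above rewrites "_" to "/_" before querying because "_"
+            // is a single-character wildcard in SQL LIKE; this only works if the mapper
+            // pairs the LIKE with an ESCAPE '/' clause, e.g. (assumed; the ESCAPE clause
+            // is not visible in the mapper fragment above):
+            //   AND dp.name LIKE CONCAT('%', #{name}, '%') ESCAPE '/'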
= projectService.getProjectDetailById(projectId); + if (Objects.isNull(project)){ + return Message.error("Not found the project (找不到对应项目)"); + } + if (!ProjectAuthorityUtils.hasProjectAuthority(username, project, OperationType.PROJECT_QUERY)){ + return Message.error("You have no permission to query (没有项目查看权限)"); + } + return Message.ok().data("item", project); + } catch (Exception t) { + LOG.error("failed to get project detail for user {}", username, t); + return Message.error("Fail to get project detail (获取项目详情失败)"); + } + } + + /** + * Create project + * @param request request + * @param projectVo project vo + * @param result result + * @return + */ + @RequestMapping(value = "createProject", method = RequestMethod.POST) + public Message createProject(@Validated @RequestBody ExchangisProjectInfo projectVo, + BindingResult result, HttpServletRequest request) { + if (ExchangisProjectConfiguration.LIMIT_INTERFACE.getValue()) { + return Message.error("You have no permission to create (没有项目创建权限)"); + } + if (result.hasErrors()){ + return Message.error(result.getFieldErrors().get(0).getDefaultMessage()); + } + + String username = UserUtils.getLoginUser(request); + String oringinUser = SecurityFilter.getLoginUsername(request); + if (StringUtils.isBlank(projectVo.getViewUsers()) || !StringUtils.contains(projectVo.getViewUsers(), username)) { + projectVo.setViewUsers(username + "," + projectVo.getViewUsers()); + } + if (StringUtils.isBlank(projectVo.getEditUsers()) || !StringUtils.contains(projectVo.getEditUsers(), username)) { + projectVo.setEditUsers(username + "," + projectVo.getEditUsers()); + } + if (StringUtils.isBlank(projectVo.getExecUsers()) || !StringUtils.contains(projectVo.getExecUsers(), username)) { + projectVo.setExecUsers(username + "," + projectVo.getExecUsers()); + } + + try { + if (projectService.existsProject(null, projectVo.getName())){ + return Message.error("Have the same name project (存在同名项目)"); + } + LOG.info("CreateProject vo: {}, userName: {}", JsonUtils.jackson().writeValueAsString(projectVo), username); + long projectId = projectService.createProject(projectVo, username); + AuditLogUtils.printLog(oringinUser, username, TargetTypeEnum.PROJECT, String.valueOf(projectId), "Project name is: " + projectVo.getName(), OperateTypeEnum.CREATE, request); + return ExchangisProjectRestfulUtils.dealOk("创建项目成功", + new Pair<>("projectName", projectVo.getName()), + new Pair<>("projectId", projectId)); + } catch (Exception t) { + LOG.error("Failed to create project for user {}", username, t); + return Message.error("Fail to create project (创建项目失败)"); + } + } + /** + * check project name + * @param request http request + * @param name project name + * @return + */ + @RequestMapping( value = "/check/{name}", method = RequestMethod.POST) + public Message getProjectByName(HttpServletRequest request, @PathVariable("name") String name) { + String username = UserUtils.getLoginUser(request); + try { + ExchangisProjectInfo projectInfo = projectService.selectByName(name); + return ExchangisProjectRestfulUtils.dealOk("根据名字获取项目成功", + new Pair<>("projectInfo",projectInfo)); + } catch (Exception t) { + LOG.error("Failed to get project for user {}", username, t); + return Message.error("Failed to get project (根据名字获取项目失败)"); + } + } + + + /** + * Update project + * @param request request + * @param projectVo project vo + * @return + */ + @RequestMapping( value = "updateProject", method = RequestMethod.PUT) + public Message updateProject(@Validated({UpdateGroup.class, Default.class}) @RequestBody 
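+            // NOTE: the membership checks above rely on StringUtils.contains, which is a
+            // substring match: user "bob" would be considered present in "bobby,alice".
+            // An exact-match sketch over the comma-separated list (illustrative only):
+            //   boolean present = Arrays.asList(StringUtils.split(users, ",")).contains(username);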
ExchangisProjectInfo projectVo + , BindingResult result, HttpServletRequest request) { + if (ExchangisProjectConfiguration.LIMIT_INTERFACE.getValue()) { + return Message.error("You have no permission to update (没有项目更新权限)"); + } + if (result.hasErrors()){ + return Message.error(result.getFieldErrors().get(0).getDefaultMessage()); + } + String oringinUser = SecurityFilter.getLoginUsername(request); + String username = UserUtils.getLoginUser(request); + //String username = SecurityFilter.getLoginUsername(request); + if (StringUtils.isBlank(projectVo.getViewUsers()) || !StringUtils.contains(projectVo.getViewUsers(), username)) { + projectVo.setViewUsers(username + "," + projectVo.getViewUsers()); + } + if (StringUtils.isBlank(projectVo.getEditUsers()) || !StringUtils.contains(projectVo.getEditUsers(), username)) { + projectVo.setEditUsers(username + "," + projectVo.getEditUsers()); + } + if (StringUtils.isBlank(projectVo.getExecUsers()) || !StringUtils.contains(projectVo.getExecUsers(), username)) { + projectVo.setExecUsers(username + "," + projectVo.getExecUsers()); + } + + try { + ExchangisProjectInfo projectStored = projectService.getProjectDetailById(Long.valueOf(projectVo.getId())); + if (!ProjectAuthorityUtils.hasProjectAuthority(username, projectStored, OperationType.PROJECT_ALTER)) { + return Message.error("You have no permission to update (没有项目的更新权限)"); + } + + String domain = projectStored.getDomain(); + if (StringUtils.isNotBlank(domain) && !ExchangisProject.Domain.STANDALONE.name() + .equalsIgnoreCase(domain)){ + return Message.error("Cannot update the outer project (无法更新来自 " + domain + " 的外部项目)"); + } + + LOG.info("UpdateProject vo: {}, userName: {}", JsonUtils.jackson().writeValueAsString(projectVo), username); + projectService.updateProject(projectVo, username); + AuditLogUtils.printLog(oringinUser, username, TargetTypeEnum.PROJECT, projectVo.getId(), "Project name is: " + projectVo.getName(), OperateTypeEnum.UPDATE, request); + return ExchangisProjectRestfulUtils.dealOk("更新项目成功", + new Pair<>("projectName", projectVo.getName()), + new Pair<>("projectId", projectVo.getId())); + } catch (Exception t) { + LOG.error("Failed to update project for user {}", username, t); + return Message.error("Fail to update project (更新项目失败)"); + } + } + + /** + * Delete project + * @param request http request + * @param id project id + * @return + */ + @DeleteMapping( value = "/projects/{id:\\d+}") + public Message deleteProject(HttpServletRequest request, @PathVariable("id") Long id) { + if (ExchangisProjectConfiguration.LIMIT_INTERFACE.getValue()) { + return Message.error("You have no permission to update (没有编辑权限,无法删除项目)"); + } + String oringinUser = SecurityFilter.getLoginUsername(request); + String username = UserUtils.getLoginUser(request); + try { + ExchangisProjectInfo projectInfo = projectService.getProjectDetailById(id); + if (!ProjectAuthorityUtils.hasProjectAuthority(username, projectInfo, OperationType.PROJECT_ALTER)) { + return Message.error("You have no permission to delete (没有权限删除项目!)"); + } + + String domain = projectInfo.getDomain(); + if (StringUtils.isNotBlank(domain) && !ExchangisProject.Domain.STANDALONE.name() + .equalsIgnoreCase(domain)){ + return Message.error("Cannot delete the outer project (无法删除来自 " + domain + " 的外部项目)"); + } + + // 校验是否有任务 + ExchangisJobQueryVo queryVo = new ExchangisJobQueryVo(id, null, null); + PageResult exchangisJobVoPageResult = jobInfoService.queryJobList(queryVo); + if (Objects.nonNull(exchangisJobVoPageResult) && 
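+                    // NOTE: this guard refuses to delete a project while it still has sub
+                    // jobs; the null/size checks could equally be written as
+                    // CollectionUtils.isNotEmpty(exchangisJobVoPageResult.getList()),
+                    // if commons-collections is on the classpath.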
Objects.nonNull(exchangisJobVoPageResult.getList()) + && exchangisJobVoPageResult.getList().size() > 0) { + return Message.error("Jobs already exist under this project and the project cannot be deleted (该项目下已存在子任务,无法删除)"); + } + + projectService.deleteProject(id); + AuditLogUtils.printLog(oringinUser, username, TargetTypeEnum.PROJECT, id.toString(), "Project", OperateTypeEnum.DELETE, request); + return ExchangisProjectRestfulUtils.dealOk("删除项目成功"); + } catch (Exception t) { + LOG.error("Failed to delete project for user {}", username, t); + return Message.error("Failed to delete project (删除项目失败)"); + } + } + + /** + * get project permission + * @param request http request + * @param id project id + * @return + */ + @RequestMapping( value = "/getProjectPermission/{id:\\d+}", method = RequestMethod.GET) + public Message getProjectPermission(HttpServletRequest request, @PathVariable("id") Long id) { + String username = SecurityFilter.getLoginUsername(request); + try { + ExchangisProjectUserVo exchangisProjectUserVo = new ExchangisProjectUserVo(id, username); + ExchangisProjectUser exchangisProjectUser = projectService.queryProjectUser(exchangisProjectUserVo); + + return ExchangisProjectRestfulUtils.dealOk("根据项目ID和用户获取项目权限信息成功", + new Pair<>("exchangisProjectUser", new ExchangisProjectUserVo(exchangisProjectUser))); + } catch (Exception t) { + LOG.error("Failed to get exchangisProjectUser for project {} and privUser {}", id, username); + return Message.error("Failed to get project (根据项目ID和用户获取项目权限信息失败)"); + } + } + +} diff --git a/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/restful/external/ExchangisProjectDssAppConnRestfulApi.java b/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/restful/external/ExchangisProjectDssAppConnRestfulApi.java new file mode 100644 index 000000000..2e8457219 --- /dev/null +++ b/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/restful/external/ExchangisProjectDssAppConnRestfulApi.java @@ -0,0 +1,179 @@ +package com.webank.wedatasphere.exchangis.project.server.restful.external; + +import com.webank.wedatasphere.exchangis.common.AuditLogUtils; +import com.webank.wedatasphere.exchangis.common.UserUtils; +import com.webank.wedatasphere.exchangis.common.enums.OperateTypeEnum; +import com.webank.wedatasphere.exchangis.common.enums.TargetTypeEnum; +import com.webank.wedatasphere.exchangis.common.pager.PageResult; +import com.webank.wedatasphere.exchangis.common.validator.groups.UpdateGroup; +import com.webank.wedatasphere.exchangis.job.server.service.JobInfoService; +import com.webank.wedatasphere.exchangis.job.vo.ExchangisJobQueryVo; +import com.webank.wedatasphere.exchangis.job.vo.ExchangisJobVo; +import com.webank.wedatasphere.exchangis.project.entity.domain.OperationType; +import com.webank.wedatasphere.exchangis.project.entity.vo.ExchangisProjectAppVo; +import com.webank.wedatasphere.exchangis.project.entity.vo.ExchangisProjectInfo; +import com.webank.wedatasphere.exchangis.project.server.service.ProjectService; +import com.webank.wedatasphere.exchangis.project.server.utils.ExchangisProjectRestfulUtils; +import com.webank.wedatasphere.exchangis.project.server.utils.ProjectAuthorityUtils; +import org.apache.commons.lang3.StringUtils; +import org.apache.commons.math3.util.Pair; +import org.apache.linkis.common.utils.JsonUtils; +import org.apache.linkis.server.Message; +import 
org.apache.linkis.server.security.SecurityFilter;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.validation.BindingResult;
+import org.springframework.validation.annotation.Validated;
+import org.springframework.web.bind.annotation.*;
+
+import javax.annotation.Resource;
+import javax.servlet.http.HttpServletRequest;
+import javax.validation.groups.Default;
+import java.util.Objects;
+
+/**
+ * Restful class for dss project
+ */
+@RestController
+@RequestMapping(value = "/dss/exchangis/main/appProject", produces = {"application/json;charset=utf-8"})
+public class ExchangisProjectDssAppConnRestfulApi {
+    private static final Logger LOG = LoggerFactory.getLogger(ExchangisProjectDssAppConnRestfulApi.class);
+
+    /**
+     * Project service
+     */
+    @Resource
+    private ProjectService projectService;
+
+    /**
+     * JobInfo service
+     */
+    @Resource
+    private JobInfoService jobInfoService;
+
+    @RequestMapping(value = "", method = RequestMethod.POST)
+    public Message createProject(@Validated @RequestBody ExchangisProjectAppVo project,
+                                 BindingResult result, HttpServletRequest request){
+        ExchangisProjectInfo projectVo = new ExchangisProjectInfo(project);
+        if (result.hasErrors()){
+            return Message.error(result.getFieldErrors().get(0).getDefaultMessage());
+        }
+
+        String oringinUser = SecurityFilter.getLoginUsername(request);
+        String username = UserUtils.getLoginUser(request);
+        // Make sure the creator always appears in the privilege lists; join with a comma
+        // to keep the same separator convention as ExchangisProjectRestfulApi
+        if (StringUtils.isBlank(projectVo.getViewUsers()) || !StringUtils.contains(projectVo.getViewUsers(), username)) {
+            projectVo.setViewUsers(username + "," + projectVo.getViewUsers());
+        }
+        if (StringUtils.isBlank(projectVo.getEditUsers()) || !StringUtils.contains(projectVo.getEditUsers(), username)) {
+            projectVo.setEditUsers(username + "," + projectVo.getEditUsers());
+        }
+        if (StringUtils.isBlank(projectVo.getExecUsers()) || !StringUtils.contains(projectVo.getExecUsers(), username)) {
+            projectVo.setExecUsers(username + "," + projectVo.getExecUsers());
+        }
+
+        try {
+            LOG.info("CreateProject from DSS AppConn, vo: {}, userName: {}", JsonUtils.jackson().writeValueAsString(projectVo), username);
+            if (projectService.existsProject(null, projectVo.getName())){
+                return Message.error("A project with the same name already exists (存在同名工程)");
+            }
+            long newProjectId = projectService.createProject(projectVo, username);
+            String projectId = String.valueOf(newProjectId);
+            AuditLogUtils.printLog(oringinUser, username, TargetTypeEnum.PROJECT, projectId, "Project name is: " + projectVo.getName(), OperateTypeEnum.CREATE, request);
+            return ExchangisProjectRestfulUtils.dealOk("创建工程成功",
+                    new Pair<>("projectName", projectVo.getName()),
+                    new Pair<>("projectId", projectId));
+        } catch (Exception t) {
+            LOG.error("Failed to create project for user {} from DSS", username, t);
+            return Message.error("Failed to create project from DSS (创建工程失败)");
+        }
+    }
+
+
+    /**
+     * Update project
+     * @param request request
+     * @param projectVo project vo
+     * @return
+     */
+    @RequestMapping( value = "/{id:\\d+}", method = RequestMethod.PUT)
+    public Message updateProject(@PathVariable("id") Long id, @Validated({UpdateGroup.class, Default.class}) @RequestBody ExchangisProjectInfo projectVo
+            , BindingResult result, HttpServletRequest request) {
+        if (result.hasErrors()){
+            return Message.error(result.getFieldErrors().get(0).getDefaultMessage());
+        }
+        String username = UserUtils.getLoginUser(request);
+        String oringinUser = SecurityFilter.getLoginUsername(request);
+        try {
+            ExchangisProjectInfo projectStored = 
projectService.getProjectDetailById(Long.valueOf(projectVo.getId())); + if (!ProjectAuthorityUtils.hasProjectAuthority(username, projectStored, OperationType.PROJECT_ALTER)) { + return Message.error("You have no permission to update (没有项目的更新权限)"); + } + + LOG.info("UpdateProject vo: {}, userName: {}", JsonUtils.jackson().writeValueAsString(projectVo), username); + projectService.updateProject(projectVo, username); + AuditLogUtils.printLog(oringinUser, username, TargetTypeEnum.PROJECT, id.toString(), "Project name is: " + projectVo.getName(), OperateTypeEnum.UPDATE, request); + return ExchangisProjectRestfulUtils.dealOk("更新工程成功", + new Pair<>("projectName", projectVo.getName()), + new Pair<>("projectId", projectVo.getId())); + } catch (Exception t) { + LOG.error("Failed to update project for user {}", username, t); + return Message.error("Fail to update project (更新工程失败)"); + } + } + + /** + * Delete project + * @param request http request + * @param name project name + * @return + */ + @RequestMapping( value = "/{name}", method = RequestMethod.POST) + public Message deleteProject(HttpServletRequest request, @PathVariable("name") String name) { + String username = UserUtils.getLoginUser(request); + String oringinUser = SecurityFilter.getLoginUsername(request); + try { + ExchangisProjectInfo projectInfo = projectService.selectByName(name); + if (!ProjectAuthorityUtils.hasProjectAuthority(username, projectInfo, OperationType.PROJECT_ALTER)) { + return Message.error("You have no permission to delete (删除项目失败)"); + } + + // 校验是否有任务 + ExchangisJobQueryVo queryVo = new ExchangisJobQueryVo(Long.parseLong(projectInfo.getId()), null, null); + PageResult exchangisJobVoPageResult = jobInfoService.queryJobList(queryVo); + if (Objects.nonNull(exchangisJobVoPageResult) && Objects.nonNull(exchangisJobVoPageResult.getList()) + && exchangisJobVoPageResult.getList().size() > 0) { + return Message.error("Jobs already exist under this project and the project cannot be deleted (该项目下已存在子任务,无法删除)"); + } + + projectService.deleteProjectByName(name); + AuditLogUtils.printLog(oringinUser, username, TargetTypeEnum.PROJECT, "", "Project name is: " + name, OperateTypeEnum.DELETE, request); + return ExchangisProjectRestfulUtils.dealOk("删除工程成功"); + } catch (Exception t) { + LOG.error("Failed to delete project for user {}", username, t); + return Message.error("Failed to delete project (删除工程失败)"); + } + + } + + + /** + * check project name + * @param request http request + * @param name project name + * @return + */ + @RequestMapping( value = "/check/{name}", method = RequestMethod.POST) + public Message getProjectByName(HttpServletRequest request, @PathVariable("name") String name) { + String username = UserUtils.getLoginUser(request); + try { + ExchangisProjectInfo projectInfo = projectService.selectByName(name); + return ExchangisProjectRestfulUtils.dealOk("根据名字获取工程成功", + new Pair<>("projectInfo",projectInfo)); + } catch (Exception t) { + LOG.error("Failed to delete project for user {}", username, t); + return Message.error("Failed to delete project (根据名字获取工程失败)"); + } + } + +} diff --git a/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/restful/external/ExchangisProjectJobDssAppConnRestfulApi.java b/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/restful/external/ExchangisProjectJobDssAppConnRestfulApi.java new file mode 100644 index 000000000..fc23ec0e0 --- /dev/null +++ 
b/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/restful/external/ExchangisProjectJobDssAppConnRestfulApi.java @@ -0,0 +1,109 @@ +package com.webank.wedatasphere.exchangis.project.server.restful.external; + +import com.webank.wedatasphere.exchangis.common.AuditLogUtils; +import com.webank.wedatasphere.exchangis.common.UserUtils; +import com.webank.wedatasphere.exchangis.common.enums.OperateTypeEnum; +import com.webank.wedatasphere.exchangis.common.enums.TargetTypeEnum; +import com.webank.wedatasphere.exchangis.job.server.exception.ExchangisJobServerException; +import com.webank.wedatasphere.exchangis.project.provider.mapper.ProjectMapper; +import com.webank.wedatasphere.exchangis.project.server.service.ProjectCopyService; +import com.webank.wedatasphere.exchangis.project.server.service.impl.ProjectExportServiceImpl; +import com.webank.wedatasphere.exchangis.project.server.service.impl.ProjectImportServerImpl; +import org.apache.linkis.server.Message; +import org.apache.linkis.server.security.SecurityFilter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.web.bind.annotation.*; + +import javax.annotation.Resource; +import javax.servlet.http.HttpServletRequest; +import javax.ws.rs.core.Context; +import java.rmi.ServerException; +import java.util.Map; + +/** + * Define to support the app conn, in order to distinguish from the inner api + */ +@RestController +@RequestMapping(value = "/dss/exchangis/main/appJob", produces = {"application/json;charset=utf-8"}) +public class ExchangisProjectJobDssAppConnRestfulApi { + + private static final Logger LOG = LoggerFactory.getLogger(ExchangisProjectJobDssAppConnRestfulApi.class); + + @Resource + private ProjectImportServerImpl projectImportServer; + + @Resource + private ProjectExportServiceImpl projectExportService; + + @Resource + private ProjectCopyService projectCopyService; + + @Autowired + private ProjectMapper projectMapper; + + @RequestMapping(value = "/import", method = RequestMethod.POST) + public Message importJob(@Context HttpServletRequest request, @RequestBody Map params) throws ServerException, ExchangisJobServerException { + + Message response = null; + String userName = UserUtils.getLoginUser(request); + String oringinUser = SecurityFilter.getLoginUsername(request); + try { + LOG.info("param: {}", params); + /*if (!hasAuthority(userName, jobInfoService.getJob(((Integer) params.get("sqoopIds")).longValue(), true))) { + return Message.error("You have no permission to import (没有导入权限)"); + }*/ + response = projectImportServer.importProject(request, params); + LOG.info("import job success"); + } catch (ExchangisJobServerException e) { + String message = "Fail import job [ id: " + params + "] (导入任务失败)"; + LOG.error(message, e); + response = Message.error(message); + } + AuditLogUtils.printLog(oringinUser, userName, TargetTypeEnum.JOB, "", "Export parameter is: " + params.toString(), OperateTypeEnum.IMPORT, request); + return response; + + } + + @RequestMapping(value = "/export", method = RequestMethod.POST) + public Message exportJob(@Context HttpServletRequest request, @RequestBody Map params) throws ServerException, ExchangisJobServerException { + String userName = UserUtils.getLoginUser(request); + + LOG.info("export function params: {}", params); + String oringinUser = SecurityFilter.getLoginUsername(request); + Message response = null; + try { + /*if (!hasAuthority(userName, 
jobInfoService.getJob(((Integer) params.get("sqoopIds")).longValue(), true))) { + return Message.error("You have no permission to export (没有导出权限)"); + }*/ + response = projectExportService.exportProject(params, userName, request); + LOG.info("export job success"); + } catch (Exception e) { + String message = "Fail Export job [ id: " + params + "] (导出任务失败)"; + LOG.error(message, e); + response = Message.error(message); + } + AuditLogUtils.printLog(oringinUser, userName, TargetTypeEnum.JOB, "", "Export parameter is: " + params.toString(), OperateTypeEnum.EXPORT, request); + return response; + } + + @RequestMapping(value = "/copy", method = RequestMethod.POST) + public Message copy(@Context HttpServletRequest request, @RequestBody Map params) throws ServerException { + String userName = UserUtils.getLoginUser(request); + String oringinUser = SecurityFilter.getLoginUsername(request); + + LOG.info("copy function params: {}", params); + Message response = null; + try { + response = projectCopyService.copy(params, userName, request); + LOG.info("copy node success"); + } catch (Exception e) { + String message = "Fail Copy project [ id: " + params + "] (导出任务失败)"; + LOG.error(message, e); + response = Message.error(message); + } + AuditLogUtils.printLog(oringinUser, userName, TargetTypeEnum.JOB, "", "Copy parameter is: " + params.toString(), OperateTypeEnum.COPY, request); + return response; + } +} diff --git a/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/service/ProjectCopyService.java b/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/service/ProjectCopyService.java new file mode 100644 index 000000000..9260d02bf --- /dev/null +++ b/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/service/ProjectCopyService.java @@ -0,0 +1,21 @@ +package com.webank.wedatasphere.exchangis.project.server.service; + +import com.webank.wedatasphere.exchangis.job.exception.ExchangisJobException; +import com.webank.wedatasphere.exchangis.job.server.exception.ExchangisJobServerException; +import org.apache.linkis.server.Message; + +import javax.servlet.http.HttpServletRequest; +import java.util.Map; + +/** + * @author tikazhang + * @Date 2022/4/24 21:11 + */ +public interface ProjectCopyService { + + /** + * Copy node + * @param + */ + Message copy(Map params, String UserName, HttpServletRequest request) throws ExchangisJobException, ExchangisJobServerException; +} diff --git a/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/service/ProjectExportService.java b/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/service/ProjectExportService.java new file mode 100644 index 000000000..81fddc9eb --- /dev/null +++ b/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/service/ProjectExportService.java @@ -0,0 +1,30 @@ +package com.webank.wedatasphere.exchangis.project.server.service; + +import com.webank.wedatasphere.exchangis.job.server.dto.ExportedProject; +import com.webank.wedatasphere.exchangis.job.server.exception.ExchangisJobServerException; +import org.apache.linkis.server.Message; + +import javax.servlet.http.HttpServletRequest; +import java.rmi.ServerException; +import java.util.Map; +import java.util.Set; + +/** + * @author tikazhang + * @Date 2022/3/15 9:30 + */ +public interface 
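+// NOTE: the appJob import/export/copy endpoints and the services below all exchange a
+// loose Map payload. From the handler code, the expected shape appears to be (keys taken
+// from this patch, values illustrative):
+//   {
+//     "projectId": 123, "partial": true,
+//     "sqoopIds": "1,2,3", "dataXIds": "4,5",
+//     "projectVersion": "v1", "flowVersion": "v00001",
+//     "resourceId": "...", "version": "..."
+//   }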
ProjectExportService { + + /** + * Export exchangis job to BML. + * + * @param username params + * @return + */ + Message exportProject(Map params, String username, HttpServletRequest request) throws ExchangisJobServerException, ServerException; + + ExportedProject export(Long projectId, Map> moduleIdsMap, boolean partial, HttpServletRequest request) throws ExchangisJobServerException; + + Map> getModuleIdsMap(Map params); + +} diff --git a/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/service/ProjectImportService.java b/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/service/ProjectImportService.java new file mode 100644 index 000000000..34b1a2219 --- /dev/null +++ b/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/service/ProjectImportService.java @@ -0,0 +1,19 @@ +package com.webank.wedatasphere.exchangis.project.server.service; + +import com.webank.wedatasphere.exchangis.job.server.dto.IdCatalog; +import com.webank.wedatasphere.exchangis.job.server.exception.ExchangisJobServerException; +import org.apache.linkis.server.Message; + +import javax.servlet.http.HttpServletRequest; +import java.rmi.ServerException; +import java.util.Map; + +/** + * @author tikazhang + * @Date 2022/3/15 10:01 + */ +public interface ProjectImportService { + Message importProject(HttpServletRequest req, Map params) throws ExchangisJobServerException, ServerException; + + IdCatalog importOpt(String projectJson, Long projectId, String versionSuffix, String userName, String importType) throws ExchangisJobServerException; +} diff --git a/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/service/ProjectService.java b/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/service/ProjectService.java new file mode 100644 index 000000000..9c6556a34 --- /dev/null +++ b/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/service/ProjectService.java @@ -0,0 +1,70 @@ +package com.webank.wedatasphere.exchangis.project.server.service; + + +import com.webank.wedatasphere.exchangis.common.pager.PageResult; +import com.webank.wedatasphere.exchangis.job.exception.ExchangisJobException; +import com.webank.wedatasphere.exchangis.project.entity.domain.ExchangisProjectUser; +import com.webank.wedatasphere.exchangis.project.entity.vo.ExchangisProjectInfo; +import com.webank.wedatasphere.exchangis.project.entity.vo.ExchangisProjectUserVo; +import com.webank.wedatasphere.exchangis.project.entity.vo.ProjectQueryVo; + +/** + * Project query + */ +public interface ProjectService { + + /** + * Create project + * @param projectVo project vo + * @return new project id + */ + long createProject(ExchangisProjectInfo projectVo, String userName); + + /** + * Exists project + * @param projectId project id + * @param projectName project name + * @return boolean + */ + boolean existsProject(Long projectId, String projectName); + + /** + * Update project + * @param projectVo project vo + * @param userName userName + */ + void updateProject(ExchangisProjectInfo projectVo, String userName); + + /** + * Query the page result + * @param queryVo result vo + * @return page result + */ + PageResult queryProjects(ProjectQueryVo queryVo); + + /** + * Delete project + * @param projectId project id + */ + void deleteProject(Long 
projectId) throws ExchangisJobException; + + /** + * Delete project by name + * @param name + */ + void deleteProjectByName(String name) throws ExchangisJobException; + + /** + * Query the project detail + * @param projectId project id + * @return project vo + */ + ExchangisProjectInfo getProjectDetailById(Long projectId); + + ExchangisProjectInfo getProjectById(Long projectId); + + ExchangisProjectInfo selectByName(String name); + + ExchangisProjectUser queryProjectUser(ExchangisProjectUserVo exchangisProjectUserVo); + +} diff --git a/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/service/impl/ProjectCopyServiceImpl.java b/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/service/impl/ProjectCopyServiceImpl.java new file mode 100644 index 000000000..58bb7e94a --- /dev/null +++ b/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/service/impl/ProjectCopyServiceImpl.java @@ -0,0 +1,94 @@ +package com.webank.wedatasphere.exchangis.project.server.service.impl; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.google.common.collect.Lists; +import com.webank.wedatasphere.exchangis.job.domain.ExchangisJobEntity; +import com.webank.wedatasphere.exchangis.job.exception.ExchangisJobException; +import com.webank.wedatasphere.exchangis.job.server.dto.ExportedProject; +import com.webank.wedatasphere.exchangis.job.server.dto.IdCatalog; +import com.webank.wedatasphere.exchangis.job.server.exception.ExchangisJobServerException; +import com.webank.wedatasphere.exchangis.job.server.mapper.ExchangisJobEntityDao; +import com.webank.wedatasphere.exchangis.job.server.restful.external.ModuleEnum; +import com.webank.wedatasphere.exchangis.project.server.service.ProjectCopyService; +import com.webank.wedatasphere.exchangis.project.server.service.ProjectImportService; +import com.webank.wedatasphere.exchangis.job.vo.ExchangisJobVo; +import org.apache.commons.lang.StringUtils; +import org.apache.linkis.server.BDPJettyServerHelper; +import org.apache.linkis.server.Message; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; + +import javax.annotation.Resource; +import javax.servlet.http.HttpServletRequest; +import java.util.*; + +/** + * @author tikazhang + * @Date 2022/4/24 21:15 + */ +@Service +public class ProjectCopyServiceImpl implements ProjectCopyService { + private static final Logger LOG = LoggerFactory.getLogger(ProjectCopyServiceImpl.class); + + @Autowired + private ProjectImportService projectImportService; + + @Autowired + private ProjectExportServiceImpl projectExportService; + + @Autowired + private ProjectImportServerImpl projectImportServer; + + @Resource + private ExchangisJobEntityDao jobEntityDao; + + @Override + public Message copy(Map params, String userName, HttpServletRequest request) throws ExchangisJobException, ExchangisJobServerException { + LOG.info("begin to copy in project params is {}", params); + //Long projectId = Long.parseLong(params.get("projectId").toString()); + Boolean partial = (Boolean) params.get("partial"); + Map> moduleIdsMap = projectExportService.getModuleIdsMap(params); + + Set longs = moduleIdsMap.get(Objects.isNull(params.get("dataXIds")) ? 
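+                // NOTE: when no explicit projectId is passed, copy() derives the project
+                // from the first selected job id below; this assumes at least one sqoop
+                // or datax id is present, otherwise list1.get(0) throws.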
ModuleEnum.SQOOP_IDS.getName() : ModuleEnum.DATAX_IDS.getName());
+        List<Long> list1 = new ArrayList<>(longs);
+        ExchangisJobEntity exchangisJob = this.jobEntityDao.getBasicInfo(list1.get(0));
+        Long projectId = exchangisJob.getProjectId();
+
+        String projectVersion = params.getOrDefault("projectVersion", "v1").toString();
+        String flowVersion = (String) params.get("flowVersion");
+        if (StringUtils.isEmpty(flowVersion)) {
+            LOG.error("flowVersion is null, cannot copy the flow to a newer version; falling back to v00001");
+            flowVersion = "v00001";
+        }
+        String contextIdStr = (String) params.get("contextID");
+
+        ExportedProject exportedProject = projectExportService.export(projectId, moduleIdsMap, partial, request);
+
+        copySqoop(moduleIdsMap, exportedProject);
+
+        String projectJson = null;
+        try {
+            projectJson = BDPJettyServerHelper.jacksonJson().writeValueAsString(exportedProject);
+        } catch (JsonProcessingException e) {
+            LOG.error("Error occurred while serializing the exported project to JSON", e);
+        }
+        String versionSuffix = projectVersion + "_" + flowVersion;
+
+        IdCatalog idCatalog = projectImportServer.importOpt(projectJson, projectId, versionSuffix, userName, "copy");
+
+        Message message = Message.ok()
+                .data("sqoop", idCatalog.getSqoop());
+        return message;
+    }
+
+    // Keep only the first exported sqoop job when copying a single node
+    private void copySqoop(Map<String, Set<Long>> moduleIdsMap, ExportedProject exportedProject) {
+        Set<Long> sqoopIds = moduleIdsMap.get(ModuleEnum.SQOOP_IDS.getName());
+        if (!sqoopIds.isEmpty()) {
+            ExchangisJobVo sqoops = exportedProject.getSqoops().get(0);
+            exportedProject.setSqoops(Lists.newArrayList(sqoops));
+        }
+    }
+
+}
diff --git a/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/service/impl/ProjectExportServiceImpl.java b/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/service/impl/ProjectExportServiceImpl.java
new file mode 100644
index 000000000..2abc89864
--- /dev/null
+++ b/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/service/impl/ProjectExportServiceImpl.java
@@ -0,0 +1,204 @@
+package com.webank.wedatasphere.exchangis.project.server.service.impl;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.google.common.collect.Maps;
+import com.google.common.collect.Sets;
+import com.webank.wedatasphere.exchangis.job.server.dto.ExportedProject;
+import com.webank.wedatasphere.exchangis.job.server.exception.ExchangisJobServerException;
+import com.webank.wedatasphere.exchangis.job.server.restful.external.ModuleEnum;
+import com.webank.wedatasphere.exchangis.job.server.service.JobInfoService;
+import com.webank.wedatasphere.exchangis.job.vo.ExchangisJobVo;
+import com.webank.wedatasphere.exchangis.project.server.service.ProjectExportService;
+import com.webank.wedatasphere.exchangis.project.server.service.ProjectService;
+import com.webank.wedatasphere.exchangis.project.entity.vo.ExchangisProjectInfo;
+import org.apache.commons.lang.StringUtils;
+import org.apache.linkis.bml.client.BmlClient;
+import org.apache.linkis.bml.client.BmlClientFactory;
+import org.apache.linkis.bml.protocol.BmlUploadResponse;
+import org.apache.linkis.server.BDPJettyServerHelper;
+import org.apache.linkis.server.Message;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.stereotype.Service;
+
+import javax.annotation.Resource;
+import javax.servlet.http.HttpServletRequest;
+import java.io.ByteArrayInputStream;
+import java.nio.charset.StandardCharsets;
+import 
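+// NOTE: exportProject below serializes the whole project to JSON and uploads it to BML
+// (the Linkis material library) via uploadShareResource; the import side downloads it
+// back by resourceId/version. Round-trip sketch using the same calls as this patch
+// (variable names illustrative):
+//   BmlClient client = BmlClientFactory.createBmlClient(userName);
+//   BmlUploadResponse up = client.uploadShareResource(userName, projectName,
+//           "exchangis_exported_" + UUID.randomUUID(), new ByteArrayInputStream(bytes));
+//   // later: fetch with up.resourceId() / up.version() to restore the project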
java.rmi.ServerException;
+import java.util.*;
+import java.util.stream.Collectors;
+
+/**
+ * @author jefftlin
+ * @date 2023/7/13
+ */
+@Service
+public class ProjectExportServiceImpl implements ProjectExportService {
+
+    private static final Logger LOG = LoggerFactory.getLogger(ProjectExportServiceImpl.class);
+
+    /**
+     * Project service
+     */
+    @Resource
+    private ProjectService projectService;
+
+    @Resource
+    private JobInfoService jobInfoService;
+
+    @Override
+    public Message exportProject(Map<String, Object> params, String userName, HttpServletRequest request) throws ExchangisJobServerException, ServerException {
+        ExportedProject exportedProject = null;
+        Long projectId = Long.parseLong(params.get("projectId").toString());
+        Boolean partial = (Boolean) params.get("partial");
+        Map<String, Set<Long>> moduleIdsMap = getModuleIdsMap(params);
+
+        LOG.info("export project, user: {}, project: {}, partial:{}", userName, projectId, partial);
+        exportedProject = export(projectId, moduleIdsMap, partial, request);
+        String exported = null;
+        try {
+            exported = BDPJettyServerHelper.jacksonJson().writeValueAsString(exportedProject);
+        } catch (JsonProcessingException e) {
+            LOG.error("Error occurred while serializing the exported project to JSON", e);
+        }
+
+        LOG.info("projectName: {}, exported:{}", exportedProject.getName(), exported);
+        BmlClient bmlClient = BmlClientFactory.createBmlClient(userName);
+        BmlUploadResponse bmlUploadResponse = bmlClient.uploadShareResource(userName, exportedProject.getName(),
+                "exchangis_exported_" + UUID.randomUUID(), new ByteArrayInputStream(exported.getBytes(StandardCharsets.UTF_8)));
+
+        if (bmlUploadResponse == null || !bmlUploadResponse.isSuccess()) {
+            throw new ServerException("cannot upload exported data to BML");
+        }
+
+        LOG.info("{} is exporting the project, uploaded to BML the resourceID is {} and the version is {}",
+                userName, bmlUploadResponse.resourceId(), bmlUploadResponse.version());
+
+        Message message = Message.ok("export job")
+                .data("resourceId", bmlUploadResponse.resourceId())
+                .data("version", bmlUploadResponse.version());
+        return message;
+    }
+
+    @Override
+    public ExportedProject export(Long projectId, Map<String, Set<Long>> moduleIdsMap, boolean partial, HttpServletRequest request) throws ExchangisJobServerException {
+        ExportedProject exportedProject = new ExportedProject();
+        ExchangisProjectInfo project = projectService.getProjectDetailById(projectId);
+
+        LOG.info("execute export method! export project is {}.", project.getName());
+
+    @Override
+    public ExportedProject export(Long projectId, Map<String, Set<Long>> moduleIdsMap, boolean partial, HttpServletRequest request) throws ExchangisJobServerException {
+        ExportedProject exportedProject = new ExportedProject();
+        ExchangisProjectInfo project = projectService.getProjectDetailById(projectId);
+
+        LOG.info("execute export method! export project is {}.", project.getName());
+        exportedProject.setName(project.getName());
+
+        setSqoop(projectId, moduleIdsMap, partial, exportedProject, request);
+
+        setDatax(projectId, moduleIdsMap, partial, exportedProject, request);
+
+        return exportedProject;
+    }
+
+    private void setSqoop(Long projectId, Map<String, Set<Long>> moduleIdsMap, boolean partial, ExportedProject exportedProject, HttpServletRequest request) throws ExchangisJobServerException {
+        List<ExchangisJobVo> sqoops = new ArrayList<>();
+        LOG.info("Request: {}", request);
+        if (partial) {
+            Set<Long> longs = moduleIdsMap.get(ModuleEnum.SQOOP_IDS.getName());
+            if (longs.size() > 0) {
+                for (Long id : longs) {
+                    LOG.info("id: {}", id);
+                    ExchangisJobVo job = jobInfoService.getJob(id, false);
+
+                    String sqoopStr = null;
+                    try {
+                        sqoopStr = BDPJettyServerHelper.jacksonJson().writeValueAsString(job);
+                    } catch (JsonProcessingException e) {
+                        LOG.error("Error occurred while transforming class", e);
+                    }
+
+                    LOG.info("sqoopStr: {}", sqoopStr);
+                    LOG.info("ExchangisJobVo sqoop: {}", job.getContent());
+                    LOG.info("id: {}", job.getId());
+                    LOG.info("executeUser: {}", job.getExecuteUser());
+                    sqoops.add(job);
+                }
+                exportedProject.setSqoops(sqoops);
+            }
+
+        } else {
+            LOG.info("Through request {} and projectId {} get sqoop jobs", request, projectId);
+            sqoops = jobInfoService.getSubJobList(request, projectId);
+            exportedProject.setSqoops(sqoops);
+        }
+        LOG.info("exporting project, export sqoopJob: {}", exportedProject);
+    }
+
+    private void setDatax(Long projectId, Map<String, Set<Long>> moduleIdsMap, boolean partial, ExportedProject exportedProject, HttpServletRequest request) throws ExchangisJobServerException {
+        List<ExchangisJobVo> dataxs = new ArrayList<>();
+        LOG.info("Request: {}", request);
+        if (partial) {
+            Set<Long> longs = moduleIdsMap.get(ModuleEnum.DATAX_IDS.getName());
+            if (longs.size() > 0) {
+                for (Long id : longs) {
+                    LOG.info("id: {}", id);
+                    ExchangisJobVo job = jobInfoService.getJob(id, false);
+
+                    String dataxStr = null;
+                    try {
+                        dataxStr = BDPJettyServerHelper.jacksonJson().writeValueAsString(job);
+                    } catch (JsonProcessingException e) {
+                        LOG.error("Error occurred while transforming class", e);
+                    }
+
+                    LOG.info("dataxStr: {}", dataxStr);
+                    LOG.info("ExchangisJobVo datax: {}", job.getContent());
+                    LOG.info("id: {}", job.getId());
+                    LOG.info("executeUser: {}", job.getExecuteUser());
+                    dataxs.add(job);
+                }
+                exportedProject.setDataxes(dataxs);
+            }
+
+        } else {
+            LOG.info("Through request {} and projectId {} get datax jobs", request, projectId);
+            dataxs = jobInfoService.getSubJobList(request, projectId);
+            exportedProject.setDataxes(dataxs);
+        }
+        LOG.info("exporting project, export dataxJob: {}", exportedProject);
+
+    }
+
+    /**
+     * Get the collection of module ids that need to be exported
+     *
+     * @param params request params
+     * @return module name -> id set
+     */
+    @Override
+    public Map<String, Set<Long>> getModuleIdsMap(Map<String, Object> params) {
+
+        Map<String, Set<Long>> map = Maps.newHashMap();
+        String sqoopIdsStr = null;
+        if (params.get("sqoopIds") != null) {
+            sqoopIdsStr = params.get("sqoopIds").toString();
+        }
+        String dataxIdsStr = null;
+        if (params.get("dataXIds") != null) {
+            dataxIdsStr = params.get("dataXIds").toString();
+        }
+
+        Set<Long> sqoopIds = Sets.newHashSet();
+        Set<Long> dataxIds = Sets.newHashSet();
+
+        if (StringUtils.isNotEmpty(sqoopIdsStr)) {
+            sqoopIds = Arrays.stream(StringUtils.split(sqoopIdsStr, ","))
+                    .map(Long::parseLong).collect(Collectors.toSet());
+        }
+        if (StringUtils.isNotEmpty(dataxIdsStr)) {
+            dataxIds = Arrays.stream(StringUtils.split(dataxIdsStr, ","))
+                    .map(Long::parseLong).collect(Collectors.toSet());
+        }
+        map.put("sqoopIds", sqoopIds);
+        map.put("dataXIds", dataxIds);
+        LOG.info("The objects to be exported are: {}", map);
+        return map;
+    }
+}
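getModuleIdsMap above expects the job ids as comma-separated request parameters; a small illustration of the shape it consumes and produces (the id values are invented):

```java
// Illustration only; parameter keys match the lookups above, values are hypothetical.
Map<String, Object> params = new HashMap<>();
params.put("sqoopIds", "101,102,103");
params.put("dataXIds", "201");
Map<String, Set<Long>> moduleIds = projectExportService.getModuleIdsMap(params);
// moduleIds.get("sqoopIds") -> {101L, 102L, 103L}
// moduleIds.get("dataXIds") -> {201L}
```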
diff --git a/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/service/impl/ProjectImportServerImpl.java b/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/service/impl/ProjectImportServerImpl.java
new file mode 100644
index 000000000..acae2fa98
--- /dev/null
+++ b/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/service/impl/ProjectImportServerImpl.java
@@ -0,0 +1,201 @@
+package com.webank.wedatasphere.exchangis.project.server.service.impl;
+
+import com.webank.wedatasphere.exchangis.job.server.dto.ExportedProject;
+import com.webank.wedatasphere.exchangis.job.server.dto.IdCatalog;
+import com.webank.wedatasphere.exchangis.job.server.exception.ExchangisJobServerException;
+import com.webank.wedatasphere.exchangis.project.server.service.ProjectImportService;
+import com.webank.wedatasphere.exchangis.job.server.service.JobInfoService;
+import com.webank.wedatasphere.exchangis.job.vo.ExchangisJobVo;
+import com.webank.wedatasphere.exchangis.project.entity.entity.ExchangisProject;
+import com.webank.wedatasphere.exchangis.project.provider.mapper.ProjectMapper;
+import org.apache.commons.io.IOUtils;
+import org.apache.commons.lang.StringUtils;
+import org.apache.linkis.bml.client.BmlClient;
+import org.apache.linkis.bml.client.BmlClientFactory;
+import org.apache.linkis.bml.protocol.BmlDownloadResponse;
+import org.apache.linkis.server.BDPJettyServerHelper;
+import org.apache.linkis.server.Message;
+import org.apache.linkis.server.security.SecurityFilter;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.stereotype.Service;
+
+import javax.annotation.Resource;
+import javax.servlet.http.HttpServletRequest;
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.rmi.ServerException;
+import java.util.Calendar;
+import java.util.List;
+import java.util.Map;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+/**
+ * @author tikazhang
+ * @Date 2022/3/15 9:58
+ */
+@Service
+public class ProjectImportServerImpl implements ProjectImportService {
+
+    private static final Logger LOG = LoggerFactory.getLogger(ProjectImportServerImpl.class);
+
+    Pattern pattern1 = Pattern.compile("([a-zA-Z]+_\\d+).*");
+
+    Pattern pattern2 = Pattern.compile("(\\S+)_v1\\S+");
+
+    @Resource
+    private JobInfoService jobInfoService;
+
+    @Autowired
+    private ProjectMapper projectMapper;
+
+    @Override
+    public Message importProject(HttpServletRequest req, Map<String, Object> params) throws ExchangisJobServerException, ServerException {
+        String userName = SecurityFilter.getLoginUsername(req);
+        String resourceId = (String) params.get("resourceId");
+        String version = (String) params.get("version");
+        Long projectId = (Long) params.get("projectId");
+        String projectVersion = (String) params.get("projectVersion");
+        String flowVersion = (String) params.get("flowVersion");
+        String versionSuffix = projectVersion + "_" + flowVersion;
+        LOG.info("resourceId: {}, projectId: {}, versionSuffix: {}, version: {}, userName: {}, flowVersion: {}", resourceId, projectId, versionSuffix, version, userName, flowVersion);
+        BmlClient bmlClient = BmlClientFactory.createBmlClient(userName);
+        BmlDownloadResponse bmlDownloadResponse = bmlClient.downloadShareResource(userName, resourceId, version);
+        LOG.info("bmlDownloadResponse: {}", bmlDownloadResponse);
+        Message message = null;
+        if (bmlDownloadResponse == null || !bmlDownloadResponse.isSuccess()) {
+            throw new ServerException("cannot download exported data from BML");
+        }
+        try {
+            String projectJson = IOUtils.toString(bmlDownloadResponse.inputStream(), StandardCharsets.UTF_8);
+            LOG.info("projectJson: {}", projectJson);
+            IdCatalog idCatalog = importOpt(projectJson, projectId, versionSuffix, userName, "import");
+            message = Message.ok("import Job ok")
+                    .data("sqoop", idCatalog.getSqoop())
+                    .data("datax", idCatalog.getDatax());
+
+            return message;
+        } catch (IOException | ExchangisJobServerException e) {
+            LOG.error("Error occurred while importing", e);
+            message = Message.error("Error occurred while importing: " + e.getMessage());
+        } finally {
+            IOUtils.closeQuietly(bmlDownloadResponse.inputStream());
+        }
+
+        return message;
+    }
+
+    @Override
+    public IdCatalog importOpt(String projectJson, Long projectId, String versionSuffix, String userName, String importType) throws ExchangisJobServerException {
+        ExportedProject exportedProject = BDPJettyServerHelper.gson().fromJson(projectJson, ExportedProject.class);
+        IdCatalog idCatalog = new IdCatalog();
+
+        importSqoop(projectId, versionSuffix, exportedProject, idCatalog, userName, importType);
+
+        importDatax(projectId, versionSuffix, exportedProject, idCatalog, userName, importType);
+
+        return idCatalog;
+    }
+
+    private void importSqoop(Long projectId, String versionSuffix, ExportedProject exportedProject, IdCatalog idCatalog, String userName, String importType) throws ExchangisJobServerException {
+        List<ExchangisJobVo> sqoops = exportedProject.getSqoops();
+        if (sqoops == null) {
+            return;
+        }
+        List<ExchangisProject> projects = projectMapper.getDetailByName(exportedProject.getName());
+        if (projects.size() == 0) {
+            ExchangisProject project = new ExchangisProject();
+            project.setName(exportedProject.getName());
+            project.setCreateTime(Calendar.getInstance().getTime());
+            project.setCreateUser(userName);
+            Long newProjectId = projectMapper.insertOne(project);
+            List<ExchangisProject> newProjects = projectMapper.getDetailByName(exportedProject.getName());
+            addSqoopTask(sqoops, newProjects, versionSuffix, idCatalog, projectId, importType);
+        } else if (projects.size() == 1) {
+            addSqoopTask(sqoops, projects, versionSuffix, idCatalog, projectId, importType);
+        } else {
+            throw new ExchangisJobServerException(31101, "Duplicated project name already exists, projectName is: [" + exportedProject.getName() + "]");
+        }
+    }
+
+    private void importDatax(Long projectId, String versionSuffix, ExportedProject exportedProject, IdCatalog idCatalog, String userName, String importType) throws ExchangisJobServerException {
+        List<ExchangisJobVo> dataxs = exportedProject.getDataxes();
+        if (dataxs == null) {
+            return;
+        }
+        List<ExchangisProject> projects = projectMapper.getDetailByName(exportedProject.getName());
+        if (projects.size() == 0) {
+            ExchangisProject project = new ExchangisProject();
+            project.setName(exportedProject.getName());
+            project.setCreateTime(Calendar.getInstance().getTime());
+            project.setCreateUser(userName);
+            Long newProjectId = projectMapper.insertOne(project);
+            List<ExchangisProject> newProjects = projectMapper.getDetailByName(exportedProject.getName());
+            addSqoopTask(dataxs, newProjects, versionSuffix, idCatalog, projectId, importType);
+        } else if (projects.size() == 1) {
+            addSqoopTask(dataxs, projects, versionSuffix, idCatalog, projectId, importType);
+        } else {
+            throw new ExchangisJobServerException(31101, "Duplicated project name already exists, projectName is: [" + exportedProject.getName() + "]");
+        }
+    }
+
+    public void addSqoopTask(List<ExchangisJobVo> sqoops, List<ExchangisProject> projects, String versionSuffix, IdCatalog idCatalog, Long projectId, String importType) throws ExchangisJobServerException {
+        for (ExchangisJobVo sqoop : sqoops) {
+            Long projectIdProd = projects.get(0).getId();
+            Long oldId = sqoop.getId();
+            if (importType.equals("import")) {
+                sqoop.setProjectId(projectId);
+            }
+            sqoop.setJobName(updateName(sqoop.getJobName(), versionSuffix));
+            LOG.info("oldId: {}, projectId: {}, jobName: {}", sqoop.getId(), sqoop.getProjectId(), sqoop.getJobName());
+            List<ExchangisJobVo> sameNameJobs = jobInfoService.getByNameWithProjectId(sqoop.getJobName(), projectId);
+            LOG.info("jobByNameWithProjectId: {}", sameNameJobs);
+            Long existingId = (sameNameJobs == null || sameNameJobs.size() == 0) ? null : sameNameJobs.get(0).getId();
+            if (existingId != null) {
+                idCatalog.getSqoop().put(oldId, existingId);
+                throw new ExchangisJobServerException(31101, "Duplicated job name already exists, jobName is: [" + sqoop.getJobName() + "]");
+            } else {
+                LOG.info("Sqoop job content is: {}, modify user is: {}, jobType is: {}", sqoop.getContent(), sqoop.getExecuteUser(), sqoop.getJobType());
+                ExchangisJobVo jobVo = jobInfoService.createJob(sqoop);
+                LOG.info("oldId: {}, newId: {}, jobName: {}", sqoop.getId(), jobVo.getId(), jobVo.getJobName());
+                idCatalog.getSqoop().put(oldId, jobVo.getId());
+            }
+        }
+    }
+
+    private String updateName(String name, String versionSuffix) {
+        if (StringUtils.isBlank(versionSuffix)) {
+            return name;
+        }
+
+        Matcher matcher = pattern1.matcher(name);
+        if (matcher.find()) {
+            return matcher.group(1) + "_" + versionSuffix;
+        } else {
+            Matcher matcher2 = pattern2.matcher(name);
+            if (matcher2.find()) {
+                return matcher2.group(1) + "_" + versionSuffix;
+            }
+        }
+        return name + "_" + versionSuffix;
+    }
+}
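updateName above decides how imported job names get re-suffixed, and the two regexes are easy to misread; traced by hand with versionSuffix = "v1_v00001" (the input names are invented):

```java
// pattern1 = "([a-zA-Z]+_\\d+).*", pattern2 = "(\\S+)_v1\\S+"
updateName("job_3_copy", "v1_v00001");   // pattern1 keeps "job_3"  -> "job_3_v1_v00001"
updateName("myjob_v1_old", "v1_v00001"); // pattern2 keeps "myjob"  -> "myjob_v1_v00001"
updateName("plainname", "v1_v00001");    // no match, plain append  -> "plainname_v1_v00001"
```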
diff --git a/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/service/impl/ProjectServiceImpl.java b/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/service/impl/ProjectServiceImpl.java
new file mode 100644
index 000000000..89f3e12e6
--- /dev/null
+++ b/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/service/impl/ProjectServiceImpl.java
@@ -0,0 +1,264 @@
+package com.webank.wedatasphere.exchangis.project.server.service.impl;
+
+import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
+import com.github.pagehelper.PageHelper;
+import com.github.pagehelper.PageInfo;
+import com.webank.wedatasphere.exchangis.common.pager.PageResult;
+import com.webank.wedatasphere.exchangis.dao.domain.ExchangisJobDsBind;
+import com.webank.wedatasphere.exchangis.dao.mapper.ExchangisJobDsBindMapper;
+import com.webank.wedatasphere.exchangis.job.api.ExchangisJobOpenService;
+import com.webank.wedatasphere.exchangis.job.domain.ExchangisJobEntity;
+import com.webank.wedatasphere.exchangis.job.exception.ExchangisJobException;
+import com.webank.wedatasphere.exchangis.job.vo.ExchangisJobQueryVo;
+import com.webank.wedatasphere.exchangis.project.provider.mapper.ProjectMapper;
+import com.webank.wedatasphere.exchangis.project.provider.mapper.ProjectUserMapper;
+import com.webank.wedatasphere.exchangis.project.server.service.ProjectService;
+import com.webank.wedatasphere.exchangis.project.entity.vo.ExchangisProjectInfo;
+import com.webank.wedatasphere.exchangis.project.entity.vo.ExchangisProjectUserVo;
+import com.webank.wedatasphere.exchangis.project.entity.vo.ProjectQueryVo;
+import com.webank.wedatasphere.exchangis.project.entity.domain.ExchangisProjectUser;
+import com.webank.wedatasphere.exchangis.project.entity.entity.ExchangisProject;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.stereotype.Service;
+import org.springframework.transaction.annotation.Transactional;
+
+import javax.annotation.Resource;
+import java.util.*;
+import java.util.stream.Collectors;
+
+@Service
+public class ProjectServiceImpl implements ProjectService {
+
+    private static final Logger LOGGER = LoggerFactory.getLogger(ProjectServiceImpl.class);
+
+    @Autowired
+    private ProjectMapper projectMapper;
+
+    @Resource
+    private ExchangisJobOpenService jobServiceOpenApi;
+
+    @Autowired
+    private ExchangisJobDsBindMapper jobDataSourceBindMapper;
+
+    @Autowired
+    private ProjectUserMapper projectUserMapper;
+
+    @Override
+    @Transactional(rollbackFor = Exception.class)
+    public long createProject(ExchangisProjectInfo projectVo, String userName) {
+        // Construct the entity
+        ExchangisProject project = new ExchangisProject();
+        project.setName(projectVo.getName());
+        project.setDescription(projectVo.getDescription());
+        project.setDomain(Optional.ofNullable(projectVo.getDomain()).orElse(ExchangisProject.Domain.STANDALONE.name()));
+        project.setLabels(projectVo.getLabel());
+        project.setSourceMap(projectVo.getSource());
+        project.setViewUsers(projectVo.getViewUsers());
+        project.setEditUsers(projectVo.getEditUsers());
+        project.setExecUsers(projectVo.getExecUsers());
+        project.setCreateUser(userName);
+        project.setCreateTime(Calendar.getInstance().getTime());
+        this.projectMapper.insertOne(project);
+
+        Map<String, ExchangisProjectUser> projectUserMap = new HashMap<>();
+        if (Objects.nonNull(project.getViewUsers()) && project.getViewUsers().length() != 0) {
+            for (String viewUser : project.getViewUsers().split(",")) {
+                ExchangisProjectUser projectUser = new ExchangisProjectUser();
+                projectUser.setProjectId(project.getId());
+                projectUser.setPrivUser(viewUser);
+                projectUser.setPriv(4);
+                projectUser.setUpdateTime(project.getLastUpdateTime());
+                projectUserMap.put(viewUser, projectUser);
+            }
+        }
+        if (Objects.nonNull(project.getEditUsers()) && project.getEditUsers().length() != 0) {
+            for (String editUser : project.getEditUsers().split(",")) {
+                if (Objects.nonNull(projectUserMap.get(editUser))) {
+                    projectUserMap.get(editUser).setPriv(6);
+                } else {
+                    ExchangisProjectUser projectUser = new ExchangisProjectUser();
+                    projectUser.setProjectId(project.getId());
+                    projectUser.setPrivUser(editUser);
+                    projectUser.setPriv(6);
+                    projectUser.setUpdateTime(project.getLastUpdateTime());
+                    projectUserMap.put(editUser, projectUser);
+                }
+            }
+        }
+        if (Objects.nonNull(project.getExecUsers()) && project.getExecUsers().length() != 0) {
+            for (String execUser : project.getExecUsers().split(",")) {
+                if (Objects.nonNull(projectUserMap.get(execUser))) {
+                    projectUserMap.get(execUser).setPriv(7);
+                } else {
+                    ExchangisProjectUser projectUser = new ExchangisProjectUser();
+                    projectUser.setProjectId(project.getId());
+                    projectUser.setPrivUser(execUser);
+                    projectUser.setPriv(7);
+                    projectUser.setUpdateTime(project.getLastUpdateTime());
+                    projectUserMap.put(execUser, projectUser);
+                }
+            }
+        }
+
+        if (projectUserMap.size() > 0) {
+            this.projectUserMapper.addProjectUser(new ArrayList<>(projectUserMap.values()));
+        }
+        return project.getId();
+    }
+
+    @Override
+    public boolean existsProject(Long projectId, String projectName) {
+        Integer count = this.projectMapper.existsOne(projectId, projectName);
+        return null != count && count > 0;
+    }
+
+    @Override
+    @Transactional(rollbackFor = Exception.class)
+    public void updateProject(ExchangisProjectInfo projectVo, String userName) {
+        ExchangisProject updatedProject = new ExchangisProject();
+        updatedProject.setId(Long.valueOf(projectVo.getId()));
+        updatedProject.setName(projectVo.getName());
+        updatedProject.setDescription(projectVo.getDescription());
+        updatedProject.setLabels(projectVo.getLabel());
+        updatedProject.setViewUsers(projectVo.getViewUsers());
+        updatedProject.setEditUsers(projectVo.getEditUsers());
+        updatedProject.setExecUsers(projectVo.getExecUsers());
+        // Set the updated properties
+        updatedProject.setLastUpdateUser(userName);
+        updatedProject.setLastUpdateTime(Calendar.getInstance().getTime());
+        this.projectMapper.updateOne(updatedProject);
+
+        Map<String, ExchangisProjectUser> projectUserMap = new HashMap<>();
+        if (Objects.nonNull(updatedProject.getViewUsers()) && updatedProject.getViewUsers().length() != 0) {
+            for (String viewUser : updatedProject.getViewUsers().split(",")) {
+                ExchangisProjectUser projectUser = new ExchangisProjectUser();
+                projectUser.setProjectId(updatedProject.getId());
+                projectUser.setPrivUser(viewUser);
+                projectUser.setPriv(4);
+                projectUser.setUpdateTime(updatedProject.getLastUpdateTime());
+                projectUserMap.put(viewUser, projectUser);
+            }
+        }
+        if (Objects.nonNull(updatedProject.getEditUsers()) && updatedProject.getEditUsers().length() != 0) {
+            for (String editUser : updatedProject.getEditUsers().split(",")) {
+                if (Objects.nonNull(projectUserMap.get(editUser))) {
+                    projectUserMap.get(editUser).setPriv(6);
+                } else {
+                    ExchangisProjectUser projectUser = new ExchangisProjectUser();
+                    projectUser.setProjectId(updatedProject.getId());
+                    projectUser.setPrivUser(editUser);
+                    projectUser.setPriv(6);
+                    projectUser.setUpdateTime(updatedProject.getLastUpdateTime());
+                    projectUserMap.put(editUser, projectUser);
+                }
+            }
+        }
+        if (Objects.nonNull(updatedProject.getExecUsers()) && updatedProject.getExecUsers().length() != 0) {
+            for (String execUser : updatedProject.getExecUsers().split(",")) {
+                if (Objects.nonNull(projectUserMap.get(execUser))) {
+                    projectUserMap.get(execUser).setPriv(7);
+                } else {
+                    ExchangisProjectUser projectUser = new ExchangisProjectUser();
+                    projectUser.setProjectId(updatedProject.getId());
+                    projectUser.setPrivUser(execUser);
+                    projectUser.setPriv(7);
+                    projectUser.setUpdateTime(updatedProject.getLastUpdateTime());
+                    projectUserMap.put(execUser, projectUser);
+                }
+            }
+        }
+
+        this.projectUserMapper.deleteProjectUser(Long.valueOf(projectVo.getId()));
+        if (projectUserMap.size() > 0) {
+            this.projectUserMapper.addProjectUser(new ArrayList<>(projectUserMap.values()));
+        }
+    }
+
+    @Override
+    public PageResult<ExchangisProjectInfo> queryProjects(ProjectQueryVo queryVo) {
+        PageHelper.startPage(queryVo.getPage(), queryVo.getPageSize());
+        try {
+            List<ExchangisProject> projects = this.projectMapper.queryPageList(queryVo);
+            PageInfo<ExchangisProject> pageInfo = new PageInfo<>(projects);
+            List<ExchangisProjectInfo> infoList = projects
+                    .stream().map(ExchangisProjectInfo::new).collect(Collectors.toList());
+            PageResult<ExchangisProjectInfo> pageResult = new PageResult<>();
+            pageResult.setList(infoList);
+            pageResult.setTotal(pageInfo.getTotal());
+            return pageResult;
+        } finally {
+            PageHelper.clearPage();
+        }
+    }
+
+    @Override
+    @Transactional(rollbackFor = Exception.class)
+    public void deleteProject(Long projectId) throws ExchangisJobException {
+        // First delete the project to lock the record
+        this.projectMapper.deleteOne(projectId);
+        // Query the related jobs
+        ExchangisJobQueryVo queryVo = new ExchangisJobQueryVo();
+        queryVo.setProjectId(projectId);
+        List<ExchangisJobEntity> jobEntities = this.jobServiceOpenApi.queryJobs(queryVo, false);
+        if (!jobEntities.isEmpty()) {
+            List<Long> ids = jobEntities.stream().map(ExchangisJobEntity::getId).collect(Collectors.toList());
+            this.jobServiceOpenApi.deleteJobBatch(ids);
+            QueryWrapper<ExchangisJobDsBind> dsBindQuery = new QueryWrapper<ExchangisJobDsBind>().in("job_id", ids);
+            this.jobDataSourceBindMapper.delete(dsBindQuery);
+        }
+    }
+
+    @Override
+    @Transactional(rollbackFor = Exception.class)
+    public void deleteProjectByName(String name) throws ExchangisJobException {
+        // First delete the project to lock the record
+        ExchangisProject project = this.projectMapper.selectByName(name);
+        this.projectMapper.deleteByName(name);
+    }
+
+    @Override
+    public ExchangisProjectInfo getProjectDetailById(Long projectId) {
+        ExchangisProject project = this.projectMapper.getDetailById(projectId);
+        if (Objects.nonNull(project)) {
+            ExchangisProjectInfo projectVo = new ExchangisProjectInfo(project);
+            projectVo.setViewUsers(project.getViewUsers());
+            projectVo.setEditUsers(project.getEditUsers());
+            projectVo.setExecUsers(project.getExecUsers());
+            projectVo.setSource(project.getSourceMap());
+            return projectVo;
+        }
+        return null;
+    }
+
+    /**
+     * Basic info query
+     * @param projectId project id
+     * @return project info
+     */
+    @Override
+    public ExchangisProjectInfo getProjectById(Long projectId) {
+        ExchangisProject project = projectMapper.getBasicById(projectId);
+        if (Objects.nonNull(project)) {
+            return new ExchangisProjectInfo(project);
+        }
+        return null;
+    }
+
+    @Override
+    public ExchangisProjectInfo selectByName(String name) {
+        ExchangisProject project = this.projectMapper.selectByName(name);
+        if (Objects.nonNull(project)) {
+            return new ExchangisProjectInfo(project);
+        }
+        return null;
+    }
+
+    @Override
+    public ExchangisProjectUser queryProjectUser(ExchangisProjectUserVo exchangisProjectUserVo) {
+        ExchangisProjectUser projectUser = new ExchangisProjectUser(Long.valueOf(exchangisProjectUserVo.getProjectId()), exchangisProjectUserVo.getPrivUser());
+        return this.projectUserMapper.queryProjectUser(projectUser);
+    }
+}
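The numeric priv values written above (4 for view users, 6 for edit users, 7 for exec users) read like unix-style permission bits (4 = read, 2 = write, 1 = execute). The code never names them, so the constants below are an inference, not part of the patch:

```java
// Inferred meaning of exchangis_project_user.priv (assumption, not defined in this patch):
static final int PRIV_VIEW = 4; // 100
static final int PRIV_EDIT = 6; // 110 = view + write
static final int PRIV_EXEC = 7; // 111 = view + write + execute

static boolean canView(int priv) { return (priv & PRIV_VIEW) != 0; }
```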
diff --git a/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/utils/ExchangisProjectConfiguration.java b/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/utils/ExchangisProjectConfiguration.java
new file mode 100644
index 000000000..81ae25a13
--- /dev/null
+++ b/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/utils/ExchangisProjectConfiguration.java
@@ -0,0 +1,12 @@
+package com.webank.wedatasphere.exchangis.project.server.utils;
+
+import org.apache.linkis.common.conf.CommonVars;
+
+/**
+ * Project configuration
+ */
+public class ExchangisProjectConfiguration {
+    public static final CommonVars<Boolean> LIMIT_INTERFACE = CommonVars.apply("wds.exchangis.limit.interface.value", true);
+
+}
diff --git a/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/utils/ExchangisProjectRestfulUtils.java b/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/utils/ExchangisProjectRestfulUtils.java
new file mode 100644
index 000000000..06a27f9c2
--- /dev/null
+++ b/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/utils/ExchangisProjectRestfulUtils.java
@@ -0,0 +1,22 @@
+package com.webank.wedatasphere.exchangis.project.server.utils;
+
+import org.apache.commons.math3.util.Pair;
+import org.apache.linkis.server.Message;
+
+import java.util.Arrays;
+
+/**
+ * Utils for restful
+ */
+public class ExchangisProjectRestfulUtils {
+
+    @SafeVarargs
+    public static Message dealOk(String msg, Pair<String, Object>... data) {
+        Message message = Message.ok(msg);
+        Arrays.stream(data).forEach(p -> message.data(p.getKey(), p.getValue()));
+        return message;
+    }
+
+}
diff --git a/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/utils/ProjectAuthorityUtils.java b/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/utils/ProjectAuthorityUtils.java
new file mode 100644
index 000000000..6fd06bf8f
--- /dev/null
+++ b/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/utils/ProjectAuthorityUtils.java
@@ -0,0 +1,49 @@
+package com.webank.wedatasphere.exchangis.project.server.utils;
+
+import com.webank.wedatasphere.exchangis.project.entity.domain.OperationType;
+import com.webank.wedatasphere.exchangis.project.server.exception.ExchangisProjectErrorException;
+import com.webank.wedatasphere.exchangis.project.server.exception.ExchangisProjectExceptionCode;
+import com.webank.wedatasphere.exchangis.project.entity.vo.ExchangisProjectInfo;
+import org.apache.commons.lang3.StringUtils;
+
+import java.util.Arrays;
+import java.util.List;
+import java.util.Objects;
+import java.util.stream.Collectors;
+
+/**
+ * @author tikazhang
+ * @Date 2022/5/10 20:10
+ */
+public class ProjectAuthorityUtils {
+
+    /**
+     * @param username username
+     * @param project project
+     * @param operationType enum("PROJECT_QUERY","PROJECT_ALTER")
+     * @return true if the user holds the given authority on the project
+     */
+    public static boolean hasProjectAuthority(String username, ExchangisProjectInfo project, OperationType operationType) throws ExchangisProjectErrorException {
+        if (StringUtils.isNotEmpty(username) &&
+                Objects.nonNull(project) &&
+                Objects.nonNull(operationType)) {
+            // Create users have all rights to the project.
+            List<String> viewUsers = Arrays.stream(project.getViewUsers().split(",")).distinct().collect(Collectors.toList());
+            List<String> editUsers = Arrays.stream(project.getEditUsers().split(",")).distinct().collect(Collectors.toList());
+            List<String> execUsers = Arrays.stream(project.getExecUsers().split(",")).distinct().collect(Collectors.toList());
+
+            switch (operationType) {
+                case PROJECT_QUERY:
+                    return StringUtils.equals(username, project.getCreateUser()) ||
+                            viewUsers.contains(username) ||
+                            editUsers.contains(username) ||
+                            execUsers.contains(username);
+                case PROJECT_ALTER:
+                    return StringUtils.equals(username, project.getCreateUser());
+                default:
+                    throw new ExchangisProjectErrorException(ExchangisProjectExceptionCode.UNSUPPORTED_OPERATION.getCode(), "Unsupported operationType");
+            }
+        }
+        return false;
+    }
+}
diff --git a/exchangis-project/pom.xml b/exchangis-project/pom.xml
new file mode 100644
index 000000000..f4decd1d6
--- /dev/null
+++ b/exchangis-project/pom.xml
@@ -0,0 +1,26 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <parent>
+        <artifactId>exchangis</artifactId>
+        <groupId>com.webank.wedatasphere.exchangis</groupId>
+        <version>${revision}</version>
+        <relativePath>../pom.xml</relativePath>
+    </parent>
+    <modelVersion>4.0.0</modelVersion>
+
+    <artifactId>exchangis-project</artifactId>
+    <packaging>pom</packaging>
+    <modules>
+        <module>exchangis-project-entity</module>
+        <module>exchangis-project-server</module>
+        <module>exchangis-project-provider</module>
+    </modules>
+
+    <properties>
+        <maven.compiler.source>8</maven.compiler.source>
+        <maven.compiler.target>8</maven.compiler.target>
+    </properties>
+</project>
\ No newline at end of file
diff --git a/exchangis-server/pom.xml b/exchangis-server/pom.xml new file mode 100644 index 000000000..e8d5a5619 --- /dev/null +++ b/exchangis-server/pom.xml @@ -0,0 +1,125 @@ + + + + exchangis + com.webank.wedatasphere.exchangis + ${revision} + ../pom.xml + + 4.0.0 + + exchangis-server + + + 8 + 8 + + + + + com.webank.wedatasphere.exchangis + exchangis-datasource-server + ${project.version} + + + com.webank.wedatasphere.exchangis + exchangis-engine-server + ${project.version} + + + com.webank.wedatasphere.exchangis + exchangis-job-server + ${project.version} + + + + com.webank.wedatasphere.exchangis + exchangis-project-server + ${project.version} + + + com.webank.wedatasphere.exchangis + exchangis-dao + ${project.version} + + + com.fasterxml + classmate + 1.5.1 + + + + + + + org.apache.maven.plugins + maven-deploy-plugin + + + + net.alchim31.maven + scala-maven-plugin + + + org.apache.maven.plugins + maven-jar-plugin + + + org.apache.maven.plugins + maven-assembly-plugin + 3.1.0 + false + + + make-assembly + package + + single + + + + src/main/assembly/distribution.xml + + + + + + false + exchangis-server + false + false + + src/main/assembly/distribution.xml + + ${project.artifactId}_${project.version} + ${basedir}/target/packages + + + + org.apache.maven.plugins + maven-compiler-plugin + + 8 + 8 + + + + + + src/main/java + + **/*.xml + + + + + \ No newline at end of file
diff --git a/exchangis-server/src/main/assembly/distribution.xml b/exchangis-server/src/main/assembly/distribution.xml new file mode 100644 index 000000000..5145471d5 --- /dev/null +++ b/exchangis-server/src/main/assembly/distribution.xml @@ -0,0 +1,53 @@ + + + + exchangis-server + + tar.gz + + false + + + + + lib/${project.artifactId} + true + true + false + false + true + + + + + + ${basedir}/src/main/bin + 0755 + bin + unix + + + ${basedir}/../exchangis-datasource/exchangis-datasource-server/target/exchangis-datasource-server + 0755 + + + + \ No newline at end of file
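The next file adds BinlogArrayLockFreeQueue, a CAS-based ring buffer whose capacity must be a power of two. As a rough sketch of how its public surface is meant to be used (types and calls match the class below; names and values are illustrative):

```java
// Sketch only: exercises the queue added in the following diff.
public static void demo() throws InterruptedException {
    BinlogArrayLockFreeQueue<String> queue = new BinlogArrayLockFreeQueue<>(1 << 10); // capacity must be 2^N
    new Thread(() -> {
        try {
            queue.put("binlog-event");
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
        }
    }).start();
    String one = queue.take(1, TimeUnit.SECONDS); // bounded single take; null on timeout
    List<String> batch = new ArrayList<>();
    int drained = queue.drainTo(batch, 128);      // non-blocking batch drain
}
```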
diff --git a/exchangis-server/src/main/java/com/webank/wedatasphere/exchangis/queue/BinlogArrayLockFreeQueue.java b/exchangis-server/src/main/java/com/webank/wedatasphere/exchangis/queue/BinlogArrayLockFreeQueue.java
new file mode 100644
index 000000000..8a40a0c05
--- /dev/null
+++ b/exchangis-server/src/main/java/com/webank/wedatasphere/exchangis/queue/BinlogArrayLockFreeQueue.java
@@ -0,0 +1,465 @@
+package com.webank.wedatasphere.exchangis.queue;
+
+import sun.misc.Unsafe;
+
+import java.lang.reflect.Field;
+import java.security.AccessController;
+import java.security.PrivilegedActionException;
+import java.security.PrivilegedExceptionAction;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Objects;
+import java.util.concurrent.ArrayBlockingQueue;
+import java.util.concurrent.Executors;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.locks.Condition;
+import java.util.concurrent.locks.LockSupport;
+import java.util.concurrent.locks.ReentrantLock;
+
+public class BinlogArrayLockFreeQueue<T> {
+    /**
+     * Allocate a 64 MB buffer by default
+     */
+    private static final int DEFAULT_QUEUE_BUFFER = 1024 * 1024 * 64;
+
+    private static final long DEFAULT_MEASURE_INTERVAL = 2 * 1000L;
+
+    /**
+     * We should reduce the cpu usage
+     */
+    private static final long DEFAULT_SPIN_TIMES = 10;
+
+    private final Unsafe unsafe = UnsafeUtil.unsafe;
+
+    final Object[] items;
+
+    /**
+     * take index
+     */
+    private volatile long takeIndex;
+
+    /**
+     * put index
+     */
+    private volatile long putIndex;
+
+    /**
+     * max take index
+     */
+    private volatile long maxTakeIndex;
+
+    /**
+     * Memory bytes accumulated
+     */
+    private volatile long memoryBytes;
+
+    /**
+     * Wait take time
+     */
+    private volatile long waitTake;
+
+    /**
+     * Wait put time
+     */
+    private volatile long waitPut;
+
+    /**
+     * Flag to measure
+     */
+    private volatile long measureFlag;
+
+    /**
+     * Buffer size limit
+     */
+    private long bufferSize;
+
+    /**
+     * Measure interval
+     */
+    private long measureInterval;
+
+    private final ReentrantLock waitLock = new ReentrantLock(false);
+
+    private final Condition notEmpty = waitLock.newCondition();
+
+    public BinlogArrayLockFreeQueue(int capacity, long bufferSize, long measureInterval) {
+        // Init the array size as a ring buffer, leaving one chunk free
+        if ((capacity & (capacity - 1)) != 0) {
+            throw new IllegalArgumentException("the value of capacity must equal to 2^N and greater than 1");
+        }
+        items = new Object[capacity];
+        if (bufferSize <= 0) {
+            bufferSize = Integer.MAX_VALUE;
+        }
+        this.bufferSize = bufferSize;
+        this.measureInterval = measureInterval;
+    }
+
+    public BinlogArrayLockFreeQueue(int capacity) {
+        this(capacity, DEFAULT_QUEUE_BUFFER, DEFAULT_MEASURE_INTERVAL);
+    }
+
+    public void put(T message) throws InterruptedException {
+        if (Objects.nonNull(message)) {
+            long curTakeIndex;
+            long curPutIndex;
+            long nextPutIndex;
+            long waitTime = 0;
+            long clock = 0;
+            try {
+                do {
+                    int counter = -1;
+                    do {
+                        counter++;
+                        // Park briefly while the queue is full
+                        if (counter > 0) {
+                            LockSupport.parkNanos(1L);
+                        }
+                        curPutIndex = this.putIndex;
+                        curTakeIndex = this.takeIndex;
+                        nextPutIndex = curPutIndex + 1;
+                        clock = System.nanoTime();
+                    } while (toIndex(nextPutIndex) == toIndex(curTakeIndex));
+                    if (counter > 0) {
+                        waitTime += (System.nanoTime() - clock);
+                    }
+                } while (!unsafe.compareAndSwapLong(this, Offsets.putIndexOffset, curPutIndex, nextPutIndex));
+                // Accumulate the memory
+                accumulateMemory(1);
+                // Write into the ring
+                this.items[toIndex(curPutIndex)] = message;
+//                if (waitTime > 0) {
+//                    unsafe.getAndAddLong(this, Offsets.waitTakeOffset, waitTime);
+//                }
+                while (!unsafe.compareAndSwapLong(this, Offsets.maxTakeIndexOffset, curPutIndex, nextPutIndex)) {
+                    // Notify the older producer to update the max take index
+                    Thread.yield();
+                }
+
+            } finally {
+                // Notify the waiters
+                waitLock.lock();
+                try {
+                    notEmpty.signalAll();
+                } finally {
+                    waitLock.unlock();
+                }
+                // Try to measure the queue indicator
+//                measureIndicator();
+            }
+        }
+    }
+
+    @SuppressWarnings("unchecked")
+    public T take(long timeout, TimeUnit unit) throws InterruptedException {
+        long nanos = unit.toNanos(timeout);
+        long curMaxTakeIndex;
+        long curTakeIndex;
+        long nextTakeIndex;
+        T element;
+        int takePos;
+        int iterator = 0;
+        long waitTime = 0;
+        do {
+            curMaxTakeIndex = this.maxTakeIndex;
+            curTakeIndex = this.takeIndex;
+            long clock = System.nanoTime();
+            while (toIndex(curTakeIndex) == toIndex(curMaxTakeIndex)) {
+                // Wrap as wait strategy
+                ++iterator;
+                // Escalate by iteration count
+                if (iterator > DEFAULT_SPIN_TIMES && iterator <= DEFAULT_SPIN_TIMES * 2) {
+                    // Try to park to release cpu
+                    LockSupport.parkNanos(1L);
+                } else if (iterator > DEFAULT_SPIN_TIMES * 2) {
+                    waitLock.lockInterruptibly();
+                    curTakeIndex = this.takeIndex;
+                    curMaxTakeIndex = this.maxTakeIndex;
+                    try {
+                        if (toIndex(curTakeIndex) == toIndex(curMaxTakeIndex)) {
+                            if (nanos <= 0) {
+                                return null;
+                            }
+                            nanos = notEmpty.awaitNanos(nanos);
+                            iterator = 0;
+                        }
+                    } finally {
+                        waitLock.unlock();
+                    }
+                }
+                curTakeIndex = this.takeIndex;
+                curMaxTakeIndex = this.maxTakeIndex;
+            }
+            if (iterator > 0) {
+                waitTime += (System.nanoTime() - clock);
+            }
+            nextTakeIndex = curTakeIndex + 1;
+            takePos = toIndex(curTakeIndex);
+            element = (T) this.items[takePos];
+        } while (!unsafe.compareAndSwapLong(this, Offsets.takeIndexOffset, curTakeIndex, nextTakeIndex));
+        // Empty the slot and release the memory
+        if (null != element) {
+            this.items[takePos] = null;
+//            unsafe.getAndAddInt(this, Offsets.memoryBytesOffset, -1);
+        }
+//        if (waitTime > 0) {
+//            unsafe.getAndAddLong(this, Offsets.waitPutOffset, waitTime);
+//        }
+        // Try to measure the queue indicator
+        measureIndicator();
+        return element;
+    }
+
+    @SuppressWarnings("unchecked")
+    public int drainTo(List<T> elements, int maxElements) {
+        long curMaxTakeIndex = this.maxTakeIndex;
+        long curTakeIndex = this.takeIndex;
+        long nextTakeIndex;
+        int takePos;
+        int count = 0;
+        int bytesCnt = 0;
+        // Break if the queue is empty
+        while (toIndex(curTakeIndex) != toIndex(curMaxTakeIndex)) {
+            nextTakeIndex = curTakeIndex + 1;
+            takePos = toIndex(curTakeIndex);
+            if (unsafe.compareAndSwapLong(this, Offsets.takeIndexOffset, curTakeIndex, nextTakeIndex)) {
+                T element = (T) this.items[takePos];
+                elements.add(element);
+                count++;
+                // Empty the slot
+                this.items[takePos] = null;
+                bytesCnt = bytesCnt + 1;
+                if (count >= maxElements) {
+                    break;
+                }
+            }
+            curTakeIndex = this.takeIndex;
+            curMaxTakeIndex = this.maxTakeIndex;
+        }
+        if (bytesCnt > 0) {
+            unsafe.getAndAddInt(this, Offsets.memoryBytesOffset, -bytesCnt);
+        }
+        measureIndicator();
+        return count;
+    }
+
+    public void adjustBuffer(long bufferSize) {
+        // Just update the buffer size limit
+        this.bufferSize = bufferSize;
+    }
+
+    /**
+     * Accumulate memory bytes
+     * @param byteSize byte size
+     */
+    private void accumulateMemory(int byteSize) {
+        // Add memory count
+        unsafe.getAndAddInt(this, Offsets.memoryBytesOffset, byteSize);
+        while (memoryBytes >= this.bufferSize) {
+            // Optimize the park strategy
+            LockSupport.parkNanos(1L);
+        }
+    }
+
+    /**
+     * Convert the long sequence to an index
+     * @param sequence sequenceId
+     * @return position
+     */
+    private int toIndex(long sequence) {
+        return (int) (sequence & (items.length - 1));
+    }
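+    // Worked example: with capacity 8, toIndex masks the monotonically increasing
+    // sequence into a ring position: toIndex(5) = 5 & 7 = 5, toIndex(8) = 8 & 7 = 0,
+    // toIndex(13) = 13 & 7 = 5. This is why the constructor insists on capacity == 2^N:
+    // the (length - 1) mask only selects the low bits when length is a power of two.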
+
+    /**
+     * Measure method
+     */
+    private void measureIndicator() {
+//        long clock = System.currentTimeMillis();
+//        long measureTime = this.measureFlag;
+//        if (clock >= measureTime) {
+//            // Only use the wait take time to measure pressure
+//            long waitTime = this.waitTake;
+//            if (unsafe.compareAndSwapLong(this, Offsets.measureFlagOffset,
+//                    measureTime, clock + this.measureInterval)) {
+//                // decrease the wait take time
+//                indicator.setBufferUsed(memoryBytes);
+//                indicator.setPressure((double) waitTime / ((double) (clock - measureTime) * Math.pow(10, 6)));
+//                long time = unsafe.getAndAddLong(this, Offsets.waitTakeOffset, -waitTime);
+//                if (time < waitTime) {
+//                    // Occur some error? init to zero
+//                    this.waitTake = 0;
+//                }
+//                this.waitPut = 0;
+//                // Invoke the listener
+//                try {
+//                    listeners.forEach(listener -> listener.onMeasure(indicator));
+//                } catch (Exception e) {
+//                    LOG.warn("Error occurred while measuring the queue indicator", e);
+//                    // Not to throw exception
+//                }
+//            }
+//        }
+    }
+
+    private static class UnsafeUtil {
+
+        private static final Unsafe unsafe;
+
+        static {
+            final PrivilegedExceptionAction<Unsafe> action = () -> {
+                Field theUnsafe = Unsafe.class.getDeclaredField("theUnsafe");
+                theUnsafe.setAccessible(true);
+                return (Unsafe) theUnsafe.get(null);
+            };
+            try {
+                unsafe = AccessController.doPrivileged(action);
+            } catch (PrivilegedActionException e) {
+                // Throw error
+                throw new Error(e);
+            }
+        }
+    }
+
+    /**
+     * Queue field offsets
+     */
+    private static class Offsets {
+        /**
+         * Take index field offset
+         */
+        private static final long takeIndexOffset;
+
+        /**
+         * Put index field offset
+         */
+        private static final long putIndexOffset;
+
+        /**
+         * Max take index field offset
+         */
+        private static final long maxTakeIndexOffset;
+
+        /**
+         * Memory bytes field offset
+         */
+        private static final long memoryBytesOffset;
+
+        /**
+         * Wait put field offset
+         */
+        private static final long waitPutOffset;
+
+        /**
+         * Wait take field offset
+         */
+        private static final long waitTakeOffset;
+
+        /**
+         * Measure flag field offset
+         */
+        private static final long measureFlagOffset;
+
+        static {
+            Unsafe unsafe = UnsafeUtil.unsafe;
+            try {
+                takeIndexOffset = unsafe.objectFieldOffset
+                        (BinlogArrayLockFreeQueue.class.getDeclaredField("takeIndex"));
+                putIndexOffset = unsafe.objectFieldOffset
+                        (BinlogArrayLockFreeQueue.class.getDeclaredField("putIndex"));
+                maxTakeIndexOffset = unsafe.objectFieldOffset
+                        (BinlogArrayLockFreeQueue.class.getDeclaredField("maxTakeIndex"));
+                memoryBytesOffset = unsafe.objectFieldOffset
+                        (BinlogArrayLockFreeQueue.class.getDeclaredField("memoryBytes"));
+                waitPutOffset = unsafe.objectFieldOffset
+                        (BinlogArrayLockFreeQueue.class.getDeclaredField("waitPut"));
+                waitTakeOffset = unsafe.objectFieldOffset
+                        (BinlogArrayLockFreeQueue.class.getDeclaredField("waitTake"));
+                measureFlagOffset = unsafe.objectFieldOffset
+                        (BinlogArrayLockFreeQueue.class.getDeclaredField("measureFlag"));
+            } catch (Exception e) {
+                throw new Error(e);
+            }
+        }
+    }
+
+    public static void main(String[] args) {
+        ArrayBlockingQueue<String> queue1 = new ArrayBlockingQueue<>((int) Math.pow(2, 10));
+        Executors.newSingleThreadExecutor().submit(() -> {
+            int count = 0;
+            while (true) {
+                String value = null;
+//                value = queue1.poll(1, TimeUnit.SECONDS);
+//                queue1.drainTo(new ArrayList<>());
+                queue1.take();
+//                if (Objects.nonNull(value)) {
+//                    count++;
+//                } else {
+//                    System.out.println("blockingQueue(num)" + count);
+//                    break;
+//                }
+            }
+        });
+        for (int j = 0; j < 1; j++) {
+            final int finalJ = j;
+            new Thread(new Runnable() {
+
+                public void run() {
+                    long time = System.currentTimeMillis();
+                    for (int i = 0; i < 6000000; i++) {
+                        try {
+                            long clock = System.currentTimeMillis();
+                            queue1.put("hello");
+                            if (System.currentTimeMillis() - clock >= 3) {
+//                                System.out.println("spend1: " + (System.currentTimeMillis() - clock));
+                            }
+                        } catch (InterruptedException e) {
+                            e.printStackTrace();
+                        }
+                    }
+                    System.out.println("blockingQueue" + finalJ + ": " + (System.currentTimeMillis() - time));
+                }
+            }).start();
+        }
+        BinlogArrayLockFreeQueue<String> queue = new BinlogArrayLockFreeQueue<>((int) Math.pow(2, 10));
+        Executors.newSingleThreadExecutor().submit(() -> {
+            int count = 0;
+            while (true) {
+//                value = queue.take(1, TimeUnit.SECONDS);
+                int size = queue.drainTo(new ArrayList<>(), Integer.MAX_VALUE);
+
+//                if (Objects.nonNull(value)) {
+//                    count = count + 1;
+//                } else {
+//                    System.out.println("lockFreeQueue(num)" + count);
+//                    break;
+//                }
+            }
+        });
+        for (int j = 0; j < 1; j++) {
+            final int finalJ = j;
+            new Thread(new Runnable() {
+
+                public void run() {
+                    long time = System.currentTimeMillis();
+                    for (int i = 0; i < 6000000; i++) {
+                        long clock = System.currentTimeMillis();
+                        try {
+                            queue.put("hello");
+                        } catch (InterruptedException e) {
+                            e.printStackTrace();
+                        }
+                        if (System.currentTimeMillis() - clock >= 3) {
+//                            System.out.println("spend2: " + i + ":" + (System.currentTimeMillis() - clock));
+                        }
+                    }
+                    System.out.println("lockFreeQueue" + finalJ + ": " + (System.currentTimeMillis() - time));
+                }
+            }).start();
+        }
+    }
+
+}
diff --git a/exchangis-server/src/main/resources/application.yml b/exchangis-server/src/main/resources/application.yml
new file mode 100644
index 000000000..c94128904
--- /dev/null
+++ b/exchangis-server/src/main/resources/application.yml
@@ -0,0 +1,20 @@
+server:
+  port: 9322
+spring:
+  application:
+    name: exchangis-server
+eureka:
+  client:
+    serviceUrl:
+      defaultZone: http://localhost:20303/eureka/
+  instance:
+    metadata-map:
+      test: wedatasphere
+
+management:
+  endpoints:
+    web:
+      exposure:
+        include: refresh,info
+logging:
+  config: classpath:log4j2.xml
diff --git a/exchangis-server/src/main/resources/exchangis.properties b/exchangis-server/src/main/resources/exchangis.properties
new file mode 100644
index 000000000..4be93901f
--- /dev/null
+++ b/exchangis-server/src/main/resources/exchangis.properties
@@ -0,0 +1,57 @@
+#
+# Copyright 2019 WeBank
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+#
+
+wds.linkis.test.mode=true
+#wds.linkis.test.mode=false
+
+wds.linkis.server.mybatis.datasource.url=jdbc:mysql://localhost:3306/exchangis_v3?useSSL=false&characterEncoding=UTF-8
+
+wds.linkis.server.mybatis.datasource.username=
+
+wds.linkis.server.mybatis.datasource.password=
+
+wds.linkis.log.clear=true
+
+wds.linkis.server.version=v1
+
+## datasource client
+wds.exchangis.datasource.client.serverurl=
+wds.exchangis.datasource.client.authtoken.key=hdfs
+wds.exchangis.datasource.client.authtoken.value=exchangis-auth
+wds.exchangis.datasource.client.dws.version=v1
+
+wds.exchangis.datasource.extension.dir=exchangis-extds/
+
+##restful
+wds.linkis.server.restful.scan.packages=com.webank.wedatasphere.exchangis.datasource.server.restful.api,\
+  com.webank.wedatasphere.exchangis.project.server.restful,\
+  com.webank.wedatasphere.exchangis.job.server.web,\
+  com.webank.wedatasphere.exchangis.metrics.web
+wds.linkis.server.mybatis.mapperLocations=classpath*:com/webank/wedatasphere/dss/framework/appconn/dao/impl/*.xml,classpath*:com/webank/wedatasphere/dss/workflow/dao/impl/*.xml,classpath*:com/webank/wedatasphere/exchangis/job/server/mapper/impl/*.xml
+
+wds.linkis.server.mybatis.BasePackage=com.webank.wedatasphere.exchangis.dao,\
+  com.webank.wedatasphere.exchangis.project.server.dao,\
+  com.webank.wedatasphere.linkis.configuration.dao,\
+  com.webank.wedatasphere.dss.framework.appconn.dao,\
+  com.webank.wedatasphere.dss.workflow.dao,\
+  com.webank.wedatasphere.linkis.metadata.dao,\
+  com.webank.wedatasphere.exchangis.job.server.mapper,\
+  com.webank.wedatasphere.exchangis.metrics.dao
+
+
+
diff --git a/exchangis-server/src/main/resources/log4j.properties b/exchangis-server/src/main/resources/log4j.properties
new file mode 100644
index 000000000..0807e6087
--- /dev/null
+++ b/exchangis-server/src/main/resources/log4j.properties
@@ -0,0 +1,37 @@
+#
+# Copyright 2019 WeBank
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+#
+
+### set log levels ###
+
+log4j.rootCategory=INFO,console
+
+log4j.appender.console=org.apache.log4j.ConsoleAppender
+log4j.appender.console.Threshold=INFO
+log4j.appender.console.layout=org.apache.log4j.PatternLayout
+#log4j.appender.console.layout.ConversionPattern= %d{ISO8601} %-5p (%t) [%F:%M(%L)] - %m%n
+log4j.appender.console.layout.ConversionPattern= %d{ISO8601} %-5p (%t) %p %c{1} - %m%n
+
+
+log4j.appender.com.webank.bdp.ide.core=org.apache.log4j.DailyRollingFileAppender
+log4j.appender.com.webank.bdp.ide.core.Threshold=INFO
+log4j.additivity.com.webank.bdp.ide.core=false
+log4j.appender.com.webank.bdp.ide.core.layout=org.apache.log4j.PatternLayout
+log4j.appender.com.webank.bdp.ide.core.Append=true
+log4j.appender.com.webank.bdp.ide.core.File=logs/linkis.log
+log4j.appender.com.webank.bdp.ide.core.layout.ConversionPattern= %d{ISO8601} %-5p (%t) [%F:%M(%L)] - %m%n
+
+log4j.logger.org.springframework=INFO
diff --git a/exchangis-server/src/main/resources/log4j2.xml b/exchangis-server/src/main/resources/log4j2.xml new file mode 100644 index 000000000..5ae60f144 --- /dev/null +++ b/exchangis-server/src/main/resources/log4j2.xml @@ -0,0 +1,39 @@ + + + + + + + + + + + + + + + + + + + + + +
diff --git a/exchangis-server/src/main/scala/com/webank/wedatasphere/exchangis/server/boot/ApplicationUtils.scala b/exchangis-server/src/main/scala/com/webank/wedatasphere/exchangis/server/boot/ApplicationUtils.scala
new file mode 100644
index 000000000..a1db9d54a
--- /dev/null
+++ b/exchangis-server/src/main/scala/com/webank/wedatasphere/exchangis/server/boot/ApplicationUtils.scala
@@ -0,0 +1,49 @@
+package com.webank.wedatasphere.exchangis.server.boot
+
+import java.io.IOException
+import java.lang.management.ManagementFactory
+import java.nio.file.{Files, Paths}
+
+import org.apache.linkis.common.utils.Utils
+import org.slf4j.LoggerFactory
+
+/**
+ * Utils for application
+ */
+object ApplicationUtils {
+  val PROC_PID_FILE = "pid.file"
+
+  private lazy val LOG = LoggerFactory.getLogger(getClass)
+
+  /**
+   * Save the process id into a file
+   * @param mainProgram main program
+   * @param args arguments
+   */
+  def savePidAndRun(mainProgram: => Unit, args: String*): Unit = {
+    Utils.tryCatch {
+      mountPID(System.getProperty(PROC_PID_FILE))
+      mainProgram
+    } {
+      case e: Exception =>
+        LOG.info("The process has been shutdown: [" + e.getMessage + "]", e)
+        System.exit(1)
+    }
+  }
+
+  def mountPID(pidPath: String): Unit = {
+    Option(pidPath).foreach(path => {
+      val name: String = ManagementFactory.getRuntimeMXBean.getName
+      val pid: String = name.split("@")(0).trim
+      Files.write(Paths.get(path), pid.getBytes)
+      Runtime.getRuntime.addShutdownHook(new Thread(new Runnable {
+        override def run(): Unit = {
+          try {
+            Files.delete(Paths.get(path))
+          } catch {
+            case e: IOException => // Ignore
+          }
+        }
+      }))
+    })
+  }
+}
diff --git a/exchangis-server/src/main/scala/com/webank/wedatasphere/exchangis/server/boot/ExchangisServerApplication.scala b/exchangis-server/src/main/scala/com/webank/wedatasphere/exchangis/server/boot/ExchangisServerApplication.scala
new file mode 100644
index 000000000..365c0cf25
--- /dev/null
+++ b/exchangis-server/src/main/scala/com/webank/wedatasphere/exchangis/server/boot/ExchangisServerApplication.scala
@@ -0,0 +1,36 @@
+package com.webank.wedatasphere.exchangis.server.boot
+
+import org.apache.commons.lang.{ArrayUtils, StringUtils}
+import org.apache.linkis.DataWorkCloudApplication
+import org.apache.linkis.common.utils.Utils
+import org.apache.linkis.server.utils.LinkisMainHelper
+import
org.slf4j.{Logger, LoggerFactory}
+
+class ExchangisServerApplication {
+
+}
+
+object ExchangisServerApplication {
+
+  private val LOG: Logger = LoggerFactory.getLogger(classOf[ExchangisServerApplication])
+
+  private val MAIN_PROGRAM_NAME: String = "exchangis"
+
+  def main(args: Array[String]): Unit = {
+    LOG.info("Start to run ExchangisServerApplication")
+    ApplicationUtils.savePidAndRun {
+      val userName = Utils.getJvmUser
+      val hostName = Utils.getComputerName
+      System.setProperty("hostName", hostName)
+      System.setProperty("userName", userName)
+      val serviceName = Option(System.getProperty(LinkisMainHelper.SERVER_NAME_KEY)).getOrElse(MAIN_PROGRAM_NAME)
+      System.setProperty("spring.application.name", serviceName)
+      LinkisMainHelper.formatPropertyFiles(MAIN_PROGRAM_NAME, serviceName)
+      val allArgs: Array[String] = ArrayUtils.addAll(args.asInstanceOf[Array[Object]],
+        LinkisMainHelper.getExtraSpringOptions(MAIN_PROGRAM_NAME).asInstanceOf[Array[Object]]).asInstanceOf[Array[String]]
+      val argsString = StringUtils.join(allArgs.asInstanceOf[Array[Object]], '\n')
+      val startLog = s"Ready to start $serviceName with args: $argsString."
+      LOG.info(startLog)
+      DataWorkCloudApplication.main(allArgs)
+    }
+  }
+
+}
diff --git a/images/en_US/ch1/architecture.png b/images/en_US/ch1/architecture.png
new file mode 100644
index 000000000..73ead9581
Binary files /dev/null and b/images/en_US/ch1/architecture.png differ
diff --git a/images/en_US/ch1/code.png b/images/en_US/ch1/code.png
new file mode 100644
index 000000000..cec5ef68c
Binary files /dev/null and b/images/en_US/ch1/code.png differ
diff --git a/images/en_US/ch1/communication.png b/images/en_US/ch1/communication.png
new file mode 100644
index 000000000..d8f0dc7e5
Binary files /dev/null and b/images/en_US/ch1/communication.png differ
diff --git a/images/zh_CN/ch1/Hive_datasource_config.png b/images/zh_CN/ch1/Hive_datasource_config.png
new file mode 100644
index 000000000..ec77d8700
Binary files /dev/null and b/images/zh_CN/ch1/Hive_datasource_config.png differ
diff --git a/images/zh_CN/ch1/MySQL_datasource_config.png b/images/zh_CN/ch1/MySQL_datasource_config.png
new file mode 100644
index 000000000..cfb8b1a0c
Binary files /dev/null and b/images/zh_CN/ch1/MySQL_datasource_config.png differ
diff --git a/images/zh_CN/ch1/appconn_pro_create.png b/images/zh_CN/ch1/appconn_pro_create.png
new file mode 100644
index 000000000..e3185ffc4
Binary files /dev/null and b/images/zh_CN/ch1/appconn_pro_create.png differ
diff --git a/images/zh_CN/ch1/appconn_pro_sqoop.png b/images/zh_CN/ch1/appconn_pro_sqoop.png
new file mode 100644
index 000000000..c2d70cc78
Binary files /dev/null and b/images/zh_CN/ch1/appconn_pro_sqoop.png differ
diff --git a/images/zh_CN/ch1/appconn_pro_sqoop_sync.jpg b/images/zh_CN/ch1/appconn_pro_sqoop_sync.jpg
new file mode 100644
index 000000000..069e69a7b
Binary files /dev/null and b/images/zh_CN/ch1/appconn_pro_sqoop_sync.jpg differ
diff --git a/images/zh_CN/ch1/appconn_pro_sync.jpg b/images/zh_CN/ch1/appconn_pro_sync.jpg
new file mode 100644
index 000000000..f1519b9e5
Binary files /dev/null and b/images/zh_CN/ch1/appconn_pro_sync.jpg differ
diff --git a/images/zh_CN/ch1/architecture.png b/images/zh_CN/ch1/architecture.png
new file mode 100644
index 000000000..a446fc099
Binary files /dev/null and b/images/zh_CN/ch1/architecture.png differ
diff --git a/images/zh_CN/ch1/authority_group.png b/images/zh_CN/ch1/authority_group.png
new file mode 100644
index 000000000..6b8cacbc1
Binary files /dev/null and
b/images/zh_CN/ch1/authority_group.png differ diff --git a/images/zh_CN/ch1/code.png b/images/zh_CN/ch1/code.png new file mode 100644 index 000000000..cec5ef68c Binary files /dev/null and b/images/zh_CN/ch1/code.png differ diff --git a/images/zh_CN/ch1/communication.png b/images/zh_CN/ch1/communication.png new file mode 100644 index 000000000..d8f0dc7e5 Binary files /dev/null and b/images/zh_CN/ch1/communication.png differ diff --git a/images/zh_CN/ch1/data_source_field_mapping.png b/images/zh_CN/ch1/data_source_field_mapping.png new file mode 100644 index 000000000..53d139080 Binary files /dev/null and b/images/zh_CN/ch1/data_source_field_mapping.png differ diff --git a/images/zh_CN/ch1/data_source_insert_way.png b/images/zh_CN/ch1/data_source_insert_way.png new file mode 100644 index 000000000..1cfad3578 Binary files /dev/null and b/images/zh_CN/ch1/data_source_insert_way.png differ diff --git a/images/zh_CN/ch1/data_source_list.png b/images/zh_CN/ch1/data_source_list.png new file mode 100644 index 000000000..c0cf332ed Binary files /dev/null and b/images/zh_CN/ch1/data_source_list.png differ diff --git a/images/zh_CN/ch1/data_source_model_list.png b/images/zh_CN/ch1/data_source_model_list.png new file mode 100644 index 000000000..e4acbbec6 Binary files /dev/null and b/images/zh_CN/ch1/data_source_model_list.png differ diff --git a/images/zh_CN/ch1/data_source_model_new.png b/images/zh_CN/ch1/data_source_model_new.png new file mode 100644 index 000000000..4399c7fcf Binary files /dev/null and b/images/zh_CN/ch1/data_source_model_new.png differ diff --git a/images/zh_CN/ch1/data_source_new.png b/images/zh_CN/ch1/data_source_new.png new file mode 100644 index 000000000..026f02529 Binary files /dev/null and b/images/zh_CN/ch1/data_source_new.png differ diff --git a/images/zh_CN/ch1/data_source_select.png b/images/zh_CN/ch1/data_source_select.png new file mode 100644 index 000000000..11c813d67 Binary files /dev/null and b/images/zh_CN/ch1/data_source_select.png differ diff --git a/images/zh_CN/ch1/datasource_client_class_relation.png b/images/zh_CN/ch1/datasource_client_class_relation.png new file mode 100644 index 000000000..2654806f2 Binary files /dev/null and b/images/zh_CN/ch1/datasource_client_class_relation.png differ diff --git a/images/zh_CN/ch1/datasource_client_create.png b/images/zh_CN/ch1/datasource_client_create.png new file mode 100644 index 000000000..4c964a29d Binary files /dev/null and b/images/zh_CN/ch1/datasource_client_create.png differ diff --git a/images/zh_CN/ch1/datasource_client_create2.png b/images/zh_CN/ch1/datasource_client_create2.png new file mode 100644 index 000000000..f00401afd Binary files /dev/null and b/images/zh_CN/ch1/datasource_client_create2.png differ diff --git a/images/zh_CN/ch1/datasource_client_query.png b/images/zh_CN/ch1/datasource_client_query.png new file mode 100644 index 000000000..d973f8494 Binary files /dev/null and b/images/zh_CN/ch1/datasource_client_query.png differ diff --git a/images/zh_CN/ch1/datasource_client_scructure.png b/images/zh_CN/ch1/datasource_client_scructure.png new file mode 100644 index 000000000..340d00252 Binary files /dev/null and b/images/zh_CN/ch1/datasource_client_scructure.png differ diff --git a/images/zh_CN/ch1/datasource_client_update.png b/images/zh_CN/ch1/datasource_client_update.png new file mode 100644 index 000000000..1f01aab63 Binary files /dev/null and b/images/zh_CN/ch1/datasource_client_update.png differ diff --git a/images/zh_CN/ch1/datasource_func.png b/images/zh_CN/ch1/datasource_func.png new file 
mode 100644 index 000000000..3202756d2 Binary files /dev/null and b/images/zh_CN/ch1/datasource_func.png differ diff --git a/images/zh_CN/ch1/datasource_list.png b/images/zh_CN/ch1/datasource_list.png new file mode 100644 index 000000000..c0cf332ed Binary files /dev/null and b/images/zh_CN/ch1/datasource_list.png differ diff --git a/images/zh_CN/ch1/datasource_structure.png b/images/zh_CN/ch1/datasource_structure.png new file mode 100644 index 000000000..5df2c9bfa Binary files /dev/null and b/images/zh_CN/ch1/datasource_structure.png differ diff --git a/images/zh_CN/ch1/datasource_timelimit.png b/images/zh_CN/ch1/datasource_timelimit.png new file mode 100644 index 000000000..95cdef515 Binary files /dev/null and b/images/zh_CN/ch1/datasource_timelimit.png differ diff --git a/images/zh_CN/ch1/datasource_type.png b/images/zh_CN/ch1/datasource_type.png new file mode 100644 index 000000000..c7d264931 Binary files /dev/null and b/images/zh_CN/ch1/datasource_type.png differ diff --git a/images/zh_CN/ch1/datasource_ui.png b/images/zh_CN/ch1/datasource_ui.png new file mode 100644 index 000000000..18b5472cc Binary files /dev/null and b/images/zh_CN/ch1/datasource_ui.png differ diff --git a/images/zh_CN/ch1/datasource_ui_mysql.png b/images/zh_CN/ch1/datasource_ui_mysql.png new file mode 100644 index 000000000..bf17a2958 Binary files /dev/null and b/images/zh_CN/ch1/datasource_ui_mysql.png differ diff --git a/images/zh_CN/ch1/es_model_new.png b/images/zh_CN/ch1/es_model_new.png new file mode 100644 index 000000000..8f23cb5e2 Binary files /dev/null and b/images/zh_CN/ch1/es_model_new.png differ diff --git a/images/zh_CN/ch1/eureka_exchangis.png b/images/zh_CN/ch1/eureka_exchangis.png new file mode 100644 index 000000000..68ad48c6f Binary files /dev/null and b/images/zh_CN/ch1/eureka_exchangis.png differ diff --git a/images/zh_CN/ch1/exchangis1.0_entrance.png b/images/zh_CN/ch1/exchangis1.0_entrance.png new file mode 100644 index 000000000..e779bf790 Binary files /dev/null and b/images/zh_CN/ch1/exchangis1.0_entrance.png differ diff --git a/images/zh_CN/ch1/executor_management.png b/images/zh_CN/ch1/executor_management.png new file mode 100644 index 000000000..77d7bec3e Binary files /dev/null and b/images/zh_CN/ch1/executor_management.png differ diff --git a/images/zh_CN/ch1/frontend_view.png b/images/zh_CN/ch1/frontend_view.png new file mode 100644 index 000000000..bad523915 Binary files /dev/null and b/images/zh_CN/ch1/frontend_view.png differ diff --git a/images/zh_CN/ch1/hive_model_new.png b/images/zh_CN/ch1/hive_model_new.png new file mode 100644 index 000000000..a7b41eb14 Binary files /dev/null and b/images/zh_CN/ch1/hive_model_new.png differ diff --git a/images/zh_CN/ch1/home_page_en.png b/images/zh_CN/ch1/home_page_en.png new file mode 100644 index 000000000..a752e2984 Binary files /dev/null and b/images/zh_CN/ch1/home_page_en.png differ diff --git a/images/zh_CN/ch1/home_page_zh.png b/images/zh_CN/ch1/home_page_zh.png new file mode 100644 index 000000000..2b71f53b3 Binary files /dev/null and b/images/zh_CN/ch1/home_page_zh.png differ diff --git a/images/zh_CN/ch1/item_list.png b/images/zh_CN/ch1/item_list.png new file mode 100644 index 000000000..864adbc82 Binary files /dev/null and b/images/zh_CN/ch1/item_list.png differ diff --git a/images/zh_CN/ch1/job_backend_datasource_design.png b/images/zh_CN/ch1/job_backend_datasource_design.png new file mode 100644 index 000000000..8f766060f Binary files /dev/null and b/images/zh_CN/ch1/job_backend_datasource_design.png differ diff --git 
a/images/zh_CN/ch1/job_backend_uml_1.png b/images/zh_CN/ch1/job_backend_uml_1.png new file mode 100644 index 000000000..a482dfed7 Binary files /dev/null and b/images/zh_CN/ch1/job_backend_uml_1.png differ diff --git a/images/zh_CN/ch1/job_backend_uml_2.png b/images/zh_CN/ch1/job_backend_uml_2.png new file mode 100644 index 000000000..8d5c96ce2 Binary files /dev/null and b/images/zh_CN/ch1/job_backend_uml_2.png differ diff --git a/images/zh_CN/ch1/job_backend_uml_3.png b/images/zh_CN/ch1/job_backend_uml_3.png new file mode 100644 index 000000000..c1baca583 Binary files /dev/null and b/images/zh_CN/ch1/job_backend_uml_3.png differ diff --git a/images/zh_CN/ch1/job_backend_uml_4.png b/images/zh_CN/ch1/job_backend_uml_4.png new file mode 100644 index 000000000..c738d100b Binary files /dev/null and b/images/zh_CN/ch1/job_backend_uml_4.png differ diff --git a/images/zh_CN/ch1/job_backend_uml_5.png b/images/zh_CN/ch1/job_backend_uml_5.png new file mode 100644 index 000000000..917eafcd8 Binary files /dev/null and b/images/zh_CN/ch1/job_backend_uml_5.png differ diff --git a/images/zh_CN/ch1/job_config.png b/images/zh_CN/ch1/job_config.png new file mode 100644 index 000000000..badab61c3 Binary files /dev/null and b/images/zh_CN/ch1/job_config.png differ diff --git a/images/zh_CN/ch1/job_config_source.png b/images/zh_CN/ch1/job_config_source.png new file mode 100644 index 000000000..07c4d76e7 Binary files /dev/null and b/images/zh_CN/ch1/job_config_source.png differ diff --git a/images/zh_CN/ch1/job_es_processor.png b/images/zh_CN/ch1/job_es_processor.png new file mode 100644 index 000000000..e4ffecb43 Binary files /dev/null and b/images/zh_CN/ch1/job_es_processor.png differ diff --git a/images/zh_CN/ch1/job_frontend_1.png b/images/zh_CN/ch1/job_frontend_1.png new file mode 100644 index 000000000..e98ee4e79 Binary files /dev/null and b/images/zh_CN/ch1/job_frontend_1.png differ diff --git a/images/zh_CN/ch1/job_frontend_2.png b/images/zh_CN/ch1/job_frontend_2.png new file mode 100644 index 000000000..e73fe548d Binary files /dev/null and b/images/zh_CN/ch1/job_frontend_2.png differ diff --git a/images/zh_CN/ch1/job_frontend_3.png b/images/zh_CN/ch1/job_frontend_3.png new file mode 100644 index 000000000..c83359895 Binary files /dev/null and b/images/zh_CN/ch1/job_frontend_3.png differ diff --git a/images/zh_CN/ch1/job_frontend_4.png b/images/zh_CN/ch1/job_frontend_4.png new file mode 100644 index 000000000..74d92e95b Binary files /dev/null and b/images/zh_CN/ch1/job_frontend_4.png differ diff --git a/images/zh_CN/ch1/job_frontend_5.png b/images/zh_CN/ch1/job_frontend_5.png new file mode 100644 index 000000000..0f79f10dc Binary files /dev/null and b/images/zh_CN/ch1/job_frontend_5.png differ diff --git a/images/zh_CN/ch1/job_frontend_backend.png b/images/zh_CN/ch1/job_frontend_backend.png new file mode 100644 index 000000000..e3d714660 Binary files /dev/null and b/images/zh_CN/ch1/job_frontend_backend.png differ diff --git a/images/zh_CN/ch1/job_info.png b/images/zh_CN/ch1/job_info.png new file mode 100644 index 000000000..ba2e90351 Binary files /dev/null and b/images/zh_CN/ch1/job_info.png differ diff --git a/images/zh_CN/ch1/job_limit.png b/images/zh_CN/ch1/job_limit.png new file mode 100644 index 000000000..93020cb73 Binary files /dev/null and b/images/zh_CN/ch1/job_limit.png differ diff --git a/images/zh_CN/ch1/job_overall.png b/images/zh_CN/ch1/job_overall.png new file mode 100644 index 000000000..1d4cd99f7 Binary files /dev/null and b/images/zh_CN/ch1/job_overall.png differ diff --git 
a/images/zh_CN/ch1/job_task_list.png b/images/zh_CN/ch1/job_task_list.png new file mode 100644 index 000000000..1b3cbf77c Binary files /dev/null and b/images/zh_CN/ch1/job_task_list.png differ diff --git a/images/zh_CN/ch1/job_task_log.png b/images/zh_CN/ch1/job_task_log.png new file mode 100644 index 000000000..575d87bfa Binary files /dev/null and b/images/zh_CN/ch1/job_task_log.png differ diff --git a/images/zh_CN/ch1/job_task_mapping.png b/images/zh_CN/ch1/job_task_mapping.png new file mode 100644 index 000000000..579a4d094 Binary files /dev/null and b/images/zh_CN/ch1/job_task_mapping.png differ diff --git a/images/zh_CN/ch1/linkis_datasource_structure.png b/images/zh_CN/ch1/linkis_datasource_structure.png new file mode 100644 index 000000000..63837904b Binary files /dev/null and b/images/zh_CN/ch1/linkis_datasource_structure.png differ diff --git a/images/zh_CN/ch1/local_fs.png b/images/zh_CN/ch1/local_fs.png new file mode 100644 index 000000000..1c1f88bcb Binary files /dev/null and b/images/zh_CN/ch1/local_fs.png differ diff --git a/images/zh_CN/ch1/login.png b/images/zh_CN/ch1/login.png new file mode 100644 index 000000000..2859ab30b Binary files /dev/null and b/images/zh_CN/ch1/login.png differ diff --git a/images/zh_CN/ch1/mysql_model_new.png b/images/zh_CN/ch1/mysql_model_new.png new file mode 100644 index 000000000..cfb996fc4 Binary files /dev/null and b/images/zh_CN/ch1/mysql_model_new.png differ diff --git a/images/zh_CN/ch1/partition_value.png b/images/zh_CN/ch1/partition_value.png new file mode 100644 index 000000000..90dd9df66 Binary files /dev/null and b/images/zh_CN/ch1/partition_value.png differ diff --git a/images/zh_CN/ch1/production_center.png b/images/zh_CN/ch1/production_center.png new file mode 100644 index 000000000..c29f90138 Binary files /dev/null and b/images/zh_CN/ch1/production_center.png differ diff --git a/images/zh_CN/ch1/register_eureka.png b/images/zh_CN/ch1/register_eureka.png new file mode 100644 index 000000000..2cd1323ec Binary files /dev/null and b/images/zh_CN/ch1/register_eureka.png differ diff --git a/images/zh_CN/ch1/sftp_model_new.png b/images/zh_CN/ch1/sftp_model_new.png new file mode 100644 index 000000000..50b8bbe11 Binary files /dev/null and b/images/zh_CN/ch1/sftp_model_new.png differ diff --git a/images/zh_CN/ch1/sqoop_config.png b/images/zh_CN/ch1/sqoop_config.png new file mode 100644 index 000000000..5e4b13d52 Binary files /dev/null and b/images/zh_CN/ch1/sqoop_config.png differ diff --git a/images/zh_CN/ch1/sqoop_execute.png b/images/zh_CN/ch1/sqoop_execute.png new file mode 100644 index 000000000..f1cd7cb90 Binary files /dev/null and b/images/zh_CN/ch1/sqoop_execute.png differ diff --git a/images/zh_CN/ch1/sqoop_user_config.png b/images/zh_CN/ch1/sqoop_user_config.png new file mode 100644 index 000000000..8f47f6fde Binary files /dev/null and b/images/zh_CN/ch1/sqoop_user_config.png differ diff --git a/images/zh_CN/ch1/sub_task_manage.png b/images/zh_CN/ch1/sub_task_manage.png new file mode 100644 index 000000000..904954aab Binary files /dev/null and b/images/zh_CN/ch1/sub_task_manage.png differ diff --git a/images/zh_CN/ch1/sync_history.png b/images/zh_CN/ch1/sync_history.png new file mode 100644 index 000000000..8bbd27375 Binary files /dev/null and b/images/zh_CN/ch1/sync_history.png differ diff --git a/images/zh_CN/ch1/system_appuser.png b/images/zh_CN/ch1/system_appuser.png new file mode 100644 index 000000000..1cf2348c1 Binary files /dev/null and b/images/zh_CN/ch1/system_appuser.png differ diff --git 
a/images/zh_CN/ch1/system_appuser_bind.png b/images/zh_CN/ch1/system_appuser_bind.png new file mode 100644 index 000000000..1dbb8ef54 Binary files /dev/null and b/images/zh_CN/ch1/system_appuser_bind.png differ diff --git a/images/zh_CN/ch1/system_executive_user.png b/images/zh_CN/ch1/system_executive_user.png new file mode 100644 index 000000000..0cf17880c Binary files /dev/null and b/images/zh_CN/ch1/system_executive_user.png differ diff --git a/images/zh_CN/ch1/task_list.png b/images/zh_CN/ch1/task_list.png new file mode 100644 index 000000000..bf3c68757 Binary files /dev/null and b/images/zh_CN/ch1/task_list.png differ diff --git a/images/zh_CN/ch1/task_proccess_control.png b/images/zh_CN/ch1/task_proccess_control.png new file mode 100644 index 000000000..69d1089ec Binary files /dev/null and b/images/zh_CN/ch1/task_proccess_control.png differ diff --git a/images/zh_CN/ch1/task_type_and_engine.png b/images/zh_CN/ch1/task_type_and_engine.png new file mode 100644 index 000000000..2fa4f0703 Binary files /dev/null and b/images/zh_CN/ch1/task_type_and_engine.png differ diff --git a/images/zh_CN/ch1/transport_type.png b/images/zh_CN/ch1/transport_type.png new file mode 100644 index 000000000..a1aad3e99 Binary files /dev/null and b/images/zh_CN/ch1/transport_type.png differ diff --git a/pom.xml b/pom.xml index aa2c1c6cf..f4415ef7c 100644 --- a/pom.xml +++ b/pom.xml @@ -22,110 +22,308 @@ com.webank.wedatasphere.exchangis exchangis - 0.5.0.RELEASE + ${revision} pom exchangis Unified data exchange service - - org.springframework.boot - spring-boot-starter-parent - 2.0.1.RELEASE - - + + + Apache 2.0 License + http://www.apache.org/licenses/LICENSE-2.0.html + repo + + - UTF-8 + 1.1.3 + 1.1.2 + 1.4.0 + 1.4.0 + 0.1.0-SNAPSHOT + 1.3.0 + 3.0.0 + 3.0.0 + 2.11.12 + 4.7.1 + 1.8 + 3.8.1 + 3.8.2 + 2.6 + 2.8.5 + 2.13.4 + 2.13.4.2 + 1.9.13 + 3.1.1 + 4.5.4 + 4.5.4 + 1.9.4 UTF-8 - UTF-8 - UTF-8 - 1.8 - 2.7.2 - 1.2.1 - 6.7.1 - 3.4 - Finchley.SR1 - 16.0.1 - 1.2.0 - 2.10.2 + 5.2.12.RELEASE + 2.1.2 + 2.3.7.RELEASE + 2.2.6.RELEASE + 3.1.1 + 3.8.1 + 2.6 + 0.9.10 + 2.21 + 1.9.5 + 0.1.0-SNAPSHOT + 1.9.3 + 1.4.20 - modules/eureka - modules/common - modules/service - modules/gateway - modules/executor - assembly - + exchangis-dao + exchangis-project + exchangis-datasource + exchangis-engines + exchangis-job + exchangis-plugins + exchangis-server + assembly-package + - org.springframework.cloud - spring-cloud-dependencies - ${spring-cloud.version} - pom - import + org.scala-lang + scala-library + ${scala.version} - com.fasterxml.jackson.core - jackson-core - ${jackson.version} + org.scala-lang + scala-compiler + ${scala.version} - io.springfox - springfox-swagger2 - 2.8.0 + org.scala-lang + scala-reflect + ${scala.version} - io.springfox - springfox-swagger-ui - 2.8.0 + org.scala-lang + scalap + ${scala.version} - mysql - mysql-connector-java - 5.1.38 + commons-lang + commons-lang + ${commons.lang.version} - org.mybatis.spring.boot - mybatis-spring-boot-starter - 1.3.2 + org.apache.linkis + linkis-mybatis + ${linkis.version} - com.alibaba - druid-spring-boot-starter - 1.1.9 + org.apache.linkis + linkis-module + ${linkis.version} + + + org.springframework.boot + spring-boot-starter-tomcat + + + hibernate-validator + org.hibernate.validator + + - + - org.fusesource - sigar - 1.6.4 + org.apache.linkis + linkis-gateway-httpclient-support + ${linkis.version} - com.alibaba - fastjson - 1.2.68 + org.apache.linkis + linkis-common + ${linkis.version} - org.apache.commons - commons-lang3 - ${commons.lang3.version} + 
org.apache.linkis + linkis-protocol + ${linkis.version} + + + org.apache.linkis + linkis-datasource-client + ${linkis.datasource.version} + + + org.apache.linkis + linkis-metadata-query-common + ${linkis.datasource.version} + + + com.google.code.gson + gson + ${gson.version} - javax.servlet - javax.servlet-api - 3.1.0 + com.fasterxml.jackson.core + jackson-databind + ${jackson-databind.version} + + + org.codehaus.jackson + jackson-mapper-asl + ${org.codehaus.jackson.version} + + + org.apache.commons + commons-math3 + ${commons.math.version} - com.google.guava - guava - ${guava-version} + xstream + com.thoughtworks.xstream + ${xstream.version} - + + + + + org.apache.maven.plugins + maven-deploy-plugin + ${maven-deploy-plugin.version} + + + org.apache.maven.plugins + maven-enforcer-plugin + ${maven-enforcer-plugin.version} + + + enforce-versions + + enforce + + + + + ${maven.version} + + + ${java.version} + + + + org.jboss.netty + + true + + + + + + + + org.apache.maven.plugins + maven-compiler-plugin + 2.5.1 + + ${jdk.compile.version} + ${jdk.compile.version} + + + + + + + + org.apache.maven.plugins + maven-site-plugin + 3.3 + + + net.alchim31.maven + scala-maven-plugin + ${scala-maven-plugin.version} + + + + + + + + + + + + + + eclipse-add-source + + add-source + + + + scala-compile-first + process-resources + + compile + + + + scala-test-compile-first + process-test-resources + + testCompile + + + + attach-scaladocs + verify + + doc-jar + + + + + + org.apache.maven.plugins + maven-jar-plugin + ${maven-jar-plugin.version} + + + org.codehaus.mojo + flatten-maven-plugin + ${flatten-maven-plugin.version} + + true + resolveCiFriendliesOnly + + + + flatten + + flatten + + process-resources + + + flatten.clean + + clean + + clean + + + + + + + + org.codehaus.mojo + flatten-maven-plugin + + + + diff --git a/web/.eslintrc.js b/web/.eslintrc.js new file mode 100644 index 000000000..6300816f8 --- /dev/null +++ b/web/.eslintrc.js @@ -0,0 +1,14 @@ +module.exports = { + extends: ['@webank/eslint-config-webank/vue.js'], + overrides: [ + { + files: [ + '**/__tests__/*.{j,t}s?(x)', + '**/tests/unit/**/*.spec.{j,t}s?(x)' + ] + } + ], + env: { + jest: true + } +}; diff --git a/web/.fes.js b/web/.fes.js new file mode 100644 index 000000000..3330a97b9 --- /dev/null +++ b/web/.fes.js @@ -0,0 +1,127 @@ +export default { + html: { + title: 'Exchangis', + favicon: './src/assets/img/E.jpg', + }, + publicPath: "./", + access: { + roles: { + admin: ["*"], + }, + }, + router: { + mode: "hash", + routes: [ + { + path: "/", + redirect: "/projectManage", + }, + { + path: "/projectManage", + component: "@/pages/projectManage", + meta: { + name: "projectManage", + title: "globalMenu.projectManage", + subs: [ + '/jobManagement' + ] + }, + }, + { + path: "/dataSourceManage", + component: "@/pages/dataSourceManage", + meta: { + name: "dataSourceManage", + title: "globalMenu.dataSourceManage", + }, + }, + { + path: "/jobManagement", + component: "@/pages/jobManagement", + meta: { + name: "jobManagement", + title: "globalMenu.jobManagement", + }, + }, + { + path: "/synchronizationHistory", + component: "@/pages/synchronizationHistory", + meta: { + name: "synchronizationHistory", + title: "globalMenu.synchronizationHistory", + }, + }, + { + path: "/homePage", + component: "@/pages/homePage", + meta: { + name: "homePage", + title: "globalMenu.homePage", + }, + }, + { + path: "/childJobManagement", + component: "@/pages/jobManagement/spaIndex", + meta: { + name: "synchronizationHistory", + title: 
"globalMenu.synchronizationHistory", + } + } + ], + }, + request: { + dataField: "data", + }, + extraBabelPlugins: [ + [ + "import", + { libraryName: "ant-design-vue", libraryDirectory: "es", style: "css" }, + ], + ], + layout: { + navigation: 'side', + theme: 'light', + title: "", + logo: 'data:image/jpeg;base64,/9j/4AAQSkZJRgABAQAAAQABAAD/2wBDAAYEBQYFBAYGBQYHBwYIChAKCgkJChQODwwQFxQYGBcUFhYaHSUfGhsjHBYWICwgIyYnKSopGR8tMC0oMCUoKSj/2wBDAQcHBwoIChMKChMoGhYaKCgoKCgoKCgoKCgoKCgoKCgoKCgoKCgoKCgoKCgoKCgoKCgoKCgoKCgoKCgoKCgoKCj/wAARCADPBDsDASIAAhEBAxEB/8QAHwAAAQUBAQEBAQEAAAAAAAAAAAECAwQFBgcICQoL/8QAtRAAAgEDAwIEAwUFBAQAAAF9AQIDAAQRBRIhMUEGE1FhByJxFDKBkaEII0KxwRVS0fAkM2JyggkKFhcYGRolJicoKSo0NTY3ODk6Q0RFRkdISUpTVFVWV1hZWmNkZWZnaGlqc3R1dnd4eXqDhIWGh4iJipKTlJWWl5iZmqKjpKWmp6ipqrKztLW2t7i5usLDxMXGx8jJytLT1NXW19jZ2uHi4+Tl5ufo6erx8vP09fb3+Pn6/8QAHwEAAwEBAQEBAQEBAQAAAAAAAAECAwQFBgcICQoL/8QAtREAAgECBAQDBAcFBAQAAQJ3AAECAxEEBSExBhJBUQdhcRMiMoEIFEKRobHBCSMzUvAVYnLRChYkNOEl8RcYGRomJygpKjU2Nzg5OkNERUZHSElKU1RVVldYWVpjZGVmZ2hpanN0dXZ3eHl6goOEhYaHiImKkpOUlZaXmJmaoqOkpaanqKmqsrO0tba3uLm6wsPExcbHyMnK0tPU1dbX2Nna4uPk5ebn6Onq8vP09fb3+Pn6/9oADAMBAAIRAxEAPwD6pooooAK+ev2iPEV/aeLLGx07ULm2SK0DusErR5Zmbrg88KK+ha+Q/jLf/wBo/ErW5AcpFItuB6bFCn9Qfzr0csgpVrvojkxsuWnoYR8S67n/AJDOpf8AgXJ/jSf8JNrv/QZ1L/wLk/xrHNFfQckex5PO+5sf8JNrv/QZ1L/wLk/xo/4SbXf+gzqX/gXJ/jWPRRyR7BzvubH/AAk2u/8AQZ1L/wAC5P8AGj/hJtd/6DOpf+Bcn+NY9FHJHsHO+5sf8JNrv/QZ1L/wLk/xo/4SbXf+gzqX/gXJ/jWPRRyR7BzvubH/AAk2u/8AQZ1L/wAC5P8AGj/hJtd/6DOpf+Bcn+NY9FHJHsHO+5sf8JNrv/QZ1L/wLk/xo/4SbXv+gzqX/gXJ/jWPRRyR7Bzvud18OtU1zVfHOh2kmsak8b3SNIrXUhDIp3MOvopr64r5c/Z2sftXxDWcjiztZZs+5wn/ALOa+o68DNJL2qiuiPUwKfs7vqFFFFeadoUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABXz/APEj4r+IdF8b6npmkSWy2VsyovmQhjnYN3Of72a+gK+IfE9//aniPVb/ADlbq6klX6Fia9PLKMak5OaukjixtRwilF2bO5Pxq8YZ/wBfY/8AgOP8aP8AhdXjD/nvY/8AgOP8a8yor2PqlH+Rfced9YqfzM9TsvjF4zvL23tYprLzJnWNf9GHVjgd/evp+vjv4TWJ1D4jaDARuC3InP8A2zBf/wBlr7Erx8zhCnOMYJLQ9HBSlKLlJhRRRXmHaFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAVn6/rWn+H9Lm1HV7lLa0i6u3c9gB1J9hWhXyn8cPFMviHxhPaRSE6bprm3iUHhnH33/Pj6AetdWEwzxFTl6dTDEVvYw5up1viL4+XBmZPD2lRJF/DNeksx/4ApGPzNc0fjb4vMmQ9gBnO0W/H065rzI0vFfQQwNCCty3PLliasne57z4Q+OzS3cdt4psoYonIX7Xa5AT3ZCTkepB/CvdYZEmiSWJ1eN1DKynIYHoRXwjivq34DahJffDexWZizWskluCeu0HIH4BgPwry8xwkKSU6eh24WvKbcZHodFFFeSdwUUUUAFFFFABRRRQAUUUjuqDLsFHqTigBaKp/wBqaf8A8/1r/wB/l/xqSK/tJjiG6gkPosgP9admK6LFFFFIYUUUUAFFFFABRRRQAUUUUAFFFFACMwVSzEBQMkntXw5rN42oaxe3rZ3XM8kxzycsxb+tfY3j6+Gm+Cdcus4ZLOQKf9oqQv6kV8W/xV7WUR+KXoebj5fChp60UHqaK9o84KMGivTPh58KrjxloL6mmppZoJmhVGhL7sAHOcjuSPwrOpVhSjzVHZFQpyqPljueZ0V7j/wz9d/9DBB/4Cn/AOKo/wCGfrv/AKGCD/wFP/xVc/1/D/z/AIP/ACN/qdXseHUV7j/wz9d/9DBB/wCAp/8AiqP+Gfrv/oYIP/AU/wDxVH1/D/z/AIP/ACD6nV7Hh1Fe4/8ADP13/wBDBB/4Cn/4qj/hn67/AOhgg/8AAU//ABVH9oYb+b8H/kH1Or2PDqK9x/4Z+u/+hgg/8BT/APFUf8M/Xf8A0MEH/gKf/iqP7Qw/834P/IPqdXsWf2X7E417UGHB8qBD/wB9Fv8A2WveK5H4Y+Dv+EK0CbT2ulupJbhp2lWPZ1VQBjJ6bf1rrq+fxVVVasprY9ShTdOmosKKKK5zYKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigDI8Y3/wDZfhPWL7dtaC0ldT/tBTj9cV8SHpX1f8er/wCxfDTUEBw11JFAPxcMf0U18oHpXvZTC1OUu7PLx8vfSCgdaKB1r1jgPWP2brEXHjm4umHFrZuwOP4mZVH6Fq+mq8Q/Zgsduna7flf9ZLHAp/3QWP8A6GK9vr5nMZc2IflY9nBx5aSCiiiuE6gooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKACiiigAooooAKKKKAKetXn9naNf3vX7NbyTf98qT/Svh2aR5pHkkYs7MWJPcnrX3Drll/aeiahYEgC6t5IOf9pSv9a+Ir21ls7ya1ukMU8LtHIjdVYHBFe1k9vf+X6nn
+ footer: "",
+ multiTabs: false,
+ menus: [
+ /*{
+ name: "homePage",
+ icon: 'home'
+ },*/
+ {
+ name: "projectManage",
+ icon: 'project'
+ },
+ {
+ name: "dataSourceManage",
+ icon: 'database'
+ },
+ {
+ name: "synchronizationHistory",
+ icon: 'history'
+ },
+ ],
+ },
+ devServer: {
+ host: "0.0.0.0",
+ port: 8000
+ },
+ proxy: {
+ "/api": {
+ //target: "http://192.168.0.157:9321/",
+ //target: "http://172.24.8.51:9321/",
+ //target: "http://124.70.31.149:20088",
+ target: "http://43.132.221.25:8088/",
+ changeOrigin: true,
+ pathRewrite: { "^/api": "/api" },
+ },
+ },
+ locale: {
+ locale: "zh-CN", // default locale
+ fallbackLocale: "zh-CN", // set fallback locale
+ baseNavigator: true, // enable browser language detection
+ share: true, // whether the user can switch languages manually
+ },
+};
diff --git a/web/.fes.prod.js b/web/.fes.prod.js new file mode 100644 index 000000000..30652ce9c --- /dev/null +++ b/web/.fes.prod.js @@ -0,0 +1,5 @@
+// .fes.js only manages compile-time configuration and may only use plain Objects
+
+export default {
+ publicPath: "",
+};
diff --git a/web/.gitignore b/web/.gitignore new file mode 100644 index 000000000..fb563d92b --- /dev/null +++ b/web/.gitignore @@ -0,0 +1,15 @@
+.DS_Store
+.cache
+
+# dependencies
+/node_modules
+/coverage
+
+# fes
+/src/.fes
+/src/.fes-production
+/src/.fes-test
+/.env.local
+/dist/
+# vs code
+.vscode
diff --git a/web/README.md b/web/README.md new file mode 100644 index 000000000..b7500960c --- /dev/null +++ b/web/README.md @@ -0,0 +1,3 @@
+# Exchangis Web
+
+A management system built on [FES](https://webank.gitee.io/fes.js/)
diff --git a/web/mock.js b/web/mock.js new file mode 100644 index 000000000..7ea99ef8f --- /dev/null +++ b/web/mock.js @@ -0,0 +1,147 @@
+export default function ({ cgiMock, mockjs, utils }) {
+ const { Random } = mockjs;
+
+ // Test cases for proxy and mock
+ cgiMock("/movie/in_theaters_mock", (req, res) => {
+ setTimeout(() => {
+ res.send(
+ JSON.stringify({
+ code: "0",
+ msg: "",
+ result: {
+ text: "movie: movie/in_theaters_mock ~~~~~",
+ },
+ })
+ );
+ }, 2000);
+ });
+ cgiMock("/movie/test_mock", (req, res) => {
+ setTimeout(() => {
+ res.send(
+ JSON.stringify({
+ code: "0",
+ msg: "",
+ result: {
+ text: "mock: movie/test_mock",
+ },
+ })
+ );
+ }, 2000);
+ });
+
+ // Test case: after mock.js changes, replaying the request should pull the latest data
+ cgiMock("/watchtest", (req, res) => {
+ res.send(
+ JSON.stringify({
+ code: "0",
+ msg: "",
+ result: {
+ text: "Testing mock watch via register: initial state",
+ },
+ })
+ );
+ });
+
+ // Return a number
+ // cgiMock('/number', 666);
+ cgiMock("/number", 999);
+
+ // Return a JSON object
+ cgiMock({
+ url: "/json",
+ result: {
+ code: "400101",
+ msg: "Invalid request: Missing cookie 'wb_app_id' for method parameter of type String",
+ transactionTime: "20170309171146",
+ success: false,
+ },
+ });
+
+ // Use mock.js to generate random text
+ cgiMock("/text", Random.cparagraph());
+
+ //
Return a string; use mock.js to generate random characters
+ cgiMock(
+ "/random",
+ mockjs.mock({
+ "string|1-10": "★",
+ })
+ );
+
+ // Regex-matched URL, return a string
+ cgiMock(/\/abc|\/xyz/, "regexp test!");
+
+ // If the option.result parameter is a function, it can build a custom response; it receives the req and res objects wrapped by express.
+ cgiMock(/\/function$/, (req, res) => {
+ res.send("function test");
+ });
+
+ // Return file contents (readFileSync)
+ cgiMock("/file", utils.file("./package.json"));
+
+ // More complex rule configuration
+ cgiMock({
+ url: /\/who/,
+ method: "GET",
+ result(req, res) {
+ if (req.query.name === "kwan") {
+ res.json({ kwan: "Lonely Patient" });
+ } else {
+ res.send("Nooooooooooo");
+ }
+ },
+ headers: {
+ "Content-Type": "text/plain",
+ "Content-Length": "123",
+ ETag: "12345",
+ },
+ cookies: [
+ {
+ name: "myname",
+ value: "kwan",
+ maxAge: 900000,
+ httpOnly: true,
+ },
+ ],
+ });
+
+ // Request that carries parameters
+ cgiMock("/v2/audit/list", (req, res) => {
+ const { currentPage, pageSize, isAudited } = req.body;
+ res.send({
+ code: "0",
+ msg: "",
+ data: {
+ currentPage,
+ pageSize,
+ totalPage: 2,
+ totalCount: 12,
+ pageData: Array.from({ length: pageSize }, () => ({
+ title: Random.title(),
+ authorName: Random.cname(),
+ authorId: Random.name(),
+ createTime: Date.now(),
+ updateTime: Date.now(),
+ readCount: Random.integer(60, 1000),
+ favoriteCount: Random.integer(1, 50),
+ postId: "12323",
+ serviceTag: "business type",
+ productTag: "product type",
+ requestTag: "requirement type",
+ handleTag: "accepted",
+ postType: "voice",
+ postStatus: isAudited ? "pass" : "auditing",
+ auditStatus: "audit1",
+ })),
+ },
+ });
+ });
+
+ // multipart/form-data type
+ cgiMock("/v2/upload", (req, res) => {
+ res.send({
+ code: "0",
+ msg: "File uploaded successfully",
+ });
+ });
+}
diff --git a/web/package.json b/web/package.json new file mode 100644 index 000000000..41f210b83 --- /dev/null +++ b/web/package.json @@ -0,0 +1,69 @@
+{
+ "name": "@fesjs/template",
+ "version": "2.0.0",
+ "description": "fes project template",
+ "scripts": {
+ "build": "fes build",
+ "prod": "FES_ENV=prod fes build",
+ "analyze": "ANALYZE=1 fes build",
+ "dev": "fes dev",
+ "test:unit": "fes test:unit"
+ },
+ "keywords": [
+ "admin console",
+ "fes",
+ "fast",
+ "easy",
+ "strong"
+ ],
+ "files": [
+ ".eslintrc.js",
+ ".gitignore",
+ ".fes.js",
+ ".fes.prod.js",
+ "mock.js",
+ "package.json",
+ "README.md",
+ "tsconfig.json",
+ "/src",
+ "/config"
+ ],
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/WeBankFinTech/fes.js.git",
+ "directory": "packages/fes-template"
+ },
+ "author": "harrywan",
+ "license": "MIT",
+ "bugs": {
+ "url": "https://github.com/WeBankFinTech/fes.js/issues"
+ },
+ "homepage": "https://github.com/WeBankFinTech/fes.js#readme",
+ "publishConfig": {
+ "access": "public"
+ },
+ "devDependencies": {
+ "@webank/eslint-config-webank": "0.3.0"
+ },
+ "dependencies": {
+ "@ant-design/icons-vue": "^6.0.1",
+ "@fesjs/fes": "^2.0.0",
+ "@fesjs/plugin-access": "^2.0.0",
+ "@fesjs/plugin-enums": "^2.0.0",
+ "@fesjs/plugin-layout": "^2.0.8",
+ "@fesjs/plugin-locale": "^2.0.3",
+ "@fesjs/plugin-model": "^2.0.0",
+ "@fesjs/plugin-request": "^2.0.2",
+ "@form-create/ant-design-vue": "^3.0.0-alpha.2",
+ "@vue/compiler-sfc": "3.1.4",
+ "ant-design-vue": "^2.2.7",
+ "echarts": "^5.2.1",
+ "lodash-es": "4.17.21",
+ "moment": "^2.29.4",
+ "monaco-editor": "^0.34.0",
+ "monaco-editor-webpack-plugin": "^7.0.1",
+ "vue": "3.1.4",
+ "vue-request": "^1.2.0"
+ },
+ "private": true
+}
diff --git a/web/src/app.js b/web/src/app.js new file mode 100644 index 000000000..6bb5903f0 --- /dev/null +++ b/web/src/app.js @@ -0,0 +1,81 @@
+/*
+ * @Description:
+ * @Author: sueRim
+ * @Date: 2022-05-13 10:19:27
diff --git a/web/package.json b/web/package.json
new file mode 100644
index 000000000..41f210b83
--- /dev/null
+++ b/web/package.json
@@ -0,0 +1,69 @@
+{
+    "name": "@fesjs/template",
+    "version": "2.0.0",
+    "description": "fes project template",
+    "scripts": {
+        "build": "fes build",
+        "prod": "FES_ENV=prod fes build",
+        "analyze": "ANALYZE=1 fes build",
+        "dev": "fes dev",
+        "test:unit": "fes test:unit"
+    },
+    "keywords": [
+        "admin console",
+        "fes",
+        "fast",
+        "easy",
+        "strong"
+    ],
+    "files": [
+        ".eslintrc.js",
+        ".gitignore",
+        ".fes.js",
+        ".fes.prod.js",
+        "mock.js",
+        "package.json",
+        "README.md",
+        "tsconfig.json",
+        "/src",
+        "/config"
+    ],
+    "repository": {
+        "type": "git",
+        "url": "git+https://github.com/WeBankFinTech/fes.js.git",
+        "directory": "packages/fes-template"
+    },
+    "author": "harrywan",
+    "license": "MIT",
+    "bugs": {
+        "url": "https://github.com/WeBankFinTech/fes.js/issues"
+    },
+    "homepage": "https://github.com/WeBankFinTech/fes.js#readme",
+    "publishConfig": {
+        "access": "public"
+    },
+    "devDependencies": {
+        "@webank/eslint-config-webank": "0.3.0"
+    },
+    "dependencies": {
+        "@ant-design/icons-vue": "^6.0.1",
+        "@fesjs/fes": "^2.0.0",
+        "@fesjs/plugin-access": "^2.0.0",
+        "@fesjs/plugin-enums": "^2.0.0",
+        "@fesjs/plugin-layout": "^2.0.8",
+        "@fesjs/plugin-locale": "^2.0.3",
+        "@fesjs/plugin-model": "^2.0.0",
+        "@fesjs/plugin-request": "^2.0.2",
+        "@form-create/ant-design-vue": "^3.0.0-alpha.2",
+        "@vue/compiler-sfc": "3.1.4",
+        "ant-design-vue": "^2.2.7",
+        "echarts": "^5.2.1",
+        "lodash-es": "4.17.21",
+        "moment": "^2.29.4",
+        "monaco-editor": "^0.34.0",
+        "monaco-editor-webpack-plugin": "^7.0.1",
+        "vue": "3.1.4",
+        "vue-request": "^1.2.0"
+    },
+    "private": true
+}
diff --git a/web/src/app.js b/web/src/app.js
new file mode 100644
index 000000000..6bb5903f0
--- /dev/null
+++ b/web/src/app.js
@@ -0,0 +1,81 @@
+/*
+ * @Description:
+ * @Author: sueRim
+ * @Date: 2022-05-13 10:19:27
+ */
+import { pum as pumApi, request as ajax, access as accessInstance, getRouter } from "@fesjs/fes";
+import { message, Modal, ConfigProvider } from "ant-design-vue";
+import zhCN from "ant-design-vue/es/locale/zh_CN";
+import PageLoading from "@/components/PageLoading";
+import UserCenter from "@/components/UserCenter";
+import { BASE_URL } from "@/common/constants";
+import { loadAllRegister } from "./register";
+
+export const beforeRender = {
+    loading: <PageLoading />,
+    action() {
+        const { setRole } = accessInstance;
+        return new Promise((resolve) => {
+            setTimeout(() => {
+                setRole("admin");
+                // Initialize the app's global state; read it with useModel('@@initialState'). See @/components/UserCenter for usage.
+                resolve({
+                    userName: "harrywan",
+                });
+            }, 1000);
+        });
+    },
+};
+
+export function rootContainer(Container) {
+    return () => (
+        <ConfigProvider locale={zhCN}>
+            <Container />
+        </ConfigProvider>
+    );
+}
+
+// Customize the request (axios) plugin
+export const request = {
+    baseURL: BASE_URL,
+    responseDataAdaptor: (data) => {
+        data.code = String(data.status);
+        return data;
+    },
+    errorHandler: {
+        default(error) {
+            if (error.message && error.message.indexOf('timeout of') > -1) {
+                return message.warning('请求超时')
+            }
+            if (error?.response?.data?.data?.errorMsg) {
+                return message.error(error.response.data.data.errorMsg.desc);
+            }
+            console.log(error, error?.response)
+            if (error?.type === 'REPEAT') return // Deduplicated repeat request; no toast
+            message.error(error?.response?.data?.message || error?.data?.message || "系统异常");
+        },
+    },
+    timeout: 30000
+};
+
+// Register the custom layout header here
+export const layout = {
+    customHeader: <UserCenter />,
+};
+
+export function onAppCreated({ app }) {
+    loadAllRegister(app);
+}
+
+const localStr = localStorage.getItem('fes_locale')
+if (localStr !== 'zh-CN') {
+    localStorage.setItem('fes_locale', 'zh-CN')
+    document.location = '/'
+}
+
+
+window.addEventListener('beforeunload', function () {
+    if (localStorage.getItem('exchangis_environment')) {
+        localStorage.removeItem('exchangis_environment');
+    }
+});
\ No newline at end of file
diff --git a/web/src/assets/demo.css b/web/src/assets/demo.css
new file mode 100644
index 000000000..a67054a0a
--- /dev/null
+++ b/web/src/assets/demo.css
@@ -0,0 +1,539 @@
+/* Logo font */
+@font-face {
+  font-family: "iconfont logo";
+  src: url('https://at.alicdn.com/t/font_985780_km7mi63cihi.eot?t=1545807318834');
+  src: url('https://at.alicdn.com/t/font_985780_km7mi63cihi.eot?t=1545807318834#iefix') format('embedded-opentype'),
+    url('https://at.alicdn.com/t/font_985780_km7mi63cihi.woff?t=1545807318834') format('woff'),
+    url('https://at.alicdn.com/t/font_985780_km7mi63cihi.ttf?t=1545807318834') format('truetype'),
+    url('https://at.alicdn.com/t/font_985780_km7mi63cihi.svg?t=1545807318834#iconfont') format('svg');
+}
+
+.logo {
+  font-family: "iconfont logo";
+  font-size: 160px;
+  font-style: normal;
+  -webkit-font-smoothing: antialiased;
+  -moz-osx-font-smoothing: grayscale;
+}
+
+/* tabs */
+.nav-tabs {
+  position: relative;
+}
+
+.nav-tabs .nav-more {
+  position: absolute;
+  right: 0;
+  bottom: 0;
+  height: 42px;
+  line-height: 42px;
+  color: #666;
+}
+
+#tabs {
+  border-bottom: 1px solid #eee;
+}
+
+#tabs li {
+  cursor: pointer;
+  width: 100px;
+  height: 40px;
+  line-height: 40px;
+  text-align: center;
+  font-size: 16px;
+  border-bottom: 2px solid transparent;
+  position: relative;
+  z-index: 1;
+  margin-bottom: -1px;
+  color: #666;
+}
+
+
+#tabs .active {
+  border-bottom-color: #f00;
+  color: #222;
+}
+
+.tab-container .content {
+  display: none;
+}
+
+/* Page layout */
+.main {
+  padding: 30px 100px;
+  width: 960px;
+  margin: 0 auto;
+}
+
+.main .logo {
+  color: #333;
+  text-align: left;
+ 
margin-bottom: 30px; + line-height: 1; + height: 110px; + margin-top: -50px; + overflow: hidden; + *zoom: 1; +} + +.main .logo a { + font-size: 160px; + color: #333; +} + +.helps { + margin-top: 40px; +} + +.helps pre { + padding: 20px; + margin: 10px 0; + border: solid 1px #e7e1cd; + background-color: #fffdef; + overflow: auto; +} + +.icon_lists { + width: 100% !important; + overflow: hidden; + *zoom: 1; +} + +.icon_lists li { + width: 100px; + margin-bottom: 10px; + margin-right: 20px; + text-align: center; + list-style: none !important; + cursor: default; +} + +.icon_lists li .code-name { + line-height: 1.2; +} + +.icon_lists .icon { + display: block; + height: 100px; + line-height: 100px; + font-size: 42px; + margin: 10px auto; + color: #333; + -webkit-transition: font-size 0.25s linear, width 0.25s linear; + -moz-transition: font-size 0.25s linear, width 0.25s linear; + transition: font-size 0.25s linear, width 0.25s linear; +} + +.icon_lists .icon:hover { + font-size: 100px; +} + +.icon_lists .svg-icon { + /* 通过设置 font-size 来改变图标大小 */ + width: 1em; + /* 图标和文字相邻时,垂直对齐 */ + vertical-align: -0.15em; + /* 通过设置 color 来改变 SVG 的颜色/fill */ + fill: currentColor; + /* path 和 stroke 溢出 viewBox 部分在 IE 下会显示 + normalize.css 中也包含这行 */ + overflow: hidden; +} + +.icon_lists li .name, +.icon_lists li .code-name { + color: #666; +} + +/* markdown 样式 */ +.markdown { + color: #666; + font-size: 14px; + line-height: 1.8; +} + +.highlight { + line-height: 1.5; +} + +.markdown img { + vertical-align: middle; + max-width: 100%; +} + +.markdown h1 { + color: #404040; + font-weight: 500; + line-height: 40px; + margin-bottom: 24px; +} + +.markdown h2, +.markdown h3, +.markdown h4, +.markdown h5, +.markdown h6 { + color: #404040; + margin: 1.6em 0 0.6em 0; + font-weight: 500; + clear: both; +} + +.markdown h1 { + font-size: 28px; +} + +.markdown h2 { + font-size: 22px; +} + +.markdown h3 { + font-size: 16px; +} + +.markdown h4 { + font-size: 14px; +} + +.markdown h5 { + font-size: 12px; +} + +.markdown h6 { + font-size: 12px; +} + +.markdown hr { + height: 1px; + border: 0; + background: #e9e9e9; + margin: 16px 0; + clear: both; +} + +.markdown p { + margin: 1em 0; +} + +.markdown>p, +.markdown>blockquote, +.markdown>.highlight, +.markdown>ol, +.markdown>ul { + width: 80%; +} + +.markdown ul>li { + list-style: circle; +} + +.markdown>ul li, +.markdown blockquote ul>li { + margin-left: 20px; + padding-left: 4px; +} + +.markdown>ul li p, +.markdown>ol li p { + margin: 0.6em 0; +} + +.markdown ol>li { + list-style: decimal; +} + +.markdown>ol li, +.markdown blockquote ol>li { + margin-left: 20px; + padding-left: 4px; +} + +.markdown code { + margin: 0 3px; + padding: 0 5px; + background: #eee; + border-radius: 3px; +} + +.markdown strong, +.markdown b { + font-weight: 600; +} + +.markdown>table { + border-collapse: collapse; + border-spacing: 0px; + empty-cells: show; + border: 1px solid #e9e9e9; + width: 95%; + margin-bottom: 24px; +} + +.markdown>table th { + white-space: nowrap; + color: #333; + font-weight: 600; +} + +.markdown>table th, +.markdown>table td { + border: 1px solid #e9e9e9; + padding: 8px 16px; + text-align: left; +} + +.markdown>table th { + background: #F7F7F7; +} + +.markdown blockquote { + font-size: 90%; + color: #999; + border-left: 4px solid #e9e9e9; + padding-left: 0.8em; + margin: 1em 0; +} + +.markdown blockquote p { + margin: 0; +} + +.markdown .anchor { + opacity: 0; + transition: opacity 0.3s ease; + margin-left: 8px; +} + +.markdown .waiting { + color: #ccc; +} + +.markdown h1:hover 
.anchor, +.markdown h2:hover .anchor, +.markdown h3:hover .anchor, +.markdown h4:hover .anchor, +.markdown h5:hover .anchor, +.markdown h6:hover .anchor { + opacity: 1; + display: inline-block; +} + +.markdown>br, +.markdown>p>br { + clear: both; +} + + +.hljs { + display: block; + background: white; + padding: 0.5em; + color: #333333; + overflow-x: auto; +} + +.hljs-comment, +.hljs-meta { + color: #969896; +} + +.hljs-string, +.hljs-variable, +.hljs-template-variable, +.hljs-strong, +.hljs-emphasis, +.hljs-quote { + color: #df5000; +} + +.hljs-keyword, +.hljs-selector-tag, +.hljs-type { + color: #a71d5d; +} + +.hljs-literal, +.hljs-symbol, +.hljs-bullet, +.hljs-attribute { + color: #0086b3; +} + +.hljs-section, +.hljs-name { + color: #63a35c; +} + +.hljs-tag { + color: #333333; +} + +.hljs-title, +.hljs-attr, +.hljs-selector-id, +.hljs-selector-class, +.hljs-selector-attr, +.hljs-selector-pseudo { + color: #795da3; +} + +.hljs-addition { + color: #55a532; + background-color: #eaffea; +} + +.hljs-deletion { + color: #bd2c00; + background-color: #ffecec; +} + +.hljs-link { + text-decoration: underline; +} + +/* 代码高亮 */ +/* PrismJS 1.15.0 +https://prismjs.com/download.html#themes=prism&languages=markup+css+clike+javascript */ +/** + * prism.js default theme for JavaScript, CSS and HTML + * Based on dabblet (http://dabblet.com) + * @author Lea Verou + */ +code[class*="language-"], +pre[class*="language-"] { + color: black; + background: none; + text-shadow: 0 1px white; + font-family: Consolas, Monaco, 'Andale Mono', 'Ubuntu Mono', monospace; + text-align: left; + white-space: pre; + word-spacing: normal; + word-break: normal; + word-wrap: normal; + line-height: 1.5; + + -moz-tab-size: 4; + -o-tab-size: 4; + tab-size: 4; + + -webkit-hyphens: none; + -moz-hyphens: none; + -ms-hyphens: none; + hyphens: none; +} + +pre[class*="language-"]::-moz-selection, +pre[class*="language-"] ::-moz-selection, +code[class*="language-"]::-moz-selection, +code[class*="language-"] ::-moz-selection { + text-shadow: none; + background: #b3d4fc; +} + +pre[class*="language-"]::selection, +pre[class*="language-"] ::selection, +code[class*="language-"]::selection, +code[class*="language-"] ::selection { + text-shadow: none; + background: #b3d4fc; +} + +@media print { + + code[class*="language-"], + pre[class*="language-"] { + text-shadow: none; + } +} + +/* Code blocks */ +pre[class*="language-"] { + padding: 1em; + margin: .5em 0; + overflow: auto; +} + +:not(pre)>code[class*="language-"], +pre[class*="language-"] { + background: #f5f2f0; +} + +/* Inline code */ +:not(pre)>code[class*="language-"] { + padding: .1em; + border-radius: .3em; + white-space: normal; +} + +.token.comment, +.token.prolog, +.token.doctype, +.token.cdata { + color: slategray; +} + +.token.punctuation { + color: #999; +} + +.namespace { + opacity: .7; +} + +.token.property, +.token.tag, +.token.boolean, +.token.number, +.token.constant, +.token.symbol, +.token.deleted { + color: #905; +} + +.token.selector, +.token.attr-name, +.token.string, +.token.char, +.token.builtin, +.token.inserted { + color: #690; +} + +.token.operator, +.token.entity, +.token.url, +.language-css .token.string, +.style .token.string { + color: #9a6e3a; + background: hsla(0, 0%, 100%, .5); +} + +.token.atrule, +.token.attr-value, +.token.keyword { + color: #07a; +} + +.token.function, +.token.class-name { + color: #DD4A68; +} + +.token.regex, +.token.important, +.token.variable { + color: #e90; +} + +.token.important, +.token.bold { + font-weight: bold; +} + 
+.token.italic {
+  font-style: italic;
+}
+
+.token.entity {
+  cursor: help;
+}
diff --git a/web/src/assets/demo_index.html b/web/src/assets/demo_index.html
new file mode 100644
index 000000000..5316d9800
--- /dev/null
+++ b/web/src/assets/demo_index.html
@@ -0,0 +1,2189 @@
[demo_index.html: the generated "iconfont Demo" page. Its markup did not survive extraction; the page renders a grid of every glyph in the project — the same names, Unicode code points, font-class names, and symbol IDs recorded in iconfont.css and iconfont.json below — and documents the three ways to reference the icons, summarized here.]

Unicode reference — the original web-font usage. Icons can be resized and recolored like text, but multicolor icons are not supported by default and are flattened to a single color. (Newer iconfont builds can deliver multicolor icons via the SVG symbol mode, or via a color-font option enabled in the project settings and regenerated.) Steps:

Step 1: copy the @font-face generated for the project:

@font-face {
  font-family: 'iconfont';
  src: url('iconfont.woff2?t=1639637531657') format('woff2'),
       url('iconfont.woff?t=1639637531657') format('woff'),
       url('iconfont.ttf?t=1639637531657') format('truetype');
}

Step 2: define the style that applies the font:

.iconfont {
  font-family: "iconfont" !important;
  font-size: 16px;
  font-style: normal;
  -webkit-font-smoothing: antialiased;
  -moz-osx-font-smoothing: grayscale;
}

Step 3: pick an icon, take its character code, and use it in the page:

<span class="iconfont">&#x33;</span>

("iconfont" is the font-family of your project; it can be changed in the project editor and defaults to "iconfont".)

font-class reference — a variant of the Unicode method that fixes its poor readability: each icon gets a semantic class name, so it is easy to tell which icon is which, and swapping an icon only means changing the Unicode inside its class. Steps:

Step 1: include the generated stylesheet:

<link rel="stylesheet" href="./iconfont.css">

Step 2: pick an icon and use its class name on an element:

<span class="iconfont icon-xxx"></span>

Symbol reference — an SVG symbol collection; the modern approach and the platform's recommended usage. Compared with the other two methods it supports multicolor icons and can still be styled like a font via font-size and color, but support starts at IE9+ and SVG rendering performance is middling (not better than PNG). Steps:

Step 1: include the generated script:

<script src="./iconfont.js"></script>

Step 2: add the shared CSS once:

<style>
.icon {
  width: 1em;
  height: 1em;
  vertical-align: -0.15em;
  fill: currentColor;
  overflow: hidden;
}
</style>

Step 3: pick an icon and reference its symbol id:

<svg class="icon" aria-hidden="true">
  <use xlink:href="#icon-xxx"></use>
</svg>
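In this Vue 3 codebase the Symbol method maps naturally onto a small wrapper component; a minimal sketch (the SvgIcon name is hypothetical, and it assumes ./iconfont.js is imported once at startup):

    import { h } from "vue";

    // Hypothetical wrapper: renders <svg class="icon"><use xlink:href="#icon-xxx"/></svg>.
    export default {
        name: "SvgIcon",
        props: { name: { type: String, required: true } },
        setup(props) {
            return () =>
                h("svg", { class: "icon", "aria-hidden": "true" }, [
                    h("use", { "xlink:href": `#${props.name}` }),
                ]);
        },
    };

Used as h(SvgIcon, { name: "icon-search" }); size and color follow the host element's font-size and color, per the .icon rule above.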
+ + + diff --git a/web/src/assets/iconfont.css b/web/src/assets/iconfont.css new file mode 100644 index 000000000..6b1fc9f15 --- /dev/null +++ b/web/src/assets/iconfont.css @@ -0,0 +1,363 @@ +@font-face { + font-family: "iconfont"; /* Project id 2585050 */ + src: url('iconfont.woff2?t=1639637531657') format('woff2'), + url('iconfont.woff?t=1639637531657') format('woff'), + url('iconfont.ttf?t=1639637531657') format('truetype'); +} + +.iconfont { + font-family: "iconfont" !important; + font-size: 16px; + font-style: normal; + -webkit-font-smoothing: antialiased; + -moz-osx-font-smoothing: grayscale; +} + +.icon-lansejiantoudaikuang:before { + content: "\e72b"; +} + +.icon-jiantou:before { + content: "\e727"; +} + +.icon-hive:before { + content: "\e722"; +} + +.icon-shell1:before { + content: "\e721"; +} + +.icon-copy:before { + content: "\e720"; +} + +.icon-leftfold:before { + content: "\e71e"; +} + +.icon-gengduoshenglvehao:before { + content: "\e71c"; +} + +.icon-unfold1:before { + content: "\e71d"; +} + +.icon-shouqi1:before { + content: "\e71a"; +} + +.icon-zhankai:before { + content: "\e71b"; +} + +.icon-gudingdaodaohanglan:before { + content: "\e719"; +} + +.icon-search:before { + content: "\e713"; +} + +.icon-a-sparkprocess:before { + content: "\e700"; +} + +.icon-a-hiveprocess:before { + content: "\e701"; +} + +.icon-a-shanchudehivetable:before { + content: "\e702"; +} + +.icon-a-hivetable:before { + content: "\e6ff"; +} + +.icon-zhutiyupeizhi:before { + content: "\e6f9"; +} + +.icon-fencengpeizhi:before { + content: "\e6fa"; +} + +.icon-yishoucang:before { + content: "\e6f7"; +} + +.icon-weishoucang:before { + content: "\e6f8"; +} + +.icon-star:before { + content: "\e6f5"; +} + +.icon-star-filled:before { + content: "\e6f6"; +} + +.icon-bianji:before { + content: "\e6f4"; +} + +.icon-biaozongshu:before { + content: "\e6f1"; +} + +.icon-shujukuzongshu:before { + content: "\e6f2"; +} + +.icon-zongcunchuliang:before { + content: "\e6f3"; +} + +.icon-gongzuoliu:before { + content: "\e6f0"; +} + +.icon-xinzeng1:before { + content: "\e6ef"; +} + +.icon-riqi:before { + content: "\e6ee"; +} + +.icon-zujianjieruguanli:before { + content: "\e6eb"; +} + +.icon-kongzhitai:before { + content: "\e6ec"; +} + +.icon-bumenheyonghuguanli:before { + content: "\e6ed"; +} + +.icon-a-dianxin5Gtianyiyunchunselogo:before { + content: "\e6c7"; +} + +.icon-gou:before { + content: "\e6bb"; +} + +.icon-xiala:before { + content: "\e6bc"; +} + +.icon-fuwuguanli:before { + content: "\e6bd"; +} + +.icon-shouqi:before { + content: "\e6be"; +} + +.icon-zongtiaoyongcishu:before { + content: "\e6bf"; +} + +.icon-api:before { + content: "\e6c0"; +} + +.icon-shuxing:before { + content: "\e6c1"; +} + +.icon-APItiaoyong:before { + content: "\e6c2"; +} + +.icon-zongzhihangshichangyongliang:before { + content: "\e6c3"; +} + +.icon-APIjiankong:before { + content: "\e6c4"; +} + +.icon-APIceshi:before { + content: "\e6c5"; +} + +.icon-APIguanli:before { + content: "\e6c6"; +} + +.icon-a-:before { + content: "\e6b3"; +} + +.icon-banben:before { + content: "\e6b4"; +} + +.icon-ceshi:before { + content: "\e6b5"; +} + +.icon-fabu:before { + content: "\e6b6"; +} + +.icon-fuwubiao:before { + content: "\e6b7"; +} + +.icon-baocun:before { + content: "\e6b8"; +} + +.icon-fuwukaifa:before { + content: "\e6b9"; +} + +.icon-xinzeng:before { + content: "\e6ba"; +} + +.icon-neicunkadi:before { + content: "\e6b0"; +} + +.icon-juxing:before { + content: "\e6af"; +} + +.icon-wenhao:before { + content: "\e6ac"; +} + +.icon-tiaodu:before { + 
content: "\e6ad"; +} + +.icon-huaban:before { + content: "\e6ae"; +} + +.icon-xingzhuang:before { + content: "\e84b"; +} + +.icon-file:before { + content: "\e693"; +} + +.icon-plus:before { + content: "\e692"; +} + +.icon-unfold:before { + content: "\e690"; +} + +.icon-putaway:before { + content: "\e691"; +} + +.icon-open:before { + content: "\e68a"; +} + +.icon-delete:before { + content: "\e68c"; +} + +.icon-flow:before { + content: "\e68d"; +} + +.icon-close:before { + content: "\e68e"; +} + +.icon-project:before { + content: "\e68f"; +} + +.icon-shell:before { + content: "\e671"; +} + +.icon-kill:before { + content: "\e667"; +} + +.icon-ready-stop:before { + content: "\e66b"; +} + +.icon-ready-pause:before { + content: "\e66d"; +} + +.icon-delay-execution:before { + content: "\e663"; +} + +.icon-need-fault-tolerance:before { + content: "\e664"; +} + +.icon-pause:before { + content: "\e665"; +} + +.icon-stop:before { + content: "\e666"; +} + +.icon-watting-depend:before { + content: "\e668"; +} + +.icon-forced-success:before { + content: "\e669"; +} + +.icon-success:before { + content: "\e66a"; +} + +.icon-submitted-success:before { + content: "\e66c"; +} + +.icon-failure:before { + content: "\e66e"; +} + +.icon-running-execution:before { + content: "\e66f"; +} + +.icon-waitting-thread:before { + content: "\e670"; +} + +.icon-refresh:before { + content: "\e65f"; +} + +.icon-full-screen:before { + content: "\e660"; +} + +.icon-cancel-full-screeen:before { + content: "\e661"; +} + +.icon-download:before { + content: "\e662"; +} + diff --git a/web/src/assets/iconfont.js b/web/src/assets/iconfont.js new file mode 100644 index 000000000..f50771508 --- /dev/null +++ b/web/src/assets/iconfont.js @@ -0,0 +1 @@ +!function(a){var l,h,c,t,p,i='',v=(v=document.getElementsByTagName("script"))[v.length-1].getAttribute("data-injectcss"),z=function(a,l){l.parentNode.insertBefore(a,l)};if(v&&!a.__iconfont__svg__cssinject__){a.__iconfont__svg__cssinject__=!0;try{document.write("")}catch(a){console&&console.log(a)}}function d(){p||(p=!0,c())}function m(){try{t.documentElement.doScroll("left")}catch(a){return void setTimeout(m,50)}d()}l=function(){var a,l;(l=document.createElement("div")).innerHTML=i,i=null,(a=l.getElementsByTagName("svg")[0])&&(a.setAttribute("aria-hidden","true"),a.style.position="absolute",a.style.width=0,a.style.height=0,a.style.overflow="hidden",l=a,(a=document.body).firstChild?z(l,a.firstChild):a.appendChild(l))},document.addEventListener?~["complete","loaded","interactive"].indexOf(document.readyState)?setTimeout(l,0):(h=function(){document.removeEventListener("DOMContentLoaded",h,!1),l()},document.addEventListener("DOMContentLoaded",h,!1)):document.attachEvent&&(c=l,t=a.document,p=!1,m(),t.onreadystatechange=function(){"complete"==t.readyState&&(t.onreadystatechange=null,d())})}(window); \ No newline at end of file diff --git a/web/src/assets/iconfont.json b/web/src/assets/iconfont.json new file mode 100644 index 000000000..271f04869 --- /dev/null +++ b/web/src/assets/iconfont.json @@ -0,0 +1,618 @@ +{ + "id": "2585050", + "name": "鲁班", + "font_family": "iconfont", + "css_prefix_text": "icon-", + "description": "", + "glyphs": [ + { + "icon_id": "26528093", + "name": "蓝色箭头带框", + "font_class": "lansejiantoudaikuang", + "unicode": "e72b", + "unicode_decimal": 59179 + }, + { + "icon_id": "26523520", + "name": "箭头", + "font_class": "jiantou", + "unicode": "e727", + "unicode_decimal": 59175 + }, + { + "icon_id": "26454158", + "name": "hive", + "font_class": "hive", + "unicode": "e722", + 
"unicode_decimal": 59170 + }, + { + "icon_id": "26454156", + "name": "shell", + "font_class": "shell1", + "unicode": "e721", + "unicode_decimal": 59169 + }, + { + "icon_id": "26440997", + "name": "copy", + "font_class": "copy", + "unicode": "e720", + "unicode_decimal": 59168 + }, + { + "icon_id": "26058470", + "name": "leftfold", + "font_class": "leftfold", + "unicode": "e71e", + "unicode_decimal": 59166 + }, + { + "icon_id": "26055468", + "name": "更多省略号", + "font_class": "gengduoshenglvehao", + "unicode": "e71c", + "unicode_decimal": 59164 + }, + { + "icon_id": "26055469", + "name": "unfold", + "font_class": "unfold1", + "unicode": "e71d", + "unicode_decimal": 59165 + }, + { + "icon_id": "26016697", + "name": "收起", + "font_class": "shouqi1", + "unicode": "e71a", + "unicode_decimal": 59162 + }, + { + "icon_id": "26016698", + "name": "展开", + "font_class": "zhankai", + "unicode": "e71b", + "unicode_decimal": 59163 + }, + { + "icon_id": "25934170", + "name": "固定到导航栏", + "font_class": "gudingdaodaohanglan", + "unicode": "e719", + "unicode_decimal": 59161 + }, + { + "icon_id": "25535513", + "name": "search", + "font_class": "search", + "unicode": "e713", + "unicode_decimal": 59155 + }, + { + "icon_id": "24156532", + "name": "spark process", + "font_class": "a-sparkprocess", + "unicode": "e700", + "unicode_decimal": 59136 + }, + { + "icon_id": "24156533", + "name": "hive process", + "font_class": "a-hiveprocess", + "unicode": "e701", + "unicode_decimal": 59137 + }, + { + "icon_id": "24156534", + "name": "删除的 hive table", + "font_class": "a-shanchudehivetable", + "unicode": "e702", + "unicode_decimal": 59138 + }, + { + "icon_id": "24156421", + "name": "hive table", + "font_class": "a-hivetable", + "unicode": "e6ff", + "unicode_decimal": 59135 + }, + { + "icon_id": "23905974", + "name": "主题域配置", + "font_class": "zhutiyupeizhi", + "unicode": "e6f9", + "unicode_decimal": 59129 + }, + { + "icon_id": "23905975", + "name": "分层配置", + "font_class": "fencengpeizhi", + "unicode": "e6fa", + "unicode_decimal": 59130 + }, + { + "icon_id": "23818087", + "name": "已收藏", + "font_class": "yishoucang", + "unicode": "e6f7", + "unicode_decimal": 59127 + }, + { + "icon_id": "23818088", + "name": "未收藏", + "font_class": "weishoucang", + "unicode": "e6f8", + "unicode_decimal": 59128 + }, + { + "icon_id": "23645384", + "name": "star", + "font_class": "star", + "unicode": "e6f5", + "unicode_decimal": 59125 + }, + { + "icon_id": "23645385", + "name": "star-filled", + "font_class": "star-filled", + "unicode": "e6f6", + "unicode_decimal": 59126 + }, + { + "icon_id": "23628719", + "name": "编辑", + "font_class": "bianji", + "unicode": "e6f4", + "unicode_decimal": 59124 + }, + { + "icon_id": "23594976", + "name": "表总数", + "font_class": "biaozongshu", + "unicode": "e6f1", + "unicode_decimal": 59121 + }, + { + "icon_id": "23594977", + "name": "数据库总数", + "font_class": "shujukuzongshu", + "unicode": "e6f2", + "unicode_decimal": 59122 + }, + { + "icon_id": "23594978", + "name": "总存储量", + "font_class": "zongcunchuliang", + "unicode": "e6f3", + "unicode_decimal": 59123 + }, + { + "icon_id": "23476333", + "name": "工作流", + "font_class": "gongzuoliu", + "unicode": "e6f0", + "unicode_decimal": 59120 + }, + { + "icon_id": "23183738", + "name": "新增", + "font_class": "xinzeng1", + "unicode": "e6ef", + "unicode_decimal": 59119 + }, + { + "icon_id": "23183418", + "name": "日期", + "font_class": "riqi", + "unicode": "e6ee", + "unicode_decimal": 59118 + }, + { + "icon_id": "23181388", + "name": "组件接入管理", + "font_class": "zujianjieruguanli", + 
"unicode": "e6eb", + "unicode_decimal": 59115 + }, + { + "icon_id": "23181389", + "name": "控制台", + "font_class": "kongzhitai", + "unicode": "e6ec", + "unicode_decimal": 59116 + }, + { + "icon_id": "23181390", + "name": "部门和用户管理", + "font_class": "bumenheyonghuguanli", + "unicode": "e6ed", + "unicode_decimal": 59117 + }, + { + "icon_id": "22920894", + "name": "电信 5G天翼云纯色logo", + "font_class": "a-dianxin5Gtianyiyunchunselogo", + "unicode": "e6c7", + "unicode_decimal": 59079 + }, + { + "icon_id": "22890665", + "name": "勾", + "font_class": "gou", + "unicode": "e6bb", + "unicode_decimal": 59067 + }, + { + "icon_id": "22890666", + "name": "下拉", + "font_class": "xiala", + "unicode": "e6bc", + "unicode_decimal": 59068 + }, + { + "icon_id": "22890667", + "name": "服务管理", + "font_class": "fuwuguanli", + "unicode": "e6bd", + "unicode_decimal": 59069 + }, + { + "icon_id": "22890668", + "name": "收起", + "font_class": "shouqi", + "unicode": "e6be", + "unicode_decimal": 59070 + }, + { + "icon_id": "22890669", + "name": "总调用次数", + "font_class": "zongtiaoyongcishu", + "unicode": "e6bf", + "unicode_decimal": 59071 + }, + { + "icon_id": "22890670", + "name": "api", + "font_class": "api", + "unicode": "e6c0", + "unicode_decimal": 59072 + }, + { + "icon_id": "22890671", + "name": "属性", + "font_class": "shuxing", + "unicode": "e6c1", + "unicode_decimal": 59073 + }, + { + "icon_id": "22890672", + "name": "API调用", + "font_class": "APItiaoyong", + "unicode": "e6c2", + "unicode_decimal": 59074 + }, + { + "icon_id": "22890673", + "name": "总执行时长用量", + "font_class": "zongzhihangshichangyongliang", + "unicode": "e6c3", + "unicode_decimal": 59075 + }, + { + "icon_id": "22890674", + "name": "API监控", + "font_class": "APIjiankong", + "unicode": "e6c4", + "unicode_decimal": 59076 + }, + { + "icon_id": "22890675", + "name": "API测试", + "font_class": "APIceshi", + "unicode": "e6c5", + "unicode_decimal": 59077 + }, + { + "icon_id": "22890676", + "name": "API管理", + "font_class": "APIguanli", + "unicode": "e6c6", + "unicode_decimal": 59078 + }, + { + "icon_id": "22890657", + "name": "!", + "font_class": "a-", + "unicode": "e6b3", + "unicode_decimal": 59059 + }, + { + "icon_id": "22890658", + "name": "版本", + "font_class": "banben", + "unicode": "e6b4", + "unicode_decimal": 59060 + }, + { + "icon_id": "22890659", + "name": "测试", + "font_class": "ceshi", + "unicode": "e6b5", + "unicode_decimal": 59061 + }, + { + "icon_id": "22890660", + "name": "发布", + "font_class": "fabu", + "unicode": "e6b6", + "unicode_decimal": 59062 + }, + { + "icon_id": "22890661", + "name": "服务表", + "font_class": "fuwubiao", + "unicode": "e6b7", + "unicode_decimal": 59063 + }, + { + "icon_id": "22890662", + "name": "保存", + "font_class": "baocun", + "unicode": "e6b8", + "unicode_decimal": 59064 + }, + { + "icon_id": "22890663", + "name": "服务开发", + "font_class": "fuwukaifa", + "unicode": "e6b9", + "unicode_decimal": 59065 + }, + { + "icon_id": "22890664", + "name": "新增", + "font_class": "xinzeng", + "unicode": "e6ba", + "unicode_decimal": 59066 + }, + { + "icon_id": "22808932", + "name": "内存卡底", + "font_class": "neicunkadi", + "unicode": "e6b0", + "unicode_decimal": 59056 + }, + { + "icon_id": "22808755", + "name": "矩形", + "font_class": "juxing", + "unicode": "e6af", + "unicode_decimal": 59055 + }, + { + "icon_id": "22779777", + "name": "问号", + "font_class": "wenhao", + "unicode": "e6ac", + "unicode_decimal": 59052 + }, + { + "icon_id": "22779778", + "name": "调度", + "font_class": "tiaodu", + "unicode": "e6ad", + "unicode_decimal": 59053 + }, + { + "icon_id": 
"22779779", + "name": "画板", + "font_class": "huaban", + "unicode": "e6ae", + "unicode_decimal": 59054 + }, + { + "icon_id": "10610779", + "name": "loading", + "font_class": "xingzhuang", + "unicode": "e84b", + "unicode_decimal": 59467 + }, + { + "icon_id": "22298489", + "name": "file", + "font_class": "file", + "unicode": "e693", + "unicode_decimal": 59027 + }, + { + "icon_id": "22292262", + "name": "plus", + "font_class": "plus", + "unicode": "e692", + "unicode_decimal": 59026 + }, + { + "icon_id": "22285589", + "name": "unfold", + "font_class": "unfold", + "unicode": "e690", + "unicode_decimal": 59024 + }, + { + "icon_id": "22285590", + "name": "put away", + "font_class": "putaway", + "unicode": "e691", + "unicode_decimal": 59025 + }, + { + "icon_id": "22285559", + "name": "open", + "font_class": "open", + "unicode": "e68a", + "unicode_decimal": 59018 + }, + { + "icon_id": "22285561", + "name": "delete", + "font_class": "delete", + "unicode": "e68c", + "unicode_decimal": 59020 + }, + { + "icon_id": "22285562", + "name": "flow", + "font_class": "flow", + "unicode": "e68d", + "unicode_decimal": 59021 + }, + { + "icon_id": "22285563", + "name": "close", + "font_class": "close", + "unicode": "e68e", + "unicode_decimal": 59022 + }, + { + "icon_id": "22285564", + "name": "project", + "font_class": "project", + "unicode": "e68f", + "unicode_decimal": 59023 + }, + { + "icon_id": "21990365", + "name": "shell", + "font_class": "shell", + "unicode": "e671", + "unicode_decimal": 58993 + }, + { + "icon_id": "21974034", + "name": "icon-kill", + "font_class": "kill", + "unicode": "e667", + "unicode_decimal": 58983 + }, + { + "icon_id": "21974035", + "name": "icon-ready-stop", + "font_class": "ready-stop", + "unicode": "e66b", + "unicode_decimal": 58987 + }, + { + "icon_id": "21974036", + "name": "icon-ready-pause", + "font_class": "ready-pause", + "unicode": "e66d", + "unicode_decimal": 58989 + }, + { + "icon_id": "21972598", + "name": "DELAY_EXECUTION", + "font_class": "delay-execution", + "unicode": "e663", + "unicode_decimal": 58979 + }, + { + "icon_id": "21972599", + "name": "NEED_FAULT_TOLERANCE", + "font_class": "need-fault-tolerance", + "unicode": "e664", + "unicode_decimal": 58980 + }, + { + "icon_id": "21972600", + "name": "PAUSE", + "font_class": "pause", + "unicode": "e665", + "unicode_decimal": 58981 + }, + { + "icon_id": "21972601", + "name": "STOP", + "font_class": "stop", + "unicode": "e666", + "unicode_decimal": 58982 + }, + { + "icon_id": "21972603", + "name": "WAITTING_DEPEND", + "font_class": "watting-depend", + "unicode": "e668", + "unicode_decimal": 58984 + }, + { + "icon_id": "21972604", + "name": "FORCED_SUCCES", + "font_class": "forced-success", + "unicode": "e669", + "unicode_decimal": 58985 + }, + { + "icon_id": "21972605", + "name": "SUCCESS", + "font_class": "success", + "unicode": "e66a", + "unicode_decimal": 58986 + }, + { + "icon_id": "21972607", + "name": "SUBMITTED_SUCCES", + "font_class": "submitted-success", + "unicode": "e66c", + "unicode_decimal": 58988 + }, + { + "icon_id": "21972609", + "name": "FAILURE", + "font_class": "failure", + "unicode": "e66e", + "unicode_decimal": 58990 + }, + { + "icon_id": "21972610", + "name": "RUNNING_EXECUTIO", + "font_class": "running-execution", + "unicode": "e66f", + "unicode_decimal": 58991 + }, + { + "icon_id": "21972611", + "name": "WAITTING_THREAD", + "font_class": "waitting-thread", + "unicode": "e670", + "unicode_decimal": 58992 + }, + { + "icon_id": "21972589", + "name": "REFRESH", + "font_class": "refresh", + "unicode": 
"e65f", + "unicode_decimal": 58975 + }, + { + "icon_id": "21972590", + "name": "FULL_SCREEN", + "font_class": "full-screen", + "unicode": "e660", + "unicode_decimal": 58976 + }, + { + "icon_id": "21972591", + "name": "CANCLE_FULL_SCREEN", + "font_class": "cancel-full-screeen", + "unicode": "e661", + "unicode_decimal": 58977 + }, + { + "icon_id": "21972592", + "name": "DOWNLOAD", + "font_class": "download", + "unicode": "e662", + "unicode_decimal": 58978 + } + ] +} diff --git a/web/src/assets/iconfont.ttf b/web/src/assets/iconfont.ttf new file mode 100644 index 000000000..13a9307f2 Binary files /dev/null and b/web/src/assets/iconfont.ttf differ diff --git a/web/src/assets/iconfont.woff b/web/src/assets/iconfont.woff new file mode 100644 index 000000000..b8528e821 Binary files /dev/null and b/web/src/assets/iconfont.woff differ diff --git a/web/src/assets/iconfont.woff2 b/web/src/assets/iconfont.woff2 new file mode 100644 index 000000000..0740c9958 Binary files /dev/null and b/web/src/assets/iconfont.woff2 differ diff --git a/web/src/assets/img/E.jpg b/web/src/assets/img/E.jpg new file mode 100644 index 000000000..507aa814c Binary files /dev/null and b/web/src/assets/img/E.jpg differ diff --git a/web/src/assets/img/Exchangis.jpeg b/web/src/assets/img/Exchangis.jpeg new file mode 100644 index 000000000..63cfc495f Binary files /dev/null and b/web/src/assets/img/Exchangis.jpeg differ diff --git a/web/src/assets/img/void_page.png b/web/src/assets/img/void_page.png new file mode 100644 index 000000000..633bb6cf0 Binary files /dev/null and b/web/src/assets/img/void_page.png differ diff --git a/web/src/common/common.less b/web/src/common/common.less new file mode 100644 index 000000000..5d5ea274f --- /dev/null +++ b/web/src/common/common.less @@ -0,0 +1,32 @@ +@import '../assets/iconfont.css'; +@font-face { + font-family: 'iconfont'; + src: url('../assets/iconfont.woff2?t=1639637531657') format('woff2'), + url('../assets/iconfont.woff?t=1639637531657') format('woff'), + url('../assets/iconfont.ttf?t=1639637531657') format('truetype'); +} +.icon { + width: 1em; height: 1em; + vertical-align: -0.15em; + fill: currentColor; + overflow: hidden; +} + +.logo-img { + +} + +.ant-layout-sider-children { + border-right: 1px solid #dee4ec; +} +.ant-layout-sider-trigger { + border-right: 1px solid #dee4ec; +} +.ant-layout-sider-collapsed { + .layout-logo { + visibility: hidden; + } +} +.ant-layout-header.layout-header { + display: none !important; +} diff --git a/web/src/common/constants.js b/web/src/common/constants.js new file mode 100644 index 000000000..5e701bbe2 --- /dev/null +++ b/web/src/common/constants.js @@ -0,0 +1,4 @@ +//export const BASE_URL = "/api/rest_j/v1/exchangis/"; +//export const BASE_URL = "/api/rest_j/v1/dss/exchangis/" +export const BASE_URL = "/api/rest_j/v1/dss/exchangis/main/" +//export const BASE_URL = "/wdsentl/exchangis/api/rest_j/v1/dss/exchangis/main/" diff --git a/web/src/common/content.less b/web/src/common/content.less new file mode 100644 index 000000000..f204425aa --- /dev/null +++ b/web/src/common/content.less @@ -0,0 +1,33 @@ +:deep(.ant-form-item-control-input input) { + border-radius: 4px; +} +:deep(.ant-input) { + border-radius: 4px; +} +:deep(.ant-input-affix-wrapper) { + border-radius: 4px; +} +:deep(.ant-select .ant-select-selector.ant-select-selector.ant-select-selector.ant-select-selector) { + border-radius: 4px; +} +:deep(.ant-calendar-picker-input.ant-input) { + border-radius: 4px; +} +:deep(.ant-btn) { + border-radius: 4px !important; +} +:deep(.ant-table-thead 
> tr > th) {
+    background: #F8F9FC;
+}
+:deep(.ant-table-thead > tr:first-child > th:first-child) {
+    border-top-left-radius: 4px;
+}
+:deep(.ant-table-thead > tr:first-child > th:last-child) {
+    border-top-right-radius: 4px;
+}
+:deep(.ant-table-thead > tr > th) {
+    padding: 10px;
+}
+:deep(.ant-table-tbody > tr > td) {
+    padding: 10px;
+}
diff --git a/web/src/common/service.js b/web/src/common/service.js
new file mode 100644
index 000000000..f524a5662
--- /dev/null
+++ b/web/src/common/service.js
@@ -0,0 +1,634 @@
+// Server-side API services
+import { request } from "@fesjs/fes";
+import { BASE_URL } from "@/common/constants";
+
+import { getEnvironment } from "@/common/utils";
+
+////////////////////////////////////////////////////////////////////
+export const getProjectList = (name, current, size) => {
+    return request(`/projects?name=${name}&current=${current}&size=${size}`, {
+        labels: {
+            route: getEnvironment()
+        }
+    }, { method: "POST" });
+};
+
+export const createProject = (body) => {
+    return request("/createProject", {
+        ...body,
+        labels: {
+            route: getEnvironment()
+        }
+    });
+};
+
+export const deleteProject = (id) => {
+    return request("/projects/" + id, {
+        labels: {
+            route: getEnvironment()
+        }
+    }, {
+        method: "DELETE",
+    });
+};
+
+export const getProjectById = (id) => {
+    return request("/projects/" + id + '?labels=' + getEnvironment(), null, {
+        method: "GET",
+    });
+};
+
+export const updateProject = (body) => {
+    return request("/updateProject", {
+        ...body,
+        labels: {
+            route: getEnvironment()
+        }
+    }, {
+        method: "PUT",
+    });
+};
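+// Usage sketch (the literal values are illustrative assumptions): a caller
+// such as the project-list view consumes the services above like
+//
+//     import { getProjectList } from "@/common/service";
+//     const result = await getProjectList("order", 1, 10);
+//     // -> POST /projects?name=order&current=1&size=10
+//     //    body: { labels: { route: <current environment> } }
+//
+// Paging travels in the query string; the environment route label always
+// travels in the request body.
+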
+export const getDataSourceList = (params) => {
+    return request("/datasources/query", {
+        ...params,
+        labels: {
+            route: getEnvironment()
+        }
+    }, { method: "POST" });
+};
+
+// Data source management: get the available data source types
+export const getDataSourceTypes = (param) => {
+    let extra = ''
+    if (param) {
+        const { engineType, direct, sourceType } = param
+        extra = `&engineType=${engineType}&direct=${direct}${sourceType ? '&sourceType=' + sourceType : ''}`
+    }
+    return request(
+        `/datasources/type?labels=${getEnvironment()}&t=_${new Date().getTime()}${extra}`,
+        {},
+        { method: "GET" }
+    );
+};
+
+// Data source management: get the dynamic key definitions
+export const getKeyDefine = (dataSourceTypeId) => {
+    return request(
+        `/datasources/types/${dataSourceTypeId}/keydefines?labels=${getEnvironment()}&t=_${new Date().getTime()}`,
+        {},
+        { method: "GET" }
+    );
+};
+
+// Query data sources
+export const getDataSource = (body) => {
+    return request("/datasources/query", {
+        ...body,
+        labels: {
+            route: getEnvironment()
+        }
+    }, { method: "POST" });
+};
+
+export const getDBs = (type, id) => {
+    return request(`/datasources/${type}/${id}/dbs?labels=${getEnvironment()}`, {}, { method: "GET" });
+};
+
+export const getTables = (type, id, dbName) => {
+    return request(
+        `/datasources/${type}/${id}/dbs/${dbName}/tables?labels=${getEnvironment()}`,
+        {},
+        { method: "GET" }
+    );
+};
+
+/*export const getFields = (type, id, dbName, tableName) => {
+    return request(
+        `/datasources/${type}/${id}/dbs/${dbName}/tables/${tableName}/fields`,
+        {},
+        { method: "GET" }
+    );
+};*/
+// /datasources/fieldsmaping
+export const getFields = (params) => {
+    return request(
+        `/job/transform/settings`,
+        {
+            ...params,
+            labels: {
+                route: getEnvironment()
+            }
+        },
+        { method: "POST" }
+    );
+};
+
+export const createDataSource = (params) => {
+    return request("/datasources", {
+        ...params,
+        labels: {
+            route: getEnvironment()
+        }
+    }, { method: "POST" });
+};
+
+export const updateDataSource = (id, params) => {
+    return request("/datasources/" + id, {
+        ...params,
+        labels: {
+            route: getEnvironment()
+        }
+    }, { method: "PUT" });
+};
+
+export const deleteDataSource = (id) => {
+    return request(`/datasources/${id}`, {
+        labels: {
+            route: getEnvironment()
+        }
+    }, { method: "DELETE" });
+};
+
+export const getDataSourceVersionList = (id) => {
+    return request(`/datasources/${id}/versions?labels=${getEnvironment()}`, {}, { method: "GET" });
+};
+
+export const testDataSourceConnect = (type, id) => {
+    return request(`/datasources/${type}/${id}/connect?_=${Math.random()}`, {
+        labels: {
+            route: getEnvironment()
+        }
+    }, { method: "PUT" });
+};
+
+export const testDataSourceNotSavedConnect = (params) => {
+    return request(`/datasources/op/connect?_=${Math.random()}`, {
+        ...params,
+        labels: {
+            route: getEnvironment()
+        }
+    }, { method: "POST" });
+};
+
+export const getDataSourceById = (id, versionId) => {
+    return request(`/datasources/${id}?labels=${getEnvironment()}`, { versionId }, { method: "GET" });
+};
+
+export const getJobInfo = (id) => {
+    return request(`/job/${id}?labels=${getEnvironment()}`, null, {
+        method: "GET",
+    });
+};
+
+// Get the job list
+export const getJobList = (query) => {
+    return request(`/job?labels=${getEnvironment()}&${query}`, null, {
+        method: "GET",
+    });
+};
+
+// Get the list of execution engines
+export const getEngineType = () => {
+    return request(`/job/engineType?labels=${getEnvironment()}`, null, {
+        method: "GET",
+    });
+};
+
+// Create a job
+export const createJob = (params) => {
+    return request(
+        `/job`,
+        {
+            ...params,
+            labels: {
+                route: getEnvironment()
+            }
+        },
+        {
+            method: "POST",
+        }
+    );
+};
+
+// Copy a job
+export const copyJob = (id, params) => {
+    return request(
+        `/job/${id}/copy`,
+        {
+            ...params,
+            labels: {
+                route: getEnvironment()
+            }
+        },
+        {
+            method: "POST",
+        }
+    );
+};
+
+// Edit a job
+export const modifyJob = (id, params) => {
+    return request(
+        `/job/${id}`,
+        {
+            ...params,
+            labels: {
+                route: getEnvironment()
+            }
+        },
+        {
+            method: "PUT",
+        }
+    );
+};
+
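+// Usage sketch: job mutations all follow the same convention — the payload
+// plus the environment route label go in the body. The payload fields below
+// are illustrative assumptions, not the server contract:
+//
+//     const job = await createJob({ projectId: 1, jobName: "ods_sync", engineType: "SQOOP" });
+//     await modifyJob(job.id, { jobDesc: "nightly sync" });
+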
+// Delete a job
+export const deleteJob = (id) => {
+    return request(`/job/${id}`, {
+        labels: {
+            route: getEnvironment()
+        }
+    }, {
+        method: "DELETE",
+    });
+};
+
+// Import a job
+export const importJob = (id, params) => {
+    return request(
+        `/job/import`,
+        {
+            ...params,
+            labels: {
+                route: getEnvironment()
+            }
+        },
+        {
+            method: "POST",
+        }
+    );
+};
+
+// Execute a job
+export const executeTask = (id) => {
+    return request(`/job/${id}/action/execute`, {
+        labels: {
+            route: getEnvironment()
+        }
+    }, {
+        method: "POST",
+    });
+};
+
+export const getJobs = (id, jobType, name, current, size) => {
+    return request(`/job?labels=${getEnvironment()}&projectId=${id}&jobType=${jobType}&name=${name}&current=${current}&size=${size}`, null, {
+        method: "GET",
+    });
+};
+
+export const saveProject = (id, body, type = 'save') => {
+    return request(`/job/${id}/content`, {
+        ...body,
+        labels: {
+            route: getEnvironment()
+        }
+    }, {
+        headers: { 'save-from': type },
+        method: "PUT",
+    });
+};
+
+// Save/update the job configuration
+export const updateTaskConfiguration = (id, body) => {
+    return request(`/job/${id}/config`, {
+        ...body,
+        labels: {
+            route: getEnvironment()
+        }
+    }, {
+        method: "PUT",
+    });
+};
+
+export const expireDataSource = (id) => {
+    return request(`/datasources/${id}/expire`, {
+        labels: {
+            route: getEnvironment()
+        }
+    }, { method: "PUT" });
+};
+
+export const publishDataSource = (id, versionId) => {
+    return request(
+        `/datasources/${id}/${versionId}/publish`,
+        {
+            labels: {
+                route: getEnvironment()
+            }
+        },
+        { method: "PUT" }
+    );
+};
+
+export const getSourceParams = (engineType, type, ds) => {
+    return request(
+        `/datasources/${engineType}/${type}/params/ui?labels=${getEnvironment()}&dir=${ds}`,
+        {},
+        { method: "GET" }
+    );
+};
+
+export const getSettingsParams = (engineType) => {
+    return request(
+        `/jobs/engine/${engineType}/settings/ui?labels=${getEnvironment()}`,
+        {},
+        { method: "GET" }
+    );
+};
+
+// Job execution
+/*export const executeJob = (id) => {
+    return request(`/job/${id}/action/execute`, {}, {
+        method: "POST",
+    });
+};*/
+
+// Sync history
+export const getSyncHistory = (body) => {
+    return request("/tasks?labels=" + getEnvironment(), body, {
+        method: "GET",
+    });
+};
+// New sync history: get the job list
+export const getSyncHistoryJobList = (body) => {
+    return request("/job/listJobs?labels=" + getEnvironment(), body, {
+        method: "GET",
+    });
+};
+// Delete a sync-history job
+export const delSyncHistory = (jobExecutionId) => {
+    return request(`/job/${jobExecutionId}/deleteJob`, {
+        labels: {
+            route: getEnvironment()
+        }
+    }, {
+        method: "POST",
+    });
+};
+// Read the task speed-limit configuration
+export const getSpeedLimit = (params) => {
+    return request(
+        `/job/${params.jobId}/speedlimit/${params.taskName}/params/ui?labels=${getEnvironment()}`,
+        {},
+        {
+            method: "GET",
+        }
+    );
+};
+// Save the task speed-limit configuration
+export const saveSpeedLimit = (params, body) => {
+    return request(`/job/${params.jobId}/speedlimit/${params.taskName}`, {
+        ...body,
+        labels: {
+            route: getEnvironment()
+        }
+    }, {
+        method: "PUT",
+    });
+};
+
+// Get run logs
+export const getLogs = (params) => {
+    return request(
+        `/execution/tasks/${params.taskID}/logs?labels=${getEnvironment()}`,
+        {
+            fromLine: params.fromLine || 1,
+            pageSize: params.pageSize || 10
+        },
+        {
+            method: "GET",
+        }
+    );
+}
+
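+// Usage sketch: the log endpoints page with fromLine/pageSize, so a console
+// view can tail output by advancing fromLine after each poll. The response
+// field name below is an assumption for illustration:
+//
+//     let fromLine = 1;
+//     async function pollTaskLog(taskID) {
+//         const res = await getLogs({ taskID, fromLine, pageSize: 50 });
+//         fromLine = res.data.endLine || fromLine; // assumed field
+//     }
+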
+
+// Homepage APIs
+
+// Task state
+export const getTaskState = () => {
+  return request("/metrics/taskstate?labels=" + getEnvironment(), {}, { method: "GET" });
+};
+
+// Task progress
+export const getTaskProcess = () => {
+  return request("/metrics/taskprocess?labels=" + getEnvironment(), {}, { method: "GET" });
+};
+
+// Traffic monitoring
+export const getDataSourceFlow = () => {
+  return request("/metrics/datasourceflow?labels=" + getEnvironment(), {}, { method: "GET" });
+};
+
+// Resource usage
+export const getEngineriesSource = () => {
+  return request("/metrics/engineresource?labels=" + getEnvironment(), {}, { method: "GET" });
+};
+
+export const getEngineriesSourceCpu = () => {
+  return request("/metrics/engineresourcecpu?labels=" + getEnvironment(), {}, { method: "GET" });
+};
+
+export const getEngineriesSourceMem = () => {
+  return request("/metrics/engineresourcemem?labels=" + getEnvironment(), {}, { method: "GET" });
+};
+
+
+/* Job execution module APIs */
+export const executeJob = (id) => {
+  return request(`/job/${id}/execute`,{
+    labels: {
+      route: getEnvironment()
+    }
+  }, {
+    method: "POST",
+  })
+}
+
+export const getJobStatus = (id) => {
+  return request(`/job/execution/${id}/status?labels=${getEnvironment()}`, {}, {
+    method: "GET",
+  })
+}
+
+export const getJobTasks = (id) => {
+  return request(`/job/execution/${id}/taskList?labels=${getEnvironment()}`, null, {
+    method: "GET",
+  })
+}
+
+export const getProgress = (id) => {
+  return request(`/job/execution/${id}/progress?labels=${getEnvironment()}&_=${Math.random()}`, null, {
+    method: "GET",
+  })
+}
+
+export const getMetrics = (taskId, jobExecutionId) => {
+  return request(`/task/execution/${taskId}/metrics`, {
+    jobExecutionId,
+    labels: {
+      route: getEnvironment()
+    }
+  }, {
+    method: "POST",
+  })
+}
+
+export const killJob = (id) => {
+  return request(`/job/execution/${id}/kill`, {
+    labels: {
+      route: getEnvironment()
+    }
+  }, {
+    method: "POST",
+  })
+}
+
+// Fetch job execution logs
+export const getJobExecLog = (params) => {
+  return request(
+    `/job/execution/${params.id}/log?labels=${getEnvironment()}&_=${Math.random()}`,
+    {
+      fromLine: params.fromLine || 0,
+      pageSize: params.pageSize || 50,
+      onlyKeywords: params.onlyKeywords,
+      ignoreKeywords: params.ignoreKeywords,
+      lastRows: params.lastRows,
+      enableTail: true
+    },
+    {
+      method: "GET",
+    }
+  );
+}
+
+// Fetch task execution logs
+export const getTaskExecLog = (params) => {
+  return request(
+    `/task/execution/${params.taskId}/log?labels=${getEnvironment()}&_=${Math.random()}`,
+    {
+      fromLine: params.fromLine || 0,
+      pageSize: params.pageSize || 50,
+      jobExecutionId: params.id,
+      onlyKeywords: params.onlyKeywords,
+      ignoreKeywords: params.ignoreKeywords,
+      lastRows: params.lastRows,
+      enableTail: true
+    },
+    {
+      method: "GET",
+    }
+  );
+}
+
+// Fetch partition info
+export const getPartitionInfo = (params) => {
+  if (!params.source) return
+  const url = params.source.split(BASE_URL)[1]
+  return request(
+    `${url}?labels=${getEnvironment()}&dataSourceId=${params.dataSourceId}&database=${params.database}&table=${params.table}&tableNotExist=${params.tableNotExist}&_=${Math.random()}`,
+    {},
+    {
+      method: "GET",
+    }
+  );
+}
+
+// Fetch field-mapping transform functions
+export const getFieldFunc = (funcType) => {
+  if (!funcType) return
+  return request(
+    `/job/func/${funcType}?labels=${getEnvironment()}&_=${Math.random()}`,
+    {},
+    {
+      method: "GET",
+    }
+  );
+}
+
+// Encrypt a value via the data source tools endpoint
+export const encryptFunc = (param) => {
+  return request(
+    `/datasources/tools/encrypt?labels=${getEnvironment()}`,
+    param,
+    {
+      method: "POST",
+    }
+  );
+}
+
+// Fetch execution users
+export const getExecutor = () => {
+  return request(
+    `/job/Executor?labels=${getEnvironment()}`,
+    {},
+    {
+      method: "GET",
+    }
+  )
+}
+
+// Save processor code content
+export const saveProcessor = (param) => {
+  return request(
+    `/job/transform/processor/code_content?labels=${getEnvironment()}`,
+    param,
+    {
+      method: "POST",
+    }
+  );
+}
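+// Editorial sketch (assumption, not in the original PR): saveProcessor presumably
+// returns an id for the stored code that the later calls thread through, so a
+// save-then-edit flow would look roughly like:
+//
+//   const saved = await saveProcessor({ code_content: "..." });          // payload shape is assumed
+//   await updateProcessor({ proc_code_id: saved.id, code_content: "..." }); // "id" field is assumed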
+
+// Update processor code content
+export const updateProcessor = ({ proc_code_id, ...param }) => {
+  return request(
+    `/job/transform/processor/code_content/${proc_code_id}?labels=${getEnvironment()}`,
+    param,
+    {
+      method: "PUT",
+    }
+  );
+}
+
+// Fetch the processor code template
+export const getTemplate = () => {
+  return request(
+    `/job/transform/processor/code_template?labels=${getEnvironment()}`,
+    {},
+    {
+      method: "GET",
+    }
+  );
+}
+
+// Fetch processor code content
+export const getProcessor = (proc_code_id) => {
+  return request(
+    `/job/transform/processor/code_content/${proc_code_id}?labels=${getEnvironment()}`,
+    {},
+    {
+      method: "GET",
+    }
+  );
+}
+
+// Fetch project permissions
+export const getProjectPermission = (projectId) => {
+  return request(
+    `/getProjectPermission/${projectId}?labels=${getEnvironment()}`,
+    {},
+    {
+      method: "GET",
+    }
+  );
+}
\ No newline at end of file
diff --git a/web/src/common/use/useTable.js b/web/src/common/use/useTable.js
new file mode 100644
index 000000000..a84646966
--- /dev/null
+++ b/web/src/common/use/useTable.js
@@ -0,0 +1,28 @@
+import { ref, reactive } from "vue";
+import { request } from "@fesjs/fes";
+
+export const useTableBase = (api) => {
+  // loading state
+  const queryDataSourceLoading = ref(true);
+  const dataSource = ref([]);
+  // cache the last query params (reused when called without arguments)
+  let cacheParams = {};
+  // expose the query method
+  const queryDataSource = async (params) => {
+    queryDataSourceLoading.value = true;
+    try {
+      cacheParams = params || cacheParams;
+      const result = await api(cacheParams);
+      dataSource.value = result || [];
+      return result;
+    } finally {
+      queryDataSourceLoading.value = false;
+    }
+  };
+
+  return {
+    dataSource,
+    queryDataSource,
+    queryDataSourceLoading,
+  };
+};
diff --git a/web/src/common/utils.js b/web/src/common/utils.js
new file mode 100644
index 000000000..2e259f943
--- /dev/null
+++ b/web/src/common/utils.js
@@ -0,0 +1,118 @@
+import { cloneDeep, isNil, isInteger } from "lodash-es";
+import moment from "moment";
+import { BASE_URL } from "./constants";
+
+export const arrToObj = (arr, key) => {
+  if (!Array.isArray(arr)) return null;
+  if (!key) {
+    console.error("arrToObj: key is required");
+    return null;
+  }
+  return arr.reduce((acc, cur) => {
+    acc[cur[key]] = cur;
+    return acc;
+  }, {});
+};
+
+export const extractionObj = (obj, standard) => {
+  const cloneObj = cloneDeep(obj);
+  const cloneStandard = cloneDeep(standard);
+  Object.keys(standard).forEach((key) => {
+    if (!isNil(cloneObj[key])) {
+      cloneStandard[key] = cloneObj[key];
+    }
+  });
+  return cloneStandard;
+};
+
+export const isPositiveInteger = (num) => isInteger(num) && num > 0;
+
+export const nonNegativeInteger = (num) => isInteger(num) && num >= 0;
+
+export function getQueryVariable(variable) {
+  const query = window.location.search.substring(1);
+  const vars = query.split("&");
+  for (let i = 0; i < vars.length; i++) {
+    const pair = vars[i].split("=");
+    if (pair[0] === variable) {
+      return pair[1];
+    }
+  }
+  return false;
+}
+
+export const genFileURL = (fileId, fileHash) =>
+  `${BASE_URL}/record/download/file/${fileId}/${fileHash}`;
+
+export const dateFormat = (timestamp) => {
+  return moment(timestamp).format("YYYY-MM-DD HH:mm");
+}
+
+export const dateFormatSeconds = (timestamp) => {
+  return moment(timestamp).format("YYYY-MM-DD HH:mm:ss");
+};
+
+/*
+ * Random string generation
+ */
+export const randomString = (len) => {
+  len = len || 12;
+  let $chars = "ABCDEFGHJKMNPQRSTWXYZabcdefhijkmnprstwxyz";
+  let maxPos = $chars.length;
+  let pwd = "";
+  for (let i = 0; i < len; i++) {
+    pwd += $chars.charAt(Math.floor(Math.random() * maxPos));
+  }
+  return pwd;
+};
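+// Editorial usage sketch (selectors are hypothetical): moveUpDown below wires up a
+// draggable horizontal divider -- moveTopSelector matches the drag handle,
+// targetSelector the panel being resized, and wrapSelector bounds the movement:
+//
+//   moveUpDown(".log-panel", ".page-content", ".log-panel-drag-bar", 200, 100);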
+
+export const moveUpDown = (targetSelector, wrapSelector, moveTopSelector, boundaryTop = 200, boundaryBottom = 200) => {
+  let box = document.querySelector(targetSelector),
+    top = moveTopSelector ? document.querySelector(moveTopSelector) : box,
+    wrap = document.querySelector(wrapSelector)
+  top.onmousedown = (ev) => {
+    let e = ev || window.event;
+    // distance from the mousedown point to the top edge of the box
+    let restTop = e.clientY - box.offsetTop;
+    let topRestTop = e.clientY - top.offsetTop;
+    document.onmousemove = function (ev) {
+      let e = ev || window.event;
+      let boxTop = e.clientY - restTop;
+      let topTop = e.clientY - topRestTop;
+      const contentHeight = document.body.offsetHeight - wrap.offsetTop
+      // clamp at the top
+      if (boxTop - wrap.offsetTop < boundaryTop) {
+        boxTop = boundaryTop
+      }
+      if (topTop - wrap.offsetTop + top.offsetHeight < boundaryTop) {
+        topTop = boundaryTop - top.offsetHeight
+      }
+      // clamp at the bottom
+      if (contentHeight - boxTop < boundaryBottom) {
+        boxTop = contentHeight - boundaryBottom
+      }
+      if (contentHeight - topTop - top.offsetHeight < boundaryBottom) {
+        topTop = contentHeight - boundaryBottom - top.offsetHeight
+      }
+
+      box.style.top = boxTop + "px"
+      box.style.height = contentHeight - boxTop + top.offsetHeight + "px"
+      top.style.top = topTop + "px"
+
+      // keep the active log textarea scrollable within the resized panel
+      let area = document.querySelector('.ant-tabs-tabpane-active.log-textarea')
+      if (area) {
+        area.style.overflowY = 'auto'
+        area.style.height = contentHeight - boxTop - 45 + "px"
+      }
+    };
+    document.onmouseup = function () {
+      document.onmousemove = null;
+      document.onmouseup = null;
+    }
+  }
+}
+
+export const getEnvironment = () => {
+  return localStorage.getItem('exchangis_environment') || ''
+}
\ No newline at end of file
diff --git a/web/src/components/PageLoading.vue b/web/src/components/PageLoading.vue
new file mode 100644
index 000000000..b9ae15a10
--- /dev/null
+++ b/web/src/components/PageLoading.vue
@@ -0,0 +1,27 @@
+
+
+
diff --git a/web/src/components/UserCenter.vue b/web/src/components/UserCenter.vue
new file mode 100644
index 000000000..99bae8b5f
--- /dev/null
+++ b/web/src/components/UserCenter.vue
@@ -0,0 +1,21 @@
+
+
+
diff --git a/web/src/components/svgIcon/index.js b/web/src/components/svgIcon/index.js
new file mode 100644
index 000000000..b7a6c3fa9
--- /dev/null
+++ b/web/src/components/svgIcon/index.js
@@ -0,0 +1,9 @@
+import Vue from 'vue'
+import SvgIcon from './index.vue'
+
+// register globally
+Vue.component('SvgIcon', SvgIcon)
+
+const req = require.context('./svg', true, /\.svg$/)
+const requireAll = requireContext => requireContext.keys().map(requireContext)
+requireAll(req)
\ No newline at end of file
diff --git a/web/src/components/svgIcon/index.vue b/web/src/components/svgIcon/index.vue
new file mode 100644
index 000000000..978c4a90a
--- /dev/null
+++ b/web/src/components/svgIcon/index.vue
@@ -0,0 +1,94 @@
+
+
+
+
\ No newline at end of file
diff --git a/web/src/components/svgIcon/svg/failure.svg b/web/src/components/svgIcon/svg/failure.svg
new file mode 100644
index 000000000..3ae160b76
--- /dev/null
+++ b/web/src/components/svgIcon/svg/failure.svg
@@ -0,0 +1 @@
+
\ No newline at end of file
diff --git a/web/src/components/svgIcon/svg/forced-success.svg b/web/src/components/svgIcon/svg/forced-success.svg
new file mode 100644
index 000000000..d9dbcdbe8
--- /dev/null
+++ b/web/src/components/svgIcon/svg/forced-success.svg
@@ -0,0 +1 @@
+
\ No newline at end of file
diff --git a/web/src/components/svgIcon/svg/refresh.svg b/web/src/components/svgIcon/svg/refresh.svg
new file mode 100644
index
000000000..a2eb368a7 --- /dev/null +++ b/web/src/components/svgIcon/svg/refresh.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/web/src/components/svgIcon/svg/running-execution.svg b/web/src/components/svgIcon/svg/running-execution.svg new file mode 100644 index 000000000..38726206a --- /dev/null +++ b/web/src/components/svgIcon/svg/running-execution.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/web/src/components/svgIcon/svg/submitted-success.svg b/web/src/components/svgIcon/svg/submitted-success.svg new file mode 100644 index 000000000..1f6afda8d --- /dev/null +++ b/web/src/components/svgIcon/svg/submitted-success.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/web/src/components/svgIcon/svg/success.svg b/web/src/components/svgIcon/svg/success.svg new file mode 100644 index 000000000..3928d81a6 --- /dev/null +++ b/web/src/components/svgIcon/svg/success.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/web/src/components/svgIcon/svg/waitting-thread.svg b/web/src/components/svgIcon/svg/waitting-thread.svg new file mode 100644 index 000000000..4641c297b --- /dev/null +++ b/web/src/components/svgIcon/svg/waitting-thread.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/web/src/images/blank.svg b/web/src/images/blank.svg new file mode 100644 index 000000000..2d12f8067 --- /dev/null +++ b/web/src/images/blank.svg @@ -0,0 +1,55 @@ + + + 编组 5 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/web/src/images/dataSourceTypeIcon/ELASTICSEARCH.png b/web/src/images/dataSourceTypeIcon/ELASTICSEARCH.png new file mode 100644 index 000000000..241cceb48 Binary files /dev/null and b/web/src/images/dataSourceTypeIcon/ELASTICSEARCH.png differ diff --git a/web/src/images/dataSourceTypeIcon/HIVE.png b/web/src/images/dataSourceTypeIcon/HIVE.png new file mode 100644 index 000000000..ae30505a1 Binary files /dev/null and b/web/src/images/dataSourceTypeIcon/HIVE.png differ diff --git a/web/src/images/dataSourceTypeIcon/MONGODB.png b/web/src/images/dataSourceTypeIcon/MONGODB.png new file mode 100644 index 000000000..02f299330 Binary files /dev/null and b/web/src/images/dataSourceTypeIcon/MONGODB.png differ diff --git a/web/src/images/dataSourceTypeIcon/MYSQL.png b/web/src/images/dataSourceTypeIcon/MYSQL.png new file mode 100644 index 000000000..bd48cd8ec Binary files /dev/null and b/web/src/images/dataSourceTypeIcon/MYSQL.png differ diff --git a/web/src/images/dataSourceTypeIcon/ORACLE.png b/web/src/images/dataSourceTypeIcon/ORACLE.png new file mode 100644 index 000000000..1b7b9a95d Binary files /dev/null and b/web/src/images/dataSourceTypeIcon/ORACLE.png differ diff --git a/web/src/images/dataSourceTypeIcon/SFTP.png b/web/src/images/dataSourceTypeIcon/SFTP.png new file mode 100644 index 000000000..edb3907df Binary files /dev/null and b/web/src/images/dataSourceTypeIcon/SFTP.png differ diff --git a/web/src/images/dataSourceTypeIcon/STARROCKS.png b/web/src/images/dataSourceTypeIcon/STARROCKS.png new file mode 100644 index 000000000..50269cf08 Binary files /dev/null and b/web/src/images/dataSourceTypeIcon/STARROCKS.png differ diff --git a/web/src/images/dataSourceTypeIcon/TDSQL.png b/web/src/images/dataSourceTypeIcon/TDSQL.png new file mode 100644 index 000000000..03b2815ab Binary files /dev/null and b/web/src/images/dataSourceTypeIcon/TDSQL.png differ diff --git a/web/src/images/dataSourceTypeIcon/database.png b/web/src/images/dataSourceTypeIcon/database.png new 
file mode 100644 index 000000000..3417d4aac Binary files /dev/null and b/web/src/images/dataSourceTypeIcon/database.png differ diff --git a/web/src/images/dataSourceTypeIcon/database_active.png b/web/src/images/dataSourceTypeIcon/database_active.png new file mode 100644 index 000000000..c4d9ae821 Binary files /dev/null and b/web/src/images/dataSourceTypeIcon/database_active.png differ diff --git a/web/src/images/dataSourceTypeIcon/table.png b/web/src/images/dataSourceTypeIcon/table.png new file mode 100644 index 000000000..56367dabc Binary files /dev/null and b/web/src/images/dataSourceTypeIcon/table.png differ diff --git a/web/src/images/datax-icon.png b/web/src/images/datax-icon.png new file mode 100644 index 000000000..f743fcd5d Binary files /dev/null and b/web/src/images/datax-icon.png differ diff --git a/web/src/images/datax.png b/web/src/images/datax.png new file mode 100644 index 000000000..0634b208e Binary files /dev/null and b/web/src/images/datax.png differ diff --git a/web/src/images/flink.svg b/web/src/images/flink.svg new file mode 100644 index 000000000..5c54db4f5 --- /dev/null +++ b/web/src/images/flink.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/web/src/images/homePage/u3989.png b/web/src/images/homePage/u3989.png new file mode 100644 index 000000000..34515e0eb Binary files /dev/null and b/web/src/images/homePage/u3989.png differ diff --git a/web/src/images/homePage/u3998.png b/web/src/images/homePage/u3998.png new file mode 100644 index 000000000..de3b8fd5d Binary files /dev/null and b/web/src/images/homePage/u3998.png differ diff --git a/web/src/images/homePage/u3999.png b/web/src/images/homePage/u3999.png new file mode 100644 index 000000000..7b83daf25 Binary files /dev/null and b/web/src/images/homePage/u3999.png differ diff --git a/web/src/images/homePage/u4009.png b/web/src/images/homePage/u4009.png new file mode 100644 index 000000000..47a86ba9d Binary files /dev/null and b/web/src/images/homePage/u4009.png differ diff --git a/web/src/images/homePage/u4010.png b/web/src/images/homePage/u4010.png new file mode 100644 index 000000000..80995b0d1 Binary files /dev/null and b/web/src/images/homePage/u4010.png differ diff --git a/web/src/images/homePage/u6032.png b/web/src/images/homePage/u6032.png new file mode 100644 index 000000000..585444717 Binary files /dev/null and b/web/src/images/homePage/u6032.png differ diff --git a/web/src/images/homePage/u6036.png b/web/src/images/homePage/u6036.png new file mode 100644 index 000000000..abcd67515 Binary files /dev/null and b/web/src/images/homePage/u6036.png differ diff --git a/web/src/images/icon.png b/web/src/images/icon.png new file mode 100644 index 000000000..ff46a842d Binary files /dev/null and b/web/src/images/icon.png differ diff --git a/web/src/images/jobDetail/u2664.png b/web/src/images/jobDetail/u2664.png new file mode 100644 index 000000000..a2e6d40f0 Binary files /dev/null and b/web/src/images/jobDetail/u2664.png differ diff --git a/web/src/images/jobDetail/u2666.png b/web/src/images/jobDetail/u2666.png new file mode 100644 index 000000000..43cf0e573 Binary files /dev/null and b/web/src/images/jobDetail/u2666.png differ diff --git a/web/src/images/jobDetail/u6239.png b/web/src/images/jobDetail/u6239.png new file mode 100644 index 000000000..16221cf2d Binary files /dev/null and b/web/src/images/jobDetail/u6239.png differ diff --git a/web/src/images/jobDetail/u6240.png b/web/src/images/jobDetail/u6240.png new file mode 100644 index 000000000..7ebd921b7 Binary files /dev/null and 
b/web/src/images/jobDetail/u6240.png differ
diff --git a/web/src/images/sqoop-icon.png b/web/src/images/sqoop-icon.png
new file mode 100644
index 000000000..31f83e85b
Binary files /dev/null and b/web/src/images/sqoop-icon.png differ
diff --git a/web/src/images/sqoop.svg b/web/src/images/sqoop.svg
new file mode 100644
index 000000000..cc5d6c8a7
--- /dev/null
+++ b/web/src/images/sqoop.svg
@@ -0,0 +1 @@
+
\ No newline at end of file
diff --git a/web/src/images/u1972.png b/web/src/images/u1972.png
new file mode 100644
index 000000000..50c46ca65
Binary files /dev/null and b/web/src/images/u1972.png differ
diff --git a/web/src/images/u32.svg b/web/src/images/u32.svg
new file mode 100644
index 000000000..e67551d38
--- /dev/null
+++ b/web/src/images/u32.svg
@@ -0,0 +1 @@
+
\ No newline at end of file
diff --git a/web/src/locales/en-US.js b/web/src/locales/en-US.js
new file mode 100644
index 000000000..5f64b4b08
--- /dev/null
+++ b/web/src/locales/en-US.js
@@ -0,0 +1,438 @@
+export default {
+  globalMenu: {
+    projectManage: "Project",
+    dataSourceManage: "Data Source",
+    jobManagement: "Job Management",
+    synchronizationHistory: "Synchronization History",
+    homePage: "Home Page",
+  },
+  projectManage: {
+    topLine: {
+      searchBar: {
+        searchInputPlaceholder: "Enter a project name and press Enter to search",
+        searchButtonText: "Search",
+      },
+      title: "Project List",
+    },
+    createCardText: "Create Project",
+    viewCard: {
+      actionTitle: "Action",
+      action: {
+        edit: "Edit",
+        delete: "Delete",
+      },
+    },
+    editModal: {
+      title: {
+        create: "Create Project",
+        edit: "Edit Project",
+      },
+      form: {
+        fields: {
+          projectName: {
+            label: "Name",
+            placeholder: "Please enter the project name",
+          },
+          tags: {
+            label: "Tags",
+            placeholder: "Please select project tags",
+          },
+          description: {
+            label: "Description",
+            placeholder: "Please enter a description",
+          },
+          viewUsers: {
+            label: "View Users",
+            placeholder: "Please select users with view permission",
+          },
+          execUsers: {
+            label: "Execute Users",
+            placeholder: "Please select users with execute permission",
+          },
+          editUsers: {
+            label: "Edit Users",
+            placeholder: "Please select users with edit permission",
+          },
+        },
+      },
+    },
+  },
+  dataSource: {
+    topLine: {
+      searchBar: {
+        dataTypePlaceholder: "Data source type",
+        creatorPlaceholder: "Creator",
+        namePlaceholder: "Data source name",
+        searchButtonText: "Search",
+      },
+      createDataSourceButton: "Create Data Source",
+      importsDataSource: "Batch import data sources",
+      exportsDataSource: "Batch export data sources",
+    },
+    table: {
+      list: {
+        columns: {
+          title: {
+            name: "Name",
+            type: "Type",
+            status: "Status",
+            tags: "Tags",
+            version: "Version",
+            describe: "Description",
+            updatetim: "Update Time",
+            creator: "Creator",
+            updater: "Updater",
+            action: "Action",
+          },
+          actions: {
+            testConnectButton: "Test Connection",
+            editButton: "Edit",
+            expireButton: "Expire",
+            deleteButton: "Delete",
+          },
+        },
+      },
+    },
+    editModal: {
+      title: {
+        create: "Create Data Source",
+        edit: "Edit Data Source",
+        read: "View Data Source"
+      },
+      form: {
+        fields: {
+          dataSourceName: {
+            label: "Name",
+            placeholder: "Please enter the data source name",
+          },
+          dataSourceDesc: {
+            label: "Description",
+            placeholder: "Please enter a description",
+          },
+        },
+      },
+    },
+    sourceTypeModal: {
+      title: "Data Source Type",
+      searchInputPlaceholder: "Search types by name",
+    },
+  },
+
+  overview: "Overview",
+  i18n: {
+    internationalization: "internationalization, based on",
+    achieve: "to achieve.",
+    ui: "UI components",
+  },
+  job: {
+    list: "Job List",
+    type: {
+      offline: "Offline Job",
+      stream: "Stream Job",
+    },
+    action: {
+      createJob: "Create Job",
+      copyJob: "Copy Job",
"Copy Job", + modifyJob: "Modify Job", + import: "Import", + export: "Export", + save: "Save", + cancel: "Cancel", + addJobSuccess: "Create Job Success", + copyJobSuccess: "Copy Job Success", + deleteJobSuccess: "Delete Job Success", + yes: "YES", + no: "NO", + confirmDelete: "Are you sure to delete this task?", + manage: "MANAGE", + fileUpSuccess: "File uploaded successfully", + fileUpFailed: "File upload failed.", + jobSearch: "Please input text for search job", + }, + jobDetail: { + originJob: "Original Job", + name: "Job name", + label: "Job label", + type: "Job type", + engine: "Engine", + description: "Description", + jobNameEmpty: "Job name cannot be empty", + jobTypeEmpty: "Job type cannot be empty", + engineEmpty: "Engine type cannot be empty", + }, + }, + message: { + "linkis": { + "refresh": "Refresh", + "noDescription": "No description", + "placeholderZip": "Please enter the workspace Python package path (only zip is supported)!", + "emptyString": "Empty string", + "description": "Description", + "name": "Name", + "order": "Order", + "addParameterConfig": "Add parameter configuration", + "editDescriptionEngineConfig": "Edit description engine config", + "addAppType": "Add application type", + "editContents": "Add contents", + "eurekeRegisterCenter": "Eureke Register Center", + "addTags": "Add Tags", + "unfold": "Unfold", + "fold": "Fold", + "jumpPage": "Please check in the jump page...", + "initiator": "Initiator", + "find": "Find", + "errorCode": "Error code", + "errorDescription": "Error description", + "notLog": "Log not obtained!", + "editedSuccess": "Edited success!", + "stopEngineTip": "Are you sure you want to stop the current engine?", + "addVariable": "Add variable", + "defaultValue": "Default", + "noDefaultValue": "No default value", + "stop": "Stop", + "tip": "Tip", + "serverTip": "No Results(resultLocation:null)", + "log": "Task log", + "detail": "Task detail", + "result": "Task result", + "startUp": "Start-up", + "tagEdit": "Edit", + "rename": "Rename", + "instanceNum": "Instance Number", + "keyTip": "The key cannot be empty", + "instanceName": "Instance Name", + "resources": "Resources", + "reset": "Reset", + "remove": "Remove", + "submit": "Submit", + "search": "Search", + "save": "Save", + "edit": "Edit", + "cancel": "Cancel", + "noDataText": "No data yet", + "userName": "Username", + "unselect": "Please select task", + "searchName": "Please enter username to search", + "generalView": "Switch to the common view", + "manageView": "Switch to the admin view", + "back": "Back", + "warning": { + "api": "Requesting API, please hold on!", + "data": "Requesting data, please hold on!", + "waiting": "Please wait for API response!", + "biLoading": "Connecting with Visualis, please hold on!", + "comingSoon": "New version is being integrated, so stay tuned !", + "GJZ": "Open source co-construction is in progress, so stay tuned!" 
+      },
+      "resourceManagement": {
+        "resourceUsage": "Resource usage",
+        "applicationList": "Application List"
+      },
+      "time": {
+        "second": "Second",
+        "minute": "Minute",
+        "hour": "Hour",
+        "day": "Day"
+      },
+      "tableColumns": {
+        "engineInstance": "Engine instance",
+        "engineType": "Engine type",
+        "taskID": "Task ID",
+        "fileName": "Source",
+        "executionCode": "Execution Code",
+        "status": "Status",
+        "label": "Label",
+        "engineVersion": "Engine Version",
+        "engineVersionCannotBeNull": "Engine Version Cannot Be Null",
+        "usedResources": "Used resources",
+        "maximumAvailableResources": "Maximum available resources",
+        "minimumAvailableResources": "Minimum available resources",
+        "startTime": "Start time",
+        "costTime": "Time Elapsed",
+        "executeApplicationName": "Execution Engine",
+        "requestApplicationName": "Created From",
+        "user": "User",
+        "createdTime": "Created At",
+        "updateTime": "Update Time",
+        "failedReason": "Key Information",
+        "control": {
+          "title": "Handle",
+          "label": "View"
+        }
+      },
+      "logLoading": "Requesting logs, please hold on",
+      "title": "Linkis Control Panel",
+      "info": "{num} new messages",
+      "hint": "Please view in the redirected page...",
+      "sideNavList": {
+        "news": {
+          "name": "Latest Activities",
+          "children": {
+            "daily": "Daily Operation Report"
+          }
+        },
+        "function": {
+          "name": "Frequently Used",
+          "children": {
+            "globalHistory": "Global History",
+            "resource": "Resource Manager",
+            "setting": "Settings",
+            "dateReport": "Global Variables",
+            "globalValiable": "Frequently Asked Questions",
+            "microserviceManage": "Microservice management",
+            "ECMManage": "ECM Management",
+            "dataSourceManage": "DataSource Manage"
+          }
+        }
+      },
+      "formItems": {
+        "id": {
+          "placeholder": "Please enter an ID to search"
+        },
+        "date": {
+          "label": "Start Date",
+          "placeholder": "Please choose the start date"
+        },
+        "engine": {
+          "label": "Engine"
+        },
+        "status": {
+          "label": "Status"
+        }
+      },
+      "columns": {
+        "taskID": "Task ID",
+        "fileName": "Script Name",
+        "executionCode": "Execution Code",
+        "status": "Status",
+        "costTime": "Time Elapsed",
+        "executeApplicationName": "Execution Engine",
+        "requestApplicationName": "Created From",
+        "progress": "Progress",
+        "createdTime": "Created At",
+        "updatedTime": "Updated At",
+        "control": {
+          "title": "Handle",
+          "label": "View"
+        },
+        "moduleName": "Module Name",
+        "totalResource": "Total Resources",
+        "usedResource": "Used Resources",
+        "initializingResource": "Initializing Resources",
+        "memory": "Memory",
+        "engineInstance": "Engine Instance",
+        "applicationName": "Application Name",
+        "usedTime": "Started At",
+        "engineStatus": "Status",
+        "username": "Username"
+      },
+      "shortcuts": {
+        "week": "Recent Week",
+        "month": "Recent Month",
+        "threeMonths": "Recent Three Months"
+      },
+      "statusType": {
+        "all": "All",
+        "inited": "Waiting",
+        "running": "Running",
+        "succeed": "Succeeded",
+        "cancelled": "Canceled",
+        "failed": "Failed",
+        "scheduled": "Applying Resources",
+        "timeout": "Timeout",
+        "retry": "Retry",
+        "unknown": "Unknown"
+      },
+      "engineTypes": {
+        "all": "All"
+      },
+      "header": "Resource Manager",
+      "tabs": {
+        "first": "User Sessions",
+        "second": "User Resources",
+        "third": "Server Resources"
+      },
+      "noLimit": "Unlimited",
+      "core": "Cores",
+      "row": {
+        "applicationName": "Application Name",
+        "usedTime": "Started At",
+        "engineStatus": "Status",
+        "engineInstance": "Engine Instance",
+        "queueName": "Queue Name",
+        "user": "User",
+        "cpu": "Used server CPU resources",
+        "memory": "Used server memory resources",
"queueCpu": "Used Yarn queue CPU resources", + "queueMemory": "Used Yarn queue memory resources" + }, + "setting": { + "global": "Global", + "globalSetting": "Global Settings", + "hide": "Hide", + "show": "Show", + "advancedSetting": "Advanced Settings", + "dataDev": "Data Development" + }, + "globalValiable": "Global Variables", + "rules": { + "first": { + "required": "The key of variable {text} is empty", + "lengthLimit": "Length between 1 to 128 characters", + "letterTypeLimit": "Started with alphabetic characters, spance and Chinese characters are not allowed", + "placeholder": "Please enter the variable name" + }, + "second": { + "required": "The value of variable {text} is empty", + "lengthLimit": "Length between 1 to 128 characters", + "placeholder": "Please enter the variable value" + } + }, + "addArgs": "New argument", + "emptyDataText": "No global variable data yet", + "sameName": "Duplicated key", + "error": { + "validate": "Invalid items found, please check and then retry!" + }, + "success": { + "update": "Successfully updated global variables!" + }, + "datasource": { + "pleaseInput": "Please input", + "datasourceSrc": "Datasource", + "connectTest": "Test Connection", + "sourceName": "Data source name", + "sourceDec": "Data source description", + "sourceType": "Data source type:", + "creator": "Creator:", + "create": "New data source", + "exports": "Demonstration export data source", + "imports": "Demonstration of importing data sources", + "Expired": "Expired", + "versionList": "Version List", + "dataSourceName": "Data Source Name", + "dataSourceType": "Data Source Type", + "dataSourceEnv": "Available Space", + "status": "Status", + "permissions": "Permissions", + "label": "label", + "Version": "Version", + "desc": "Description", + "action": "Action", + "createUser": "Create User", + "createTime": "Create Time", + "versionDec": "Version Description", + "watch": "View", + "rollback": "Rollback", + "publish": "Publish", + "initVersion": "Initial Version", + "updateVersion": "Version update", + "published": "Published", + "unpublish": "Unpublished", + "cannotPublish": "Cannot Publish", + "used": "Available", + "commentValue": "Roll back from version {text}" + } + } + } +}; diff --git a/web/src/locales/zh-CN.js b/web/src/locales/zh-CN.js new file mode 100644 index 000000000..7eea71432 --- /dev/null +++ b/web/src/locales/zh-CN.js @@ -0,0 +1,434 @@ +export default { + globalMenu: { + projectManage: "项目管理", + dataSourceManage: "数据源管理", + jobManagement: "数据任务管理", + synchronizationHistory: "同步历史", + homePage: "首页", + }, + projectManage: { + topLine: { + searchBar: { + searchInputPlaceholder: "输入项目名,按回车搜索", + searchButtonText: "搜索", + }, + title: "项目列表", + }, + createCardText: "创建项目", + viewCard: { + actionTitle: "操作", + action: { + edit: "编辑", + delete: "删除", + }, + }, + editModal: { + title: { + create: "创建项目", + edit: "修改项目", + }, + form: { + fields: { + projectName: { + label: "项目名", + placeholder: "请输入项目名", + }, + tags: { + label: "标签", + placeholder: "请选择标签", + }, + description: { + label: "描述", + placeholder: "请填写描述", + }, + viewUsers: { + label: "查看权限", + placeholder: "请选择查看权限", + }, + execUsers: { + label: "执行权限", + placeholder: "请选择执行权限", + }, + editUsers: { + label: "编辑权限", + placeholder: "请选择编辑权限", + }, + }, + }, + }, + }, + dataSource: { + topLine: { + searchBar: { + dataTypePlaceholder: "数据源类型", + creatorPlaceholder: "创建者", + namePlaceholder: "数据源名称", + searchButtonText: "搜索", + }, + createDataSourceButton: "创建数据源", + importsDataSource: "批量导入数据源", + exportsDataSource: 
"批量导出数据源", + }, + table: { + list: { + columns: { + title: { + name: "数据源名称", + type: "类型", + status: "状态", + tags: "标签", + version: "版本", + describe: "描述", + updatetim: "更新时间", + creator: "创建者", + updater: "更新者", + action: "操作", + }, + actions: { + testConnectButton: "测试连接", + editButton: "编辑", + expireButton: "过期", + deleteButton: "删除", + }, + }, + }, + }, + editModal: { + title: { + create: "创建数据源", + edit: "修改数据源", + read: "查看数据源" + }, + form: { + fields: { + dataSourceName: { + label: "数据源名称", + placeholder: "请输入数据源名称", + }, + dataSourceDesc: { + label: "描述", + placeholder: "请输入描述", + }, + }, + }, + }, + sourceTypeModal: { + title: "数据源", + searchInputPlaceholder: "输入关键字搜索数据源", + }, + }, + job: { + list: "任务列表", + type: { + offline: "离线任务", + stream: "流式任务", + }, + action: { + createJob: "创建任务", + copyJob: "复制任务", + modifyJob: "编辑任务", + import: "导入", + export: "导出", + save: "保存", + cancel: "取消", + addJobSuccess: "新建任务成功", + copyJobSuccess: "复制任务成功", + deleteJobSuccess: "删除任务成功", + yes: "是", + no: "不", + confirmDelete: "确认删除该任务?", + manage: "管理", + fileUpSuccess: "文件上传成功", + fileUpFailed: "文件上传失败", + jobSearch: "输入任务名称,按回车搜索", + }, + jobDetail: { + originJob: "原任务", + name: "任务名", + label: "业务标签", + type: "任务类型", + engine: "执行引擎", + description: "任务描述", + jobNameEmpty: "任务名不能为空", + jobTypeEmpty: "任务类型不能为空", + engineEmpty: "执行引擎不能为空", + }, + }, + message: { + "linkis": { + "refresh": "刷新", + "placeholderZip": "请输入工作空间python包路径(只支持zip)!", + "emptyString": "空字符串", + "addAppType": "新增应用类型", + "editContents": "编辑目录", + "eurekeRegisterCenter": "Eureke注册中心", + "addParameterConfig": "新增参数配置", + "editDescriptionEngineConfig": "编辑引擎配置", + "name": "名称", + "order": "顺序", + "description": "描述", + "noDescription": "暂无描述", + "addTags": "添加标签", + "find": "查询", + "initiator": "启动者", + "jumpPage": "请在跳转页面查看……", + "editedSuccess": "修改成功", + "errorCode": "错误码", + "errorDescription": "错误描述", + "notLog": "未获取到日志!", + "unfold": "展开", + "fold": "收起", + "addVariable": "新增变量", + "defaultValue": "默认值", + "noDefaultValue": "无默认值", + "stop": "停止", + "tip": "提示", + "serverTip": "无结果集(resultLocation:null)", + "stopEngineTip": "请问确认要停止当前引擎?", + "log": "任务日志", + "detail": "任务详情", + "result": "任务结果", + "startUp": "启动", + "tagEdit": "编辑", + "keyTip": "不能为空", + "rename": "重命名", + "resources": "资源", + "instanceNum": "实例数", + "instanceName": "实例名称", + "reset": "重置", + "remove": "移除", + "submit": "确定", + "search": "搜索", + "save": "保存", + "edit": "编辑", + "cancel": "取消", + "noDataText": "暂无数据", + "userName": "用户名", + "unselect": "请选择", + "searchName": "请输入用户名搜索", + "generalView": "切换普通视图", + "manageView": "切换管理员视图", + "back": "返回", + "prev": "上一步", + "complete": "完成", + "close": "关闭", + "warning": { + "api": "接口请求中,请稍候!", + "data": "数据请求中,请稍候!", + "waiting": "请等待接口返回!", + "biLoading": "正在和Visualis系统通讯,请稍候!", + "comingSoon": "尚未开源,敬请期待!" 
+ }, + "resourceManagement": { + "resourceUsage": "资源使用情况", + "applicationList": "应用列表" + }, + "time": { + "second": "秒", + "minute": "分钟", + "hour": "小时", + "day": "天" + }, + "tableColumns": { + "engineInstance": "引擎实例", + "engineType": "引擎类型", + "taskID": "任务ID", + "fileName": "来源", + "executionCode": "查询语句", + "status": "状态", + "costTime": "已耗时", + "label": "标签", + "engineVersion": "引擎版本", + "engineVersionCannotBeNull": "引擎版本不能为空", + "addEngineRules": "应用名称不能有特殊符号和空格", + "usedResources": "已用资源", + "maximumAvailableResources": "最大可用资源", + "minimumAvailableResources": "最小可用资源", + "startTime": "启动时间", + "executeApplicationName": "执行引擎", + "requestApplicationName": "创建者", + "user": "用户", + "createdTime": "创建时间", + "updateTime": "更新时间", + "failedReason": "关键信息", + "control": { + "title": "操作", + "label": "查看" + } + }, + "logLoading": "日志请求中,请稍后", + "title": "Linkis计算治理台", + "info": "{num} 条新消息", + "hint": "请在跳转页面查看……", + "sideNavList": { + "news": { + "name": "最新动态", + "children": { + "daily": "运营日报" + } + }, + "function": { + "name": "常用功能", + "children": { + "globalHistory": "全局历史", + "resource": "资源管理", + "setting": "参数配置", + "dateReport": "全局变量", + "globalValiable": "常见问题", + "ECMManage": "ECM管理", + "microserviceManage": "微服务管理", + "dataSourceManage": "数据源管理" + } + } + }, + "formItems": { + "id": { + "placeholder": "请输入ID搜索" + }, + "date": { + "label": "起始时间", + "placeholder": "请选择起始日期" + }, + "engine": { + "label": "引擎" + }, + "status": { + "label": "状态" + } + }, + "columns": { + "taskID": "任务ID", + "fileName": "脚本名称", + "executionCode": "查询语句", + "status": "状态", + "costTime": "已耗时", + "executeApplicationName": "执行引擎", + "requestApplicationName": "创建者", + "progress": "进度", + "createdTime": "创建时间", + "updatedTime": "最后更新时间", + "control": { + "title": "操作", + "label": "查看" + }, + "moduleName": "模块名称", + "totalResource": "总资源", + "usedResource": "已用资源", + "initializingResource": "初始化中资源", + "memory": "内存", + "engineInstance": "引擎实例", + "applicationName": "应用名称", + "usedTime": "开始时间", + "engineStatus": "状态", + "username": "用户名" + }, + "shortcuts": { + "week": "最近一周", + "month": "最近一个月", + "threeMonths": "最近三个月" + }, + "statusType": { + "all": "全部", + "inited": "排队中", + "running": "运行", + "succeed": "成功", + "cancelled": "取消", + "failed": "失败", + "scheduled": "资源申请中", + "timeout": "超时", + "retry": "重试", + "unknown": "未知" + }, + "engineTypes": { + "all": "全部" + }, + "header": "资源管理器", + "tabs": { + "first": "用户会话", + "second": "用户资源", + "third": "服务器资源" + }, + "noLimit": "无限制", + "core": "核", + "row": { + "applicationName": "应用名称", + "usedTime": "开始时间", + "engineStatus": "状态", + "engineInstance": "引擎实例", + "queueName": "队列名称", + "user": "用户", + "cpu": "已用的服务器CPU资源", + "memory": "已用的服务器内存资源", + "queueCpu": "已用的Yarn队列CPU资源", + "queueMemory": "已用的Yarn队列内存资源" + }, + "setting": { + "global": "全局", + "globalSetting": "通用设置", + "hide": "隐藏", + "show": "显示", + "advancedSetting": "高级设置", + "dataDev": "数据开发" + }, + "globalValiable": "全局变量", + "rules": { + "first": { + "required": "变量 {text} 的key为空", + "lengthLimit": "长度应为 1 至 128 个字符", + "letterTypeLimit": "仅支持以字母开头,且不得存在空格和中文", + "placeholder": "请输入变量名" + }, + "second": { + "required": "变量 {text} 的value为空", + "lengthLimit": "长度应为 1 至 128 个字符", + "placeholder": "请输入变量值" + } + }, + "addArgs": "增加参数", + "emptyDataText": "暂无全局变量数据", + "sameName": "存在同名key", + "error": { + "validate": "有验证项未通过,请检查后再试!" + }, + "success": { + "update": "全局变量更新成功!" 
+ }, + "datasource": { + "pleaseInput": "请输入", + "datasourceSrc": "数据源", + "connectTest": "测试连接", + "sourceName": "数据源名称", + "sourceDec": "数据源描述", + "sourceType": "数据源类型:", + "creator": "创建人:", + "create": "新增数据源", + "exports": "批量导出数据源", + "imports": "批量导入数据源", + "overdue": "过期", + "versionList": "版本列表", + "dataSourceName": "数据源名称", + "dataSourceType": "数据源类型", + "dataSourceEnv": "可用集群", + "status": "状态", + "permissions": "权限", + "label": "标签", + "version": "版本", + "desc": "描述", + "action": "操作", + "createUser": "创建人", + "createTime": "创建时间", + "versionDec": "版本描述", + "watch": "查看", + "rollback": "回滚", + "publish": "发布", + "initVersion": "初始化版本", + "updateVersion": "版本更新", + "published": "已发布", + "unpublish": "未发布", + "cannotPublish": "不可发布", + "used": "可用", + "commentValue": "从版本 {text} 回滚" + } + } + } +}; diff --git a/web/src/pages/dataSourceManage/components/datasourceForm/index.vue b/web/src/pages/dataSourceManage/components/datasourceForm/index.vue new file mode 100644 index 000000000..d526d4e6d --- /dev/null +++ b/web/src/pages/dataSourceManage/components/datasourceForm/index.vue @@ -0,0 +1,500 @@ + + + + diff --git a/web/src/pages/dataSourceManage/components/editModal.vue b/web/src/pages/dataSourceManage/components/editModal.vue new file mode 100644 index 000000000..064bbb992 --- /dev/null +++ b/web/src/pages/dataSourceManage/components/editModal.vue @@ -0,0 +1,247 @@ + + + + + diff --git a/web/src/pages/dataSourceManage/components/encryptModal.vue b/web/src/pages/dataSourceManage/components/encryptModal.vue new file mode 100644 index 000000000..3dde79665 --- /dev/null +++ b/web/src/pages/dataSourceManage/components/encryptModal.vue @@ -0,0 +1,111 @@ + + + + + + diff --git a/web/src/pages/dataSourceManage/components/selectTypeModal.vue b/web/src/pages/dataSourceManage/components/selectTypeModal.vue new file mode 100644 index 000000000..db6daf2af --- /dev/null +++ b/web/src/pages/dataSourceManage/components/selectTypeModal.vue @@ -0,0 +1,79 @@ + + + + + diff --git a/web/src/pages/dataSourceManage/components/topLine.vue b/web/src/pages/dataSourceManage/components/topLine.vue new file mode 100644 index 000000000..4a7fddd13 --- /dev/null +++ b/web/src/pages/dataSourceManage/components/topLine.vue @@ -0,0 +1,85 @@ + + + + + + diff --git a/web/src/pages/dataSourceManage/components/versionModal.vue b/web/src/pages/dataSourceManage/components/versionModal.vue new file mode 100644 index 000000000..9bf119c22 --- /dev/null +++ b/web/src/pages/dataSourceManage/components/versionModal.vue @@ -0,0 +1,99 @@ + + + + + diff --git a/web/src/pages/dataSourceManage/index.vue b/web/src/pages/dataSourceManage/index.vue new file mode 100644 index 000000000..4697157e3 --- /dev/null +++ b/web/src/pages/dataSourceManage/index.vue @@ -0,0 +1,433 @@ + + + + diff --git a/web/src/pages/homePage/index.vue b/web/src/pages/homePage/index.vue new file mode 100644 index 000000000..31fb0bafd --- /dev/null +++ b/web/src/pages/homePage/index.vue @@ -0,0 +1,797 @@ + + + + + + diff --git a/web/src/pages/index.vue b/web/src/pages/index.vue new file mode 100644 index 000000000..50f9264cf --- /dev/null +++ b/web/src/pages/index.vue @@ -0,0 +1,8 @@ + + + \ No newline at end of file diff --git a/web/src/pages/jobManagement/components/bottomLog.vue b/web/src/pages/jobManagement/components/bottomLog.vue new file mode 100644 index 000000000..a460d1299 --- /dev/null +++ b/web/src/pages/jobManagement/components/bottomLog.vue @@ -0,0 +1,361 @@ + + + diff --git a/web/src/pages/jobManagement/components/configDrawer.vue 
b/web/src/pages/jobManagement/components/configDrawer.vue new file mode 100644 index 000000000..4fcfbfb17 --- /dev/null +++ b/web/src/pages/jobManagement/components/configDrawer.vue @@ -0,0 +1,320 @@ + + + + + diff --git a/web/src/pages/jobManagement/components/configModal.vue b/web/src/pages/jobManagement/components/configModal.vue new file mode 100644 index 000000000..1ffae5bbc --- /dev/null +++ b/web/src/pages/jobManagement/components/configModal.vue @@ -0,0 +1,255 @@ + + + + + diff --git a/web/src/pages/jobManagement/components/copyModal.vue b/web/src/pages/jobManagement/components/copyModal.vue new file mode 100644 index 000000000..c1e5e2345 --- /dev/null +++ b/web/src/pages/jobManagement/components/copyModal.vue @@ -0,0 +1,115 @@ + + + + + diff --git a/web/src/pages/jobManagement/components/createJob.vue b/web/src/pages/jobManagement/components/createJob.vue new file mode 100644 index 000000000..6293b712e --- /dev/null +++ b/web/src/pages/jobManagement/components/createJob.vue @@ -0,0 +1,300 @@ + + diff --git a/web/src/pages/jobManagement/components/dataSource.vue b/web/src/pages/jobManagement/components/dataSource.vue new file mode 100644 index 000000000..163bbeb06 --- /dev/null +++ b/web/src/pages/jobManagement/components/dataSource.vue @@ -0,0 +1,663 @@ + + + + + diff --git a/web/src/pages/jobManagement/components/dyncRender.vue b/web/src/pages/jobManagement/components/dyncRender.vue new file mode 100644 index 000000000..b831117d7 --- /dev/null +++ b/web/src/pages/jobManagement/components/dyncRender.vue @@ -0,0 +1,290 @@ + + + + diff --git a/web/src/pages/jobManagement/components/executionLog.vue b/web/src/pages/jobManagement/components/executionLog.vue new file mode 100644 index 000000000..080b86dd0 --- /dev/null +++ b/web/src/pages/jobManagement/components/executionLog.vue @@ -0,0 +1,386 @@ + + + + diff --git a/web/src/pages/jobManagement/components/fieldMap.vue b/web/src/pages/jobManagement/components/fieldMap.vue new file mode 100644 index 000000000..bd3f6493b --- /dev/null +++ b/web/src/pages/jobManagement/components/fieldMap.vue @@ -0,0 +1,693 @@ + + + + + diff --git a/web/src/pages/jobManagement/components/index.vue b/web/src/pages/jobManagement/components/index.vue new file mode 100644 index 000000000..c567e9ebe --- /dev/null +++ b/web/src/pages/jobManagement/components/index.vue @@ -0,0 +1,1772 @@ + + + diff --git a/web/src/pages/jobManagement/components/jobDetail.vue b/web/src/pages/jobManagement/components/jobDetail.vue new file mode 100644 index 000000000..0a25e0066 --- /dev/null +++ b/web/src/pages/jobManagement/components/jobDetail.vue @@ -0,0 +1,1808 @@ + + + diff --git a/web/src/pages/jobManagement/components/jobList.vue b/web/src/pages/jobManagement/components/jobList.vue new file mode 100644 index 000000000..f5a38bb13 --- /dev/null +++ b/web/src/pages/jobManagement/components/jobList.vue @@ -0,0 +1,477 @@ + + + diff --git a/web/src/pages/jobManagement/components/job_card.vue b/web/src/pages/jobManagement/components/job_card.vue new file mode 100644 index 000000000..013af95ee --- /dev/null +++ b/web/src/pages/jobManagement/components/job_card.vue @@ -0,0 +1,283 @@ + + + diff --git a/web/src/pages/jobManagement/components/metricsInfo.vue b/web/src/pages/jobManagement/components/metricsInfo.vue new file mode 100644 index 000000000..19cb67c81 --- /dev/null +++ b/web/src/pages/jobManagement/components/metricsInfo.vue @@ -0,0 +1,83 @@ + + + + diff --git a/web/src/pages/jobManagement/components/monacoEditor.vue b/web/src/pages/jobManagement/components/monacoEditor.vue 
new file mode 100644
index 000000000..e8cb69024
--- /dev/null
+++ b/web/src/pages/jobManagement/components/monacoEditor.vue
@@ -0,0 +1,117 @@
+
+
\ No newline at end of file
diff --git a/web/src/pages/jobManagement/components/noTablePartition.vue b/web/src/pages/jobManagement/components/noTablePartition.vue
new file mode 100644
index 000000000..f58edbf98
--- /dev/null
+++ b/web/src/pages/jobManagement/components/noTablePartition.vue
@@ -0,0 +1,119 @@
+
+
\ No newline at end of file
diff --git a/web/src/pages/jobManagement/components/processControl.vue b/web/src/pages/jobManagement/components/processControl.vue
new file mode 100644
index 000000000..716c7e2bf
--- /dev/null
+++ b/web/src/pages/jobManagement/components/processControl.vue
@@ -0,0 +1,223 @@
+
+
+
diff --git a/web/src/pages/jobManagement/components/processor.vue b/web/src/pages/jobManagement/components/processor.vue
new file mode 100644
index 000000000..f5366f38e
--- /dev/null
+++ b/web/src/pages/jobManagement/components/processor.vue
@@ -0,0 +1,253 @@
+
+
\ No newline at end of file
diff --git a/web/src/pages/jobManagement/components/selectDataSource.vue b/web/src/pages/jobManagement/components/selectDataSource.vue
new file mode 100644
index 000000000..2b9a4c27c
--- /dev/null
+++ b/web/src/pages/jobManagement/components/selectDataSource.vue
@@ -0,0 +1,601 @@
+
+
+
diff --git a/web/src/pages/jobManagement/components/syncBottomLog.vue b/web/src/pages/jobManagement/components/syncBottomLog.vue
new file mode 100644
index 000000000..3fb5bf0ca
--- /dev/null
+++ b/web/src/pages/jobManagement/components/syncBottomLog.vue
@@ -0,0 +1,387 @@
+
+
diff --git a/web/src/pages/jobManagement/components/transformer.vue b/web/src/pages/jobManagement/components/transformer.vue
new file mode 100644
index 000000000..6ca84b693
--- /dev/null
+++ b/web/src/pages/jobManagement/components/transformer.vue
@@ -0,0 +1,380 @@
+
+
+
diff --git a/web/src/pages/jobManagement/index.vue b/web/src/pages/jobManagement/index.vue
new file mode 100644
index 000000000..646825e10
--- /dev/null
+++ b/web/src/pages/jobManagement/index.vue
@@ -0,0 +1,366 @@
+
+
diff --git a/web/src/pages/jobManagement/mock.js b/web/src/pages/jobManagement/mock.js
new file mode 100644
index 000000000..e3d78f710
--- /dev/null
+++ b/web/src/pages/jobManagement/mock.js
@@ -0,0 +1,165 @@
+const jobInfo = {
+  id: 1, // job id
+  projectId: 1, // id of the owning project
+  jobName: "任务名",
+  jobType: "OFFLINE | STREAM",
+  engineType: "DataX", // execution engine
+  jobLabels: "",
+  jobDesc: "任务描述",
+  content: {
+    subJobs: [
+      {
+        subjobName: "subjob1",
+        dataSourceIds: {
+          source: {
+            type: "HIVE",
+            id: "10001",
+            db: "db_test",
+            table: "table_test",
+          },
+          sink: {
+            type: "MYSQL",
+            id: "10002",
+            db: "db_mask",
+            table: "table_mask",
+          },
+        },
+        params: {
+          sources: [
+            {
+              field: "exchangis.job.hive.transform_type", // key
+              label: "传输方式",
+              values: ["Record", "二进制"],
+              value: "二进制",
+              unit: "",
+              sort: 1,
+              type: "OPTION",
+              required: true,
+            },
+            {
+              field: "exchangis.job.hive.partition",
+              label: "分区信息",
+              value: "2021-08-17",
+              unit: "",
+              sort: 2,
+              type: "INPUT",
+              required: false,
+            },
+          ],
+          sinks: [],
+        },
+        transforms: {
+          type: "MAPPING",
+          sql: "", // used when type is "SQL"
+          mapping: [
+            // used when type is "MAPPING"
+            {
+              source_field_name: "field1",
+              source_field_type: "varchar",
+              sink_field_name: "field2",
+              sink_field_type: "varchar",
+              validator: ["> 100", "< 200"],
+              transformer: {
+                name: "ex_substr",
+                params: ["1", "3"],
+              },
+            },
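+            // Editorial note: each mapping entry appears to pair one source field with
+            // one sink field; "validator" holds filter expressions applied to the source
+            // value, and "transformer" names an in-flight transform function (see
+            // getFieldFunc in the service layer) with positional params, e.g.
+            // ex_substr("1", "3").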
"varchar", + sink_field_name: "field4", + sink_field_type: "varchar", + validator: ["like '%example'"], + transformer: { + name: "ex_replace", + params: ["1", "4"], + }, + }, + ], + }, + settings: [ + { + field: "errorlimit_percentage", + label: "脏数据占比阈值", + sort: 1, + value: "", + unit: "", + type: "INPUT", + required: true, + }, + { + field: "errorlimit_record", + label: "脏数据最大记录数", + sort: 2, + value: "", + unit: "条", + type: "INPUT", + required: true, + }, + { + field: "channel", + label: "传输速率", + sort: 3, + value: "", + unit: "Mb/s", + type: "INPUT", + required: true, + }, + { + field: "channel", + label: "并发数", + sort: 4, + value: "", + unit: "个", + type: "INPUT", + required: true, + }, + ], + }, + { + subjobName: "subjob2", + // ... + }, + ], + }, + proxyUser: "代理用户", + executeNode: "执行节点", + syncType: "同步类型", + jobParams: "参数", +}; + +const fieldInfo = [ + { + name: "field1", + type: "VARCHAR", + }, + { + name: "field2", + type: "VARCHAR", + }, +]; + +const SQLlist = [ + { + classifier: "关系型数据库", + name: "MYSQL", + icon: "icon-mysql", + description: "MYSQL description", + id: "1", + option: "mysql", + }, + { + classifier: "大数据存储", + name: "HIVE", + icon: "icon-hive", + description: "Hive description", + id: "2", + option: "hive", + }, +]; + +const dbs = ["db1", "db2"]; + +const tables = ["table1", "table2"]; + +export { jobInfo, fieldInfo, SQLlist, dbs, tables }; diff --git a/web/src/pages/jobManagement/spaIndex.vue b/web/src/pages/jobManagement/spaIndex.vue new file mode 100644 index 000000000..4b1a31369 --- /dev/null +++ b/web/src/pages/jobManagement/spaIndex.vue @@ -0,0 +1,12 @@ + + diff --git a/web/src/pages/projectManage/components/editModal.vue b/web/src/pages/projectManage/components/editModal.vue new file mode 100644 index 000000000..9c4c78586 --- /dev/null +++ b/web/src/pages/projectManage/components/editModal.vue @@ -0,0 +1,223 @@ + + + + + diff --git a/web/src/pages/projectManage/components/projectCreateCard.vue b/web/src/pages/projectManage/components/projectCreateCard.vue new file mode 100644 index 000000000..9b505c241 --- /dev/null +++ b/web/src/pages/projectManage/components/projectCreateCard.vue @@ -0,0 +1,57 @@ + + + + + diff --git a/web/src/pages/projectManage/components/projectViewCard.vue b/web/src/pages/projectManage/components/projectViewCard.vue new file mode 100644 index 000000000..90af8d5ac --- /dev/null +++ b/web/src/pages/projectManage/components/projectViewCard.vue @@ -0,0 +1,224 @@ + + + + + diff --git a/web/src/pages/projectManage/index.vue b/web/src/pages/projectManage/index.vue new file mode 100644 index 000000000..3120846b4 --- /dev/null +++ b/web/src/pages/projectManage/index.vue @@ -0,0 +1,277 @@ + + + + diff --git a/web/src/pages/synchronizationHistory/index.vue b/web/src/pages/synchronizationHistory/index.vue new file mode 100644 index 000000000..de0a79ea2 --- /dev/null +++ b/web/src/pages/synchronizationHistory/index.vue @@ -0,0 +1,492 @@ + + + + + \ No newline at end of file diff --git a/web/src/register/antd.js b/web/src/register/antd.js new file mode 100644 index 000000000..4319f6d29 --- /dev/null +++ b/web/src/register/antd.js @@ -0,0 +1,80 @@ +import { + Button, + Row, + Col, + Tag, + Form, + Input, + ConfigProvider, + Select, + Checkbox, + DatePicker, + TimePicker, + Dropdown, + Divider, + Modal, + Popconfirm, + Upload, + InputNumber, + Table, + Spin, + Breadcrumb, + Tabs, + Card, + Menu, + Pagination, + Typography, + Space, + message, + Radio, + Tree, + Tooltip, + Progress, + Drawer, + Empty, + Icon +} from "ant-design-vue"; +import 
+
+/**
+ * @description Manually register antd-vue components so they are loaded on demand
+ * @description Automatically register components under Button, such as Button.Group
+ * @param {ReturnType} app the application instance
+ * @returns void
+ */
+export default function loadComponent(app) {
+  app.use(Button);
+  app.use(Table);
+  app.use(TimePicker);
+  app.use(Row);
+  app.use(Col);
+  app.use(Tag);
+  app.use(Form);
+  app.use(Input);
+  app.use(Dropdown);
+  app.use(Divider);
+  app.use(ConfigProvider);
+  app.use(Select);
+  app.use(DatePicker);
+  app.use(Checkbox);
+  app.use(Modal);
+  app.use(Popconfirm);
+  app.use(Upload);
+  app.use(InputNumber);
+  app.use(Spin);
+  app.use(Breadcrumb);
+  app.use(Tabs);
+  app.use(Card);
+  app.use(Menu);
+  app.use(Pagination);
+  app.use(Space);
+  app.use(Typography);
+  app.use(Radio);
+  app.use(Tree);
+  app.use(Tooltip);
+  app.use(Progress);
+  app.use(Drawer);
+  app.use(Empty);
+  app.use(Icon)
+  app.use(formCreate)
+}
diff --git a/web/src/register/index.js b/web/src/register/index.js
new file mode 100644
index 000000000..4457e6f89
--- /dev/null
+++ b/web/src/register/index.js
@@ -0,0 +1,12 @@
+/**
+ * @description Load all register plugins in this folder
+ * @param {ReturnType} app the application instance
+ */
+export function loadAllRegister(app) {
+  const files = require.context('.', true, /\.js$/);
+  files.keys().forEach((key) => {
+    if (typeof files(key).default === 'function') {
+      if (key !== './index.js') files(key).default(app);
+    }
+  });
+}
diff --git a/web/tsconfig.json b/web/tsconfig.json
new file mode 100644
index 000000000..ea930c0b5
--- /dev/null
+++ b/web/tsconfig.json
@@ -0,0 +1,37 @@
+{
+  "compilerOptions": {
+    "outDir": "build/dist",
+    "module": "esnext",
+    "target": "esnext",
+    "lib": ["esnext", "dom"],
+    "sourceMap": true,
+    "baseUrl": ".",
+    "jsx": "preserve",
+    "allowSyntheticDefaultImports": true,
+    "moduleResolution": "node",
+    "forceConsistentCasingInFileNames": true,
+    "noImplicitReturns": true,
+    "suppressImplicitAnyIndexErrors": true,
+    "noUnusedLocals": true,
+    "allowJs": true,
+    "skipLibCheck": true,
+    "experimentalDecorators": true,
+    "strict": true,
+    "paths": {
+      "@/*": ["./src/*"],
+      "@@/*": ["./src/.fes/*"]
+    }
+  },
+  "include": [
+    "src/**/*",
+    "tests/**/*",
+    "test/**/*",
+    "__test__/**/*",
+    "typings/**/*",
+    "config/**/*",
+    ".eslintrc.js",
+    ".stylelintrc.js",
+    ".prettierrc.js"
+  ],
+  "exclude": ["node_modules", "build", "dist", "scripts", "src/.fes/*", "webpack", "jest"]
+}
diff --git a/web/yarn.lock b/web/yarn.lock
new file mode 100644
index 000000000..fdce33dc1
--- /dev/null
+++ b/web/yarn.lock
@@ -0,0 +1,7907 @@
+# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
+# yarn lockfile v1 + + +"@ampproject/remapping@^2.1.0": + "integrity" "sha512-qRmjj8nj9qmLTQXXmaR1cck3UXSRMPrbsLJAasZpF+t3riI71BXed5ebIOYwQntykeZuhjsdweEc9BxH5Jc26w==" + "resolved" "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.2.0.tgz" + "version" "2.2.0" + dependencies: + "@jridgewell/gen-mapping" "^0.1.0" + "@jridgewell/trace-mapping" "^0.3.9" + +"@ant-design/colors@^5.0.0": + "integrity" "sha512-Txy4KpHrp3q4XZdfgOBqLl+lkQIc3tEvHXOimRN1giX1AEC7mGtyrO9p8iRGJ3FLuVMGa2gNEzQyghVymLttKQ==" + "resolved" "https://registry.npmjs.org/@ant-design/colors/-/colors-5.1.1.tgz" + "version" "5.1.1" + dependencies: + "@ctrl/tinycolor" "^3.3.1" + +"@ant-design/icons-svg@^4.0.0": + "integrity" "sha512-EB0iwlKDGpG93hW8f85CTJTs4SvMX7tt5ceupvhALp1IF44SeUFOMhKUOYqpsoYWQKAOuTRDMqn75rEaKDp0Xw==" + "resolved" "https://registry.npmjs.org/@ant-design/icons-svg/-/icons-svg-4.2.1.tgz" + "version" "4.2.1" + +"@ant-design/icons-vue@^6.0.0", "@ant-design/icons-vue@^6.0.1": + "integrity" "sha512-HigIgEVV6bbcrz2A92/qDzi/aKWB5EC6b6E1mxMB6aQA7ksiKY+gi4U94TpqyEIIhR23uaDrjufJ+xCZQ+vx6Q==" + "resolved" "https://registry.npmjs.org/@ant-design/icons-vue/-/icons-vue-6.0.1.tgz" + "version" "6.0.1" + dependencies: + "@ant-design/colors" "^5.0.0" + "@ant-design/icons-svg" "^4.0.0" + "@types/lodash" "^4.14.165" + "lodash" "^4.17.15" + +"@babel/code-frame@^7.0.0", "@babel/code-frame@^7.16.7": + "integrity" "sha512-iAXqUn8IIeBTNd72xsFlgaXHkMBMt6y4HJp1tIaK465CWLT/fG1aqB7ykr95gHHmlBdGbFeWWfyB4NJJ0nmeIg==" + "resolved" "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.16.7.tgz" + "version" "7.16.7" + dependencies: + "@babel/highlight" "^7.16.7" + +"@babel/code-frame@7.12.11": + "integrity" "sha512-Zt1yodBx1UcyiePMSkWnU4hPqhwq7hGi2nFL1LeA3EUl+q2LQx16MISgJ0+z7dnmgvP9QtIleuETGOiOH1RcIw==" + "resolved" "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.12.11.tgz" + "version" "7.12.11" + dependencies: + "@babel/highlight" "^7.10.4" + +"@babel/compat-data@^7.13.11", "@babel/compat-data@^7.15.0", "@babel/compat-data@^7.17.10": + "integrity" "sha512-GZt/TCsG70Ms19gfZO1tM4CVnXsPgEPBCpJu+Qz3L0LUDsY5nZqFZglIoPC1kIYOtNBZlrnFT+klg12vFGZXrw==" + "resolved" "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.17.10.tgz" + "version" "7.17.10" + +"@babel/core@^7.0.0", "@babel/core@^7.0.0-0", "@babel/core@^7.12.0", "@babel/core@^7.12.13", "@babel/core@^7.13.0", "@babel/core@^7.16.7", "@babel/core@^7.4.0-0", "@babel/core@>=7.11.0": + "integrity" "sha512-liKoppandF3ZcBnIYFjfSDHZLKdLHGJRkoWtG8zQyGJBQfIYobpnVGI5+pLBNtS6psFLDzyq8+h5HiVljW9PNA==" + "resolved" "https://registry.npmjs.org/@babel/core/-/core-7.17.10.tgz" + "version" "7.17.10" + dependencies: + "@ampproject/remapping" "^2.1.0" + "@babel/code-frame" "^7.16.7" + "@babel/generator" "^7.17.10" + "@babel/helper-compilation-targets" "^7.17.10" + "@babel/helper-module-transforms" "^7.17.7" + "@babel/helpers" "^7.17.9" + "@babel/parser" "^7.17.10" + "@babel/template" "^7.16.7" + "@babel/traverse" "^7.17.10" + "@babel/types" "^7.17.10" + "convert-source-map" "^1.7.0" + "debug" "^4.1.0" + "gensync" "^1.0.0-beta.2" + "json5" "^2.2.1" + "semver" "^6.3.0" + +"@babel/eslint-parser@^7.13.14": + "integrity" "sha512-fYP7QFngCvgxjUuw8O057SVH5jCXsbFFOoE77CFDcvzwBVgTOkMD/L4mIC5Ud1xf8chK/no2fRbSSn1wvNmKuQ==" + "resolved" "https://registry.npmjs.org/@babel/eslint-parser/-/eslint-parser-7.15.8.tgz" + "version" "7.15.8" + dependencies: + "eslint-scope" "^5.1.1" + "eslint-visitor-keys" "^2.1.0" + "semver" "^6.3.0" + +"@babel/generator@^7.15.0", "@babel/generator@^7.17.10": + 
"integrity" "sha512-46MJZZo9y3o4kmhBVc7zW7i8dtR1oIK/sdO5NcfcZRhTGYi+KKJRtHNgsU6c4VUcJmUNV/LQdebD/9Dlv4K+Tg==" + "resolved" "https://registry.npmjs.org/@babel/generator/-/generator-7.17.10.tgz" + "version" "7.17.10" + dependencies: + "@babel/types" "^7.17.10" + "@jridgewell/gen-mapping" "^0.1.0" + "jsesc" "^2.5.1" + +"@babel/helper-annotate-as-pure@^7.14.5", "@babel/helper-annotate-as-pure@^7.15.4": + "integrity" "sha512-QwrtdNvUNsPCj2lfNQacsGSQvGX8ee1ttrBrcozUP2Sv/jylewBP/8QFe6ZkBsC8T/GYWonNAWJV4aRR9AL2DA==" + "resolved" "https://registry.npmjs.org/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.15.4.tgz" + "version" "7.15.4" + dependencies: + "@babel/types" "^7.15.4" + +"@babel/helper-builder-binary-assignment-operator-visitor@^7.14.5": + "integrity" "sha512-P8o7JP2Mzi0SdC6eWr1zF+AEYvrsZa7GSY1lTayjF5XJhVH0kjLYUZPvTMflP7tBgZoe9gIhTa60QwFpqh/E0Q==" + "resolved" "https://registry.npmjs.org/@babel/helper-builder-binary-assignment-operator-visitor/-/helper-builder-binary-assignment-operator-visitor-7.15.4.tgz" + "version" "7.15.4" + dependencies: + "@babel/helper-explode-assignable-expression" "^7.15.4" + "@babel/types" "^7.15.4" + +"@babel/helper-compilation-targets@^7.13.0", "@babel/helper-compilation-targets@^7.15.4", "@babel/helper-compilation-targets@^7.17.10": + "integrity" "sha512-gh3RxjWbauw/dFiU/7whjd0qN9K6nPJMqe6+Er7rOavFh0CQUSwhAE3IcTho2rywPJFxej6TUUHDkWcYI6gGqQ==" + "resolved" "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.17.10.tgz" + "version" "7.17.10" + dependencies: + "@babel/compat-data" "^7.17.10" + "@babel/helper-validator-option" "^7.16.7" + "browserslist" "^4.20.2" + "semver" "^6.3.0" + +"@babel/helper-create-class-features-plugin@^7.14.5", "@babel/helper-create-class-features-plugin@^7.15.4": + "integrity" "sha512-7ZmzFi+DwJx6A7mHRwbuucEYpyBwmh2Ca0RvI6z2+WLZYCqV0JOaLb+u0zbtmDicebgKBZgqbYfLaKNqSgv5Pw==" + "resolved" "https://registry.npmjs.org/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.15.4.tgz" + "version" "7.15.4" + dependencies: + "@babel/helper-annotate-as-pure" "^7.15.4" + "@babel/helper-function-name" "^7.15.4" + "@babel/helper-member-expression-to-functions" "^7.15.4" + "@babel/helper-optimise-call-expression" "^7.15.4" + "@babel/helper-replace-supers" "^7.15.4" + "@babel/helper-split-export-declaration" "^7.15.4" + +"@babel/helper-create-regexp-features-plugin@^7.14.5": + "integrity" "sha512-TLawwqpOErY2HhWbGJ2nZT5wSkR192QpN+nBg1THfBfftrlvOh+WbhrxXCH4q4xJ9Gl16BGPR/48JA+Ryiho/A==" + "resolved" "https://registry.npmjs.org/@babel/helper-create-regexp-features-plugin/-/helper-create-regexp-features-plugin-7.14.5.tgz" + "version" "7.14.5" + dependencies: + "@babel/helper-annotate-as-pure" "^7.14.5" + "regexpu-core" "^4.7.1" + +"@babel/helper-define-polyfill-provider@^0.2.2": + "integrity" "sha512-RH3QDAfRMzj7+0Nqu5oqgO5q9mFtQEVvCRsi8qCEfzLR9p2BHfn5FzhSB2oj1fF7I2+DcTORkYaQ6aTR9Cofew==" + "resolved" "https://registry.npmjs.org/@babel/helper-define-polyfill-provider/-/helper-define-polyfill-provider-0.2.3.tgz" + "version" "0.2.3" + dependencies: + "@babel/helper-compilation-targets" "^7.13.0" + "@babel/helper-module-imports" "^7.12.13" + "@babel/helper-plugin-utils" "^7.13.0" + "@babel/traverse" "^7.13.0" + "debug" "^4.1.1" + "lodash.debounce" "^4.0.8" + "resolve" "^1.14.2" + "semver" "^6.1.2" + +"@babel/helper-environment-visitor@^7.16.7": + "integrity" "sha512-SLLb0AAn6PkUeAfKJCCOl9e1R53pQlGAfc4y4XuMRZfqeMYLE0dM1LMhqbGAlGQY0lfw5/ohoYWAe9V1yibRag==" + "resolved" 
"https://registry.npmjs.org/@babel/helper-environment-visitor/-/helper-environment-visitor-7.16.7.tgz" + "version" "7.16.7" + dependencies: + "@babel/types" "^7.16.7" + +"@babel/helper-explode-assignable-expression@^7.15.4": + "integrity" "sha512-J14f/vq8+hdC2KoWLIQSsGrC9EFBKE4NFts8pfMpymfApds+fPqR30AOUWc4tyr56h9l/GA1Sxv2q3dLZWbQ/g==" + "resolved" "https://registry.npmjs.org/@babel/helper-explode-assignable-expression/-/helper-explode-assignable-expression-7.15.4.tgz" + "version" "7.15.4" + dependencies: + "@babel/types" "^7.15.4" + +"@babel/helper-function-name@^7.14.5", "@babel/helper-function-name@^7.15.4", "@babel/helper-function-name@^7.17.9": + "integrity" "sha512-7cRisGlVtiVqZ0MW0/yFB4atgpGLWEHUVYnb448hZK4x+vih0YO5UoS11XIYtZYqHd0dIPMdUSv8q5K4LdMnIg==" + "resolved" "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.17.9.tgz" + "version" "7.17.9" + dependencies: + "@babel/template" "^7.16.7" + "@babel/types" "^7.17.0" + +"@babel/helper-hoist-variables@^7.15.4", "@babel/helper-hoist-variables@^7.16.7": + "integrity" "sha512-m04d/0Op34H5v7pbZw6pSKP7weA6lsMvfiIAMeIvkY/R4xQtBSMFEigu9QTZ2qB/9l22vsxtM8a+Q8CzD255fg==" + "resolved" "https://registry.npmjs.org/@babel/helper-hoist-variables/-/helper-hoist-variables-7.16.7.tgz" + "version" "7.16.7" + dependencies: + "@babel/types" "^7.16.7" + +"@babel/helper-member-expression-to-functions@^7.15.4": + "integrity" "sha512-cokOMkxC/BTyNP1AlY25HuBWM32iCEsLPI4BHDpJCHHm1FU2E7dKWWIXJgQgSFiu4lp8q3bL1BIKwqkSUviqtA==" + "resolved" "https://registry.npmjs.org/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.15.4.tgz" + "version" "7.15.4" + dependencies: + "@babel/types" "^7.15.4" + +"@babel/helper-module-imports@^7.0.0", "@babel/helper-module-imports@^7.12.13", "@babel/helper-module-imports@^7.14.5", "@babel/helper-module-imports@^7.15.4", "@babel/helper-module-imports@^7.16.7": + "integrity" "sha512-LVtS6TqjJHFc+nYeITRo6VLXve70xmq7wPhWTqDJusJEgGmkAACWwMiTNrvfoQo6hEhFwAIixNkvB0jPXDL8Wg==" + "resolved" "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.16.7.tgz" + "version" "7.16.7" + dependencies: + "@babel/types" "^7.16.7" + +"@babel/helper-module-transforms@^7.14.5", "@babel/helper-module-transforms@^7.15.4", "@babel/helper-module-transforms@^7.17.7": + "integrity" "sha512-VmZD99F3gNTYB7fJRDTi+u6l/zxY0BE6OIxPSU7a50s6ZUQkHwSDmV92FfM+oCG0pZRVojGYhkR8I0OGeCVREw==" + "resolved" "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.17.7.tgz" + "version" "7.17.7" + dependencies: + "@babel/helper-environment-visitor" "^7.16.7" + "@babel/helper-module-imports" "^7.16.7" + "@babel/helper-simple-access" "^7.17.7" + "@babel/helper-split-export-declaration" "^7.16.7" + "@babel/helper-validator-identifier" "^7.16.7" + "@babel/template" "^7.16.7" + "@babel/traverse" "^7.17.3" + "@babel/types" "^7.17.0" + +"@babel/helper-optimise-call-expression@^7.15.4": + "integrity" "sha512-E/z9rfbAOt1vDW1DR7k4SzhzotVV5+qMciWV6LaG1g4jeFrkDlJedjtV4h0i4Q/ITnUu+Pk08M7fczsB9GXBDw==" + "resolved" "https://registry.npmjs.org/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.15.4.tgz" + "version" "7.15.4" + dependencies: + "@babel/types" "^7.15.4" + +"@babel/helper-plugin-utils@^7.0.0", "@babel/helper-plugin-utils@^7.10.4", "@babel/helper-plugin-utils@^7.12.13", "@babel/helper-plugin-utils@^7.13.0", "@babel/helper-plugin-utils@^7.14.5", "@babel/helper-plugin-utils@^7.8.0", "@babel/helper-plugin-utils@^7.8.3": + "integrity" 
"sha512-/37qQCE3K0vvZKwoK4XU/irIJQdIfCJuhU5eKnNxpFDsOkgFaUAwbv+RYw6eYgsC0E4hS7r5KqGULUogqui0fQ==" + "resolved" "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.14.5.tgz" + "version" "7.14.5" + +"@babel/helper-remap-async-to-generator@^7.14.5", "@babel/helper-remap-async-to-generator@^7.15.4": + "integrity" "sha512-v53MxgvMK/HCwckJ1bZrq6dNKlmwlyRNYM6ypaRTdXWGOE2c1/SCa6dL/HimhPulGhZKw9W0QhREM583F/t0vQ==" + "resolved" "https://registry.npmjs.org/@babel/helper-remap-async-to-generator/-/helper-remap-async-to-generator-7.15.4.tgz" + "version" "7.15.4" + dependencies: + "@babel/helper-annotate-as-pure" "^7.15.4" + "@babel/helper-wrap-function" "^7.15.4" + "@babel/types" "^7.15.4" + +"@babel/helper-replace-supers@^7.14.5", "@babel/helper-replace-supers@^7.15.4": + "integrity" "sha512-/ztT6khaXF37MS47fufrKvIsiQkx1LBRvSJNzRqmbyeZnTwU9qBxXYLaaT/6KaxfKhjs2Wy8kG8ZdsFUuWBjzw==" + "resolved" "https://registry.npmjs.org/@babel/helper-replace-supers/-/helper-replace-supers-7.15.4.tgz" + "version" "7.15.4" + dependencies: + "@babel/helper-member-expression-to-functions" "^7.15.4" + "@babel/helper-optimise-call-expression" "^7.15.4" + "@babel/traverse" "^7.15.4" + "@babel/types" "^7.15.4" + +"@babel/helper-simple-access@^7.15.4", "@babel/helper-simple-access@^7.17.7": + "integrity" "sha512-txyMCGroZ96i+Pxr3Je3lzEJjqwaRC9buMUgtomcrLe5Nd0+fk1h0LLA+ixUF5OW7AhHuQ7Es1WcQJZmZsz2XA==" + "resolved" "https://registry.npmjs.org/@babel/helper-simple-access/-/helper-simple-access-7.17.7.tgz" + "version" "7.17.7" + dependencies: + "@babel/types" "^7.17.0" + +"@babel/helper-skip-transparent-expression-wrappers@^7.14.5", "@babel/helper-skip-transparent-expression-wrappers@^7.15.4": + "integrity" "sha512-BMRLsdh+D1/aap19TycS4eD1qELGrCBJwzaY9IE8LrpJtJb+H7rQkPIdsfgnMtLBA6DJls7X9z93Z4U8h7xw0A==" + "resolved" "https://registry.npmjs.org/@babel/helper-skip-transparent-expression-wrappers/-/helper-skip-transparent-expression-wrappers-7.15.4.tgz" + "version" "7.15.4" + dependencies: + "@babel/types" "^7.15.4" + +"@babel/helper-split-export-declaration@^7.15.4", "@babel/helper-split-export-declaration@^7.16.7": + "integrity" "sha512-xbWoy/PFoxSWazIToT9Sif+jJTlrMcndIsaOKvTA6u7QEo7ilkRZpjew18/W3c7nm8fXdUDXh02VXTbZ0pGDNw==" + "resolved" "https://registry.npmjs.org/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.16.7.tgz" + "version" "7.16.7" + dependencies: + "@babel/types" "^7.16.7" + +"@babel/helper-validator-identifier@^7.14.9", "@babel/helper-validator-identifier@^7.16.7": + "integrity" "sha512-hsEnFemeiW4D08A5gUAZxLBTXpZ39P+a+DGDsHw1yxqyQ/jzFEnxf5uTEGp+3bzAbNOxU1paTgYS4ECU/IgfDw==" + "resolved" "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.16.7.tgz" + "version" "7.16.7" + +"@babel/helper-validator-option@^7.14.5", "@babel/helper-validator-option@^7.16.7": + "integrity" "sha512-TRtenOuRUVo9oIQGPC5G9DgK4743cdxvtOw0weQNpZXaS16SCBi5MNjZF8vba3ETURjZpTbVn7Vvcf2eAwFozQ==" + "resolved" "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.16.7.tgz" + "version" "7.16.7" + +"@babel/helper-wrap-function@^7.15.4": + "integrity" "sha512-Y2o+H/hRV5W8QhIfTpRIBwl57y8PrZt6JM3V8FOo5qarjshHItyH5lXlpMfBfmBefOqSCpKZs/6Dxqp0E/U+uw==" + "resolved" "https://registry.npmjs.org/@babel/helper-wrap-function/-/helper-wrap-function-7.15.4.tgz" + "version" "7.15.4" + dependencies: + "@babel/helper-function-name" "^7.15.4" + "@babel/template" "^7.15.4" + "@babel/traverse" "^7.15.4" + "@babel/types" "^7.15.4" + 
+"@babel/helpers@^7.17.9": + "integrity" "sha512-cPCt915ShDWUEzEp3+UNRktO2n6v49l5RSnG9M5pS24hA+2FAc5si+Pn1i4VVbQQ+jh+bIZhPFQOJOzbrOYY1Q==" + "resolved" "https://registry.npmjs.org/@babel/helpers/-/helpers-7.17.9.tgz" + "version" "7.17.9" + dependencies: + "@babel/template" "^7.16.7" + "@babel/traverse" "^7.17.9" + "@babel/types" "^7.17.0" + +"@babel/highlight@^7.10.4", "@babel/highlight@^7.16.7": + "integrity" "sha512-J9PfEKCbFIv2X5bjTMiZu6Vf341N05QIY+d6FvVKynkG1S7G0j3I0QoRtWIrXhZ+/Nlb5Q0MzqL7TokEJ5BNHg==" + "resolved" "https://registry.npmjs.org/@babel/highlight/-/highlight-7.17.9.tgz" + "version" "7.17.9" + dependencies: + "@babel/helper-validator-identifier" "^7.16.7" + "chalk" "^2.0.0" + "js-tokens" "^4.0.0" + +"@babel/parser@^7.15.0", "@babel/parser@^7.16.7", "@babel/parser@^7.17.10": + "integrity" "sha512-n2Q6i+fnJqzOaq2VkdXxy2TCPCWQZHiCo0XqmrCvDWcZQKRyZzYi4Z0yxlBuN0w+r2ZHmre+Q087DSrw3pbJDQ==" + "resolved" "https://registry.npmjs.org/@babel/parser/-/parser-7.17.10.tgz" + "version" "7.17.10" + +"@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining@^7.15.4": + "integrity" "sha512-eBnpsl9tlhPhpI10kU06JHnrYXwg3+V6CaP2idsCXNef0aeslpqyITXQ74Vfk5uHgY7IG7XP0yIH8b42KSzHog==" + "resolved" "https://registry.npmjs.org/@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining/-/plugin-bugfix-v8-spread-parameters-in-optional-chaining-7.15.4.tgz" + "version" "7.15.4" + dependencies: + "@babel/helper-plugin-utils" "^7.14.5" + "@babel/helper-skip-transparent-expression-wrappers" "^7.15.4" + "@babel/plugin-proposal-optional-chaining" "^7.14.5" + +"@babel/plugin-proposal-async-generator-functions@^7.15.8": + "integrity" "sha512-2Z5F2R2ibINTc63mY7FLqGfEbmofrHU9FitJW1Q7aPaKFhiPvSq6QEt/BoWN5oME3GVyjcRuNNSRbb9LC0CSWA==" + "resolved" "https://registry.npmjs.org/@babel/plugin-proposal-async-generator-functions/-/plugin-proposal-async-generator-functions-7.15.8.tgz" + "version" "7.15.8" + dependencies: + "@babel/helper-plugin-utils" "^7.14.5" + "@babel/helper-remap-async-to-generator" "^7.15.4" + "@babel/plugin-syntax-async-generators" "^7.8.4" + +"@babel/plugin-proposal-class-properties@^7.14.5": + "integrity" "sha512-q/PLpv5Ko4dVc1LYMpCY7RVAAO4uk55qPwrIuJ5QJ8c6cVuAmhu7I/49JOppXL6gXf7ZHzpRVEUZdYoPLM04Gg==" + "resolved" "https://registry.npmjs.org/@babel/plugin-proposal-class-properties/-/plugin-proposal-class-properties-7.14.5.tgz" + "version" "7.14.5" + dependencies: + "@babel/helper-create-class-features-plugin" "^7.14.5" + "@babel/helper-plugin-utils" "^7.14.5" + +"@babel/plugin-proposal-class-static-block@^7.15.4": + "integrity" "sha512-M682XWrrLNk3chXCjoPUQWOyYsB93B9z3mRyjtqqYJWDf2mfCdIYgDrA11cgNVhAQieaq6F2fn2f3wI0U4aTjA==" + "resolved" "https://registry.npmjs.org/@babel/plugin-proposal-class-static-block/-/plugin-proposal-class-static-block-7.15.4.tgz" + "version" "7.15.4" + dependencies: + "@babel/helper-create-class-features-plugin" "^7.15.4" + "@babel/helper-plugin-utils" "^7.14.5" + "@babel/plugin-syntax-class-static-block" "^7.14.5" + +"@babel/plugin-proposal-do-expressions@^7.12.13": + "integrity" "sha512-i40m/CLe5WBGYMZL/SC3xtjJ/B0i+XblaonSsinumgfNIqmBOf4LEcZJXijoQeQbQVl55PyM0siWSWWJ9lV7cA==" + "resolved" "https://registry.npmjs.org/@babel/plugin-proposal-do-expressions/-/plugin-proposal-do-expressions-7.14.5.tgz" + "version" "7.14.5" + dependencies: + "@babel/helper-plugin-utils" "^7.14.5" + "@babel/plugin-syntax-do-expressions" "^7.14.5" + +"@babel/plugin-proposal-dynamic-import@^7.14.5": + "integrity" 
"sha512-ExjiNYc3HDN5PXJx+bwC50GIx/KKanX2HiggnIUAYedbARdImiCU4RhhHfdf0Kd7JNXGpsBBBCOm+bBVy3Gb0g==" + "resolved" "https://registry.npmjs.org/@babel/plugin-proposal-dynamic-import/-/plugin-proposal-dynamic-import-7.14.5.tgz" + "version" "7.14.5" + dependencies: + "@babel/helper-plugin-utils" "^7.14.5" + "@babel/plugin-syntax-dynamic-import" "^7.8.3" + +"@babel/plugin-proposal-export-default-from@^7.12.13": + "integrity" "sha512-T8KZ5abXvKMjF6JcoXjgac3ElmXf0AWzJwi2O/42Jk+HmCky3D9+i1B7NPP1FblyceqTevKeV/9szeikFoaMDg==" + "resolved" "https://registry.npmjs.org/@babel/plugin-proposal-export-default-from/-/plugin-proposal-export-default-from-7.14.5.tgz" + "version" "7.14.5" + dependencies: + "@babel/helper-plugin-utils" "^7.14.5" + "@babel/plugin-syntax-export-default-from" "^7.14.5" + +"@babel/plugin-proposal-export-namespace-from@^7.14.5": + "integrity" "sha512-g5POA32bXPMmSBu5Dx/iZGLGnKmKPc5AiY7qfZgurzrCYgIztDlHFbznSNCoQuv57YQLnQfaDi7dxCtLDIdXdA==" + "resolved" "https://registry.npmjs.org/@babel/plugin-proposal-export-namespace-from/-/plugin-proposal-export-namespace-from-7.14.5.tgz" + "version" "7.14.5" + dependencies: + "@babel/helper-plugin-utils" "^7.14.5" + "@babel/plugin-syntax-export-namespace-from" "^7.8.3" + +"@babel/plugin-proposal-function-bind@^7.12.13": + "integrity" "sha512-PSQk5JImi81nFAzIebCEqkd0aiP9LDVKLCIH+0yR66JV8cQ1oZ8IRK9NNaA5nw9sjo0cPXxuBPCqgqcpugR8tA==" + "resolved" "https://registry.npmjs.org/@babel/plugin-proposal-function-bind/-/plugin-proposal-function-bind-7.14.5.tgz" + "version" "7.14.5" + dependencies: + "@babel/helper-plugin-utils" "^7.14.5" + "@babel/plugin-syntax-function-bind" "^7.14.5" + +"@babel/plugin-proposal-json-strings@^7.14.5": + "integrity" "sha512-NSq2fczJYKVRIsUJyNxrVUMhB27zb7N7pOFGQOhBKJrChbGcgEAqyZrmZswkPk18VMurEeJAaICbfm57vUeTbQ==" + "resolved" "https://registry.npmjs.org/@babel/plugin-proposal-json-strings/-/plugin-proposal-json-strings-7.14.5.tgz" + "version" "7.14.5" + dependencies: + "@babel/helper-plugin-utils" "^7.14.5" + "@babel/plugin-syntax-json-strings" "^7.8.3" + +"@babel/plugin-proposal-logical-assignment-operators@^7.14.5": + "integrity" "sha512-YGn2AvZAo9TwyhlLvCCWxD90Xq8xJ4aSgaX3G5D/8DW94L8aaT+dS5cSP+Z06+rCJERGSr9GxMBZ601xoc2taw==" + "resolved" "https://registry.npmjs.org/@babel/plugin-proposal-logical-assignment-operators/-/plugin-proposal-logical-assignment-operators-7.14.5.tgz" + "version" "7.14.5" + dependencies: + "@babel/helper-plugin-utils" "^7.14.5" + "@babel/plugin-syntax-logical-assignment-operators" "^7.10.4" + +"@babel/plugin-proposal-nullish-coalescing-operator@^7.14.5": + "integrity" "sha512-gun/SOnMqjSb98Nkaq2rTKMwervfdAoz6NphdY0vTfuzMfryj+tDGb2n6UkDKwez+Y8PZDhE3D143v6Gepp4Hg==" + "resolved" "https://registry.npmjs.org/@babel/plugin-proposal-nullish-coalescing-operator/-/plugin-proposal-nullish-coalescing-operator-7.14.5.tgz" + "version" "7.14.5" + dependencies: + "@babel/helper-plugin-utils" "^7.14.5" + "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.3" + +"@babel/plugin-proposal-numeric-separator@^7.14.5": + "integrity" "sha512-yiclALKe0vyZRZE0pS6RXgjUOt87GWv6FYa5zqj15PvhOGFO69R5DusPlgK/1K5dVnCtegTiWu9UaBSrLLJJBg==" + "resolved" "https://registry.npmjs.org/@babel/plugin-proposal-numeric-separator/-/plugin-proposal-numeric-separator-7.14.5.tgz" + "version" "7.14.5" + dependencies: + "@babel/helper-plugin-utils" "^7.14.5" + "@babel/plugin-syntax-numeric-separator" "^7.10.4" + +"@babel/plugin-proposal-object-rest-spread@^7.15.6": + "integrity" 
"sha512-qtOHo7A1Vt+O23qEAX+GdBpqaIuD3i9VRrWgCJeq7WO6H2d14EK3q11urj5Te2MAeK97nMiIdRpwd/ST4JFbNg==" + "resolved" "https://registry.npmjs.org/@babel/plugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.15.6.tgz" + "version" "7.15.6" + dependencies: + "@babel/compat-data" "^7.15.0" + "@babel/helper-compilation-targets" "^7.15.4" + "@babel/helper-plugin-utils" "^7.14.5" + "@babel/plugin-syntax-object-rest-spread" "^7.8.3" + "@babel/plugin-transform-parameters" "^7.15.4" + +"@babel/plugin-proposal-optional-catch-binding@^7.14.5": + "integrity" "sha512-3Oyiixm0ur7bzO5ybNcZFlmVsygSIQgdOa7cTfOYCMY+wEPAYhZAJxi3mixKFCTCKUhQXuCTtQ1MzrpL3WT8ZQ==" + "resolved" "https://registry.npmjs.org/@babel/plugin-proposal-optional-catch-binding/-/plugin-proposal-optional-catch-binding-7.14.5.tgz" + "version" "7.14.5" + dependencies: + "@babel/helper-plugin-utils" "^7.14.5" + "@babel/plugin-syntax-optional-catch-binding" "^7.8.3" + +"@babel/plugin-proposal-optional-chaining@^7.14.5": + "integrity" "sha512-ycz+VOzo2UbWNI1rQXxIuMOzrDdHGrI23fRiz/Si2R4kv2XZQ1BK8ccdHwehMKBlcH/joGW/tzrUmo67gbJHlQ==" + "resolved" "https://registry.npmjs.org/@babel/plugin-proposal-optional-chaining/-/plugin-proposal-optional-chaining-7.14.5.tgz" + "version" "7.14.5" + dependencies: + "@babel/helper-plugin-utils" "^7.14.5" + "@babel/helper-skip-transparent-expression-wrappers" "^7.14.5" + "@babel/plugin-syntax-optional-chaining" "^7.8.3" + +"@babel/plugin-proposal-pipeline-operator@^7.12.13": + "integrity" "sha512-/XNBV8GmMxl7icZ0G5o4f3aGXHDKuhS8xHhbdusjE/ZDrsqtLq5kUiw/i7J6ZnlS/ngM0IQTN2CPlrPTb6GKVw==" + "resolved" "https://registry.npmjs.org/@babel/plugin-proposal-pipeline-operator/-/plugin-proposal-pipeline-operator-7.15.0.tgz" + "version" "7.15.0" + dependencies: + "@babel/helper-plugin-utils" "^7.14.5" + "@babel/plugin-syntax-pipeline-operator" "^7.15.0" + +"@babel/plugin-proposal-private-methods@^7.14.5": + "integrity" "sha512-838DkdUA1u+QTCplatfq4B7+1lnDa/+QMI89x5WZHBcnNv+47N8QEj2k9I2MUU9xIv8XJ4XvPCviM/Dj7Uwt9g==" + "resolved" "https://registry.npmjs.org/@babel/plugin-proposal-private-methods/-/plugin-proposal-private-methods-7.14.5.tgz" + "version" "7.14.5" + dependencies: + "@babel/helper-create-class-features-plugin" "^7.14.5" + "@babel/helper-plugin-utils" "^7.14.5" + +"@babel/plugin-proposal-private-property-in-object@^7.15.4": + "integrity" "sha512-X0UTixkLf0PCCffxgu5/1RQyGGbgZuKoI+vXP4iSbJSYwPb7hu06omsFGBvQ9lJEvwgrxHdS8B5nbfcd8GyUNA==" + "resolved" "https://registry.npmjs.org/@babel/plugin-proposal-private-property-in-object/-/plugin-proposal-private-property-in-object-7.15.4.tgz" + "version" "7.15.4" + dependencies: + "@babel/helper-annotate-as-pure" "^7.15.4" + "@babel/helper-create-class-features-plugin" "^7.15.4" + "@babel/helper-plugin-utils" "^7.14.5" + "@babel/plugin-syntax-private-property-in-object" "^7.14.5" + +"@babel/plugin-proposal-unicode-property-regex@^7.14.5", "@babel/plugin-proposal-unicode-property-regex@^7.4.4": + "integrity" "sha512-6axIeOU5LnY471KenAB9vI8I5j7NQ2d652hIYwVyRfgaZT5UpiqFKCuVXCDMSrU+3VFafnu2c5m3lrWIlr6A5Q==" + "resolved" "https://registry.npmjs.org/@babel/plugin-proposal-unicode-property-regex/-/plugin-proposal-unicode-property-regex-7.14.5.tgz" + "version" "7.14.5" + dependencies: + "@babel/helper-create-regexp-features-plugin" "^7.14.5" + "@babel/helper-plugin-utils" "^7.14.5" + +"@babel/plugin-syntax-async-generators@^7.8.4": + "integrity" "sha512-tycmZxkGfZaxhMRbXlPXuVFpdWlXpir2W4AMhSJgRKzk/eDlIXOhb2LHWoLpDF7TEHylV5zNhykX6KAgHJmTNw==" + "resolved" 
"https://registry.npmjs.org/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.8.4.tgz" + "version" "7.8.4" + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-class-properties@^7.12.13": + "integrity" "sha512-fm4idjKla0YahUNgFNLCB0qySdsoPiZP3iQE3rky0mBUtMZ23yDJ9SJdg6dXTSDnulOVqiF3Hgr9nbXvXTQZYA==" + "resolved" "https://registry.npmjs.org/@babel/plugin-syntax-class-properties/-/plugin-syntax-class-properties-7.12.13.tgz" + "version" "7.12.13" + dependencies: + "@babel/helper-plugin-utils" "^7.12.13" + +"@babel/plugin-syntax-class-static-block@^7.14.5": + "integrity" "sha512-b+YyPmr6ldyNnM6sqYeMWE+bgJcJpO6yS4QD7ymxgH34GBPNDM/THBh8iunyvKIZztiwLH4CJZ0RxTk9emgpjw==" + "resolved" "https://registry.npmjs.org/@babel/plugin-syntax-class-static-block/-/plugin-syntax-class-static-block-7.14.5.tgz" + "version" "7.14.5" + dependencies: + "@babel/helper-plugin-utils" "^7.14.5" + +"@babel/plugin-syntax-do-expressions@^7.14.5": + "integrity" "sha512-IpVyxRlfFCU2emBiq2OxUX10PD6FoGZ30yWwGt1qdkIPUDhAodG5Il1LStODgATndKRhQgqT21ksqA5fd39AwA==" + "resolved" "https://registry.npmjs.org/@babel/plugin-syntax-do-expressions/-/plugin-syntax-do-expressions-7.14.5.tgz" + "version" "7.14.5" + dependencies: + "@babel/helper-plugin-utils" "^7.14.5" + +"@babel/plugin-syntax-dynamic-import@^7.8.3": + "integrity" "sha512-5gdGbFon+PszYzqs83S3E5mpi7/y/8M9eC90MRTZfduQOYW76ig6SOSPNe41IG5LoP3FGBn2N0RjVDSQiS94kQ==" + "resolved" "https://registry.npmjs.org/@babel/plugin-syntax-dynamic-import/-/plugin-syntax-dynamic-import-7.8.3.tgz" + "version" "7.8.3" + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-export-default-from@^7.14.5": + "integrity" "sha512-snWDxjuaPEobRBnhpqEfZ8RMxDbHt8+87fiEioGuE+Uc0xAKgSD8QiuL3lF93hPVQfZFAcYwrrf+H5qUhike3Q==" + "resolved" "https://registry.npmjs.org/@babel/plugin-syntax-export-default-from/-/plugin-syntax-export-default-from-7.14.5.tgz" + "version" "7.14.5" + dependencies: + "@babel/helper-plugin-utils" "^7.14.5" + +"@babel/plugin-syntax-export-namespace-from@^7.8.3": + "integrity" "sha512-MXf5laXo6c1IbEbegDmzGPwGNTsHZmEy6QGznu5Sh2UCWvueywb2ee+CCE4zQiZstxU9BMoQO9i6zUFSY0Kj0Q==" + "resolved" "https://registry.npmjs.org/@babel/plugin-syntax-export-namespace-from/-/plugin-syntax-export-namespace-from-7.8.3.tgz" + "version" "7.8.3" + dependencies: + "@babel/helper-plugin-utils" "^7.8.3" + +"@babel/plugin-syntax-function-bind@^7.14.5": + "integrity" "sha512-gstAIrKtlPwrQaRz4uK+kT7zI2p5MQqX41SeO+kZKH1XGO1jL0nLZBWznRigPpkem6LfIoG2EduQZmPBcUwEmg==" + "resolved" "https://registry.npmjs.org/@babel/plugin-syntax-function-bind/-/plugin-syntax-function-bind-7.14.5.tgz" + "version" "7.14.5" + dependencies: + "@babel/helper-plugin-utils" "^7.14.5" + +"@babel/plugin-syntax-json-strings@^7.8.3": + "integrity" "sha512-lY6kdGpWHvjoe2vk4WrAapEuBR69EMxZl+RoGRhrFGNYVK8mOPAW8VfbT/ZgrFbXlDNiiaxQnAtgVCZ6jv30EA==" + "resolved" "https://registry.npmjs.org/@babel/plugin-syntax-json-strings/-/plugin-syntax-json-strings-7.8.3.tgz" + "version" "7.8.3" + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-jsx@^7.0.0", "@babel/plugin-syntax-jsx@^7.12.13": + "integrity" "sha512-ohuFIsOMXJnbOMRfX7/w7LocdR6R7whhuRD4ax8IipLcLPlZGJKkBxgHp++U4N/vKyU16/YDQr2f5seajD3jIw==" + "resolved" "https://registry.npmjs.org/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.14.5.tgz" + "version" "7.14.5" + dependencies: + "@babel/helper-plugin-utils" "^7.14.5" + +"@babel/plugin-syntax-logical-assignment-operators@^7.10.4": + "integrity" 
"sha512-d8waShlpFDinQ5MtvGU9xDAOzKH47+FFoney2baFIoMr952hKOLp1HR7VszoZvOsV/4+RRszNY7D17ba0te0ig==" + "resolved" "https://registry.npmjs.org/@babel/plugin-syntax-logical-assignment-operators/-/plugin-syntax-logical-assignment-operators-7.10.4.tgz" + "version" "7.10.4" + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + +"@babel/plugin-syntax-nullish-coalescing-operator@^7.8.3": + "integrity" "sha512-aSff4zPII1u2QD7y+F8oDsz19ew4IGEJg9SVW+bqwpwtfFleiQDMdzA/R+UlWDzfnHFCxxleFT0PMIrR36XLNQ==" + "resolved" "https://registry.npmjs.org/@babel/plugin-syntax-nullish-coalescing-operator/-/plugin-syntax-nullish-coalescing-operator-7.8.3.tgz" + "version" "7.8.3" + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-numeric-separator@^7.10.4": + "integrity" "sha512-9H6YdfkcK/uOnY/K7/aA2xpzaAgkQn37yzWUMRK7OaPOqOpGS1+n0H5hxT9AUw9EsSjPW8SVyMJwYRtWs3X3ug==" + "resolved" "https://registry.npmjs.org/@babel/plugin-syntax-numeric-separator/-/plugin-syntax-numeric-separator-7.10.4.tgz" + "version" "7.10.4" + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + +"@babel/plugin-syntax-object-rest-spread@^7.8.3": + "integrity" "sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA==" + "resolved" "https://registry.npmjs.org/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.8.3.tgz" + "version" "7.8.3" + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-optional-catch-binding@^7.8.3": + "integrity" "sha512-6VPD0Pc1lpTqw0aKoeRTMiB+kWhAoT24PA+ksWSBrFtl5SIRVpZlwN3NNPQjehA2E/91FV3RjLWoVTglWcSV3Q==" + "resolved" "https://registry.npmjs.org/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.8.3.tgz" + "version" "7.8.3" + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-optional-chaining@^7.8.3": + "integrity" "sha512-KoK9ErH1MBlCPxV0VANkXW2/dw4vlbGDrFgz8bmUsBGYkFRcbRwMh6cIJubdPrkxRwuGdtCk0v/wPTKbQgBjkg==" + "resolved" "https://registry.npmjs.org/@babel/plugin-syntax-optional-chaining/-/plugin-syntax-optional-chaining-7.8.3.tgz" + "version" "7.8.3" + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-pipeline-operator@^7.15.0": + "integrity" "sha512-APuEsBJFWgLasnPi3XS4o7AW24Z8hsX1odmCl9it1fpIA38E2+rSWk6zy1MpFQYKGyphlh84dJB4MtDwI0XN5w==" + "resolved" "https://registry.npmjs.org/@babel/plugin-syntax-pipeline-operator/-/plugin-syntax-pipeline-operator-7.15.0.tgz" + "version" "7.15.0" + dependencies: + "@babel/helper-plugin-utils" "^7.14.5" + +"@babel/plugin-syntax-private-property-in-object@^7.14.5": + "integrity" "sha512-0wVnp9dxJ72ZUJDV27ZfbSj6iHLoytYZmh3rFcxNnvsJF3ktkzLDZPy/mA17HGsaQT3/DQsWYX1f1QGWkCoVUg==" + "resolved" "https://registry.npmjs.org/@babel/plugin-syntax-private-property-in-object/-/plugin-syntax-private-property-in-object-7.14.5.tgz" + "version" "7.14.5" + dependencies: + "@babel/helper-plugin-utils" "^7.14.5" + +"@babel/plugin-syntax-top-level-await@^7.14.5": + "integrity" "sha512-hx++upLv5U1rgYfwe1xBQUhRmU41NEvpUvrp8jkrSCdvGSnM5/qdRMtylJ6PG5OFkBaHkbTAKTnd3/YyESRHFw==" + "resolved" "https://registry.npmjs.org/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.14.5.tgz" + "version" "7.14.5" + dependencies: + "@babel/helper-plugin-utils" "^7.14.5" + +"@babel/plugin-syntax-typescript@^7.14.5": + "integrity" "sha512-u6OXzDaIXjEstBRRoBCQ/uKQKlbuaeE5in0RvWdA4pN6AhqxTIwUsnHPU1CFZA/amYObMsuWhYfRl3Ch90HD0Q==" + "resolved" 
"https://registry.npmjs.org/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.14.5.tgz" + "version" "7.14.5" + dependencies: + "@babel/helper-plugin-utils" "^7.14.5" + +"@babel/plugin-transform-arrow-functions@^7.14.5": + "integrity" "sha512-KOnO0l4+tD5IfOdi4x8C1XmEIRWUjNRV8wc6K2vz/3e8yAOoZZvsRXRRIF/yo/MAOFb4QjtAw9xSxMXbSMRy8A==" + "resolved" "https://registry.npmjs.org/@babel/plugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.14.5.tgz" + "version" "7.14.5" + dependencies: + "@babel/helper-plugin-utils" "^7.14.5" + +"@babel/plugin-transform-async-to-generator@^7.14.5": + "integrity" "sha512-szkbzQ0mNk0rpu76fzDdqSyPu0MuvpXgC+6rz5rpMb5OIRxdmHfQxrktL8CYolL2d8luMCZTR0DpIMIdL27IjA==" + "resolved" "https://registry.npmjs.org/@babel/plugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.14.5.tgz" + "version" "7.14.5" + dependencies: + "@babel/helper-module-imports" "^7.14.5" + "@babel/helper-plugin-utils" "^7.14.5" + "@babel/helper-remap-async-to-generator" "^7.14.5" + +"@babel/plugin-transform-block-scoped-functions@^7.14.5": + "integrity" "sha512-dtqWqdWZ5NqBX3KzsVCWfQI3A53Ft5pWFCT2eCVUftWZgjc5DpDponbIF1+c+7cSGk2wN0YK7HGL/ezfRbpKBQ==" + "resolved" "https://registry.npmjs.org/@babel/plugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.14.5.tgz" + "version" "7.14.5" + dependencies: + "@babel/helper-plugin-utils" "^7.14.5" + +"@babel/plugin-transform-block-scoping@^7.15.3": + "integrity" "sha512-nBAzfZwZb4DkaGtOes1Up1nOAp9TDRRFw4XBzBBSG9QK7KVFmYzgj9o9sbPv7TX5ofL4Auq4wZnxCoPnI/lz2Q==" + "resolved" "https://registry.npmjs.org/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.15.3.tgz" + "version" "7.15.3" + dependencies: + "@babel/helper-plugin-utils" "^7.14.5" + +"@babel/plugin-transform-classes@^7.15.4": + "integrity" "sha512-Yjvhex8GzBmmPQUvpXRPWQ9WnxXgAFuZSrqOK/eJlOGIXwvv8H3UEdUigl1gb/bnjTrln+e8bkZUYCBt/xYlBg==" + "resolved" "https://registry.npmjs.org/@babel/plugin-transform-classes/-/plugin-transform-classes-7.15.4.tgz" + "version" "7.15.4" + dependencies: + "@babel/helper-annotate-as-pure" "^7.15.4" + "@babel/helper-function-name" "^7.15.4" + "@babel/helper-optimise-call-expression" "^7.15.4" + "@babel/helper-plugin-utils" "^7.14.5" + "@babel/helper-replace-supers" "^7.15.4" + "@babel/helper-split-export-declaration" "^7.15.4" + "globals" "^11.1.0" + +"@babel/plugin-transform-computed-properties@^7.14.5": + "integrity" "sha512-pWM+E4283UxaVzLb8UBXv4EIxMovU4zxT1OPnpHJcmnvyY9QbPPTKZfEj31EUvG3/EQRbYAGaYEUZ4yWOBC2xg==" + "resolved" "https://registry.npmjs.org/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.14.5.tgz" + "version" "7.14.5" + dependencies: + "@babel/helper-plugin-utils" "^7.14.5" + +"@babel/plugin-transform-destructuring@^7.14.7": + "integrity" "sha512-0mDE99nK+kVh3xlc5vKwB6wnP9ecuSj+zQCa/n0voENtP/zymdT4HH6QEb65wjjcbqr1Jb/7z9Qp7TF5FtwYGw==" + "resolved" "https://registry.npmjs.org/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.14.7.tgz" + "version" "7.14.7" + dependencies: + "@babel/helper-plugin-utils" "^7.14.5" + +"@babel/plugin-transform-dotall-regex@^7.14.5", "@babel/plugin-transform-dotall-regex@^7.4.4": + "integrity" "sha512-loGlnBdj02MDsFaHhAIJzh7euK89lBrGIdM9EAtHFo6xKygCUGuuWe07o1oZVk287amtW1n0808sQM99aZt3gw==" + "resolved" "https://registry.npmjs.org/@babel/plugin-transform-dotall-regex/-/plugin-transform-dotall-regex-7.14.5.tgz" + "version" "7.14.5" + dependencies: + 
"@babel/helper-create-regexp-features-plugin" "^7.14.5" + "@babel/helper-plugin-utils" "^7.14.5" + +"@babel/plugin-transform-duplicate-keys@^7.14.5": + "integrity" "sha512-iJjbI53huKbPDAsJ8EmVmvCKeeq21bAze4fu9GBQtSLqfvzj2oRuHVx4ZkDwEhg1htQ+5OBZh/Ab0XDf5iBZ7A==" + "resolved" "https://registry.npmjs.org/@babel/plugin-transform-duplicate-keys/-/plugin-transform-duplicate-keys-7.14.5.tgz" + "version" "7.14.5" + dependencies: + "@babel/helper-plugin-utils" "^7.14.5" + +"@babel/plugin-transform-exponentiation-operator@^7.14.5": + "integrity" "sha512-jFazJhMBc9D27o9jDnIE5ZErI0R0m7PbKXVq77FFvqFbzvTMuv8jaAwLZ5PviOLSFttqKIW0/wxNSDbjLk0tYA==" + "resolved" "https://registry.npmjs.org/@babel/plugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.14.5.tgz" + "version" "7.14.5" + dependencies: + "@babel/helper-builder-binary-assignment-operator-visitor" "^7.14.5" + "@babel/helper-plugin-utils" "^7.14.5" + +"@babel/plugin-transform-for-of@^7.15.4": + "integrity" "sha512-DRTY9fA751AFBDh2oxydvVm4SYevs5ILTWLs6xKXps4Re/KG5nfUkr+TdHCrRWB8C69TlzVgA9b3RmGWmgN9LA==" + "resolved" "https://registry.npmjs.org/@babel/plugin-transform-for-of/-/plugin-transform-for-of-7.15.4.tgz" + "version" "7.15.4" + dependencies: + "@babel/helper-plugin-utils" "^7.14.5" + +"@babel/plugin-transform-function-name@^7.14.5": + "integrity" "sha512-vbO6kv0fIzZ1GpmGQuvbwwm+O4Cbm2NrPzwlup9+/3fdkuzo1YqOZcXw26+YUJB84Ja7j9yURWposEHLYwxUfQ==" + "resolved" "https://registry.npmjs.org/@babel/plugin-transform-function-name/-/plugin-transform-function-name-7.14.5.tgz" + "version" "7.14.5" + dependencies: + "@babel/helper-function-name" "^7.14.5" + "@babel/helper-plugin-utils" "^7.14.5" + +"@babel/plugin-transform-literals@^7.14.5": + "integrity" "sha512-ql33+epql2F49bi8aHXxvLURHkxJbSmMKl9J5yHqg4PLtdE6Uc48CH1GS6TQvZ86eoB/ApZXwm7jlA+B3kra7A==" + "resolved" "https://registry.npmjs.org/@babel/plugin-transform-literals/-/plugin-transform-literals-7.14.5.tgz" + "version" "7.14.5" + dependencies: + "@babel/helper-plugin-utils" "^7.14.5" + +"@babel/plugin-transform-member-expression-literals@^7.14.5": + "integrity" "sha512-WkNXxH1VXVTKarWFqmso83xl+2V3Eo28YY5utIkbsmXoItO8Q3aZxN4BTS2k0hz9dGUloHK26mJMyQEYfkn/+Q==" + "resolved" "https://registry.npmjs.org/@babel/plugin-transform-member-expression-literals/-/plugin-transform-member-expression-literals-7.14.5.tgz" + "version" "7.14.5" + dependencies: + "@babel/helper-plugin-utils" "^7.14.5" + +"@babel/plugin-transform-modules-amd@^7.14.5": + "integrity" "sha512-3lpOU8Vxmp3roC4vzFpSdEpGUWSMsHFreTWOMMLzel2gNGfHE5UWIh/LN6ghHs2xurUp4jRFYMUIZhuFbody1g==" + "resolved" "https://registry.npmjs.org/@babel/plugin-transform-modules-amd/-/plugin-transform-modules-amd-7.14.5.tgz" + "version" "7.14.5" + dependencies: + "@babel/helper-module-transforms" "^7.14.5" + "@babel/helper-plugin-utils" "^7.14.5" + "babel-plugin-dynamic-import-node" "^2.3.3" + +"@babel/plugin-transform-modules-commonjs@^7.15.4": + "integrity" "sha512-qg4DPhwG8hKp4BbVDvX1s8cohM8a6Bvptu4l6Iingq5rW+yRUAhe/YRup/YcW2zCOlrysEWVhftIcKzrEZv3sA==" + "resolved" "https://registry.npmjs.org/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.15.4.tgz" + "version" "7.15.4" + dependencies: + "@babel/helper-module-transforms" "^7.15.4" + "@babel/helper-plugin-utils" "^7.14.5" + "@babel/helper-simple-access" "^7.15.4" + "babel-plugin-dynamic-import-node" "^2.3.3" + +"@babel/plugin-transform-modules-systemjs@^7.15.4": + "integrity" 
"sha512-fJUnlQrl/mezMneR72CKCgtOoahqGJNVKpompKwzv3BrEXdlPspTcyxrZ1XmDTIr9PpULrgEQo3qNKp6dW7ssw==" + "resolved" "https://registry.npmjs.org/@babel/plugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.15.4.tgz" + "version" "7.15.4" + dependencies: + "@babel/helper-hoist-variables" "^7.15.4" + "@babel/helper-module-transforms" "^7.15.4" + "@babel/helper-plugin-utils" "^7.14.5" + "@babel/helper-validator-identifier" "^7.14.9" + "babel-plugin-dynamic-import-node" "^2.3.3" + +"@babel/plugin-transform-modules-umd@^7.14.5": + "integrity" "sha512-RfPGoagSngC06LsGUYyM9QWSXZ8MysEjDJTAea1lqRjNECE3y0qIJF/qbvJxc4oA4s99HumIMdXOrd+TdKaAAA==" + "resolved" "https://registry.npmjs.org/@babel/plugin-transform-modules-umd/-/plugin-transform-modules-umd-7.14.5.tgz" + "version" "7.14.5" + dependencies: + "@babel/helper-module-transforms" "^7.14.5" + "@babel/helper-plugin-utils" "^7.14.5" + +"@babel/plugin-transform-named-capturing-groups-regex@^7.14.9": + "integrity" "sha512-l666wCVYO75mlAtGFfyFwnWmIXQm3kSH0C3IRnJqWcZbWkoihyAdDhFm2ZWaxWTqvBvhVFfJjMRQ0ez4oN1yYA==" + "resolved" "https://registry.npmjs.org/@babel/plugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.14.9.tgz" + "version" "7.14.9" + dependencies: + "@babel/helper-create-regexp-features-plugin" "^7.14.5" + +"@babel/plugin-transform-new-target@^7.14.5": + "integrity" "sha512-Nx054zovz6IIRWEB49RDRuXGI4Gy0GMgqG0cII9L3MxqgXz/+rgII+RU58qpo4g7tNEx1jG7rRVH4ihZoP4esQ==" + "resolved" "https://registry.npmjs.org/@babel/plugin-transform-new-target/-/plugin-transform-new-target-7.14.5.tgz" + "version" "7.14.5" + dependencies: + "@babel/helper-plugin-utils" "^7.14.5" + +"@babel/plugin-transform-object-super@^7.14.5": + "integrity" "sha512-MKfOBWzK0pZIrav9z/hkRqIk/2bTv9qvxHzPQc12RcVkMOzpIKnFCNYJip00ssKWYkd8Sf5g0Wr7pqJ+cmtuFg==" + "resolved" "https://registry.npmjs.org/@babel/plugin-transform-object-super/-/plugin-transform-object-super-7.14.5.tgz" + "version" "7.14.5" + dependencies: + "@babel/helper-plugin-utils" "^7.14.5" + "@babel/helper-replace-supers" "^7.14.5" + +"@babel/plugin-transform-parameters@^7.15.4": + "integrity" "sha512-9WB/GUTO6lvJU3XQsSr6J/WKvBC2hcs4Pew8YxZagi6GkTdniyqp8On5kqdK8MN0LMeu0mGbhPN+O049NV/9FQ==" + "resolved" "https://registry.npmjs.org/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.15.4.tgz" + "version" "7.15.4" + dependencies: + "@babel/helper-plugin-utils" "^7.14.5" + +"@babel/plugin-transform-property-literals@^7.14.5": + "integrity" "sha512-r1uilDthkgXW8Z1vJz2dKYLV1tuw2xsbrp3MrZmD99Wh9vsfKoob+JTgri5VUb/JqyKRXotlOtwgu4stIYCmnw==" + "resolved" "https://registry.npmjs.org/@babel/plugin-transform-property-literals/-/plugin-transform-property-literals-7.14.5.tgz" + "version" "7.14.5" + dependencies: + "@babel/helper-plugin-utils" "^7.14.5" + +"@babel/plugin-transform-regenerator@^7.14.5": + "integrity" "sha512-NVIY1W3ITDP5xQl50NgTKlZ0GrotKtLna08/uGY6ErQt6VEQZXla86x/CTddm5gZdcr+5GSsvMeTmWA5Ii6pkg==" + "resolved" "https://registry.npmjs.org/@babel/plugin-transform-regenerator/-/plugin-transform-regenerator-7.14.5.tgz" + "version" "7.14.5" + dependencies: + "regenerator-transform" "^0.14.2" + +"@babel/plugin-transform-reserved-words@^7.14.5": + "integrity" "sha512-cv4F2rv1nD4qdexOGsRQXJrOcyb5CrgjUH9PKrrtyhSDBNWGxd0UIitjyJiWagS+EbUGjG++22mGH1Pub8D6Vg==" + "resolved" "https://registry.npmjs.org/@babel/plugin-transform-reserved-words/-/plugin-transform-reserved-words-7.14.5.tgz" + "version" "7.14.5" + dependencies: + "@babel/helper-plugin-utils" "^7.14.5" + 
+"@babel/plugin-transform-runtime@^7.12.13": + "integrity" "sha512-+6zsde91jMzzvkzuEA3k63zCw+tm/GvuuabkpisgbDMTPQsIMHllE3XczJFFtEHLjjhKQFZmGQVRdELetlWpVw==" + "resolved" "https://registry.npmjs.org/@babel/plugin-transform-runtime/-/plugin-transform-runtime-7.15.8.tgz" + "version" "7.15.8" + dependencies: + "@babel/helper-module-imports" "^7.15.4" + "@babel/helper-plugin-utils" "^7.14.5" + "babel-plugin-polyfill-corejs2" "^0.2.2" + "babel-plugin-polyfill-corejs3" "^0.2.5" + "babel-plugin-polyfill-regenerator" "^0.2.2" + "semver" "^6.3.0" + +"@babel/plugin-transform-shorthand-properties@^7.14.5": + "integrity" "sha512-xLucks6T1VmGsTB+GWK5Pl9Jl5+nRXD1uoFdA5TSO6xtiNjtXTjKkmPdFXVLGlK5A2/or/wQMKfmQ2Y0XJfn5g==" + "resolved" "https://registry.npmjs.org/@babel/plugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.14.5.tgz" + "version" "7.14.5" + dependencies: + "@babel/helper-plugin-utils" "^7.14.5" + +"@babel/plugin-transform-spread@^7.15.8": + "integrity" "sha512-/daZ8s2tNaRekl9YJa9X4bzjpeRZLt122cpgFnQPLGUe61PH8zMEBmYqKkW5xF5JUEh5buEGXJoQpqBmIbpmEQ==" + "resolved" "https://registry.npmjs.org/@babel/plugin-transform-spread/-/plugin-transform-spread-7.15.8.tgz" + "version" "7.15.8" + dependencies: + "@babel/helper-plugin-utils" "^7.14.5" + "@babel/helper-skip-transparent-expression-wrappers" "^7.15.4" + +"@babel/plugin-transform-sticky-regex@^7.14.5": + "integrity" "sha512-Z7F7GyvEMzIIbwnziAZmnSNpdijdr4dWt+FJNBnBLz5mwDFkqIXU9wmBcWWad3QeJF5hMTkRe4dAq2sUZiG+8A==" + "resolved" "https://registry.npmjs.org/@babel/plugin-transform-sticky-regex/-/plugin-transform-sticky-regex-7.14.5.tgz" + "version" "7.14.5" + dependencies: + "@babel/helper-plugin-utils" "^7.14.5" + +"@babel/plugin-transform-template-literals@^7.14.5": + "integrity" "sha512-22btZeURqiepOfuy/VkFr+zStqlujWaarpMErvay7goJS6BWwdd6BY9zQyDLDa4x2S3VugxFb162IZ4m/S/+Gg==" + "resolved" "https://registry.npmjs.org/@babel/plugin-transform-template-literals/-/plugin-transform-template-literals-7.14.5.tgz" + "version" "7.14.5" + dependencies: + "@babel/helper-plugin-utils" "^7.14.5" + +"@babel/plugin-transform-typeof-symbol@^7.14.5": + "integrity" "sha512-lXzLD30ffCWseTbMQzrvDWqljvZlHkXU+CnseMhkMNqU1sASnCsz3tSzAaH3vCUXb9PHeUb90ZT1BdFTm1xxJw==" + "resolved" "https://registry.npmjs.org/@babel/plugin-transform-typeof-symbol/-/plugin-transform-typeof-symbol-7.14.5.tgz" + "version" "7.14.5" + dependencies: + "@babel/helper-plugin-utils" "^7.14.5" + +"@babel/plugin-transform-typescript@^7.15.0": + "integrity" "sha512-ZXIkJpbaf6/EsmjeTbiJN/yMxWPFWvlr7sEG1P95Xb4S4IBcrf2n7s/fItIhsAmOf8oSh3VJPDppO6ExfAfKRQ==" + "resolved" "https://registry.npmjs.org/@babel/plugin-transform-typescript/-/plugin-transform-typescript-7.15.8.tgz" + "version" "7.15.8" + dependencies: + "@babel/helper-create-class-features-plugin" "^7.15.4" + "@babel/helper-plugin-utils" "^7.14.5" + "@babel/plugin-syntax-typescript" "^7.14.5" + +"@babel/plugin-transform-unicode-escapes@^7.14.5": + "integrity" "sha512-crTo4jATEOjxj7bt9lbYXcBAM3LZaUrbP2uUdxb6WIorLmjNKSpHfIybgY4B8SRpbf8tEVIWH3Vtm7ayCrKocA==" + "resolved" "https://registry.npmjs.org/@babel/plugin-transform-unicode-escapes/-/plugin-transform-unicode-escapes-7.14.5.tgz" + "version" "7.14.5" + dependencies: + "@babel/helper-plugin-utils" "^7.14.5" + +"@babel/plugin-transform-unicode-regex@^7.14.5": + "integrity" "sha512-UygduJpC5kHeCiRw/xDVzC+wj8VaYSoKl5JNVmbP7MadpNinAm3SvZCxZ42H37KZBKztz46YC73i9yV34d0Tzw==" + "resolved" 
"https://registry.npmjs.org/@babel/plugin-transform-unicode-regex/-/plugin-transform-unicode-regex-7.14.5.tgz" + "version" "7.14.5" + dependencies: + "@babel/helper-create-regexp-features-plugin" "^7.14.5" + "@babel/helper-plugin-utils" "^7.14.5" + +"@babel/preset-env@^7.12.13", "@babel/preset-env@^7.15.0": + "integrity" "sha512-rCC0wH8husJgY4FPbHsiYyiLxSY8oMDJH7Rl6RQMknbN9oDDHhM9RDFvnGM2MgkbUJzSQB4gtuwygY5mCqGSsA==" + "resolved" "https://registry.npmjs.org/@babel/preset-env/-/preset-env-7.15.8.tgz" + "version" "7.15.8" + dependencies: + "@babel/compat-data" "^7.15.0" + "@babel/helper-compilation-targets" "^7.15.4" + "@babel/helper-plugin-utils" "^7.14.5" + "@babel/helper-validator-option" "^7.14.5" + "@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining" "^7.15.4" + "@babel/plugin-proposal-async-generator-functions" "^7.15.8" + "@babel/plugin-proposal-class-properties" "^7.14.5" + "@babel/plugin-proposal-class-static-block" "^7.15.4" + "@babel/plugin-proposal-dynamic-import" "^7.14.5" + "@babel/plugin-proposal-export-namespace-from" "^7.14.5" + "@babel/plugin-proposal-json-strings" "^7.14.5" + "@babel/plugin-proposal-logical-assignment-operators" "^7.14.5" + "@babel/plugin-proposal-nullish-coalescing-operator" "^7.14.5" + "@babel/plugin-proposal-numeric-separator" "^7.14.5" + "@babel/plugin-proposal-object-rest-spread" "^7.15.6" + "@babel/plugin-proposal-optional-catch-binding" "^7.14.5" + "@babel/plugin-proposal-optional-chaining" "^7.14.5" + "@babel/plugin-proposal-private-methods" "^7.14.5" + "@babel/plugin-proposal-private-property-in-object" "^7.15.4" + "@babel/plugin-proposal-unicode-property-regex" "^7.14.5" + "@babel/plugin-syntax-async-generators" "^7.8.4" + "@babel/plugin-syntax-class-properties" "^7.12.13" + "@babel/plugin-syntax-class-static-block" "^7.14.5" + "@babel/plugin-syntax-dynamic-import" "^7.8.3" + "@babel/plugin-syntax-export-namespace-from" "^7.8.3" + "@babel/plugin-syntax-json-strings" "^7.8.3" + "@babel/plugin-syntax-logical-assignment-operators" "^7.10.4" + "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.3" + "@babel/plugin-syntax-numeric-separator" "^7.10.4" + "@babel/plugin-syntax-object-rest-spread" "^7.8.3" + "@babel/plugin-syntax-optional-catch-binding" "^7.8.3" + "@babel/plugin-syntax-optional-chaining" "^7.8.3" + "@babel/plugin-syntax-private-property-in-object" "^7.14.5" + "@babel/plugin-syntax-top-level-await" "^7.14.5" + "@babel/plugin-transform-arrow-functions" "^7.14.5" + "@babel/plugin-transform-async-to-generator" "^7.14.5" + "@babel/plugin-transform-block-scoped-functions" "^7.14.5" + "@babel/plugin-transform-block-scoping" "^7.15.3" + "@babel/plugin-transform-classes" "^7.15.4" + "@babel/plugin-transform-computed-properties" "^7.14.5" + "@babel/plugin-transform-destructuring" "^7.14.7" + "@babel/plugin-transform-dotall-regex" "^7.14.5" + "@babel/plugin-transform-duplicate-keys" "^7.14.5" + "@babel/plugin-transform-exponentiation-operator" "^7.14.5" + "@babel/plugin-transform-for-of" "^7.15.4" + "@babel/plugin-transform-function-name" "^7.14.5" + "@babel/plugin-transform-literals" "^7.14.5" + "@babel/plugin-transform-member-expression-literals" "^7.14.5" + "@babel/plugin-transform-modules-amd" "^7.14.5" + "@babel/plugin-transform-modules-commonjs" "^7.15.4" + "@babel/plugin-transform-modules-systemjs" "^7.15.4" + "@babel/plugin-transform-modules-umd" "^7.14.5" + "@babel/plugin-transform-named-capturing-groups-regex" "^7.14.9" + "@babel/plugin-transform-new-target" "^7.14.5" + "@babel/plugin-transform-object-super" "^7.14.5" + 
"@babel/plugin-transform-parameters" "^7.15.4" + "@babel/plugin-transform-property-literals" "^7.14.5" + "@babel/plugin-transform-regenerator" "^7.14.5" + "@babel/plugin-transform-reserved-words" "^7.14.5" + "@babel/plugin-transform-shorthand-properties" "^7.14.5" + "@babel/plugin-transform-spread" "^7.15.8" + "@babel/plugin-transform-sticky-regex" "^7.14.5" + "@babel/plugin-transform-template-literals" "^7.14.5" + "@babel/plugin-transform-typeof-symbol" "^7.14.5" + "@babel/plugin-transform-unicode-escapes" "^7.14.5" + "@babel/plugin-transform-unicode-regex" "^7.14.5" + "@babel/preset-modules" "^0.1.4" + "@babel/types" "^7.15.6" + "babel-plugin-polyfill-corejs2" "^0.2.2" + "babel-plugin-polyfill-corejs3" "^0.2.5" + "babel-plugin-polyfill-regenerator" "^0.2.2" + "core-js-compat" "^3.16.0" + "semver" "^6.3.0" + +"@babel/preset-modules@^0.1.4": + "integrity" "sha512-A57th6YRG7oR3cq/yt/Y84MvGgE0eJG2F1JLhKuyG+jFxEgrd/HAMJatiFtmOiZurz+0DkrvbheCLaV5f2JfjA==" + "resolved" "https://registry.npmjs.org/@babel/preset-modules/-/preset-modules-0.1.5.tgz" + "version" "0.1.5" + dependencies: + "@babel/helper-plugin-utils" "^7.0.0" + "@babel/plugin-proposal-unicode-property-regex" "^7.4.4" + "@babel/plugin-transform-dotall-regex" "^7.4.4" + "@babel/types" "^7.4.4" + "esutils" "^2.0.2" + +"@babel/preset-typescript@^7.15.0": + "integrity" "sha512-lt0Y/8V3y06Wq/8H/u0WakrqciZ7Fz7mwPDHWUJAXlABL5hiUG42BNlRXiELNjeWjO5rWmnNKlx+yzJvxezHow==" + "resolved" "https://registry.npmjs.org/@babel/preset-typescript/-/preset-typescript-7.15.0.tgz" + "version" "7.15.0" + dependencies: + "@babel/helper-plugin-utils" "^7.14.5" + "@babel/helper-validator-option" "^7.14.5" + "@babel/plugin-transform-typescript" "^7.15.0" + +"@babel/register@^7.15.3": + "integrity" "sha512-mj4IY1ZJkorClxKTImccn4T81+UKTo4Ux0+OFSV9hME1ooqS9UV+pJ6BjD0qXPK4T3XW/KNa79XByjeEMZz+fw==" + "resolved" "https://registry.npmjs.org/@babel/register/-/register-7.15.3.tgz" + "version" "7.15.3" + dependencies: + "clone-deep" "^4.0.1" + "find-cache-dir" "^2.0.0" + "make-dir" "^2.1.0" + "pirates" "^4.0.0" + "source-map-support" "^0.5.16" + +"@babel/runtime@^7.0.0", "@babel/runtime@^7.10.5", "@babel/runtime@^7.8.4": + "integrity" "sha512-99catp6bHCaxr4sJ/DbTGgHS4+Rs2RVd2g7iOap6SLGPDknRK9ztKNsE/Fg6QhSeh1FGE5f6gHGQmvvn3I3xhw==" + "resolved" "https://registry.npmjs.org/@babel/runtime/-/runtime-7.15.4.tgz" + "version" "7.15.4" + dependencies: + "regenerator-runtime" "^0.13.4" + +"@babel/template@^7.0.0", "@babel/template@^7.15.4", "@babel/template@^7.16.7": + "integrity" "sha512-I8j/x8kHUrbYRTUxXrrMbfCa7jxkE7tZre39x3kjr9hvI82cK1FfqLygotcWN5kdPGWcLdWMHpSBavse5tWw3w==" + "resolved" "https://registry.npmjs.org/@babel/template/-/template-7.16.7.tgz" + "version" "7.16.7" + dependencies: + "@babel/code-frame" "^7.16.7" + "@babel/parser" "^7.16.7" + "@babel/types" "^7.16.7" + +"@babel/traverse@^7.0.0", "@babel/traverse@^7.13.0", "@babel/traverse@^7.15.0", "@babel/traverse@^7.15.4", "@babel/traverse@^7.17.10", "@babel/traverse@^7.17.3", "@babel/traverse@^7.17.9": + "integrity" "sha512-VmbrTHQteIdUUQNTb+zE12SHS/xQVIShmBPhlNP12hD5poF2pbITW1Z4172d03HegaQWhLffdkRJYtAzp0AGcw==" + "resolved" "https://registry.npmjs.org/@babel/traverse/-/traverse-7.17.10.tgz" + "version" "7.17.10" + dependencies: + "@babel/code-frame" "^7.16.7" + "@babel/generator" "^7.17.10" + "@babel/helper-environment-visitor" "^7.16.7" + "@babel/helper-function-name" "^7.17.9" + "@babel/helper-hoist-variables" "^7.16.7" + "@babel/helper-split-export-declaration" "^7.16.7" + "@babel/parser" "^7.17.10" + 
"@babel/types" "^7.17.10" + "debug" "^4.1.0" + "globals" "^11.1.0" + +"@babel/types@^7.0.0", "@babel/types@^7.15.4", "@babel/types@^7.15.6", "@babel/types@^7.16.7", "@babel/types@^7.17.0", "@babel/types@^7.17.10", "@babel/types@^7.4.4": + "integrity" "sha512-9O26jG0mBYfGkUYCYZRnBwbVLd1UZOICEr2Em6InB6jVfsAv1GKgwXHmrSg+WFWDmeKTA6vyTZiN8tCSM5Oo3A==" + "resolved" "https://registry.npmjs.org/@babel/types/-/types-7.17.10.tgz" + "version" "7.17.10" + dependencies: + "@babel/helper-validator-identifier" "^7.16.7" + "to-fast-properties" "^2.0.0" + +"@ctrl/tinycolor@^3.3.1": + "integrity" "sha512-JZButFdZ1+/xAfpguQHoabIXkcqRRKpMrWKBkpEZZyxfY9C1DpADFB8PEqGSTeFr135SaTRfKqGKx5xSCLI7ZQ==" + "resolved" "https://registry.npmjs.org/@ctrl/tinycolor/-/tinycolor-3.4.0.tgz" + "version" "3.4.0" + +"@eslint/eslintrc@^0.4.3": + "integrity" "sha512-J6KFFz5QCYUJq3pf0mjEcCJVERbzv71PUIDczuh9JkwGEzced6CO5ADLHB1rbf/+oPBtoPfMYNOpGDzCANlbXw==" + "resolved" "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-0.4.3.tgz" + "version" "0.4.3" + dependencies: + "ajv" "^6.12.4" + "debug" "^4.1.1" + "espree" "^7.3.0" + "globals" "^13.9.0" + "ignore" "^4.0.6" + "import-fresh" "^3.2.1" + "js-yaml" "^3.13.1" + "minimatch" "^3.0.4" + "strip-json-comments" "^3.1.1" + +"@fesjs/compiler@^2.0.5": + "integrity" "sha512-zfKZcwOBRVtjnNQRvPtvffBN8gw3oSeI4+OkH9cUoh0irqpYBLX9FkaJmCMo+RlG3R1ke21Ssk/ZU/r7VyF5jA==" + "resolved" "https://registry.npmjs.org/@fesjs/compiler/-/compiler-2.0.5.tgz" + "version" "2.0.5" + dependencies: + "@babel/core" "^7.16.7" + "@babel/preset-env" "^7.15.0" + "@babel/register" "^7.15.3" + "@fesjs/utils" "^2.0.4" + "commander" "^7.0.0" + "dotenv" "8.2.0" + "joi" "17.3.0" + "readline" "^1.3.0" + "set-value" "3.0.2" + "tapable" "^2.2.0" + +"@fesjs/fes@^2.0.0": + "integrity" "sha512-BVHwYH9k6RLAKjUL2jwC24asPcBdK2DuLYrxBeq+LFtdbGOB3PRTf1D1lUMjNdeJYRcajmsC5Wfv8IbZEPj/Tg==" + "resolved" "https://registry.npmjs.org/@fesjs/fes/-/fes-2.1.4.tgz" + "version" "2.1.4" + dependencies: + "@fesjs/compiler" "^2.0.5" + "@fesjs/preset-built-in" "^2.1.4" + "@fesjs/runtime" "^2.0.2" + "@fesjs/utils" "^2.0.4" + "resolve-cwd" "^3.0.0" + +"@fesjs/plugin-access@^2.0.0": + "integrity" "sha512-4J5ltQhx4LRZoSMRw7SjjikyMv1nWX06uuYg1z0xUcfgN9ju/4Xo4PpvGDf3qokA9Z8v3uh9XZ5PeT3V4kxAEw==" + "resolved" "https://registry.npmjs.org/@fesjs/plugin-access/-/plugin-access-2.0.6.tgz" + "version" "2.0.6" + dependencies: + "@fesjs/utils" "^2.0.4" + "lodash-es" "^4.17.15" + +"@fesjs/plugin-enums@^2.0.0": + "integrity" "sha512-rqwUUJmFcM9IxCcFIRJArriB+znXGGHkJhQbp6H4n48l6mnvwoJ0ChXOPlF5Bp9R7Qtv9tn/0U0PJ0c0YZCaZA==" + "resolved" "https://registry.npmjs.org/@fesjs/plugin-enums/-/plugin-enums-2.0.2.tgz" + "version" "2.0.2" + +"@fesjs/plugin-layout@^2.0.8": + "integrity" "sha512-jC7ZKEnN9TRF66K7+EhPbQPiwNvngHjHBU8RVoZUeNfm6AnKN5KSHjXZor3rPFXd7qirC4Z1VAnrNn6Tb4wt0g==" + "resolved" "https://registry.npmmirror.com/@fesjs/plugin-layout/download/@fesjs/plugin-layout-2.0.9.tgz" + "version" "2.0.9" + dependencies: + "@fesjs/utils" "^2.0.3" + +"@fesjs/plugin-locale@^2.0.3": + "integrity" "sha512-jfexzCxqbAr6QlpiVIFHE8tsqPnqlVhtc8YIwO8JfR24iCXhidMaSp7bdd5HRSoiFrFNHcw6ArzHijlMtuGBYg==" + "resolved" "https://registry.npmjs.org/@fesjs/plugin-locale/-/plugin-locale-2.0.4.tgz" + "version" "2.0.4" + dependencies: + "@fesjs/utils" "^2.0.3" + "vue-i18n" "^9.0.0" + +"@fesjs/plugin-model@^2.0.0": + "integrity" "sha512-snLskfueCJ7NzkaV3kc0clwjZpttBQjtA3i9b5CfcZ7WzyHqS/x7VMsswgzxeWOmQl01gV255InHxoTi3mMIdQ==" + "resolved" 
"https://registry.npmjs.org/@fesjs/plugin-model/-/plugin-model-2.0.5.tgz" + "version" "2.0.5" + dependencies: + "@fesjs/utils" "^2.0.4" + +"@fesjs/plugin-request@^2.0.2": + "integrity" "sha512-AXmewksnO4blaEKg4yxmg8nFy4LsrruNV7jZLOcCfwaj6ibcjocdESDC+2dqb9HCz6dd5ydQOoHxb/2NdVZU+g==" + "resolved" "https://registry.npmjs.org/@fesjs/plugin-request/-/plugin-request-2.0.15.tgz" + "version" "2.0.15" + dependencies: + "@fesjs/compiler" "^2.0.5" + "@fesjs/utils" "^2.0.4" + "axios" "0.21.1" + +"@fesjs/preset-built-in@^2.1.4": + "integrity" "sha512-YCC5I+6vUtf00bR5IhN5BWCnC7OES/5W/aWPUuO1JbrIHdrvf7oTbBJ+HJ7VRDezBiF7eLBP0ugpRV9tcwDl8A==" + "resolved" "https://registry.npmjs.org/@fesjs/preset-built-in/-/preset-built-in-2.1.4.tgz" + "version" "2.1.4" + dependencies: + "@babel/core" "^7.12.13" + "@babel/plugin-proposal-do-expressions" "^7.12.13" + "@babel/plugin-proposal-export-default-from" "^7.12.13" + "@babel/plugin-proposal-function-bind" "^7.12.13" + "@babel/plugin-proposal-pipeline-operator" "^7.12.13" + "@babel/plugin-transform-runtime" "^7.12.13" + "@babel/preset-env" "^7.12.13" + "@babel/preset-typescript" "^7.15.0" + "@fesjs/compiler" "^2.0.5" + "@fesjs/utils" "^2.0.4" + "@soda/friendly-errors-webpack-plugin" "^1.8.0" + "@vue/babel-plugin-jsx" "^1.0.2" + "autoprefixer" "^10.2.4" + "babel-loader" "^8.2.2" + "babel-plugin-import" "1.13.3" + "body-parser" "^1.19.0" + "cli-highlight" "^2.1.4" + "cliui" "7.0.4" + "connect-history-api-fallback" "^1.6.0" + "cookie-parser" "^1.4.5" + "copy-webpack-plugin" "^10.2.4" + "core-js" "^3.8.3" + "css-loader" "^5.0.1" + "css-minimizer-webpack-plugin" "^3.0.0" + "envinfo" "^7.7.3" + "file-loader" "^6.2.0" + "html-webpack-plugin" "^5.0.0" + "html-webpack-tags-plugin" "^3.0.0" + "less" "3.9.0" + "less-loader" "^8.0.0" + "mini-css-extract-plugin" "^1.3.5" + "mockjs" "^1.1.0" + "postcss" "8.3.0" + "postcss-flexbugs-fixes" "^5.0.2" + "postcss-loader" "^4.2.0" + "postcss-safe-parser" "^5.0.2" + "qs" "^6.10.2" + "raw-loader" "^4.0.2" + "style-loader" "^2.0.0" + "url-loader" "^4.1.1" + "vue-loader" "^16.1.2" + "webpack" "^5.24.2" + "webpack-bundle-analyzer" "^4.4.0" + "webpack-chain" "^6.5.1" + "webpack-dev-server" "^3.11.2" + "webpackbar" "^5.0.0-3" + +"@fesjs/runtime@^2.0.2": + "integrity" "sha512-BE702ZyyeMdoBRBdUkCVEsnhIhjiLOIASBKWkn9JtqT5gthwZXG1DTwPwErAsqpjjZZiXmexVFV0RWLXBn+Z9w==" + "resolved" "https://registry.npmjs.org/@fesjs/runtime/-/runtime-2.0.2.tgz" + "version" "2.0.2" + dependencies: + "vue-router" "^4.0.1" + +"@fesjs/utils@^2.0.3", "@fesjs/utils@^2.0.4": + "integrity" "sha512-jnTs+ioRjYJ34LaoSzKeOQ5qgPJIYdeh+Uj9frjEJ2ZIIB+D+MyL82ciISRnnSs0XkocV5eNsb6Bs8p3/eEd4Q==" + "resolved" "https://registry.npmjs.org/@fesjs/utils/-/utils-2.0.4.tgz" + "version" "2.0.4" + dependencies: + "@babel/generator" "^7.15.0" + "@babel/parser" "^7.15.0" + "@babel/traverse" "^7.15.0" + "chalk" "^4.1.2" + "chokidar" "^3.5.2" + "crequire" "^1.8.1" + "debug" "^4.3.2" + "deepmerge" "^4.2.2" + "glob" "^7.1.7" + "lodash" "^4.17.21" + "mkdirp" "^1.0.4" + "mustache" "^4.2.0" + "pkg-up" "^3.1.0" + "portfinder" "^1.0.28" + "resolve" "^1.20.0" + "rimraf" "^3.0.2" + "semver" "^7.3.5" + "yargs-parser" "^20.2.9" + +"@form-create/ant-design-vue@^3.0.0-alpha.2": + "integrity" "sha512-PmtjReh+sE4JkKI91ex8hiOGwTdfFTWFCqzPaKnoa82s2/JVYIma7tXOFo3HzWl/iKkyuH62IzvKTpG6w+VpUQ==" + "resolved" "https://registry.npmjs.org/@form-create/ant-design-vue/-/ant-design-vue-3.0.0-alpha.2.tgz" + "version" "3.0.0-alpha.2" + dependencies: + "@form-create/component-antdv-frame" "^3.0.0-alpha.2" + 
"@form-create/component-antdv-group" "^3.0.0-alpha.2" + "@form-create/component-antdv-upload" "^3.0.0-alpha.1" + "@form-create/component-subform" "^3.0.0-alpha.2" + "@form-create/core" "^3.0.0-alpha.2" + "@form-create/utils" "^3.0.0-alpha.1" + +"@form-create/component-antdv-frame@^3.0.0-alpha.2": + "integrity" "sha512-xWl45AWkoK9Chz1uWIv9rPpahCS69fqUMrEl0YbJMoEVZmmOAVSMVRXHClTgUdE/TVQwjAF99gixwZvezIE/2w==" + "resolved" "https://registry.npmjs.org/@form-create/component-antdv-frame/-/component-antdv-frame-3.0.0-alpha.2.tgz" + "version" "3.0.0-alpha.2" + dependencies: + "@form-create/utils" "^3.0.0-alpha.1" + +"@form-create/component-antdv-group@^3.0.0-alpha.2": + "integrity" "sha512-PsQ5ocD1YkY0IRUmi80SlzHb0AFKnKBjQVAEVDsHgb3HnO63pDWEuGUBfI2ZDvpEsCSL22zYSUhuqku2CAznsw==" + "resolved" "https://registry.npmjs.org/@form-create/component-antdv-group/-/component-antdv-group-3.0.0-alpha.2.tgz" + "version" "3.0.0-alpha.2" + dependencies: + "@form-create/utils" "^3.0.0-alpha.1" + +"@form-create/component-antdv-upload@^3.0.0-alpha.1": + "integrity" "sha512-OdLH8FRwHrX1bSRJxvMMXiALKV59aj1xSyZ/ip5ihYTlwl4A9pSBrPgvprj1UW7lplLr4Gp/teH7c8Jn+Gy99g==" + "resolved" "https://registry.npmjs.org/@form-create/component-antdv-upload/-/component-antdv-upload-3.0.0-alpha.1.tgz" + "version" "3.0.0-alpha.1" + dependencies: + "@form-create/utils" "^3.0.0-alpha.1" + +"@form-create/component-subform@^3.0.0-alpha.2": + "integrity" "sha512-AdqeWdyL+YtnbUpYz10fX9zTMrz3jz6U5hi+5kCbVrFBoug7ZjXP2qBiNZ0BRyHqXUqIpPv6iOt4Wwk/Jw4q8Q==" + "resolved" "https://registry.npmjs.org/@form-create/component-subform/-/component-subform-3.0.0-alpha.2.tgz" + "version" "3.0.0-alpha.2" + +"@form-create/core@^3.0.0-alpha.2": + "integrity" "sha512-s8lnWJ4CQotczKba8+RdL+VmvyBxurp3YTEaeYyEHNLA67u8gQ6P03Wr4+qJq2nysLE9jmb5tD8fZREdoelFTg==" + "resolved" "https://registry.npmjs.org/@form-create/core/-/core-3.0.0-alpha.2.tgz" + "version" "3.0.0-alpha.2" + dependencies: + "@form-create/utils" "^3.0.0-alpha.1" + +"@form-create/utils@^3.0.0-alpha.1": + "integrity" "sha512-UE4kkPdykZBveYVhPQrNJ5p3xHyYg2oyVDJwxctHw7WILeNZRDbsPlrjEx/KEcERU1avH3KMOt4toL24FmJ0tQ==" + "resolved" "https://registry.npmjs.org/@form-create/utils/-/utils-3.0.0-alpha.1.tgz" + "version" "3.0.0-alpha.1" + +"@hapi/hoek@^9.0.0": + "integrity" "sha512-gfta+H8aziZsm8pZa0vj04KO6biEiisppNgA1kbJvFrrWu9Vm7eaUEy76DIxsuTaWvti5fkJVhllWc6ZTE+Mdw==" + "resolved" "https://registry.npmjs.org/@hapi/hoek/-/hoek-9.2.1.tgz" + "version" "9.2.1" + +"@hapi/topo@^5.0.0": + "integrity" "sha512-foQZKJig7Ob0BMAYBfcJk8d77QtOe7Wo4ox7ff1lQYoNNAb6jwcY1ncdoy2e9wQZzvNy7ODZCYJkK8kzmcAnAg==" + "resolved" "https://registry.npmjs.org/@hapi/topo/-/topo-5.1.0.tgz" + "version" "5.1.0" + dependencies: + "@hapi/hoek" "^9.0.0" + +"@humanwhocodes/config-array@^0.5.0": + "integrity" "sha512-FagtKFz74XrTl7y6HCzQpwDfXP0yhxe9lHLD1UZxjvZIcbyRz8zTFF/yYNfSfzU414eDwZ1SrO0Qvtyf+wFMQg==" + "resolved" "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.5.0.tgz" + "version" "0.5.0" + dependencies: + "@humanwhocodes/object-schema" "^1.2.0" + "debug" "^4.1.1" + "minimatch" "^3.0.4" + +"@humanwhocodes/object-schema@^1.2.0": + "integrity" "sha512-wdppn25U8z/2yiaT6YGquE6X8sSv7hNMWSXYSSU1jGv/yd6XqjXgTDJ8KP4NgjTXfJ3GbRjeeb8RTV7a/VpM+w==" + "resolved" "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-1.2.0.tgz" + "version" "1.2.0" + +"@intlify/core-base@9.1.9": + "integrity" "sha512-x5T0p/Ja0S8hs5xs+ImKyYckVkL4CzcEXykVYYV6rcbXxJTe2o58IquSqX9bdncVKbRZP7GlBU1EcRaQEEJ+vw==" + "resolved" 
"https://registry.npmjs.org/@intlify/core-base/-/core-base-9.1.9.tgz" + "version" "9.1.9" + dependencies: + "@intlify/devtools-if" "9.1.9" + "@intlify/message-compiler" "9.1.9" + "@intlify/message-resolver" "9.1.9" + "@intlify/runtime" "9.1.9" + "@intlify/shared" "9.1.9" + "@intlify/vue-devtools" "9.1.9" + +"@intlify/devtools-if@9.1.9": + "integrity" "sha512-oKSMKjttG3Ut/1UGEZjSdghuP3fwA15zpDPcjkf/1FjlOIm6uIBGMNS5jXzsZy593u+P/YcnrZD6cD3IVFz9vQ==" + "resolved" "https://registry.npmjs.org/@intlify/devtools-if/-/devtools-if-9.1.9.tgz" + "version" "9.1.9" + dependencies: + "@intlify/shared" "9.1.9" + +"@intlify/message-compiler@9.1.9": + "integrity" "sha512-6YgCMF46Xd0IH2hMRLCssZI3gFG4aywidoWQ3QP4RGYQXQYYfFC54DxhSgfIPpVoPLQ+4AD29eoYmhiHZ+qLFQ==" + "resolved" "https://registry.npmjs.org/@intlify/message-compiler/-/message-compiler-9.1.9.tgz" + "version" "9.1.9" + dependencies: + "@intlify/message-resolver" "9.1.9" + "@intlify/shared" "9.1.9" + "source-map" "0.6.1" + +"@intlify/message-resolver@9.1.9": + "integrity" "sha512-Lx/DBpigeK0sz2BBbzv5mu9/dAlt98HxwbG7xLawC3O2xMF9MNWU5FtOziwYG6TDIjNq0O/3ZbOJAxwITIWXEA==" + "resolved" "https://registry.npmjs.org/@intlify/message-resolver/-/message-resolver-9.1.9.tgz" + "version" "9.1.9" + +"@intlify/runtime@9.1.9": + "integrity" "sha512-XgPw8+UlHCiie3fI41HPVa/VDJb3/aSH7bLhY1hJvlvNV713PFtb4p4Jo+rlE0gAoMsMCGcsiT982fImolSltg==" + "resolved" "https://registry.npmjs.org/@intlify/runtime/-/runtime-9.1.9.tgz" + "version" "9.1.9" + dependencies: + "@intlify/message-compiler" "9.1.9" + "@intlify/message-resolver" "9.1.9" + "@intlify/shared" "9.1.9" + +"@intlify/shared@9.1.9": + "integrity" "sha512-xKGM1d0EAxdDFCWedcYXOm6V5Pfw/TMudd6/qCdEb4tv0hk9EKeg7lwQF1azE0dP2phvx0yXxrt7UQK+IZjNdw==" + "resolved" "https://registry.npmjs.org/@intlify/shared/-/shared-9.1.9.tgz" + "version" "9.1.9" + +"@intlify/vue-devtools@9.1.9": + "integrity" "sha512-YPehH9uL4vZcGXky4Ev5qQIITnHKIvsD2GKGXgqf+05osMUI6WSEQHaN9USRa318Rs8RyyPCiDfmA0hRu3k7og==" + "resolved" "https://registry.npmjs.org/@intlify/vue-devtools/-/vue-devtools-9.1.9.tgz" + "version" "9.1.9" + dependencies: + "@intlify/message-resolver" "9.1.9" + "@intlify/runtime" "9.1.9" + "@intlify/shared" "9.1.9" + +"@jridgewell/gen-mapping@^0.1.0": + "integrity" "sha512-sQXCasFk+U8lWYEe66WxRDOE9PjVz4vSM51fTu3Hw+ClTpUSQb718772vH3pyS5pShp6lvQM7SxgIDXXXmOX7w==" + "resolved" "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.1.1.tgz" + "version" "0.1.1" + dependencies: + "@jridgewell/set-array" "^1.0.0" + "@jridgewell/sourcemap-codec" "^1.4.10" + +"@jridgewell/gen-mapping@^0.3.0": + "integrity" "sha512-mh65xKQAzI6iBcFzwv28KVWSmCkdRBWoOh+bYQGW3+6OZvbbN3TqMGo5hqYxQniRcH9F2VZIoJCm4pa3BPDK/A==" + "resolved" "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.2.tgz" + "version" "0.3.2" + dependencies: + "@jridgewell/set-array" "^1.0.1" + "@jridgewell/sourcemap-codec" "^1.4.10" + "@jridgewell/trace-mapping" "^0.3.9" + +"@jridgewell/resolve-uri@^3.0.3": + "integrity" "sha512-8cXDaBBHOr2pQ7j77Y6Vp5VDT2sIqWyWQ56TjEq4ih/a4iST3dItRe8Q9fp0rrIl9DoKhWQtUQz/YpOxLkXbNA==" + "resolved" "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.0.7.tgz" + "version" "3.0.7" + +"@jridgewell/set-array@^1.0.0", "@jridgewell/set-array@^1.0.1": + "integrity" "sha512-Ct5MqZkLGEXTVmQYbGtx9SVqD2fqwvdubdps5D3djjAkgkKwT918VNOz65pEHFaYTeWcukmJmH5SwsA9Tn2ObQ==" + "resolved" "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.1.1.tgz" + "version" "1.1.1" + +"@jridgewell/source-map@^0.3.2": + "integrity" 
"sha512-m7O9o2uR8k2ObDysZYzdfhb08VuEml5oWGiosa1VdaPZ/A6QyPkAJuwN0Q1lhULOf6B7MtQmHENS743hWtCrgw==" + "resolved" "https://registry.npmjs.org/@jridgewell/source-map/-/source-map-0.3.2.tgz" + "version" "0.3.2" + dependencies: + "@jridgewell/gen-mapping" "^0.3.0" + "@jridgewell/trace-mapping" "^0.3.9" + +"@jridgewell/sourcemap-codec@^1.4.10": + "integrity" "sha512-GryiOJmNcWbovBxTfZSF71V/mXbgcV3MewDe3kIMCLyIh5e7SKAeUZs+rMnJ8jkMolZ/4/VsdBmMrw3l+VdZ3w==" + "resolved" "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.13.tgz" + "version" "1.4.13" + +"@jridgewell/trace-mapping@^0.3.9": + "integrity" "sha512-o1xbKhp9qnIAoHJSWd6KlCZfqslL4valSF81H8ImioOAxluWYWOpWkpyktY2vnt4tbrX9XYaxovq6cgowaJp2w==" + "resolved" "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.13.tgz" + "version" "0.3.13" + dependencies: + "@jridgewell/resolve-uri" "^3.0.3" + "@jridgewell/sourcemap-codec" "^1.4.10" + +"@nodelib/fs.scandir@2.1.5": + "integrity" "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==" + "resolved" "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz" + "version" "2.1.5" + dependencies: + "@nodelib/fs.stat" "2.0.5" + "run-parallel" "^1.1.9" + +"@nodelib/fs.stat@^2.0.2", "@nodelib/fs.stat@2.0.5": + "integrity" "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==" + "resolved" "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz" + "version" "2.0.5" + +"@nodelib/fs.walk@^1.2.3": + "integrity" "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==" + "resolved" "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz" + "version" "1.2.8" + dependencies: + "@nodelib/fs.scandir" "2.1.5" + "fastq" "^1.6.0" + +"@polka/url@^1.0.0-next.20": + "integrity" "sha512-a5Sab1C4/icpTZVzZc5Ghpz88yQtGOyNqYXcZgOssB2uuAr+wF/MvN6bgtW32q7HHrvBki+BsZ0OuNv6EV3K9g==" + "resolved" "https://registry.npmjs.org/@polka/url/-/url-1.0.0-next.21.tgz" + "version" "1.0.0-next.21" + +"@sideway/address@^4.1.0": + "integrity" "sha512-idTz8ibqWFrPU8kMirL0CoPH/A29XOzzAzpyN3zQ4kAWnzmNfFmRaoMNN6VI8ske5M73HZyhIaW4OuSFIdM4oA==" + "resolved" "https://registry.npmjs.org/@sideway/address/-/address-4.1.2.tgz" + "version" "4.1.2" + dependencies: + "@hapi/hoek" "^9.0.0" + +"@sideway/formula@^3.0.0": + "integrity" "sha512-vHe7wZ4NOXVfkoRb8T5otiENVlT7a3IAiw7H5M2+GO+9CDgcVUUsX1zalAztCmwyOr2RUTGJdgB+ZvSVqmdHmg==" + "resolved" "https://registry.npmjs.org/@sideway/formula/-/formula-3.0.0.tgz" + "version" "3.0.0" + +"@sideway/pinpoint@^2.0.0": + "integrity" "sha512-RNiOoTPkptFtSVzQevY/yWtZwf/RxyVnPy/OcA9HBM3MlGDnBEYL5B41H0MTn0Uec8Hi+2qUtTfG2WWZBmMejQ==" + "resolved" "https://registry.npmjs.org/@sideway/pinpoint/-/pinpoint-2.0.0.tgz" + "version" "2.0.0" + +"@simonwep/pickr@~1.8.0": + "integrity" "sha512-/l5w8BIkrpP6n1xsetx9MWPWlU6OblN5YgZZphxan0Tq4BByTCETL6lyIeY8lagalS2Nbt4F2W034KHLIiunKA==" + "resolved" "https://registry.npmjs.org/@simonwep/pickr/-/pickr-1.8.2.tgz" + "version" "1.8.2" + dependencies: + "core-js" "^3.15.1" + "nanopop" "^2.1.0" + +"@soda/friendly-errors-webpack-plugin@^1.8.0": + "integrity" "sha512-h2ooWqP8XuFqTXT+NyAFbrArzfQA7R6HTezADrvD9Re8fxMLTPPniLdqVTdDaO0eIoLaAwKT+d6w+5GeTk7Vbg==" + "resolved" "https://registry.npmjs.org/@soda/friendly-errors-webpack-plugin/-/friendly-errors-webpack-plugin-1.8.1.tgz" + "version" "1.8.1" + dependencies: + "chalk" "^3.0.0" + "error-stack-parser" "^2.0.6" + "string-width" "^4.2.3" + "strip-ansi" 
"^6.0.1" + +"@trysound/sax@0.2.0": + "integrity" "sha512-L7z9BgrNEcYyUYtF+HaEfiS5ebkh9jXqbszz7pC0hRBPaatV0XjSD3+eHrpqFemQfgwiFF0QPIarnIihIDn7OA==" + "resolved" "https://registry.npmjs.org/@trysound/sax/-/sax-0.2.0.tgz" + "version" "0.2.0" + +"@types/eslint-scope@^3.7.0": + "integrity" "sha512-SCFeogqiptms4Fg29WpOTk5nHIzfpKCemSN63ksBQYKTcXoJEmJagV+DhVmbapZzY4/5YaOV1nZwrsU79fFm1g==" + "resolved" "https://registry.npmjs.org/@types/eslint-scope/-/eslint-scope-3.7.1.tgz" + "version" "3.7.1" + dependencies: + "@types/eslint" "*" + "@types/estree" "*" + +"@types/eslint@*": + "integrity" "sha512-KubbADPkfoU75KgKeKLsFHXnU4ipH7wYg0TRT33NK3N3yiu7jlFAAoygIWBV+KbuHx/G+AvuGX6DllnK35gfJA==" + "resolved" "https://registry.npmjs.org/@types/eslint/-/eslint-7.28.2.tgz" + "version" "7.28.2" + dependencies: + "@types/estree" "*" + "@types/json-schema" "*" + +"@types/estree@*", "@types/estree@^0.0.50": + "integrity" "sha512-C6N5s2ZFtuZRj54k2/zyRhNDjJwwcViAM3Nbm8zjBpbqAdZ00mr0CFxvSKeO8Y/e03WVFLpQMdHYVfUd6SB+Hw==" + "resolved" "https://registry.npmjs.org/@types/estree/-/estree-0.0.50.tgz" + "version" "0.0.50" + +"@types/glob@^7.1.1": + "integrity" "sha512-ZUxbzKl0IfJILTS6t7ip5fQQM/J3TJYubDm3nMbgubNNYS62eXeUpoLUC8/7fJNiFYHTrGPQn7hspDUzIHX3UA==" + "resolved" "https://registry.npmjs.org/@types/glob/-/glob-7.2.0.tgz" + "version" "7.2.0" + dependencies: + "@types/minimatch" "*" + "@types/node" "*" + +"@types/html-minifier-terser@^6.0.0": + "integrity" "sha512-NZwaaynfs1oIoLAV1vg18e7QMVDvw+6SQrdJc8w3BwUaoroVSf6EBj/Sk4PBWGxsq0dzhA2drbsuMC1/6C6KgQ==" + "resolved" "https://registry.npmjs.org/@types/html-minifier-terser/-/html-minifier-terser-6.0.0.tgz" + "version" "6.0.0" + +"@types/json-schema@*", "@types/json-schema@^7.0.5", "@types/json-schema@^7.0.8", "@types/json-schema@^7.0.9": + "integrity" "sha512-qcUXuemtEu+E5wZSJHNxUXeCZhAfXKQ41D+duX+VYPde7xyEVZci+/oXKJL13tnRs9lR2pr4fod59GT6/X1/yQ==" + "resolved" "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.9.tgz" + "version" "7.0.9" + +"@types/json5@^0.0.29": + "integrity" "sha1-7ihweulOEdK4J7y+UnC86n8+ce4=" + "resolved" "https://registry.npmjs.org/@types/json5/-/json5-0.0.29.tgz" + "version" "0.0.29" + +"@types/lodash@^4.14.165": + "integrity" "sha512-xZmuPTa3rlZoIbtDUyJKZQimJV3bxCmzMIO2c9Pz9afyDro6kr7R79GwcB6mRhuoPmV2p1Vb66WOJH7F886WKQ==" + "resolved" "https://registry.npmjs.org/@types/lodash/-/lodash-4.14.176.tgz" + "version" "4.14.176" + +"@types/minimatch@*": + "integrity" "sha512-K0VQKziLUWkVKiRVrx4a40iPaxTUefQmjtkQofBkYRcoaaL/8rhwDWww9qWbrgicNOgnpIsMxyNIUM4+n6dUIA==" + "resolved" "https://registry.npmjs.org/@types/minimatch/-/minimatch-5.1.2.tgz" + "version" "5.1.2" + +"@types/node@*": + "integrity" "sha512-TMgXmy0v2xWyuCSCJM6NCna2snndD8yvQF67J29ipdzMcsPa9u+o0tjF5+EQNdhcuZplYuouYqpc4zcd5I6amQ==" + "resolved" "https://registry.npmjs.org/@types/node/-/node-16.11.4.tgz" + "version" "16.11.4" + +"@types/parse-json@^4.0.0": + "integrity" "sha512-//oorEZjL6sbPcKUaCdIGlIUeH26mgzimjBB77G6XRgnDl/L5wOnpyBGRe/Mmf5CVW3PwEBE1NjiMZ/ssFh4wA==" + "resolved" "https://registry.npmjs.org/@types/parse-json/-/parse-json-4.0.0.tgz" + "version" "4.0.0" + +"@vue/babel-helper-vue-transform-on@^1.0.2": + "integrity" "sha512-hz4R8tS5jMn8lDq6iD+yWL6XNB699pGIVLk7WSJnn1dbpjaazsjZQkieJoRX6gW5zpYSCFqQ7jUquPNY65tQYA==" + "resolved" "https://registry.npmjs.org/@vue/babel-helper-vue-transform-on/-/babel-helper-vue-transform-on-1.0.2.tgz" + "version" "1.0.2" + +"@vue/babel-plugin-jsx@^1.0.2": + "integrity" 
"sha512-j2uVfZjnB5+zkcbc/zsOc0fSNGCMMjaEXP52wdwdIfn0qjFfEYpYZBFKFg+HHnQeJCVrjOeO0YxgaL7DMrym9w==" + "resolved" "https://registry.npmjs.org/@vue/babel-plugin-jsx/-/babel-plugin-jsx-1.1.1.tgz" + "version" "1.1.1" + dependencies: + "@babel/helper-module-imports" "^7.0.0" + "@babel/plugin-syntax-jsx" "^7.0.0" + "@babel/template" "^7.0.0" + "@babel/traverse" "^7.0.0" + "@babel/types" "^7.0.0" + "@vue/babel-helper-vue-transform-on" "^1.0.2" + "camelcase" "^6.0.0" + "html-tags" "^3.1.0" + "svg-tags" "^1.0.0" + +"@vue/compiler-core@3.2.20": + "integrity" "sha512-vcEXlKXoPwBXFP5aUTHN9GTZaDfwCofa9Yu9bbW2C5O/QSa9Esdt7OG4+0RRd3EHEMxUvEdj4RZrd/KpQeiJbA==" + "resolved" "https://registry.npmjs.org/@vue/compiler-core/-/compiler-core-3.2.20.tgz" + "version" "3.2.20" + dependencies: + "@babel/parser" "^7.15.0" + "@vue/shared" "3.2.20" + "estree-walker" "^2.0.2" + "source-map" "^0.6.1" + +"@vue/compiler-dom@3.2.20": + "integrity" "sha512-QnI77ec/JtV7R0YBbcVayYTDCRcI9OCbxiUQK6izVyqQO0658n0zQuoNwe+bYgtqnvGAIqTR3FShTd5y4oOjdg==" + "resolved" "https://registry.npmjs.org/@vue/compiler-dom/-/compiler-dom-3.2.20.tgz" + "version" "3.2.20" + dependencies: + "@vue/compiler-core" "3.2.20" + "@vue/shared" "3.2.20" + +"@vue/compiler-sfc@^3.0.5", "@vue/compiler-sfc@>=3.1.0", "@vue/compiler-sfc@3.2.20": + "integrity" "sha512-03aZo+6tQKiFLfunHKSPZvdK4Jsn/ftRCyaro8AQIWkuxJbvSosbKK6HTTn+D2c3nPScG155akJoxKENw7rftQ==" + "resolved" "https://registry.npmjs.org/@vue/compiler-sfc/-/compiler-sfc-3.2.20.tgz" + "version" "3.2.20" + dependencies: + "@babel/parser" "^7.15.0" + "@vue/compiler-core" "3.2.20" + "@vue/compiler-dom" "3.2.20" + "@vue/compiler-ssr" "3.2.20" + "@vue/ref-transform" "3.2.20" + "@vue/shared" "3.2.20" + "estree-walker" "^2.0.2" + "magic-string" "^0.25.7" + "postcss" "^8.1.10" + "source-map" "^0.6.1" + +"@vue/compiler-ssr@3.2.20": + "integrity" "sha512-rzzVVYivm+EjbfiGQvNeyiYZWzr6Hkej97RZLZvcumacQlnKv9176Xo9rRyeWwFbBlxmtNdrVMslRXtipMXk2w==" + "resolved" "https://registry.npmjs.org/@vue/compiler-ssr/-/compiler-ssr-3.2.20.tgz" + "version" "3.2.20" + dependencies: + "@vue/compiler-dom" "3.2.20" + "@vue/shared" "3.2.20" + +"@vue/devtools-api@^6.0.0-beta.18", "@vue/devtools-api@^6.0.0-beta.7": + "integrity" "sha512-ObzQhgkoVeoyKv+e8+tB/jQBL2smtk/NmC9OmFK8UqdDpoOdv/Kf9pyDWL+IFyM7qLD2C75rszJujvGSPSpGlw==" + "resolved" "https://registry.npmjs.org/@vue/devtools-api/-/devtools-api-6.0.0-beta.19.tgz" + "version" "6.0.0-beta.19" + +"@vue/reactivity@3.2.20": + "integrity" "sha512-nSmoLojUTk+H8HNTAkrUduB4+yIUBK2HPihJo2uXVSH4Spry6oqN6lFzE5zpLK+F27Sja+UqR9R1+/kIOsHV5w==" + "resolved" "https://registry.npmjs.org/@vue/reactivity/-/reactivity-3.2.20.tgz" + "version" "3.2.20" + dependencies: + "@vue/shared" "3.2.20" + +"@vue/ref-transform@3.2.20": + "integrity" "sha512-Y42d3PGlYZ1lXcF3dbd3+qU/C/a3wYEZ949fyOI5ptzkjDWlkfU6vn74fmOjsLjEcjs10BXK2qO99FqQIK2r1Q==" + "resolved" "https://registry.npmjs.org/@vue/ref-transform/-/ref-transform-3.2.20.tgz" + "version" "3.2.20" + dependencies: + "@babel/parser" "^7.15.0" + "@vue/compiler-core" "3.2.20" + "@vue/shared" "3.2.20" + "estree-walker" "^2.0.2" + "magic-string" "^0.25.7" + +"@vue/runtime-core@3.2.20": + "integrity" "sha512-d1xfUGhZPfiZzAN7SatStD4vRtT8deJSXib2+Cz3x0brjMWKxe32asQc154FF1E2fFgMCHtnfd4A90bQEzV4GQ==" + "resolved" "https://registry.npmjs.org/@vue/runtime-core/-/runtime-core-3.2.20.tgz" + "version" "3.2.20" + dependencies: + "@vue/reactivity" "3.2.20" + "@vue/shared" "3.2.20" + +"@vue/runtime-dom@3.2.20": + "integrity" 
"sha512-4TCvZMLhESWCFHFYgqN4QmMA/onnINAlUovhopjlS8ST27G1A8Z0tyxPzLoXLa+b5JrOpbMPheEMPvdKExTJig==" + "resolved" "https://registry.npmjs.org/@vue/runtime-dom/-/runtime-dom-3.2.20.tgz" + "version" "3.2.20" + dependencies: + "@vue/runtime-core" "3.2.20" + "@vue/shared" "3.2.20" + "csstype" "^2.6.8" + +"@vue/server-renderer@3.2.20": + "integrity" "sha512-viIbZGep9XabnrRcaxWIi00cOh1x21QYm2upIL5W0zqzTJ54VdTzpI+zi1osNp+VfRQDTHpV2U7H3Kn4ljYJvg==" + "resolved" "https://registry.npmjs.org/@vue/server-renderer/-/server-renderer-3.2.20.tgz" + "version" "3.2.20" + dependencies: + "@vue/compiler-ssr" "3.2.20" + "@vue/shared" "3.2.20" + +"@vue/shared@3.2.20": + "integrity" "sha512-FbpX+hD5BvXCQerEYO7jtAGHlhAkhTQ4KIV73kmLWNlawWhTiVuQxizgVb0BOkX5oG9cIRZ42EG++d/k/Efp0w==" + "resolved" "https://registry.npmjs.org/@vue/shared/-/shared-3.2.20.tgz" + "version" "3.2.20" + +"@webank/eslint-config-webank@0.3.0": + "integrity" "sha512-WH5FI2j+mfBhp7qZsWyotBKy8zRhGkjFr//cJZyAhr4TuWyc3wF9MBjRm7DWP/SzyYFlbELvTf330uUM3zRBLg==" + "resolved" "https://registry.npmjs.org/@webank/eslint-config-webank/-/eslint-config-webank-0.3.0.tgz" + "version" "0.3.0" + dependencies: + "@babel/eslint-parser" "^7.13.14" + "@babel/plugin-syntax-jsx" "^7.12.13" + "confusing-browser-globals" "^1.0.10" + "eslint" "^7.26.0" + "eslint-plugin-angular" "^4.0.1" + "eslint-plugin-html" "^6.1.2" + "eslint-plugin-import" "^2.22.1" + "eslint-plugin-node" "^11.1.0" + "eslint-plugin-vue" "^7.9.0" + "vue-eslint-parser" "^7.6.0" + +"@webassemblyjs/ast@1.11.1": + "integrity" "sha512-ukBh14qFLjxTQNTXocdyksN5QdM28S1CxHt2rdskFyL+xFV7VremuBLVbmCePj+URalXBENx/9Lm7lnhihtCSw==" + "resolved" "https://registry.npmjs.org/@webassemblyjs/ast/-/ast-1.11.1.tgz" + "version" "1.11.1" + dependencies: + "@webassemblyjs/helper-numbers" "1.11.1" + "@webassemblyjs/helper-wasm-bytecode" "1.11.1" + +"@webassemblyjs/floating-point-hex-parser@1.11.1": + "integrity" "sha512-iGRfyc5Bq+NnNuX8b5hwBrRjzf0ocrJPI6GWFodBFzmFnyvrQ83SHKhmilCU/8Jv67i4GJZBMhEzltxzcNagtQ==" + "resolved" "https://registry.npmjs.org/@webassemblyjs/floating-point-hex-parser/-/floating-point-hex-parser-1.11.1.tgz" + "version" "1.11.1" + +"@webassemblyjs/helper-api-error@1.11.1": + "integrity" "sha512-RlhS8CBCXfRUR/cwo2ho9bkheSXG0+NwooXcc3PAILALf2QLdFyj7KGsKRbVc95hZnhnERon4kW/D3SZpp6Tcg==" + "resolved" "https://registry.npmjs.org/@webassemblyjs/helper-api-error/-/helper-api-error-1.11.1.tgz" + "version" "1.11.1" + +"@webassemblyjs/helper-buffer@1.11.1": + "integrity" "sha512-gwikF65aDNeeXa8JxXa2BAk+REjSyhrNC9ZwdT0f8jc4dQQeDQ7G4m0f2QCLPJiMTTO6wfDmRmj/pW0PsUvIcA==" + "resolved" "https://registry.npmjs.org/@webassemblyjs/helper-buffer/-/helper-buffer-1.11.1.tgz" + "version" "1.11.1" + +"@webassemblyjs/helper-numbers@1.11.1": + "integrity" "sha512-vDkbxiB8zfnPdNK9Rajcey5C0w+QJugEglN0of+kmO8l7lDb77AnlKYQF7aarZuCrv+l0UvqL+68gSDr3k9LPQ==" + "resolved" "https://registry.npmjs.org/@webassemblyjs/helper-numbers/-/helper-numbers-1.11.1.tgz" + "version" "1.11.1" + dependencies: + "@webassemblyjs/floating-point-hex-parser" "1.11.1" + "@webassemblyjs/helper-api-error" "1.11.1" + "@xtuc/long" "4.2.2" + +"@webassemblyjs/helper-wasm-bytecode@1.11.1": + "integrity" "sha512-PvpoOGiJwXeTrSf/qfudJhwlvDQxFgelbMqtq52WWiXC6Xgg1IREdngmPN3bs4RoO83PnL/nFrxucXj1+BX62Q==" + "resolved" "https://registry.npmjs.org/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.11.1.tgz" + "version" "1.11.1" + +"@webassemblyjs/helper-wasm-section@1.11.1": + "integrity" 
"sha512-10P9No29rYX1j7F3EVPX3JvGPQPae+AomuSTPiF9eBQeChHI6iqjMIwR9JmOJXwpnn/oVGDk7I5IlskuMwU/pg==" + "resolved" "https://registry.npmjs.org/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.11.1.tgz" + "version" "1.11.1" + dependencies: + "@webassemblyjs/ast" "1.11.1" + "@webassemblyjs/helper-buffer" "1.11.1" + "@webassemblyjs/helper-wasm-bytecode" "1.11.1" + "@webassemblyjs/wasm-gen" "1.11.1" + +"@webassemblyjs/ieee754@1.11.1": + "integrity" "sha512-hJ87QIPtAMKbFq6CGTkZYJivEwZDbQUgYd3qKSadTNOhVY7p+gfP6Sr0lLRVTaG1JjFj+r3YchoqRYxNH3M0GQ==" + "resolved" "https://registry.npmjs.org/@webassemblyjs/ieee754/-/ieee754-1.11.1.tgz" + "version" "1.11.1" + dependencies: + "@xtuc/ieee754" "^1.2.0" + +"@webassemblyjs/leb128@1.11.1": + "integrity" "sha512-BJ2P0hNZ0u+Th1YZXJpzW6miwqQUGcIHT1G/sf72gLVD9DZ5AdYTqPNbHZh6K1M5VmKvFXwGSWZADz+qBWxeRw==" + "resolved" "https://registry.npmjs.org/@webassemblyjs/leb128/-/leb128-1.11.1.tgz" + "version" "1.11.1" + dependencies: + "@xtuc/long" "4.2.2" + +"@webassemblyjs/utf8@1.11.1": + "integrity" "sha512-9kqcxAEdMhiwQkHpkNiorZzqpGrodQQ2IGrHHxCy+Ozng0ofyMA0lTqiLkVs1uzTRejX+/O0EOT7KxqVPuXosQ==" + "resolved" "https://registry.npmjs.org/@webassemblyjs/utf8/-/utf8-1.11.1.tgz" + "version" "1.11.1" + +"@webassemblyjs/wasm-edit@1.11.1": + "integrity" "sha512-g+RsupUC1aTHfR8CDgnsVRVZFJqdkFHpsHMfJuWQzWU3tvnLC07UqHICfP+4XyL2tnr1amvl1Sdp06TnYCmVkA==" + "resolved" "https://registry.npmjs.org/@webassemblyjs/wasm-edit/-/wasm-edit-1.11.1.tgz" + "version" "1.11.1" + dependencies: + "@webassemblyjs/ast" "1.11.1" + "@webassemblyjs/helper-buffer" "1.11.1" + "@webassemblyjs/helper-wasm-bytecode" "1.11.1" + "@webassemblyjs/helper-wasm-section" "1.11.1" + "@webassemblyjs/wasm-gen" "1.11.1" + "@webassemblyjs/wasm-opt" "1.11.1" + "@webassemblyjs/wasm-parser" "1.11.1" + "@webassemblyjs/wast-printer" "1.11.1" + +"@webassemblyjs/wasm-gen@1.11.1": + "integrity" "sha512-F7QqKXwwNlMmsulj6+O7r4mmtAlCWfO/0HdgOxSklZfQcDu0TpLiD1mRt/zF25Bk59FIjEuGAIyn5ei4yMfLhA==" + "resolved" "https://registry.npmjs.org/@webassemblyjs/wasm-gen/-/wasm-gen-1.11.1.tgz" + "version" "1.11.1" + dependencies: + "@webassemblyjs/ast" "1.11.1" + "@webassemblyjs/helper-wasm-bytecode" "1.11.1" + "@webassemblyjs/ieee754" "1.11.1" + "@webassemblyjs/leb128" "1.11.1" + "@webassemblyjs/utf8" "1.11.1" + +"@webassemblyjs/wasm-opt@1.11.1": + "integrity" "sha512-VqnkNqnZlU5EB64pp1l7hdm3hmQw7Vgqa0KF/KCNO9sIpI6Fk6brDEiX+iCOYrvMuBWDws0NkTOxYEb85XQHHw==" + "resolved" "https://registry.npmjs.org/@webassemblyjs/wasm-opt/-/wasm-opt-1.11.1.tgz" + "version" "1.11.1" + dependencies: + "@webassemblyjs/ast" "1.11.1" + "@webassemblyjs/helper-buffer" "1.11.1" + "@webassemblyjs/wasm-gen" "1.11.1" + "@webassemblyjs/wasm-parser" "1.11.1" + +"@webassemblyjs/wasm-parser@1.11.1": + "integrity" "sha512-rrBujw+dJu32gYB7/Lup6UhdkPx9S9SnobZzRVL7VcBH9Bt9bCBLEuX/YXOOtBsOZ4NQrRykKhffRWHvigQvOA==" + "resolved" "https://registry.npmjs.org/@webassemblyjs/wasm-parser/-/wasm-parser-1.11.1.tgz" + "version" "1.11.1" + dependencies: + "@webassemblyjs/ast" "1.11.1" + "@webassemblyjs/helper-api-error" "1.11.1" + "@webassemblyjs/helper-wasm-bytecode" "1.11.1" + "@webassemblyjs/ieee754" "1.11.1" + "@webassemblyjs/leb128" "1.11.1" + "@webassemblyjs/utf8" "1.11.1" + +"@webassemblyjs/wast-printer@1.11.1": + "integrity" "sha512-IQboUWM4eKzWW+N/jij2sRatKMh99QEelo3Eb2q0qXkvPRISAj8Qxtmw5itwqK+TTkBuUIE45AxYPToqPtL5gg==" + "resolved" "https://registry.npmjs.org/@webassemblyjs/wast-printer/-/wast-printer-1.11.1.tgz" + "version" "1.11.1" + dependencies: + "@webassemblyjs/ast" 
"1.11.1" + "@xtuc/long" "4.2.2" + +"@xtuc/ieee754@^1.2.0": + "integrity" "sha512-DX8nKgqcGwsc0eJSqYt5lwP4DH5FlHnmuWWBRy7X0NcaGR0ZtuyeESgMwTYVEtxmsNGY+qit4QYT/MIYTOTPeA==" + "resolved" "https://registry.npmjs.org/@xtuc/ieee754/-/ieee754-1.2.0.tgz" + "version" "1.2.0" + +"@xtuc/long@4.2.2": + "integrity" "sha512-NuHqBY1PB/D8xU6s/thBgOAiAP7HOYDQ32+BFZILJ8ivkUkAHQnWfn6WhL79Owj1qmUnoN/YPhktdIoucipkAQ==" + "resolved" "https://registry.npmjs.org/@xtuc/long/-/long-4.2.2.tgz" + "version" "4.2.2" + +"accepts@~1.3.4", "accepts@~1.3.5", "accepts@~1.3.8": + "integrity" "sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw==" + "resolved" "https://registry.npmjs.org/accepts/-/accepts-1.3.8.tgz" + "version" "1.3.8" + dependencies: + "mime-types" "~2.1.34" + "negotiator" "0.6.3" + +"acorn-import-assertions@^1.7.6": + "integrity" "sha512-m7VZ3jwz4eK6A4Vtt8Ew1/mNbP24u0FhdyfA7fSvnJR6LMdfOYnmuIrrJAgrYfYJ10F/otaHTtrtrtmHdMNzEw==" + "resolved" "https://registry.npmjs.org/acorn-import-assertions/-/acorn-import-assertions-1.8.0.tgz" + "version" "1.8.0" + +"acorn-jsx@^5.2.0", "acorn-jsx@^5.3.1": + "integrity" "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==" + "resolved" "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz" + "version" "5.3.2" + +"acorn-walk@^8.0.0": + "integrity" "sha512-k+iyHEuPgSw6SbuDpGQM+06HQUa04DZ3o+F6CSzXMvvI5KMvnaEqXe+YVe555R9nn6GPt404fos4wcgpw12SDA==" + "resolved" "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.2.0.tgz" + "version" "8.2.0" + +"acorn@^6.0.0 || ^7.0.0 || ^8.0.0", "acorn@^7.1.1", "acorn@^7.4.0": + "integrity" "sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A==" + "resolved" "https://registry.npmjs.org/acorn/-/acorn-7.4.1.tgz" + "version" "7.4.1" + +"acorn@^8.0.4": + "integrity" "sha512-yXbYeFy+jUuYd3/CDcg2NkIYE991XYX/bje7LmjJigUciaeO1JR4XxXgCIV1/Zc/dRuFEyw1L0pbA+qynJkW5Q==" + "resolved" "https://registry.npmjs.org/acorn/-/acorn-8.5.0.tgz" + "version" "8.5.0" + +"acorn@^8.5.0": + "integrity" "sha512-QOxyigPVrpZ2GXT+PFyZTl6TtOFc5egxHIP9IlQ+RbupQuX4RkT/Bee4/kQuC02Xkzg84JcT7oLYtDIQxp+v7w==" + "resolved" "https://registry.npmjs.org/acorn/-/acorn-8.8.0.tgz" + "version" "8.8.0" + +"acorn@^8", "acorn@^8.4.1": + "integrity" "sha512-yXbYeFy+jUuYd3/CDcg2NkIYE991XYX/bje7LmjJigUciaeO1JR4XxXgCIV1/Zc/dRuFEyw1L0pbA+qynJkW5Q==" + "resolved" "https://registry.npmjs.org/acorn/-/acorn-8.5.0.tgz" + "version" "8.5.0" + +"ajv-errors@^1.0.0": + "integrity" "sha512-DCRfO/4nQ+89p/RK43i8Ezd41EqdGIU4ld7nGF8OQ14oc/we5rEntLCUa7+jrn3nn83BosfwZA0wb4pon2o8iQ==" + "resolved" "https://registry.npmjs.org/ajv-errors/-/ajv-errors-1.0.1.tgz" + "version" "1.0.1" + +"ajv-formats@^2.1.1": + "integrity" "sha512-Wx0Kx52hxE7C18hkMEggYlEifqWZtYaRgouJor+WMdPnQyEK13vgEWyVNup7SoeeoLMsr4kf5h6dOW11I15MUA==" + "resolved" "https://registry.npmjs.org/ajv-formats/-/ajv-formats-2.1.1.tgz" + "version" "2.1.1" + dependencies: + "ajv" "^8.0.0" + +"ajv-keywords@^3.1.0", "ajv-keywords@^3.5.2": + "integrity" "sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ==" + "resolved" "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-3.5.2.tgz" + "version" "3.5.2" + +"ajv-keywords@^5.0.0": + "integrity" "sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw==" + "resolved" "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-5.1.0.tgz" + "version" "5.1.0" + dependencies: + "fast-deep-equal" "^3.1.3" + 
+"ajv@^6.1.0", "ajv@^6.10.0", "ajv@^6.12.3", "ajv@^6.12.4", "ajv@^6.12.5", "ajv@^6.9.1", "ajv@>=5.0.0": + "integrity" "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==" + "resolved" "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz" + "version" "6.12.6" + dependencies: + "fast-deep-equal" "^3.1.1" + "fast-json-stable-stringify" "^2.0.0" + "json-schema-traverse" "^0.4.1" + "uri-js" "^4.2.2" + +"ajv@^8.0.0": + "integrity" "sha512-wGgprdCvMalC0BztXvitD2hC04YffAvtsUn93JbGXYLAtCUO4xd17mCCZQxUOItiBwZvJScWo8NIvQMQ71rdpg==" + "resolved" "https://registry.npmjs.org/ajv/-/ajv-8.11.0.tgz" + "version" "8.11.0" + dependencies: + "fast-deep-equal" "^3.1.1" + "json-schema-traverse" "^1.0.0" + "require-from-string" "^2.0.2" + "uri-js" "^4.2.2" + +"ajv@^8.0.1": + "integrity" "sha512-SMJOdDP6LqTkD0Uq8qLi+gMwSt0imXLSV080qFVwJCpH9U6Mb+SUGHAXM0KNbcBPguytWyvFxcHgMLe2D2XSpw==" + "resolved" "https://registry.npmjs.org/ajv/-/ajv-8.6.3.tgz" + "version" "8.6.3" + dependencies: + "fast-deep-equal" "^3.1.1" + "json-schema-traverse" "^1.0.0" + "require-from-string" "^2.0.2" + "uri-js" "^4.2.2" + +"ajv@^8.8.0", "ajv@^8.8.2": + "integrity" "sha512-wGgprdCvMalC0BztXvitD2hC04YffAvtsUn93JbGXYLAtCUO4xd17mCCZQxUOItiBwZvJScWo8NIvQMQ71rdpg==" + "resolved" "https://registry.npmjs.org/ajv/-/ajv-8.11.0.tgz" + "version" "8.11.0" + dependencies: + "fast-deep-equal" "^3.1.1" + "json-schema-traverse" "^1.0.0" + "require-from-string" "^2.0.2" + "uri-js" "^4.2.2" + +"alphanum-sort@^1.0.2": + "integrity" "sha1-l6ERlkmyEa0zaR2fn0hqjsn74KM=" + "resolved" "https://registry.npmjs.org/alphanum-sort/-/alphanum-sort-1.0.2.tgz" + "version" "1.0.2" + +"ansi-colors@^3.0.0": + "integrity" "sha512-hHUXGagefjN2iRrID63xckIvotOXOojhQKWIPUZ4mNUZ9nLZW+7FMNoE1lOkEhNWYsx/7ysGIuJYCiMAA9FnrA==" + "resolved" "https://registry.npmjs.org/ansi-colors/-/ansi-colors-3.2.4.tgz" + "version" "3.2.4" + +"ansi-colors@^4.1.1": + "integrity" "sha512-JoX0apGbHaUJBNl6yF+p6JAFYZ666/hhCGKN5t9QFjbJQKUU/g8MNbFDbvfrgKXvI1QpZplPOnwIo99lX/AAmA==" + "resolved" "https://registry.npmjs.org/ansi-colors/-/ansi-colors-4.1.1.tgz" + "version" "4.1.1" + +"ansi-escapes@^4.3.1": + "integrity" "sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ==" + "resolved" "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.2.tgz" + "version" "4.3.2" + dependencies: + "type-fest" "^0.21.3" + +"ansi-html-community@0.0.8": + "integrity" "sha512-1APHAyr3+PCamwNw3bXCPp4HFLONZt/yIH0sZp0/469KWNTEy+qN5jQ3GVX6DMZ1UXAi34yVwtTeaG/HpBuuzw==" + "resolved" "https://registry.npmjs.org/ansi-html-community/-/ansi-html-community-0.0.8.tgz" + "version" "0.0.8" + +"ansi-regex@^2.0.0": + "integrity" "sha1-w7M6te42DYbg5ijwRorn7yfWVN8=" + "resolved" "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz" + "version" "2.1.1" + +"ansi-regex@^4.1.0": + "integrity" "sha512-ILlv4k/3f6vfQ4OoP2AGvirOktlQ98ZEL1k9FaQjxa3L1abBgbuTDAdPOpvbGncC0BTVQrl+OM8xZGK6tWXt7g==" + "resolved" "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.1.tgz" + "version" "4.1.1" + +"ansi-regex@^5.0.1": + "integrity" "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==" + "resolved" "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz" + "version" "5.0.1" + +"ansi-styles@^3.2.0", "ansi-styles@^3.2.1": + "integrity" "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==" + "resolved" "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz" + "version" 
"3.2.1" + dependencies: + "color-convert" "^1.9.0" + +"ansi-styles@^4.0.0": + "integrity" "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==" + "resolved" "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz" + "version" "4.3.0" + dependencies: + "color-convert" "^2.0.1" + +"ansi-styles@^4.1.0": + "integrity" "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==" + "resolved" "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz" + "version" "4.3.0" + dependencies: + "color-convert" "^2.0.1" + +"ant-design-vue@^2.2.0", "ant-design-vue@^2.2.7": + "integrity" "sha512-3graq9/gCfJQs6hznrHV6sa9oDmk/D1H3Oo0vLdVpPS/I61fZPk8NEyNKCHpNA6fT2cx6xx9U3QS63uuyikg/Q==" + "resolved" "https://registry.npmjs.org/ant-design-vue/-/ant-design-vue-2.2.8.tgz" + "version" "2.2.8" + dependencies: + "@ant-design/icons-vue" "^6.0.0" + "@babel/runtime" "^7.10.5" + "@simonwep/pickr" "~1.8.0" + "array-tree-filter" "^2.1.0" + "async-validator" "^3.3.0" + "dom-align" "^1.12.1" + "dom-scroll-into-view" "^2.0.0" + "lodash" "^4.17.21" + "lodash-es" "^4.17.15" + "moment" "^2.27.0" + "omit.js" "^2.0.0" + "resize-observer-polyfill" "^1.5.1" + "scroll-into-view-if-needed" "^2.2.25" + "shallow-equal" "^1.0.0" + "vue-types" "^3.0.0" + "warning" "^4.0.0" + +"any-promise@^1.0.0": + "integrity" "sha1-q8av7tzqUugJzcA3au0845Y10X8=" + "resolved" "https://registry.npmjs.org/any-promise/-/any-promise-1.3.0.tgz" + "version" "1.3.0" + +"anymatch@^2.0.0": + "integrity" "sha512-5teOsQWABXHHBFP9y3skS5P3d/WfWXpv3FUpy+LorMrNYaT9pI4oLMQX7jzQ2KklNpGpWHzdCXTDT2Y3XGlZBw==" + "resolved" "https://registry.npmjs.org/anymatch/-/anymatch-2.0.0.tgz" + "version" "2.0.0" + dependencies: + "micromatch" "^3.1.4" + "normalize-path" "^2.1.1" + +"anymatch@~3.1.2": + "integrity" "sha512-P43ePfOAIupkguHUycrc4qJ9kz8ZiuOUijaETwX7THt0Y/GNK7v0aa8rY816xWjZ7rJdA5XdMcpVFTKMq+RvWg==" + "resolved" "https://registry.npmjs.org/anymatch/-/anymatch-3.1.2.tgz" + "version" "3.1.2" + dependencies: + "normalize-path" "^3.0.0" + "picomatch" "^2.0.4" + +"argparse@^1.0.7": + "integrity" "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==" + "resolved" "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz" + "version" "1.0.10" + dependencies: + "sprintf-js" "~1.0.2" + +"arr-diff@^4.0.0": + "integrity" "sha512-YVIQ82gZPGBebQV/a8dar4AitzCQs0jjXwMPZllpXMaGjXPYVUawSxQrRsjhjupyVxEvbHgUmIhKVlND+j02kA==" + "resolved" "https://registry.npmjs.org/arr-diff/-/arr-diff-4.0.0.tgz" + "version" "4.0.0" + +"arr-flatten@^1.1.0": + "integrity" "sha512-L3hKV5R/p5o81R7O02IGnwpDmkp6E982XhtbuwSe3O4qOtMMMtodicASA1Cny2U+aCXcNpml+m4dPsvsJ3jatg==" + "resolved" "https://registry.npmjs.org/arr-flatten/-/arr-flatten-1.1.0.tgz" + "version" "1.1.0" + +"arr-union@^3.1.0": + "integrity" "sha512-sKpyeERZ02v1FeCZT8lrfJq5u6goHCtpTAzPwJYe7c8SPFOboNjNg1vz2L4VTn9T4PQxEx13TbXLmYUcS6Ug7Q==" + "resolved" "https://registry.npmjs.org/arr-union/-/arr-union-3.1.0.tgz" + "version" "3.1.0" + +"array-flatten@^2.1.0": + "integrity" "sha512-hNfzcOV8W4NdualtqBFPyVO+54DSJuZGY9qT4pRroB6S9e3iiido2ISIC5h9R2sPJ8H3FHCIiEnsv1lPXO3KtQ==" + "resolved" "https://registry.npmjs.org/array-flatten/-/array-flatten-2.1.2.tgz" + "version" "2.1.2" + +"array-flatten@1.1.1": + "integrity" "sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg==" + "resolved" "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz" + "version" "1.1.1" + 
+"array-includes@^3.1.4": + "integrity" "sha512-ZTNSQkmWumEbiHO2GF4GmWxYVTiQyJy2XOTa15sdQSrvKn7l+180egQMqlrMOUMCyLMD7pmyQe4mMDUT6Behrw==" + "resolved" "https://registry.npmjs.org/array-includes/-/array-includes-3.1.4.tgz" + "version" "3.1.4" + dependencies: + "call-bind" "^1.0.2" + "define-properties" "^1.1.3" + "es-abstract" "^1.19.1" + "get-intrinsic" "^1.1.1" + "is-string" "^1.0.7" + +"array-tree-filter@^2.1.0": + "integrity" "sha512-4ROwICNlNw/Hqa9v+rk5h22KjmzB1JGTMVKP2AKJBOCgb0yL0ASf0+YvCcLNNwquOHNX48jkeZIJ3a+oOQqKcw==" + "resolved" "https://registry.npmjs.org/array-tree-filter/-/array-tree-filter-2.1.0.tgz" + "version" "2.1.0" + +"array-union@^1.0.1": + "integrity" "sha512-Dxr6QJj/RdU/hCaBjOfxW+q6lyuVE6JFWIrAUpuOOhoJJoQ99cUn3igRaHVB5P9WrgFVN0FfArM3x0cueOU8ng==" + "resolved" "https://registry.npmjs.org/array-union/-/array-union-1.0.2.tgz" + "version" "1.0.2" + dependencies: + "array-uniq" "^1.0.1" + +"array-union@^3.0.1": + "integrity" "sha512-1OvF9IbWwaeiM9VhzYXVQacMibxpXOMYVNIvMtKRyX9SImBXpKcFr8XvFDeEslCyuH/t6KRt7HEO94AlP8Iatw==" + "resolved" "https://registry.npmjs.org/array-union/-/array-union-3.0.1.tgz" + "version" "3.0.1" + +"array-uniq@^1.0.1": + "integrity" "sha512-MNha4BWQ6JbwhFhj03YK552f7cb3AzoE8SzeljgChvL1dl3IcvggXVz1DilzySZkCja+CXuZbdW7yATchWn8/Q==" + "resolved" "https://registry.npmjs.org/array-uniq/-/array-uniq-1.0.3.tgz" + "version" "1.0.3" + +"array-unique@^0.3.2": + "integrity" "sha512-SleRWjh9JUud2wH1hPs9rZBZ33H6T9HOiL0uwGnGx9FpE6wKGyfWugmbkEOIs6qWrZhg0LWeLziLrEwQJhs5mQ==" + "resolved" "https://registry.npmjs.org/array-unique/-/array-unique-0.3.2.tgz" + "version" "0.3.2" + +"array.prototype.flat@^1.2.5": + "integrity" "sha512-KaYU+S+ndVqyUnignHftkwc58o3uVU1jzczILJ1tN2YaIZpFIKBiP/x/j97E5MVPsaCloPbqWLB/8qCTVvT2qg==" + "resolved" "https://registry.npmjs.org/array.prototype.flat/-/array.prototype.flat-1.2.5.tgz" + "version" "1.2.5" + dependencies: + "call-bind" "^1.0.2" + "define-properties" "^1.1.3" + "es-abstract" "^1.19.0" + +"asap@~2.0.3": + "integrity" "sha1-5QNHYR1+aQlDIIu9r+vLwvuGbUY=" + "resolved" "https://registry.npmjs.org/asap/-/asap-2.0.6.tgz" + "version" "2.0.6" + +"asn1@~0.2.3": + "integrity" "sha512-jxwzQpLQjSmWXgwaCZE9Nz+glAG01yF1QnWgbhGwHI5A6FRIEY6IVqtHhIepHqI7/kyEyQEagBC5mBEFlIYvdg==" + "resolved" "https://registry.npmjs.org/asn1/-/asn1-0.2.4.tgz" + "version" "0.2.4" + dependencies: + "safer-buffer" "~2.1.0" + +"assert-plus@^1.0.0", "assert-plus@1.0.0": + "integrity" "sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU=" + "resolved" "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz" + "version" "1.0.0" + +"assign-symbols@^1.0.0": + "integrity" "sha512-Q+JC7Whu8HhmTdBph/Tq59IoRtoy6KAm5zzPv00WdujX82lbAL8K7WVjne7vdCsAmbF4AYaDOPyO3k0kl8qIrw==" + "resolved" "https://registry.npmjs.org/assign-symbols/-/assign-symbols-1.0.0.tgz" + "version" "1.0.0" + +"astral-regex@^2.0.0": + "integrity" "sha512-Z7tMw1ytTXt5jqMcOP+OQteU1VuNK9Y02uuJtKQ1Sv69jXQKKg5cibLwGJow8yzZP+eAc18EmLGPal0bp36rvQ==" + "resolved" "https://registry.npmjs.org/astral-regex/-/astral-regex-2.0.0.tgz" + "version" "2.0.0" + +"async-each@^1.0.1": + "integrity" "sha512-z/WhQ5FPySLdvREByI2vZiTWwCnF0moMJ1hK9YQwDTHKh6I7/uSckMetoRGb5UBZPC1z0jlw+n/XCgjeH7y1AQ==" + "resolved" "https://registry.npmjs.org/async-each/-/async-each-1.0.3.tgz" + "version" "1.0.3" + +"async-limiter@~1.0.0": + "integrity" "sha512-csOlWGAcRFJaI6m+F2WKdnMKr4HhdhFVBk0H/QbJFMCr+uO2kwohwXQPxw/9OCxp05r5ghVBFSyioixx3gfkNQ==" + "resolved" "https://registry.npmjs.org/async-limiter/-/async-limiter-1.0.1.tgz" + "version" "1.0.1" + 
+"async-validator@^3.3.0": + "integrity" "sha512-8eLCg00W9pIRZSB781UUX/H6Oskmm8xloZfr09lz5bikRpBVDlJ3hRVuxxP1SxcwsEYfJ4IU8Q19Y8/893r3rQ==" + "resolved" "https://registry.npmjs.org/async-validator/-/async-validator-3.5.2.tgz" + "version" "3.5.2" + +"async@^2.6.2": + "integrity" "sha512-mzo5dfJYwAn29PeiJ0zvwTo04zj8HDJj0Mn8TD7sno7q12prdbnasKJHhkm2c1LgrhlJ0teaea8860oxi51mGA==" + "resolved" "https://registry.npmjs.org/async/-/async-2.6.4.tgz" + "version" "2.6.4" + dependencies: + "lodash" "^4.17.14" + +"asynckit@^0.4.0": + "integrity" "sha1-x57Zf380y48robyXkLzDZkdLS3k=" + "resolved" "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz" + "version" "0.4.0" + +"atob@^2.1.2": + "integrity" "sha512-Wm6ukoaOGJi/73p/cl2GvLjTI5JM1k/O14isD73YML8StrH/7/lRFgmg8nICZgD3bZZvjwCGxtMOD3wWNAu8cg==" + "resolved" "https://registry.npmjs.org/atob/-/atob-2.1.2.tgz" + "version" "2.1.2" + +"autoprefixer@^10.2.4": + "integrity" "sha512-EmGpu0nnQVmMhX8ROoJ7Mx8mKYPlcUHuxkwrRYEYMz85lu7H09v8w6R1P0JPdn/hKU32GjpLBFEOuIlDWCRWvg==" + "resolved" "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.3.7.tgz" + "version" "10.3.7" + dependencies: + "browserslist" "^4.17.3" + "caniuse-lite" "^1.0.30001264" + "fraction.js" "^4.1.1" + "normalize-range" "^0.1.2" + "picocolors" "^0.2.1" + "postcss-value-parser" "^4.1.0" + +"aws-sign2@~0.7.0": + "integrity" "sha1-tG6JCTSpWR8tL2+G1+ap8bP+dqg=" + "resolved" "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.7.0.tgz" + "version" "0.7.0" + +"aws4@^1.8.0": + "integrity" "sha512-xh1Rl34h6Fi1DC2WWKfxUTVqRsNnr6LsKz2+hfwDxQJWmrx8+c7ylaqBMcHfl1U1r2dsifOvKX3LQuLNZ+XSvA==" + "resolved" "https://registry.npmjs.org/aws4/-/aws4-1.11.0.tgz" + "version" "1.11.0" + +"axios@0.21.1": + "integrity" "sha512-dKQiRHxGD9PPRIUNIWvZhPTPpl1rf/OxTYKsqKUDjBwYylTvV7SjSHJb9ratfyzM6wCdLCOYLzs73qpg5c4iGA==" + "resolved" "https://registry.npmjs.org/axios/-/axios-0.21.1.tgz" + "version" "0.21.1" + dependencies: + "follow-redirects" "^1.10.0" + +"babel-loader@^8.2.2": + "integrity" "sha512-n4Zeta8NC3QAsuyiizu0GkmRcQ6clkV9WFUnUf1iXP//IeSKbWjofW3UHyZVwlOB4y039YQKefawyTn64Zwbuw==" + "resolved" "https://registry.npmjs.org/babel-loader/-/babel-loader-8.2.3.tgz" + "version" "8.2.3" + dependencies: + "find-cache-dir" "^3.3.1" + "loader-utils" "^1.4.0" + "make-dir" "^3.1.0" + "schema-utils" "^2.6.5" + +"babel-plugin-dynamic-import-node@^2.3.3": + "integrity" "sha512-jZVI+s9Zg3IqA/kdi0i6UDCybUI3aSBLnglhYbSSjKlV7yF1F/5LWv8MakQmvYpnbJDS6fcBL2KzHSxNCMtWSQ==" + "resolved" "https://registry.npmjs.org/babel-plugin-dynamic-import-node/-/babel-plugin-dynamic-import-node-2.3.3.tgz" + "version" "2.3.3" + dependencies: + "object.assign" "^4.1.0" + +"babel-plugin-import@1.13.3": + "integrity" "sha512-1qCWdljJOrDRH/ybaCZuDgySii4yYrtQ8OJQwrcDqdt0y67N30ng3X3nABg6j7gR7qUJgcMa9OMhc4AGViDwWw==" + "resolved" "https://registry.npmjs.org/babel-plugin-import/-/babel-plugin-import-1.13.3.tgz" + "version" "1.13.3" + dependencies: + "@babel/helper-module-imports" "^7.0.0" + "@babel/runtime" "^7.0.0" + +"babel-plugin-polyfill-corejs2@^0.2.2": + "integrity" "sha512-kISrENsJ0z5dNPq5eRvcctITNHYXWOA4DUZRFYCz3jYCcvTb/A546LIddmoGNMVYg2U38OyFeNosQwI9ENTqIQ==" + "resolved" "https://registry.npmjs.org/babel-plugin-polyfill-corejs2/-/babel-plugin-polyfill-corejs2-0.2.2.tgz" + "version" "0.2.2" + dependencies: + "@babel/compat-data" "^7.13.11" + "@babel/helper-define-polyfill-provider" "^0.2.2" + "semver" "^6.1.1" + +"babel-plugin-polyfill-corejs3@^0.2.5": + "integrity" 
"sha512-ninF5MQNwAX9Z7c9ED+H2pGt1mXdP4TqzlHKyPIYmJIYz0N+++uwdM7RnJukklhzJ54Q84vA4ZJkgs7lu5vqcw==" + "resolved" "https://registry.npmjs.org/babel-plugin-polyfill-corejs3/-/babel-plugin-polyfill-corejs3-0.2.5.tgz" + "version" "0.2.5" + dependencies: + "@babel/helper-define-polyfill-provider" "^0.2.2" + "core-js-compat" "^3.16.2" + +"babel-plugin-polyfill-regenerator@^0.2.2": + "integrity" "sha512-Goy5ghsc21HgPDFtzRkSirpZVW35meGoTmTOb2bxqdl60ghub4xOidgNTHaZfQ2FaxQsKmwvXtOAkcIS4SMBWg==" + "resolved" "https://registry.npmjs.org/babel-plugin-polyfill-regenerator/-/babel-plugin-polyfill-regenerator-0.2.2.tgz" + "version" "0.2.2" + dependencies: + "@babel/helper-define-polyfill-provider" "^0.2.2" + +"balanced-match@^1.0.0": + "integrity" "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==" + "resolved" "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz" + "version" "1.0.2" + +"base@^0.11.1": + "integrity" "sha512-5T6P4xPgpp0YDFvSWwEZ4NoE3aM4QBQXDzmVbraCkFj8zHM+mba8SyqB5DbZWyR7mYHo6Y7BdQo3MoA4m0TeQg==" + "resolved" "https://registry.npmjs.org/base/-/base-0.11.2.tgz" + "version" "0.11.2" + dependencies: + "cache-base" "^1.0.1" + "class-utils" "^0.3.5" + "component-emitter" "^1.2.1" + "define-property" "^1.0.0" + "isobject" "^3.0.1" + "mixin-deep" "^1.2.0" + "pascalcase" "^0.1.1" + +"batch@0.6.1": + "integrity" "sha512-x+VAiMRL6UPkx+kudNvxTl6hB2XNNCG2r+7wixVfIYwu/2HKRXimwQyaumLjMveWvT2Hkd/cAJw+QBMfJ/EKVw==" + "resolved" "https://registry.npmjs.org/batch/-/batch-0.6.1.tgz" + "version" "0.6.1" + +"bcrypt-pbkdf@^1.0.0": + "integrity" "sha1-pDAdOJtqQ/m2f/PKEaP2Y342Dp4=" + "resolved" "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz" + "version" "1.0.2" + dependencies: + "tweetnacl" "^0.14.3" + +"big.js@^5.2.2": + "integrity" "sha512-vyL2OymJxmarO8gxMr0mhChsO9QGwhynfuu4+MHTAW6czfq9humCB7rKpUjDd9YUiDPU4mzpyupFSvOClAwbmQ==" + "resolved" "https://registry.npmjs.org/big.js/-/big.js-5.2.2.tgz" + "version" "5.2.2" + +"binary-extensions@^1.0.0": + "integrity" "sha512-Un7MIEDdUC5gNpcGDV97op1Ywk748MpHcFTHoYs6qnj1Z3j7I53VG3nwZhKzoBZmbdRNnb6WRdFlwl7tSDuZGw==" + "resolved" "https://registry.npmjs.org/binary-extensions/-/binary-extensions-1.13.1.tgz" + "version" "1.13.1" + +"binary-extensions@^2.0.0": + "integrity" "sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==" + "resolved" "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.2.0.tgz" + "version" "2.2.0" + +"body-parser@^1.19.0": + "integrity" "sha512-dhEPs72UPbDnAQJ9ZKMNTP6ptJaionhP5cBb541nXPlW60Jepo9RV/a4fX4XWW9CuFNK22krhrj1+rgzifNCsw==" + "resolved" "https://registry.npmjs.org/body-parser/-/body-parser-1.19.0.tgz" + "version" "1.19.0" + dependencies: + "bytes" "3.1.0" + "content-type" "~1.0.4" + "debug" "2.6.9" + "depd" "~1.1.2" + "http-errors" "1.7.2" + "iconv-lite" "0.4.24" + "on-finished" "~2.3.0" + "qs" "6.7.0" + "raw-body" "2.4.0" + "type-is" "~1.6.17" + +"body-parser@1.20.0": + "integrity" "sha512-DfJ+q6EPcGKZD1QWUjSpqp+Q7bDQTsQIF4zfUAtZ6qk+H/3/QRhg9CEp39ss+/T2vw0+HaidC0ecJj/DRLIaKg==" + "resolved" "https://registry.npmjs.org/body-parser/-/body-parser-1.20.0.tgz" + "version" "1.20.0" + dependencies: + "bytes" "3.1.2" + "content-type" "~1.0.4" + "debug" "2.6.9" + "depd" "2.0.0" + "destroy" "1.2.0" + "http-errors" "2.0.0" + "iconv-lite" "0.4.24" + "on-finished" "2.4.1" + "qs" "6.10.3" + "raw-body" "2.5.1" + "type-is" "~1.6.18" + "unpipe" "1.0.0" + +"bonjour@^3.5.0": + "integrity" 
"sha512-RaVTblr+OnEli0r/ud8InrU7D+G0y6aJhlxaLa6Pwty4+xoxboF1BsUI45tujvRpbj9dQVoglChqonGAsjEBYg==" + "resolved" "https://registry.npmjs.org/bonjour/-/bonjour-3.5.0.tgz" + "version" "3.5.0" + dependencies: + "array-flatten" "^2.1.0" + "deep-equal" "^1.0.1" + "dns-equal" "^1.0.0" + "dns-txt" "^2.0.2" + "multicast-dns" "^6.0.1" + "multicast-dns-service-types" "^1.1.0" + +"boolbase@^1.0.0": + "integrity" "sha1-aN/1++YMUes3cl6p4+0xDcwed24=" + "resolved" "https://registry.npmjs.org/boolbase/-/boolbase-1.0.0.tgz" + "version" "1.0.0" + +"brace-expansion@^1.1.7": + "integrity" "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==" + "resolved" "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz" + "version" "1.1.11" + dependencies: + "balanced-match" "^1.0.0" + "concat-map" "0.0.1" + +"braces@^2.3.1", "braces@^2.3.2": + "integrity" "sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w==" + "resolved" "https://registry.npmjs.org/braces/-/braces-2.3.2.tgz" + "version" "2.3.2" + dependencies: + "arr-flatten" "^1.1.0" + "array-unique" "^0.3.2" + "extend-shallow" "^2.0.1" + "fill-range" "^4.0.0" + "isobject" "^3.0.1" + "repeat-element" "^1.1.2" + "snapdragon" "^0.8.1" + "snapdragon-node" "^2.0.1" + "split-string" "^3.0.2" + "to-regex" "^3.0.1" + +"braces@^3.0.1", "braces@~3.0.2": + "integrity" "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==" + "resolved" "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz" + "version" "3.0.2" + dependencies: + "fill-range" "^7.0.1" + +"browserslist@^4.0.0", "browserslist@^4.14.5", "browserslist@^4.16.0", "browserslist@^4.16.6", "browserslist@^4.17.3", "browserslist@^4.17.5", "browserslist@^4.20.2": + "integrity" "sha512-NBhymBQl1zM0Y5dQT/O+xiLP9/rzOIQdKM/eMJBAq7yBgaB6krIYLGejrwVYnSHZdqjscB1SPuAjHwxjvN6Wdg==" + "resolved" "https://registry.npmjs.org/browserslist/-/browserslist-4.20.3.tgz" + "version" "4.20.3" + dependencies: + "caniuse-lite" "^1.0.30001332" + "electron-to-chromium" "^1.4.118" + "escalade" "^3.1.1" + "node-releases" "^2.0.3" + "picocolors" "^1.0.0" + +"buffer-from@^1.0.0": + "integrity" "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==" + "resolved" "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz" + "version" "1.1.2" + +"buffer-indexof@^1.0.0": + "integrity" "sha512-4/rOEg86jivtPTeOUUT61jJO1Ya1TrR/OkqCSZDyq84WJh3LuuiphBYJN+fm5xufIk4XAFcEwte/8WzC8If/1g==" + "resolved" "https://registry.npmjs.org/buffer-indexof/-/buffer-indexof-1.1.1.tgz" + "version" "1.1.1" + +"bytes@3.0.0": + "integrity" "sha512-pMhOfFDPiv9t5jjIXkHosWmkSyQbvsgEVNkz0ERHbuLh2T/7j4Mqqpz523Fe8MVY89KC6Sh/QfS2sM+SjgFDcw==" + "resolved" "https://registry.npmjs.org/bytes/-/bytes-3.0.0.tgz" + "version" "3.0.0" + +"bytes@3.1.0": + "integrity" "sha512-zauLjrfCG+xvoyaqLoV8bLVXXNGC4JqlxFCutSDWA6fJrTo2ZuvLYTqZ7aHBLZSMOopbzwv8f+wZcVzfVTI2Dg==" + "resolved" "https://registry.npmjs.org/bytes/-/bytes-3.1.0.tgz" + "version" "3.1.0" + +"bytes@3.1.2": + "integrity" "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==" + "resolved" "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz" + "version" "3.1.2" + +"cache-base@^1.0.1": + "integrity" "sha512-AKcdTnFSWATd5/GCPRxr2ChwIJ85CeyrEyjRHlKxQ56d4XJMGym0uAiKn0xbLOGOl3+yRpOTi484dVCEc5AUzQ==" + "resolved" "https://registry.npmjs.org/cache-base/-/cache-base-1.0.1.tgz" + "version" "1.0.1" + dependencies: + 
"collection-visit" "^1.0.0" + "component-emitter" "^1.2.1" + "get-value" "^2.0.6" + "has-value" "^1.0.0" + "isobject" "^3.0.1" + "set-value" "^2.0.0" + "to-object-path" "^0.3.0" + "union-value" "^1.0.0" + "unset-value" "^1.0.0" + +"call-bind@^1.0.0", "call-bind@^1.0.2": + "integrity" "sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==" + "resolved" "https://registry.npmjs.org/call-bind/-/call-bind-1.0.2.tgz" + "version" "1.0.2" + dependencies: + "function-bind" "^1.1.1" + "get-intrinsic" "^1.0.2" + +"callsites@^3.0.0": + "integrity" "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==" + "resolved" "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz" + "version" "3.1.0" + +"camel-case@^4.1.2": + "integrity" "sha512-gxGWBrTT1JuMx6R+o5PTXMmUnhnVzLQ9SNutD4YqKtI6ap897t3tKECYla6gCWEkplXnlNybEkZg9GEGxKFCgw==" + "resolved" "https://registry.npmjs.org/camel-case/-/camel-case-4.1.2.tgz" + "version" "4.1.2" + dependencies: + "pascal-case" "^3.1.2" + "tslib" "^2.0.3" + +"camelcase@^5.0.0": + "integrity" "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==" + "resolved" "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz" + "version" "5.3.1" + +"camelcase@^6.0.0": + "integrity" "sha512-c7wVvbw3f37nuobQNtgsgG9POC9qMbNuMQmTCqZv23b6MIz0fcYpBiOlv9gEN/hdLdnZTDQhg6e9Dq5M1vKvfg==" + "resolved" "https://registry.npmjs.org/camelcase/-/camelcase-6.2.0.tgz" + "version" "6.2.0" + +"caniuse-api@^3.0.0": + "integrity" "sha512-bsTwuIg/BZZK/vreVTYYbSWoe2F+71P7K5QGEX+pT250DZbfU1MQ5prOKpPR+LL6uWKK3KMwMCAS74QB3Um1uw==" + "resolved" "https://registry.npmjs.org/caniuse-api/-/caniuse-api-3.0.0.tgz" + "version" "3.0.0" + dependencies: + "browserslist" "^4.0.0" + "caniuse-lite" "^1.0.0" + "lodash.memoize" "^4.1.2" + "lodash.uniq" "^4.5.0" + +"caniuse-lite@^1.0.0", "caniuse-lite@^1.0.30001264", "caniuse-lite@^1.0.30001332": + "integrity" "sha512-jUNz+a9blQTQVu4uFcn17uAD8IDizPzQkIKh3LCJfg9BkyIqExYYdyc/ZSlWUSKb8iYiXxKsxbv4zYSvkqjrxw==" + "resolved" "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001340.tgz" + "version" "1.0.30001340" + +"caseless@~0.12.0": + "integrity" "sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw=" + "resolved" "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz" + "version" "0.12.0" + +"chalk@^2.0.0": + "integrity" "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==" + "resolved" "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz" + "version" "2.4.2" + dependencies: + "ansi-styles" "^3.2.1" + "escape-string-regexp" "^1.0.5" + "supports-color" "^5.3.0" + +"chalk@^3.0.0": + "integrity" "sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg==" + "resolved" "https://registry.npmjs.org/chalk/-/chalk-3.0.0.tgz" + "version" "3.0.0" + dependencies: + "ansi-styles" "^4.1.0" + "supports-color" "^7.1.0" + +"chalk@^4.0.0": + "integrity" "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==" + "resolved" "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz" + "version" "4.1.2" + dependencies: + "ansi-styles" "^4.1.0" + "supports-color" "^7.1.0" + +"chalk@^4.1.0": + "integrity" "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==" + "resolved" "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz" + "version" "4.1.2" + dependencies: + "ansi-styles" "^4.1.0" + "supports-color" "^7.1.0" + 
+"chalk@^4.1.2": + "integrity" "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==" + "resolved" "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz" + "version" "4.1.2" + dependencies: + "ansi-styles" "^4.1.0" + "supports-color" "^7.1.0" + +"chokidar@^2.1.8": + "integrity" "sha512-ZmZUazfOzf0Nve7duiCKD23PFSCs4JPoYyccjUFF3aQkQadqBhfzhjkwBH2mNOG9cTBwhamM37EIsIkZw3nRgg==" + "resolved" "https://registry.npmjs.org/chokidar/-/chokidar-2.1.8.tgz" + "version" "2.1.8" + dependencies: + "anymatch" "^2.0.0" + "async-each" "^1.0.1" + "braces" "^2.3.2" + "glob-parent" "^3.1.0" + "inherits" "^2.0.3" + "is-binary-path" "^1.0.0" + "is-glob" "^4.0.0" + "normalize-path" "^3.0.0" + "path-is-absolute" "^1.0.0" + "readdirp" "^2.2.1" + "upath" "^1.1.1" + optionalDependencies: + "fsevents" "^1.2.7" + +"chokidar@^3.5.2": + "integrity" "sha512-ekGhOnNVPgT77r4K/U3GDhu+FQ2S8TnK/s2KbIGXi0SZWuwkZ2QNyfWdZW+TVfn84DpEP7rLeCt2UI6bJ8GwbQ==" + "resolved" "https://registry.npmjs.org/chokidar/-/chokidar-3.5.2.tgz" + "version" "3.5.2" + dependencies: + "anymatch" "~3.1.2" + "braces" "~3.0.2" + "glob-parent" "~5.1.2" + "is-binary-path" "~2.1.0" + "is-glob" "~4.0.1" + "normalize-path" "~3.0.0" + "readdirp" "~3.6.0" + optionalDependencies: + "fsevents" "~2.3.2" + +"chrome-trace-event@^1.0.2": + "integrity" "sha512-p3KULyQg4S7NIHixdwbGX+nFHkoBiA4YQmyWtjb8XngSKV124nJmRysgAeujbUVb15vh+RvFUfCPqU7rXk+hZg==" + "resolved" "https://registry.npmjs.org/chrome-trace-event/-/chrome-trace-event-1.0.3.tgz" + "version" "1.0.3" + +"ci-info@^3.1.1": + "integrity" "sha512-dVqRX7fLUm8J6FgHJ418XuIgDLZDkYcDFTeL6TA2gt5WlIZUQrrH6EZrNClwT/H0FateUsZkGIOPRrLbP+PR9A==" + "resolved" "https://registry.npmjs.org/ci-info/-/ci-info-3.2.0.tgz" + "version" "3.2.0" + +"class-utils@^0.3.5": + "integrity" "sha512-qOhPa/Fj7s6TY8H8esGu5QNpMMQxz79h+urzrNYN6mn+9BnxlDGf5QZ+XeCDsxSjPqsSR56XOZOJmpeurnLMeg==" + "resolved" "https://registry.npmjs.org/class-utils/-/class-utils-0.3.6.tgz" + "version" "0.3.6" + dependencies: + "arr-union" "^3.1.0" + "define-property" "^0.2.5" + "isobject" "^3.0.0" + "static-extend" "^0.1.1" + +"clean-css@^5.1.5": + "integrity" "sha512-/eR8ru5zyxKzpBLv9YZvMXgTSSQn7AdkMItMYynsFgGwTveCRVam9IUPFloE85B4vAIj05IuKmmEoV7/AQjT0w==" + "resolved" "https://registry.npmjs.org/clean-css/-/clean-css-5.2.2.tgz" + "version" "5.2.2" + dependencies: + "source-map" "~0.6.0" + +"cli-highlight@^2.1.4": + "integrity" "sha512-9KDcoEVwyUXrjcJNvHD0NFc/hiwe/WPVYIleQh2O1N2Zro5gWJZ/K+3DGn8w8P/F6FxOgzyC5bxDyHIgCSPhGg==" + "resolved" "https://registry.npmjs.org/cli-highlight/-/cli-highlight-2.1.11.tgz" + "version" "2.1.11" + dependencies: + "chalk" "^4.0.0" + "highlight.js" "^10.7.1" + "mz" "^2.4.0" + "parse5" "^5.1.1" + "parse5-htmlparser2-tree-adapter" "^6.0.0" + "yargs" "^16.0.0" + +"cliui@^5.0.0": + "integrity" "sha512-PYeGSEmmHM6zvoef2w8TPzlrnNpXIjTipYK780YswmIP9vjxmd6Y2a3CB2Ks6/AU8NHjZugXvo8w3oWM2qnwXA==" + "resolved" "https://registry.npmjs.org/cliui/-/cliui-5.0.0.tgz" + "version" "5.0.0" + dependencies: + "string-width" "^3.1.0" + "strip-ansi" "^5.2.0" + "wrap-ansi" "^5.1.0" + +"cliui@^7.0.2", "cliui@7.0.4": + "integrity" "sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==" + "resolved" "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz" + "version" "7.0.4" + dependencies: + "string-width" "^4.2.0" + "strip-ansi" "^6.0.0" + "wrap-ansi" "^7.0.0" + +"clone-deep@^4.0.1": + "integrity" 
"sha512-neHB9xuzh/wk0dIHweyAXv2aPGZIVk3pLMe+/RNzINf17fe0OG96QroktYAUm7SM1PBnzTabaLboqqxDyMU+SQ==" + "resolved" "https://registry.npmjs.org/clone-deep/-/clone-deep-4.0.1.tgz" + "version" "4.0.1" + dependencies: + "is-plain-object" "^2.0.4" + "kind-of" "^6.0.2" + "shallow-clone" "^3.0.0" + +"clone@^2.1.2": + "integrity" "sha1-G39Ln1kfHo+DZwQBYANFoCiHQ18=" + "resolved" "https://registry.npmjs.org/clone/-/clone-2.1.2.tgz" + "version" "2.1.2" + +"collection-visit@^1.0.0": + "integrity" "sha512-lNkKvzEeMBBjUGHZ+q6z9pSJla0KWAQPvtzhEV9+iGyQYG+pBpl7xKDhxoNSOZH2hhv0v5k0y2yAM4o4SjoSkw==" + "resolved" "https://registry.npmjs.org/collection-visit/-/collection-visit-1.0.0.tgz" + "version" "1.0.0" + dependencies: + "map-visit" "^1.0.0" + "object-visit" "^1.0.0" + +"color-convert@^1.9.0": + "integrity" "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==" + "resolved" "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz" + "version" "1.9.3" + dependencies: + "color-name" "1.1.3" + +"color-convert@^2.0.1": + "integrity" "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==" + "resolved" "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz" + "version" "2.0.1" + dependencies: + "color-name" "~1.1.4" + +"color-name@~1.1.4": + "integrity" "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" + "resolved" "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz" + "version" "1.1.4" + +"color-name@1.1.3": + "integrity" "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=" + "resolved" "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz" + "version" "1.1.3" + +"colord@^2.0.1", "colord@^2.6": + "integrity" "sha512-4LBMSt09vR0uLnPVkOUBnmxgoaeN4ewRbx801wY/bXcltXfpR/G46OdWn96XpYmCWuYvO46aBZP4NgX8HpNAcw==" + "resolved" "https://registry.npmjs.org/colord/-/colord-2.9.1.tgz" + "version" "2.9.1" + +"colorette@^1.2.2": + "integrity" "sha512-Y2oEozpomLn7Q3HFP7dpww7AtMJplbM9lGZP6RDfHqmbeRjiwRg4n6VM6j4KLmRke85uWEI7JqF17f3pqdRA0g==" + "resolved" "https://registry.npmjs.org/colorette/-/colorette-1.4.0.tgz" + "version" "1.4.0" + +"combined-stream@^1.0.6", "combined-stream@~1.0.6": + "integrity" "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==" + "resolved" "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz" + "version" "1.0.8" + dependencies: + "delayed-stream" "~1.0.0" + +"commander@*", "commander@^7.0.0", "commander@^7.2.0": + "integrity" "sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw==" + "resolved" "https://registry.npmjs.org/commander/-/commander-7.2.0.tgz" + "version" "7.2.0" + +"commander@^2.20.0": + "integrity" "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==" + "resolved" "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz" + "version" "2.20.3" + +"commander@^8.1.0": + "integrity" "sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww==" + "resolved" "https://registry.npmjs.org/commander/-/commander-8.3.0.tgz" + "version" "8.3.0" + +"commondir@^1.0.1": + "integrity" "sha1-3dgA2gxmEnOTzKWVDqloo6rxJTs=" + "resolved" "https://registry.npmjs.org/commondir/-/commondir-1.0.1.tgz" + "version" "1.0.1" + +"component-emitter@^1.2.1": + "integrity" "sha512-Rd3se6QB+sO1TwqZjscQrurpEPIfO0/yYnSin6Q/rD3mOutHvUrCAhJub3r90uNb+SESBuE0QYoB90YdfatsRg==" + "resolved" 
"https://registry.npmjs.org/component-emitter/-/component-emitter-1.3.0.tgz" + "version" "1.3.0" + +"compressible@~2.0.16": + "integrity" "sha512-AF3r7P5dWxL8MxyITRMlORQNaOA2IkAFaTr4k7BUumjPtRpGDTZpl0Pb1XCO6JeDCBdp126Cgs9sMxqSjgYyRg==" + "resolved" "https://registry.npmjs.org/compressible/-/compressible-2.0.18.tgz" + "version" "2.0.18" + dependencies: + "mime-db" ">= 1.43.0 < 2" + +"compression@^1.7.4": + "integrity" "sha512-jaSIDzP9pZVS4ZfQ+TzvtiWhdpFhE2RDHz8QJkpX9SIpLq88VueF5jJw6t+6CUQcAoA6t+x89MLrWAqpfDE8iQ==" + "resolved" "https://registry.npmjs.org/compression/-/compression-1.7.4.tgz" + "version" "1.7.4" + dependencies: + "accepts" "~1.3.5" + "bytes" "3.0.0" + "compressible" "~2.0.16" + "debug" "2.6.9" + "on-headers" "~1.0.2" + "safe-buffer" "5.1.2" + "vary" "~1.1.2" + +"compute-scroll-into-view@^1.0.17": + "integrity" "sha512-j4dx+Fb0URmzbwwMUrhqWM2BEWHdFGx+qZ9qqASHRPqvTYdqvWnHg0H1hIbcyLnvgnoNAVMlwkepyqM3DaIFUg==" + "resolved" "https://registry.npmjs.org/compute-scroll-into-view/-/compute-scroll-into-view-1.0.17.tgz" + "version" "1.0.17" + +"concat-map@0.0.1": + "integrity" "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=" + "resolved" "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz" + "version" "0.0.1" + +"confusing-browser-globals@^1.0.10": + "integrity" "sha512-gNld/3lySHwuhaVluJUKLePYirM3QNCKzVxqAdhJII9/WXKVX5PURzMVJspS1jTslSqjeuG4KMVTSouit5YPHA==" + "resolved" "https://registry.npmjs.org/confusing-browser-globals/-/confusing-browser-globals-1.0.10.tgz" + "version" "1.0.10" + +"connect-history-api-fallback@^1.6.0": + "integrity" "sha512-e54B99q/OUoH64zYYRf3HBP5z24G38h5D3qXu23JGRoigpX5Ss4r9ZnDk3g0Z8uQC2x2lPaJ+UlWBc1ZWBWdLg==" + "resolved" "https://registry.npmjs.org/connect-history-api-fallback/-/connect-history-api-fallback-1.6.0.tgz" + "version" "1.6.0" + +"consola@^2.15.0": + "integrity" "sha512-9vAdYbHj6x2fLKC4+oPH0kFzY/orMZyG2Aj+kNylHxKGJ/Ed4dpNyAQYwJOdqO4zdM7XpVHmyejQDcQHrnuXbw==" + "resolved" "https://registry.npmjs.org/consola/-/consola-2.15.3.tgz" + "version" "2.15.3" + +"content-disposition@0.5.4": + "integrity" "sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ==" + "resolved" "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.4.tgz" + "version" "0.5.4" + dependencies: + "safe-buffer" "5.2.1" + +"content-type@~1.0.4": + "integrity" "sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA==" + "resolved" "https://registry.npmjs.org/content-type/-/content-type-1.0.4.tgz" + "version" "1.0.4" + +"convert-source-map@^1.7.0": + "integrity" "sha512-+OQdjP49zViI/6i7nIJpA8rAl4sV/JdPfU9nZs3VqOwGIgizICvuN2ru6fMd+4llL0tar18UYJXfZ/TWtmhUjA==" + "resolved" "https://registry.npmjs.org/convert-source-map/-/convert-source-map-1.8.0.tgz" + "version" "1.8.0" + dependencies: + "safe-buffer" "~5.1.1" + +"cookie-parser@^1.4.5": + "integrity" "sha512-f13bPUj/gG/5mDr+xLmSxxDsB9DQiTIfhJS/sqjrmfAWiAN+x2O4i/XguTL9yDZ+/IFDanJ+5x7hC4CXT9Tdzw==" + "resolved" "https://registry.npmjs.org/cookie-parser/-/cookie-parser-1.4.5.tgz" + "version" "1.4.5" + dependencies: + "cookie" "0.4.0" + "cookie-signature" "1.0.6" + +"cookie-signature@1.0.6": + "integrity" "sha1-4wOogrNCzD7oylE6eZmXNNqzriw=" + "resolved" "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz" + "version" "1.0.6" + +"cookie@0.4.0": + "integrity" "sha512-+Hp8fLp57wnUSt0tY0tHEXh4voZRDnoIrZPqlo3DPiI4y9lwg/jqx+1Om94/W6ZaPDOUbnjOt/99w66zk+l1Xg==" + "resolved" "https://registry.npmjs.org/cookie/-/cookie-0.4.0.tgz" + 
"version" "0.4.0" + +"cookie@0.5.0": + "integrity" "sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw==" + "resolved" "https://registry.npmjs.org/cookie/-/cookie-0.5.0.tgz" + "version" "0.5.0" + +"copy-descriptor@^0.1.0": + "integrity" "sha512-XgZ0pFcakEUlbwQEVNg3+QAis1FyTL3Qel9FYy8pSkQqoG3PNoT0bOCQtOXcOkur21r2Eq2kI+IE+gsmAEVlYw==" + "resolved" "https://registry.npmjs.org/copy-descriptor/-/copy-descriptor-0.1.1.tgz" + "version" "0.1.1" + +"copy-webpack-plugin@^10.2.4": + "integrity" "sha512-xFVltahqlsRcyyJqQbDY6EYTtyQZF9rf+JPjwHObLdPFMEISqkFkr7mFoVOC6BfYS/dNThyoQKvziugm+OnwBg==" + "resolved" "https://registry.npmjs.org/copy-webpack-plugin/-/copy-webpack-plugin-10.2.4.tgz" + "version" "10.2.4" + dependencies: + "fast-glob" "^3.2.7" + "glob-parent" "^6.0.1" + "globby" "^12.0.2" + "normalize-path" "^3.0.0" + "schema-utils" "^4.0.0" + "serialize-javascript" "^6.0.0" + +"core-js-compat@^3.16.0", "core-js-compat@^3.16.2": + "integrity" "sha512-R09rKZ56ccGBebjTLZHvzDxhz93YPT37gBm6qUhnwj3Kt7aCjjZWD1injyNbyeFHxNKfeZBSyds6O9n3MKq1sw==" + "resolved" "https://registry.npmjs.org/core-js-compat/-/core-js-compat-3.19.0.tgz" + "version" "3.19.0" + dependencies: + "browserslist" "^4.17.5" + "semver" "7.0.0" + +"core-js@^3.15.1", "core-js@^3.8.3": + "integrity" "sha512-L1TpFRWXZ76vH1yLM+z6KssLZrP8Z6GxxW4auoCj+XiViOzNPJCAuTIkn03BGdFe6Z5clX5t64wRIRypsZQrUg==" + "resolved" "https://registry.npmjs.org/core-js/-/core-js-3.19.0.tgz" + "version" "3.19.0" + +"core-util-is@~1.0.0", "core-util-is@1.0.2": + "integrity" "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=" + "resolved" "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz" + "version" "1.0.2" + +"cosmiconfig@^7.0.0": + "integrity" "sha512-a1YWNUV2HwGimB7dU2s1wUMurNKjpx60HxBB6xUM8Re+2s1g1IIfJvFR0/iCF+XHdE0GMTKTuLR32UQff4TEyQ==" + "resolved" "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-7.0.1.tgz" + "version" "7.0.1" + dependencies: + "@types/parse-json" "^4.0.0" + "import-fresh" "^3.2.1" + "parse-json" "^5.0.0" + "path-type" "^4.0.0" + "yaml" "^1.10.0" + +"crequire@^1.8.1": + "integrity" "sha1-rIHyBHhrXyARlOsWmM9EGxCktX0=" + "resolved" "https://registry.npmjs.org/crequire/-/crequire-1.8.1.tgz" + "version" "1.8.1" + +"cross-spawn@^6.0.0": + "integrity" "sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ==" + "resolved" "https://registry.npmjs.org/cross-spawn/-/cross-spawn-6.0.5.tgz" + "version" "6.0.5" + dependencies: + "nice-try" "^1.0.4" + "path-key" "^2.0.1" + "semver" "^5.5.0" + "shebang-command" "^1.2.0" + "which" "^1.2.9" + +"cross-spawn@^7.0.2": + "integrity" "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==" + "resolved" "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz" + "version" "7.0.3" + dependencies: + "path-key" "^3.1.0" + "shebang-command" "^2.0.0" + "which" "^2.0.1" + +"css-color-names@^1.0.1": + "integrity" "sha512-/loXYOch1qU1biStIFsHH8SxTmOseh1IJqFvy8IujXOm1h+QjUdDhkzOrR5HG8K8mlxREj0yfi8ewCHx0eMxzA==" + "resolved" "https://registry.npmjs.org/css-color-names/-/css-color-names-1.0.1.tgz" + "version" "1.0.1" + +"css-declaration-sorter@^6.0.3": + "integrity" "sha512-SvjQjNRZgh4ULK1LDJ2AduPKUKxIqmtU7ZAyi47BTV+M90Qvxr9AB6lKlLbDUfXqI9IQeYA8LbAsCZPpJEV3aA==" + "resolved" "https://registry.npmjs.org/css-declaration-sorter/-/css-declaration-sorter-6.1.3.tgz" + "version" "6.1.3" + dependencies: + "timsort" "^0.3.0" + +"css-loader@^5.0.1": + "integrity" 
"sha512-Q7mOvpBNBG7YrVGMxRxcBJZFL75o+cH2abNASdibkj/fffYD8qWbInZrD0S9ccI6vZclF3DsHE7njGlLtaHbhg==" + "resolved" "https://registry.npmjs.org/css-loader/-/css-loader-5.2.7.tgz" + "version" "5.2.7" + dependencies: + "icss-utils" "^5.1.0" + "loader-utils" "^2.0.0" + "postcss" "^8.2.15" + "postcss-modules-extract-imports" "^3.0.0" + "postcss-modules-local-by-default" "^4.0.0" + "postcss-modules-scope" "^3.0.0" + "postcss-modules-values" "^4.0.0" + "postcss-value-parser" "^4.1.0" + "schema-utils" "^3.0.0" + "semver" "^7.3.5" + +"css-minimizer-webpack-plugin@^3.0.0": + "integrity" "sha512-KlB8l5uoNcf9F7i5kXnkxoqJGd2BXH4f0+Lj2vSWSmuvMLYO1kNsJ1KHSzeDW8e45/whgSOPcKVT/3JopkT8dg==" + "resolved" "https://registry.npmjs.org/css-minimizer-webpack-plugin/-/css-minimizer-webpack-plugin-3.1.1.tgz" + "version" "3.1.1" + dependencies: + "cssnano" "^5.0.6" + "jest-worker" "^27.0.2" + "p-limit" "^3.0.2" + "postcss" "^8.3.5" + "schema-utils" "^3.1.0" + "serialize-javascript" "^6.0.0" + "source-map" "^0.6.1" + +"css-select@^4.1.3": + "integrity" "sha512-gT3wBNd9Nj49rAbmtFHj1cljIAOLYSX1nZ8CB7TBO3INYckygm5B7LISU/szY//YmdiSLbJvDLOx9VnMVpMBxA==" + "resolved" "https://registry.npmjs.org/css-select/-/css-select-4.1.3.tgz" + "version" "4.1.3" + dependencies: + "boolbase" "^1.0.0" + "css-what" "^5.0.0" + "domhandler" "^4.2.0" + "domutils" "^2.6.0" + "nth-check" "^2.0.0" + +"css-tree@^1.1.2", "css-tree@^1.1.3": + "integrity" "sha512-tRpdppF7TRazZrjJ6v3stzv93qxRcSsFmW6cX0Zm2NVKpxE1WV1HblnghVv9TreireHkqI/VDEsfolRF1p6y7Q==" + "resolved" "https://registry.npmjs.org/css-tree/-/css-tree-1.1.3.tgz" + "version" "1.1.3" + dependencies: + "mdn-data" "2.0.14" + "source-map" "^0.6.1" + +"css-what@^5.0.0": + "integrity" "sha512-arSMRWIIFY0hV8pIxZMEfmMI47Wj3R/aWpZDDxWYCPEiOMv6tfOrnpDtgxBYPEQD4V0Y/958+1TdC3iWTFcUPw==" + "resolved" "https://registry.npmjs.org/css-what/-/css-what-5.1.0.tgz" + "version" "5.1.0" + +"cssesc@^3.0.0": + "integrity" "sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg==" + "resolved" "https://registry.npmjs.org/cssesc/-/cssesc-3.0.0.tgz" + "version" "3.0.0" + +"cssnano-preset-default@^5.1.4": + "integrity" "sha512-sPpQNDQBI3R/QsYxQvfB4mXeEcWuw0wGtKtmS5eg8wudyStYMgKOQT39G07EbW1LB56AOYrinRS9f0ig4Y3MhQ==" + "resolved" "https://registry.npmjs.org/cssnano-preset-default/-/cssnano-preset-default-5.1.4.tgz" + "version" "5.1.4" + dependencies: + "css-declaration-sorter" "^6.0.3" + "cssnano-utils" "^2.0.1" + "postcss-calc" "^8.0.0" + "postcss-colormin" "^5.2.0" + "postcss-convert-values" "^5.0.1" + "postcss-discard-comments" "^5.0.1" + "postcss-discard-duplicates" "^5.0.1" + "postcss-discard-empty" "^5.0.1" + "postcss-discard-overridden" "^5.0.1" + "postcss-merge-longhand" "^5.0.2" + "postcss-merge-rules" "^5.0.2" + "postcss-minify-font-values" "^5.0.1" + "postcss-minify-gradients" "^5.0.2" + "postcss-minify-params" "^5.0.1" + "postcss-minify-selectors" "^5.1.0" + "postcss-normalize-charset" "^5.0.1" + "postcss-normalize-display-values" "^5.0.1" + "postcss-normalize-positions" "^5.0.1" + "postcss-normalize-repeat-style" "^5.0.1" + "postcss-normalize-string" "^5.0.1" + "postcss-normalize-timing-functions" "^5.0.1" + "postcss-normalize-unicode" "^5.0.1" + "postcss-normalize-url" "^5.0.2" + "postcss-normalize-whitespace" "^5.0.1" + "postcss-ordered-values" "^5.0.2" + "postcss-reduce-initial" "^5.0.1" + "postcss-reduce-transforms" "^5.0.1" + "postcss-svgo" "^5.0.2" + "postcss-unique-selectors" "^5.0.1" + +"cssnano-utils@^2.0.1": + "integrity" 
"sha512-i8vLRZTnEH9ubIyfdZCAdIdgnHAUeQeByEeQ2I7oTilvP9oHO6RScpeq3GsFUVqeB8uZgOQ9pw8utofNn32hhQ==" + "resolved" "https://registry.npmjs.org/cssnano-utils/-/cssnano-utils-2.0.1.tgz" + "version" "2.0.1" + +"cssnano@^5.0.6": + "integrity" "sha512-Lda7geZU0Yu+RZi2SGpjYuQz4HI4/1Y+BhdD0jL7NXAQ5larCzVn+PUGuZbDMYz904AXXCOgO5L1teSvgu7aFg==" + "resolved" "https://registry.npmjs.org/cssnano/-/cssnano-5.0.8.tgz" + "version" "5.0.8" + dependencies: + "cssnano-preset-default" "^5.1.4" + "is-resolvable" "^1.1.0" + "lilconfig" "^2.0.3" + "yaml" "^1.10.2" + +"csso@^4.2.0": + "integrity" "sha512-wvlcdIbf6pwKEk7vHj8/Bkc0B4ylXZruLvOgs9doS5eOsOpuodOV2zJChSpkp+pRpYQLQMeF04nr3Z68Sta9jA==" + "resolved" "https://registry.npmjs.org/csso/-/csso-4.2.0.tgz" + "version" "4.2.0" + dependencies: + "css-tree" "^1.1.2" + +"csstype@^2.6.8": + "integrity" "sha512-RSU6Hyeg14am3Ah4VZEmeX8H7kLwEEirXe6aU2IPfKNvhXwTflK5HQRDNI0ypQXoqmm+QPyG2IaPuQE5zMwSIQ==" + "resolved" "https://registry.npmjs.org/csstype/-/csstype-2.6.18.tgz" + "version" "2.6.18" + +"dashdash@^1.12.0": + "integrity" "sha1-hTz6D3y+L+1d4gMmuN1YEDX24vA=" + "resolved" "https://registry.npmjs.org/dashdash/-/dashdash-1.14.1.tgz" + "version" "1.14.1" + dependencies: + "assert-plus" "^1.0.0" + +"debug@^2.2.0": + "integrity" "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==" + "resolved" "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz" + "version" "2.6.9" + dependencies: + "ms" "2.0.0" + +"debug@^2.3.3": + "integrity" "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==" + "resolved" "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz" + "version" "2.6.9" + dependencies: + "ms" "2.0.0" + +"debug@^2.6.9": + "integrity" "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==" + "resolved" "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz" + "version" "2.6.9" + dependencies: + "ms" "2.0.0" + +"debug@^3.1.1": + "integrity" "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==" + "resolved" "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz" + "version" "3.2.7" + dependencies: + "ms" "^2.1.1" + +"debug@^3.2.7": + "integrity" "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==" + "resolved" "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz" + "version" "3.2.7" + dependencies: + "ms" "^2.1.1" + +"debug@^4.0.1", "debug@^4.1.0", "debug@^4.1.1", "debug@^4.3.2": + "integrity" "sha512-mOp8wKcvj7XxC78zLgw/ZA+6TSgkoE2C/ienthhRD298T7UNwAg9diBpLRxC0mOezLl4B0xV7M0cCO6P/O0Xhw==" + "resolved" "https://registry.npmjs.org/debug/-/debug-4.3.2.tgz" + "version" "4.3.2" + dependencies: + "ms" "2.1.2" + +"debug@2.6.9": + "integrity" "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==" + "resolved" "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz" + "version" "2.6.9" + dependencies: + "ms" "2.0.0" + +"decamelize@^1.2.0": + "integrity" "sha512-z2S+W9X73hAUUki+N+9Za2lBlun89zigOyGrsax+KUQ6wKW4ZoWpEYBkGhQjwAjjDCkWxhY0VKEhk8wzY7F5cA==" + "resolved" "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz" + "version" "1.2.0" + +"decode-uri-component@^0.2.0": + "integrity" "sha512-hjf+xovcEn31w/EUYdTXQh/8smFL/dzYjohQGEIgjyNavaJfBY2p5F527Bo1VPATxv0VYTUC2bOcXvqFwk78Og==" + "resolved" "https://registry.npmjs.org/decode-uri-component/-/decode-uri-component-0.2.0.tgz" + "version" "0.2.0" + +"deep-equal@^1.0.1": + "integrity" 
"sha512-yd9c5AdiqVcR+JjcwUQb9DkhJc8ngNr0MahEBGvDiJw8puWab2yZlh+nkasOnZP+EGTAP6rRp2JzJhJZzvNF8g==" + "resolved" "https://registry.npmjs.org/deep-equal/-/deep-equal-1.1.1.tgz" + "version" "1.1.1" + dependencies: + "is-arguments" "^1.0.4" + "is-date-object" "^1.0.1" + "is-regex" "^1.0.4" + "object-is" "^1.0.1" + "object-keys" "^1.1.1" + "regexp.prototype.flags" "^1.2.0" + +"deep-is@^0.1.3": + "integrity" "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==" + "resolved" "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz" + "version" "0.1.4" + +"deepmerge@^1.5.2": + "integrity" "sha512-95k0GDqvBjZavkuvzx/YqVLv/6YYa17fz6ILMSf7neqQITCPbnfEnQvEgMPNjH4kgobe7+WIL0yJEHku+H3qtQ==" + "resolved" "https://registry.npmjs.org/deepmerge/-/deepmerge-1.5.2.tgz" + "version" "1.5.2" + +"deepmerge@^4.2.2": + "integrity" "sha512-FJ3UgI4gIl+PHZm53knsuSFpE+nESMr7M4v9QcgB7S63Kj/6WqMiFQJpBBYz1Pt+66bZpP3Q7Lye0Oo9MPKEdg==" + "resolved" "https://registry.npmjs.org/deepmerge/-/deepmerge-4.2.2.tgz" + "version" "4.2.2" + +"default-gateway@^4.2.0": + "integrity" "sha512-h6sMrVB1VMWVrW13mSc6ia/DwYYw5MN6+exNu1OaJeFac5aSAvwM7lZ0NVfTABuSkQelr4h5oebg3KB1XPdjgA==" + "resolved" "https://registry.npmjs.org/default-gateway/-/default-gateway-4.2.0.tgz" + "version" "4.2.0" + dependencies: + "execa" "^1.0.0" + "ip-regex" "^2.1.0" + +"define-properties@^1.1.3": + "integrity" "sha512-3MqfYKj2lLzdMSf8ZIZE/V+Zuy+BgD6f164e8K2w7dgnpKArBDerGYpM46IYYcjnkdPNMjPk9A6VFB8+3SKlXQ==" + "resolved" "https://registry.npmjs.org/define-properties/-/define-properties-1.1.3.tgz" + "version" "1.1.3" + dependencies: + "object-keys" "^1.0.12" + +"define-property@^0.2.5": + "integrity" "sha512-Rr7ADjQZenceVOAKop6ALkkRAmH1A4Gx9hV/7ZujPUN2rkATqFO0JZLZInbAjpZYoJ1gUx8MRMQVkYemcbMSTA==" + "resolved" "https://registry.npmjs.org/define-property/-/define-property-0.2.5.tgz" + "version" "0.2.5" + dependencies: + "is-descriptor" "^0.1.0" + +"define-property@^1.0.0": + "integrity" "sha512-cZTYKFWspt9jZsMscWo8sc/5lbPC9Q0N5nBLgb+Yd915iL3udB1uFgS3B8YCx66UVHq018DAVFoee7x+gxggeA==" + "resolved" "https://registry.npmjs.org/define-property/-/define-property-1.0.0.tgz" + "version" "1.0.0" + dependencies: + "is-descriptor" "^1.0.0" + +"define-property@^2.0.2": + "integrity" "sha512-jwK2UV4cnPpbcG7+VRARKTZPUWowwXA8bzH5NP6ud0oeAxyYPuGZUAC7hMugpCdz4BeSZl2Dl9k66CHJ/46ZYQ==" + "resolved" "https://registry.npmjs.org/define-property/-/define-property-2.0.2.tgz" + "version" "2.0.2" + dependencies: + "is-descriptor" "^1.0.2" + "isobject" "^3.0.1" + +"del@^4.1.1": + "integrity" "sha512-QwGuEUouP2kVwQenAsOof5Fv8K9t3D8Ca8NxcXKrIpEHjTXK5J2nXLdP+ALI1cgv8wj7KuwBhTwBkOZSJKM5XQ==" + "resolved" "https://registry.npmjs.org/del/-/del-4.1.1.tgz" + "version" "4.1.1" + dependencies: + "@types/glob" "^7.1.1" + "globby" "^6.1.0" + "is-path-cwd" "^2.0.0" + "is-path-in-cwd" "^2.0.0" + "p-map" "^2.0.0" + "pify" "^4.0.1" + "rimraf" "^2.6.3" + +"delayed-stream@~1.0.0": + "integrity" "sha1-3zrhmayt+31ECqrgsp4icrJOxhk=" + "resolved" "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz" + "version" "1.0.0" + +"depd@~1.1.2": + "integrity" "sha1-m81S4UwJd2PnSbJ0xDRu0uVgtak=" + "resolved" "https://registry.npmjs.org/depd/-/depd-1.1.2.tgz" + "version" "1.1.2" + +"depd@2.0.0": + "integrity" "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==" + "resolved" "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz" + "version" "2.0.0" + +"destroy@1.2.0": + "integrity" 
"sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg==" + "resolved" "https://registry.npmjs.org/destroy/-/destroy-1.2.0.tgz" + "version" "1.2.0" + +"detect-node@^2.0.4": + "integrity" "sha512-T0NIuQpnTvFDATNuHN5roPwSBG83rFsuO+MXXH9/3N1eFbn4wcPjttvjMLEPWJ0RGUYgQE7cGgS3tNxbqCGM7g==" + "resolved" "https://registry.npmjs.org/detect-node/-/detect-node-2.1.0.tgz" + "version" "2.1.0" + +"dir-glob@^3.0.1": + "integrity" "sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==" + "resolved" "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz" + "version" "3.0.1" + dependencies: + "path-type" "^4.0.0" + +"dns-equal@^1.0.0": + "integrity" "sha512-z+paD6YUQsk+AbGCEM4PrOXSss5gd66QfcVBFTKR/HpFL9jCqikS94HYwKww6fQyO7IxrIIyUu+g0Ka9tUS2Cg==" + "resolved" "https://registry.npmjs.org/dns-equal/-/dns-equal-1.0.0.tgz" + "version" "1.0.0" + +"dns-packet@^1.3.1": + "integrity" "sha512-BQ6F4vycLXBvdrJZ6S3gZewt6rcrks9KBgM9vrhW+knGRqc8uEdT7fuCwloc7nny5xNoMJ17HGH0R/6fpo8ECA==" + "resolved" "https://registry.npmjs.org/dns-packet/-/dns-packet-1.3.4.tgz" + "version" "1.3.4" + dependencies: + "ip" "^1.1.0" + "safe-buffer" "^5.0.1" + +"dns-txt@^2.0.2": + "integrity" "sha512-Ix5PrWjphuSoUXV/Zv5gaFHjnaJtb02F2+Si3Ht9dyJ87+Z/lMmy+dpNHtTGraNK958ndXq2i+GLkWsWHcKaBQ==" + "resolved" "https://registry.npmjs.org/dns-txt/-/dns-txt-2.0.2.tgz" + "version" "2.0.2" + dependencies: + "buffer-indexof" "^1.0.0" + +"doctrine@^2.1.0": + "integrity" "sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw==" + "resolved" "https://registry.npmjs.org/doctrine/-/doctrine-2.1.0.tgz" + "version" "2.1.0" + dependencies: + "esutils" "^2.0.2" + +"doctrine@^3.0.0": + "integrity" "sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==" + "resolved" "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz" + "version" "3.0.0" + dependencies: + "esutils" "^2.0.2" + +"dom-align@^1.12.1": + "integrity" "sha512-pHuazgqrsTFrGU2WLDdXxCFabkdQDx72ddkraZNih1KsMcN5qsRSTR9O4VJRlwTPCPb5COYg3LOfiMHHcPInHg==" + "resolved" "https://registry.npmjs.org/dom-align/-/dom-align-1.12.2.tgz" + "version" "1.12.2" + +"dom-converter@^0.2.0": + "integrity" "sha512-gd3ypIPfOMr9h5jIKq8E3sHOTCjeirnl0WK5ZdS1AW0Odt0b1PaWaHdJ4Qk4klv+YB9aJBS7mESXjFoDQPu6DA==" + "resolved" "https://registry.npmjs.org/dom-converter/-/dom-converter-0.2.0.tgz" + "version" "0.2.0" + dependencies: + "utila" "~0.4" + +"dom-scroll-into-view@^2.0.0": + "integrity" "sha512-bvVTQe1lfaUr1oFzZX80ce9KLDlZ3iU+XGNE/bz9HnGdklTieqsbmsLHe+rT2XWqopvL0PckkYqN7ksmm5pe3w==" + "resolved" "https://registry.npmjs.org/dom-scroll-into-view/-/dom-scroll-into-view-2.0.1.tgz" + "version" "2.0.1" + +"dom-serializer@^1.0.1": + "integrity" "sha512-5c54Bk5Dw4qAxNOI1pFEizPSjVsx5+bpJKmL2kPn8JhBUq2q09tTCa3mjijun2NfK78NMouDYNMBkOrPZiS+ig==" + "resolved" "https://registry.npmjs.org/dom-serializer/-/dom-serializer-1.3.2.tgz" + "version" "1.3.2" + dependencies: + "domelementtype" "^2.0.1" + "domhandler" "^4.2.0" + "entities" "^2.0.0" + +"domelementtype@^2.0.1", "domelementtype@^2.2.0": + "integrity" "sha512-DtBMo82pv1dFtUmHyr48beiuq792Sxohr+8Hm9zoxklYPfa6n0Z3Byjj2IV7bmr2IyqClnqEQhfgHJJ5QF0R5A==" + "resolved" "https://registry.npmjs.org/domelementtype/-/domelementtype-2.2.0.tgz" + "version" "2.2.0" + +"domhandler@^4.0.0", "domhandler@^4.2.0", "domhandler@^4.2.2": + "integrity" "sha512-PzE9aBMsdZO8TK4BnuJwH0QT41wgMbRzuZrHUcpYncEjmQazq8QEaBWgLG7ZyC/DAZKEgglpIA6j4Qn/HmxS3w==" + 
"resolved" "https://registry.npmjs.org/domhandler/-/domhandler-4.2.2.tgz" + "version" "4.2.2" + dependencies: + "domelementtype" "^2.2.0" + +"domutils@^2.5.2", "domutils@^2.6.0", "domutils@^2.8.0": + "integrity" "sha512-w96Cjofp72M5IIhpjgobBimYEfoPjx1Vx0BSX9P30WBdZW2WIKU0T1Bd0kz2eNZ9ikjKgHbEyKx8BB6H1L3h3A==" + "resolved" "https://registry.npmjs.org/domutils/-/domutils-2.8.0.tgz" + "version" "2.8.0" + dependencies: + "dom-serializer" "^1.0.1" + "domelementtype" "^2.2.0" + "domhandler" "^4.2.0" + +"dot-case@^3.0.4": + "integrity" "sha512-Kv5nKlh6yRrdrGvxeJ2e5y2eRUpkUosIW4A2AS38zwSz27zu7ufDwQPi5Jhs3XAlGNetl3bmnGhQsMtkKJnj3w==" + "resolved" "https://registry.npmjs.org/dot-case/-/dot-case-3.0.4.tgz" + "version" "3.0.4" + dependencies: + "no-case" "^3.0.4" + "tslib" "^2.0.3" + +"dotenv@8.2.0": + "integrity" "sha512-8sJ78ElpbDJBHNeBzUbUVLsqKdccaa/BXF1uPTw3GrvQTBgrQrtObr2mUrE38vzYd8cEv+m/JBfDLioYcfXoaw==" + "resolved" "https://registry.npmjs.org/dotenv/-/dotenv-8.2.0.tgz" + "version" "8.2.0" + +"duplexer@^0.1.2": + "integrity" "sha512-jtD6YG370ZCIi/9GTaJKQxWTZD045+4R4hTk/x1UyoqadyJ9x9CgSi1RlVDQF8U2sxLLSnFkCaMihqljHIWgMg==" + "resolved" "https://registry.npmjs.org/duplexer/-/duplexer-0.1.2.tgz" + "version" "0.1.2" + +"ecc-jsbn@~0.1.1": + "integrity" "sha1-OoOpBOVDUyh4dMVkt1SThoSamMk=" + "resolved" "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz" + "version" "0.1.2" + dependencies: + "jsbn" "~0.1.0" + "safer-buffer" "^2.1.0" + +"echarts@^5.2.1": + "integrity" "sha512-OJ79b22eqRfbSV8vYmDKmA+XWfNbr0Uk/OafWcFNIGDWti2Uw9A6eVCiJLmqPa9Sk+EWL+t5v26aak0z3gxiZw==" + "resolved" "https://registry.npmjs.org/echarts/-/echarts-5.2.1.tgz" + "version" "5.2.1" + dependencies: + "tslib" "2.3.0" + "zrender" "5.2.1" + +"ee-first@1.1.1": + "integrity" "sha1-WQxhFWsK4vTwJVcyoViyZrxWsh0=" + "resolved" "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz" + "version" "1.1.1" + +"electron-to-chromium@^1.4.118": + "integrity" "sha512-0Rcpald12O11BUogJagX3HsCN3FE83DSqWjgXoHo5a72KUKMSfI39XBgJpgNNxS9fuGzytaFjE06kZkiVFy2qA==" + "resolved" "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.137.tgz" + "version" "1.4.137" + +"emoji-regex@^7.0.1": + "integrity" "sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA==" + "resolved" "https://registry.npmjs.org/emoji-regex/-/emoji-regex-7.0.3.tgz" + "version" "7.0.3" + +"emoji-regex@^8.0.0": + "integrity" "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" + "resolved" "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz" + "version" "8.0.0" + +"emojis-list@^3.0.0": + "integrity" "sha512-/kyM18EfinwXZbno9FyUGeFh87KC8HRQBQGildHZbEuRyWFOmv1U10o9BBp8XVZDVNNuQKyIGIu5ZYAAXJ0V2Q==" + "resolved" "https://registry.npmjs.org/emojis-list/-/emojis-list-3.0.0.tgz" + "version" "3.0.0" + +"encodeurl@~1.0.2": + "integrity" "sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==" + "resolved" "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz" + "version" "1.0.2" + +"end-of-stream@^1.1.0": + "integrity" "sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==" + "resolved" "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz" + "version" "1.4.4" + dependencies: + "once" "^1.4.0" + +"enhanced-resolve@^5.8.3": + "integrity" "sha512-EGAbGvH7j7Xt2nc0E7D99La1OiEs8LnyimkRgwExpUMScN6O+3x9tIWs7PLQZVNx4YD+00skHXPXi1yQHpAmZA==" + "resolved" 
"https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.8.3.tgz" + "version" "5.8.3" + dependencies: + "graceful-fs" "^4.2.4" + "tapable" "^2.2.0" + +"enquirer@^2.3.5": + "integrity" "sha512-yjNnPr315/FjS4zIsUxYguYUPP2e1NK4d7E7ZOLiyYCcbFBiTMyID+2wvm2w6+pZ/odMA7cRkjhsPbltwBOrLg==" + "resolved" "https://registry.npmjs.org/enquirer/-/enquirer-2.3.6.tgz" + "version" "2.3.6" + dependencies: + "ansi-colors" "^4.1.1" + +"entities@^2.0.0": + "integrity" "sha512-p92if5Nz619I0w+akJrLZH0MX0Pb5DX39XOwQTtXSdQQOaYH03S1uIQp4mhOZtAXrxq4ViO67YTiLBo2638o9A==" + "resolved" "https://registry.npmjs.org/entities/-/entities-2.2.0.tgz" + "version" "2.2.0" + +"entities@^3.0.1": + "integrity" "sha512-WiyBqoomrwMdFG1e0kqvASYfnlb0lp8M5o5Fw2OFq1hNZxxcNk8Ik0Xm7LxzBhuidnZB/UtBqVCgUz3kBOP51Q==" + "resolved" "https://registry.npmjs.org/entities/-/entities-3.0.1.tgz" + "version" "3.0.1" + +"envinfo@^7.7.3": + "integrity" "sha512-/o+BXHmB7ocbHEAs6F2EnG0ogybVVUdkRunTT2glZU9XAaGmhqskrvKwqXuDfNjEO0LZKWdejEEpnq8aM0tOaw==" + "resolved" "https://registry.npmjs.org/envinfo/-/envinfo-7.8.1.tgz" + "version" "7.8.1" + +"errno@^0.1.1", "errno@^0.1.3": + "integrity" "sha512-dJ6oBr5SQ1VSd9qkk7ByRgb/1SH4JZjCHSW/mr63/QcXO9zLVxvJ6Oy13nio03rxpSnVDDjFor75SjVeZWPW/A==" + "resolved" "https://registry.npmjs.org/errno/-/errno-0.1.8.tgz" + "version" "0.1.8" + dependencies: + "prr" "~1.0.1" + +"error-ex@^1.3.1": + "integrity" "sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==" + "resolved" "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz" + "version" "1.3.2" + dependencies: + "is-arrayish" "^0.2.1" + +"error-stack-parser@^2.0.6": + "integrity" "sha512-Sk5V6wVazPhq5MhpO+AUxJn5x7XSXGl1R93Vn7i+zS15KDVxQijejNCrz8340/2bgLBjR9GtEG8ZVKONDjcqGQ==" + "resolved" "https://registry.npmjs.org/error-stack-parser/-/error-stack-parser-2.1.4.tgz" + "version" "2.1.4" + dependencies: + "stackframe" "^1.3.4" + +"es-abstract@^1.19.0", "es-abstract@^1.19.1": + "integrity" "sha512-2vJ6tjA/UfqLm2MPs7jxVybLoB8i1t1Jd9R3kISld20sIxPcTbLuggQOUxeWeAvIUkduv/CfMjuh4WmiXr2v9w==" + "resolved" "https://registry.npmjs.org/es-abstract/-/es-abstract-1.19.1.tgz" + "version" "1.19.1" + dependencies: + "call-bind" "^1.0.2" + "es-to-primitive" "^1.2.1" + "function-bind" "^1.1.1" + "get-intrinsic" "^1.1.1" + "get-symbol-description" "^1.0.0" + "has" "^1.0.3" + "has-symbols" "^1.0.2" + "internal-slot" "^1.0.3" + "is-callable" "^1.2.4" + "is-negative-zero" "^2.0.1" + "is-regex" "^1.1.4" + "is-shared-array-buffer" "^1.0.1" + "is-string" "^1.0.7" + "is-weakref" "^1.0.1" + "object-inspect" "^1.11.0" + "object-keys" "^1.1.1" + "object.assign" "^4.1.2" + "string.prototype.trimend" "^1.0.4" + "string.prototype.trimstart" "^1.0.4" + "unbox-primitive" "^1.0.1" + +"es-module-lexer@^0.9.0": + "integrity" "sha512-1HQ2M2sPtxwnvOvT1ZClHyQDiggdNjURWpY2we6aMKCQiUVxTmVs2UYPLIrD84sS+kMdUwfBSylbJPwNnBrnHQ==" + "resolved" "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-0.9.3.tgz" + "version" "0.9.3" + +"es-to-primitive@^1.2.1": + "integrity" "sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==" + "resolved" "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.1.tgz" + "version" "1.2.1" + dependencies: + "is-callable" "^1.1.4" + "is-date-object" "^1.0.1" + "is-symbol" "^1.0.2" + +"escalade@^3.1.1": + "integrity" "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==" + "resolved" 
"https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz" + "version" "3.1.1" + +"escape-html@~1.0.3": + "integrity" "sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==" + "resolved" "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz" + "version" "1.0.3" + +"escape-string-regexp@^1.0.5": + "integrity" "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=" + "resolved" "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz" + "version" "1.0.5" + +"escape-string-regexp@^4.0.0": + "integrity" "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==" + "resolved" "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz" + "version" "4.0.0" + +"eslint-import-resolver-node@^0.3.6": + "integrity" "sha512-0En0w03NRVMn9Uiyn8YRPDKvWjxCWkslUEhGNTdGx15RvPJYQ+lbOlqrlNI2vEAs4pDYK4f/HN2TbDmk5TP0iw==" + "resolved" "https://registry.npmjs.org/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.6.tgz" + "version" "0.3.6" + dependencies: + "debug" "^3.2.7" + "resolve" "^1.20.0" + +"eslint-module-utils@^2.7.0": + "integrity" "sha512-fjoetBXQZq2tSTWZ9yWVl2KuFrTZZH3V+9iD1V1RfpDgxzJR+mPd/KZmMiA8gbPqdBzpNiEHOuT7IYEWxrH0zQ==" + "resolved" "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.7.1.tgz" + "version" "2.7.1" + dependencies: + "debug" "^3.2.7" + "find-up" "^2.1.0" + "pkg-dir" "^2.0.0" + +"eslint-plugin-angular@^4.0.1": + "integrity" "sha512-dacledMPxVOZA3T0xcYFuvrMCy5dHxg0ZTMWUaHqSBQef3/XLyXJ9s1LNj0NikJ/dYx6OhqlnnNpKmrJhEUB+Q==" + "resolved" "https://registry.npmjs.org/eslint-plugin-angular/-/eslint-plugin-angular-4.1.0.tgz" + "version" "4.1.0" + +"eslint-plugin-es@^3.0.0": + "integrity" "sha512-GUmAsJaN4Fc7Gbtl8uOBlayo2DqhwWvEzykMHSCZHU3XdJ+NSzzZcVhXh3VxX5icqQ+oQdIEawXX8xkR3mIFmQ==" + "resolved" "https://registry.npmjs.org/eslint-plugin-es/-/eslint-plugin-es-3.0.1.tgz" + "version" "3.0.1" + dependencies: + "eslint-utils" "^2.0.0" + "regexpp" "^3.0.0" + +"eslint-plugin-html@^6.1.2": + "integrity" "sha512-vi3NW0E8AJombTvt8beMwkL1R/fdRWl4QSNRNMhVQKWm36/X0KF0unGNAY4mqUF06mnwVWZcIcerrCnfn9025g==" + "resolved" "https://registry.npmjs.org/eslint-plugin-html/-/eslint-plugin-html-6.2.0.tgz" + "version" "6.2.0" + dependencies: + "htmlparser2" "^7.1.2" + +"eslint-plugin-import@^2.22.1": + "integrity" "sha512-qCwQr9TYfoBHOFcVGKY9C9unq05uOxxdklmBXLVvcwo68y5Hta6/GzCZEMx2zQiu0woKNEER0LE7ZgaOfBU14g==" + "resolved" "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.25.2.tgz" + "version" "2.25.2" + dependencies: + "array-includes" "^3.1.4" + "array.prototype.flat" "^1.2.5" + "debug" "^2.6.9" + "doctrine" "^2.1.0" + "eslint-import-resolver-node" "^0.3.6" + "eslint-module-utils" "^2.7.0" + "has" "^1.0.3" + "is-core-module" "^2.7.0" + "is-glob" "^4.0.3" + "minimatch" "^3.0.4" + "object.values" "^1.1.5" + "resolve" "^1.20.0" + "tsconfig-paths" "^3.11.0" + +"eslint-plugin-node@^11.1.0": + "integrity" "sha512-oUwtPJ1W0SKD0Tr+wqu92c5xuCeQqB3hSCHasn/ZgjFdA9iDGNkNf2Zi9ztY7X+hNuMib23LNGRm6+uN+KLE3g==" + "resolved" "https://registry.npmjs.org/eslint-plugin-node/-/eslint-plugin-node-11.1.0.tgz" + "version" "11.1.0" + dependencies: + "eslint-plugin-es" "^3.0.0" + "eslint-utils" "^2.0.0" + "ignore" "^5.1.1" + "minimatch" "^3.0.4" + "resolve" "^1.10.1" + "semver" "^6.1.0" + +"eslint-plugin-vue@^7.9.0": + "integrity" "sha512-oVNDqzBC9h3GO+NTgWeLMhhGigy6/bQaQbHS+0z7C4YEu/qK/yxHvca/2PTZtGNPsCrHwOTgKMrwu02A9iPBmw==" + "resolved" 
"https://registry.npmjs.org/eslint-plugin-vue/-/eslint-plugin-vue-7.20.0.tgz" + "version" "7.20.0" + dependencies: + "eslint-utils" "^2.1.0" + "natural-compare" "^1.4.0" + "semver" "^6.3.0" + "vue-eslint-parser" "^7.10.0" + +"eslint-scope@^5.1.1", "eslint-scope@5.1.1": + "integrity" "sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw==" + "resolved" "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.1.1.tgz" + "version" "5.1.1" + dependencies: + "esrecurse" "^4.3.0" + "estraverse" "^4.1.1" + +"eslint-utils@^2.0.0", "eslint-utils@^2.1.0": + "integrity" "sha512-w94dQYoauyvlDc43XnGB8lU3Zt713vNChgt4EWwhXAP2XkBvndfxF0AgIqKOOasjPIPzj9JqgwkwbCYD0/V3Zg==" + "resolved" "https://registry.npmjs.org/eslint-utils/-/eslint-utils-2.1.0.tgz" + "version" "2.1.0" + dependencies: + "eslint-visitor-keys" "^1.1.0" + +"eslint-visitor-keys@^1.1.0": + "integrity" "sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ==" + "resolved" "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-1.3.0.tgz" + "version" "1.3.0" + +"eslint-visitor-keys@^1.3.0": + "integrity" "sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ==" + "resolved" "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-1.3.0.tgz" + "version" "1.3.0" + +"eslint-visitor-keys@^2.0.0", "eslint-visitor-keys@^2.1.0": + "integrity" "sha512-0rSmRBzXgDzIsD6mGdJgevzgezI534Cer5L/vyMX0kHzT/jiB43jRhd9YUlMGYLQy2zprNmoT8qasCGtY+QaKw==" + "resolved" "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-2.1.0.tgz" + "version" "2.1.0" + +"eslint@^2 || ^3 || ^4 || ^5 || ^6 || ^7.2.0 || ^8", "eslint@^6.2.0 || ^7.0.0 || ^8.0.0", "eslint@^7.26.0", "eslint@>=4.19.1", "eslint@>=5.0.0", "eslint@>=5.16.0", "eslint@>=7.5.0": + "integrity" "sha512-VHZ8gX+EDfz+97jGcgyGCyRia/dPOd6Xh9yPv8Bl1+SoaIwD+a/vlrOmGRUyOYu7MwUhc7CxqeaDZU13S4+EpA==" + "resolved" "https://registry.npmjs.org/eslint/-/eslint-7.32.0.tgz" + "version" "7.32.0" + dependencies: + "@babel/code-frame" "7.12.11" + "@eslint/eslintrc" "^0.4.3" + "@humanwhocodes/config-array" "^0.5.0" + "ajv" "^6.10.0" + "chalk" "^4.0.0" + "cross-spawn" "^7.0.2" + "debug" "^4.0.1" + "doctrine" "^3.0.0" + "enquirer" "^2.3.5" + "escape-string-regexp" "^4.0.0" + "eslint-scope" "^5.1.1" + "eslint-utils" "^2.1.0" + "eslint-visitor-keys" "^2.0.0" + "espree" "^7.3.1" + "esquery" "^1.4.0" + "esutils" "^2.0.2" + "fast-deep-equal" "^3.1.3" + "file-entry-cache" "^6.0.1" + "functional-red-black-tree" "^1.0.1" + "glob-parent" "^5.1.2" + "globals" "^13.6.0" + "ignore" "^4.0.6" + "import-fresh" "^3.0.0" + "imurmurhash" "^0.1.4" + "is-glob" "^4.0.0" + "js-yaml" "^3.13.1" + "json-stable-stringify-without-jsonify" "^1.0.1" + "levn" "^0.4.1" + "lodash.merge" "^4.6.2" + "minimatch" "^3.0.4" + "natural-compare" "^1.4.0" + "optionator" "^0.9.1" + "progress" "^2.0.0" + "regexpp" "^3.1.0" + "semver" "^7.2.1" + "strip-ansi" "^6.0.0" + "strip-json-comments" "^3.1.0" + "table" "^6.0.9" + "text-table" "^0.2.0" + "v8-compile-cache" "^2.0.3" + +"espree@^6.2.1": + "integrity" "sha512-ysCxRQY3WaXJz9tdbWOwuWr5Y/XrPTGX9Kiz3yoUXwW0VZ4w30HTkQLaGx/+ttFjF8i+ACbArnB4ce68a9m5hw==" + "resolved" "https://registry.npmjs.org/espree/-/espree-6.2.1.tgz" + "version" "6.2.1" + dependencies: + "acorn" "^7.1.1" + "acorn-jsx" "^5.2.0" + "eslint-visitor-keys" "^1.1.0" + +"espree@^7.3.0", "espree@^7.3.1": + "integrity" 
"sha512-v3JCNCE64umkFpmkFGqzVKsOT0tN1Zr+ueqLZfpV1Ob8e+CEgPWa+OxCoGH3tnhimMKIaBm4m/vaRpJ/krRz2g==" + "resolved" "https://registry.npmjs.org/espree/-/espree-7.3.1.tgz" + "version" "7.3.1" + dependencies: + "acorn" "^7.4.0" + "acorn-jsx" "^5.3.1" + "eslint-visitor-keys" "^1.3.0" + +"esprima@^4.0.0": + "integrity" "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==" + "resolved" "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz" + "version" "4.0.1" + +"esquery@^1.4.0": + "integrity" "sha512-cCDispWt5vHHtwMY2YrAQ4ibFkAL8RbH5YGBnZBc90MolvvfkkQcJro/aZiAQUlQ3qgrYS6D6v8Gc5G5CQsc9w==" + "resolved" "https://registry.npmjs.org/esquery/-/esquery-1.4.0.tgz" + "version" "1.4.0" + dependencies: + "estraverse" "^5.1.0" + +"esrecurse@^4.3.0": + "integrity" "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==" + "resolved" "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz" + "version" "4.3.0" + dependencies: + "estraverse" "^5.2.0" + +"estraverse@^4.1.1": + "integrity" "sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==" + "resolved" "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz" + "version" "4.3.0" + +"estraverse@^5.1.0": + "integrity" "sha512-BxbNGGNm0RyRYvUdHpIwv9IWzeM9XClbOxwoATuFdOE7ZE6wHL+HQ5T8hoPM+zHvmKzzsEqhgy0GrQ5X13afiQ==" + "resolved" "https://registry.npmjs.org/estraverse/-/estraverse-5.2.0.tgz" + "version" "5.2.0" + +"estraverse@^5.2.0": + "integrity" "sha512-BxbNGGNm0RyRYvUdHpIwv9IWzeM9XClbOxwoATuFdOE7ZE6wHL+HQ5T8hoPM+zHvmKzzsEqhgy0GrQ5X13afiQ==" + "resolved" "https://registry.npmjs.org/estraverse/-/estraverse-5.2.0.tgz" + "version" "5.2.0" + +"estree-walker@^2.0.2": + "integrity" "sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w==" + "resolved" "https://registry.npmjs.org/estree-walker/-/estree-walker-2.0.2.tgz" + "version" "2.0.2" + +"esutils@^2.0.2": + "integrity" "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==" + "resolved" "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz" + "version" "2.0.3" + +"etag@~1.8.1": + "integrity" "sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==" + "resolved" "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz" + "version" "1.8.1" + +"eventemitter3@^4.0.0": + "integrity" "sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw==" + "resolved" "https://registry.npmjs.org/eventemitter3/-/eventemitter3-4.0.7.tgz" + "version" "4.0.7" + +"events@^3.2.0": + "integrity" "sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==" + "resolved" "https://registry.npmjs.org/events/-/events-3.3.0.tgz" + "version" "3.3.0" + +"eventsource@^2.0.2": + "integrity" "sha512-IzUmBGPR3+oUG9dUeXynyNmf91/3zUSJg1lCktzKw47OXuhco54U3r9B7O4XX+Rb1Itm9OZ2b0RkTs10bICOxA==" + "resolved" "https://registry.npmjs.org/eventsource/-/eventsource-2.0.2.tgz" + "version" "2.0.2" + +"execa@^1.0.0": + "integrity" "sha512-adbxcyWV46qiHyvSp50TKt05tB4tK3HcmF7/nxfAdhnox83seTDbwnaqKO4sXRy7roHAIFqJP/Rw/AuEbX61LA==" + "resolved" "https://registry.npmjs.org/execa/-/execa-1.0.0.tgz" + "version" "1.0.0" + dependencies: + "cross-spawn" "^6.0.0" + "get-stream" "^4.0.0" + "is-stream" "^1.1.0" + "npm-run-path" "^2.0.0" + "p-finally" "^1.0.0" + "signal-exit" "^3.0.0" + "strip-eof" "^1.0.0" + +"expand-brackets@^2.1.4": + "integrity" 
"sha512-w/ozOKR9Obk3qoWeY/WDi6MFta9AoMR+zud60mdnbniMcBxRuFJyDt2LdX/14A1UABeqk+Uk+LDfUpvoGKppZA==" + "resolved" "https://registry.npmjs.org/expand-brackets/-/expand-brackets-2.1.4.tgz" + "version" "2.1.4" + dependencies: + "debug" "^2.3.3" + "define-property" "^0.2.5" + "extend-shallow" "^2.0.1" + "posix-character-classes" "^0.1.0" + "regex-not" "^1.0.0" + "snapdragon" "^0.8.1" + "to-regex" "^3.0.1" + +"express@^4.17.1": + "integrity" "sha512-zZBcOX9TfehHQhtupq57OF8lFZ3UZi08Y97dwFCkD8p9d/d2Y3M+ykKcwaMDEL+4qyUolgBDX6AblpR3fL212Q==" + "resolved" "https://registry.npmjs.org/express/-/express-4.18.1.tgz" + "version" "4.18.1" + dependencies: + "accepts" "~1.3.8" + "array-flatten" "1.1.1" + "body-parser" "1.20.0" + "content-disposition" "0.5.4" + "content-type" "~1.0.4" + "cookie" "0.5.0" + "cookie-signature" "1.0.6" + "debug" "2.6.9" + "depd" "2.0.0" + "encodeurl" "~1.0.2" + "escape-html" "~1.0.3" + "etag" "~1.8.1" + "finalhandler" "1.2.0" + "fresh" "0.5.2" + "http-errors" "2.0.0" + "merge-descriptors" "1.0.1" + "methods" "~1.1.2" + "on-finished" "2.4.1" + "parseurl" "~1.3.3" + "path-to-regexp" "0.1.7" + "proxy-addr" "~2.0.7" + "qs" "6.10.3" + "range-parser" "~1.2.1" + "safe-buffer" "5.2.1" + "send" "0.18.0" + "serve-static" "1.15.0" + "setprototypeof" "1.2.0" + "statuses" "2.0.1" + "type-is" "~1.6.18" + "utils-merge" "1.0.1" + "vary" "~1.1.2" + +"extend-shallow@^2.0.1": + "integrity" "sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==" + "resolved" "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz" + "version" "2.0.1" + dependencies: + "is-extendable" "^0.1.0" + +"extend-shallow@^3.0.0", "extend-shallow@^3.0.2": + "integrity" "sha512-BwY5b5Ql4+qZoefgMj2NUmx+tehVTH/Kf4k1ZEtOHNFcm2wSxMRo992l6X3TIgni2eZVTZ85xMOjF31fwZAj6Q==" + "resolved" "https://registry.npmjs.org/extend-shallow/-/extend-shallow-3.0.2.tgz" + "version" "3.0.2" + dependencies: + "assign-symbols" "^1.0.0" + "is-extendable" "^1.0.1" + +"extend@~3.0.2": + "integrity" "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==" + "resolved" "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz" + "version" "3.0.2" + +"extglob@^2.0.4": + "integrity" "sha512-Nmb6QXkELsuBr24CJSkilo6UHHgbekK5UiZgfE6UHD3Eb27YC6oD+bhcT+tJ6cl8dmsgdQxnWlcry8ksBIBLpw==" + "resolved" "https://registry.npmjs.org/extglob/-/extglob-2.0.4.tgz" + "version" "2.0.4" + dependencies: + "array-unique" "^0.3.2" + "define-property" "^1.0.0" + "expand-brackets" "^2.1.4" + "extend-shallow" "^2.0.1" + "fragment-cache" "^0.2.1" + "regex-not" "^1.0.0" + "snapdragon" "^0.8.1" + "to-regex" "^3.0.1" + +"extsprintf@^1.2.0", "extsprintf@1.3.0": + "integrity" "sha1-lpGEQOMEGnpBT4xS48V06zw+HgU=" + "resolved" "https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz" + "version" "1.3.0" + +"fast-deep-equal@^3.1.1", "fast-deep-equal@^3.1.3": + "integrity" "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==" + "resolved" "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz" + "version" "3.1.3" + +"fast-glob@^3.2.7": + "integrity" "sha512-rYGMRwip6lUMvYD3BTScMwT1HtAs2d71SMv66Vrxs0IekGZEjhM0pcMfjQPnknBt2zeCwQMEupiN02ZP4DiT1Q==" + "resolved" "https://registry.npmjs.org/fast-glob/-/fast-glob-3.2.7.tgz" + "version" "3.2.7" + dependencies: + "@nodelib/fs.stat" "^2.0.2" + "@nodelib/fs.walk" "^1.2.3" + "glob-parent" "^5.1.2" + "merge2" "^1.3.0" + "micromatch" "^4.0.4" + +"fast-json-stable-stringify@^2.0.0": + "integrity" 
"sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==" + "resolved" "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz" + "version" "2.1.0" + +"fast-levenshtein@^2.0.6": + "integrity" "sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc=" + "resolved" "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz" + "version" "2.0.6" + +"fastq@^1.6.0": + "integrity" "sha512-YpkpUnK8od0o1hmeSc7UUs/eB/vIPWJYjKck2QKIzAf71Vm1AAQ3EbuZB3g2JIy+pg+ERD0vqI79KyZiB2e2Nw==" + "resolved" "https://registry.npmjs.org/fastq/-/fastq-1.13.0.tgz" + "version" "1.13.0" + dependencies: + "reusify" "^1.0.4" + +"faye-websocket@^0.11.3", "faye-websocket@^0.11.4": + "integrity" "sha512-CzbClwlXAuiRQAlUyfqPgvPoNKTckTPGfwZV4ZdAhVcP2lh9KUxJg2b5GkE7XbjKQ3YJnQ9z6D9ntLAlB+tP8g==" + "resolved" "https://registry.npmjs.org/faye-websocket/-/faye-websocket-0.11.4.tgz" + "version" "0.11.4" + dependencies: + "websocket-driver" ">=0.5.1" + +"figures@^3.2.0": + "integrity" "sha512-yaduQFRKLXYOGgEn6AZau90j3ggSOyiqXU0F9JZfeXYhNa+Jk4X+s45A2zg5jns87GAFa34BBm2kXw4XpNcbdg==" + "resolved" "https://registry.npmjs.org/figures/-/figures-3.2.0.tgz" + "version" "3.2.0" + dependencies: + "escape-string-regexp" "^1.0.5" + +"file-entry-cache@^6.0.1": + "integrity" "sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==" + "resolved" "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz" + "version" "6.0.1" + dependencies: + "flat-cache" "^3.0.4" + +"file-loader@*", "file-loader@^6.2.0": + "integrity" "sha512-qo3glqyTa61Ytg4u73GultjHGjdRyig3tG6lPtyX/jOEJvHif9uB0/OCI2Kif6ctF3caQTW2G5gym21oAsI4pw==" + "resolved" "https://registry.npmjs.org/file-loader/-/file-loader-6.2.0.tgz" + "version" "6.2.0" + dependencies: + "loader-utils" "^2.0.0" + "schema-utils" "^3.0.0" + +"fill-range@^4.0.0": + "integrity" "sha512-VcpLTWqWDiTerugjj8e3+esbg+skS3M9e54UuR3iCeIDMXCLTsAH8hTSzDQU/X6/6t3eYkOKoZSef2PlU6U1XQ==" + "resolved" "https://registry.npmjs.org/fill-range/-/fill-range-4.0.0.tgz" + "version" "4.0.0" + dependencies: + "extend-shallow" "^2.0.1" + "is-number" "^3.0.0" + "repeat-string" "^1.6.1" + "to-regex-range" "^2.1.0" + +"fill-range@^7.0.1": + "integrity" "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==" + "resolved" "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz" + "version" "7.0.1" + dependencies: + "to-regex-range" "^5.0.1" + +"finalhandler@1.2.0": + "integrity" "sha512-5uXcUVftlQMFnWC9qu/svkWv3GTd2PfUhK/3PLkYNAe7FbqJMt3515HaxE6eRL74GdsriiwujiawdaB1BpEISg==" + "resolved" "https://registry.npmjs.org/finalhandler/-/finalhandler-1.2.0.tgz" + "version" "1.2.0" + dependencies: + "debug" "2.6.9" + "encodeurl" "~1.0.2" + "escape-html" "~1.0.3" + "on-finished" "2.4.1" + "parseurl" "~1.3.3" + "statuses" "2.0.1" + "unpipe" "~1.0.0" + +"find-cache-dir@^2.0.0": + "integrity" "sha512-Tq6PixE0w/VMFfCgbONnkiQIVol/JJL7nRMi20fqzA4NRs9AfeqMGeRdPi3wIhYkxjeBaWh2rxwapn5Tu3IqOQ==" + "resolved" "https://registry.npmjs.org/find-cache-dir/-/find-cache-dir-2.1.0.tgz" + "version" "2.1.0" + dependencies: + "commondir" "^1.0.1" + "make-dir" "^2.0.0" + "pkg-dir" "^3.0.0" + +"find-cache-dir@^3.3.1": + "integrity" "sha512-wXZV5emFEjrridIgED11OoUKLxiYjAcqot/NJdAkOhlJ+vGzwhOAfcG5OX1jP+S0PcjEn8bdMJv+g2jwQ3Onig==" + "resolved" "https://registry.npmjs.org/find-cache-dir/-/find-cache-dir-3.3.2.tgz" + "version" "3.3.2" + dependencies: + "commondir" "^1.0.1" + "make-dir" "^3.0.2" + "pkg-dir" 
"^4.1.0" + +"find-up@^2.1.0": + "integrity" "sha1-RdG35QbHF93UgndaK3eSCjwMV6c=" + "resolved" "https://registry.npmjs.org/find-up/-/find-up-2.1.0.tgz" + "version" "2.1.0" + dependencies: + "locate-path" "^2.0.0" + +"find-up@^3.0.0": + "integrity" "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==" + "resolved" "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz" + "version" "3.0.0" + dependencies: + "locate-path" "^3.0.0" + +"find-up@^4.0.0": + "integrity" "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==" + "resolved" "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz" + "version" "4.1.0" + dependencies: + "locate-path" "^5.0.0" + "path-exists" "^4.0.0" + +"flat-cache@^3.0.4": + "integrity" "sha512-dm9s5Pw7Jc0GvMYbshN6zchCA9RgQlzzEZX3vylR9IqFfS8XciblUXOKfW6SiuJ0e13eDYZoZV5wdrev7P3Nwg==" + "resolved" "https://registry.npmjs.org/flat-cache/-/flat-cache-3.0.4.tgz" + "version" "3.0.4" + dependencies: + "flatted" "^3.1.0" + "rimraf" "^3.0.2" + +"flatted@^3.1.0": + "integrity" "sha512-JaTY/wtrcSyvXJl4IMFHPKyFur1sE9AUqc0QnhOaJ0CxHtAoIV8pYDzeEfAaNEtGkOfq4gr3LBFmdXW5mOQFnA==" + "resolved" "https://registry.npmjs.org/flatted/-/flatted-3.2.2.tgz" + "version" "3.2.2" + +"follow-redirects@^1.0.0", "follow-redirects@^1.10.0": + "integrity" "sha512-VQLG33o04KaQ8uYi2tVNbdrWp1QWxNNea+nmIB4EVM28v0hmP17z7aG1+wAkNzVq4KeXTq3221ye5qTJP91JwA==" + "resolved" "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.2.tgz" + "version" "1.15.2" + +"for-in@^1.0.2": + "integrity" "sha512-7EwmXrOjyL+ChxMhmG5lnW9MPt1aIeZEwKhQzoBUdTV0N3zuwWDZYVJatDvZ2OyzPUvdIAZDsCetk3coyMfcnQ==" + "resolved" "https://registry.npmjs.org/for-in/-/for-in-1.0.2.tgz" + "version" "1.0.2" + +"forever-agent@~0.6.1": + "integrity" "sha1-+8cfDEGt6zf5bFd60e1C2P2sypE=" + "resolved" "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz" + "version" "0.6.1" + +"form-data@~2.3.2": + "integrity" "sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ==" + "resolved" "https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz" + "version" "2.3.3" + dependencies: + "asynckit" "^0.4.0" + "combined-stream" "^1.0.6" + "mime-types" "^2.1.12" + +"forwarded@0.2.0": + "integrity" "sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==" + "resolved" "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz" + "version" "0.2.0" + +"fraction.js@^4.1.1": + "integrity" "sha512-MHOhvvxHTfRFpF1geTK9czMIZ6xclsEor2wkIGYYq+PxcQqT7vStJqjhe6S1TenZrMZzo+wlqOufBDVepUEgPg==" + "resolved" "https://registry.npmjs.org/fraction.js/-/fraction.js-4.1.1.tgz" + "version" "4.1.1" + +"fragment-cache@^0.2.1": + "integrity" "sha512-GMBAbW9antB8iZRHLoGw0b3HANt57diZYFO/HL1JGIC1MjKrdmhxvrJbupnVvpys0zsz7yBApXdQyfepKly2kA==" + "resolved" "https://registry.npmjs.org/fragment-cache/-/fragment-cache-0.2.1.tgz" + "version" "0.2.1" + dependencies: + "map-cache" "^0.2.2" + +"fresh@0.5.2": + "integrity" "sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q==" + "resolved" "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz" + "version" "0.5.2" + +"fs.realpath@^1.0.0": + "integrity" "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=" + "resolved" "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz" + "version" "1.0.0" + +"function-bind@^1.1.1": + "integrity" "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==" + 
"resolved" "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz" + "version" "1.1.1" + +"functional-red-black-tree@^1.0.1": + "integrity" "sha1-GwqzvVU7Kg1jmdKcDj6gslIHgyc=" + "resolved" "https://registry.npmjs.org/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz" + "version" "1.0.1" + +"functions-have-names@^1.2.2": + "integrity" "sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ==" + "resolved" "https://registry.npmjs.org/functions-have-names/-/functions-have-names-1.2.3.tgz" + "version" "1.2.3" + +"gensync@^1.0.0-beta.2": + "integrity" "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==" + "resolved" "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz" + "version" "1.0.0-beta.2" + +"get-caller-file@^2.0.1", "get-caller-file@^2.0.5": + "integrity" "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==" + "resolved" "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz" + "version" "2.0.5" + +"get-intrinsic@^1.0.2", "get-intrinsic@^1.1.0", "get-intrinsic@^1.1.1": + "integrity" "sha512-kWZrnVM42QCiEA2Ig1bG8zjoIMOgxWwYCEeNdwY6Tv/cOSeGpcoX4pXHfKUxNKVoArnrEr2e9srnAxxGIraS9Q==" + "resolved" "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.1.1.tgz" + "version" "1.1.1" + dependencies: + "function-bind" "^1.1.1" + "has" "^1.0.3" + "has-symbols" "^1.0.1" + +"get-stream@^4.0.0": + "integrity" "sha512-GMat4EJ5161kIy2HevLlr4luNjBgvmj413KaQA7jt4V8B4RDsfpHk7WQ9GVqfYyyx8OS/L66Kox+rJRNklLK7w==" + "resolved" "https://registry.npmjs.org/get-stream/-/get-stream-4.1.0.tgz" + "version" "4.1.0" + dependencies: + "pump" "^3.0.0" + +"get-symbol-description@^1.0.0": + "integrity" "sha512-2EmdH1YvIQiZpltCNgkuiUnyukzxM/R6NDJX31Ke3BG1Nq5b0S2PhX59UKi9vZpPDQVdqn+1IcaAwnzTT5vCjw==" + "resolved" "https://registry.npmjs.org/get-symbol-description/-/get-symbol-description-1.0.0.tgz" + "version" "1.0.0" + dependencies: + "call-bind" "^1.0.2" + "get-intrinsic" "^1.1.1" + +"get-value@^2.0.3", "get-value@^2.0.6": + "integrity" "sha512-Ln0UQDlxH1BapMu3GPtf7CuYNwRZf2gwCuPqbyG6pB8WfmFpzqcy4xtAaAMUhnNqjMKTiCPZG2oMT3YSx8U2NA==" + "resolved" "https://registry.npmjs.org/get-value/-/get-value-2.0.6.tgz" + "version" "2.0.6" + +"getpass@^0.1.1": + "integrity" "sha1-Xv+OPmhNVprkyysSgmBOi6YhSfo=" + "resolved" "https://registry.npmjs.org/getpass/-/getpass-0.1.7.tgz" + "version" "0.1.7" + dependencies: + "assert-plus" "^1.0.0" + +"glob-parent@^3.1.0": + "integrity" "sha512-E8Ak/2+dZY6fnzlR7+ueWvhsH1SjHr4jjss4YS/h4py44jY9MhK/VFdaZJAWDz6BbL21KeteKxFSFpq8OS5gVA==" + "resolved" "https://registry.npmjs.org/glob-parent/-/glob-parent-3.1.0.tgz" + "version" "3.1.0" + dependencies: + "is-glob" "^3.1.0" + "path-dirname" "^1.0.0" + +"glob-parent@^5.1.2", "glob-parent@~5.1.2": + "integrity" "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==" + "resolved" "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz" + "version" "5.1.2" + dependencies: + "is-glob" "^4.0.1" + +"glob-parent@^6.0.1": + "integrity" "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==" + "resolved" "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz" + "version" "6.0.2" + dependencies: + "is-glob" "^4.0.3" + +"glob-to-regexp@^0.4.1": + "integrity" "sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw==" + "resolved" 
"https://registry.npmjs.org/glob-to-regexp/-/glob-to-regexp-0.4.1.tgz" + "version" "0.4.1" + +"glob@^7.0.3", "glob@^7.1.3", "glob@^7.1.6", "glob@^7.1.7": + "integrity" "sha512-lmLf6gtyrPq8tTjSmrO94wBeQbFR3HbLHbuyD69wuyQkImp2hWqMGB47OX65FBkPffO641IP9jWa1z4ivqG26Q==" + "resolved" "https://registry.npmjs.org/glob/-/glob-7.2.0.tgz" + "version" "7.2.0" + dependencies: + "fs.realpath" "^1.0.0" + "inflight" "^1.0.4" + "inherits" "2" + "minimatch" "^3.0.4" + "once" "^1.3.0" + "path-is-absolute" "^1.0.0" + +"globals@^11.1.0": + "integrity" "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==" + "resolved" "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz" + "version" "11.12.0" + +"globals@^13.6.0": + "integrity" "sha512-08/xrJ7wQjK9kkkRoI3OFUBbLx4f+6x3SGwcPvQ0QH6goFDrOU2oyAWrmh3dJezu65buo+HBMzAMQy6rovVC3g==" + "resolved" "https://registry.npmjs.org/globals/-/globals-13.11.0.tgz" + "version" "13.11.0" + dependencies: + "type-fest" "^0.20.2" + +"globals@^13.9.0": + "integrity" "sha512-08/xrJ7wQjK9kkkRoI3OFUBbLx4f+6x3SGwcPvQ0QH6goFDrOU2oyAWrmh3dJezu65buo+HBMzAMQy6rovVC3g==" + "resolved" "https://registry.npmjs.org/globals/-/globals-13.11.0.tgz" + "version" "13.11.0" + dependencies: + "type-fest" "^0.20.2" + +"globby@^12.0.2": + "integrity" "sha512-wiSuFQLZ+urS9x2gGPl1H5drc5twabmm4m2gTR27XDFyjUHJUNsS8o/2aKyIF6IoBaR630atdher0XJ5g6OMmA==" + "resolved" "https://registry.npmjs.org/globby/-/globby-12.2.0.tgz" + "version" "12.2.0" + dependencies: + "array-union" "^3.0.1" + "dir-glob" "^3.0.1" + "fast-glob" "^3.2.7" + "ignore" "^5.1.9" + "merge2" "^1.4.1" + "slash" "^4.0.0" + +"globby@^6.1.0": + "integrity" "sha512-KVbFv2TQtbzCoxAnfD6JcHZTYCzyliEaaeM/gH8qQdkKr5s0OP9scEgvdcngyk7AVdY6YVW/TJHd+lQ/Df3Daw==" + "resolved" "https://registry.npmjs.org/globby/-/globby-6.1.0.tgz" + "version" "6.1.0" + dependencies: + "array-union" "^1.0.1" + "glob" "^7.0.3" + "object-assign" "^4.0.1" + "pify" "^2.0.0" + "pinkie-promise" "^2.0.0" + +"graceful-fs@^4.1.11", "graceful-fs@^4.1.2", "graceful-fs@^4.2.4": + "integrity" "sha512-qkIilPUYcNhJpd33n0GBXTB1MMPp14TxEsEs0pTrsSVucApsYzW5V+Q8Qxhik6KU3evy+qkAAowTByymK0avdg==" + "resolved" "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.8.tgz" + "version" "4.2.8" + +"gzip-size@^6.0.0": + "integrity" "sha512-ax7ZYomf6jqPTQ4+XCpUGyXKHk5WweS+e05MBO4/y3WJ5RkmPXNKvX+bx1behVILVwr6JSQvZAku021CHPXG3Q==" + "resolved" "https://registry.npmjs.org/gzip-size/-/gzip-size-6.0.0.tgz" + "version" "6.0.0" + dependencies: + "duplexer" "^0.1.2" + +"handle-thing@^2.0.0": + "integrity" "sha512-9Qn4yBxelxoh2Ow62nP+Ka/kMnOXRi8BXnRaUwezLNhqelnN49xKz4F/dPP8OYLxLxq6JDtZb2i9XznUQbNPTg==" + "resolved" "https://registry.npmjs.org/handle-thing/-/handle-thing-2.0.1.tgz" + "version" "2.0.1" + +"har-schema@^2.0.0": + "integrity" "sha1-qUwiJOvKwEeCoNkDVSHyRzW37JI=" + "resolved" "https://registry.npmjs.org/har-schema/-/har-schema-2.0.0.tgz" + "version" "2.0.0" + +"har-validator@~5.1.3": + "integrity" "sha512-nmT2T0lljbxdQZfspsno9hgrG3Uir6Ks5afism62poxqBM6sDnMEuPmzTq8XN0OEwqKLLdh1jQI3qyE66Nzb3w==" + "resolved" "https://registry.npmjs.org/har-validator/-/har-validator-5.1.5.tgz" + "version" "5.1.5" + dependencies: + "ajv" "^6.12.3" + "har-schema" "^2.0.0" + +"has-bigints@^1.0.1": + "integrity" "sha512-LSBS2LjbNBTf6287JEbEzvJgftkF5qFkmCo9hDRpAzKhUOlJ+hx8dd4USs00SgsUNwc4617J9ki5YtEClM2ffA==" + "resolved" "https://registry.npmjs.org/has-bigints/-/has-bigints-1.0.1.tgz" + "version" "1.0.1" + +"has-flag@^3.0.0": + "integrity" "sha1-tdRU3CGZriJWmfNGfloH87lVuv0=" + 
"resolved" "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz" + "version" "3.0.0" + +"has-flag@^4.0.0": + "integrity" "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==" + "resolved" "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz" + "version" "4.0.0" + +"has-symbols@^1.0.1", "has-symbols@^1.0.2": + "integrity" "sha512-chXa79rL/UC2KlX17jo3vRGz0azaWEx5tGqZg5pO3NUyEJVB17dMruQlzCCOfUvElghKcm5194+BCRvi2Rv/Gw==" + "resolved" "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.2.tgz" + "version" "1.0.2" + +"has-tostringtag@^1.0.0": + "integrity" "sha512-kFjcSNhnlGV1kyoGk7OXKSawH5JOb/LzUc5w9B02hOTO0dfFRjbHQKvg1d6cf3HbeUmtU9VbbV3qzZ2Teh97WQ==" + "resolved" "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.0.tgz" + "version" "1.0.0" + dependencies: + "has-symbols" "^1.0.2" + +"has-value@^0.3.1": + "integrity" "sha512-gpG936j8/MzaeID5Yif+577c17TxaDmhuyVgSwtnL/q8UUTySg8Mecb+8Cf1otgLoD7DDH75axp86ER7LFsf3Q==" + "resolved" "https://registry.npmjs.org/has-value/-/has-value-0.3.1.tgz" + "version" "0.3.1" + dependencies: + "get-value" "^2.0.3" + "has-values" "^0.1.4" + "isobject" "^2.0.0" + +"has-value@^1.0.0": + "integrity" "sha512-IBXk4GTsLYdQ7Rvt+GRBrFSVEkmuOUy4re0Xjd9kJSUQpnTrWR4/y9RpfexN9vkAPMFuQoeWKwqzPozRTlasGw==" + "resolved" "https://registry.npmjs.org/has-value/-/has-value-1.0.0.tgz" + "version" "1.0.0" + dependencies: + "get-value" "^2.0.6" + "has-values" "^1.0.0" + "isobject" "^3.0.0" + +"has-values@^0.1.4": + "integrity" "sha512-J8S0cEdWuQbqD9//tlZxiMuMNmxB8PlEwvYwuxsTmR1G5RXUePEX/SJn7aD0GMLieuZYSwNH0cQuJGwnYunXRQ==" + "resolved" "https://registry.npmjs.org/has-values/-/has-values-0.1.4.tgz" + "version" "0.1.4" + +"has-values@^1.0.0": + "integrity" "sha512-ODYZC64uqzmtfGMEAX/FvZiRyWLpAC3vYnNunURUnkGVTS+mI0smVsWaPydRBsE3g+ok7h960jChO8mFcWlHaQ==" + "resolved" "https://registry.npmjs.org/has-values/-/has-values-1.0.0.tgz" + "version" "1.0.0" + dependencies: + "is-number" "^3.0.0" + "kind-of" "^4.0.0" + +"has@^1.0.3": + "integrity" "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==" + "resolved" "https://registry.npmjs.org/has/-/has-1.0.3.tgz" + "version" "1.0.3" + dependencies: + "function-bind" "^1.1.1" + +"hash-sum@^2.0.0": + "integrity" "sha512-WdZTbAByD+pHfl/g9QSsBIIwy8IT+EsPiKDs0KNX+zSHhdDLFKdZu0BQHljvO+0QI/BasbMSUa8wYNCZTvhslg==" + "resolved" "https://registry.npmjs.org/hash-sum/-/hash-sum-2.0.0.tgz" + "version" "2.0.0" + +"he@^1.2.0": + "integrity" "sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==" + "resolved" "https://registry.npmjs.org/he/-/he-1.2.0.tgz" + "version" "1.2.0" + +"highlight.js@^10.7.1": + "integrity" "sha512-tzcUFauisWKNHaRkN4Wjl/ZA07gENAjFl3J/c480dprkGTg5EQstgaNFqBfUqCq54kZRIEcreTsAgF/m2quD7A==" + "resolved" "https://registry.npmjs.org/highlight.js/-/highlight.js-10.7.3.tgz" + "version" "10.7.3" + +"hpack.js@^2.1.6": + "integrity" "sha512-zJxVehUdMGIKsRaNt7apO2Gqp0BdqW5yaiGHXXmbpvxgBYVZnAql+BJb4RO5ad2MgpbZKn5G6nMnegrH1FcNYQ==" + "resolved" "https://registry.npmjs.org/hpack.js/-/hpack.js-2.1.6.tgz" + "version" "2.1.6" + dependencies: + "inherits" "^2.0.1" + "obuf" "^1.0.0" + "readable-stream" "^2.0.1" + "wbuf" "^1.1.0" + +"html-entities@^1.3.1": + "integrity" "sha512-8nxjcBcd8wovbeKx7h3wTji4e6+rhaVuPNpMqwWgnHh+N9ToqsCs6XztWRBPQ+UtzsoMAdKZtUENoVzU/EMtZA==" + "resolved" "https://registry.npmjs.org/html-entities/-/html-entities-1.4.0.tgz" + "version" "1.4.0" + +"html-minifier-terser@^6.0.2": + 
"integrity" "sha512-AgYO3UGhMYQx2S/FBJT3EM0ZYcKmH6m9XL9c1v77BeK/tYJxGPxT1/AtsdUi4FcP8kZGmqqnItCcjFPcX9hk6A==" + "resolved" "https://registry.npmjs.org/html-minifier-terser/-/html-minifier-terser-6.0.2.tgz" + "version" "6.0.2" + dependencies: + "camel-case" "^4.1.2" + "clean-css" "^5.1.5" + "commander" "^8.1.0" + "he" "^1.2.0" + "param-case" "^3.0.4" + "relateurl" "^0.2.7" + "terser" "^5.7.2" + +"html-tags@^3.1.0": + "integrity" "sha512-1qYz89hW3lFDEazhjW0yVAV87lw8lVkrJocr72XmBkMKsoSVJCQx3W8BXsC7hO2qAt8BoVjYjtAcZ9perqGnNg==" + "resolved" "https://registry.npmjs.org/html-tags/-/html-tags-3.1.0.tgz" + "version" "3.1.0" + +"html-webpack-plugin@^5.0.0": + "integrity" "sha512-cSUdckNOIqKc0nOrCJG7zkvzEIUcXjzEiVbKdEdIzW3BD5T4xPK6boV1mrTrPDZiL+aAr/j45eqbNL1akU2ZRA==" + "resolved" "https://registry.npmjs.org/html-webpack-plugin/-/html-webpack-plugin-5.4.0.tgz" + "version" "5.4.0" + dependencies: + "@types/html-minifier-terser" "^6.0.0" + "html-minifier-terser" "^6.0.2" + "lodash" "^4.17.21" + "pretty-error" "^3.0.4" + "tapable" "^2.0.0" + +"html-webpack-tags-plugin@^3.0.0": + "integrity" "sha512-P/cfYDqXtgXoGFUv8zi/er8XFU5ztQmqp+VwmnhZEH8L35Q2YRKIb9P+dRiykLIDZF0NMt+l/uj+jmttKwmLvg==" + "resolved" "https://registry.npmjs.org/html-webpack-tags-plugin/-/html-webpack-tags-plugin-3.0.1.tgz" + "version" "3.0.1" + dependencies: + "glob" "^7.1.6" + "minimatch" "^3.0.4" + "slash" "^3.0.0" + +"htmlparser2@^6.1.0": + "integrity" "sha512-gyyPk6rgonLFEDGoeRgQNaEUvdJ4ktTmmUh/h2t7s+M8oPpIPxgNACWa+6ESR57kXstwqPiCut0V8NRpcwgU7A==" + "resolved" "https://registry.npmjs.org/htmlparser2/-/htmlparser2-6.1.0.tgz" + "version" "6.1.0" + dependencies: + "domelementtype" "^2.0.1" + "domhandler" "^4.0.0" + "domutils" "^2.5.2" + "entities" "^2.0.0" + +"htmlparser2@^7.1.2": + "integrity" "sha512-d6cqsbJba2nRdg8WW2okyD4ceonFHn9jLFxhwlNcLhQWcFPdxXeJulgOLjLKtAK9T6ahd+GQNZwG9fjmGW7lyg==" + "resolved" "https://registry.npmjs.org/htmlparser2/-/htmlparser2-7.1.2.tgz" + "version" "7.1.2" + dependencies: + "domelementtype" "^2.0.1" + "domhandler" "^4.2.2" + "domutils" "^2.8.0" + "entities" "^3.0.1" + +"http-deceiver@^1.2.7": + "integrity" "sha512-LmpOGxTfbpgtGVxJrj5k7asXHCgNZp5nLfp+hWc8QQRqtb7fUy6kRY3BO1h9ddF6yIPYUARgxGOwB42DnxIaNw==" + "resolved" "https://registry.npmjs.org/http-deceiver/-/http-deceiver-1.2.7.tgz" + "version" "1.2.7" + +"http-errors@~1.6.2": + "integrity" "sha512-lks+lVC8dgGyh97jxvxeYTWQFvh4uw4yC12gVl63Cg30sjPX4wuGcdkICVXDAESr6OJGjqGA8Iz5mkeN6zlD7A==" + "resolved" "https://registry.npmjs.org/http-errors/-/http-errors-1.6.3.tgz" + "version" "1.6.3" + dependencies: + "depd" "~1.1.2" + "inherits" "2.0.3" + "setprototypeof" "1.1.0" + "statuses" ">= 1.4.0 < 2" + +"http-errors@1.7.2": + "integrity" "sha512-uUQBt3H/cSIVfch6i1EuPNy/YsRSOUBXTVfZ+yR7Zjez3qjBz6i9+i4zjNaoqcoFVI4lQJ5plg63TvGfRSDCRg==" + "resolved" "https://registry.npmjs.org/http-errors/-/http-errors-1.7.2.tgz" + "version" "1.7.2" + dependencies: + "depd" "~1.1.2" + "inherits" "2.0.3" + "setprototypeof" "1.1.1" + "statuses" ">= 1.5.0 < 2" + "toidentifier" "1.0.0" + +"http-errors@2.0.0": + "integrity" "sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==" + "resolved" "https://registry.npmjs.org/http-errors/-/http-errors-2.0.0.tgz" + "version" "2.0.0" + dependencies: + "depd" "2.0.0" + "inherits" "2.0.4" + "setprototypeof" "1.2.0" + "statuses" "2.0.1" + "toidentifier" "1.0.1" + +"http-parser-js@>=0.5.1": + "integrity" "sha512-SGeBX54F94Wgu5RH3X5jsDtf4eHyRogWX1XGT3b4HuW3tQPM4AaBzoUji/4AAJNXCEOWZ5O0DgZmJw1947gD5Q==" + 
"resolved" "https://registry.npmjs.org/http-parser-js/-/http-parser-js-0.5.8.tgz" + "version" "0.5.8" + +"http-proxy-middleware@0.19.1": + "integrity" "sha512-yHYTgWMQO8VvwNS22eLLloAkvungsKdKTLO8AJlftYIKNfJr3GK3zK0ZCfzDDGUBttdGc8xFy1mCitvNKQtC3Q==" + "resolved" "https://registry.npmjs.org/http-proxy-middleware/-/http-proxy-middleware-0.19.1.tgz" + "version" "0.19.1" + dependencies: + "http-proxy" "^1.17.0" + "is-glob" "^4.0.0" + "lodash" "^4.17.11" + "micromatch" "^3.1.10" + +"http-proxy@^1.17.0": + "integrity" "sha512-7mz/721AbnJwIVbnaSv1Cz3Am0ZLT/UBwkC92VlxhXv/k/BBQfM2fXElQNC27BVGr0uwUpplYPQM9LnaBMR5NQ==" + "resolved" "https://registry.npmjs.org/http-proxy/-/http-proxy-1.18.1.tgz" + "version" "1.18.1" + dependencies: + "eventemitter3" "^4.0.0" + "follow-redirects" "^1.0.0" + "requires-port" "^1.0.0" + +"http-signature@~1.2.0": + "integrity" "sha1-muzZJRFHcvPZW2WmCruPfBj7rOE=" + "resolved" "https://registry.npmjs.org/http-signature/-/http-signature-1.2.0.tgz" + "version" "1.2.0" + dependencies: + "assert-plus" "^1.0.0" + "jsprim" "^1.2.2" + "sshpk" "^1.7.0" + +"iconv-lite@0.4.24": + "integrity" "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==" + "resolved" "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz" + "version" "0.4.24" + dependencies: + "safer-buffer" ">= 2.1.2 < 3" + +"icss-utils@^5.0.0", "icss-utils@^5.1.0": + "integrity" "sha512-soFhflCVWLfRNOPU3iv5Z9VUdT44xFRbzjLsEzSr5AQmgqPMTHdU3PMT1Cf1ssx8fLNJDA1juftYl+PUcv3MqA==" + "resolved" "https://registry.npmjs.org/icss-utils/-/icss-utils-5.1.0.tgz" + "version" "5.1.0" + +"ignore@^4.0.6": + "integrity" "sha512-cyFDKrqc/YdcWFniJhzI42+AzS+gNwmUzOSFcRCQYwySuBBBy/KjuxWLZ/FHEH6Moq1NizMOBWyTcv8O4OZIMg==" + "resolved" "https://registry.npmjs.org/ignore/-/ignore-4.0.6.tgz" + "version" "4.0.6" + +"ignore@^5.1.1": + "integrity" "sha512-BMpfD7PpiETpBl/A6S498BaIJ6Y/ABT93ETbby2fP00v4EbvPBXWEoaR1UBPKs3iR53pJY7EtZk5KACI57i1Uw==" + "resolved" "https://registry.npmjs.org/ignore/-/ignore-5.1.8.tgz" + "version" "5.1.8" + +"ignore@^5.1.9": + "integrity" "sha512-CmxgYGiEPCLhfLnpPp1MoRmifwEIOgjcHXxOBjv7mY96c+eWScsOP9c112ZyLdWHi0FxHjI+4uVhKYp/gcdRmQ==" + "resolved" "https://registry.npmjs.org/ignore/-/ignore-5.2.0.tgz" + "version" "5.2.0" + +"image-size@~0.5.0": + "integrity" "sha1-Cd/Uq50g4p6xw+gLiZA3jfnjy5w=" + "resolved" "https://registry.npmjs.org/image-size/-/image-size-0.5.5.tgz" + "version" "0.5.5" + +"import-fresh@^3.0.0", "import-fresh@^3.2.1": + "integrity" "sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==" + "resolved" "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.0.tgz" + "version" "3.3.0" + dependencies: + "parent-module" "^1.0.0" + "resolve-from" "^4.0.0" + +"import-local@^2.0.0": + "integrity" "sha512-b6s04m3O+s3CGSbqDIyP4R6aAwAeYlVq9+WUWep6iHa8ETRf9yei1U48C5MmfJmV9AiLYYBKPMq/W+/WRpQmCQ==" + "resolved" "https://registry.npmjs.org/import-local/-/import-local-2.0.0.tgz" + "version" "2.0.0" + dependencies: + "pkg-dir" "^3.0.0" + "resolve-cwd" "^2.0.0" + +"imurmurhash@^0.1.4": + "integrity" "sha1-khi5srkoojixPcT7a21XbyMUU+o=" + "resolved" "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz" + "version" "0.1.4" + +"inflight@^1.0.4": + "integrity" "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=" + "resolved" "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz" + "version" "1.0.6" + dependencies: + "once" "^1.3.0" + "wrappy" "1" + +"inherits@^2.0.1", "inherits@^2.0.3", "inherits@^2.0.4", "inherits@~2.0.3", "inherits@2", 
"inherits@2.0.4": + "integrity" "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" + "resolved" "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz" + "version" "2.0.4" + +"inherits@2.0.3": + "integrity" "sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=" + "resolved" "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz" + "version" "2.0.3" + +"internal-ip@^4.3.0": + "integrity" "sha512-S1zBo1D6zcsyuC6PMmY5+55YMILQ9av8lotMx447Bq6SAgo/sDK6y6uUKmuYhW7eacnIhFfsPmCNYdDzsnnDCg==" + "resolved" "https://registry.npmjs.org/internal-ip/-/internal-ip-4.3.0.tgz" + "version" "4.3.0" + dependencies: + "default-gateway" "^4.2.0" + "ipaddr.js" "^1.9.0" + +"internal-slot@^1.0.3": + "integrity" "sha512-O0DB1JC/sPyZl7cIo78n5dR7eUSwwpYPiXRhTzNxZVAMUuB8vlnRFyLxdrVToks6XPLVnFfbzaVd5WLjhgg+vA==" + "resolved" "https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.3.tgz" + "version" "1.0.3" + dependencies: + "get-intrinsic" "^1.1.0" + "has" "^1.0.3" + "side-channel" "^1.0.4" + +"ip-regex@^2.1.0": + "integrity" "sha512-58yWmlHpp7VYfcdTwMTvwMmqx/Elfxjd9RXTDyMsbL7lLWmhMylLEqiYVLKuLzOZqVgiWXD9MfR62Vv89VRxkw==" + "resolved" "https://registry.npmjs.org/ip-regex/-/ip-regex-2.1.0.tgz" + "version" "2.1.0" + +"ip@^1.1.0", "ip@^1.1.5": + "integrity" "sha512-PuExPYUiu6qMBQb4l06ecm6T6ujzhmh+MeJcW9wa89PoAz5pvd4zPgN5WJV104mb6S2T1AwNIAaB70JNrLQWhg==" + "resolved" "https://registry.npmjs.org/ip/-/ip-1.1.8.tgz" + "version" "1.1.8" + +"ipaddr.js@^1.9.0", "ipaddr.js@1.9.1": + "integrity" "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==" + "resolved" "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz" + "version" "1.9.1" + +"is-absolute-url@^3.0.3": + "integrity" "sha512-opmNIX7uFnS96NtPmhWQgQx6/NYFgsUXYMllcfzwWKUMwfo8kku1TvE6hkNcH+Q1ts5cMVrsY7j0bxXQDciu9Q==" + "resolved" "https://registry.npmjs.org/is-absolute-url/-/is-absolute-url-3.0.3.tgz" + "version" "3.0.3" + +"is-accessor-descriptor@^0.1.6": + "integrity" "sha512-e1BM1qnDbMRG3ll2U9dSK0UMHuWOs3pY3AtcFsmvwPtKL3MML/Q86i+GilLfvqEs4GW+ExB91tQ3Ig9noDIZ+A==" + "resolved" "https://registry.npmjs.org/is-accessor-descriptor/-/is-accessor-descriptor-0.1.6.tgz" + "version" "0.1.6" + dependencies: + "kind-of" "^3.0.2" + +"is-accessor-descriptor@^1.0.0": + "integrity" "sha512-m5hnHTkcVsPfqx3AKlyttIPb7J+XykHvJP2B9bZDjlhLIoEq4XoK64Vg7boZlVWYK6LUY94dYPEE7Lh0ZkZKcQ==" + "resolved" "https://registry.npmjs.org/is-accessor-descriptor/-/is-accessor-descriptor-1.0.0.tgz" + "version" "1.0.0" + dependencies: + "kind-of" "^6.0.0" + +"is-arguments@^1.0.4": + "integrity" "sha512-8Q7EARjzEnKpt/PCD7e1cgUS0a6X8u5tdSiMqXhojOdoV9TsMsiO+9VLC5vAmO8N7/GmXn7yjR8qnA6bVAEzfA==" + "resolved" "https://registry.npmjs.org/is-arguments/-/is-arguments-1.1.1.tgz" + "version" "1.1.1" + dependencies: + "call-bind" "^1.0.2" + "has-tostringtag" "^1.0.0" + +"is-arrayish@^0.2.1": + "integrity" "sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0=" + "resolved" "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz" + "version" "0.2.1" + +"is-bigint@^1.0.1": + "integrity" "sha512-zB9CruMamjym81i2JZ3UMn54PKGsQzsJeo6xvN3HJJ4CAsQNB6iRutp2To77OfCNuoxspsIhzaPoO1zyCEhFOg==" + "resolved" "https://registry.npmjs.org/is-bigint/-/is-bigint-1.0.4.tgz" + "version" "1.0.4" + dependencies: + "has-bigints" "^1.0.1" + +"is-binary-path@^1.0.0": + "integrity" "sha512-9fRVlXc0uCxEDj1nQzaWONSpbTfx0FmJfzHF7pwlI8DkWGoHBBea4Pg5Ky0ojwwxQmnSifgbKkI06Qv0Ljgj+Q==" + "resolved" "https://registry.npmjs.org/is-binary-path/-/is-binary-path-1.0.1.tgz" + "version" 
"1.0.1" + dependencies: + "binary-extensions" "^1.0.0" + +"is-binary-path@~2.1.0": + "integrity" "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==" + "resolved" "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz" + "version" "2.1.0" + dependencies: + "binary-extensions" "^2.0.0" + +"is-boolean-object@^1.1.0": + "integrity" "sha512-gDYaKHJmnj4aWxyj6YHyXVpdQawtVLHU5cb+eztPGczf6cjuTdwve5ZIEfgXqH4e57An1D1AKf8CZ3kYrQRqYA==" + "resolved" "https://registry.npmjs.org/is-boolean-object/-/is-boolean-object-1.1.2.tgz" + "version" "1.1.2" + dependencies: + "call-bind" "^1.0.2" + "has-tostringtag" "^1.0.0" + +"is-buffer@^1.1.5": + "integrity" "sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w==" + "resolved" "https://registry.npmjs.org/is-buffer/-/is-buffer-1.1.6.tgz" + "version" "1.1.6" + +"is-callable@^1.1.4", "is-callable@^1.2.4": + "integrity" "sha512-nsuwtxZfMX67Oryl9LCQ+upnC0Z0BgpwntpS89m1H/TLF0zNfzfLMV/9Wa/6MZsj0acpEjAO0KF1xT6ZdLl95w==" + "resolved" "https://registry.npmjs.org/is-callable/-/is-callable-1.2.4.tgz" + "version" "1.2.4" + +"is-core-module@^2.2.0", "is-core-module@^2.7.0": + "integrity" "sha512-vd15qHsaqrRL7dtH6QNuy0ndJmRDrS9HAM1CAiSifNUFv4x1a0CCVsj18hJ1mShxIG6T2i1sO78MkP56r0nYRw==" + "resolved" "https://registry.npmjs.org/is-core-module/-/is-core-module-2.8.0.tgz" + "version" "2.8.0" + dependencies: + "has" "^1.0.3" + +"is-data-descriptor@^0.1.4": + "integrity" "sha512-+w9D5ulSoBNlmw9OHn3U2v51SyoCd0he+bB3xMl62oijhrspxowjU+AIcDY0N3iEJbUEkB15IlMASQsxYigvXg==" + "resolved" "https://registry.npmjs.org/is-data-descriptor/-/is-data-descriptor-0.1.4.tgz" + "version" "0.1.4" + dependencies: + "kind-of" "^3.0.2" + +"is-data-descriptor@^1.0.0": + "integrity" "sha512-jbRXy1FmtAoCjQkVmIVYwuuqDFUbaOeDjmed1tOGPrsMhtJA4rD9tkgA0F1qJ3gRFRXcHYVkdeaP50Q5rE/jLQ==" + "resolved" "https://registry.npmjs.org/is-data-descriptor/-/is-data-descriptor-1.0.0.tgz" + "version" "1.0.0" + dependencies: + "kind-of" "^6.0.0" + +"is-date-object@^1.0.1": + "integrity" "sha512-9YQaSxsAiSwcvS33MBk3wTCVnWK+HhF8VZR2jRxehM16QcVOdHqPn4VPHmRK4lSr38n9JriurInLcP90xsYNfQ==" + "resolved" "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.5.tgz" + "version" "1.0.5" + dependencies: + "has-tostringtag" "^1.0.0" + +"is-descriptor@^0.1.0": + "integrity" "sha512-avDYr0SB3DwO9zsMov0gKCESFYqCnE4hq/4z3TdUlukEy5t9C0YRq7HLrsN52NAcqXKaepeCD0n+B0arnVG3Hg==" + "resolved" "https://registry.npmjs.org/is-descriptor/-/is-descriptor-0.1.6.tgz" + "version" "0.1.6" + dependencies: + "is-accessor-descriptor" "^0.1.6" + "is-data-descriptor" "^0.1.4" + "kind-of" "^5.0.0" + +"is-descriptor@^1.0.0": + "integrity" "sha512-2eis5WqQGV7peooDyLmNEPUrps9+SXX5c9pL3xEB+4e9HnGuDa7mB7kHxHw4CbqS9k1T2hOH3miL8n8WtiYVtg==" + "resolved" "https://registry.npmjs.org/is-descriptor/-/is-descriptor-1.0.2.tgz" + "version" "1.0.2" + dependencies: + "is-accessor-descriptor" "^1.0.0" + "is-data-descriptor" "^1.0.0" + "kind-of" "^6.0.2" + +"is-descriptor@^1.0.2": + "integrity" "sha512-2eis5WqQGV7peooDyLmNEPUrps9+SXX5c9pL3xEB+4e9HnGuDa7mB7kHxHw4CbqS9k1T2hOH3miL8n8WtiYVtg==" + "resolved" "https://registry.npmjs.org/is-descriptor/-/is-descriptor-1.0.2.tgz" + "version" "1.0.2" + dependencies: + "is-accessor-descriptor" "^1.0.0" + "is-data-descriptor" "^1.0.0" + "kind-of" "^6.0.2" + +"is-extendable@^0.1.0", "is-extendable@^0.1.1": + "integrity" "sha512-5BMULNob1vgFX6EjQw5izWDxrecWK9AM72rugNr0TFldMOi0fj6Jk+zeKIt0xGj4cEfQIJth4w3OKWOJ4f+AFw==" + "resolved" 
"https://registry.npmjs.org/is-extendable/-/is-extendable-0.1.1.tgz" + "version" "0.1.1" + +"is-extendable@^1.0.1": + "integrity" "sha512-arnXMxT1hhoKo9k1LZdmlNyJdDDfy2v0fXjFlmok4+i8ul/6WlbVge9bhM74OpNPQPMGUToDtz+KXa1PneJxOA==" + "resolved" "https://registry.npmjs.org/is-extendable/-/is-extendable-1.0.1.tgz" + "version" "1.0.1" + dependencies: + "is-plain-object" "^2.0.4" + +"is-extglob@^2.1.0", "is-extglob@^2.1.1": + "integrity" "sha1-qIwCU1eR8C7TfHahueqXc8gz+MI=" + "resolved" "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz" + "version" "2.1.1" + +"is-fullwidth-code-point@^2.0.0": + "integrity" "sha512-VHskAKYM8RfSFXwee5t5cbN5PZeq1Wrh6qd5bkyiXIf6UQcN6w/A0eXM9r6t8d+GYOh+o6ZhiEnb88LN/Y8m2w==" + "resolved" "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz" + "version" "2.0.0" + +"is-fullwidth-code-point@^3.0.0": + "integrity" "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==" + "resolved" "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz" + "version" "3.0.0" + +"is-glob@^3.1.0": + "integrity" "sha512-UFpDDrPgM6qpnFNI+rh/p3bUaq9hKLZN8bMUWzxmcnZVS3omf4IPK+BrewlnWjO1WmUsMYuSjKh4UJuV4+Lqmw==" + "resolved" "https://registry.npmjs.org/is-glob/-/is-glob-3.1.0.tgz" + "version" "3.1.0" + dependencies: + "is-extglob" "^2.1.0" + +"is-glob@^4.0.0", "is-glob@^4.0.1", "is-glob@^4.0.3", "is-glob@~4.0.1": + "integrity" "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==" + "resolved" "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz" + "version" "4.0.3" + dependencies: + "is-extglob" "^2.1.1" + +"is-negative-zero@^2.0.1": + "integrity" "sha512-2z6JzQvZRa9A2Y7xC6dQQm4FSTSTNWjKIYYTt4246eMTJmIo0Q+ZyOsU66X8lxK1AbB92dFeglPLrhwpeRKO6w==" + "resolved" "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.1.tgz" + "version" "2.0.1" + +"is-number-object@^1.0.4": + "integrity" "sha512-bEVOqiRcvo3zO1+G2lVMy+gkkEm9Yh7cDMRusKKu5ZJKPUYSJwICTKZrNKHA2EbSP0Tu0+6B/emsYNHZyn6K8g==" + "resolved" "https://registry.npmjs.org/is-number-object/-/is-number-object-1.0.6.tgz" + "version" "1.0.6" + dependencies: + "has-tostringtag" "^1.0.0" + +"is-number@^3.0.0": + "integrity" "sha512-4cboCqIpliH+mAvFNegjZQ4kgKc3ZUhQVr3HvWbSh5q3WH2v82ct+T2Y1hdU5Gdtorx/cLifQjqCbL7bpznLTg==" + "resolved" "https://registry.npmjs.org/is-number/-/is-number-3.0.0.tgz" + "version" "3.0.0" + dependencies: + "kind-of" "^3.0.2" + +"is-number@^7.0.0": + "integrity" "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==" + "resolved" "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz" + "version" "7.0.0" + +"is-path-cwd@^2.0.0": + "integrity" "sha512-w942bTcih8fdJPJmQHFzkS76NEP8Kzzvmw92cXsazb8intwLqPibPPdXf4ANdKV3rYMuuQYGIWtvz9JilB3NFQ==" + "resolved" "https://registry.npmjs.org/is-path-cwd/-/is-path-cwd-2.2.0.tgz" + "version" "2.2.0" + +"is-path-in-cwd@^2.0.0": + "integrity" "sha512-rNocXHgipO+rvnP6dk3zI20RpOtrAM/kzbB258Uw5BWr3TpXi861yzjo16Dn4hUox07iw5AyeMLHWsujkjzvRQ==" + "resolved" "https://registry.npmjs.org/is-path-in-cwd/-/is-path-in-cwd-2.1.0.tgz" + "version" "2.1.0" + dependencies: + "is-path-inside" "^2.1.0" + +"is-path-inside@^2.1.0": + "integrity" "sha512-wiyhTzfDWsvwAW53OBWF5zuvaOGlZ6PwYxAbPVDhpm+gM09xKQGjBq/8uYN12aDvMxnAnq3dxTyoSoRNmg5YFg==" + "resolved" "https://registry.npmjs.org/is-path-inside/-/is-path-inside-2.1.0.tgz" + "version" "2.1.0" + dependencies: + "path-is-inside" "^1.0.2" + 
+"is-plain-object@^2.0.3", "is-plain-object@^2.0.4": + "integrity" "sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og==" + "resolved" "https://registry.npmjs.org/is-plain-object/-/is-plain-object-2.0.4.tgz" + "version" "2.0.4" + dependencies: + "isobject" "^3.0.1" + +"is-plain-object@3.0.1": + "integrity" "sha512-Xnpx182SBMrr/aBik8y+GuR4U1L9FqMSojwDQwPMmxyC6bvEqly9UBCxhauBF5vNh2gwWJNX6oDV7O+OM4z34g==" + "resolved" "https://registry.npmjs.org/is-plain-object/-/is-plain-object-3.0.1.tgz" + "version" "3.0.1" + +"is-regex@^1.0.4", "is-regex@^1.1.4": + "integrity" "sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg==" + "resolved" "https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz" + "version" "1.1.4" + dependencies: + "call-bind" "^1.0.2" + "has-tostringtag" "^1.0.0" + +"is-resolvable@^1.1.0": + "integrity" "sha512-qgDYXFSR5WvEfuS5dMj6oTMEbrrSaM0CrFk2Yiq/gXnBvD9pMa2jGXxyhGLfvhZpuMZe18CJpFxAt3CRs42NMg==" + "resolved" "https://registry.npmjs.org/is-resolvable/-/is-resolvable-1.1.0.tgz" + "version" "1.1.0" + +"is-shared-array-buffer@^1.0.1": + "integrity" "sha512-IU0NmyknYZN0rChcKhRO1X8LYz5Isj/Fsqh8NJOSf+N/hCOTwy29F32Ik7a+QszE63IdvmwdTPDd6cZ5pg4cwA==" + "resolved" "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.1.tgz" + "version" "1.0.1" + +"is-stream@^1.1.0": + "integrity" "sha512-uQPm8kcs47jx38atAcWTVxyltQYoPT68y9aWYdV6yWXSyW8mzSat0TL6CiWdZeCdF3KrAvpVtnHbTv4RN+rqdQ==" + "resolved" "https://registry.npmjs.org/is-stream/-/is-stream-1.1.0.tgz" + "version" "1.1.0" + +"is-string@^1.0.5", "is-string@^1.0.7": + "integrity" "sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg==" + "resolved" "https://registry.npmjs.org/is-string/-/is-string-1.0.7.tgz" + "version" "1.0.7" + dependencies: + "has-tostringtag" "^1.0.0" + +"is-symbol@^1.0.2", "is-symbol@^1.0.3": + "integrity" "sha512-C/CPBqKWnvdcxqIARxyOh4v1UUEOCHpgDa0WYgpKDFMszcrPcffg5uhwSgPCLD2WWxmq6isisz87tzT01tuGhg==" + "resolved" "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.4.tgz" + "version" "1.0.4" + dependencies: + "has-symbols" "^1.0.2" + +"is-typedarray@~1.0.0": + "integrity" "sha1-5HnICFjfDBsR3dppQPlgEfzaSpo=" + "resolved" "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz" + "version" "1.0.0" + +"is-weakref@^1.0.1": + "integrity" "sha512-b2jKc2pQZjaeFYWEf7ScFj+Be1I+PXmlu572Q8coTXZ+LD/QQZ7ShPMst8h16riVgyXTQwUsFEl74mDvc/3MHQ==" + "resolved" "https://registry.npmjs.org/is-weakref/-/is-weakref-1.0.1.tgz" + "version" "1.0.1" + dependencies: + "call-bind" "^1.0.0" + +"is-windows@^1.0.2": + "integrity" "sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA==" + "resolved" "https://registry.npmjs.org/is-windows/-/is-windows-1.0.2.tgz" + "version" "1.0.2" + +"is-wsl@^1.1.0": + "integrity" "sha512-gfygJYZ2gLTDlmbWMI0CE2MwnFzSN/2SZfkMlItC4K/JBlsWVDB0bO6XhqcY13YXE7iMcAJnzTCJjPiTeJJ0Mw==" + "resolved" "https://registry.npmjs.org/is-wsl/-/is-wsl-1.1.0.tgz" + "version" "1.1.0" + +"isarray@~1.0.0", "isarray@1.0.0": + "integrity" "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==" + "resolved" "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz" + "version" "1.0.0" + +"isexe@^2.0.0": + "integrity" "sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=" + "resolved" "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz" + "version" "2.0.0" + +"isobject@^2.0.0": + "integrity" 
"sha512-+OUdGJlgjOBZDfxnDjYYG6zp487z0JGNQq3cYQYg5f5hKR+syHMsaztzGeml/4kGG55CSpKSpWTY+jYGgsHLgA==" + "resolved" "https://registry.npmjs.org/isobject/-/isobject-2.1.0.tgz" + "version" "2.1.0" + dependencies: + "isarray" "1.0.0" + +"isobject@^3.0.0", "isobject@^3.0.1": + "integrity" "sha1-TkMekrEalzFjaqH5yNHMvP2reN8=" + "resolved" "https://registry.npmjs.org/isobject/-/isobject-3.0.1.tgz" + "version" "3.0.1" + +"isstream@~0.1.2": + "integrity" "sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo=" + "resolved" "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz" + "version" "0.1.2" + +"javascript-stringify@^2.0.1": + "integrity" "sha512-JVAfqNPTvNq3sB/VHQJAFxN/sPgKnsKrCwyRt15zwNCdrMMJDdcEOdubuy+DuJYYdm0ox1J4uzEuYKkN+9yhVg==" + "resolved" "https://registry.npmjs.org/javascript-stringify/-/javascript-stringify-2.1.0.tgz" + "version" "2.1.0" + +"jest-worker@^27.0.2", "jest-worker@^27.0.6": + "integrity" "sha512-ks3WCzsiZaOPJl/oMsDjaf0TRiSv7ctNgs0FqRr2nARsovz6AWWy4oLElwcquGSz692DzgZQrCLScPNs5YlC4g==" + "resolved" "https://registry.npmjs.org/jest-worker/-/jest-worker-27.3.1.tgz" + "version" "27.3.1" + dependencies: + "@types/node" "*" + "merge-stream" "^2.0.0" + "supports-color" "^8.0.0" + +"joi@17.3.0": + "integrity" "sha512-Qh5gdU6niuYbUIUV5ejbsMiiFmBdw8Kcp8Buj2JntszCkCfxJ9Cz76OtHxOZMPXrt5810iDIXs+n1nNVoquHgg==" + "resolved" "https://registry.npmjs.org/joi/-/joi-17.3.0.tgz" + "version" "17.3.0" + dependencies: + "@hapi/hoek" "^9.0.0" + "@hapi/topo" "^5.0.0" + "@sideway/address" "^4.1.0" + "@sideway/formula" "^3.0.0" + "@sideway/pinpoint" "^2.0.0" + +"js-tokens@^3.0.0 || ^4.0.0", "js-tokens@^4.0.0": + "integrity" "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==" + "resolved" "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz" + "version" "4.0.0" + +"js-yaml@^3.13.1": + "integrity" "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==" + "resolved" "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz" + "version" "3.14.1" + dependencies: + "argparse" "^1.0.7" + "esprima" "^4.0.0" + +"jsbn@~0.1.0": + "integrity" "sha1-peZUwuWi3rXyAdls77yoDA7y9RM=" + "resolved" "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz" + "version" "0.1.1" + +"jsesc@^2.5.1": + "integrity" "sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA==" + "resolved" "https://registry.npmjs.org/jsesc/-/jsesc-2.5.2.tgz" + "version" "2.5.2" + +"jsesc@~0.5.0": + "integrity" "sha1-597mbjXW/Bb3EP6R1c9p9w8IkR0=" + "resolved" "https://registry.npmjs.org/jsesc/-/jsesc-0.5.0.tgz" + "version" "0.5.0" + +"json-parse-better-errors@^1.0.2": + "integrity" "sha512-mrqyZKfX5EhL7hvqcV6WG1yYjnjeuYDzDhhcAAUrq8Po85NBQBJP+ZDUT75qZQ98IkUoBqdkExkukOU7Ts2wrw==" + "resolved" "https://registry.npmjs.org/json-parse-better-errors/-/json-parse-better-errors-1.0.2.tgz" + "version" "1.0.2" + +"json-parse-even-better-errors@^2.3.0": + "integrity" "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==" + "resolved" "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz" + "version" "2.3.1" + +"json-schema-traverse@^0.4.1": + "integrity" "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==" + "resolved" "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz" + "version" "0.4.1" + +"json-schema-traverse@^1.0.0": + "integrity" 
"sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==" + "resolved" "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz" + "version" "1.0.0" + +"json-schema@0.4.0": + "integrity" "sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA==" + "resolved" "https://registry.npmjs.org/json-schema/-/json-schema-0.4.0.tgz" + "version" "0.4.0" + +"json-stable-stringify-without-jsonify@^1.0.1": + "integrity" "sha1-nbe1lJatPzz+8wp1FC0tkwrXJlE=" + "resolved" "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz" + "version" "1.0.1" + +"json-stringify-safe@~5.0.1": + "integrity" "sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus=" + "resolved" "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz" + "version" "5.0.1" + +"json5@^1.0.1": + "integrity" "sha512-aKS4WQjPenRxiQsC93MNfjx+nbF4PAdYzmd/1JIj8HYzqfbu86beTuNgXDzPknWk0n0uARlyewZo4s++ES36Ow==" + "resolved" "https://registry.npmjs.org/json5/-/json5-1.0.1.tgz" + "version" "1.0.1" + dependencies: + "minimist" "^1.2.0" + +"json5@^2.1.2", "json5@^2.2.1": + "integrity" "sha512-1hqLFMSrGHRHxav9q9gNjJ5EXznIxGVO09xQRrwplcS8qs28pZ8s8hupZAmqDwZUmVZ2Qb2jnyPOWcDH8m8dlA==" + "resolved" "https://registry.npmjs.org/json5/-/json5-2.2.1.tgz" + "version" "2.2.1" + +"jsprim@^1.2.2": + "integrity" "sha512-P2bSOMAc/ciLz6DzgjVlGJP9+BrJWu5UDGK70C2iweC5QBIeFf0ZXRvGjEj2uYgrY2MkAAhsSWHDWlFtEroZWw==" + "resolved" "https://registry.npmjs.org/jsprim/-/jsprim-1.4.2.tgz" + "version" "1.4.2" + dependencies: + "assert-plus" "1.0.0" + "extsprintf" "1.3.0" + "json-schema" "0.4.0" + "verror" "1.10.0" + +"killable@^1.0.1": + "integrity" "sha512-LzqtLKlUwirEUyl/nicirVmNiPvYs7l5n8wOPP7fyJVpUPkvCnW/vuiXGpylGUlnPDnB7311rARzAt3Mhswpjg==" + "resolved" "https://registry.npmjs.org/killable/-/killable-1.0.1.tgz" + "version" "1.0.1" + +"kind-of@^3.0.2": + "integrity" "sha512-NOW9QQXMoZGg/oqnVNoNTTIFEIid1627WCffUBJEdMxYApq7mNE7CpzucIPc+ZQg25Phej7IJSmX3hO+oblOtQ==" + "resolved" "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz" + "version" "3.2.2" + dependencies: + "is-buffer" "^1.1.5" + +"kind-of@^3.0.3": + "integrity" "sha512-NOW9QQXMoZGg/oqnVNoNTTIFEIid1627WCffUBJEdMxYApq7mNE7CpzucIPc+ZQg25Phej7IJSmX3hO+oblOtQ==" + "resolved" "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz" + "version" "3.2.2" + dependencies: + "is-buffer" "^1.1.5" + +"kind-of@^3.2.0": + "integrity" "sha512-NOW9QQXMoZGg/oqnVNoNTTIFEIid1627WCffUBJEdMxYApq7mNE7CpzucIPc+ZQg25Phej7IJSmX3hO+oblOtQ==" + "resolved" "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz" + "version" "3.2.2" + dependencies: + "is-buffer" "^1.1.5" + +"kind-of@^4.0.0": + "integrity" "sha512-24XsCxmEbRwEDbz/qz3stgin8TTzZ1ESR56OMCN0ujYg+vRutNSiOj9bHH9u85DKgXguraugV5sFuvbD4FW/hw==" + "resolved" "https://registry.npmjs.org/kind-of/-/kind-of-4.0.0.tgz" + "version" "4.0.0" + dependencies: + "is-buffer" "^1.1.5" + +"kind-of@^5.0.0": + "integrity" "sha512-NGEErnH6F2vUuXDh+OlbcKW7/wOcfdRHaZ7VWtqCztfHri/++YKmP51OdWeGPuqCOba6kk2OTe5d02VmTB80Pw==" + "resolved" "https://registry.npmjs.org/kind-of/-/kind-of-5.1.0.tgz" + "version" "5.1.0" + +"kind-of@^6.0.0", "kind-of@^6.0.2": + "integrity" "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==" + "resolved" "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz" + "version" "6.0.3" + +"klona@^2.0.4": + "integrity" 
"sha512-ZRbnvdg/NxqzC7L9Uyqzf4psi1OM4Cuc+sJAkQPjO6XkQIJTNbfK2Rsmbw8fx1p2mkZdp2FZYo2+LwXYY/uwIA==" + "resolved" "https://registry.npmjs.org/klona/-/klona-2.0.4.tgz" + "version" "2.0.4" + +"less-loader@^8.0.0": + "integrity" "sha512-K93jJU7fi3n6rxVvzp8Cb88Uy9tcQKfHlkoezHwKILXhlNYiRQl4yowLIkQqmBXOH/5I8yoKiYeIf781HGkW9g==" + "resolved" "https://registry.npmjs.org/less-loader/-/less-loader-8.1.1.tgz" + "version" "8.1.1" + dependencies: + "klona" "^2.0.4" + +"less@^3.5.0 || ^4.0.0", "less@3.9.0": + "integrity" "sha512-31CmtPEZraNUtuUREYjSqRkeETFdyEHSEPAGq4erDlUXtda7pzNmctdljdIagSb589d/qXGWiiP31R5JVf+v0w==" + "resolved" "https://registry.npmjs.org/less/-/less-3.9.0.tgz" + "version" "3.9.0" + dependencies: + "clone" "^2.1.2" + optionalDependencies: + "errno" "^0.1.1" + "graceful-fs" "^4.1.2" + "image-size" "~0.5.0" + "mime" "^1.4.1" + "mkdirp" "^0.5.0" + "promise" "^7.1.1" + "request" "^2.83.0" + "source-map" "~0.6.0" + +"levn@^0.4.1": + "integrity" "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==" + "resolved" "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz" + "version" "0.4.1" + dependencies: + "prelude-ls" "^1.2.1" + "type-check" "~0.4.0" + +"lilconfig@^2.0.3": + "integrity" "sha512-EHKqr/+ZvdKCifpNrJCKxBTgk5XupZA3y/aCPY9mxfgBzmgh93Mt/WqjjQ38oMxXuvDokaKiM3lAgvSH2sjtHg==" + "resolved" "https://registry.npmjs.org/lilconfig/-/lilconfig-2.0.3.tgz" + "version" "2.0.3" + +"lines-and-columns@^1.1.6": + "integrity" "sha1-HADHQ7QzzQpOgHWPe2SldEDZ/wA=" + "resolved" "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.1.6.tgz" + "version" "1.1.6" + +"loader-runner@^4.2.0": + "integrity" "sha512-92+huvxMvYlMzMt0iIOukcwYBFpkYJdpl2xsZ7LrlayO7E8SOv+JJUEK17B/dJIHAOLMfh2dZZ/Y18WgmGtYNw==" + "resolved" "https://registry.npmjs.org/loader-runner/-/loader-runner-4.2.0.tgz" + "version" "4.2.0" + +"loader-utils@^1.4.0": + "integrity" "sha512-qH0WSMBtn/oHuwjy/NucEgbx5dbxxnxup9s4PVXJUDHZBQY+s0NWA9rJf53RBnQZxfch7euUui7hpoAPvALZdA==" + "resolved" "https://registry.npmjs.org/loader-utils/-/loader-utils-1.4.0.tgz" + "version" "1.4.0" + dependencies: + "big.js" "^5.2.2" + "emojis-list" "^3.0.0" + "json5" "^1.0.1" + +"loader-utils@^2.0.0": + "integrity" "sha512-rP4F0h2RaWSvPEkD7BLDFQnvSf+nK+wr3ESUjNTyAGobqrijmW92zc+SO6d4p4B1wh7+B/Jg1mkQe5NYUEHtHQ==" + "resolved" "https://registry.npmjs.org/loader-utils/-/loader-utils-2.0.0.tgz" + "version" "2.0.0" + dependencies: + "big.js" "^5.2.2" + "emojis-list" "^3.0.0" + "json5" "^2.1.2" + +"loader-utils@^2.0.2": + "integrity" "sha512-TM57VeHptv569d/GKh6TAYdzKblwDNiumOdkFnejjD0XwTH87K90w3O7AiJRqdQoXygvi1VQTJTLGhJl7WqA7A==" + "resolved" "https://registry.npmjs.org/loader-utils/-/loader-utils-2.0.2.tgz" + "version" "2.0.2" + dependencies: + "big.js" "^5.2.2" + "emojis-list" "^3.0.0" + "json5" "^2.1.2" + +"locate-path@^2.0.0": + "integrity" "sha1-K1aLJl7slExtnA3pw9u7ygNUzY4=" + "resolved" "https://registry.npmjs.org/locate-path/-/locate-path-2.0.0.tgz" + "version" "2.0.0" + dependencies: + "p-locate" "^2.0.0" + "path-exists" "^3.0.0" + +"locate-path@^3.0.0": + "integrity" "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==" + "resolved" "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz" + "version" "3.0.0" + dependencies: + "p-locate" "^3.0.0" + "path-exists" "^3.0.0" + +"locate-path@^5.0.0": + "integrity" "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==" + "resolved" 
"https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz" + "version" "5.0.0" + dependencies: + "p-locate" "^4.1.0" + +"lodash-es@^4.17.15", "lodash-es@4.17.21": + "integrity" "sha512-mKnC+QJ9pWVzv+C4/U3rRsHapFfHvQFoFB92e52xeyGMcX6/OlIl78je1u8vePzYZSkkogMPJ2yjxxsb89cxyw==" + "resolved" "https://registry.npmjs.org/lodash-es/-/lodash-es-4.17.21.tgz" + "version" "4.17.21" + +"lodash.clonedeep@^4.5.0": + "integrity" "sha1-4j8/nE+Pvd6HJSnBBxhXoIblzO8=" + "resolved" "https://registry.npmjs.org/lodash.clonedeep/-/lodash.clonedeep-4.5.0.tgz" + "version" "4.5.0" + +"lodash.debounce@^4.0.8": + "integrity" "sha1-gteb/zCmfEAF/9XiUVMArZyk168=" + "resolved" "https://registry.npmjs.org/lodash.debounce/-/lodash.debounce-4.0.8.tgz" + "version" "4.0.8" + +"lodash.memoize@^4.1.2": + "integrity" "sha1-vMbEmkKihA7Zl/Mj6tpezRguC/4=" + "resolved" "https://registry.npmjs.org/lodash.memoize/-/lodash.memoize-4.1.2.tgz" + "version" "4.1.2" + +"lodash.merge@^4.6.2": + "integrity" "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==" + "resolved" "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz" + "version" "4.6.2" + +"lodash.truncate@^4.4.2": + "integrity" "sha1-WjUNoLERO4N+z//VgSy+WNbq4ZM=" + "resolved" "https://registry.npmjs.org/lodash.truncate/-/lodash.truncate-4.4.2.tgz" + "version" "4.4.2" + +"lodash.uniq@^4.5.0": + "integrity" "sha1-0CJTc662Uq3BvILklFM5qEJ1R3M=" + "resolved" "https://registry.npmjs.org/lodash.uniq/-/lodash.uniq-4.5.0.tgz" + "version" "4.5.0" + +"lodash@^4.17.11", "lodash@^4.17.14", "lodash@^4.17.15", "lodash@^4.17.20", "lodash@^4.17.21": + "integrity" "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==" + "resolved" "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz" + "version" "4.17.21" + +"loglevel@^1.6.8": + "integrity" "sha512-G6A/nJLRgWOuuwdNuA6koovfEV1YpqqAG4pRUlFaz3jj2QNZ8M4vBqnVA+HBTmU/AMNUtlOsMmSpF6NyOjztbA==" + "resolved" "https://registry.npmjs.org/loglevel/-/loglevel-1.8.0.tgz" + "version" "1.8.0" + +"loose-envify@^1.0.0": + "integrity" "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==" + "resolved" "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz" + "version" "1.4.0" + dependencies: + "js-tokens" "^3.0.0 || ^4.0.0" + +"lower-case@^2.0.2": + "integrity" "sha512-7fm3l3NAF9WfN6W3JOmf5drwpVqX78JtoGJ3A6W0a6ZnldM41w2fV5D490psKFTpMds8TJse/eHLFFsNHHjHgg==" + "resolved" "https://registry.npmjs.org/lower-case/-/lower-case-2.0.2.tgz" + "version" "2.0.2" + dependencies: + "tslib" "^2.0.3" + +"lru-cache@^6.0.0": + "integrity" "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==" + "resolved" "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz" + "version" "6.0.0" + dependencies: + "yallist" "^4.0.0" + +"magic-string@^0.25.7": + "integrity" "sha512-4CrMT5DOHTDk4HYDlzmwu4FVCcIYI8gauveasrdCu2IKIFOJ3f0v/8MDGJCDL9oD2ppz/Av1b0Nj345H9M+XIA==" + "resolved" "https://registry.npmjs.org/magic-string/-/magic-string-0.25.7.tgz" + "version" "0.25.7" + dependencies: + "sourcemap-codec" "^1.4.4" + +"make-dir@^2.0.0", "make-dir@^2.1.0": + "integrity" "sha512-LS9X+dc8KLxXCb8dni79fLIIUA5VyZoyjSMCwTluaXA0o27cCK0bhXkpgw+sTXVpPy/lSO57ilRixqk0vDmtRA==" + "resolved" "https://registry.npmjs.org/make-dir/-/make-dir-2.1.0.tgz" + "version" "2.1.0" + dependencies: + "pify" "^4.0.1" + "semver" "^5.6.0" + +"make-dir@^3.0.2", "make-dir@^3.1.0": + "integrity" 
"sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==" + "resolved" "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz" + "version" "3.1.0" + dependencies: + "semver" "^6.0.0" + +"map-cache@^0.2.2": + "integrity" "sha512-8y/eV9QQZCiyn1SprXSrCmqJN0yNRATe+PO8ztwqrvrbdRLA3eYJF0yaR0YayLWkMbsQSKWS9N2gPcGEc4UsZg==" + "resolved" "https://registry.npmjs.org/map-cache/-/map-cache-0.2.2.tgz" + "version" "0.2.2" + +"map-visit@^1.0.0": + "integrity" "sha512-4y7uGv8bd2WdM9vpQsiQNo41Ln1NvhvDRuVt0k2JZQ+ezN2uaQes7lZeZ+QQUHOLQAtDaBJ+7wCbi+ab/KFs+w==" + "resolved" "https://registry.npmjs.org/map-visit/-/map-visit-1.0.0.tgz" + "version" "1.0.0" + dependencies: + "object-visit" "^1.0.0" + +"mdn-data@2.0.14": + "integrity" "sha512-dn6wd0uw5GsdswPFfsgMp5NSB0/aDe6fK94YJV/AJDYXL6HVLWBsxeq7js7Ad+mU2K9LAlwpk6kN2D5mwCPVow==" + "resolved" "https://registry.npmjs.org/mdn-data/-/mdn-data-2.0.14.tgz" + "version" "2.0.14" + +"media-typer@0.3.0": + "integrity" "sha1-hxDXrwqmJvj/+hzgAWhUUmMlV0g=" + "resolved" "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz" + "version" "0.3.0" + +"memory-fs@^0.4.1": + "integrity" "sha512-cda4JKCxReDXFXRqOHPQscuIYg1PvxbE2S2GP45rnwfEK+vZaXC8C1OFvdHIbgw0DLzowXGVoxLaAmlgRy14GQ==" + "resolved" "https://registry.npmjs.org/memory-fs/-/memory-fs-0.4.1.tgz" + "version" "0.4.1" + dependencies: + "errno" "^0.1.3" + "readable-stream" "^2.0.1" + +"merge-descriptors@1.0.1": + "integrity" "sha512-cCi6g3/Zr1iqQi6ySbseM1Xvooa98N0w31jzUYrXPX2xqObmFGHJ0tQ5u74H3mVh7wLouTseZyYIq39g8cNp1w==" + "resolved" "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.1.tgz" + "version" "1.0.1" + +"merge-stream@^2.0.0": + "integrity" "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==" + "resolved" "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz" + "version" "2.0.0" + +"merge2@^1.3.0", "merge2@^1.4.1": + "integrity" "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==" + "resolved" "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz" + "version" "1.4.1" + +"methods@~1.1.2": + "integrity" "sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==" + "resolved" "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz" + "version" "1.1.2" + +"micromatch@^3.1.10", "micromatch@^3.1.4": + "integrity" "sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg==" + "resolved" "https://registry.npmjs.org/micromatch/-/micromatch-3.1.10.tgz" + "version" "3.1.10" + dependencies: + "arr-diff" "^4.0.0" + "array-unique" "^0.3.2" + "braces" "^2.3.1" + "define-property" "^2.0.2" + "extend-shallow" "^3.0.2" + "extglob" "^2.0.4" + "fragment-cache" "^0.2.1" + "kind-of" "^6.0.2" + "nanomatch" "^1.2.9" + "object.pick" "^1.3.0" + "regex-not" "^1.0.0" + "snapdragon" "^0.8.1" + "to-regex" "^3.0.2" + +"micromatch@^4.0.4": + "integrity" "sha512-pRmzw/XUcwXGpD9aI9q/0XOwLNygjETJ8y0ao0wdqprrzDa4YnxLcz7fQRZr8voh8V10kGhABbNcHVk5wHgWwg==" + "resolved" "https://registry.npmjs.org/micromatch/-/micromatch-4.0.4.tgz" + "version" "4.0.4" + dependencies: + "braces" "^3.0.1" + "picomatch" "^2.2.3" + +"mime-db@>= 1.43.0 < 2", "mime-db@1.50.0": + "integrity" "sha512-9tMZCDlYHqeERXEHO9f/hKfNXhre5dK2eE/krIvUjZbS2KPcqGDfNShIWS1uW9XOTKQKqK6qbeOci18rbfW77A==" + "resolved" "https://registry.npmjs.org/mime-db/-/mime-db-1.50.0.tgz" + "version" "1.50.0" + +"mime-db@1.52.0": + "integrity" 
"sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==" + "resolved" "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz" + "version" "1.52.0" + +"mime-types@^2.1.12", "mime-types@^2.1.27", "mime-types@~2.1.17", "mime-types@~2.1.19", "mime-types@~2.1.24": + "integrity" "sha512-plLElXp7pRDd0bNZHw+nMd52vRYjLwQjygaNg7ddJ2uJtTlmnTCjWuPKxVu6//AdaRuME84SvLW91sIkBqGT0g==" + "resolved" "https://registry.npmjs.org/mime-types/-/mime-types-2.1.33.tgz" + "version" "2.1.33" + dependencies: + "mime-db" "1.50.0" + +"mime-types@~2.1.34": + "integrity" "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==" + "resolved" "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz" + "version" "2.1.35" + dependencies: + "mime-db" "1.52.0" + +"mime@^1.4.1", "mime@1.6.0": + "integrity" "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==" + "resolved" "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz" + "version" "1.6.0" + +"mime@^2.3.1": + "integrity" "sha512-tqkh47FzKeCPD2PUiPB6pkbMzsCasjxAfC62/Wap5qrUWcb+sFasXUC5I3gYM5iBM8v/Qpn4UK0x+j0iHyFPDg==" + "resolved" "https://registry.npmjs.org/mime/-/mime-2.5.2.tgz" + "version" "2.5.2" + +"mime@^2.4.4": + "integrity" "sha512-USPkMeET31rOMiarsBNIHZKLGgvKc/LrjofAnBlOttf5ajRvqiRA8QsenbcooctK6d6Ts6aqZXBA+XbkKthiQg==" + "resolved" "https://registry.npmjs.org/mime/-/mime-2.6.0.tgz" + "version" "2.6.0" + +"mini-css-extract-plugin@^1.3.5": + "integrity" "sha512-WhDvO3SjGm40oV5y26GjMJYjd2UMqrLAGKy5YS2/3QKJy2F7jgynuHTir/tgUUOiNQu5saXHdc8reo7YuhhT4Q==" + "resolved" "https://registry.npmjs.org/mini-css-extract-plugin/-/mini-css-extract-plugin-1.6.2.tgz" + "version" "1.6.2" + dependencies: + "loader-utils" "^2.0.0" + "schema-utils" "^3.0.0" + "webpack-sources" "^1.1.0" + +"minimalistic-assert@^1.0.0": + "integrity" "sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A==" + "resolved" "https://registry.npmjs.org/minimalistic-assert/-/minimalistic-assert-1.0.1.tgz" + "version" "1.0.1" + +"minimatch@^3.0.4": + "integrity" "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==" + "resolved" "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz" + "version" "3.0.4" + dependencies: + "brace-expansion" "^1.1.7" + +"minimist@^1.2.0", "minimist@^1.2.5", "minimist@^1.2.6": + "integrity" "sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q==" + "resolved" "https://registry.npmjs.org/minimist/-/minimist-1.2.6.tgz" + "version" "1.2.6" + +"mixin-deep@^1.2.0": + "integrity" "sha512-WRoDn//mXBiJ1H40rqa3vH0toePwSsGb45iInWlTySa+Uu4k3tYUSxa2v1KqAiLtvlrSzaExqS1gtk96A9zvEA==" + "resolved" "https://registry.npmjs.org/mixin-deep/-/mixin-deep-1.3.2.tgz" + "version" "1.3.2" + dependencies: + "for-in" "^1.0.2" + "is-extendable" "^1.0.1" + +"mkdirp@^0.5.0": + "integrity" "sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ==" + "resolved" "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.5.tgz" + "version" "0.5.5" + dependencies: + "minimist" "^1.2.5" + +"mkdirp@^0.5.1": + "integrity" "sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw==" + "resolved" "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.6.tgz" + "version" "0.5.6" + dependencies: + "minimist" "^1.2.6" + +"mkdirp@^0.5.5": + "integrity" 
"sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ==" + "resolved" "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.5.tgz" + "version" "0.5.5" + dependencies: + "minimist" "^1.2.5" + +"mkdirp@^1.0.4": + "integrity" "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==" + "resolved" "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz" + "version" "1.0.4" + +"mockjs@^1.1.0": + "integrity" "sha512-eQsKcWzIaZzEZ07NuEyO4Nw65g0hdWAyurVol1IPl1gahRwY+svqzfgfey8U8dahLwG44d6/RwEzuK52rSa/JQ==" + "resolved" "https://registry.npmjs.org/mockjs/-/mockjs-1.1.0.tgz" + "version" "1.1.0" + dependencies: + "commander" "*" + +"moment@^2.27.0", "moment@^2.29.4": + "integrity" "sha512-5LC9SOxjSc2HF6vO2CyuTDNivEdoz2IvyJJGj6X8DJ0eFyfszE0QiEd+iXmBvUP3WHxSjFH/vIsA0EN00cgr8w==" + "resolved" "https://registry.npmjs.org/moment/-/moment-2.29.4.tgz" + "version" "2.29.4" + +"monaco-editor-webpack-plugin@^7.0.1": + "integrity" "sha512-M8qIqizltrPlIbrb73cZdTWfU9sIsUVFvAZkL3KGjAHmVWEJ0hZKa/uad14JuOckc0GwnCaoGHvMoYtJjVyCzw==" + "resolved" "https://registry.npmjs.org/monaco-editor-webpack-plugin/-/monaco-editor-webpack-plugin-7.0.1.tgz" + "version" "7.0.1" + dependencies: + "loader-utils" "^2.0.2" + +"monaco-editor@^0.34.0", "monaco-editor@>= 0.31.0": + "integrity" "sha512-VF+S5zG8wxfinLKLrWcl4WUizMx+LeJrG4PM/M78OhcwocpV0jiyhX/pG6Q9jIOhrb/ckYi6nHnaR5OojlOZCQ==" + "resolved" "https://registry.npmjs.org/monaco-editor/-/monaco-editor-0.34.0.tgz" + "version" "0.34.0" + +"ms@^2.1.1", "ms@2.1.2": + "integrity" "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + "resolved" "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz" + "version" "2.1.2" + +"ms@2.0.0": + "integrity" "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=" + "resolved" "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz" + "version" "2.0.0" + +"ms@2.1.3": + "integrity" "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==" + "resolved" "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz" + "version" "2.1.3" + +"multicast-dns-service-types@^1.1.0": + "integrity" "sha512-cnAsSVxIDsYt0v7HmC0hWZFwwXSh+E6PgCrREDuN/EsjgLwA5XRmlMHhSiDPrt6HxY1gTivEa/Zh7GtODoLevQ==" + "resolved" "https://registry.npmjs.org/multicast-dns-service-types/-/multicast-dns-service-types-1.1.0.tgz" + "version" "1.1.0" + +"multicast-dns@^6.0.1": + "integrity" "sha512-ji6J5enbMyGRHIAkAOu3WdV8nggqviKCEKtXcOqfphZZtQrmHKycfynJ2V7eVPUA4NhJ6V7Wf4TmGbTwKE9B6g==" + "resolved" "https://registry.npmjs.org/multicast-dns/-/multicast-dns-6.2.3.tgz" + "version" "6.2.3" + dependencies: + "dns-packet" "^1.3.1" + "thunky" "^1.0.2" + +"mustache@^4.2.0": + "integrity" "sha512-71ippSywq5Yb7/tVYyGbkBggbU8H3u5Rz56fH60jGFgr8uHwxs+aSKeqmluIVzM0m0kB7xQjKS6qPfd0b2ZoqQ==" + "resolved" "https://registry.npmjs.org/mustache/-/mustache-4.2.0.tgz" + "version" "4.2.0" + +"mz@^2.4.0": + "integrity" "sha512-z81GNO7nnYMEhrGh9LeymoE4+Yr0Wn5McHIZMK5cfQCl+NDX08sCZgUc9/6MHni9IWuFLm1Z3HTCXu2z9fN62Q==" + "resolved" "https://registry.npmjs.org/mz/-/mz-2.7.0.tgz" + "version" "2.7.0" + dependencies: + "any-promise" "^1.0.0" + "object-assign" "^4.0.1" + "thenify-all" "^1.0.0" + +"nanocolors@^0.1.12": + "integrity" "sha512-2nMHqg1x5PU+unxX7PGY7AuYxl2qDx7PSrTRjizr8sxdd3l/3hBuWWaki62qmtYm2U5i4Z5E7GbjlyDFhs9/EQ==" + "resolved" "https://registry.npmjs.org/nanocolors/-/nanocolors-0.1.12.tgz" + "version" "0.1.12" + +"nanoid@^3.1.23", "nanoid@^3.1.30": + "integrity" 
"sha512-MqBkQh/OHTS2egovRtLk45wEyNXwF+cokD+1YPf9u5VfJiRdAiRwB2froX5Co9Rh20xs4siNPm8naNotSD6RBw==" + "resolved" "https://registry.npmjs.org/nanoid/-/nanoid-3.3.4.tgz" + "version" "3.3.4" + +"nanomatch@^1.2.9": + "integrity" "sha512-fpoe2T0RbHwBTBUOftAfBPaDEi06ufaUai0mE6Yn1kacc3SnTErfb/h+X94VXzI64rKFHYImXSvdwGGCmwOqCA==" + "resolved" "https://registry.npmjs.org/nanomatch/-/nanomatch-1.2.13.tgz" + "version" "1.2.13" + dependencies: + "arr-diff" "^4.0.0" + "array-unique" "^0.3.2" + "define-property" "^2.0.2" + "extend-shallow" "^3.0.2" + "fragment-cache" "^0.2.1" + "is-windows" "^1.0.2" + "kind-of" "^6.0.2" + "object.pick" "^1.3.0" + "regex-not" "^1.0.0" + "snapdragon" "^0.8.1" + "to-regex" "^3.0.1" + +"nanopop@^2.1.0": + "integrity" "sha512-jGTwpFRexSH+fxappnGQtN9dspgE2ipa1aOjtR24igG0pv6JCxImIAmrLRHX+zUF5+1wtsFVbKyfP51kIGAVNw==" + "resolved" "https://registry.npmjs.org/nanopop/-/nanopop-2.1.0.tgz" + "version" "2.1.0" + +"natural-compare@^1.4.0": + "integrity" "sha1-Sr6/7tdUHywnrPspvbvRXI1bpPc=" + "resolved" "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz" + "version" "1.4.0" + +"negotiator@0.6.3": + "integrity" "sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==" + "resolved" "https://registry.npmjs.org/negotiator/-/negotiator-0.6.3.tgz" + "version" "0.6.3" + +"neo-async@^2.6.2": + "integrity" "sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw==" + "resolved" "https://registry.npmjs.org/neo-async/-/neo-async-2.6.2.tgz" + "version" "2.6.2" + +"nice-try@^1.0.4": + "integrity" "sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ==" + "resolved" "https://registry.npmjs.org/nice-try/-/nice-try-1.0.5.tgz" + "version" "1.0.5" + +"no-case@^3.0.4": + "integrity" "sha512-fgAN3jGAh+RoxUGZHTSOLJIqUc2wmoBwGR4tbpNAKmmovFoWq0OdRkb0VkldReO2a2iBT/OEulG9XSUc10r3zg==" + "resolved" "https://registry.npmjs.org/no-case/-/no-case-3.0.4.tgz" + "version" "3.0.4" + dependencies: + "lower-case" "^2.0.2" + "tslib" "^2.0.3" + +"node-forge@^0.10.0": + "integrity" "sha512-PPmu8eEeG9saEUvI97fm4OYxXVB6bFvyNTyiUOBichBpFG8A1Ljw3bY62+5oOjDEMHRnd0Y7HQ+x7uzxOzC6JA==" + "resolved" "https://registry.npmjs.org/node-forge/-/node-forge-0.10.0.tgz" + "version" "0.10.0" + +"node-modules-regexp@^1.0.0": + "integrity" "sha1-jZ2+KJZKSsVxLpExZCEHxx6Q7EA=" + "resolved" "https://registry.npmjs.org/node-modules-regexp/-/node-modules-regexp-1.0.0.tgz" + "version" "1.0.0" + +"node-releases@^2.0.3": + "integrity" "sha512-gbMzqQtTtDz/00jQzZ21PQzdI9PyLYqUSvD0p3naOhX4odFji0ZxYdnVwPTxmSwkmxhcFImpozceidSG+AgoPQ==" + "resolved" "https://registry.npmjs.org/node-releases/-/node-releases-2.0.4.tgz" + "version" "2.0.4" + +"normalize-path@^2.1.1": + "integrity" "sha512-3pKJwH184Xo/lnH6oyP1q2pMd7HcypqqmRs91/6/i2CGtWwIKGCkOOMTm/zXbgTEWHw1uNpNi/igc3ePOYHb6w==" + "resolved" "https://registry.npmjs.org/normalize-path/-/normalize-path-2.1.1.tgz" + "version" "2.1.1" + dependencies: + "remove-trailing-separator" "^1.0.1" + +"normalize-path@^3.0.0", "normalize-path@~3.0.0": + "integrity" "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==" + "resolved" "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz" + "version" "3.0.0" + +"normalize-range@^0.1.2": + "integrity" "sha1-LRDAa9/TEuqXd2laTShDlFa3WUI=" + "resolved" "https://registry.npmjs.org/normalize-range/-/normalize-range-0.1.2.tgz" + "version" "0.1.2" + +"normalize-url@^6.0.1": + "integrity" 
"sha512-DlL+XwOy3NxAQ8xuC0okPgK46iuVNAK01YN7RueYBqqFeGsBjV9XmCAzAdgt+667bCl5kPh9EqKKDwnaPG1I7A==" + "resolved" "https://registry.npmjs.org/normalize-url/-/normalize-url-6.1.0.tgz" + "version" "6.1.0" + +"npm-run-path@^2.0.0": + "integrity" "sha512-lJxZYlT4DW/bRUtFh1MQIWqmLwQfAxnqWG4HhEdjMlkrJYnJn0Jrr2u3mgxqaWsdiBc76TYkTG/mhrnYTuzfHw==" + "resolved" "https://registry.npmjs.org/npm-run-path/-/npm-run-path-2.0.2.tgz" + "version" "2.0.2" + dependencies: + "path-key" "^2.0.0" + +"nth-check@^2.0.0": + "integrity" "sha512-it1vE95zF6dTT9lBsYbxvqh0Soy4SPowchj0UBGj/V6cTPnXXtQOPUbhZ6CmGzAD/rW22LQK6E96pcdJXk4A4w==" + "resolved" "https://registry.npmjs.org/nth-check/-/nth-check-2.0.1.tgz" + "version" "2.0.1" + dependencies: + "boolbase" "^1.0.0" + +"oauth-sign@~0.9.0": + "integrity" "sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ==" + "resolved" "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.9.0.tgz" + "version" "0.9.0" + +"object-assign@^4.0.1": + "integrity" "sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM=" + "resolved" "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz" + "version" "4.1.1" + +"object-copy@^0.1.0": + "integrity" "sha512-79LYn6VAb63zgtmAteVOWo9Vdj71ZVBy3Pbse+VqxDpEP83XuujMrGqHIwAXJ5I/aM0zU7dIyIAhifVTPrNItQ==" + "resolved" "https://registry.npmjs.org/object-copy/-/object-copy-0.1.0.tgz" + "version" "0.1.0" + dependencies: + "copy-descriptor" "^0.1.0" + "define-property" "^0.2.5" + "kind-of" "^3.0.3" + +"object-inspect@^1.11.0", "object-inspect@^1.9.0": + "integrity" "sha512-jp7ikS6Sd3GxQfZJPyH3cjcbJF6GZPClgdV+EFygjFLQ5FmW/dRUnTd9PQ9k0JhoNDabWFbpF1yCdSWCC6gexg==" + "resolved" "https://registry.npmjs.org/object-inspect/-/object-inspect-1.11.0.tgz" + "version" "1.11.0" + +"object-is@^1.0.1": + "integrity" "sha512-3cyDsyHgtmi7I7DfSSI2LDp6SK2lwvtbg0p0R1e0RvTqF5ceGx+K2dfSjm1bKDMVCFEDAQvy+o8c6a7VujOddw==" + "resolved" "https://registry.npmjs.org/object-is/-/object-is-1.1.5.tgz" + "version" "1.1.5" + dependencies: + "call-bind" "^1.0.2" + "define-properties" "^1.1.3" + +"object-keys@^1.0.12", "object-keys@^1.1.1": + "integrity" "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==" + "resolved" "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz" + "version" "1.1.1" + +"object-visit@^1.0.0": + "integrity" "sha512-GBaMwwAVK9qbQN3Scdo0OyvgPW7l3lnaVMj84uTOZlswkX0KpF6fyDBJhtTthf7pymztoN36/KEr1DyhF96zEA==" + "resolved" "https://registry.npmjs.org/object-visit/-/object-visit-1.0.1.tgz" + "version" "1.0.1" + dependencies: + "isobject" "^3.0.0" + +"object.assign@^4.1.0", "object.assign@^4.1.2": + "integrity" "sha512-ixT2L5THXsApyiUPYKmW+2EHpXXe5Ii3M+f4e+aJFAHao5amFRW6J0OO6c/LU8Be47utCx2GL89hxGB6XSmKuQ==" + "resolved" "https://registry.npmjs.org/object.assign/-/object.assign-4.1.2.tgz" + "version" "4.1.2" + dependencies: + "call-bind" "^1.0.0" + "define-properties" "^1.1.3" + "has-symbols" "^1.0.1" + "object-keys" "^1.1.1" + +"object.pick@^1.3.0": + "integrity" "sha512-tqa/UMy/CCoYmj+H5qc07qvSL9dqcs/WZENZ1JbtWBlATP+iVOe778gE6MSijnyCnORzDuX6hU+LA4SZ09YjFQ==" + "resolved" "https://registry.npmjs.org/object.pick/-/object.pick-1.3.0.tgz" + "version" "1.3.0" + dependencies: + "isobject" "^3.0.1" + +"object.values@^1.1.5": + "integrity" "sha512-QUZRW0ilQ3PnPpbNtgdNV1PDbEqLIiSFB3l+EnGtBQ/8SUTLj1PZwtQHABZtLgwpJZTSZhuGLOGk57Drx2IvYg==" + "resolved" "https://registry.npmjs.org/object.values/-/object.values-1.1.5.tgz" + "version" "1.1.5" + dependencies: + "call-bind" "^1.0.2" + "define-properties" 
"^1.1.3" + "es-abstract" "^1.19.1" + +"obuf@^1.0.0", "obuf@^1.1.2": + "integrity" "sha512-PX1wu0AmAdPqOL1mWhqmlOd8kOIZQwGZw6rh7uby9fTc5lhaOWFLX3I6R1hrF9k3zUY40e6igsLGkDXK92LJNg==" + "resolved" "https://registry.npmjs.org/obuf/-/obuf-1.1.2.tgz" + "version" "1.1.2" + +"omit.js@^2.0.0": + "integrity" "sha512-hJmu9D+bNB40YpL9jYebQl4lsTW6yEHRTroJzNLqQJYHm7c+NQnJGfZmIWh8S3q3KoaxV1aLhV6B3+0N0/kyJg==" + "resolved" "https://registry.npmjs.org/omit.js/-/omit.js-2.0.2.tgz" + "version" "2.0.2" + +"on-finished@~2.3.0": + "integrity" "sha1-IPEzZIGwg811M3mSoWlxqi2QaUc=" + "resolved" "https://registry.npmjs.org/on-finished/-/on-finished-2.3.0.tgz" + "version" "2.3.0" + dependencies: + "ee-first" "1.1.1" + +"on-finished@2.4.1": + "integrity" "sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==" + "resolved" "https://registry.npmjs.org/on-finished/-/on-finished-2.4.1.tgz" + "version" "2.4.1" + dependencies: + "ee-first" "1.1.1" + +"on-headers@~1.0.2": + "integrity" "sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA==" + "resolved" "https://registry.npmjs.org/on-headers/-/on-headers-1.0.2.tgz" + "version" "1.0.2" + +"once@^1.3.0", "once@^1.3.1", "once@^1.4.0": + "integrity" "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=" + "resolved" "https://registry.npmjs.org/once/-/once-1.4.0.tgz" + "version" "1.4.0" + dependencies: + "wrappy" "1" + +"opener@^1.5.2": + "integrity" "sha512-ur5UIdyw5Y7yEj9wLzhqXiy6GZ3Mwx0yGI+5sMn2r0N0v3cKJvUmFH5yPP+WXh9e0xfyzyJX95D8l088DNFj7A==" + "resolved" "https://registry.npmjs.org/opener/-/opener-1.5.2.tgz" + "version" "1.5.2" + +"opn@^5.5.0": + "integrity" "sha512-PqHpggC9bLV0VeWcdKhkpxY+3JTzetLSqTCWL/z/tFIbI6G8JCjondXklT1JinczLz2Xib62sSp0T/gKT4KksA==" + "resolved" "https://registry.npmjs.org/opn/-/opn-5.5.0.tgz" + "version" "5.5.0" + dependencies: + "is-wsl" "^1.1.0" + +"optionator@^0.9.1": + "integrity" "sha512-74RlY5FCnhq4jRxVUPKDaRwrVNXMqsGsiW6AJw4XK8hmtm10wC0ypZBLw5IIp85NZMr91+qd1RvvENwg7jjRFw==" + "resolved" "https://registry.npmjs.org/optionator/-/optionator-0.9.1.tgz" + "version" "0.9.1" + dependencies: + "deep-is" "^0.1.3" + "fast-levenshtein" "^2.0.6" + "levn" "^0.4.1" + "prelude-ls" "^1.2.1" + "type-check" "^0.4.0" + "word-wrap" "^1.2.3" + +"p-finally@^1.0.0": + "integrity" "sha512-LICb2p9CB7FS+0eR1oqWnHhp0FljGLZCWBE9aix0Uye9W8LTQPwMTYVGWQWIw9RdQiDg4+epXQODwIYJtSJaow==" + "resolved" "https://registry.npmjs.org/p-finally/-/p-finally-1.0.0.tgz" + "version" "1.0.0" + +"p-limit@^1.1.0": + "integrity" "sha512-vvcXsLAJ9Dr5rQOPk7toZQZJApBl2K4J6dANSsEuh6QI41JYcsS/qhTGa9ErIUUgK3WNQoJYvylxvjqmiqEA9Q==" + "resolved" "https://registry.npmjs.org/p-limit/-/p-limit-1.3.0.tgz" + "version" "1.3.0" + dependencies: + "p-try" "^1.0.0" + +"p-limit@^2.0.0": + "integrity" "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==" + "resolved" "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz" + "version" "2.3.0" + dependencies: + "p-try" "^2.0.0" + +"p-limit@^2.2.0": + "integrity" "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==" + "resolved" "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz" + "version" "2.3.0" + dependencies: + "p-try" "^2.0.0" + +"p-limit@^3.0.2", "p-limit@^3.1.0": + "integrity" "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==" + "resolved" "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz" + "version" "3.1.0" + dependencies: + "yocto-queue" "^0.1.0" + 
+"p-locate@^2.0.0": + "integrity" "sha1-IKAQOyIqcMj9OcwuWAaA893l7EM=" + "resolved" "https://registry.npmjs.org/p-locate/-/p-locate-2.0.0.tgz" + "version" "2.0.0" + dependencies: + "p-limit" "^1.1.0" + +"p-locate@^3.0.0": + "integrity" "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==" + "resolved" "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz" + "version" "3.0.0" + dependencies: + "p-limit" "^2.0.0" + +"p-locate@^4.1.0": + "integrity" "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==" + "resolved" "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz" + "version" "4.1.0" + dependencies: + "p-limit" "^2.2.0" + +"p-map@^2.0.0": + "integrity" "sha512-y3b8Kpd8OAN444hxfBbFfj1FY/RjtTd8tzYwhUqNYXx0fXx2iX4maP4Qr6qhIKbQXI02wTLAda4fYUbDagTUFw==" + "resolved" "https://registry.npmjs.org/p-map/-/p-map-2.1.0.tgz" + "version" "2.1.0" + +"p-retry@^3.0.1": + "integrity" "sha512-XE6G4+YTTkT2a0UWb2kjZe8xNwf8bIbnqpc/IS/idOBVhyves0mK5OJgeocjx7q5pvX/6m23xuzVPYT1uGM73w==" + "resolved" "https://registry.npmjs.org/p-retry/-/p-retry-3.0.1.tgz" + "version" "3.0.1" + dependencies: + "retry" "^0.12.0" + +"p-try@^1.0.0": + "integrity" "sha1-y8ec26+P1CKOE/Yh8rGiN8GyB7M=" + "resolved" "https://registry.npmjs.org/p-try/-/p-try-1.0.0.tgz" + "version" "1.0.0" + +"p-try@^2.0.0": + "integrity" "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==" + "resolved" "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz" + "version" "2.2.0" + +"param-case@^3.0.4": + "integrity" "sha512-RXlj7zCYokReqWpOPH9oYivUzLYZ5vAPIfEmCTNViosC78F8F0H9y7T7gG2M39ymgutxF5gcFEsyZQSph9Bp3A==" + "resolved" "https://registry.npmjs.org/param-case/-/param-case-3.0.4.tgz" + "version" "3.0.4" + dependencies: + "dot-case" "^3.0.4" + "tslib" "^2.0.3" + +"parent-module@^1.0.0": + "integrity" "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==" + "resolved" "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz" + "version" "1.0.1" + dependencies: + "callsites" "^3.0.0" + +"parse-json@^5.0.0": + "integrity" "sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==" + "resolved" "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz" + "version" "5.2.0" + dependencies: + "@babel/code-frame" "^7.0.0" + "error-ex" "^1.3.1" + "json-parse-even-better-errors" "^2.3.0" + "lines-and-columns" "^1.1.6" + +"parse5-htmlparser2-tree-adapter@^6.0.0": + "integrity" "sha512-qPuWvbLgvDGilKc5BoicRovlT4MtYT6JfJyBOMDsKoiT+GiuP5qyrPCnR9HcPECIJJmZh5jRndyNThnhhb/vlA==" + "resolved" "https://registry.npmjs.org/parse5-htmlparser2-tree-adapter/-/parse5-htmlparser2-tree-adapter-6.0.1.tgz" + "version" "6.0.1" + dependencies: + "parse5" "^6.0.1" + +"parse5@^5.1.1": + "integrity" "sha512-ugq4DFI0Ptb+WWjAdOK16+u/nHfiIrcE+sh8kZMaM0WllQKLI9rOUq6c2b7cwPkXdzfQESqvoqK6ug7U/Yyzug==" + "resolved" "https://registry.npmjs.org/parse5/-/parse5-5.1.1.tgz" + "version" "5.1.1" + +"parse5@^6.0.1": + "integrity" "sha512-Ofn/CTFzRGTTxwpNEs9PP93gXShHcTq255nzRYSKe8AkVpZY7e1fpmTfOyoIvjP5HG7Z2ZM7VS9PPhQGW2pOpw==" + "resolved" "https://registry.npmjs.org/parse5/-/parse5-6.0.1.tgz" + "version" "6.0.1" + +"parseurl@~1.3.2", "parseurl@~1.3.3": + "integrity" "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==" + "resolved" "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz" + "version" "1.3.3" + 
+"pascal-case@^3.1.2": + "integrity" "sha512-uWlGT3YSnK9x3BQJaOdcZwrnV6hPpd8jFH1/ucpiLRPh/2zCVJKS19E4GvYHvaCcACn3foXZ0cLB9Wrx1KGe5g==" + "resolved" "https://registry.npmjs.org/pascal-case/-/pascal-case-3.1.2.tgz" + "version" "3.1.2" + dependencies: + "no-case" "^3.0.4" + "tslib" "^2.0.3" + +"pascalcase@^0.1.1": + "integrity" "sha512-XHXfu/yOQRy9vYOtUDVMN60OEJjW013GoObG1o+xwQTpB9eYJX/BjXMsdW13ZDPruFhYYn0AG22w0xgQMwl3Nw==" + "resolved" "https://registry.npmjs.org/pascalcase/-/pascalcase-0.1.1.tgz" + "version" "0.1.1" + +"path-dirname@^1.0.0": + "integrity" "sha512-ALzNPpyNq9AqXMBjeymIjFDAkAFH06mHJH/cSBHAgU0s4vfpBn6b2nf8tiRLvagKD8RbTpq2FKTBg7cl9l3c7Q==" + "resolved" "https://registry.npmjs.org/path-dirname/-/path-dirname-1.0.2.tgz" + "version" "1.0.2" + +"path-exists@^3.0.0": + "integrity" "sha1-zg6+ql94yxiSXqfYENe1mwEP1RU=" + "resolved" "https://registry.npmjs.org/path-exists/-/path-exists-3.0.0.tgz" + "version" "3.0.0" + +"path-exists@^4.0.0": + "integrity" "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==" + "resolved" "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz" + "version" "4.0.0" + +"path-is-absolute@^1.0.0": + "integrity" "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=" + "resolved" "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz" + "version" "1.0.1" + +"path-is-inside@^1.0.2": + "integrity" "sha512-DUWJr3+ULp4zXmol/SZkFf3JGsS9/SIv+Y3Rt93/UjPpDpklB5f1er4O3POIbUuUJ3FXgqte2Q7SrU6zAqwk8w==" + "resolved" "https://registry.npmjs.org/path-is-inside/-/path-is-inside-1.0.2.tgz" + "version" "1.0.2" + +"path-key@^2.0.0": + "integrity" "sha512-fEHGKCSmUSDPv4uoj8AlD+joPlq3peND+HRYyxFz4KPw4z926S/b8rIuFs2FYJg3BwsxJf6A9/3eIdLaYC+9Dw==" + "resolved" "https://registry.npmjs.org/path-key/-/path-key-2.0.1.tgz" + "version" "2.0.1" + +"path-key@^2.0.1": + "integrity" "sha512-fEHGKCSmUSDPv4uoj8AlD+joPlq3peND+HRYyxFz4KPw4z926S/b8rIuFs2FYJg3BwsxJf6A9/3eIdLaYC+9Dw==" + "resolved" "https://registry.npmjs.org/path-key/-/path-key-2.0.1.tgz" + "version" "2.0.1" + +"path-key@^3.1.0": + "integrity" "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==" + "resolved" "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz" + "version" "3.1.1" + +"path-parse@^1.0.6": + "integrity" "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==" + "resolved" "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz" + "version" "1.0.7" + +"path-to-regexp@0.1.7": + "integrity" "sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ==" + "resolved" "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz" + "version" "0.1.7" + +"path-type@^4.0.0": + "integrity" "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==" + "resolved" "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz" + "version" "4.0.0" + +"performance-now@^2.1.0": + "integrity" "sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns=" + "resolved" "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz" + "version" "2.1.0" + +"picocolors@^0.2.1": + "integrity" "sha512-cMlDqaLEqfSaW8Z7N5Jw+lyIW869EzT73/F5lhtY9cLGoVxSXznfgfXMO0Z5K0o0Q2TkTXq+0KFsdnSe3jDViA==" + "resolved" "https://registry.npmjs.org/picocolors/-/picocolors-0.2.1.tgz" + "version" "0.2.1" + +"picocolors@^1.0.0": + "integrity" "sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==" + "resolved" 
"https://registry.npmjs.org/picocolors/-/picocolors-1.0.0.tgz" + "version" "1.0.0" + +"picomatch@^2.0.4", "picomatch@^2.2.1", "picomatch@^2.2.3": + "integrity" "sha512-lY1Q/PiJGC2zOv/z391WOTD+Z02bCgsFfvxoXXf6h7kv9o+WmsmzYqrAwY63sNgOxE4xEdq0WyUnXfKeBrSvYw==" + "resolved" "https://registry.npmjs.org/picomatch/-/picomatch-2.3.0.tgz" + "version" "2.3.0" + +"pify@^2.0.0": + "integrity" "sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog==" + "resolved" "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz" + "version" "2.3.0" + +"pify@^4.0.1": + "integrity" "sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g==" + "resolved" "https://registry.npmjs.org/pify/-/pify-4.0.1.tgz" + "version" "4.0.1" + +"pinkie-promise@^2.0.0": + "integrity" "sha512-0Gni6D4UcLTbv9c57DfxDGdr41XfgUjqWZu492f0cIGr16zDU06BWP/RAEvOuo7CQ0CNjHaLlM59YJJFm3NWlw==" + "resolved" "https://registry.npmjs.org/pinkie-promise/-/pinkie-promise-2.0.1.tgz" + "version" "2.0.1" + dependencies: + "pinkie" "^2.0.0" + +"pinkie@^2.0.0": + "integrity" "sha512-MnUuEycAemtSaeFSjXKW/aroV7akBbY+Sv+RkyqFjgAe73F+MR0TBWKBRDkmfWq/HiFmdavfZ1G7h4SPZXaCSg==" + "resolved" "https://registry.npmjs.org/pinkie/-/pinkie-2.0.4.tgz" + "version" "2.0.4" + +"pirates@^4.0.0": + "integrity" "sha512-WuNqLTbMI3tmfef2TKxlQmAiLHKtFhlsCZnPIpuv2Ow0RDVO8lfy1Opf4NUzlMXLjPl+Men7AuVdX6TA+s+uGA==" + "resolved" "https://registry.npmjs.org/pirates/-/pirates-4.0.1.tgz" + "version" "4.0.1" + dependencies: + "node-modules-regexp" "^1.0.0" + +"pkg-dir@^2.0.0": + "integrity" "sha1-9tXREJ4Z1j7fQo4L1X4Sd3YVM0s=" + "resolved" "https://registry.npmjs.org/pkg-dir/-/pkg-dir-2.0.0.tgz" + "version" "2.0.0" + dependencies: + "find-up" "^2.1.0" + +"pkg-dir@^3.0.0": + "integrity" "sha512-/E57AYkoeQ25qkxMj5PBOVgF8Kiu/h7cYS30Z5+R7WaiCCBfLq58ZI/dSeaEKb9WVJV5n/03QwrN3IeWIFllvw==" + "resolved" "https://registry.npmjs.org/pkg-dir/-/pkg-dir-3.0.0.tgz" + "version" "3.0.0" + dependencies: + "find-up" "^3.0.0" + +"pkg-dir@^4.1.0": + "integrity" "sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ==" + "resolved" "https://registry.npmjs.org/pkg-dir/-/pkg-dir-4.2.0.tgz" + "version" "4.2.0" + dependencies: + "find-up" "^4.0.0" + +"pkg-up@^3.1.0": + "integrity" "sha512-nDywThFk1i4BQK4twPQ6TA4RT8bDY96yeuCVBWL3ePARCiEKDRSrNGbFIgUJpLp+XeIR65v8ra7WuJOFUBtkMA==" + "resolved" "https://registry.npmjs.org/pkg-up/-/pkg-up-3.1.0.tgz" + "version" "3.1.0" + dependencies: + "find-up" "^3.0.0" + +"portfinder@^1.0.26", "portfinder@^1.0.28": + "integrity" "sha512-Se+2isanIcEqf2XMHjyUKskczxbPH7dQnlMjXX6+dybayyHvAf/TCgyMRlzf/B6QDhAEFOGes0pzRo3by4AbMA==" + "resolved" "https://registry.npmjs.org/portfinder/-/portfinder-1.0.28.tgz" + "version" "1.0.28" + dependencies: + "async" "^2.6.2" + "debug" "^3.1.1" + "mkdirp" "^0.5.5" + +"posix-character-classes@^0.1.0": + "integrity" "sha512-xTgYBc3fuo7Yt7JbiuFxSYGToMoz8fLoE6TC9Wx1P/u+LfeThMOAqmuyECnlBaaJb+u1m9hHiXUEtwW4OzfUJg==" + "resolved" "https://registry.npmjs.org/posix-character-classes/-/posix-character-classes-0.1.1.tgz" + "version" "0.1.1" + +"postcss-calc@^8.0.0": + "integrity" "sha512-5NglwDrcbiy8XXfPM11F3HeC6hoT9W7GUH/Zi5U/p7u3Irv4rHhdDcIZwG0llHXV4ftsBjpfWMXAnXNl4lnt8g==" + "resolved" "https://registry.npmjs.org/postcss-calc/-/postcss-calc-8.0.0.tgz" + "version" "8.0.0" + dependencies: + "postcss-selector-parser" "^6.0.2" + "postcss-value-parser" "^4.0.2" + +"postcss-colormin@^5.2.0": + "integrity" 
"sha512-+HC6GfWU3upe5/mqmxuqYZ9B2Wl4lcoUUNkoaX59nEWV4EtADCMiBqui111Bu8R8IvaZTmqmxrqOAqjbHIwXPw==" + "resolved" "https://registry.npmjs.org/postcss-colormin/-/postcss-colormin-5.2.0.tgz" + "version" "5.2.0" + dependencies: + "browserslist" "^4.16.6" + "caniuse-api" "^3.0.0" + "colord" "^2.0.1" + "postcss-value-parser" "^4.1.0" + +"postcss-convert-values@^5.0.1": + "integrity" "sha512-C3zR1Do2BkKkCgC0g3sF8TS0koF2G+mN8xxayZx3f10cIRmTaAnpgpRQZjNekTZxM2ciSPoh2IWJm0VZx8NoQg==" + "resolved" "https://registry.npmjs.org/postcss-convert-values/-/postcss-convert-values-5.0.1.tgz" + "version" "5.0.1" + dependencies: + "postcss-value-parser" "^4.1.0" + +"postcss-discard-comments@^5.0.1": + "integrity" "sha512-lgZBPTDvWrbAYY1v5GYEv8fEO/WhKOu/hmZqmCYfrpD6eyDWWzAOsl2rF29lpvziKO02Gc5GJQtlpkTmakwOWg==" + "resolved" "https://registry.npmjs.org/postcss-discard-comments/-/postcss-discard-comments-5.0.1.tgz" + "version" "5.0.1" + +"postcss-discard-duplicates@^5.0.1": + "integrity" "sha512-svx747PWHKOGpAXXQkCc4k/DsWo+6bc5LsVrAsw+OU+Ibi7klFZCyX54gjYzX4TH+f2uzXjRviLARxkMurA2bA==" + "resolved" "https://registry.npmjs.org/postcss-discard-duplicates/-/postcss-discard-duplicates-5.0.1.tgz" + "version" "5.0.1" + +"postcss-discard-empty@^5.0.1": + "integrity" "sha512-vfU8CxAQ6YpMxV2SvMcMIyF2LX1ZzWpy0lqHDsOdaKKLQVQGVP1pzhrI9JlsO65s66uQTfkQBKBD/A5gp9STFw==" + "resolved" "https://registry.npmjs.org/postcss-discard-empty/-/postcss-discard-empty-5.0.1.tgz" + "version" "5.0.1" + +"postcss-discard-overridden@^5.0.1": + "integrity" "sha512-Y28H7y93L2BpJhrdUR2SR2fnSsT+3TVx1NmVQLbcnZWwIUpJ7mfcTC6Za9M2PG6w8j7UQRfzxqn8jU2VwFxo3Q==" + "resolved" "https://registry.npmjs.org/postcss-discard-overridden/-/postcss-discard-overridden-5.0.1.tgz" + "version" "5.0.1" + +"postcss-flexbugs-fixes@^5.0.2": + "integrity" "sha512-18f9voByak7bTktR2QgDveglpn9DTbBWPUzSOe9g0N4WR/2eSt6Vrcbf0hmspvMI6YWGywz6B9f7jzpFNJJgnQ==" + "resolved" "https://registry.npmjs.org/postcss-flexbugs-fixes/-/postcss-flexbugs-fixes-5.0.2.tgz" + "version" "5.0.2" + +"postcss-loader@^4.2.0": + "integrity" "sha512-M/dSoIiNDOo8Rk0mUqoj4kpGq91gcxCfb9PoyZVdZ76/AuhxylHDYZblNE8o+EQ9AMSASeMFEKxZf5aU6wlx1Q==" + "resolved" "https://registry.npmjs.org/postcss-loader/-/postcss-loader-4.3.0.tgz" + "version" "4.3.0" + dependencies: + "cosmiconfig" "^7.0.0" + "klona" "^2.0.4" + "loader-utils" "^2.0.0" + "schema-utils" "^3.0.0" + "semver" "^7.3.4" + +"postcss-merge-longhand@^5.0.2": + "integrity" "sha512-BMlg9AXSI5G9TBT0Lo/H3PfUy63P84rVz3BjCFE9e9Y9RXQZD3+h3YO1kgTNsNJy7bBc1YQp8DmSnwLIW5VPcw==" + "resolved" "https://registry.npmjs.org/postcss-merge-longhand/-/postcss-merge-longhand-5.0.2.tgz" + "version" "5.0.2" + dependencies: + "css-color-names" "^1.0.1" + "postcss-value-parser" "^4.1.0" + "stylehacks" "^5.0.1" + +"postcss-merge-rules@^5.0.2": + "integrity" "sha512-5K+Md7S3GwBewfB4rjDeol6V/RZ8S+v4B66Zk2gChRqLTCC8yjnHQ601omj9TKftS19OPGqZ/XzoqpzNQQLwbg==" + "resolved" "https://registry.npmjs.org/postcss-merge-rules/-/postcss-merge-rules-5.0.2.tgz" + "version" "5.0.2" + dependencies: + "browserslist" "^4.16.6" + "caniuse-api" "^3.0.0" + "cssnano-utils" "^2.0.1" + "postcss-selector-parser" "^6.0.5" + "vendors" "^1.0.3" + +"postcss-minify-font-values@^5.0.1": + "integrity" "sha512-7JS4qIsnqaxk+FXY1E8dHBDmraYFWmuL6cgt0T1SWGRO5bzJf8sUoelwa4P88LEWJZweHevAiDKxHlofuvtIoA==" + "resolved" "https://registry.npmjs.org/postcss-minify-font-values/-/postcss-minify-font-values-5.0.1.tgz" + "version" "5.0.1" + dependencies: + "postcss-value-parser" "^4.1.0" + +"postcss-minify-gradients@^5.0.2": + "integrity" 
"sha512-7Do9JP+wqSD6Prittitt2zDLrfzP9pqKs2EcLX7HJYxsxCOwrrcLt4x/ctQTsiOw+/8HYotAoqNkrzItL19SdQ==" + "resolved" "https://registry.npmjs.org/postcss-minify-gradients/-/postcss-minify-gradients-5.0.2.tgz" + "version" "5.0.2" + dependencies: + "colord" "^2.6" + "cssnano-utils" "^2.0.1" + "postcss-value-parser" "^4.1.0" + +"postcss-minify-params@^5.0.1": + "integrity" "sha512-4RUC4k2A/Q9mGco1Z8ODc7h+A0z7L7X2ypO1B6V8057eVK6mZ6xwz6QN64nHuHLbqbclkX1wyzRnIrdZehTEHw==" + "resolved" "https://registry.npmjs.org/postcss-minify-params/-/postcss-minify-params-5.0.1.tgz" + "version" "5.0.1" + dependencies: + "alphanum-sort" "^1.0.2" + "browserslist" "^4.16.0" + "cssnano-utils" "^2.0.1" + "postcss-value-parser" "^4.1.0" + "uniqs" "^2.0.0" + +"postcss-minify-selectors@^5.1.0": + "integrity" "sha512-NzGBXDa7aPsAcijXZeagnJBKBPMYLaJJzB8CQh6ncvyl2sIndLVWfbcDi0SBjRWk5VqEjXvf8tYwzoKf4Z07og==" + "resolved" "https://registry.npmjs.org/postcss-minify-selectors/-/postcss-minify-selectors-5.1.0.tgz" + "version" "5.1.0" + dependencies: + "alphanum-sort" "^1.0.2" + "postcss-selector-parser" "^6.0.5" + +"postcss-modules-extract-imports@^3.0.0": + "integrity" "sha512-bdHleFnP3kZ4NYDhuGlVK+CMrQ/pqUm8bx/oGL93K6gVwiclvX5x0n76fYMKuIGKzlABOy13zsvqjb0f92TEXw==" + "resolved" "https://registry.npmjs.org/postcss-modules-extract-imports/-/postcss-modules-extract-imports-3.0.0.tgz" + "version" "3.0.0" + +"postcss-modules-local-by-default@^4.0.0": + "integrity" "sha512-sT7ihtmGSF9yhm6ggikHdV0hlziDTX7oFoXtuVWeDd3hHObNkcHRo9V3yg7vCAY7cONyxJC/XXCmmiHHcvX7bQ==" + "resolved" "https://registry.npmjs.org/postcss-modules-local-by-default/-/postcss-modules-local-by-default-4.0.0.tgz" + "version" "4.0.0" + dependencies: + "icss-utils" "^5.0.0" + "postcss-selector-parser" "^6.0.2" + "postcss-value-parser" "^4.1.0" + +"postcss-modules-scope@^3.0.0": + "integrity" "sha512-hncihwFA2yPath8oZ15PZqvWGkWf+XUfQgUGamS4LqoP1anQLOsOJw0vr7J7IwLpoY9fatA2qiGUGmuZL0Iqlg==" + "resolved" "https://registry.npmjs.org/postcss-modules-scope/-/postcss-modules-scope-3.0.0.tgz" + "version" "3.0.0" + dependencies: + "postcss-selector-parser" "^6.0.4" + +"postcss-modules-values@^4.0.0": + "integrity" "sha512-RDxHkAiEGI78gS2ofyvCsu7iycRv7oqw5xMWn9iMoR0N/7mf9D50ecQqUo5BZ9Zh2vH4bCUR/ktCqbB9m8vJjQ==" + "resolved" "https://registry.npmjs.org/postcss-modules-values/-/postcss-modules-values-4.0.0.tgz" + "version" "4.0.0" + dependencies: + "icss-utils" "^5.0.0" + +"postcss-normalize-charset@^5.0.1": + "integrity" "sha512-6J40l6LNYnBdPSk+BHZ8SF+HAkS4q2twe5jnocgd+xWpz/mx/5Sa32m3W1AA8uE8XaXN+eg8trIlfu8V9x61eg==" + "resolved" "https://registry.npmjs.org/postcss-normalize-charset/-/postcss-normalize-charset-5.0.1.tgz" + "version" "5.0.1" + +"postcss-normalize-display-values@^5.0.1": + "integrity" "sha512-uupdvWk88kLDXi5HEyI9IaAJTE3/Djbcrqq8YgjvAVuzgVuqIk3SuJWUisT2gaJbZm1H9g5k2w1xXilM3x8DjQ==" + "resolved" "https://registry.npmjs.org/postcss-normalize-display-values/-/postcss-normalize-display-values-5.0.1.tgz" + "version" "5.0.1" + dependencies: + "cssnano-utils" "^2.0.1" + "postcss-value-parser" "^4.1.0" + +"postcss-normalize-positions@^5.0.1": + "integrity" "sha512-rvzWAJai5xej9yWqlCb1OWLd9JjW2Ex2BCPzUJrbaXmtKtgfL8dBMOOMTX6TnvQMtjk3ei1Lswcs78qKO1Skrg==" + "resolved" "https://registry.npmjs.org/postcss-normalize-positions/-/postcss-normalize-positions-5.0.1.tgz" + "version" "5.0.1" + dependencies: + "postcss-value-parser" "^4.1.0" + +"postcss-normalize-repeat-style@^5.0.1": + "integrity" 
"sha512-syZ2itq0HTQjj4QtXZOeefomckiV5TaUO6ReIEabCh3wgDs4Mr01pkif0MeVwKyU/LHEkPJnpwFKRxqWA/7O3w==" + "resolved" "https://registry.npmjs.org/postcss-normalize-repeat-style/-/postcss-normalize-repeat-style-5.0.1.tgz" + "version" "5.0.1" + dependencies: + "cssnano-utils" "^2.0.1" + "postcss-value-parser" "^4.1.0" + +"postcss-normalize-string@^5.0.1": + "integrity" "sha512-Ic8GaQ3jPMVl1OEn2U//2pm93AXUcF3wz+OriskdZ1AOuYV25OdgS7w9Xu2LO5cGyhHCgn8dMXh9bO7vi3i9pA==" + "resolved" "https://registry.npmjs.org/postcss-normalize-string/-/postcss-normalize-string-5.0.1.tgz" + "version" "5.0.1" + dependencies: + "postcss-value-parser" "^4.1.0" + +"postcss-normalize-timing-functions@^5.0.1": + "integrity" "sha512-cPcBdVN5OsWCNEo5hiXfLUnXfTGtSFiBU9SK8k7ii8UD7OLuznzgNRYkLZow11BkQiiqMcgPyh4ZqXEEUrtQ1Q==" + "resolved" "https://registry.npmjs.org/postcss-normalize-timing-functions/-/postcss-normalize-timing-functions-5.0.1.tgz" + "version" "5.0.1" + dependencies: + "cssnano-utils" "^2.0.1" + "postcss-value-parser" "^4.1.0" + +"postcss-normalize-unicode@^5.0.1": + "integrity" "sha512-kAtYD6V3pK0beqrU90gpCQB7g6AOfP/2KIPCVBKJM2EheVsBQmx/Iof+9zR9NFKLAx4Pr9mDhogB27pmn354nA==" + "resolved" "https://registry.npmjs.org/postcss-normalize-unicode/-/postcss-normalize-unicode-5.0.1.tgz" + "version" "5.0.1" + dependencies: + "browserslist" "^4.16.0" + "postcss-value-parser" "^4.1.0" + +"postcss-normalize-url@^5.0.2": + "integrity" "sha512-k4jLTPUxREQ5bpajFQZpx8bCF2UrlqOTzP9kEqcEnOfwsRshWs2+oAFIHfDQB8GO2PaUaSE0NlTAYtbluZTlHQ==" + "resolved" "https://registry.npmjs.org/postcss-normalize-url/-/postcss-normalize-url-5.0.2.tgz" + "version" "5.0.2" + dependencies: + "is-absolute-url" "^3.0.3" + "normalize-url" "^6.0.1" + "postcss-value-parser" "^4.1.0" + +"postcss-normalize-whitespace@^5.0.1": + "integrity" "sha512-iPklmI5SBnRvwceb/XH568yyzK0qRVuAG+a1HFUsFRf11lEJTiQQa03a4RSCQvLKdcpX7XsI1Gen9LuLoqwiqA==" + "resolved" "https://registry.npmjs.org/postcss-normalize-whitespace/-/postcss-normalize-whitespace-5.0.1.tgz" + "version" "5.0.1" + dependencies: + "postcss-value-parser" "^4.1.0" + +"postcss-ordered-values@^5.0.2": + "integrity" "sha512-8AFYDSOYWebJYLyJi3fyjl6CqMEG/UVworjiyK1r573I56kb3e879sCJLGvR3merj+fAdPpVplXKQZv+ey6CgQ==" + "resolved" "https://registry.npmjs.org/postcss-ordered-values/-/postcss-ordered-values-5.0.2.tgz" + "version" "5.0.2" + dependencies: + "cssnano-utils" "^2.0.1" + "postcss-value-parser" "^4.1.0" + +"postcss-reduce-initial@^5.0.1": + "integrity" "sha512-zlCZPKLLTMAqA3ZWH57HlbCjkD55LX9dsRyxlls+wfuRfqCi5mSlZVan0heX5cHr154Dq9AfbH70LyhrSAezJw==" + "resolved" "https://registry.npmjs.org/postcss-reduce-initial/-/postcss-reduce-initial-5.0.1.tgz" + "version" "5.0.1" + dependencies: + "browserslist" "^4.16.0" + "caniuse-api" "^3.0.0" + +"postcss-reduce-transforms@^5.0.1": + "integrity" "sha512-a//FjoPeFkRuAguPscTVmRQUODP+f3ke2HqFNgGPwdYnpeC29RZdCBvGRGTsKpMURb/I3p6jdKoBQ2zI+9Q7kA==" + "resolved" "https://registry.npmjs.org/postcss-reduce-transforms/-/postcss-reduce-transforms-5.0.1.tgz" + "version" "5.0.1" + dependencies: + "cssnano-utils" "^2.0.1" + "postcss-value-parser" "^4.1.0" + +"postcss-safe-parser@^5.0.2": + "integrity" "sha512-jDUfCPJbKOABhwpUKcqCVbbXiloe/QXMcbJ6Iipf3sDIihEzTqRCeMBfRaOHxhBuTYqtASrI1KJWxzztZU4qUQ==" + "resolved" "https://registry.npmjs.org/postcss-safe-parser/-/postcss-safe-parser-5.0.2.tgz" + "version" "5.0.2" + dependencies: + "postcss" "^8.1.0" + +"postcss-selector-parser@^6.0.2", "postcss-selector-parser@^6.0.4", "postcss-selector-parser@^6.0.5": + "integrity" 
"sha512-9LXrvaaX3+mcv5xkg5kFwqSzSH1JIObIx51PrndZwlmznwXRfxMddDvo9gve3gVR8ZTKgoFDdWkbRFmEhT4PMg==" + "resolved" "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.0.6.tgz" + "version" "6.0.6" + dependencies: + "cssesc" "^3.0.0" + "util-deprecate" "^1.0.2" + +"postcss-svgo@^5.0.2": + "integrity" "sha512-YzQuFLZu3U3aheizD+B1joQ94vzPfE6BNUcSYuceNxlVnKKsOtdo6hL9/zyC168Q8EwfLSgaDSalsUGa9f2C0A==" + "resolved" "https://registry.npmjs.org/postcss-svgo/-/postcss-svgo-5.0.2.tgz" + "version" "5.0.2" + dependencies: + "postcss-value-parser" "^4.1.0" + "svgo" "^2.3.0" + +"postcss-unique-selectors@^5.0.1": + "integrity" "sha512-gwi1NhHV4FMmPn+qwBNuot1sG1t2OmacLQ/AX29lzyggnjd+MnVD5uqQmpXO3J17KGL2WAxQruj1qTd3H0gG/w==" + "resolved" "https://registry.npmjs.org/postcss-unique-selectors/-/postcss-unique-selectors-5.0.1.tgz" + "version" "5.0.1" + dependencies: + "alphanum-sort" "^1.0.2" + "postcss-selector-parser" "^6.0.5" + "uniqs" "^2.0.0" + +"postcss-value-parser@^4.0.2", "postcss-value-parser@^4.1.0": + "integrity" "sha512-97DXOFbQJhk71ne5/Mt6cOu6yxsSfM0QGQyl0L25Gca4yGWEGJaig7l7gbCX623VqTBNGLRLaVUCnNkcedlRSQ==" + "resolved" "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-4.1.0.tgz" + "version" "4.1.0" + +"postcss@^7.0.0 || ^8.0.1", "postcss@^8.0.9", "postcss@^8.1.0", "postcss@^8.1.10", "postcss@^8.1.4", "postcss@^8.2.15", "postcss@^8.2.2", "postcss@8.3.0": + "integrity" "sha512-+ogXpdAjWGa+fdYY5BQ96V/6tAo+TdSSIMP5huJBIygdWwKtVoB5JWZ7yUd4xZ8r+8Kvvx4nyg/PQ071H4UtcQ==" + "resolved" "https://registry.npmjs.org/postcss/-/postcss-8.3.0.tgz" + "version" "8.3.0" + dependencies: + "colorette" "^1.2.2" + "nanoid" "^3.1.23" + "source-map-js" "^0.6.2" + +"postcss@^8.3.5": + "integrity" "sha512-hCmlUAIlUiav8Xdqw3Io4LcpA1DOt7h3LSTAC4G6JGHFFaWzI6qvFt9oilvl8BmkbBRX1IhM90ZAmpk68zccQA==" + "resolved" "https://registry.npmjs.org/postcss/-/postcss-8.3.11.tgz" + "version" "8.3.11" + dependencies: + "nanoid" "^3.1.30" + "picocolors" "^1.0.0" + "source-map-js" "^0.6.2" + +"prelude-ls@^1.2.1": + "integrity" "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==" + "resolved" "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz" + "version" "1.2.1" + +"pretty-error@^3.0.4": + "integrity" "sha512-ytLFLfv1So4AO1UkoBF6GXQgJRaKbiSiGFICaOPNwQ3CMvBvXpLRubeQWyPGnsbV/t9ml9qto6IeCsho0aEvwQ==" + "resolved" "https://registry.npmjs.org/pretty-error/-/pretty-error-3.0.4.tgz" + "version" "3.0.4" + dependencies: + "lodash" "^4.17.20" + "renderkid" "^2.0.6" + +"pretty-time@^1.1.0": + "integrity" "sha512-28iF6xPQrP8Oa6uxE6a1biz+lWeTOAPKggvjB8HAs6nVMKZwf5bG++632Dx614hIWgUPkgivRfG+a8uAXGTIbA==" + "resolved" "https://registry.npmjs.org/pretty-time/-/pretty-time-1.1.0.tgz" + "version" "1.1.0" + +"process-nextick-args@~2.0.0": + "integrity" "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==" + "resolved" "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz" + "version" "2.0.1" + +"progress@^2.0.0": + "integrity" "sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA==" + "resolved" "https://registry.npmjs.org/progress/-/progress-2.0.3.tgz" + "version" "2.0.3" + +"promise@^7.1.1": + "integrity" "sha512-nolQXZ/4L+bP/UGlkfaIujX9BKxGwmQ9OT4mOt5yvy8iK1h3wqTEJCijzGANTCCl9nWjY41juyAn2K3Q1hLLTg==" + "resolved" "https://registry.npmjs.org/promise/-/promise-7.3.1.tgz" + "version" "7.3.1" + dependencies: + "asap" "~2.0.3" + +"proxy-addr@~2.0.7": + 
"integrity" "sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==" + "resolved" "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.7.tgz" + "version" "2.0.7" + dependencies: + "forwarded" "0.2.0" + "ipaddr.js" "1.9.1" + +"prr@~1.0.1": + "integrity" "sha1-0/wRS6BplaRexok/SEzrHXj19HY=" + "resolved" "https://registry.npmjs.org/prr/-/prr-1.0.1.tgz" + "version" "1.0.1" + +"psl@^1.1.28": + "integrity" "sha512-RIdOzyoavK+hA18OGGWDqUTsCLhtA7IcZ/6NCs4fFJaHBDab+pDDmDIByWFRQJq2Cd7r1OoQxBGKOaztq+hjIQ==" + "resolved" "https://registry.npmjs.org/psl/-/psl-1.8.0.tgz" + "version" "1.8.0" + +"pump@^3.0.0": + "integrity" "sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==" + "resolved" "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz" + "version" "3.0.0" + dependencies: + "end-of-stream" "^1.1.0" + "once" "^1.3.1" + +"punycode@^2.1.0", "punycode@^2.1.1": + "integrity" "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==" + "resolved" "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz" + "version" "2.1.1" + +"punycode@1.3.2": + "integrity" "sha512-RofWgt/7fL5wP1Y7fxE7/EmTLzQVnB0ycyibJ0OOHIlJqTNzglYFxVwETOcIoJqJmpDXJ9xImDv+Fq34F/d4Dw==" + "resolved" "https://registry.npmjs.org/punycode/-/punycode-1.3.2.tgz" + "version" "1.3.2" + +"qs@^6.10.2": + "integrity" "sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q==" + "resolved" "https://registry.npmjs.org/qs/-/qs-6.11.0.tgz" + "version" "6.11.0" + dependencies: + "side-channel" "^1.0.4" + +"qs@~6.5.2": + "integrity" "sha512-N5ZAX4/LxJmF+7wN74pUD6qAh9/wnvdQcjq9TZjevvXzSUo7bfmw91saqMjzGS2xq91/odN2dW/WOl7qQHNDGA==" + "resolved" "https://registry.npmjs.org/qs/-/qs-6.5.2.tgz" + "version" "6.5.2" + +"qs@6.10.3": + "integrity" "sha512-wr7M2E0OFRfIfJZjKGieI8lBKb7fRCH4Fv5KNPEs7gJ8jadvotdsS08PzOKR7opXhZ/Xkjtt3WF9g38drmyRqQ==" + "resolved" "https://registry.npmjs.org/qs/-/qs-6.10.3.tgz" + "version" "6.10.3" + dependencies: + "side-channel" "^1.0.4" + +"qs@6.7.0": + "integrity" "sha512-VCdBRNFTX1fyE7Nb6FYoURo/SPe62QCaAyzJvUjwRaIsc+NePBEniHlvxFmmX56+HZphIGtV0XeCirBtpDrTyQ==" + "resolved" "https://registry.npmjs.org/qs/-/qs-6.7.0.tgz" + "version" "6.7.0" + +"querystring@0.2.0": + "integrity" "sha512-X/xY82scca2tau62i9mDyU9K+I+djTMUsvwf7xnUX5GLvVzgJybOJf4Y6o9Zx3oJK/LSXg5tTZBjwzqVPaPO2g==" + "resolved" "https://registry.npmjs.org/querystring/-/querystring-0.2.0.tgz" + "version" "0.2.0" + +"querystringify@^2.1.1": + "integrity" "sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ==" + "resolved" "https://registry.npmjs.org/querystringify/-/querystringify-2.2.0.tgz" + "version" "2.2.0" + +"queue-microtask@^1.2.2": + "integrity" "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==" + "resolved" "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz" + "version" "1.2.3" + +"randombytes@^2.1.0": + "integrity" "sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ==" + "resolved" "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz" + "version" "2.1.0" + dependencies: + "safe-buffer" "^5.1.0" + +"range-parser@^1.2.1", "range-parser@~1.2.1": + "integrity" "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==" + "resolved" "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz" + "version" "1.2.1" 
+ +"raw-body@2.4.0": + "integrity" "sha512-4Oz8DUIwdvoa5qMJelxipzi/iJIi40O5cGV1wNYp5hvZP8ZN0T+jiNkL0QepXs+EsQ9XJ8ipEDoiH70ySUJP3Q==" + "resolved" "https://registry.npmjs.org/raw-body/-/raw-body-2.4.0.tgz" + "version" "2.4.0" + dependencies: + "bytes" "3.1.0" + "http-errors" "1.7.2" + "iconv-lite" "0.4.24" + "unpipe" "1.0.0" + +"raw-body@2.5.1": + "integrity" "sha512-qqJBtEyVgS0ZmPGdCFPWJ3FreoqvG4MVQln/kCgF7Olq95IbOp0/BWyMwbdtn4VTvkM8Y7khCQ2Xgk/tcrCXig==" + "resolved" "https://registry.npmjs.org/raw-body/-/raw-body-2.5.1.tgz" + "version" "2.5.1" + dependencies: + "bytes" "3.1.2" + "http-errors" "2.0.0" + "iconv-lite" "0.4.24" + "unpipe" "1.0.0" + +"raw-loader@^4.0.2": + "integrity" "sha512-ZnScIV3ag9A4wPX/ZayxL/jZH+euYb6FcUinPcgiQW0+UBtEv0O6Q3lGd3cqJ+GHH+rksEv3Pj99oxJ3u3VIKA==" + "resolved" "https://registry.npmjs.org/raw-loader/-/raw-loader-4.0.2.tgz" + "version" "4.0.2" + dependencies: + "loader-utils" "^2.0.0" + "schema-utils" "^3.0.0" + +"readable-stream@^2.0.1", "readable-stream@^2.0.2": + "integrity" "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==" + "resolved" "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz" + "version" "2.3.7" + dependencies: + "core-util-is" "~1.0.0" + "inherits" "~2.0.3" + "isarray" "~1.0.0" + "process-nextick-args" "~2.0.0" + "safe-buffer" "~5.1.1" + "string_decoder" "~1.1.1" + "util-deprecate" "~1.0.1" + +"readable-stream@^3.0.6": + "integrity" "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==" + "resolved" "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz" + "version" "3.6.0" + dependencies: + "inherits" "^2.0.3" + "string_decoder" "^1.1.1" + "util-deprecate" "^1.0.1" + +"readdirp@^2.2.1": + "integrity" "sha512-1JU/8q+VgFZyxwrJ+SVIOsh+KywWGpds3NTqikiKpDMZWScmAYyKIgqkO+ARvNWJfXeXR1zxz7aHF4u4CyH6vQ==" + "resolved" "https://registry.npmjs.org/readdirp/-/readdirp-2.2.1.tgz" + "version" "2.2.1" + dependencies: + "graceful-fs" "^4.1.11" + "micromatch" "^3.1.10" + "readable-stream" "^2.0.2" + +"readdirp@~3.6.0": + "integrity" "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==" + "resolved" "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz" + "version" "3.6.0" + dependencies: + "picomatch" "^2.2.1" + +"readline@^1.3.0": + "integrity" "sha1-xYDXfvLPyHUrEySYBg3JeTp6wBw=" + "resolved" "https://registry.npmjs.org/readline/-/readline-1.3.0.tgz" + "version" "1.3.0" + +"regenerate-unicode-properties@^9.0.0": + "integrity" "sha512-3E12UeNSPfjrgwjkR81m5J7Aw/T55Tu7nUyZVQYCKEOs+2dkxEY+DpPtZzO4YruuiPb7NkYLVcyJC4+zCbk5pA==" + "resolved" "https://registry.npmjs.org/regenerate-unicode-properties/-/regenerate-unicode-properties-9.0.0.tgz" + "version" "9.0.0" + dependencies: + "regenerate" "^1.4.2" + +"regenerate@^1.4.2": + "integrity" "sha512-zrceR/XhGYU/d/opr2EKO7aRHUeiBI8qjtfHqADTwZd6Szfy16la6kqD0MIUs5z5hx6AaKa+PixpPrR289+I0A==" + "resolved" "https://registry.npmjs.org/regenerate/-/regenerate-1.4.2.tgz" + "version" "1.4.2" + +"regenerator-runtime@^0.13.4": + "integrity" "sha512-p3VT+cOEgxFsRRA9X4lkI1E+k2/CtnKtU4gcxyaCUreilL/vqI6CdZ3wxVUx3UOUg+gnUOQQcRI7BmSI656MYA==" + "resolved" "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.9.tgz" + "version" "0.13.9" + +"regenerator-transform@^0.14.2": + "integrity" "sha512-eOf6vka5IO151Jfsw2NO9WpGX58W6wWmefK3I1zEGr0lOD0u8rwPaNqQL1aRxUaxLeKO3ArNh3VYg1KbaD+FFw==" + "resolved" 
"https://registry.npmjs.org/regenerator-transform/-/regenerator-transform-0.14.5.tgz" + "version" "0.14.5" + dependencies: + "@babel/runtime" "^7.8.4" + +"regex-not@^1.0.0", "regex-not@^1.0.2": + "integrity" "sha512-J6SDjUgDxQj5NusnOtdFxDwN/+HWykR8GELwctJ7mdqhcyy1xEc4SRFHUXvxTp661YaVKAjfRLZ9cCqS6tn32A==" + "resolved" "https://registry.npmjs.org/regex-not/-/regex-not-1.0.2.tgz" + "version" "1.0.2" + dependencies: + "extend-shallow" "^3.0.2" + "safe-regex" "^1.1.0" + +"regexp.prototype.flags@^1.2.0": + "integrity" "sha512-fjggEOO3slI6Wvgjwflkc4NFRCTZAu5CnNfBd5qOMYhWdn67nJBBu34/TkD++eeFmd8C9r9jfXJ27+nSiRkSUA==" + "resolved" "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.4.3.tgz" + "version" "1.4.3" + dependencies: + "call-bind" "^1.0.2" + "define-properties" "^1.1.3" + "functions-have-names" "^1.2.2" + +"regexpp@^3.0.0", "regexpp@^3.1.0": + "integrity" "sha512-pq2bWo9mVD43nbts2wGv17XLiNLya+GklZ8kaDLV2Z08gDCsGpnKn9BFMepvWuHCbyVvY7J5o5+BVvoQbmlJLg==" + "resolved" "https://registry.npmjs.org/regexpp/-/regexpp-3.2.0.tgz" + "version" "3.2.0" + +"regexpu-core@^4.7.1": + "integrity" "sha512-1F6bYsoYiz6is+oz70NWur2Vlh9KWtswuRuzJOfeYUrfPX2o8n74AnUVaOGDbUqVGO9fNHu48/pjJO4sNVwsOg==" + "resolved" "https://registry.npmjs.org/regexpu-core/-/regexpu-core-4.8.0.tgz" + "version" "4.8.0" + dependencies: + "regenerate" "^1.4.2" + "regenerate-unicode-properties" "^9.0.0" + "regjsgen" "^0.5.2" + "regjsparser" "^0.7.0" + "unicode-match-property-ecmascript" "^2.0.0" + "unicode-match-property-value-ecmascript" "^2.0.0" + +"regjsgen@^0.5.2": + "integrity" "sha512-OFFT3MfrH90xIW8OOSyUrk6QHD5E9JOTeGodiJeBS3J6IwlgzJMNE/1bZklWz5oTg+9dCMyEetclvCVXOPoN3A==" + "resolved" "https://registry.npmjs.org/regjsgen/-/regjsgen-0.5.2.tgz" + "version" "0.5.2" + +"regjsparser@^0.7.0": + "integrity" "sha512-A4pcaORqmNMDVwUjWoTzuhwMGpP+NykpfqAsEgI1FSH/EzC7lrN5TMd+kN8YCovX+jMpu8eaqXgXPCa0g8FQNQ==" + "resolved" "https://registry.npmjs.org/regjsparser/-/regjsparser-0.7.0.tgz" + "version" "0.7.0" + dependencies: + "jsesc" "~0.5.0" + +"relateurl@^0.2.7": + "integrity" "sha1-VNvzd+UUQKypCkzSdGANP/LYiKk=" + "resolved" "https://registry.npmjs.org/relateurl/-/relateurl-0.2.7.tgz" + "version" "0.2.7" + +"remove-trailing-separator@^1.0.1": + "integrity" "sha512-/hS+Y0u3aOfIETiaiirUFwDBDzmXPvO+jAfKTitUngIPzdKc6Z0LoFjM/CK5PL4C+eKwHohlHAb6H0VFfmmUsw==" + "resolved" "https://registry.npmjs.org/remove-trailing-separator/-/remove-trailing-separator-1.1.0.tgz" + "version" "1.1.0" + +"renderkid@^2.0.6": + "integrity" "sha512-oCcFyxaMrKsKcTY59qnCAtmDVSLfPbrv6A3tVbPdFMMrv5jaK10V6m40cKsoPNhAqN6rmHW9sswW4o3ruSrwUQ==" + "resolved" "https://registry.npmjs.org/renderkid/-/renderkid-2.0.7.tgz" + "version" "2.0.7" + dependencies: + "css-select" "^4.1.3" + "dom-converter" "^0.2.0" + "htmlparser2" "^6.1.0" + "lodash" "^4.17.21" + "strip-ansi" "^3.0.1" + +"repeat-element@^1.1.2": + "integrity" "sha512-LFiNfRcSu7KK3evMyYOuCzv3L10TW7yC1G2/+StMjK8Y6Vqd2MG7r/Qjw4ghtuCOjFvlnms/iMmLqpvW/ES/WQ==" + "resolved" "https://registry.npmjs.org/repeat-element/-/repeat-element-1.1.4.tgz" + "version" "1.1.4" + +"repeat-string@^1.6.1": + "integrity" "sha512-PV0dzCYDNfRi1jCDbJzpW7jNNDRuCOG/jI5ctQcGKt/clZD+YcPS3yIlWuTJMmESC8aevCFmWJy5wjAFgNqN6w==" + "resolved" "https://registry.npmjs.org/repeat-string/-/repeat-string-1.6.1.tgz" + "version" "1.6.1" + +"request@^2.83.0": + "integrity" "sha512-MsvtOrfG9ZcrOwAW+Qi+F6HbD0CWXEh9ou77uOb7FM2WPhwT7smM833PzanhJLsgXjN89Ir6V2PczXNnMpwKhw==" + "resolved" "https://registry.npmjs.org/request/-/request-2.88.2.tgz" + 
"version" "2.88.2" + dependencies: + "aws-sign2" "~0.7.0" + "aws4" "^1.8.0" + "caseless" "~0.12.0" + "combined-stream" "~1.0.6" + "extend" "~3.0.2" + "forever-agent" "~0.6.1" + "form-data" "~2.3.2" + "har-validator" "~5.1.3" + "http-signature" "~1.2.0" + "is-typedarray" "~1.0.0" + "isstream" "~0.1.2" + "json-stringify-safe" "~5.0.1" + "mime-types" "~2.1.19" + "oauth-sign" "~0.9.0" + "performance-now" "^2.1.0" + "qs" "~6.5.2" + "safe-buffer" "^5.1.2" + "tough-cookie" "~2.5.0" + "tunnel-agent" "^0.6.0" + "uuid" "^3.3.2" + +"require-directory@^2.1.1": + "integrity" "sha1-jGStX9MNqxyXbiNE/+f3kqam30I=" + "resolved" "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz" + "version" "2.1.1" + +"require-from-string@^2.0.2": + "integrity" "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==" + "resolved" "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz" + "version" "2.0.2" + +"require-main-filename@^2.0.0": + "integrity" "sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==" + "resolved" "https://registry.npmjs.org/require-main-filename/-/require-main-filename-2.0.0.tgz" + "version" "2.0.0" + +"requires-port@^1.0.0": + "integrity" "sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ==" + "resolved" "https://registry.npmjs.org/requires-port/-/requires-port-1.0.0.tgz" + "version" "1.0.0" + +"resize-observer-polyfill@^1.5.1": + "integrity" "sha512-LwZrotdHOo12nQuZlHEmtuXdqGoOD0OhaxopaNFxWzInpEgaLWoVuAMbTzixuosCx2nEG58ngzW3vxdWoxIgdg==" + "resolved" "https://registry.npmjs.org/resize-observer-polyfill/-/resize-observer-polyfill-1.5.1.tgz" + "version" "1.5.1" + +"resolve-cwd@^2.0.0": + "integrity" "sha512-ccu8zQTrzVr954472aUVPLEcB3YpKSYR3cg/3lo1okzobPBM+1INXBbBZlDbnI/hbEocnf8j0QVo43hQKrbchg==" + "resolved" "https://registry.npmjs.org/resolve-cwd/-/resolve-cwd-2.0.0.tgz" + "version" "2.0.0" + dependencies: + "resolve-from" "^3.0.0" + +"resolve-cwd@^3.0.0": + "integrity" "sha512-OrZaX2Mb+rJCpH/6CpSqt9xFVpN++x01XnN2ie9g6P5/3xelLAkXWVADpdz1IHD/KFfEXyE6V0U01OQ3UO2rEg==" + "resolved" "https://registry.npmjs.org/resolve-cwd/-/resolve-cwd-3.0.0.tgz" + "version" "3.0.0" + dependencies: + "resolve-from" "^5.0.0" + +"resolve-from@^3.0.0": + "integrity" "sha512-GnlH6vxLymXJNMBo7XP1fJIzBFbdYt49CuTwmB/6N53t+kMPRMFKz783LlQ4tv28XoQfMWinAJX6WCGf2IlaIw==" + "resolved" "https://registry.npmjs.org/resolve-from/-/resolve-from-3.0.0.tgz" + "version" "3.0.0" + +"resolve-from@^4.0.0": + "integrity" "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==" + "resolved" "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz" + "version" "4.0.0" + +"resolve-from@^5.0.0": + "integrity" "sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==" + "resolved" "https://registry.npmjs.org/resolve-from/-/resolve-from-5.0.0.tgz" + "version" "5.0.0" + +"resolve-url@^0.2.1": + "integrity" "sha512-ZuF55hVUQaaczgOIwqWzkEcEidmlD/xl44x1UZnhOXcYuFN2S6+rcxpG+C1N3So0wvNI3DmJICUFfu2SxhBmvg==" + "resolved" "https://registry.npmjs.org/resolve-url/-/resolve-url-0.2.1.tgz" + "version" "0.2.1" + +"resolve@^1.10.1", "resolve@^1.14.2", "resolve@^1.20.0": + "integrity" "sha512-wENBPt4ySzg4ybFQW2TT1zMQucPK95HSh/nq2CFTZVOGut2+pQvSsgtda4d26YrYcr067wjbmzOG8byDPBX63A==" + "resolved" "https://registry.npmjs.org/resolve/-/resolve-1.20.0.tgz" + "version" "1.20.0" + dependencies: + 
"is-core-module" "^2.2.0" + "path-parse" "^1.0.6" + +"ret@~0.1.10": + "integrity" "sha512-TTlYpa+OL+vMMNG24xSlQGEJ3B/RzEfUlLct7b5G/ytav+wPrplCpVMFuwzXbkecJrb6IYo1iFb0S9v37754mg==" + "resolved" "https://registry.npmjs.org/ret/-/ret-0.1.15.tgz" + "version" "0.1.15" + +"retry@^0.12.0": + "integrity" "sha512-9LkiTwjUh6rT555DtE9rTX+BKByPfrMzEAtnlEtdEwr3Nkffwiihqe2bWADg+OQRjt9gl6ICdmB/ZFDCGAtSow==" + "resolved" "https://registry.npmjs.org/retry/-/retry-0.12.0.tgz" + "version" "0.12.0" + +"reusify@^1.0.4": + "integrity" "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==" + "resolved" "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz" + "version" "1.0.4" + +"rimraf@^2.6.3": + "integrity" "sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==" + "resolved" "https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz" + "version" "2.7.1" + dependencies: + "glob" "^7.1.3" + +"rimraf@^3.0.2": + "integrity" "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==" + "resolved" "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz" + "version" "3.0.2" + dependencies: + "glob" "^7.1.3" + +"run-parallel@^1.1.9": + "integrity" "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==" + "resolved" "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz" + "version" "1.2.0" + dependencies: + "queue-microtask" "^1.2.2" + +"safe-buffer@^5.0.1", "safe-buffer@^5.1.0", "safe-buffer@^5.1.2", "safe-buffer@>=5.1.0", "safe-buffer@~5.1.0", "safe-buffer@~5.1.1", "safe-buffer@5.1.2": + "integrity" "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" + "resolved" "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz" + "version" "5.1.2" + +"safe-buffer@5.2.1": + "integrity" "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==" + "resolved" "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz" + "version" "5.2.1" + +"safe-regex@^1.1.0": + "integrity" "sha512-aJXcif4xnaNUzvUuC5gcb46oTS7zvg4jpMTnuqtrEPlR3vFr4pxtdTwaF1Qs3Enjn9HK+ZlwQui+a7z0SywIzg==" + "resolved" "https://registry.npmjs.org/safe-regex/-/safe-regex-1.1.0.tgz" + "version" "1.1.0" + dependencies: + "ret" "~0.1.10" + +"safer-buffer@^2.0.2", "safer-buffer@^2.1.0", "safer-buffer@>= 2.1.2 < 3", "safer-buffer@~2.1.0": + "integrity" "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==" + "resolved" "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz" + "version" "2.1.2" + +"schema-utils@^1.0.0": + "integrity" "sha512-i27Mic4KovM/lnGsy8whRCHhc7VicJajAjTrYg11K9zfZXnYIt4k5F+kZkwjnrhKzLic/HLU4j11mjsz2G/75g==" + "resolved" "https://registry.npmjs.org/schema-utils/-/schema-utils-1.0.0.tgz" + "version" "1.0.0" + dependencies: + "ajv" "^6.1.0" + "ajv-errors" "^1.0.0" + "ajv-keywords" "^3.1.0" + +"schema-utils@^2.6.5": + "integrity" "sha512-SHiNtMOUGWBQJwzISiVYKu82GiV4QYGePp3odlY1tuKO7gPtphAT5R/py0fA6xtbgLL/RvtJZnU9b8s0F1q0Xg==" + "resolved" "https://registry.npmjs.org/schema-utils/-/schema-utils-2.7.1.tgz" + "version" "2.7.1" + dependencies: + "@types/json-schema" "^7.0.5" + "ajv" "^6.12.4" + "ajv-keywords" "^3.5.2" + +"schema-utils@^3.0.0": + "integrity" "sha512-Y5PQxS4ITlC+EahLuXaY86TXfR7Dc5lw294alXOq86JAHCihAIZfqv8nNCWvaEJvaC51uN9hbLGeV0cFBdH+Fw==" + "resolved" "https://registry.npmjs.org/schema-utils/-/schema-utils-3.1.1.tgz" + 
"version" "3.1.1" + dependencies: + "@types/json-schema" "^7.0.8" + "ajv" "^6.12.5" + "ajv-keywords" "^3.5.2" + +"schema-utils@^3.1.0": + "integrity" "sha512-Y5PQxS4ITlC+EahLuXaY86TXfR7Dc5lw294alXOq86JAHCihAIZfqv8nNCWvaEJvaC51uN9hbLGeV0cFBdH+Fw==" + "resolved" "https://registry.npmjs.org/schema-utils/-/schema-utils-3.1.1.tgz" + "version" "3.1.1" + dependencies: + "@types/json-schema" "^7.0.8" + "ajv" "^6.12.5" + "ajv-keywords" "^3.5.2" + +"schema-utils@^3.1.1": + "integrity" "sha512-Y5PQxS4ITlC+EahLuXaY86TXfR7Dc5lw294alXOq86JAHCihAIZfqv8nNCWvaEJvaC51uN9hbLGeV0cFBdH+Fw==" + "resolved" "https://registry.npmjs.org/schema-utils/-/schema-utils-3.1.1.tgz" + "version" "3.1.1" + dependencies: + "@types/json-schema" "^7.0.8" + "ajv" "^6.12.5" + "ajv-keywords" "^3.5.2" + +"schema-utils@^4.0.0": + "integrity" "sha512-1edyXKgh6XnJsJSQ8mKWXnN/BVaIbFMLpouRUrXgVq7WYne5kw3MW7UPhO44uRXQSIpTSXoJbmrR2X0w9kUTyg==" + "resolved" "https://registry.npmjs.org/schema-utils/-/schema-utils-4.0.0.tgz" + "version" "4.0.0" + dependencies: + "@types/json-schema" "^7.0.9" + "ajv" "^8.8.0" + "ajv-formats" "^2.1.1" + "ajv-keywords" "^5.0.0" + +"scroll-into-view-if-needed@^2.2.25": + "integrity" "sha512-8LuxJSuFVc92+0AdNv4QOxRL4Abeo1DgLnGNkn1XlaujPH/3cCFz3QI60r2VNu4obJJROzgnIUw5TKQkZvZI1w==" + "resolved" "https://registry.npmjs.org/scroll-into-view-if-needed/-/scroll-into-view-if-needed-2.2.28.tgz" + "version" "2.2.28" + dependencies: + "compute-scroll-into-view" "^1.0.17" + +"select-hose@^2.0.0": + "integrity" "sha512-mEugaLK+YfkijB4fx0e6kImuJdCIt2LxCRcbEYPqRGCs4F2ogyfZU5IAZRdjCP8JPq2AtdNoC/Dux63d9Kiryg==" + "resolved" "https://registry.npmjs.org/select-hose/-/select-hose-2.0.0.tgz" + "version" "2.0.0" + +"selfsigned@^1.10.8": + "integrity" "sha512-lkjaiAye+wBZDCBsu5BGi0XiLRxeUlsGod5ZP924CRSEoGuZAw/f7y9RKu28rwTfiHVhdavhB0qH0INV6P1lEA==" + "resolved" "https://registry.npmjs.org/selfsigned/-/selfsigned-1.10.14.tgz" + "version" "1.10.14" + dependencies: + "node-forge" "^0.10.0" + +"semver@^5.5.0": + "integrity" "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==" + "resolved" "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz" + "version" "5.7.1" + +"semver@^5.6.0": + "integrity" "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==" + "resolved" "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz" + "version" "5.7.1" + +"semver@^6.0.0", "semver@^6.1.0", "semver@^6.1.1", "semver@^6.1.2", "semver@^6.3.0": + "integrity" "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==" + "resolved" "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz" + "version" "6.3.0" + +"semver@^7.2.1": + "integrity" "sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==" + "resolved" "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz" + "version" "7.3.5" + dependencies: + "lru-cache" "^6.0.0" + +"semver@^7.3.4": + "integrity" "sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==" + "resolved" "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz" + "version" "7.3.5" + dependencies: + "lru-cache" "^6.0.0" + +"semver@^7.3.5": + "integrity" "sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==" + "resolved" "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz" + "version" "7.3.5" + dependencies: + "lru-cache" "^6.0.0" + +"semver@7.0.0": + "integrity" 
"sha512-+GB6zVA9LWh6zovYQLALHwv5rb2PHGlJi3lfiqIHxR0uuwCgefcOJc59v9fv1w8GbStwxuuqqAjI9NMAOOgq1A==" + "resolved" "https://registry.npmjs.org/semver/-/semver-7.0.0.tgz" + "version" "7.0.0" + +"send@0.18.0": + "integrity" "sha512-qqWzuOjSFOuqPjFe4NOsMLafToQQwBSOEpS+FwEt3A2V3vKubTquT3vmLTQpFgMXp8AlFWFuP1qKaJZOtPpVXg==" + "resolved" "https://registry.npmjs.org/send/-/send-0.18.0.tgz" + "version" "0.18.0" + dependencies: + "debug" "2.6.9" + "depd" "2.0.0" + "destroy" "1.2.0" + "encodeurl" "~1.0.2" + "escape-html" "~1.0.3" + "etag" "~1.8.1" + "fresh" "0.5.2" + "http-errors" "2.0.0" + "mime" "1.6.0" + "ms" "2.1.3" + "on-finished" "2.4.1" + "range-parser" "~1.2.1" + "statuses" "2.0.1" + +"serialize-javascript@^6.0.0": + "integrity" "sha512-Qr3TosvguFt8ePWqsvRfrKyQXIiW+nGbYpy8XK24NQHE83caxWt+mIymTT19DGFbNWNLfEwsrkSmN64lVWB9ag==" + "resolved" "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-6.0.0.tgz" + "version" "6.0.0" + dependencies: + "randombytes" "^2.1.0" + +"serve-index@^1.9.1": + "integrity" "sha512-pXHfKNP4qujrtteMrSBb0rc8HJ9Ms/GrXwcUtUtD5s4ewDJI8bT3Cz2zTVRMKtri49pLx2e0Ya8ziP5Ya2pZZw==" + "resolved" "https://registry.npmjs.org/serve-index/-/serve-index-1.9.1.tgz" + "version" "1.9.1" + dependencies: + "accepts" "~1.3.4" + "batch" "0.6.1" + "debug" "2.6.9" + "escape-html" "~1.0.3" + "http-errors" "~1.6.2" + "mime-types" "~2.1.17" + "parseurl" "~1.3.2" + +"serve-static@1.15.0": + "integrity" "sha512-XGuRDNjXUijsUL0vl6nSD7cwURuzEgglbOaFuZM9g3kwDXOWVTck0jLzjPzGD+TazWbboZYu52/9/XPdUgne9g==" + "resolved" "https://registry.npmjs.org/serve-static/-/serve-static-1.15.0.tgz" + "version" "1.15.0" + dependencies: + "encodeurl" "~1.0.2" + "escape-html" "~1.0.3" + "parseurl" "~1.3.3" + "send" "0.18.0" + +"set-blocking@^2.0.0": + "integrity" "sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw==" + "resolved" "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz" + "version" "2.0.0" + +"set-value@^2.0.0": + "integrity" "sha512-JxHc1weCN68wRY0fhCoXpyK55m/XPHafOmK4UWD7m2CI14GMcFypt4w/0+NV5f/ZMby2F6S2wwA7fgynh9gWSw==" + "resolved" "https://registry.npmjs.org/set-value/-/set-value-2.0.1.tgz" + "version" "2.0.1" + dependencies: + "extend-shallow" "^2.0.1" + "is-extendable" "^0.1.1" + "is-plain-object" "^2.0.3" + "split-string" "^3.0.1" + +"set-value@^2.0.1": + "integrity" "sha512-JxHc1weCN68wRY0fhCoXpyK55m/XPHafOmK4UWD7m2CI14GMcFypt4w/0+NV5f/ZMby2F6S2wwA7fgynh9gWSw==" + "resolved" "https://registry.npmjs.org/set-value/-/set-value-2.0.1.tgz" + "version" "2.0.1" + dependencies: + "extend-shallow" "^2.0.1" + "is-extendable" "^0.1.1" + "is-plain-object" "^2.0.3" + "split-string" "^3.0.1" + +"set-value@3.0.2": + "integrity" "sha512-npjkVoz+ank0zjlV9F47Fdbjfj/PfXyVhZvGALWsyIYU/qrMzpi6avjKW3/7KeSU2Df3I46BrN1xOI1+6vW0hA==" + "resolved" "https://registry.npmjs.org/set-value/-/set-value-3.0.2.tgz" + "version" "3.0.2" + dependencies: + "is-plain-object" "^2.0.4" + +"setprototypeof@1.1.0": + "integrity" "sha512-BvE/TwpZX4FXExxOxZyRGQQv651MSwmWKZGqvmPcRIjDqWub67kTKuIMx43cZZrS/cBBzwBcNDWoFxt2XEFIpQ==" + "resolved" "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.1.0.tgz" + "version" "1.1.0" + +"setprototypeof@1.1.1": + "integrity" "sha512-JvdAWfbXeIGaZ9cILp38HntZSFSo3mWg6xGcJJsd+d4aRMOqauag1C63dJfDw7OaMYwEbHMOxEZ1lqVRYP2OAw==" + "resolved" "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.1.1.tgz" + "version" "1.1.1" + +"setprototypeof@1.2.0": + "integrity" 
"sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==" + "resolved" "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz" + "version" "1.2.0" + +"shallow-clone@^3.0.0": + "integrity" "sha512-/6KqX+GVUdqPuPPd2LxDDxzX6CAbjJehAAOKlNpqqUpAqPM6HeL8f+o3a+JsyGjn2lv0WY8UsTgUJjU9Ok55NA==" + "resolved" "https://registry.npmjs.org/shallow-clone/-/shallow-clone-3.0.1.tgz" + "version" "3.0.1" + dependencies: + "kind-of" "^6.0.2" + +"shallow-equal@^1.0.0": + "integrity" "sha512-S4vJDjHHMBaiZuT9NPb616CSmLf618jawtv3sufLl6ivK8WocjAo58cXwbRV1cgqxH0Qbv+iUt6m05eqEa2IRA==" + "resolved" "https://registry.npmjs.org/shallow-equal/-/shallow-equal-1.2.1.tgz" + "version" "1.2.1" + +"shebang-command@^1.2.0": + "integrity" "sha512-EV3L1+UQWGor21OmnvojK36mhg+TyIKDh3iFBKBohr5xeXIhNBcx8oWdgkTEEQ+BEFFYdLRuqMfd5L84N1V5Vg==" + "resolved" "https://registry.npmjs.org/shebang-command/-/shebang-command-1.2.0.tgz" + "version" "1.2.0" + dependencies: + "shebang-regex" "^1.0.0" + +"shebang-command@^2.0.0": + "integrity" "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==" + "resolved" "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz" + "version" "2.0.0" + dependencies: + "shebang-regex" "^3.0.0" + +"shebang-regex@^1.0.0": + "integrity" "sha512-wpoSFAxys6b2a2wHZ1XpDSgD7N9iVjg29Ph9uV/uaP9Ex/KXlkTZTeddxDPSYQpgvzKLGJke2UU0AzoGCjNIvQ==" + "resolved" "https://registry.npmjs.org/shebang-regex/-/shebang-regex-1.0.0.tgz" + "version" "1.0.0" + +"shebang-regex@^3.0.0": + "integrity" "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==" + "resolved" "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz" + "version" "3.0.0" + +"side-channel@^1.0.4": + "integrity" "sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==" + "resolved" "https://registry.npmjs.org/side-channel/-/side-channel-1.0.4.tgz" + "version" "1.0.4" + dependencies: + "call-bind" "^1.0.0" + "get-intrinsic" "^1.0.2" + "object-inspect" "^1.9.0" + +"signal-exit@^3.0.0": + "integrity" "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==" + "resolved" "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz" + "version" "3.0.7" + +"sirv@^1.0.7": + "integrity" "sha512-f2AOPogZmXgJ9Ma2M22ZEhc1dNtRIzcEkiflMFeVTRq+OViOZMvH1IPMVOwrKaxpSaHioBJiDR0SluRqGa7atA==" + "resolved" "https://registry.npmjs.org/sirv/-/sirv-1.0.18.tgz" + "version" "1.0.18" + dependencies: + "@polka/url" "^1.0.0-next.20" + "mime" "^2.3.1" + "totalist" "^1.0.0" + +"slash@^3.0.0": + "integrity" "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==" + "resolved" "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz" + "version" "3.0.0" + +"slash@^4.0.0": + "integrity" "sha512-3dOsAHXXUkQTpOYcoAxLIorMTp4gIQr5IW3iVb7A7lFIp0VHhnynm9izx6TssdrIcVIESAlVjtnO2K8bg+Coew==" + "resolved" "https://registry.npmjs.org/slash/-/slash-4.0.0.tgz" + "version" "4.0.0" + +"slice-ansi@^4.0.0": + "integrity" "sha512-qMCMfhY040cVHT43K9BFygqYbUPFZKHOg7K73mtTWJRb8pyP3fzf4Ixd5SzdEJQ6MRUg/WBnOLxghZtKKurENQ==" + "resolved" "https://registry.npmjs.org/slice-ansi/-/slice-ansi-4.0.0.tgz" + "version" "4.0.0" + dependencies: + "ansi-styles" "^4.0.0" + "astral-regex" "^2.0.0" + "is-fullwidth-code-point" "^3.0.0" + +"snapdragon-node@^2.0.1": + "integrity" 
"sha512-O27l4xaMYt/RSQ5TR3vpWCAB5Kb/czIcqUFOM/C4fYcLnbZUc1PkjTAMjof2pBWaSTwOUd6qUHcFGVGj7aIwnw==" + "resolved" "https://registry.npmjs.org/snapdragon-node/-/snapdragon-node-2.1.1.tgz" + "version" "2.1.1" + dependencies: + "define-property" "^1.0.0" + "isobject" "^3.0.0" + "snapdragon-util" "^3.0.1" + +"snapdragon-util@^3.0.1": + "integrity" "sha512-mbKkMdQKsjX4BAL4bRYTj21edOf8cN7XHdYUJEe+Zn99hVEYcMvKPct1IqNe7+AZPirn8BCDOQBHQZknqmKlZQ==" + "resolved" "https://registry.npmjs.org/snapdragon-util/-/snapdragon-util-3.0.1.tgz" + "version" "3.0.1" + dependencies: + "kind-of" "^3.2.0" + +"snapdragon@^0.8.1": + "integrity" "sha512-FtyOnWN/wCHTVXOMwvSv26d+ko5vWlIDD6zoUJ7LW8vh+ZBC8QdljveRP+crNrtBwioEUWy/4dMtbBjA4ioNlg==" + "resolved" "https://registry.npmjs.org/snapdragon/-/snapdragon-0.8.2.tgz" + "version" "0.8.2" + dependencies: + "base" "^0.11.1" + "debug" "^2.2.0" + "define-property" "^0.2.5" + "extend-shallow" "^2.0.1" + "map-cache" "^0.2.2" + "source-map" "^0.5.6" + "source-map-resolve" "^0.5.0" + "use" "^3.1.0" + +"sockjs-client@^1.5.0": + "integrity" "sha512-2g0tjOR+fRs0amxENLi/q5TiJTqY+WXFOzb5UwXndlK6TO3U/mirZznpx6w34HVMoc3g7cY24yC/ZMIYnDlfkw==" + "resolved" "https://registry.npmjs.org/sockjs-client/-/sockjs-client-1.6.1.tgz" + "version" "1.6.1" + dependencies: + "debug" "^3.2.7" + "eventsource" "^2.0.2" + "faye-websocket" "^0.11.4" + "inherits" "^2.0.4" + "url-parse" "^1.5.10" + +"sockjs@^0.3.21": + "integrity" "sha512-GJgLTZ7vYb/JtPSSZ10hsOYIvEYsjbNU+zPdIHcUaWVNUEPivzxku31865sSSud0Da0W4lEeOPlmw93zLQchuQ==" + "resolved" "https://registry.npmjs.org/sockjs/-/sockjs-0.3.24.tgz" + "version" "0.3.24" + dependencies: + "faye-websocket" "^0.11.3" + "uuid" "^8.3.2" + "websocket-driver" "^0.7.4" + +"source-list-map@^2.0.0": + "integrity" "sha512-qnQ7gVMxGNxsiL4lEuJwe/To8UnK7fAnmbGEEH8RpLouuKbeEm0lhbQVFIrNSuB+G7tVrAlVsZgETT5nljf+Iw==" + "resolved" "https://registry.npmjs.org/source-list-map/-/source-list-map-2.0.1.tgz" + "version" "2.0.1" + +"source-map-js@^0.6.2": + "integrity" "sha512-/3GptzWzu0+0MBQFrDKzw/DvvMTUORvgY6k6jd/VS6iCR4RDTKWH6v6WPwQoUO8667uQEf9Oe38DxAYWY5F/Ug==" + "resolved" "https://registry.npmjs.org/source-map-js/-/source-map-js-0.6.2.tgz" + "version" "0.6.2" + +"source-map-resolve@^0.5.0": + "integrity" "sha512-Htz+RnsXWk5+P2slx5Jh3Q66vhQj1Cllm0zvnaY98+NFx+Dv2CF/f5O/t8x+KaNdrdIAsruNzoh/KpialbqAnw==" + "resolved" "https://registry.npmjs.org/source-map-resolve/-/source-map-resolve-0.5.3.tgz" + "version" "0.5.3" + dependencies: + "atob" "^2.1.2" + "decode-uri-component" "^0.2.0" + "resolve-url" "^0.2.1" + "source-map-url" "^0.4.0" + "urix" "^0.1.0" + +"source-map-support@^0.5.16", "source-map-support@~0.5.20": + "integrity" "sha512-n1lZZ8Ve4ksRqizaBQgxXDgKwttHDhyfQjA6YZZn8+AroHbsIz+JjwxQDxbp+7y5OYCI8t1Yk7etjD9CRd2hIw==" + "resolved" "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.20.tgz" + "version" "0.5.20" + dependencies: + "buffer-from" "^1.0.0" + "source-map" "^0.6.0" + +"source-map-url@^0.4.0": + "integrity" "sha512-cPiFOTLUKvJFIg4SKVScy4ilPPW6rFgMgfuZJPNoDuMs3nC1HbMUycBoJw77xFIp6z1UJQJOfx6C9GMH80DiTw==" + "resolved" "https://registry.npmjs.org/source-map-url/-/source-map-url-0.4.1.tgz" + "version" "0.4.1" + +"source-map@^0.5.6": + "integrity" "sha512-LbrmJOMUSdEVxIKvdcJzQC+nQhe8FUZQTXQy6+I75skNgn3OoQ0DZA8YnFa7gp8tqtL3KPf1kmo0R5DoApeSGQ==" + "resolved" "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz" + "version" "0.5.7" + +"source-map@^0.6.0", "source-map@^0.6.1", "source-map@~0.6.0", "source-map@~0.6.1", "source-map@0.6.1": + "integrity" 
"sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==" + "resolved" "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz" + "version" "0.6.1" + +"sourcemap-codec@^1.4.4": + "integrity" "sha512-9NykojV5Uih4lgo5So5dtw+f0JgJX30KCNI8gwhz2J9A15wD0Ml6tjHKwf6fTSa6fAdVBdZeNOs9eJ71qCk8vA==" + "resolved" "https://registry.npmjs.org/sourcemap-codec/-/sourcemap-codec-1.4.8.tgz" + "version" "1.4.8" + +"spdy-transport@^3.0.0": + "integrity" "sha512-hsLVFE5SjA6TCisWeJXFKniGGOpBgMLmerfO2aCyCU5s7nJ/rpAepqmFifv/GCbSbueEeAJJnmSQ2rKC/g8Fcw==" + "resolved" "https://registry.npmjs.org/spdy-transport/-/spdy-transport-3.0.0.tgz" + "version" "3.0.0" + dependencies: + "debug" "^4.1.0" + "detect-node" "^2.0.4" + "hpack.js" "^2.1.6" + "obuf" "^1.1.2" + "readable-stream" "^3.0.6" + "wbuf" "^1.7.3" + +"spdy@^4.0.2": + "integrity" "sha512-r46gZQZQV+Kl9oItvl1JZZqJKGr+oEkB08A6BzkiR7593/7IbtuncXHd2YoYeTsG4157ZssMu9KYvUHLcjcDoA==" + "resolved" "https://registry.npmjs.org/spdy/-/spdy-4.0.2.tgz" + "version" "4.0.2" + dependencies: + "debug" "^4.1.0" + "handle-thing" "^2.0.0" + "http-deceiver" "^1.2.7" + "select-hose" "^2.0.0" + "spdy-transport" "^3.0.0" + +"split-string@^3.0.1", "split-string@^3.0.2": + "integrity" "sha512-NzNVhJDYpwceVVii8/Hu6DKfD2G+NrQHlS/V/qgv763EYudVwEcMQNxd2lh+0VrUByXN/oJkl5grOhYWvQUYiw==" + "resolved" "https://registry.npmjs.org/split-string/-/split-string-3.1.0.tgz" + "version" "3.1.0" + dependencies: + "extend-shallow" "^3.0.0" + +"sprintf-js@~1.0.2": + "integrity" "sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw=" + "resolved" "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz" + "version" "1.0.3" + +"sshpk@^1.7.0": + "integrity" "sha512-HXXqVUq7+pcKeLqqZj6mHFUMvXtOJt1uoUx09pFW6011inTMxqI8BA8PM95myrIyyKwdnzjdFjLiE6KBPVtJIg==" + "resolved" "https://registry.npmjs.org/sshpk/-/sshpk-1.16.1.tgz" + "version" "1.16.1" + dependencies: + "asn1" "~0.2.3" + "assert-plus" "^1.0.0" + "bcrypt-pbkdf" "^1.0.0" + "dashdash" "^1.12.0" + "ecc-jsbn" "~0.1.1" + "getpass" "^0.1.1" + "jsbn" "~0.1.0" + "safer-buffer" "^2.0.2" + "tweetnacl" "~0.14.0" + +"stable@^0.1.8": + "integrity" "sha512-ji9qxRnOVfcuLDySj9qzhGSEFVobyt1kIOSkj1qZzYLzq7Tos/oUUWvotUPQLlrsidqsK6tBH89Bc9kL5zHA6w==" + "resolved" "https://registry.npmjs.org/stable/-/stable-0.1.8.tgz" + "version" "0.1.8" + +"stackframe@^1.3.4": + "integrity" "sha512-oeVtt7eWQS+Na6F//S4kJ2K2VbRlS9D43mAlMyVpVWovy9o+jfgH8O9agzANzaiLjclA0oYzUXEM4PurhSUChw==" + "resolved" "https://registry.npmjs.org/stackframe/-/stackframe-1.3.4.tgz" + "version" "1.3.4" + +"static-extend@^0.1.1": + "integrity" "sha512-72E9+uLc27Mt718pMHt9VMNiAL4LMsmDbBva8mxWUCkT07fSzEGMYUCk0XWY6lp0j6RBAG4cJ3mWuZv2OE3s0g==" + "resolved" "https://registry.npmjs.org/static-extend/-/static-extend-0.1.2.tgz" + "version" "0.1.2" + dependencies: + "define-property" "^0.2.5" + "object-copy" "^0.1.0" + +"statuses@>= 1.4.0 < 2", "statuses@>= 1.5.0 < 2": + "integrity" "sha1-Fhx9rBd2Wf2YEfQ3cfqZOBR4Yow=" + "resolved" "https://registry.npmjs.org/statuses/-/statuses-1.5.0.tgz" + "version" "1.5.0" + +"statuses@2.0.1": + "integrity" "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==" + "resolved" "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz" + "version" "2.0.1" + +"std-env@^2.2.1": + "integrity" "sha512-eOsoKTWnr6C8aWrqJJ2KAReXoa7Vn5Ywyw6uCXgA/xDhxPoaIsBa5aNJmISY04dLwXPBnDHW4diGM7Sn5K4R/g==" + "resolved" "https://registry.npmjs.org/std-env/-/std-env-2.3.1.tgz" + "version" "2.3.1" + dependencies: + "ci-info" "^3.1.1" + 
+"string_decoder@^1.1.1", "string_decoder@~1.1.1": + "integrity" "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==" + "resolved" "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz" + "version" "1.1.1" + dependencies: + "safe-buffer" "~5.1.0" + +"string-width@^3.0.0", "string-width@^3.1.0": + "integrity" "sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==" + "resolved" "https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz" + "version" "3.1.0" + dependencies: + "emoji-regex" "^7.0.1" + "is-fullwidth-code-point" "^2.0.0" + "strip-ansi" "^5.1.0" + +"string-width@^4.1.0", "string-width@^4.2.0", "string-width@^4.2.3": + "integrity" "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==" + "resolved" "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz" + "version" "4.2.3" + dependencies: + "emoji-regex" "^8.0.0" + "is-fullwidth-code-point" "^3.0.0" + "strip-ansi" "^6.0.1" + +"string.prototype.trimend@^1.0.4": + "integrity" "sha512-y9xCjw1P23Awk8EvTpcyL2NIr1j7wJ39f+k6lvRnSMz+mz9CGz9NYPelDk42kOz6+ql8xjfK8oYzy3jAP5QU5A==" + "resolved" "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.4.tgz" + "version" "1.0.4" + dependencies: + "call-bind" "^1.0.2" + "define-properties" "^1.1.3" + +"string.prototype.trimstart@^1.0.4": + "integrity" "sha512-jh6e984OBfvxS50tdY2nRZnoC5/mLFKOREQfw8t5yytkoUsJRNxvI/E39qu1sD0OtWI3OC0XgKSmcWwziwYuZw==" + "resolved" "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.4.tgz" + "version" "1.0.4" + dependencies: + "call-bind" "^1.0.2" + "define-properties" "^1.1.3" + +"strip-ansi@^3.0.1": + "integrity" "sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=" + "resolved" "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz" + "version" "3.0.1" + dependencies: + "ansi-regex" "^2.0.0" + +"strip-ansi@^5.0.0": + "integrity" "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==" + "resolved" "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz" + "version" "5.2.0" + dependencies: + "ansi-regex" "^4.1.0" + +"strip-ansi@^5.1.0": + "integrity" "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==" + "resolved" "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz" + "version" "5.2.0" + dependencies: + "ansi-regex" "^4.1.0" + +"strip-ansi@^5.2.0": + "integrity" "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==" + "resolved" "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz" + "version" "5.2.0" + dependencies: + "ansi-regex" "^4.1.0" + +"strip-ansi@^6.0.0", "strip-ansi@^6.0.1": + "integrity" "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==" + "resolved" "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz" + "version" "6.0.1" + dependencies: + "ansi-regex" "^5.0.1" + +"strip-bom@^3.0.0": + "integrity" "sha1-IzTBjpx1n3vdVv3vfprj1YjmjtM=" + "resolved" "https://registry.npmjs.org/strip-bom/-/strip-bom-3.0.0.tgz" + "version" "3.0.0" + +"strip-eof@^1.0.0": + "integrity" "sha512-7FCwGGmx8mD5xQd3RPUvnSpUXHM3BWuzjtpD4TXsfcZ9EL4azvVVUscFYwD9nx8Kh+uCBC00XBtAykoMHwTh8Q==" + "resolved" "https://registry.npmjs.org/strip-eof/-/strip-eof-1.0.0.tgz" + "version" "1.0.0" + +"strip-json-comments@^3.1.0", "strip-json-comments@^3.1.1": + "integrity" 
"sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==" + "resolved" "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz" + "version" "3.1.1" + +"style-loader@^2.0.0": + "integrity" "sha512-Z0gYUJmzZ6ZdRUqpg1r8GsaFKypE+3xAzuFeMuoHgjc9KZv3wMyCRjQIWEbhoFSq7+7yoHXySDJyyWQaPajeiQ==" + "resolved" "https://registry.npmjs.org/style-loader/-/style-loader-2.0.0.tgz" + "version" "2.0.0" + dependencies: + "loader-utils" "^2.0.0" + "schema-utils" "^3.0.0" + +"stylehacks@^5.0.1": + "integrity" "sha512-Es0rVnHIqbWzveU1b24kbw92HsebBepxfcqe5iix7t9j0PQqhs0IxXVXv0pY2Bxa08CgMkzD6OWql7kbGOuEdA==" + "resolved" "https://registry.npmjs.org/stylehacks/-/stylehacks-5.0.1.tgz" + "version" "5.0.1" + dependencies: + "browserslist" "^4.16.0" + "postcss-selector-parser" "^6.0.4" + +"supports-color@^5.3.0": + "integrity" "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==" + "resolved" "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz" + "version" "5.5.0" + dependencies: + "has-flag" "^3.0.0" + +"supports-color@^6.1.0": + "integrity" "sha512-qe1jfm1Mg7Nq/NSh6XE24gPXROEVsWHxC1LIx//XNlD9iw7YZQGjZNjYN7xGaEG6iKdA8EtNFW6R0gjnVXp+wQ==" + "resolved" "https://registry.npmjs.org/supports-color/-/supports-color-6.1.0.tgz" + "version" "6.1.0" + dependencies: + "has-flag" "^3.0.0" + +"supports-color@^7.1.0": + "integrity" "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==" + "resolved" "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz" + "version" "7.2.0" + dependencies: + "has-flag" "^4.0.0" + +"supports-color@^8.0.0": + "integrity" "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==" + "resolved" "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz" + "version" "8.1.1" + dependencies: + "has-flag" "^4.0.0" + +"svg-tags@^1.0.0": + "integrity" "sha1-WPcc7jvVGbWdSyqEO2x95krAR2Q=" + "resolved" "https://registry.npmjs.org/svg-tags/-/svg-tags-1.0.0.tgz" + "version" "1.0.0" + +"svgo@^2.3.0": + "integrity" "sha512-aDLsGkre4fTDCWvolyW+fs8ZJFABpzLXbtdK1y71CKnHzAnpDxKXPj2mNKj+pyOXUCzFHzuxRJ94XOFygOWV3w==" + "resolved" "https://registry.npmjs.org/svgo/-/svgo-2.7.0.tgz" + "version" "2.7.0" + dependencies: + "@trysound/sax" "0.2.0" + "commander" "^7.2.0" + "css-select" "^4.1.3" + "css-tree" "^1.1.3" + "csso" "^4.2.0" + "nanocolors" "^0.1.12" + "stable" "^0.1.8" + +"table@^6.0.9": + "integrity" "sha512-UFZK67uvyNivLeQbVtkiUs8Uuuxv24aSL4/Vil2PJVtMgU8Lx0CYkP12uCGa3kjyQzOSgV1+z9Wkb82fCGsO0g==" + "resolved" "https://registry.npmjs.org/table/-/table-6.7.2.tgz" + "version" "6.7.2" + dependencies: + "ajv" "^8.0.1" + "lodash.clonedeep" "^4.5.0" + "lodash.truncate" "^4.4.2" + "slice-ansi" "^4.0.0" + "string-width" "^4.2.3" + "strip-ansi" "^6.0.1" + +"tapable@^2.0.0", "tapable@^2.1.1", "tapable@^2.2.0": + "integrity" "sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ==" + "resolved" "https://registry.npmjs.org/tapable/-/tapable-2.2.1.tgz" + "version" "2.2.1" + +"terser-webpack-plugin@^5.1.3": + "integrity" "sha512-E2CkNMN+1cho04YpdANyRrn8CyN4yMy+WdFKZIySFZrGXZxJwJP6PMNGGc/Mcr6qygQHUUqRxnAPmi0M9f00XA==" + "resolved" "https://registry.npmjs.org/terser-webpack-plugin/-/terser-webpack-plugin-5.2.4.tgz" + "version" "5.2.4" + dependencies: + "jest-worker" "^27.0.6" + "p-limit" "^3.1.0" + "schema-utils" "^3.1.1" + "serialize-javascript" "^6.0.0" + "source-map" 
"^0.6.1" + "terser" "^5.7.2" + +"terser@^5.7.2": + "integrity" "sha512-L1BJiXVmheAQQy+as0oF3Pwtlo4s3Wi1X2zNZ2NxOB4wx9bdS9Vk67XQENLFdLYGCK/Z2di53mTj/hBafR+dTA==" + "resolved" "https://registry.npmjs.org/terser/-/terser-5.15.0.tgz" + "version" "5.15.0" + dependencies: + "@jridgewell/source-map" "^0.3.2" + "acorn" "^8.5.0" + "commander" "^2.20.0" + "source-map-support" "~0.5.20" + +"text-table@^0.2.0": + "integrity" "sha1-f17oI66AUgfACvLfSoTsP8+lcLQ=" + "resolved" "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz" + "version" "0.2.0" + +"thenify-all@^1.0.0": + "integrity" "sha1-GhkY1ALY/D+Y+/I02wvMjMEOlyY=" + "resolved" "https://registry.npmjs.org/thenify-all/-/thenify-all-1.6.0.tgz" + "version" "1.6.0" + dependencies: + "thenify" ">= 3.1.0 < 4" + +"thenify@>= 3.1.0 < 4": + "integrity" "sha512-RVZSIV5IG10Hk3enotrhvz0T9em6cyHBLkH/YAZuKqd8hRkKhSfCGIcP2KUY0EPxndzANBmNllzWPwak+bheSw==" + "resolved" "https://registry.npmjs.org/thenify/-/thenify-3.3.1.tgz" + "version" "3.3.1" + dependencies: + "any-promise" "^1.0.0" + +"thunky@^1.0.2": + "integrity" "sha512-eHY7nBftgThBqOyHGVN+l8gF0BucP09fMo0oO/Lb0w1OF80dJv+lDVpXG60WMQvkcxAkNybKsrEIE3ZtKGmPrA==" + "resolved" "https://registry.npmjs.org/thunky/-/thunky-1.1.0.tgz" + "version" "1.1.0" + +"timsort@^0.3.0": + "integrity" "sha1-QFQRqOfmM5/mTbmiNN4R3DHgK9Q=" + "resolved" "https://registry.npmjs.org/timsort/-/timsort-0.3.0.tgz" + "version" "0.3.0" + +"to-fast-properties@^2.0.0": + "integrity" "sha1-3F5pjL0HkmW8c+A3doGk5Og/YW4=" + "resolved" "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-2.0.0.tgz" + "version" "2.0.0" + +"to-object-path@^0.3.0": + "integrity" "sha512-9mWHdnGRuh3onocaHzukyvCZhzvr6tiflAy/JRFXcJX0TjgfWA9pk9t8CMbzmBE4Jfw58pXbkngtBtqYxzNEyg==" + "resolved" "https://registry.npmjs.org/to-object-path/-/to-object-path-0.3.0.tgz" + "version" "0.3.0" + dependencies: + "kind-of" "^3.0.2" + +"to-regex-range@^2.1.0": + "integrity" "sha512-ZZWNfCjUokXXDGXFpZehJIkZqq91BcULFq/Pi7M5i4JnxXdhMKAK682z8bCW3o8Hj1wuuzoKcW3DfVzaP6VuNg==" + "resolved" "https://registry.npmjs.org/to-regex-range/-/to-regex-range-2.1.1.tgz" + "version" "2.1.1" + dependencies: + "is-number" "^3.0.0" + "repeat-string" "^1.6.1" + +"to-regex-range@^5.0.1": + "integrity" "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==" + "resolved" "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz" + "version" "5.0.1" + dependencies: + "is-number" "^7.0.0" + +"to-regex@^3.0.1", "to-regex@^3.0.2": + "integrity" "sha512-FWtleNAtZ/Ki2qtqej2CXTOayOH9bHDQF+Q48VpWyDXjbYxA4Yz8iDB31zXOBUlOHHKidDbqGVrTUvQMPmBGBw==" + "resolved" "https://registry.npmjs.org/to-regex/-/to-regex-3.0.2.tgz" + "version" "3.0.2" + dependencies: + "define-property" "^2.0.2" + "extend-shallow" "^3.0.2" + "regex-not" "^1.0.2" + "safe-regex" "^1.1.0" + +"toidentifier@1.0.0": + "integrity" "sha512-yaOH/Pk/VEhBWWTlhI+qXxDFXlejDGcQipMlyxda9nthulaxLZUNcUqFxokp0vcYnvteJln5FNQDRrxj3YcbVw==" + "resolved" "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.0.tgz" + "version" "1.0.0" + +"toidentifier@1.0.1": + "integrity" "sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==" + "resolved" "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz" + "version" "1.0.1" + +"totalist@^1.0.0": + "integrity" "sha512-gduQwd1rOdDMGxFG1gEvhV88Oirdo2p+KjoYFU7k2g+i7n6AFFbDQ5kMPUsW0pNbfQsB/cwXvT1i4Bue0s9g5g==" + "resolved" "https://registry.npmjs.org/totalist/-/totalist-1.1.0.tgz" + "version" "1.1.0" + 
+"tough-cookie@~2.5.0": + "integrity" "sha512-nlLsUzgm1kfLXSXfRZMc1KLAugd4hqJHDTvc2hDIwS3mZAfMEuMbc03SujMF+GEcpaX/qboeycw6iO8JwVv2+g==" + "resolved" "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.5.0.tgz" + "version" "2.5.0" + dependencies: + "psl" "^1.1.28" + "punycode" "^2.1.1" + +"tsconfig-paths@^3.11.0": + "integrity" "sha512-7ecdYDnIdmv639mmDwslG6KQg1Z9STTz1j7Gcz0xa+nshh/gKDAHcPxRbWOsA3SPp0tXP2leTcY9Kw+NAkfZzA==" + "resolved" "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-3.11.0.tgz" + "version" "3.11.0" + dependencies: + "@types/json5" "^0.0.29" + "json5" "^1.0.1" + "minimist" "^1.2.0" + "strip-bom" "^3.0.0" + +"tslib@^2.0.3", "tslib@2.3.0": + "integrity" "sha512-N82ooyxVNm6h1riLCoyS9e3fuJ3AMG2zIZs2Gd1ATcSFjSA23Q0fzjjZeh0jbJvWVDZ0cJT8yaNNaaXHzueNjg==" + "resolved" "https://registry.npmjs.org/tslib/-/tslib-2.3.0.tgz" + "version" "2.3.0" + +"tunnel-agent@^0.6.0": + "integrity" "sha1-J6XeoGs2sEoKmWZ3SykIaPD8QP0=" + "resolved" "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz" + "version" "0.6.0" + dependencies: + "safe-buffer" "^5.0.1" + +"tweetnacl@^0.14.3", "tweetnacl@~0.14.0": + "integrity" "sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q=" + "resolved" "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz" + "version" "0.14.5" + +"type-check@^0.4.0", "type-check@~0.4.0": + "integrity" "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==" + "resolved" "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz" + "version" "0.4.0" + dependencies: + "prelude-ls" "^1.2.1" + +"type-fest@^0.20.2": + "integrity" "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==" + "resolved" "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz" + "version" "0.20.2" + +"type-fest@^0.21.3": + "integrity" "sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==" + "resolved" "https://registry.npmjs.org/type-fest/-/type-fest-0.21.3.tgz" + "version" "0.21.3" + +"type-is@~1.6.17", "type-is@~1.6.18": + "integrity" "sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==" + "resolved" "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz" + "version" "1.6.18" + dependencies: + "media-typer" "0.3.0" + "mime-types" "~2.1.24" + +"unbox-primitive@^1.0.1": + "integrity" "sha512-tZU/3NqK3dA5gpE1KtyiJUrEB0lxnGkMFHptJ7q6ewdZ8s12QrODwNbhIJStmJkd1QDXa1NRA8aF2A1zk/Ypyw==" + "resolved" "https://registry.npmjs.org/unbox-primitive/-/unbox-primitive-1.0.1.tgz" + "version" "1.0.1" + dependencies: + "function-bind" "^1.1.1" + "has-bigints" "^1.0.1" + "has-symbols" "^1.0.2" + "which-boxed-primitive" "^1.0.2" + +"unicode-canonical-property-names-ecmascript@^2.0.0": + "integrity" "sha512-yY5PpDlfVIU5+y/BSCxAJRBIS1Zc2dDG3Ujq+sR0U+JjUevW2JhocOF+soROYDSaAezOzOKuyyixhD6mBknSmQ==" + "resolved" "https://registry.npmjs.org/unicode-canonical-property-names-ecmascript/-/unicode-canonical-property-names-ecmascript-2.0.0.tgz" + "version" "2.0.0" + +"unicode-match-property-ecmascript@^2.0.0": + "integrity" "sha512-5kaZCrbp5mmbz5ulBkDkbY0SsPOjKqVS35VpL9ulMPfSl0J0Xsm+9Evphv9CoIZFwre7aJoa94AY6seMKGVN5Q==" + "resolved" "https://registry.npmjs.org/unicode-match-property-ecmascript/-/unicode-match-property-ecmascript-2.0.0.tgz" + "version" "2.0.0" + dependencies: + "unicode-canonical-property-names-ecmascript" "^2.0.0" + "unicode-property-aliases-ecmascript" "^2.0.0" + +"unicode-match-property-value-ecmascript@^2.0.0": + "integrity" 
"sha512-7Yhkc0Ye+t4PNYzOGKedDhXbYIBe1XEQYQxOPyhcXNMJ0WCABqqj6ckydd6pWRZTHV4GuCPKdBAUiMc60tsKVw==" + "resolved" "https://registry.npmjs.org/unicode-match-property-value-ecmascript/-/unicode-match-property-value-ecmascript-2.0.0.tgz" + "version" "2.0.0" + +"unicode-property-aliases-ecmascript@^2.0.0": + "integrity" "sha512-5Zfuy9q/DFr4tfO7ZPeVXb1aPoeQSdeFMLpYuFebehDAhbuevLs5yxSZmIFN1tP5F9Wl4IpJrYojg85/zgyZHQ==" + "resolved" "https://registry.npmjs.org/unicode-property-aliases-ecmascript/-/unicode-property-aliases-ecmascript-2.0.0.tgz" + "version" "2.0.0" + +"union-value@^1.0.0": + "integrity" "sha512-tJfXmxMeWYnczCVs7XAEvIV7ieppALdyepWMkHkwciRpZraG/xwT+s2JN8+pr1+8jCRf80FFzvr+MpQeeoF4Xg==" + "resolved" "https://registry.npmjs.org/union-value/-/union-value-1.0.1.tgz" + "version" "1.0.1" + dependencies: + "arr-union" "^3.1.0" + "get-value" "^2.0.6" + "is-extendable" "^0.1.1" + "set-value" "^2.0.1" + +"uniqs@^2.0.0": + "integrity" "sha1-/+3ks2slKQaW5uFl1KWe25mOawI=" + "resolved" "https://registry.npmjs.org/uniqs/-/uniqs-2.0.0.tgz" + "version" "2.0.0" + +"unpipe@~1.0.0", "unpipe@1.0.0": + "integrity" "sha1-sr9O6FFKrmFltIF4KdIbLvSZBOw=" + "resolved" "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz" + "version" "1.0.0" + +"unset-value@^1.0.0": + "integrity" "sha512-PcA2tsuGSF9cnySLHTLSh2qrQiJ70mn+r+Glzxv2TWZblxsxCC52BDlZoPCsz7STd9pN7EZetkWZBAvk4cgZdQ==" + "resolved" "https://registry.npmjs.org/unset-value/-/unset-value-1.0.0.tgz" + "version" "1.0.0" + dependencies: + "has-value" "^0.3.1" + "isobject" "^3.0.0" + +"upath@^1.1.1": + "integrity" "sha512-aZwGpamFO61g3OlfT7OQCHqhGnW43ieH9WZeP7QxN/G/jS4jfqUkZxoryvJgVPEcrl5NL/ggHsSmLMHuH64Lhg==" + "resolved" "https://registry.npmjs.org/upath/-/upath-1.2.0.tgz" + "version" "1.2.0" + +"uri-js@^4.2.2": + "integrity" "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==" + "resolved" "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz" + "version" "4.4.1" + dependencies: + "punycode" "^2.1.0" + +"urix@^0.1.0": + "integrity" "sha512-Am1ousAhSLBeB9cG/7k7r2R0zj50uDRlZHPGbazid5s9rlF1F/QKYObEKSIunSjIOkJZqwRRLpvewjEkM7pSqg==" + "resolved" "https://registry.npmjs.org/urix/-/urix-0.1.0.tgz" + "version" "0.1.0" + +"url-loader@^4.1.1": + "integrity" "sha512-3BTV812+AVHHOJQO8O5MkWgZ5aosP7GnROJwvzLS9hWDj00lZ6Z0wNak423Lp9PBZN05N+Jk/N5Si8jRAlGyWA==" + "resolved" "https://registry.npmjs.org/url-loader/-/url-loader-4.1.1.tgz" + "version" "4.1.1" + dependencies: + "loader-utils" "^2.0.0" + "mime-types" "^2.1.27" + "schema-utils" "^3.0.0" + +"url-parse@^1.5.10": + "integrity" "sha512-WypcfiRhfeUP9vvF0j6rw0J3hrWrw6iZv3+22h6iRMJ/8z1Tj6XfLP4DsUix5MhMPnXpiHDoKyoZ/bdCkwBCiQ==" + "resolved" "https://registry.npmjs.org/url-parse/-/url-parse-1.5.10.tgz" + "version" "1.5.10" + dependencies: + "querystringify" "^2.1.1" + "requires-port" "^1.0.0" + +"url@^0.11.0": + "integrity" "sha512-kbailJa29QrtXnxgq+DdCEGlbTeYM2eJUxsz6vjZavrCYPMIFHMKQmSKYAIuUK2i7hgPm28a8piX5NTUtM/LKQ==" + "resolved" "https://registry.npmjs.org/url/-/url-0.11.0.tgz" + "version" "0.11.0" + dependencies: + "punycode" "1.3.2" + "querystring" "0.2.0" + +"use@^3.1.0": + "integrity" "sha512-cwESVXlO3url9YWlFW/TA9cshCEhtu7IKJ/p5soJ/gGpj7vbvFrAY/eIioQ6Dw23KjZhYgiIo8HOs1nQ2vr/oQ==" + "resolved" "https://registry.npmjs.org/use/-/use-3.1.1.tgz" + "version" "3.1.1" + +"util-deprecate@^1.0.1", "util-deprecate@^1.0.2", "util-deprecate@~1.0.1": + "integrity" "sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=" + "resolved" "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz" + 
"version" "1.0.2" + +"utila@~0.4": + "integrity" "sha1-ihagXURWV6Oupe7MWxKk+lN5dyw=" + "resolved" "https://registry.npmjs.org/utila/-/utila-0.4.0.tgz" + "version" "0.4.0" + +"utils-merge@1.0.1": + "integrity" "sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA==" + "resolved" "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz" + "version" "1.0.1" + +"uuid@^3.3.2": + "integrity" "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==" + "resolved" "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz" + "version" "3.4.0" + +"uuid@^8.3.2": + "integrity" "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==" + "resolved" "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz" + "version" "8.3.2" + +"v8-compile-cache@^2.0.3": + "integrity" "sha512-l8lCEmLcLYZh4nbunNZvQCJc5pv7+RCwa8q/LdUx8u7lsWvPDKmpodJAJNwkAhJC//dFY48KuIEmjtd4RViDrA==" + "resolved" "https://registry.npmjs.org/v8-compile-cache/-/v8-compile-cache-2.3.0.tgz" + "version" "2.3.0" + +"vary@~1.1.2": + "integrity" "sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==" + "resolved" "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz" + "version" "1.1.2" + +"vendors@^1.0.3": + "integrity" "sha512-/juG65kTL4Cy2su4P8HjtkTxk6VmJDiOPBufWniqQ6wknac6jNiXS9vU+hO3wgusiyqWlzTbVHi0dyJqRONg3w==" + "resolved" "https://registry.npmjs.org/vendors/-/vendors-1.0.4.tgz" + "version" "1.0.4" + +"verror@1.10.0": + "integrity" "sha1-OhBcoXBTr1XW4nDB+CiGguGNpAA=" + "resolved" "https://registry.npmjs.org/verror/-/verror-1.10.0.tgz" + "version" "1.10.0" + dependencies: + "assert-plus" "^1.0.0" + "core-util-is" "1.0.2" + "extsprintf" "^1.2.0" + +"vue-eslint-parser@^7.10.0", "vue-eslint-parser@^7.6.0": + "integrity" "sha512-qh3VhDLeh773wjgNTl7ss0VejY9bMMa0GoDG2fQVyDzRFdiU3L7fw74tWZDHNQXdZqxO3EveQroa9ct39D2nqg==" + "resolved" "https://registry.npmjs.org/vue-eslint-parser/-/vue-eslint-parser-7.11.0.tgz" + "version" "7.11.0" + dependencies: + "debug" "^4.1.1" + "eslint-scope" "^5.1.1" + "eslint-visitor-keys" "^1.1.0" + "espree" "^6.2.1" + "esquery" "^1.4.0" + "lodash" "^4.17.21" + "semver" "^6.3.0" + +"vue-i18n@^9.0.0": + "integrity" "sha512-JeRdNVxS2OGp1E+pye5XB6+M6BBkHwAv9C80Q7+kzoMdUDGRna06tjC0vCB/jDX9aWrl5swxOMFcyAr7or8XTA==" + "resolved" "https://registry.npmjs.org/vue-i18n/-/vue-i18n-9.1.9.tgz" + "version" "9.1.9" + dependencies: + "@intlify/core-base" "9.1.9" + "@intlify/shared" "9.1.9" + "@intlify/vue-devtools" "9.1.9" + "@vue/devtools-api" "^6.0.0-beta.7" + +"vue-loader@^16.1.2": + "integrity" "sha512-V53TJbHmzjBhCG5OYI2JWy/aYDspz4oVHKxS43Iy212GjGIG1T3EsB3+GWXFm/1z5VwjdjLmdZUFYM70y77vtQ==" + "resolved" "https://registry.npmjs.org/vue-loader/-/vue-loader-16.8.1.tgz" + "version" "16.8.1" + dependencies: + "chalk" "^4.1.0" + "hash-sum" "^2.0.0" + "loader-utils" "^2.0.0" + +"vue-request@^1.2.0": + "integrity" "sha512-Yo1KxKpucNZyv/angPv7S8q00MMhFi7mFyITVIIaNMzz6Mu7QrPfP1fIQOoupEqFK+dgbXa8M7/v+7UBPMrCMQ==" + "resolved" "https://registry.npmjs.org/vue-request/-/vue-request-1.2.3.tgz" + "version" "1.2.3" + +"vue-router@^4.0.1": + "integrity" "sha512-CPXvfqe+mZLB1kBWssssTiWg4EQERyqJZes7USiqfW9B5N2x+nHlnsM1D3b5CaJ6qgCvMmYJnz+G0iWjNCvXrg==" + "resolved" "https://registry.npmjs.org/vue-router/-/vue-router-4.0.12.tgz" + "version" "4.0.12" + dependencies: + "@vue/devtools-api" "^6.0.0-beta.18" + +"vue-types@^3.0.0": + "integrity" 
"sha512-IwUC0Aq2zwaXqy74h4WCvFCUtoV0iSWr0snWnE9TnU18S66GAQyqQbRf2qfJtUuiFsBf6qp0MEwdonlwznlcrw==" + "resolved" "https://registry.npmjs.org/vue-types/-/vue-types-3.0.2.tgz" + "version" "3.0.2" + dependencies: + "is-plain-object" "3.0.1" + +"vue@^3.0.0", "vue@^3.0.5", "vue@^3.1.0", "vue@>=3.0.3", "vue@>=3.1.0", "vue@3.2.20": + "integrity" "sha512-81JjEP4OGk9oO8+CU0h2nFPGgJBm9mNa3kdCX2k6FuRdrWrC+CNe+tOnuIeTg8EWwQuI+wwdra5Q7vSzp7p4Iw==" + "resolved" "https://registry.npmjs.org/vue/-/vue-3.2.20.tgz" + "version" "3.2.20" + dependencies: + "@vue/compiler-dom" "3.2.20" + "@vue/compiler-sfc" "3.2.20" + "@vue/runtime-dom" "3.2.20" + "@vue/server-renderer" "3.2.20" + "@vue/shared" "3.2.20" + +"warning@^4.0.0": + "integrity" "sha512-rpJyN222KWIvHJ/F53XSZv0Zl/accqHR8et1kpaMTD/fLCRxtV8iX8czMzY7sVZupTI3zcUTg8eycS2kNF9l6w==" + "resolved" "https://registry.npmjs.org/warning/-/warning-4.0.3.tgz" + "version" "4.0.3" + dependencies: + "loose-envify" "^1.0.0" + +"watchpack@^2.2.0": + "integrity" "sha512-up4YAn/XHgZHIxFBVCdlMiWDj6WaLKpwVeGQk2I5thdYxF/KmF0aaz6TfJZ/hfl1h/XlcDr7k1KH7ThDagpFaA==" + "resolved" "https://registry.npmjs.org/watchpack/-/watchpack-2.2.0.tgz" + "version" "2.2.0" + dependencies: + "glob-to-regexp" "^0.4.1" + "graceful-fs" "^4.1.2" + +"wbuf@^1.1.0", "wbuf@^1.7.3": + "integrity" "sha512-O84QOnr0icsbFGLS0O3bI5FswxzRr8/gHwWkDlQFskhSPryQXvrTMxjxGP4+iWYoauLoBvfDpkrOauZ+0iZpDA==" + "resolved" "https://registry.npmjs.org/wbuf/-/wbuf-1.7.3.tgz" + "version" "1.7.3" + dependencies: + "minimalistic-assert" "^1.0.0" + +"webpack-bundle-analyzer@^4.4.0": + "integrity" "sha512-GUMZlM3SKwS8Z+CKeIFx7CVoHn3dXFcUAjT/dcZQQmfSZGvitPfMob2ipjai7ovFFqPvTqkEZ/leL4O0YOdAYQ==" + "resolved" "https://registry.npmjs.org/webpack-bundle-analyzer/-/webpack-bundle-analyzer-4.5.0.tgz" + "version" "4.5.0" + dependencies: + "acorn" "^8.0.4" + "acorn-walk" "^8.0.0" + "chalk" "^4.1.0" + "commander" "^7.2.0" + "gzip-size" "^6.0.0" + "lodash" "^4.17.20" + "opener" "^1.5.2" + "sirv" "^1.0.7" + "ws" "^7.3.1" + +"webpack-chain@^6.5.1": + "integrity" "sha512-7doO/SRtLu8q5WM0s7vPKPWX580qhi0/yBHkOxNkv50f6qB76Zy9o2wRTrrPULqYTvQlVHuvbA8v+G5ayuUDsA==" + "resolved" "https://registry.npmjs.org/webpack-chain/-/webpack-chain-6.5.1.tgz" + "version" "6.5.1" + dependencies: + "deepmerge" "^1.5.2" + "javascript-stringify" "^2.0.1" + +"webpack-dev-middleware@^3.7.2": + "integrity" "sha512-djelc/zGiz9nZj/U7PTBi2ViorGJXEWo/3ltkPbDyxCXhhEXkW0ce99falaok4TPj+AsxLiXJR0EBOb0zh9fKQ==" + "resolved" "https://registry.npmjs.org/webpack-dev-middleware/-/webpack-dev-middleware-3.7.3.tgz" + "version" "3.7.3" + dependencies: + "memory-fs" "^0.4.1" + "mime" "^2.4.4" + "mkdirp" "^0.5.1" + "range-parser" "^1.2.1" + "webpack-log" "^2.0.0" + +"webpack-dev-server@^3.11.2": + "integrity" "sha512-3x31rjbEQWKMNzacUZRE6wXvUFuGpH7vr0lIEbYpMAG9BOxi0928QU1BBswOAP3kg3H1O4hiS+sq4YyAn6ANnA==" + "resolved" "https://registry.npmjs.org/webpack-dev-server/-/webpack-dev-server-3.11.3.tgz" + "version" "3.11.3" + dependencies: + "ansi-html-community" "0.0.8" + "bonjour" "^3.5.0" + "chokidar" "^2.1.8" + "compression" "^1.7.4" + "connect-history-api-fallback" "^1.6.0" + "debug" "^4.1.1" + "del" "^4.1.1" + "express" "^4.17.1" + "html-entities" "^1.3.1" + "http-proxy-middleware" "0.19.1" + "import-local" "^2.0.0" + "internal-ip" "^4.3.0" + "ip" "^1.1.5" + "is-absolute-url" "^3.0.3" + "killable" "^1.0.1" + "loglevel" "^1.6.8" + "opn" "^5.5.0" + "p-retry" "^3.0.1" + "portfinder" "^1.0.26" + "schema-utils" "^1.0.0" + "selfsigned" "^1.10.8" + "semver" "^6.3.0" + "serve-index" "^1.9.1" + 
"sockjs" "^0.3.21" + "sockjs-client" "^1.5.0" + "spdy" "^4.0.2" + "strip-ansi" "^3.0.1" + "supports-color" "^6.1.0" + "url" "^0.11.0" + "webpack-dev-middleware" "^3.7.2" + "webpack-log" "^2.0.0" + "ws" "^6.2.1" + "yargs" "^13.3.2" + +"webpack-log@^2.0.0": + "integrity" "sha512-cX8G2vR/85UYG59FgkoMamwHUIkSSlV3bBMRsbxVXVUk2j6NleCKjQ/WE9eYg9WY4w25O9w8wKP4rzNZFmUcUg==" + "resolved" "https://registry.npmjs.org/webpack-log/-/webpack-log-2.0.0.tgz" + "version" "2.0.0" + dependencies: + "ansi-colors" "^3.0.0" + "uuid" "^3.3.2" + +"webpack-sources@^1.1.0": + "integrity" "sha512-lgTS3Xhv1lCOKo7SA5TjKXMjpSM4sBjNV5+q2bqesbSPs5FjGmU6jjtBSkX9b4qW87vDIsCIlUPOEhbZrMdjeQ==" + "resolved" "https://registry.npmjs.org/webpack-sources/-/webpack-sources-1.4.3.tgz" + "version" "1.4.3" + dependencies: + "source-list-map" "^2.0.0" + "source-map" "~0.6.1" + +"webpack-sources@^3.2.0": + "integrity" "sha512-t6BMVLQ0AkjBOoRTZgqrWm7xbXMBzD+XDq2EZ96+vMfn3qKgsvdXZhbPZ4ElUOpdv4u+iiGe+w3+J75iy/bYGA==" + "resolved" "https://registry.npmjs.org/webpack-sources/-/webpack-sources-3.2.1.tgz" + "version" "3.2.1" + +"webpack@^4.0.0 || ^5.0.0", "webpack@^4.1.0 || ^5.0.0-0", "webpack@^4.27.0 || ^5.0.0", "webpack@^4.4.0 || ^5.0.0", "webpack@^4.5.0 || 5.x", "webpack@^5.0.0", "webpack@^5.1.0", "webpack@^5.20.0", "webpack@^5.24.2", "webpack@>=2", "webpack@3 || 4 || 5": + "integrity" "sha512-I01IQV9K96FlpXX3V0L4nvd7gb0r7thfuu1IfT2P4uOHOA77nKARAKDYGe/tScSHKnffNIyQhLC8kRXzY4KEHQ==" + "resolved" "https://registry.npmjs.org/webpack/-/webpack-5.59.1.tgz" + "version" "5.59.1" + dependencies: + "@types/eslint-scope" "^3.7.0" + "@types/estree" "^0.0.50" + "@webassemblyjs/ast" "1.11.1" + "@webassemblyjs/wasm-edit" "1.11.1" + "@webassemblyjs/wasm-parser" "1.11.1" + "acorn" "^8.4.1" + "acorn-import-assertions" "^1.7.6" + "browserslist" "^4.14.5" + "chrome-trace-event" "^1.0.2" + "enhanced-resolve" "^5.8.3" + "es-module-lexer" "^0.9.0" + "eslint-scope" "5.1.1" + "events" "^3.2.0" + "glob-to-regexp" "^0.4.1" + "graceful-fs" "^4.2.4" + "json-parse-better-errors" "^1.0.2" + "loader-runner" "^4.2.0" + "mime-types" "^2.1.27" + "neo-async" "^2.6.2" + "schema-utils" "^3.1.0" + "tapable" "^2.1.1" + "terser-webpack-plugin" "^5.1.3" + "watchpack" "^2.2.0" + "webpack-sources" "^3.2.0" + +"webpackbar@^5.0.0-3": + "integrity" "sha512-viW6KCYjMb0NPoDrw2jAmLXU2dEOhRrtku28KmOfeE1vxbfwCYuTbTaMhnkrCZLFAFyY9Q49Z/jzYO80Dw5b8g==" + "resolved" "https://registry.npmjs.org/webpackbar/-/webpackbar-5.0.0-3.tgz" + "version" "5.0.0-3" + dependencies: + "ansi-escapes" "^4.3.1" + "chalk" "^4.1.0" + "consola" "^2.15.0" + "figures" "^3.2.0" + "pretty-time" "^1.1.0" + "std-env" "^2.2.1" + "text-table" "^0.2.0" + "wrap-ansi" "^7.0.0" + +"websocket-driver@^0.7.4", "websocket-driver@>=0.5.1": + "integrity" "sha512-b17KeDIQVjvb0ssuSDF2cYXSg2iztliJ4B9WdsuB6J952qCPKmnVq4DyW5motImXHDC1cBT/1UezrJVsKw5zjg==" + "resolved" "https://registry.npmjs.org/websocket-driver/-/websocket-driver-0.7.4.tgz" + "version" "0.7.4" + dependencies: + "http-parser-js" ">=0.5.1" + "safe-buffer" ">=5.1.0" + "websocket-extensions" ">=0.1.1" + +"websocket-extensions@>=0.1.1": + "integrity" "sha512-OqedPIGOfsDlo31UNwYbCFMSaO9m9G/0faIHj5/dZFDMFqPTcx6UwqyOy3COEaEOg/9VsGIpdqn62W5KhoKSpg==" + "resolved" "https://registry.npmjs.org/websocket-extensions/-/websocket-extensions-0.1.4.tgz" + "version" "0.1.4" + +"which-boxed-primitive@^1.0.2": + "integrity" "sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg==" + "resolved" 
"https://registry.npmjs.org/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz" + "version" "1.0.2" + dependencies: + "is-bigint" "^1.0.1" + "is-boolean-object" "^1.1.0" + "is-number-object" "^1.0.4" + "is-string" "^1.0.5" + "is-symbol" "^1.0.3" + +"which-module@^2.0.0": + "integrity" "sha512-B+enWhmw6cjfVC7kS8Pj9pCrKSc5txArRyaYGe088shv/FGWH+0Rjx/xPgtsWfsUtS27FkP697E4DDhgrgoc0Q==" + "resolved" "https://registry.npmjs.org/which-module/-/which-module-2.0.0.tgz" + "version" "2.0.0" + +"which@^1.2.9": + "integrity" "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==" + "resolved" "https://registry.npmjs.org/which/-/which-1.3.1.tgz" + "version" "1.3.1" + dependencies: + "isexe" "^2.0.0" + +"which@^2.0.1": + "integrity" "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==" + "resolved" "https://registry.npmjs.org/which/-/which-2.0.2.tgz" + "version" "2.0.2" + dependencies: + "isexe" "^2.0.0" + +"word-wrap@^1.2.3": + "integrity" "sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==" + "resolved" "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz" + "version" "1.2.3" + +"wrap-ansi@^5.1.0": + "integrity" "sha512-QC1/iN/2/RPVJ5jYK8BGttj5z83LmSKmvbvrXPNCLZSEb32KKVDJDl/MOt2N01qU2H/FkzEa9PKto1BqDjtd7Q==" + "resolved" "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-5.1.0.tgz" + "version" "5.1.0" + dependencies: + "ansi-styles" "^3.2.0" + "string-width" "^3.0.0" + "strip-ansi" "^5.0.0" + +"wrap-ansi@^7.0.0": + "integrity" "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==" + "resolved" "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz" + "version" "7.0.0" + dependencies: + "ansi-styles" "^4.0.0" + "string-width" "^4.1.0" + "strip-ansi" "^6.0.0" + +"wrappy@1": + "integrity" "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=" + "resolved" "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz" + "version" "1.0.2" + +"ws@^6.2.1": + "integrity" "sha512-zmhltoSR8u1cnDsD43TX59mzoMZsLKqUweyYBAIvTngR3shc0W6aOZylZmq/7hqyVxPdi+5Ud2QInblgyE72fw==" + "resolved" "https://registry.npmjs.org/ws/-/ws-6.2.2.tgz" + "version" "6.2.2" + dependencies: + "async-limiter" "~1.0.0" + +"ws@^7.3.1": + "integrity" "sha512-BAkMFcAzl8as1G/hArkxOxq3G7pjUqQ3gzYbLL0/5zNkph70e+lCoxBGnm6AW1+/aiNeV4fnKqZ8m4GZewmH2w==" + "resolved" "https://registry.npmjs.org/ws/-/ws-7.5.5.tgz" + "version" "7.5.5" + +"y18n@^4.0.0": + "integrity" "sha512-JKhqTOwSrqNA1NY5lSztJ1GrBiUodLMmIZuLiDaMRJ+itFd+ABVE8XBjOvIWL+rSqNDC74LCSFmlb/U4UZ4hJQ==" + "resolved" "https://registry.npmjs.org/y18n/-/y18n-4.0.3.tgz" + "version" "4.0.3" + +"y18n@^5.0.5": + "integrity" "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==" + "resolved" "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz" + "version" "5.0.8" + +"yallist@^4.0.0": + "integrity" "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" + "resolved" "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz" + "version" "4.0.0" + +"yaml@^1.10.0", "yaml@^1.10.2": + "integrity" "sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg==" + "resolved" "https://registry.npmjs.org/yaml/-/yaml-1.10.2.tgz" + "version" "1.10.2" + +"yargs-parser@^13.1.2": + "integrity" "sha512-3lbsNRf/j+A4QuSZfDRA7HRSfWrzO0YjqTJd5kjAq37Zep1CEgaYmrH9Q3GwPiB9cHyd1Y1UwggGhJGoxipbzg==" + "resolved" 
"https://registry.npmjs.org/yargs-parser/-/yargs-parser-13.1.2.tgz" + "version" "13.1.2" + dependencies: + "camelcase" "^5.0.0" + "decamelize" "^1.2.0" + +"yargs-parser@^20.2.2", "yargs-parser@^20.2.9": + "integrity" "sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w==" + "resolved" "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.9.tgz" + "version" "20.2.9" + +"yargs@^13.3.2": + "integrity" "sha512-AX3Zw5iPruN5ie6xGRIDgqkT+ZhnRlZMLMHAs8tg7nRruy2Nb+i5o9bwghAogtM08q1dpr2LVoS8KSTMYpWXUw==" + "resolved" "https://registry.npmjs.org/yargs/-/yargs-13.3.2.tgz" + "version" "13.3.2" + dependencies: + "cliui" "^5.0.0" + "find-up" "^3.0.0" + "get-caller-file" "^2.0.1" + "require-directory" "^2.1.1" + "require-main-filename" "^2.0.0" + "set-blocking" "^2.0.0" + "string-width" "^3.0.0" + "which-module" "^2.0.0" + "y18n" "^4.0.0" + "yargs-parser" "^13.1.2" + +"yargs@^16.0.0": + "integrity" "sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw==" + "resolved" "https://registry.npmjs.org/yargs/-/yargs-16.2.0.tgz" + "version" "16.2.0" + dependencies: + "cliui" "^7.0.2" + "escalade" "^3.1.1" + "get-caller-file" "^2.0.5" + "require-directory" "^2.1.1" + "string-width" "^4.2.0" + "y18n" "^5.0.5" + "yargs-parser" "^20.2.2" + +"yocto-queue@^0.1.0": + "integrity" "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==" + "resolved" "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz" + "version" "0.1.0" + +"zrender@5.2.1": + "integrity" "sha512-M3bPGZuyLTNBC6LiNKXJwSCtglMp8XUEqEBG+2MdICDI3d1s500Y4P0CzldQGsqpRVB7fkvf3BKQQRxsEaTlsw==" + "resolved" "https://registry.npmjs.org/zrender/-/zrender-5.2.1.tgz" + "version" "5.2.1" + dependencies: + "tslib" "2.3.0"