diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 5f93411ce..018e7e0a4 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -24,12 +24,10 @@ on: jobs: build: - runs-on: ubuntu-latest - strategy: matrix: - node-version: [14.17.3] + node-version: [14.17.5] # See supported Node.js release schedule at https://nodejs.org/en/about/releases/ steps: diff --git a/.gitignore b/.gitignore index 46dec8ece..a2857cb35 100644 --- a/.gitignore +++ b/.gitignore @@ -35,4 +35,6 @@ package-lock.json web/dist -workspace/ \ No newline at end of file +workspace/ + +.flattened-pom.xml \ No newline at end of file diff --git a/README-ZH.md b/README-ZH.md index 2264648a1..ffdc9911f 100644 --- a/README-ZH.md +++ b/README-ZH.md @@ -63,7 +63,7 @@ Exchangis 抽象了一套统一的数据源和同步作业定义插件,允许 如果您想得到最快的响应,请给我们提 issue,或者扫码进群: -![communication](images/zh_CN/ch1/communication.png) +![communication](images/zh_CN/ch1/code.png) ## License diff --git a/README.md b/README.md index 889504b5a..5e211e77a 100644 --- a/README.md +++ b/README.md @@ -59,7 +59,7 @@ With the help of [Linkis](https://github.com/apache/incubator-linkis) computing If you want to get the fastest response, please mention issue to us, or scan the code into the group : -![communication](images/en_US/ch1/communication.png) +![communication](images/en_US/ch1/code.png) ## License diff --git a/assembly-package/config/application-exchangis.yml b/assembly-package/config/application-exchangis.yml index 86268b937..946ee2cb8 100644 --- a/assembly-package/config/application-exchangis.yml +++ b/assembly-package/config/application-exchangis.yml @@ -2,7 +2,7 @@ server: port: 9321 spring: application: - name: exchangis-server + name: dss-exchangis-main-server-dev eureka: client: serviceUrl: diff --git a/assembly-package/config/dss-exchangis-server.properties b/assembly-package/config/dss-exchangis-server.properties index a8aa9a830..70ebaca62 100644 --- a/assembly-package/config/dss-exchangis-server.properties 
+++ b/assembly-package/config/dss-exchangis-server.properties @@ -26,10 +26,12 @@ wds.linkis.gateway.url=http://{LINKIS_IP}:{LINKIS_PORT}/ wds.linkis.log.clear=true wds.linkis.server.version=v1 +# server rpc +wds.linkis.ms.service.scan.package=com.webank.wedatasphere.exchangis + # datasource client -wds.exchangis.datasource.client.serverurl=http://{LINKIS_IP}:{LINKIS_PORT}/ -wds.exchangis.datasource.client.authtoken.key=EXCHANGIS-AUTH -wds.exchangis.datasource.client.authtoken.value=EXCHANGIS-AUTH +wds.exchangis.datasource.client.server-url=http://{LINKIS_IP}:{LINKIS_PORT}/ +wds.exchangis.datasource.client.token.value=EXCHANGIS-AUTH wds.exchangis.datasource.client.dws.version=v1 # launcher client diff --git a/assembly-package/pom.xml b/assembly-package/pom.xml index 45315a5fe..dc473f537 100644 --- a/assembly-package/pom.xml +++ b/assembly-package/pom.xml @@ -21,7 +21,8 @@ exchangis com.webank.wedatasphere.exchangis - 1.1.2 + ${revision} + ../pom.xml 4.0.0 assembly-package @@ -62,7 +63,7 @@ false - wedatasphere-exchangis-${exchangis.version} + wedatasphere-exchangis-${revision} false false diff --git a/assembly-package/sbin/daemon.sh b/assembly-package/sbin/daemon.sh index a21ccfab6..c7ee8f1e1 100644 --- a/assembly-package/sbin/daemon.sh +++ b/assembly-package/sbin/daemon.sh @@ -15,6 +15,7 @@ # limitations under the License. # +load_env_definitions ${ENV_FILE} if [[ "x"${EXCHANGIS_HOME} != "x" ]]; then source ${EXCHANGIS_HOME}/sbin/launcher.sh source ${EXCHANGIS_HOME}/sbin/common.sh @@ -48,7 +49,6 @@ restart(){ COMMAND=$1 case $COMMAND in start|stop|restart) - load_env_definitions ${ENV_FILE} if [[ ! 
-z $2 ]]; then SERVICE_NAME=${MODULE_DEFAULT_PREFIX}$2${MODULE_DEFAULT_SUFFIX} MAIN_CLASS=${MODULE_MAIN_CLASS[${SERVICE_NAME}]} diff --git a/assembly-package/sbin/env.properties b/assembly-package/sbin/env.properties index f849b4fa9..c6e528ab4 100644 --- a/assembly-package/sbin/env.properties +++ b/assembly-package/sbin/env.properties @@ -2,3 +2,5 @@ EXCHANGIS_CONF_PATH=/appcom/config/exchangis-config/background EXCHANGIS_LOG_PATH=/appcom/logs/exchangis/background MODULE_DEFAULT_PREFIX="dss-exchangis-main-" MODULE_DEFAULT_SUFFIX="-dev" +DEBUG_MODE=false +DEBUG_PORT=8321 \ No newline at end of file diff --git a/assembly-package/sbin/install.sh b/assembly-package/sbin/install.sh index a9f23aa66..2ce1569f7 100644 --- a/assembly-package/sbin/install.sh +++ b/assembly-package/sbin/install.sh @@ -30,7 +30,7 @@ PACKAGE_DIR="${DIR}/../packages" # Home Path EXCHNGIS_HOME_PATH="${DIR}/../" -CONF_FILE_PATH="bin/configure.sh" +CONF_FILE_PATH="sbin/configure.sh" FORCE_INSTALL=false SKIP_PACKAGE=false USER=`whoami` @@ -125,7 +125,7 @@ interact_echo(){ # Initalize database init_database(){ - BOOTSTRAP_PROP_FILE="${CONF_PATH}/dss-exchangis-server.properties" + BOOTSTRAP_PROP_FILE="${CONF_PATH}/dss-exchangis-main-server-dev.properties" if [ "x${SQL_SOURCE_PATH}" != "x" ] && [ -f "${SQL_SOURCE_PATH}" ]; then `mysql --version >/dev/null 2>&1` DATASOURCE_URL="jdbc:mysql:\/\/${MYSQL_HOST}:${MYSQL_PORT}\/${DATABASE}\?useSSL=false\&characterEncoding=UTF-8\&allowMultiQueries=true" diff --git a/assembly-package/sbin/launcher.sh b/assembly-package/sbin/launcher.sh index 50a79d279..4c9530eae 100644 --- a/assembly-package/sbin/launcher.sh +++ b/assembly-package/sbin/launcher.sh @@ -110,9 +110,9 @@ construct_java_command(){ # mkdir mkdir -p ${EXCHANGIS_LOG_PATH} mkdir -p ${EXCHANGIS_PID_PATH} - local classpath=${EXCHANGIS_CONF_PATH}":." 
+ local classpath=${EXCHANGIS_CONF_PATH} local opts="" - classpath=${EXCHANGIS_LIB_PATH}/"exchangis-server/*:"${classpath} + classpath=${classpath}":"${EXCHANGIS_LIB_PATH}/"exchangis-server/*:." LOG INFO "classpath:"${classpath} if [[ "x${EXCHANGIS_JAVA_OPTS}" == "x" ]]; then # Use G1 garbage collector @@ -133,7 +133,6 @@ construct_java_command(){ opts=${opts}" -Dlogging.level.reactor.ipc.netty.channel.CloseableContextHandler=off" opts=${opts}" -Duser.dir=${USER_DIR}" opts=${opts}" -classpath "${classpath} - LOG INFO "opts:"${opts} if [[ "x${JAVA_HOME}" != "x" ]]; then EXEC_JAVA=${JAVA_HOME}"/bin/java "${opts}" "$2 else diff --git a/db/1.1.3/exchangis_ddl.sql b/db/1.1.3/exchangis_ddl.sql new file mode 100644 index 000000000..a503dba4e --- /dev/null +++ b/db/1.1.3/exchangis_ddl.sql @@ -0,0 +1 @@ +ALTER TABLE exchangis_launchable_task CHANGE linkis_job_content linkis_job_content mediumtext NULL; \ No newline at end of file diff --git a/db/1.1.3/exchangis_dml.sql b/db/1.1.3/exchangis_dml.sql new file mode 100644 index 000000000..8d8530575 --- /dev/null +++ b/db/1.1.3/exchangis_dml.sql @@ -0,0 +1,7 @@ +INSERT INTO `exchangis_job_param_config` (config_key,config_name,config_direction,`type`,ui_type,ui_field,ui_label,unit,required,value_type,value_range,default_value,validate_type,validate_range,validate_msg,is_hidden,is_advanced,source,`level`,treename,sort,description,status,ref_id) VALUES +('writeMode','写入方式','DATAX-SINK','STARROCKS','OPTION','writeMode','写入方式','',1,'OPTION','["upsert"]','upsert','','','写入方式输入错误',0,0,'',1,'',1,'',1,NULL) +,('batchSize','批量字节数大小','DATAX-SINK','STARROCKS','INPUT','maxBatchSize','批量字节数大小','',0,'NUMBER','','','REGEX','^[1-9]\\d*$','批量大小输入错误',0,0,'',1,'',2,'',1,NULL); + +UPDATE exchangis_engine_settings +SET
engine_direction='mysql->hive,hive->mysql,mysql->oracle,oracle->mysql,oracle->hive,hive->oracle,mongodb->hive,hive->mongodb,mysql->elasticsearch,oracle->elasticsearch,mongodb->elasticsearch,mysql->mongodb,mongodb->mysql,oracle->mongodb,mongodb->oracle,hive->starrocks' +WHERE engine_name='datax'; \ No newline at end of file diff --git a/db/exchangis_ddl.sql b/db/exchangis_ddl.sql index c04796624..28ce7ac9d 100644 --- a/db/exchangis_ddl.sql +++ b/db/exchangis_ddl.sql @@ -92,7 +92,7 @@ CREATE TABLE `exchangis_project_user` ( `priv` int(20) DEFAULT NULL, `last_update_time` datetime DEFAULT CURRENT_TIMESTAMP, PRIMARY KEY (`id`), - UNIQUE KEY `exchangis_project_user_un` (`project_id`) + UNIQUE KEY `exchangis_project_user_un` (`project_id`,`priv_user`,`priv`) ) ENGINE=InnoDB AUTO_INCREMENT=844 DEFAULT CHARSET=utf8 COLLATE=utf8_bin ROW_FORMAT=COMPACT; -- exchangis_launchable_task definition @@ -106,7 +106,7 @@ CREATE TABLE `exchangis_launchable_task` ( `engine_type` varchar(45) DEFAULT '', `execute_user` varchar(50) DEFAULT '', `linkis_job_name` varchar(100) NOT NULL, - `linkis_job_content` text NOT NULL, + `linkis_job_content` mediumtext NOT NULL, `linkis_params` text DEFAULT NULL, `linkis_source` varchar(64) DEFAULT NULL, `labels` varchar(64) DEFAULT NULL, diff --git a/db/exchangis_dml.sql b/db/exchangis_dml.sql index 2e6bee29e..967381fa0 100644 --- a/db/exchangis_dml.sql +++ b/db/exchangis_dml.sql @@ -38,41 +38,56 @@ INSERT INTO `exchangis_job_param_config` (config_key,config_name,config_directio ,('setting.max.memory','作业最大使用内存','','DATAX','INPUT','setting.max.memory','作业最大使用内存','Mb',1,'NUMBER','','1024','REGEX','^[1-9]\\d*$','作业最大使用内存输入错误',0,0,'',1,'',4,'',1,NULL) ,('setting.errorLimit.record','最多错误记录数','','DATAX','INPUT','setting.errorlimit.record','最多错误记录数','条',0,'NUMBER','','','REGEX','^[0-9]\\d*$','最多错误记录数输入错误',0,0,'',1,'',5,'',1,NULL) 
,('setting.max.parallelism','作业最大并行数','','SQOOP','INPUT','setting.max.parallelism','作业最大并行数','个',1,'NUMBER','','1','REGEX','^[1-9]\\d*$','作业最大并行数输入错误',0,0,'',1,'',1,'',1,NULL) -,('setting.max.memory','作业最大内存','','SQOOP','INPUT','setting.max.memory','作业最大内存','Mb',1,'NUMBER','','1024','REGEX','^[1-9]\\d*$','作业最大内存输入错误',0,0,'',1,'',2,'',1,NULL) -,('where','WHERE条件','SOURCE','MYSQL','INPUT','where','WHERE条件','',0,'VARCHAR','','','REGEX','^[\\s\\S]{0,500}$','WHERE条件输入过长',0,0,'',1,'',2,'',1,NULL) -,('writeMode','写入方式','SQOOP-SINK','HIVE','OPTION','writeMode','写入方式(OVERWRITE只对TEXT类型表生效)','',1,'OPTION','["OVERWRITE","APPEND"]','OVERWRITE','','','写入方式输入错误',0,0,'',1,'',1,'',1,NULL) -,('partition','分区信息','SINK','HIVE','MAP','partition','分区信息(文本)','',0,'VARCHAR','','','REGEX','^[\\s\\S]{0,50}$','分区信息过长',0,0,'/api/rest_j/v1/dss/exchangis/main/datasources/render/partition/element/map',1,'',2,'',1,NULL) -; +,('setting.max.memory','作业最大内存','','SQOOP','INPUT','setting.max.memory','作业最大内存','Mb',1,'NUMBER','','1024','REGEX','^[1-9]\\d*$','作业最大内存输入错误',0,0,'',1,'',2,'',1,NULL); + INSERT INTO `exchangis_job_param_config` (config_key,config_name,config_direction,`type`,ui_type,ui_field,ui_label,unit,required,value_type,value_range,default_value,validate_type,validate_range,validate_msg,is_hidden,is_advanced,source,`level`,treename,sort,description,status,ref_id) VALUES -('partition','分区信息','SOURCE','HIVE','MAP','partition','分区信息(文本)','',0,'VARCHAR','','','REGEX','^[\\s\\S]{0,50}$','分区信息过长',0,0,'/api/rest_j/v1/dss/exchangis/main/datasources/render/partition/element/map',1,'',2,'',1,NULL) +('where','WHERE条件','SOURCE','MYSQL','INPUT','where','WHERE条件','',0,'VARCHAR','','','REGEX','^[\\s\\S]{0,500}$','WHERE条件输入过长',0,0,'',1,'',2,'',1,NULL) ,('writeMode','写入方式','SQOOP-SINK','MYSQL','OPTION','writeMode','写入方式','',1,'OPTION','["INSERT","UPDATE"]','INSERT','','','写入方式输入错误',0,0,'',1,'',1,'',1,NULL)
-,('batchSize','批量大小','DATAX-SINK','ELASTICSEARCH','INPUT','batchSize','批量大小','',0,'NUMBER','','','REGEX','^[1-9]\\d*$','批量大小输入错误',0,0,'',1,'',1,'',1,NULL) -,('query','query条件','DATAX-SOURCE','MONGODB','INPUT','query','query条件','',0,'VARCHAR','','','REGEX','^[\\s\\S]{0,500}$','query条件输入过长',0,0,'',1,'',2,'',1,NULL) -,('writeMode','写入方式','DATAX-SINK','MONGODB','OPTION','writeMode','写入方式','',1,'OPTION','["insert","replace"]','insert','','','写入方式输入错误',0,0,'',1,'',1,'',1,NULL) -,('batchSize','批量大小','DATAX-SINK','MONGODB','INPUT','batchSize','批量大小','',0,'NUMBER','','','REGEX','^[1-9]\\d*$','批量大小输入错误',0,0,'',1,'',2,'',1,NULL) -,('transferMode','传输方式','DATAX-SOURCE','HIVE','OPTION','transferMode','传输方式','',1,'OPTION','["二进制","记录"]','二进制','','','该传输方式不可用',0,0,'',1,'',1,'',1,NULL) -,('nullFormat','空值字符','DATAX-SOURCE','HIVE','INPUT','nullFormat','空值字符','',0,'VARCHAR','','','REGEX','^[\\s\\S]{0,50}$','空值字符输入错误',0,0,'',1,'',2,'',1,49) -,('writeMode','写入方式','DATAX-SINK','MYSQL','OPTION','writeMode','写入方式','',1,'OPTION','["INSERT","UPDATE"]','INSERT','','','写入方式输入错误',0,0,'',1,'',1,'',1,NULL) +,('writeMode','写入方式','DATAX-SINK','MYSQL','OPTION','writeMode','写入方式','',1,'OPTION','["INSERT","UPDATE"]','INSERT','','','写入方式输入错误',0,0,'',1,'',1,'',1,NULL); + +INSERT INTO `exchangis_job_param_config` (config_key,config_name,config_direction,`type`,ui_type,ui_field,ui_label,unit,required,value_type,value_range,default_value,validate_type,validate_range,validate_msg,is_hidden,is_advanced,source,`level`,treename,sort,description,status,ref_id) VALUES +('where','WHERE条件','SOURCE','TDSQL','INPUT','where','WHERE条件','',0,'VARCHAR','','','REGEX','^[\\s\\S]{0,500}$','WHERE条件输入过长',0,0,'',1,'',2,'',1,NULL) ,('writeMode','写入方式','SQOOP-SINK','TDSQL','OPTION','writeMode','写入方式','',1,'OPTION','["INSERT","UPDATE"]','INSERT','','','写入方式输入错误',0,0,'',1,'',1,'',1,NULL)
+,('writeMode','写入方式','DATAX-SINK','TDSQL','OPTION','writeMode','写入方式','',1,'OPTION','["INSERT","UPDATE"]','INSERT','','','写入方式输入错误',0,0,'',1,'',1,'',1,NULL); + +INSERT INTO `exchangis_job_param_config` (config_key,config_name,config_direction,`type`,ui_type,ui_field,ui_label,unit,required,value_type,value_range,default_value,validate_type,validate_range,validate_msg,is_hidden,is_advanced,source,`level`,treename,sort,description,status,ref_id) VALUES +('writeMode','写入方式','SQOOP-SINK','HIVE','OPTION','writeMode','写入方式(OVERWRITE只对TEXT类型表生效)','',1,'OPTION','["OVERWRITE","APPEND"]','OVERWRITE','','','写入方式输入错误',0,0,'',1,'',1,'',1,NULL) +,('partition','分区信息','SINK','HIVE','MAP','partition','分区信息(文本)','',0,'VARCHAR','','','REGEX','^[\\s\\S]{0,50}$','分区信息过长',0,0,'/api/rest_j/v1/dss/exchangis/main/datasources/render/partition/element/map',1,'',2,'',1,NULL) +,('partition','分区信息','SOURCE','HIVE','MAP','partition','分区信息(文本)','',0,'VARCHAR','','','REGEX','^[\\s\\S]{0,50}$','分区信息过长',0,0,'/api/rest_j/v1/dss/exchangis/main/datasources/render/partition/element/map',1,'',2,'',1,NULL) +,('transferMode','传输方式','DATAX-SOURCE','HIVE','OPTION','transferMode','传输方式','',1,'OPTION','["记录"]','记录','','','该传输方式不可用',0,0,'',1,'',1,'',1,NULL) +,('nullFormat','空值字符','DATAX-SOURCE','HIVE','INPUT','nullFormat','空值字符','',0,'VARCHAR','','','REGEX','^[\\s\\S]{0,50}$','空值字符输入错误',0,0,'',1,'',2,'',1,48) ,('writeMode','写入方式','DATAX-SINK','HIVE','OPTION','writeMode','写入方式(OVERWRITE只对TEXT类型表生效)','',1,'OPTION','["append","truncate"]','append','','','写入方式输入错误',0,0,'',1,'',1,'',1,NULL) -; +,('nullFormat','空值字符','DATAX-SINK','HIVE','INPUT','nullFormat','空值字符','',0,'VARCHAR','','','REGEX','^[\\s\\S]{0,50}$','空值字符输入错误',0,0,'',1,'',2,'',1,49); + INSERT INTO `exchangis_job_param_config`
(config_key,config_name,config_direction,`type`,ui_type,ui_field,ui_label,unit,required,value_type,value_range,default_value,validate_type,validate_range,validate_msg,is_hidden,is_advanced,source,`level`,treename,sort,description,status,ref_id) VALUES -('nullFormat','空值字符','DATAX-SINK','HIVE','INPUT','nullFormat','空值字符','',0,'VARCHAR','','','REGEX','^[\\s\\S]{0,50}$','空值字符输入错误',0,0,'',1,'',2,'',1,49) -,('nullFormat','空值字符','DATAX-SINK','ELASTICSEARCH','INPUT','nullFormat','空值字符','',0,'VARCHAR','','','REGEX','^[\\s\\S]{0,50}$','空值字符输入错误',0,0,'',1,'',2,'',1,49) -,('where','WHERE条件','SOURCE','ORACLE','INPUT','where','WHERE条件',NULL,0,'VARCHAR',NULL,NULL,'REGEX','^[\\s\\S]{0,500}$','WHERE条件输入过长',0,0,NULL,1,'',2,NULL,1,NULL) -,('writeMode','写入方式','DATAX-SINK','ORACLE','OPTION','writeMode','写入方式',NULL,1,'OPTION','["INSERT","UPDATE"]','INSERT',NULL,NULL,'写入方式输入错误',0,0,NULL,1,NULL,1,NULL,1,NULL) -; +('batchSize','批量大小','DATAX-SINK','ELASTICSEARCH','INPUT','batchSize','批量大小','',0,'NUMBER','','','REGEX','^[1-9]\\d*$','批量大小输入错误',0,0,'',1,'',1,'',1,NULL) +,('nullFormat','空值字符','DATAX-SINK','ELASTICSEARCH','INPUT','nullFormat','空值字符','',0,'VARCHAR','','','REGEX','^[\\s\\S]{0,50}$','空值字符输入错误',0,0,'',1,'',2,'',1,49); + +INSERT INTO `exchangis_job_param_config` (config_key,config_name,config_direction,`type`,ui_type,ui_field,ui_label,unit,required,value_type,value_range,default_value,validate_type,validate_range,validate_msg,is_hidden,is_advanced,source,`level`,treename,sort,description,status,ref_id) VALUES +('where','WHERE条件','SOURCE','ORACLE','INPUT','where','WHERE条件',NULL,0,'VARCHAR',NULL,NULL,'REGEX','^[\\s\\S]{0,500}$','WHERE条件输入过长',0,0,NULL,1,'',2,NULL,1,NULL) +,('writeMode','写入方式','DATAX-SINK','ORACLE','OPTION','writeMode','写入方式',NULL,1,'OPTION','["INSERT","UPDATE"]','INSERT',NULL,NULL,'写入方式输入错误',0,0,NULL,1,NULL,1,NULL,1,NULL); + +INSERT INTO `exchangis_job_param_config` 
(config_key,config_name,config_direction,`type`,ui_type,ui_field,ui_label,unit,required,value_type,value_range,default_value,validate_type,validate_range,validate_msg,is_hidden,is_advanced,source,`level`,treename,sort,description,status,ref_id) VALUES +('query','query条件','DATAX-SOURCE','MONGODB','INPUT','query','query条件','',0,'VARCHAR','','','REGEX','^[\\s\\S]{0,500}$','query条件输入过长',0,0,'',1,'',2,'',1,NULL) +,('writeMode','写入方式','DATAX-SINK','MONGODB','OPTION','writeMode','写入方式','',1,'OPTION','["insert","replace"]','insert','','','写入方式输入错误',0,0,'',1,'',1,'',1,NULL) +,('batchSize','批量大小','DATAX-SINK','MONGODB','INPUT','batchSize','批量大小','',0,'NUMBER','','','REGEX','^[1-9]\\d*$','批量大小输入错误',0,0,'',1,'',2,'',1,NULL); + +INSERT INTO `exchangis_job_param_config` (config_key,config_name,config_direction,`type`,ui_type,ui_field,ui_label,unit,required,value_type,value_range,default_value,validate_type,validate_range,validate_msg,is_hidden,is_advanced,source,`level`,treename,sort,description,status,ref_id) VALUES +('writeMode','写入方式','DATAX-SINK','STARROCKS','OPTION','writeMode','写入方式','',1,'OPTION','["upsert"]','upsert','','','写入方式输入错误',0,0,'',1,'',1,'',1,NULL) +,('batchSize','批量字节数大小','DATAX-SINK','STARROCKS','INPUT','maxBatchSize','批量字节数大小','',0,'NUMBER','','','REGEX','^[1-9]\\d*$','批量大小输入错误',0,0,'',1,'',2,'',1,NULL); -- engine_settings records INSERT INTO `exchangis_engine_settings` (id, engine_name, engine_desc, engine_settings_value, engine_direction, res_loader_class, res_uploader_class, modify_time) VALUES -(1, 'datax', 'datax sync engine', '{}', 'mysql->hive,hive->mysql,mysql->oracle,oracle->mysql,oracle->hive,hive->oracle,mongodb->hive,hive->mongodb,mysql->elasticsearch,oracle->elasticsearch,mongodb->elasticsearch,mysql->mongodb,mongodb->mysql,oracle->mongodb,mongodb->oracle', 'com.webank.wedatasphere.exchangis.engine.resource.loader.datax.DataxEngineResourceLoader', NULL, NULL, '2022-08-09 18:20:51.0'), +(1, 'datax', 'datax sync engine', '{}', 
'mysql->hive,hive->mysql,mysql->oracle,oracle->mysql,oracle->hive,hive->oracle,mongodb->hive,hive->mongodb,mysql->elasticsearch,oracle->elasticsearch,mongodb->elasticsearch,mysql->mongodb,mongodb->mysql,oracle->mongodb,mongodb->oracle,hive->starrocks', 'com.webank.wedatasphere.exchangis.engine.resource.loader.datax.DataxEngineResourceLoader', NULL, NULL), (2, 'sqoop', 'hadoop tool', '{}', 'mysql->hive,hive->mysql', '', NULL, NULL); -- exchangis_job_transform_rule records INSERT INTO `exchangis_job_transform_rule` (rule_name,rule_type,rule_source,data_source_type,engine_type,direction) VALUES ('es_with_post_processor','DEF','{"types": ["MAPPING", "PROCESSOR"]}','ELASTICSEARCH',NULL,'SINK') -,('es_fields_not_editable','MAPPING','{"fieldEditEnable": false, "fieldDeleteEnable": false}','ELASTICSEARCH',NULL,'SINK') -,('hive_sink_not_access','MAPPING','{"fieldEditEnable": false, "fieldDeleteEnable": false, "fieldAddEnable": false}','HIVE',NULL,'SINK') +,('es_fields_not_editable','MAPPING','{"fieldEditEnable": true, "fieldDeleteEnable": true}','ELASTICSEARCH',NULL,'SINK') +,('hive_sink_not_access','MAPPING','{"fieldEditEnable": true, "fieldDeleteEnable": true, "fieldAddEnable": true}','HIVE',NULL,'SINK') ,('mongo_field_match','MAPPING','{"fieldMatchStrategyName": "CAMEL_CASE_MATCH"}','MONGODB',NULL,'SINK') -,('mysql_field_source_match','MAPPING','{"fieldMatchStrategyName": "CAMEL_CASE_MATCH","fieldEditEnable": true, "fieldDeleteEnable": true, "fieldAddEnable": false}','MYSQL',NULL,'SOURCE') +,('mysql_field_source_match','MAPPING','{"fieldMatchStrategyName": "CAMEL_CASE_MATCH","fieldEditEnable": true, "fieldDeleteEnable": true, "fieldAddEnable": true}','MYSQL',NULL,'SOURCE') +,('starrocks_field_source_match','MAPPING','{"fieldMatchStrategyName": "CAMEL_CASE_MATCH","fieldEditEnable": true, "fieldDeleteEnable": true, "fieldAddEnable": true}','STARROCKS',NULL,'SINK') ; diff --git a/exchangis-dao/pom.xml b/exchangis-dao/pom.xml index 5776a5b52..ca406e7fa 100644 --- 
a/exchangis-dao/pom.xml +++ b/exchangis-dao/pom.xml @@ -5,7 +5,8 @@ exchangis com.webank.wedatasphere.exchangis - 1.1.2 + ${revision} + ../pom.xml 4.0.0 @@ -26,15 +27,15 @@ org.apache.linkis linkis-module + + org.apache.linkis + linkis-gateway-httpclient-support + org.hibernate hibernate-validator ${hibernate.validator} - - org.springframework - spring-orm - diff --git a/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/common/AuditLogUtils.java b/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/common/AuditLogUtils.java index f42908668..e79ee6bb5 100644 --- a/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/common/AuditLogUtils.java +++ b/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/common/AuditLogUtils.java @@ -31,7 +31,7 @@ public static void printLog(String user, String proxyUser, TargetTypeEnum target //String detailInfo=new Gson().toJson(params); String detailInfo=params.toString(); LOGGER.info("[{}],[{}],[{}],[{}],[{}],[{}],[{}],[{}],[{}]", - new Date(),user, "proxyUser is: " + proxyUser, "Exchangis-1.1.2", targetType.getName(), + new Date(),user, "proxyUser is: " + proxyUser, "Exchangis-1.1.3", targetType.getName(), targetId,targetName,operateType.getName(), detailInfo); } diff --git a/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/common/EnvironmentUtils.java b/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/common/EnvironmentUtils.java index eaf5d6c38..ac6e77198 100644 --- a/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/common/EnvironmentUtils.java +++ b/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/common/EnvironmentUtils.java @@ -1,6 +1,13 @@ package com.webank.wedatasphere.exchangis.common; +import org.apache.commons.lang3.StringUtils; +import org.apache.linkis.DataWorkCloudApplication; import org.apache.linkis.common.conf.CommonVars; +import org.apache.linkis.common.conf.Configuration; +import org.apache.linkis.common.utils.Utils; 
+import org.apache.linkis.server.utils.LinkisMainHelper; +import org.springframework.context.ApplicationContext; + /** * Environment utils @@ -9,6 +16,8 @@ public class EnvironmentUtils { private static final CommonVars JVM_USER = CommonVars.apply("wds.exchangis.env.jvm.user", System.getProperty("user.name", "hadoop")); + private static final CommonVars SERVER_NAME = CommonVars.apply(LinkisMainHelper.SERVER_NAME_KEY(), "exchangis"); + /** * Jvm user * @return user name @@ -16,4 +25,60 @@ public class EnvironmentUtils { public static String getJvmUser(){ return JVM_USER.getValue(); } + + /** + * Server name + * @return name + */ + public static String getServerName(){ + return SERVER_NAME.getValue(); + } + + /** + * Get server address + * @return address + */ + public static String getServerAddress(){ + ApplicationContext context = DataWorkCloudApplication.getApplicationContext(); + String hostname; + if (Configuration.PREFER_IP_ADDRESS()) { + hostname = context + .getEnvironment().getProperty("spring.cloud.client.ip-address"); + } else { + hostname = context.getEnvironment().getProperty("eureka.instance.hostname", ""); + if (StringUtils.isBlank(hostname)) { + hostname = Utils.getComputerName(); + } + } + String serverPort = context.getEnvironment().getProperty("server.port"); + return hostname + (StringUtils.isNotBlank(serverPort) ? 
":" + serverPort : ""); + } + /** + * Get server host name + * @return hostname + */ + public static String getServerHost(){ + ApplicationContext context = DataWorkCloudApplication.getApplicationContext(); + if (Configuration.PREFER_IP_ADDRESS()) { + return context + .getEnvironment().getProperty("spring.cloud.client.ip-address"); + } else { + String hostname = context.getEnvironment().getProperty("eureka.instance.hostname", ""); + if (StringUtils.isBlank(hostname)) { + return Utils.getComputerName(); + } + return hostname; + } + } + + /** + * Get server port + * @return port number + */ + public static Integer getServerPort(){ + String serverPort = DataWorkCloudApplication.getApplicationContext() + .getEnvironment().getProperty("server.port"); + return StringUtils.isNotBlank(serverPort) ? Integer.parseInt(serverPort) : null; + } + } diff --git a/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/common/http/HttpClientConfiguration.java b/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/common/http/HttpClientConfiguration.java new file mode 100644 index 000000000..548b99650 --- /dev/null +++ b/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/common/http/HttpClientConfiguration.java @@ -0,0 +1,25 @@ +package com.webank.wedatasphere.exchangis.common.http; + +import org.apache.linkis.common.conf.CommonVars; + +/** + * Define the http configuration + */ +public class HttpClientConfiguration { + + /** + * Connect timeout + */ + public static final CommonVars CONNECTION_TIMEOUT = CommonVars.apply("wds.exchangis.http.client.connection.timeout", 30000L); + + /** + * Max connection size + */ + public static final CommonVars MAX_CONNECTION_SIZE = CommonVars.apply("wds.exchangis.http.client.connection.max-size", 100); + + /** + * Read timeout + */ + public static final CommonVars READ_TIMEOUT = CommonVars.apply("wds.exchangis.http.client.read-timeout", 90000L); + +} diff --git 
a/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/common/linkis/ClientConfiguration.java b/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/common/linkis/ClientConfiguration.java deleted file mode 100644 index 3725610fd..000000000 --- a/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/common/linkis/ClientConfiguration.java +++ /dev/null @@ -1,26 +0,0 @@ -package com.webank.wedatasphere.exchangis.common.linkis; - -import org.apache.linkis.common.conf.CommonVars; -import org.apache.linkis.common.conf.Configuration; - -/** - * Configuration for linkis client - */ -public class ClientConfiguration { - - /** - * Linkis server url - */ - public static final CommonVars LINKIS_SERVER_URL = CommonVars.apply("wds.exchangis.client.linkis.server-url", Configuration.getGateWayURL()); - - /** - * Linkis token value - */ - public static final CommonVars LINKIS_TOKEN_VALUE = CommonVars.apply("wds.exchangis.client.linkis.token.value", "EXCHANGIS-TOKEN"); - - /** - * Linkis client max connections - */ - public static final CommonVars LINKIS_DEFAULT_MAX_CONNECTIONS = CommonVars.apply("wds.exchangis.client.linkis.max-connections.default", 70); - -} diff --git a/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/common/linkis/client/ClientConfiguration.java b/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/common/linkis/client/ClientConfiguration.java new file mode 100644 index 000000000..bb65867a4 --- /dev/null +++ b/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/common/linkis/client/ClientConfiguration.java @@ -0,0 +1,54 @@ +package com.webank.wedatasphere.exchangis.common.linkis.client; + +import com.webank.wedatasphere.exchangis.common.http.HttpClientConfiguration; +import org.apache.linkis.common.conf.CommonVars; +import org.apache.linkis.common.conf.Configuration; + +/** + * Configuration for linkis client + */ +public class ClientConfiguration { + + /** + * Linkis server url + */ + public static 
final CommonVars LINKIS_SERVER_URL = CommonVars.apply("wds.exchangis.client.linkis.server-url", Configuration.getGateWayURL()); + + /** + * Linkis token value + */ + public static final CommonVars LINKIS_TOKEN_VALUE = CommonVars.apply("wds.exchangis.client.linkis.token.value", "EXCHANGIS-TOKEN"); + + /** + * Linkis client max connections + */ + public static final CommonVars LINKIS_DEFAULT_MAX_CONNECTIONS = CommonVars.apply("wds.exchangis.client.linkis.max-connections.default", + HttpClientConfiguration.MAX_CONNECTION_SIZE.getValue()); + + + /** + * Linkis discovery enable + */ + public static final CommonVars LINKIS_DISCOVERY_ENABLED = CommonVars.apply("wds.exchangis.client.linkis.discovery.enabled", true); + + /** + * Linkis discovery frequency + */ + public static final CommonVars LINKIS_DISCOVERY_FREQUENCY_PERIOD = CommonVars.apply("wds.exchangis.client.linkis.discovery.frequency-period", 1L); + + /** + * Linkis client load balance + */ + public static final CommonVars LINKIS_LOAD_BALANCER_ENABLED = CommonVars.apply("wds.exchangis.client.linkis.load-balancer.enabled", true); + + + /** + * Linkis client retry + */ + public static final CommonVars LINKIS_RETRY_ENABLED = CommonVars.apply("wds.exchangis.client.linkis.retry.enabled", false); + + /** + * DWS version + */ + public static final CommonVars LINKIS_DWS_VERSION = CommonVars.apply("wds.exchangis.client.linkis.dws.version", Configuration.LINKIS_WEB_VERSION().getValue()); +} diff --git a/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/dao/domain/ExchangisJobDsBind.java b/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/dao/domain/ExchangisJobDsBind.java index e21695e6d..8b99351ca 100644 --- a/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/dao/domain/ExchangisJobDsBind.java +++ b/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/dao/domain/ExchangisJobDsBind.java @@ -18,8 +18,18 @@ public class ExchangisJobDsBind { private Long sourceDsId; + /** + * Source 
data source name + */ + private String sourceDsName; + private Long sinkDsId; + /** + * Sink data source name + */ + private String sinkDsName; + public Long getId() { return id; } @@ -59,4 +69,20 @@ public Long getSinkDsId() { public void setSinkDsId(Long sinkDsId) { this.sinkDsId = sinkDsId; } + + public String getSourceDsName() { + return sourceDsName; + } + + public void setSourceDsName(String sourceDsName) { + this.sourceDsName = sourceDsName; + } + + public String getSinkDsName() { + return sinkDsName; + } + + public void setSinkDsName(String sinkDsName) { + this.sinkDsName = sinkDsName; + } } diff --git a/exchangis-dao/src/main/scala/com/webank/wedatasphere/exchangis/common/linkis/client/ExchangisHttpClient.scala b/exchangis-dao/src/main/scala/com/webank/wedatasphere/exchangis/common/linkis/client/ExchangisHttpClient.scala new file mode 100644 index 000000000..45efe146d --- /dev/null +++ b/exchangis-dao/src/main/scala/com/webank/wedatasphere/exchangis/common/linkis/client/ExchangisHttpClient.scala @@ -0,0 +1,39 @@ +package com.webank.wedatasphere.exchangis.common.linkis.client + +import com.webank.wedatasphere.exchangis.common.linkis.client.config.{ExchangisClientConfig} +import org.apache.http.client.config.RequestConfig +import org.apache.http.impl.client.{CloseableHttpClient, HttpClients} +import org.apache.linkis.httpclient.dws.DWSHttpClient + +import java.util.concurrent.TimeUnit + +/** + * Enhanced http client config + */ +class ExchangisHttpClient(clientConfig: ExchangisClientConfig, clientName: String) + extends DWSHttpClient(clientConfig, clientName){ + /** + * Build http client + */ + override protected val httpClient: CloseableHttpClient = { + val defaultRequestConfig = RequestConfig.custom() + .setConnectTimeout(clientConfig.getConnectTimeout.toInt) + .setConnectionRequestTimeout(clientConfig.getConnReqTimeout.toInt) + .setSocketTimeout(clientConfig.getReadTimeout.toInt) + .build() + val clientBuilder = HttpClients.custom() + 
clientBuilder.setDefaultRequestConfig(defaultRequestConfig).useSystemProperties() + .setMaxConnPerRoute(clientConfig.getMaxConnection / 2).setMaxConnTotal(clientConfig.getMaxConnection) + val maxIdleTime = clientConfig.getMaxIdleTime + if (maxIdleTime > 0){ + // Evict idle connections + clientBuilder.evictExpiredConnections(); + clientBuilder.evictIdleConnections(maxIdleTime, TimeUnit.MILLISECONDS) + } + clientBuilder.build() + } + + def getHttpClient: CloseableHttpClient = { + httpClient + } +} diff --git a/exchangis-dao/src/main/scala/com/webank/wedatasphere/exchangis/common/linkis/client/config/ExchangisClientConfig.scala b/exchangis-dao/src/main/scala/com/webank/wedatasphere/exchangis/common/linkis/client/config/ExchangisClientConfig.scala new file mode 100644 index 000000000..ab9767ab9 --- /dev/null +++ b/exchangis-dao/src/main/scala/com/webank/wedatasphere/exchangis/common/linkis/client/config/ExchangisClientConfig.scala @@ -0,0 +1,36 @@ +package com.webank.wedatasphere.exchangis.common.linkis.client.config + +import org.apache.linkis.httpclient.config.ClientConfig +import org.apache.linkis.httpclient.dws.config.DWSClientConfig + +/** + * Enhanced dws client config + */ +class ExchangisClientConfig private[config]( + clientConfig: ClientConfig, + maxIdleTime: Long, + connReqTimeout: Long + ) extends DWSClientConfig(clientConfig) { + + /** + * Max idle time + * @return + */ + def getMaxIdleTime: Long = { + maxIdleTime + } + + /** + * Connection request timeout + * @return + */ + def getConnReqTimeout: Long = { + connReqTimeout + } +} + +object ExchangisClientConfig{ + def newBuilder: ExchangisClientConfigBuilder = { + new ExchangisClientConfigBuilder() + } +} \ No newline at end of file diff --git a/exchangis-dao/src/main/scala/com/webank/wedatasphere/exchangis/common/linkis/client/config/ExchangisClientConfigBuilder.scala b/exchangis-dao/src/main/scala/com/webank/wedatasphere/exchangis/common/linkis/client/config/ExchangisClientConfigBuilder.scala new file 
mode 100644 index 000000000..47df001f4 --- /dev/null +++ b/exchangis-dao/src/main/scala/com/webank/wedatasphere/exchangis/common/linkis/client/config/ExchangisClientConfigBuilder.scala @@ -0,0 +1,59 @@ +package com.webank.wedatasphere.exchangis.common.linkis.client.config + +import com.webank.wedatasphere.exchangis.common.http.HttpClientConfiguration +import com.webank.wedatasphere.exchangis.common.linkis.client.ClientConfiguration +import org.apache.linkis.httpclient.config.{ClientConfig, ClientConfigBuilder} +import org.apache.linkis.httpclient.dws.authentication.TokenAuthenticationStrategy + +import java.util.concurrent.TimeUnit + +/** + * Enhanced dws client config builder + */ +class ExchangisClientConfigBuilder extends ClientConfigBuilder{ + + private var maxIdleTime: Long = _ + + private var connReqTimeout: Long = _ + + private var dwsVersion: String = _ + + // Load from vars default + // Http common + maxConnection = HttpClientConfiguration.MAX_CONNECTION_SIZE.getValue + connectTimeout = HttpClientConfiguration.CONNECTION_TIMEOUT.getValue + readTimeout = HttpClientConfiguration.READ_TIMEOUT.getValue + // Linkis client, use token auth default + dwsVersion = ClientConfiguration.LINKIS_DWS_VERSION.getValue + serverUrl = ClientConfiguration.LINKIS_SERVER_URL.getValue + discoveryEnabled = ClientConfiguration.LINKIS_DISCOVERY_ENABLED.getValue + discoveryFrequency(ClientConfiguration.LINKIS_DISCOVERY_FREQUENCY_PERIOD.getValue, TimeUnit.MINUTES) + loadbalancerEnabled = ClientConfiguration.LINKIS_LOAD_BALANCER_ENABLED.getValue + retryEnabled = ClientConfiguration.LINKIS_RETRY_ENABLED.getValue + authenticationStrategy = new TokenAuthenticationStrategy() + authTokenKey = TokenAuthenticationStrategy.TOKEN_KEY + authTokenValue = ClientConfiguration.LINKIS_TOKEN_VALUE.getValue + + def maxIdleTime(maxIdleTime: Long): this.type = { + this.maxIdleTime = maxIdleTime + this + } + + def connReqTimeout(connReqTimeout: Long): this.type = { + this.connReqTimeout = connReqTimeout 
+ this + } + + def setDWSVersion(dwsVersion: String): this.type = { + this.dwsVersion = dwsVersion + this + } + + override def build(): ExchangisClientConfig = { + val clientConfig = new ExchangisClientConfig(super.build(), maxIdleTime, connReqTimeout) + clientConfig.setDWSVersion(dwsVersion) + clientConfig + } + + +} diff --git a/exchangis-datasource/exchangis-datasource-core/pom.xml b/exchangis-datasource/exchangis-datasource-core/pom.xml index 990c92d6f..a3f253324 100644 --- a/exchangis-datasource/exchangis-datasource-core/pom.xml +++ b/exchangis-datasource/exchangis-datasource-core/pom.xml @@ -3,9 +3,10 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> - exchangis-datasource + exchangis com.webank.wedatasphere.exchangis - 1.1.2 + ${revision} + ../../pom.xml 4.0.0 @@ -20,7 +21,7 @@ com.webank.wedatasphere.exchangis exchangis-dao - 1.1.2 + ${project.version} diff --git a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/AbstractExchangisDataSourceDefinition.java b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/AbstractExchangisDataSourceDefinition.java new file mode 100644 index 000000000..c86fa6ef6 --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/AbstractExchangisDataSourceDefinition.java @@ -0,0 +1,85 @@ +package com.webank.wedatasphere.exchangis.datasource.core; + +import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper; +import com.webank.wedatasphere.exchangis.dao.domain.ExchangisJobParamConfig; +import com.webank.wedatasphere.exchangis.dao.hook.MapperHook; +import com.webank.wedatasphere.exchangis.dao.mapper.ExchangisJobParamConfigMapper; +import com.webank.wedatasphere.exchangis.datasource.core.domain.ExchangisDataSourceType; +import 
org.apache.commons.lang3.StringUtils; +import org.apache.linkis.datasourcemanager.common.domain.DataSourceType; + +import java.util.List; + +public abstract class AbstractExchangisDataSourceDefinition implements ExchangisDataSourceDefinition { + + /** + * Mapper hook from common module? + */ + protected MapperHook mapperHook; + /** + * Type id + */ + protected String id; + @Override + public String name() { + return type().name; + } + + + @Override + public String classifier() { + return type().classifier; + } + + + @Override + public void setMapperHook(MapperHook mapperHook) { + this.mapperHook = mapperHook; + } + + @Override + public List getDataSourceTypes(String user) { + return ExchangisDataSourceDefinition.super.getDataSourceTypes(user); + } + + @Override + public String id() { + if (null == id || id.equalsIgnoreCase("")) { + List types = getDataSourceTypes("hdfs"); + for (DataSourceType type : types) { + if (type.getName().equalsIgnoreCase(name())) { + this.id = type.getId(); + } + } + } + return this.id; + } + + @Override + public List getDataSourceParamConfigs() { + return getDataSourceParamConfigs(type().name); + } + + protected List getDataSourceParamConfigs(String type) { + return getDataSourceParamConfigs(type, null); + } + + + protected List getDataSourceParamConfigs(String type, String dir) { + ExchangisJobParamConfigMapper exchangisJobParamConfigMapper = this.mapperHook.getExchangisJobParamConfigMapper(); + QueryWrapper queryWrapper = new QueryWrapper<>(); + if (StringUtils.isNotBlank(dir)) { + queryWrapper.eq("config_direction", dir); + } + queryWrapper.eq("type", type); + queryWrapper.eq("is_hidden", 0); + queryWrapper.eq("status", 1); + return exchangisJobParamConfigMapper.selectList(queryWrapper); + } + + /** + * Data source type + * @return type + */ + protected abstract ExchangisDataSourceType type(); +} diff --git 
a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/ExchangisDataSource.java b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/ExchangisDataSource.java index 13ecdfdac..2eea07dd7 100644 --- a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/ExchangisDataSource.java +++ b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/ExchangisDataSource.java @@ -1,52 +1,19 @@ package com.webank.wedatasphere.exchangis.datasource.core; -import com.webank.wedatasphere.exchangis.dao.domain.ExchangisJobParamConfig; -import com.webank.wedatasphere.exchangis.dao.hook.MapperHook; -import org.apache.linkis.datasource.client.impl.LinkisDataSourceRemoteClient; -import org.apache.linkis.datasource.client.impl.LinkisMetaDataRemoteClient; -import org.apache.linkis.datasource.client.request.GetAllDataSourceTypesAction; -import org.apache.linkis.datasource.client.response.GetAllDataSourceTypesResult; -import org.apache.linkis.datasourcemanager.common.domain.DataSourceType; - -import java.util.Collections; -import java.util.List; -import java.util.Objects; - +/** + * Data source basic inf + */ public interface ExchangisDataSource { - String id(); - - String name(); - - String description(); - - String option(); - - String classifier(); -// String type(); - - String structClassifier(); - -// String category(); - - String icon(); - - List getDataSourceParamConfigs(); + /** + * Id + * @return id + */ + Long getId(); - LinkisDataSourceRemoteClient getDataSourceRemoteClient(); + void setId(Long id); - LinkisMetaDataRemoteClient getMetaDataRemoteClient(); - void setMapperHook(MapperHook mapperHook); - default List getDataSourceTypes(String user) { - GetAllDataSourceTypesResult result = 
getDataSourceRemoteClient().getAllDataSourceTypes(GetAllDataSourceTypesAction.builder() - .setUser(user) - .build() - ); - List allDataSourceType = result.getAllDataSourceType(); - if (Objects.isNull(allDataSourceType)) allDataSourceType = Collections.emptyList(); - return allDataSourceType; - } } diff --git a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/ExchangisDataSourceConfiguration.java b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/ExchangisDataSourceConfiguration.java index 77e94b12a..782c26977 100644 --- a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/ExchangisDataSourceConfiguration.java +++ b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/ExchangisDataSourceConfiguration.java @@ -1,19 +1,28 @@ package com.webank.wedatasphere.exchangis.datasource.core; +import com.webank.wedatasphere.exchangis.common.linkis.client.ClientConfiguration; import org.apache.linkis.common.conf.CommonVars; +/** + * Exchangis data source config + */ public class ExchangisDataSourceConfiguration { - public static final CommonVars SERVER_URL = CommonVars.apply("wds.exchangis.datasource.client.serverurl", ""); - public static final CommonVars CONNECTION_TIMEOUT = CommonVars.apply("wds.exchangis.datasource.client.connection.timeout", 30000L); - public static final CommonVars DISCOVERY_ENABLED = CommonVars.apply("wds.exchangis.datasource.client.discovery.enabled", true); - public static final CommonVars DISCOVERY_FREQUENCY_PERIOD = CommonVars.apply("wds.exchangis.datasource.client.discoveryfrequency.period", 1L); - public static final CommonVars LOAD_BALANCER_ENABLED = CommonVars.apply("wds.exchangis.datasource.client.loadbalancer.enabled", true); - public static final CommonVars MAX_CONNECTION_SIZE = 
CommonVars.apply("wds.exchangis.datasource.client.maxconnection.size", 5); - public static final CommonVars RETRY_ENABLED = CommonVars.apply("wds.exchangis.datasource.client.retryenabled", false); - public static final CommonVars READ_TIMEOUT = CommonVars.apply("wds.exchangis.datasource.client.readtimeout", 30000L); + /** + * Server url + */ + public static final CommonVars SERVER_URL = CommonVars.apply("wds.exchangis.datasource.client.server-url", + ClientConfiguration.LINKIS_SERVER_URL.getValue()); - public static final CommonVars AUTHTOKEN_KEY = CommonVars.apply("wds.exchangis.datasource.client.authtoken.key", ""); - public static final CommonVars AUTHTOKEN_VALUE = CommonVars.apply("wds.exchangis.datasource.client.authtoken.value", ""); - public static final CommonVars DWS_VERSION = CommonVars.apply("wds.exchangis.datasource.client.dws.version", ""); + /** + * Token value + */ + public static final CommonVars AUTH_TOKEN_VALUE = CommonVars.apply("wds.exchangis.datasource.client.token.value", + ClientConfiguration.LINKIS_TOKEN_VALUE.getValue()); + + /** + * Dws version + */ + public static final CommonVars DWS_VERSION = CommonVars.apply("wds.exchangis.datasource.client.dws.version", + ClientConfiguration.LINKIS_DWS_VERSION.getValue()); } diff --git a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/ExchangisDataSourceDefinition.java b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/ExchangisDataSourceDefinition.java new file mode 100644 index 000000000..8c6e440c2 --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/ExchangisDataSourceDefinition.java @@ -0,0 +1,75 @@ +package com.webank.wedatasphere.exchangis.datasource.core; + +import com.webank.wedatasphere.exchangis.dao.domain.ExchangisJobParamConfig; +import com.webank.wedatasphere.exchangis.dao.hook.MapperHook; 
+import org.apache.linkis.datasource.client.impl.LinkisDataSourceRemoteClient; +import org.apache.linkis.datasource.client.impl.LinkisMetaDataRemoteClient; +import org.apache.linkis.datasource.client.request.GetAllDataSourceTypesAction; +import org.apache.linkis.datasource.client.response.GetAllDataSourceTypesResult; +import org.apache.linkis.datasourcemanager.common.domain.DataSourceType; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Objects; + +/** + * Ds type definition + */ +public interface ExchangisDataSourceDefinition { + + /** + * Type id + * @return + */ + String id(); + + /** + * Type name + * @return name + */ + String name(); + + /** + * Description + * @return desc + */ + String description(); + + String option(); + + String classifier(); + + String structClassifier(); + + String icon(); + + /** + * Parameter config in + * @return + */ + default List getDataSourceParamConfigs(){ + return new ArrayList<>(); + }; + + default LinkisDataSourceRemoteClient getDataSourceRemoteClient(){ + throw new IllegalArgumentException("unsupported to get data source remote client"); + } + + default LinkisMetaDataRemoteClient getMetaDataRemoteClient(){ + throw new IllegalArgumentException("unsupported to get metadata remote client"); + } + + void setMapperHook(MapperHook mapperHook); + + default List getDataSourceTypes(String user) { + GetAllDataSourceTypesResult result = getDataSourceRemoteClient().getAllDataSourceTypes(GetAllDataSourceTypesAction.builder() + .setUser(user) + .build() + ); + + List allDataSourceType = result.getAllDataSourceType(); + if (Objects.isNull(allDataSourceType)) allDataSourceType = Collections.emptyList(); + return allDataSourceType; + } +} diff --git a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/context/DefaultExchangisDataSourceContext.java 
b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/context/DefaultExchangisDsContext.java similarity index 56% rename from exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/context/DefaultExchangisDataSourceContext.java rename to exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/context/DefaultExchangisDsContext.java index db43c030f..c59ffe36f 100644 --- a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/context/DefaultExchangisDataSourceContext.java +++ b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/context/DefaultExchangisDsContext.java @@ -2,8 +2,8 @@ import com.google.common.base.Strings; -import com.webank.wedatasphere.exchangis.datasource.core.ExchangisDataSource; -import com.webank.wedatasphere.exchangis.datasource.core.loader.ExchangisDataSourceLoader; +import com.webank.wedatasphere.exchangis.datasource.core.ExchangisDataSourceDefinition; +import com.webank.wedatasphere.exchangis.datasource.core.loader.ExchangisDataSourceDefLoader; import java.util.Collection; import java.util.Map; @@ -11,34 +11,34 @@ import java.util.Set; import java.util.concurrent.ConcurrentHashMap; -public class DefaultExchangisDataSourceContext implements ExchangisDataSourceContext { +public class DefaultExchangisDsContext implements ExchangisDataSourceContext { - private final Map dataSources = new ConcurrentHashMap<>(24); + private final Map dataSources = new ConcurrentHashMap<>(24); @Override - public boolean registerDataSourceLoader(ExchangisDataSourceLoader loader) { + public boolean registerLoader(ExchangisDataSourceDefLoader loader) { return false; } @Override - public void addExchangisDataSource(ExchangisDataSource dataSource) { + public void 
addExchangisDsDefinition(ExchangisDataSourceDefinition dataSource) { Objects.requireNonNull(dataSource, "dataSource required"); String name = dataSource.name(); dataSources.put(name, dataSource); } @Override - public ExchangisDataSource removeExchangisDataSource(String type) { + public ExchangisDataSourceDefinition removeExchangisDsDefinition(String type) { return null; } @Override - public ExchangisDataSource updateExchangisDataSource(ExchangisDataSource dataSource) { + public ExchangisDataSourceDefinition updateExchangisDsDefinition(ExchangisDataSourceDefinition dataSource) { return null; } @Override - public ExchangisDataSource getExchangisDataSource(String type) { + public ExchangisDataSourceDefinition getExchangisDsDefinition(String type) { if (Strings.isNullOrEmpty(type)) { return null; } @@ -46,12 +46,12 @@ public ExchangisDataSource getExchangisDataSource(String type) { } @Override - public ExchangisDataSource getExchangisDataSource(Long dataSourceTypeId) { + public ExchangisDataSourceDefinition getExchangisDsDefinition(Long dataSourceTypeId) { if (Objects.isNull(dataSourceTypeId)) { return null; } - Collection values = this.dataSources.values(); - for (ExchangisDataSource ds : values) { + Collection values = this.dataSources.values(); + for (ExchangisDataSourceDefinition ds : values) { if (ds.id().equalsIgnoreCase(dataSourceTypeId+"")) { return ds; } @@ -61,7 +61,7 @@ public ExchangisDataSource getExchangisDataSource(Long dataSourceTypeId) { } @Override - public Collection all() { + public Collection all() { return this.dataSources.values(); } diff --git a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/context/ExchangisDataSourceContext.java b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/context/ExchangisDataSourceContext.java index f15e614a0..c86cf9870 100644 --- 
a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/context/ExchangisDataSourceContext.java +++ b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/context/ExchangisDataSourceContext.java @@ -1,27 +1,62 @@ package com.webank.wedatasphere.exchangis.datasource.core.context; -import com.webank.wedatasphere.exchangis.datasource.core.ExchangisDataSource; -import com.webank.wedatasphere.exchangis.datasource.core.loader.ExchangisDataSourceLoader; +import com.webank.wedatasphere.exchangis.datasource.core.ExchangisDataSourceDefinition; +import com.webank.wedatasphere.exchangis.datasource.core.loader.ExchangisDataSourceDefLoader; import java.util.Collection; import java.util.Set; +/** + * Data source context + */ public interface ExchangisDataSourceContext { - boolean registerDataSourceLoader(ExchangisDataSourceLoader loader); - - void addExchangisDataSource(ExchangisDataSource dataSource); - - ExchangisDataSource removeExchangisDataSource(String type); - - ExchangisDataSource updateExchangisDataSource(ExchangisDataSource dataSource); - - ExchangisDataSource getExchangisDataSource(String type); - - ExchangisDataSource getExchangisDataSource(Long dataSourceTypeId); - - Collection all(); - + boolean registerLoader(ExchangisDataSourceDefLoader loader); + + /** + * Add ds definition + * @param dataSource ds + */ + void addExchangisDsDefinition(ExchangisDataSourceDefinition dataSource); + + /** + * Remove definition + * @param type type + * @return definition + */ + ExchangisDataSourceDefinition removeExchangisDsDefinition(String type); + + /** + * Update definition + * @param dataSource ds + * @return definition + */ + ExchangisDataSourceDefinition updateExchangisDsDefinition(ExchangisDataSourceDefinition dataSource); + + /** + * Get ds definition + * @param type type + * @return definition + */ + ExchangisDataSourceDefinition getExchangisDsDefinition(String 
type); + + /** + * Get ds definition + * @param dataSourceTypeId type id + * @return definition + */ + ExchangisDataSourceDefinition getExchangisDsDefinition(Long dataSourceTypeId); + + /** + * All definition + * @return definitions + */ + Collection all(); + + /** + * Type names + * @return set + */ Set keys(); } diff --git a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/domain/Classifier.java b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/domain/Classifier.java index 4e385450c..78d144ce9 100644 --- a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/domain/Classifier.java +++ b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/domain/Classifier.java @@ -12,7 +12,11 @@ public enum Classifier { SFTP("sftp连接"), - ORACLE("关系型数据库"); + ORACLE("关系型数据库"), + + STARROCKS("关系型数据库"), + + TDSQL("关系型数据库"); public String name; diff --git a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/domain/DataSourceType.java b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/domain/DataSourceType.java deleted file mode 100644 index d0193c4a2..000000000 --- a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/domain/DataSourceType.java +++ /dev/null @@ -1,22 +0,0 @@ -package com.webank.wedatasphere.exchangis.datasource.core.domain; - -public enum DataSourceType { - - ELASTICSEARCH("ELASTICSEARCH"), - - HIVE("HIVE"), - - MONGODB("MONGODB"), - - MYSQL("MYSQL"), - - SFTP("SFTP"), - - ORACLE("ORACLE"); - - public String name; - - DataSourceType(String name) { - this.name = name; - } -} diff --git 
a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/domain/ExchangisDataSourceType.java b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/domain/ExchangisDataSourceType.java new file mode 100644 index 000000000..22c7e3085 --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/domain/ExchangisDataSourceType.java @@ -0,0 +1,34 @@ +package com.webank.wedatasphere.exchangis.datasource.core.domain; + +public enum ExchangisDataSourceType { + + ELASTICSEARCH("ELASTICSEARCH", "分布式全文索引"), + + HIVE("HIVE", "大数据存储"), + + MONGODB("MONGODB", "非关系型数据库"), + + MYSQL("MYSQL", "关系型数据库"), + + SFTP("SFTP", "sftp连接"), + + ORACLE("ORACLE", "关系型数据库"), + + STARROCKS("STARROCKS", "大数据存储"), + + TDSQL("TDSQL", "大数据存储"); + + /** + * Type name + */ + public String name; + + /** + * Classifier + */ + public String classifier; + ExchangisDataSourceType(String name, String classifier) { + this.name = name; + this.classifier = classifier; + } +} diff --git a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/loader/ExchangisDataSourceLoader.java b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/loader/ExchangisDataSourceDefLoader.java similarity index 81% rename from exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/loader/ExchangisDataSourceLoader.java rename to exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/loader/ExchangisDataSourceDefLoader.java index f68f696e8..a1fdc8a34 100644 --- a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/loader/ExchangisDataSourceLoader.java +++ 
b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/loader/ExchangisDataSourceDefLoader.java @@ -2,13 +2,13 @@ import com.webank.wedatasphere.exchangis.dao.hook.MapperHook; -import com.webank.wedatasphere.exchangis.datasource.core.ExchangisDataSource; +import com.webank.wedatasphere.exchangis.datasource.core.ExchangisDataSourceDefinition; import com.webank.wedatasphere.exchangis.datasource.core.context.ExchangisDataSourceContext; import org.apache.linkis.common.conf.CommonVars; import java.util.Objects; -public interface ExchangisDataSourceLoader { +public interface ExchangisDataSourceDefLoader { String EXCHANGIS_DIR_NAME = Objects.isNull(CommonVars.apply("wds.exchangis.datasource.extension.dir").getValue()) ? "exchangis-extds" : CommonVars.apply("wds.exchangis.datasource.extension.dir").getValue().toString(); @@ -26,8 +26,8 @@ public interface ExchangisDataSourceLoader { void init(MapperHook mapperHook) throws Exception; - ExchangisDataSource load(String dataSourceType); + ExchangisDataSourceDefinition load(String dataSourceType); - ExchangisDataSource get(String dataSourceType, boolean reload); + ExchangisDataSourceDefinition get(String dataSourceType, boolean reload); } diff --git a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/utils/Json.java b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/utils/Json.java index a59822cec..2cb70ab41 100644 --- a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/utils/Json.java +++ b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/utils/Json.java @@ -28,6 +28,8 @@ public class Json { mapper.configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false); //Ignore unknown properties 
mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); + //Accept NaN + mapper.configure(JsonParser.Feature.ALLOW_NON_NUMERIC_NUMBERS, true); //Cancel to scape no ascii // mapper.configure(JsonWriteFeature.ESCAPE_NON_ASCII.mappedFeature(), false); } diff --git a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/vo/ExchangisJobDataSourcesContent.java b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/vo/ExchangisJobDataSourcesContent.java index fc1618318..368cce9ea 100644 --- a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/vo/ExchangisJobDataSourcesContent.java +++ b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/vo/ExchangisJobDataSourcesContent.java @@ -1,17 +1,38 @@ package com.webank.wedatasphere.exchangis.datasource.core.vo; +import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonProperty; +import com.webank.wedatasphere.exchangis.datasource.core.domain.ExchangisDataSourceType; +/** + * string: HIVE.ID.DB.TABLE + * json: { + * "type": "HIVE", + * "id": 467, + * "name": "HIVE-DEMO", + * "database": "default", + * "table": "demo-test" + * } + */ public class ExchangisJobDataSourcesContent { - // 唯一标识符 - // HIVE.ID.DB.TABLE @JsonProperty("source_id") private String sourceId; + /** + * Source ds + */ +// private ExchangisJobDataSource source = new ExchangisJobDataSource(); + + @JsonProperty("sink_id") private String sinkId; + /** + * Sink ds + */ +// private ExchangisJobDataSource sink = new ExchangisJobDataSource(); + public String getSourceId() { return sourceId; } @@ -27,4 +48,102 @@ public String getSinkId() { public void setSinkId(String sinkId) { this.sinkId = sinkId; } + +// public void setSource(ExchangisJobDataSource source) { +// this.source = 
source; +// } + +// public ExchangisJobDataSource getSource() { +// return source; +// } + +// public void setSink(ExchangisJobDataSource sink) { +// this.sink = sink; +// } + +// public ExchangisJobDataSource getSink() { +// return sink; +// } + + @JsonInclude(JsonInclude.Include.NON_EMPTY) + public static class ExchangisJobDataSource { + + /** + * Data source type + */ + private ExchangisDataSourceType type; + + /** + * Data source id + */ + private String id; + + /** + * Data source name + */ + private String name; + + /** + * Database field + */ + private String database; + + /** + * Table field + */ + private String table; + + /** + * Uri field + */ + private String uri; + + public void setType(ExchangisDataSourceType type) { + this.type = type; + } + + public ExchangisDataSourceType getType() { + return type; + } + + public void setId(String id) { + this.id = id; + } + + public String getId() { + return id; + } + + public void setName(String name) { + this.name = name; + } + + public String getName() { + return name; + } + + public void setDatabase(String database) { + this.database = database; + } + + public String getDatabase() { + return database; + } + + public void setTable(String table) { + this.table = table; + } + + public String getTable() { + return table; + } + + public void setUri(String uri) { + this.uri = uri; + } + + public String getUri() { + return uri; + } + } } diff --git a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/vo/ExchangisJobInfoContent.java b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/vo/ExchangisJobInfoContent.java index 25ef70e09..eae71c5a6 100644 --- a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/vo/ExchangisJobInfoContent.java +++ 
b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/vo/ExchangisJobInfoContent.java @@ -7,17 +7,35 @@ @JsonIgnoreProperties(ignoreUnknown = true) public class ExchangisJobInfoContent { + /** + * Engine name + */ private String engine; + /** + * Sub job name + */ private String subJobName; + /** + * Data source content + */ private ExchangisJobDataSourcesContent dataSources; + /** + * Extra params + */ private ExchangisJobParamsContent params; + /** + * Transform define + */ // private List transforms; private ExchangisJobTransformsContent transforms; + /** + * Settings + */ private List settings; public String getEngine() { diff --git a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/vo/ExchangisJobParamsContent.java b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/vo/ExchangisJobParamsContent.java index ef8dbcb57..64d3a6106 100644 --- a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/vo/ExchangisJobParamsContent.java +++ b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/vo/ExchangisJobParamsContent.java @@ -1,12 +1,20 @@ package com.webank.wedatasphere.exchangis.datasource.core.vo; +import com.fasterxml.jackson.annotation.JsonAlias; import com.fasterxml.jackson.annotation.JsonProperty; import java.util.List; public class ExchangisJobParamsContent { + /** + * Source params + */ private List sources; + + /** + * Sink params + */ private List sinks; public List getSources() { @@ -26,13 +34,17 @@ public void setSinks(List sinks) { } public static class ExchangisJobParamsItem { + @JsonProperty("config_key") + @JsonAlias({"key", "k"}) private String configKey; @JsonProperty("config_name") + @JsonAlias({"name", "n"}) private String configName; 
@JsonProperty("config_value") + @JsonAlias({"value", "v"}) private Object configValue; private Integer sort; diff --git a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/vo/ExchangisJobTransformsContent.java b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/vo/ExchangisJobTransformsContent.java index f588f5786..7b7b61e85 100644 --- a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/vo/ExchangisJobTransformsContent.java +++ b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/vo/ExchangisJobTransformsContent.java @@ -12,6 +12,16 @@ public class ExchangisJobTransformsContent { @JsonProperty("code_id") private String codeId; + /** + * Table (source) not exist + */ + private boolean srcTblNotExist = false; + + /** + * Table (sink) not exist + */ + private boolean sinkTblNotExist = false; + private List mapping; public boolean isAddEnable() { @@ -53,4 +63,20 @@ public String getCodeId() { public void setCodeId(String codeId) { this.codeId = codeId; } + + public boolean isSrcTblNotExist() { + return srcTblNotExist; + } + + public void setSrcTblNotExist(boolean srcTblNotExist) { + this.srcTblNotExist = srcTblNotExist; + } + + public boolean isSinkTblNotExist() { + return sinkTblNotExist; + } + + public void setSinkTblNotExist(boolean sinkTblNotExist) { + this.sinkTblNotExist = sinkTblNotExist; + } } diff --git a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/vo/ExchangisJobTransformsItem.java b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/vo/ExchangisJobTransformsItem.java index d0ae8ede0..10ddad5a6 100644 --- 
a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/vo/ExchangisJobTransformsItem.java +++ b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/vo/ExchangisJobTransformsItem.java @@ -2,24 +2,56 @@ import java.util.List; +import com.fasterxml.jackson.annotation.JsonAlias; import com.fasterxml.jackson.annotation.JsonProperty; public class ExchangisJobTransformsItem { + /** + * Source field name + */ @JsonProperty("source_field_name") + @JsonAlias({"srcFieldName"}) private String sourceFieldName; + + /** + * Source field type + */ @JsonProperty("source_field_type") + @JsonAlias({"srcFieldType"}) private String sourceFieldType; + + /** + * Sink field name + */ @JsonProperty("sink_field_name") + @JsonAlias({"sinkFieldName"}) private String sinkFieldName; + + /** + * Sink field type + */ @JsonProperty("sink_field_type") + @JsonAlias({"sinkFieldType"}) private String sinkFieldType; + + /** + * Delete enable + */ @JsonProperty("deleteEnable") private boolean deleteEnable; + /** + * Source field index + */ @JsonProperty("source_field_index") + @JsonAlias({"srcFieldIdx"}) private Integer sourceFieldIndex; + /** + * Sink field index + */ @JsonProperty("sink_field_index") + @JsonAlias({"sinkFi"}) private Integer sinkFieldIndex; @JsonProperty("source_field_editable") diff --git a/exchangis-datasource/exchangis-datasource-linkis/pom.xml b/exchangis-datasource/exchangis-datasource-linkis/pom.xml index 2ab7eda3d..d2a6553dd 100644 --- a/exchangis-datasource/exchangis-datasource-linkis/pom.xml +++ b/exchangis-datasource/exchangis-datasource-linkis/pom.xml @@ -3,9 +3,10 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> - exchangis-datasource + exchangis com.webank.wedatasphere.exchangis - 1.1.2 + ${revision} + ../../pom.xml 4.0.0 @@ -20,7 +21,7 @@ 
com.webank.wedatasphere.exchangis exchangis-datasource-core - 1.1.2 + ${project.version} diff --git a/exchangis-datasource/exchangis-datasource-linkis/src/main/java/com/webank/wedatasphere/exchangis/datasource/linkis/ExchangisBatchDataSource.java b/exchangis-datasource/exchangis-datasource-linkis/src/main/java/com/webank/wedatasphere/exchangis/datasource/linkis/ExchangisBatchDataSource.java index 38058e58c..277eee047 100644 --- a/exchangis-datasource/exchangis-datasource-linkis/src/main/java/com/webank/wedatasphere/exchangis/datasource/linkis/ExchangisBatchDataSource.java +++ b/exchangis-datasource/exchangis-datasource-linkis/src/main/java/com/webank/wedatasphere/exchangis/datasource/linkis/ExchangisBatchDataSource.java @@ -1,44 +1,14 @@ package com.webank.wedatasphere.exchangis.datasource.linkis; -import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper; -import com.webank.wedatasphere.exchangis.dao.domain.ExchangisJobParamConfig; -import com.webank.wedatasphere.exchangis.dao.hook.MapperHook; -import com.webank.wedatasphere.exchangis.dao.mapper.ExchangisJobParamConfigMapper; -import com.webank.wedatasphere.exchangis.datasource.core.ExchangisDataSource; +import com.webank.wedatasphere.exchangis.datasource.core.AbstractExchangisDataSourceDefinition; import org.apache.linkis.datasource.client.impl.LinkisDataSourceRemoteClient; import org.apache.linkis.datasource.client.impl.LinkisMetaDataRemoteClient; -import org.apache.linkis.datasourcemanager.common.domain.DataSourceType; -import java.util.List; +/** + * Batch data source + */ +public abstract class ExchangisBatchDataSource extends AbstractExchangisDataSourceDefinition { -public abstract class ExchangisBatchDataSource implements ExchangisDataSource { - - protected MapperHook mapperHook; - protected String id; - - @Override - public void setMapperHook(MapperHook mapperHook) { - this.mapperHook = mapperHook; - } - - protected List getDataSourceParamConfigs(String type) { - ExchangisJobParamConfigMapper 
exchangisJobParamConfigMapper = this.mapperHook.getExchangisJobParamConfigMapper(); - QueryWrapper queryWrapper = new QueryWrapper<>(); - queryWrapper.eq("type", type); - queryWrapper.eq("is_hidden", 0); - queryWrapper.eq("status", 1); - return exchangisJobParamConfigMapper.selectList(queryWrapper); - } - - protected List getDataSourceParamConfigs(String type, String dir) { - ExchangisJobParamConfigMapper exchangisJobParamConfigMapper = this.mapperHook.getExchangisJobParamConfigMapper(); - QueryWrapper queryWrapper = new QueryWrapper<>(); - queryWrapper.eq("config_direction", dir); - queryWrapper.eq("type", type); - queryWrapper.eq("is_hidden", 0); - queryWrapper.eq("status", 1); - return exchangisJobParamConfigMapper.selectList(queryWrapper); - } @Override public LinkisDataSourceRemoteClient getDataSourceRemoteClient() { @@ -50,16 +20,4 @@ public LinkisMetaDataRemoteClient getMetaDataRemoteClient() { return ExchangisLinkisRemoteClient.getLinkisMetadataRemoteClient(); } - @Override - public String id() { - if (null == id || id.equalsIgnoreCase("")) { - List types = getDataSourceTypes("hdfs"); - for (DataSourceType type : types) { - if (type.getName().equalsIgnoreCase(name())) { - this.id = type.getId(); - } - } - } - return this.id; - } } diff --git a/exchangis-datasource/exchangis-datasource-linkis/src/main/java/com/webank/wedatasphere/exchangis/datasource/linkis/ExchangisDataSourceConfiguration.java b/exchangis-datasource/exchangis-datasource-linkis/src/main/java/com/webank/wedatasphere/exchangis/datasource/linkis/ExchangisDataSourceConfiguration.java deleted file mode 100644 index ad2e1deab..000000000 --- a/exchangis-datasource/exchangis-datasource-linkis/src/main/java/com/webank/wedatasphere/exchangis/datasource/linkis/ExchangisDataSourceConfiguration.java +++ /dev/null @@ -1,19 +0,0 @@ -package com.webank.wedatasphere.exchangis.datasource.linkis; - - -import org.apache.linkis.common.conf.CommonVars; - -public class ExchangisDataSourceConfiguration { - public 
static final CommonVars SERVER_URL = CommonVars.apply("wds.exchangis.datasource.client.serverurl", ""); - public static final CommonVars CONNECTION_TIMEOUT = CommonVars.apply("wds.exchangis.datasource.client.connection.timeout", 30000L); - public static final CommonVars DISCOVERY_ENABLED = CommonVars.apply("wds.exchangis.datasource.client.discovery.enabled", false); - public static final CommonVars DISCOVERY_FREQUENCY_PERIOD = CommonVars.apply("wds.exchangis.datasource.client.discoveryfrequency.period", 1L); - public static final CommonVars LOAD_BALANCER_ENABLED = CommonVars.apply("wds.exchangis.datasource.client.loadbalancer.enabled", true); - public static final CommonVars MAX_CONNECTION_SIZE = CommonVars.apply("wds.exchangis.datasource.client.maxconnection.size", 5); - public static final CommonVars RETRY_ENABLED = CommonVars.apply("wds.exchangis.datasource.client.retryenabled", false); - public static final CommonVars READ_TIMEOUT = CommonVars.apply("wds.exchangis.datasource.client.readtimeout", 30000L); - - public static final CommonVars AUTHTOKEN_KEY = CommonVars.apply("wds.exchangis.datasource.client.authtoken.key", ""); - public static final CommonVars AUTHTOKEN_VALUE = CommonVars.apply("wds.exchangis.datasource.client.authtoken.value", ""); - public static final CommonVars DWS_VERSION = CommonVars.apply("wds.exchangis.datasource.client.dws.version", ""); -} diff --git a/exchangis-datasource/exchangis-datasource-linkis/src/main/scala/com/webank/wedatasphere/exchangis/datasource/linkis/ExchangisLinkisRemoteClient.scala b/exchangis-datasource/exchangis-datasource-linkis/src/main/scala/com/webank/wedatasphere/exchangis/datasource/linkis/ExchangisLinkisRemoteClient.scala index a8126a04c..c3d4d242b 100644 --- a/exchangis-datasource/exchangis-datasource-linkis/src/main/scala/com/webank/wedatasphere/exchangis/datasource/linkis/ExchangisLinkisRemoteClient.scala +++ 
b/exchangis-datasource/exchangis-datasource-linkis/src/main/scala/com/webank/wedatasphere/exchangis/datasource/linkis/ExchangisLinkisRemoteClient.scala @@ -1,62 +1,36 @@ package com.webank.wedatasphere.exchangis.datasource.linkis -import com.webank.wedatasphere.exchangis.common.linkis.ClientConfiguration +import com.webank.wedatasphere.exchangis.common.linkis.client.{ClientConfiguration, ExchangisHttpClient} +import com.webank.wedatasphere.exchangis.common.linkis.client.config.ExchangisClientConfig +import com.webank.wedatasphere.exchangis.datasource.core.ExchangisDataSourceConfiguration +import com.webank.wedatasphere.exchangis.datasource.core.exception.{ExchangisDataSourceException, ExchangisDataSourceExceptionCode} +import org.apache.commons.lang3.StringUtils +import org.apache.linkis.datasource.client.config.DatasourceClientConfig.DATA_SOURCE_SERVICE_CLIENT_NAME import org.apache.linkis.datasource.client.impl.{LinkisDataSourceRemoteClient, LinkisMetaDataRemoteClient} import org.apache.linkis.datasource.client.request._ import org.apache.linkis.datasource.client.response._ import org.apache.linkis.datasourcemanager.common.domain.{DataSource, DataSourceType} -import org.apache.linkis.httpclient.dws.authentication.{StaticAuthenticationStrategy, TokenAuthenticationStrategy} -import org.apache.linkis.httpclient.dws.config.{DWSClientConfig, DWSClientConfigBuilder} +import org.apache.linkis.httpclient.dws.DWSHttpClient -import java.lang -import java.util.concurrent.TimeUnit object ExchangisLinkisRemoteClient { + //Linkis Datasource Client Config - val serverUrl: String = ClientConfiguration.LINKIS_SERVER_URL.getValue - val authTokenValue: String = ClientConfiguration.LINKIS_TOKEN_VALUE.getValue - val connectionTimeout: lang.Long = ExchangisDataSourceConfiguration.CONNECTION_TIMEOUT.getValue - val discoveryEnabled: lang.Boolean = ExchangisDataSourceConfiguration.DISCOVERY_ENABLED.getValue - val discoveryFrequencyPeriod: lang.Long = 
ExchangisDataSourceConfiguration.DISCOVERY_FREQUENCY_PERIOD.getValue - val loadbalancerEnabled: lang.Boolean = ExchangisDataSourceConfiguration.LOAD_BALANCER_ENABLED.getValue - val maxConnectionSize: Integer = ExchangisDataSourceConfiguration.MAX_CONNECTION_SIZE.getValue - val retryEnabled: lang.Boolean = ExchangisDataSourceConfiguration.RETRY_ENABLED.getValue - val readTimeout: lang.Long = ExchangisDataSourceConfiguration.READ_TIMEOUT.getValue - val dwsVersion: String = ExchangisDataSourceConfiguration.DWS_VERSION.getValue - - - // val clientConfig = DWSClientConfigBuilder.newBuilder() - // .addServerUrl(serverUrl) - // .connectionTimeout(connectionTimeout) - // .discoveryEnabled(discoveryEnabled) - // .discoveryFrequency(1,TimeUnit.MINUTES) - // .loadbalancerEnabled(loadbalancerEnabled) - // .maxConnectionSize(maxConnectionSize) - // .retryEnabled(retryEnabled) - // .readTimeout(readTimeout) - // .setAuthenticationStrategy(new StaticAuthenticationStrategy()) - // .setAuthTokenKey(authTokenKey) - // .setAuthTokenValue(authTokenValue) - // .setDWSVersion(dwsVersion) - // .build() - - val clientConfig: DWSClientConfig = DWSClientConfigBuilder.newBuilder() - .addServerUrl(serverUrl) - .connectionTimeout(connectionTimeout) - .discoveryEnabled(discoveryEnabled) - .discoveryFrequency(discoveryFrequencyPeriod, TimeUnit.MINUTES) - .loadbalancerEnabled(loadbalancerEnabled) - .maxConnectionSize(maxConnectionSize) - .retryEnabled(retryEnabled) - .readTimeout(readTimeout) - .setAuthenticationStrategy(new TokenAuthenticationStrategy()) - .setAuthTokenValue(authTokenValue) - .setDWSVersion(dwsVersion) + val clientConfig: ExchangisClientConfig = ExchangisClientConfig.newBuilder + .addServerUrl(ExchangisDataSourceConfiguration.SERVER_URL.getValue) + .setAuthTokenValue(ExchangisDataSourceConfiguration.AUTH_TOKEN_VALUE.getValue) + .setDWSVersion(ExchangisDataSourceConfiguration.DWS_VERSION.getValue) .build() - val dataSourceClient = new LinkisDataSourceRemoteClient(clientConfig) + 
/** + * Data source client + */ + val dataSourceClient = new ExchangisDataSourceClient(clientConfig, null) - val metaDataClient = new LinkisMetaDataRemoteClient(clientConfig) + /** + * Meta data client + */ + val metaDataClient = new ExchangisMetadataClient(clientConfig) def getLinkisDataSourceRemoteClient: LinkisDataSourceRemoteClient = { dataSourceClient @@ -83,10 +57,6 @@ object ExchangisLinkisRemoteClient { ) } -// def createDataSource() = { -// dataSourceClient.execute().asInstanceOf[] -// } - /** * get datasourceConnect information * @@ -172,3 +142,27 @@ object ExchangisLinkisRemoteClient { } + +/** + * Exchangis data source client + * @param clientConfig client config + * @param clientName client name + */ +class ExchangisDataSourceClient(clientConfig: ExchangisClientConfig, clientName: String) extends LinkisDataSourceRemoteClient(clientConfig, clientName){ + + protected override val dwsHttpClient: DWSHttpClient = { + val client = if (StringUtils.isEmpty(clientName)) DATA_SOURCE_SERVICE_CLIENT_NAME.getValue else clientName + Option(clientConfig) match { + case Some(config) => new ExchangisHttpClient(config, client) + case _ => throw new ExchangisDataSourceException(ExchangisDataSourceExceptionCode.PARAMETER_INVALID.getCode, "Linkis client config cannot be null") + } + } +} + +/** + * Exchangis meta data client + * @param clientConfig client config + */ +class ExchangisMetadataClient(clientConfig: ExchangisClientConfig) extends LinkisMetaDataRemoteClient(clientConfig){ + protected override val dwsHttpClient: DWSHttpClient = new ExchangisHttpClient(clientConfig, "MetaData-Client") +} diff --git a/exchangis-datasource/exchangis-datasource-loader/pom.xml b/exchangis-datasource/exchangis-datasource-loader/pom.xml index 30ebb07ad..97af1ee06 100644 --- a/exchangis-datasource/exchangis-datasource-loader/pom.xml +++ b/exchangis-datasource/exchangis-datasource-loader/pom.xml @@ -3,9 +3,10 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" 
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> - exchangis-datasource + exchangis com.webank.wedatasphere.exchangis - 1.1.2 + ${revision} + ../../pom.xml 4.0.0 @@ -20,7 +21,7 @@ com.webank.wedatasphere.exchangis exchangis-datasource-core - 1.1.2 + ${project.version} diff --git a/exchangis-datasource/exchangis-datasource-loader/src/main/java/com/webank/wedatasphere/exchangis/datasource/loader/loader/ExchangisDataSourceLoaderFactory.java b/exchangis-datasource/exchangis-datasource-loader/src/main/java/com/webank/wedatasphere/exchangis/datasource/loader/loader/ExchangisDataSourceLoaderFactory.java index 58d6c29dc..979ca9a86 100644 --- a/exchangis-datasource/exchangis-datasource-loader/src/main/java/com/webank/wedatasphere/exchangis/datasource/loader/loader/ExchangisDataSourceLoaderFactory.java +++ b/exchangis-datasource/exchangis-datasource-loader/src/main/java/com/webank/wedatasphere/exchangis/datasource/loader/loader/ExchangisDataSourceLoaderFactory.java @@ -1,6 +1,6 @@ package com.webank.wedatasphere.exchangis.datasource.loader.loader; -import com.webank.wedatasphere.exchangis.datasource.core.loader.ExchangisDataSourceLoader; +import com.webank.wedatasphere.exchangis.datasource.core.loader.ExchangisDataSourceDefLoader; import org.apache.commons.lang.ClassUtils; import org.apache.commons.lang.StringUtils; import org.apache.linkis.common.conf.CommonVars; @@ -11,13 +11,13 @@ public class ExchangisDataSourceLoaderFactory { private static final Logger logger = LoggerFactory.getLogger(ExchangisDataSourceLoaderFactory.class); - private static Class clazz = LocalExchangisDataSourceLoader.class; - private static ExchangisDataSourceLoader exchangisDataSourceLoader = null; + private static Class clazz = LocalExchangisDataSourceLoader.class; + private static ExchangisDataSourceDefLoader exchangisDataSourceDefLoader = null; - public static ExchangisDataSourceLoader getLoader(){ - if (exchangisDataSourceLoader == null){ + 
public static ExchangisDataSourceDefLoader getLoader(){ + if (exchangisDataSourceDefLoader == null){ synchronized (ExchangisDataSourceLoaderFactory.class){ - if (exchangisDataSourceLoader == null){ + if (exchangisDataSourceDefLoader == null){ // 可以通过配置自行加载对应的类 CommonVars apply = CommonVars.apply("exchangis.extds.loader.classname", ""); String className = apply.getValue(); @@ -29,7 +29,7 @@ public static ExchangisDataSourceLoader getLoader(){ } } try { - exchangisDataSourceLoader = clazz.newInstance(); + exchangisDataSourceDefLoader = clazz.newInstance(); } catch (Exception e) { logger.error(String.format("Can not initialize ExchangisDataSourceLoader class %s.", clazz.getSimpleName()), e); } @@ -37,7 +37,7 @@ public static ExchangisDataSourceLoader getLoader(){ } } } - return exchangisDataSourceLoader; + return exchangisDataSourceDefLoader; } } diff --git a/exchangis-datasource/exchangis-datasource-loader/src/main/java/com/webank/wedatasphere/exchangis/datasource/loader/loader/LocalExchangisDataSourceLoader.java b/exchangis-datasource/exchangis-datasource-loader/src/main/java/com/webank/wedatasphere/exchangis/datasource/loader/loader/LocalExchangisDataSourceLoader.java index 50f6f59e3..18d843f09 100644 --- a/exchangis-datasource/exchangis-datasource-loader/src/main/java/com/webank/wedatasphere/exchangis/datasource/loader/loader/LocalExchangisDataSourceLoader.java +++ b/exchangis-datasource/exchangis-datasource-loader/src/main/java/com/webank/wedatasphere/exchangis/datasource/loader/loader/LocalExchangisDataSourceLoader.java @@ -1,9 +1,9 @@ package com.webank.wedatasphere.exchangis.datasource.loader.loader; import com.webank.wedatasphere.exchangis.dao.hook.MapperHook; -import com.webank.wedatasphere.exchangis.datasource.core.ExchangisDataSource; +import com.webank.wedatasphere.exchangis.datasource.core.ExchangisDataSourceDefinition; import com.webank.wedatasphere.exchangis.datasource.core.context.ExchangisDataSourceContext; -import 
com.webank.wedatasphere.exchangis.datasource.core.loader.ExchangisDataSourceLoader; +import com.webank.wedatasphere.exchangis.datasource.core.loader.ExchangisDataSourceDefLoader; import com.webank.wedatasphere.exchangis.datasource.loader.clazzloader.ExchangisDataSourceClassLoader; import com.webank.wedatasphere.exchangis.datasource.loader.utils.ExceptionHelper; import com.webank.wedatasphere.exchangis.datasource.loader.utils.ExtDsUtils; @@ -17,7 +17,7 @@ import java.util.List; import java.util.Objects; -public class LocalExchangisDataSourceLoader implements ExchangisDataSourceLoader { +public class LocalExchangisDataSourceLoader implements ExchangisDataSourceDefLoader { private static final Logger LOGGER = LoggerFactory.getLogger(LocalExchangisDataSourceLoader.class); @@ -61,24 +61,24 @@ public void init(MapperHook mapperHook) throws Exception { if (clazz == null) { Thread.currentThread().setContextClassLoader(currentClassLoader); } else { - ExchangisDataSource exchangisDataSource = (ExchangisDataSource) clazz.newInstance(); - exchangisDataSource.setMapperHook(mapperHook); + ExchangisDataSourceDefinition dsType = (ExchangisDataSourceDefinition) clazz.newInstance(); + dsType.setMapperHook(mapperHook); Thread.currentThread().setContextClassLoader(currentClassLoader); - LOGGER.info("ExchangisDataSource is {}", exchangisDataSource.getClass().toString()); + LOGGER.info("ExchangisDataSource is {}", dsType.getClass().toString()); - context.addExchangisDataSource(exchangisDataSource); + context.addExchangisDsDefinition(dsType); } } } @Override - public ExchangisDataSource load(String dataSourceType) { + public ExchangisDataSourceDefinition load(String dataSourceType) { return null; } @Override - public ExchangisDataSource get(String dataSourceType, boolean reload) { + public ExchangisDataSourceDefinition get(String dataSourceType, boolean reload) { return null; } } diff --git 
a/exchangis-datasource/exchangis-datasource-loader/src/main/java/com/webank/wedatasphere/exchangis/datasource/loader/utils/ExtDsUtils.java b/exchangis-datasource/exchangis-datasource-loader/src/main/java/com/webank/wedatasphere/exchangis/datasource/loader/utils/ExtDsUtils.java index 69db1da10..59b40ea66 100644 --- a/exchangis-datasource/exchangis-datasource-loader/src/main/java/com/webank/wedatasphere/exchangis/datasource/loader/utils/ExtDsUtils.java +++ b/exchangis-datasource/exchangis-datasource-loader/src/main/java/com/webank/wedatasphere/exchangis/datasource/loader/utils/ExtDsUtils.java @@ -18,8 +18,8 @@ package com.webank.wedatasphere.exchangis.datasource.loader.utils; -import com.webank.wedatasphere.exchangis.datasource.core.ExchangisDataSource; -import com.webank.wedatasphere.exchangis.datasource.core.loader.ExchangisDataSourceLoader; +import com.webank.wedatasphere.exchangis.datasource.core.ExchangisDataSourceDefinition; +import com.webank.wedatasphere.exchangis.datasource.core.loader.ExchangisDataSourceDefLoader; import com.webank.wedatasphere.exchangis.datasource.loader.exception.NoSuchExchangisExtDataSourceException; import org.apache.commons.lang.StringUtils; import org.slf4j.Logger; @@ -40,7 +40,7 @@ public class ExtDsUtils { private static final Logger logger = LoggerFactory.getLogger(ExtDsUtils.class); - private static Class PARENT_CLASS = ExchangisDataSource.class; + private static Class PARENT_CLASS = ExchangisDataSourceDefinition.class; public static String getExchangisExtDataSourceClassName(String libPath, @@ -99,7 +99,7 @@ public static List getJarsOfPath(String path) { if (file.listFiles() != null) { for (File f : file.listFiles()) { // exchangis-xxxxx.jar - if (!f.isDirectory() && f.getName().endsWith(ExchangisDataSourceLoader.JAR_SUF_NAME) && f.getName().startsWith("exchangis")) { + if (!f.isDirectory() && f.getName().endsWith(ExchangisDataSourceDefLoader.JAR_SUF_NAME) && f.getName().startsWith("exchangis")) { jars.add(f.getPath()); } } @@ 
-115,11 +115,11 @@ public static List getJarsUrlsOfPath(String path) { List jars = new ArrayList<>(); if (file.listFiles() != null) { for (File f : file.listFiles()) { - if (!f.isDirectory() && f.getName().endsWith(ExchangisDataSourceLoader.JAR_SUF_NAME)) { + if (!f.isDirectory() && f.getName().endsWith(ExchangisDataSourceDefLoader.JAR_SUF_NAME)) { try { jars.add(f.toURI().toURL()); } catch (MalformedURLException e) { - logger.warn("url {} cannot be added", ExchangisDataSourceLoader.FILE_SCHEMA + f.getPath()); + logger.warn("url {} cannot be added", ExchangisDataSourceDefLoader.FILE_SCHEMA + f.getPath()); } } } diff --git a/exchangis-datasource/exchangis-datasource-server/pom.xml b/exchangis-datasource/exchangis-datasource-server/pom.xml index c41abaa25..e2b04ecad 100644 --- a/exchangis-datasource/exchangis-datasource-server/pom.xml +++ b/exchangis-datasource/exchangis-datasource-server/pom.xml @@ -3,9 +3,10 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> - exchangis-datasource + exchangis com.webank.wedatasphere.exchangis - 1.1.2 + ${revision} + ../../pom.xml 4.0.0 @@ -20,13 +21,13 @@ com.webank.wedatasphere.exchangis exchangis-datasource-service - 1.1.2 + ${project.version} com.webank.wedatasphere.exchangis exchangis-datasource-loader - 1.1.2 + ${project.version} diff --git a/exchangis-datasource/exchangis-datasource-server/src/main/java/com/webank/wedatasphere/exchangis/datasource/server/configuration/ServerConfig.java b/exchangis-datasource/exchangis-datasource-server/src/main/java/com/webank/wedatasphere/exchangis/datasource/server/configuration/ServerConfig.java index 85a71ef80..5f9809e85 100644 --- a/exchangis-datasource/exchangis-datasource-server/src/main/java/com/webank/wedatasphere/exchangis/datasource/server/configuration/ServerConfig.java +++ 
b/exchangis-datasource/exchangis-datasource-server/src/main/java/com/webank/wedatasphere/exchangis/datasource/server/configuration/ServerConfig.java @@ -2,9 +2,9 @@ import com.webank.wedatasphere.exchangis.dao.hook.MapperHook; -import com.webank.wedatasphere.exchangis.datasource.core.context.DefaultExchangisDataSourceContext; +import com.webank.wedatasphere.exchangis.datasource.core.context.DefaultExchangisDsContext; import com.webank.wedatasphere.exchangis.datasource.core.context.ExchangisDataSourceContext; -import com.webank.wedatasphere.exchangis.datasource.core.loader.ExchangisDataSourceLoader; +import com.webank.wedatasphere.exchangis.datasource.core.loader.ExchangisDataSourceDefLoader; import com.webank.wedatasphere.exchangis.datasource.loader.loader.ExchangisDataSourceLoaderFactory; import org.apache.linkis.common.exception.ErrorException; import org.springframework.context.annotation.Bean; @@ -15,8 +15,8 @@ public class ServerConfig { @Bean public ExchangisDataSourceContext context(MapperHook mapperHook) throws Exception { - DefaultExchangisDataSourceContext context = new DefaultExchangisDataSourceContext(); - ExchangisDataSourceLoader loader = ExchangisDataSourceLoaderFactory.getLoader(); + DefaultExchangisDsContext context = new DefaultExchangisDsContext(); + ExchangisDataSourceDefLoader loader = ExchangisDataSourceLoaderFactory.getLoader(); loader.setContext(context); try { loader.init(mapperHook); diff --git a/exchangis-datasource/exchangis-datasource-server/src/main/java/com/webank/wedatasphere/exchangis/datasource/server/restful/api/ExchangisDataSourceRenderRestfulApi.java b/exchangis-datasource/exchangis-datasource-server/src/main/java/com/webank/wedatasphere/exchangis/datasource/server/restful/api/ExchangisDataSourceRenderRestfulApi.java index 2e4bdd724..d83538d14 100644 --- a/exchangis-datasource/exchangis-datasource-server/src/main/java/com/webank/wedatasphere/exchangis/datasource/server/restful/api/ExchangisDataSourceRenderRestfulApi.java +++ 
b/exchangis-datasource/exchangis-datasource-server/src/main/java/com/webank/wedatasphere/exchangis/datasource/server/restful/api/ExchangisDataSourceRenderRestfulApi.java @@ -12,6 +12,7 @@ import javax.servlet.http.HttpServletRequest; import java.util.Locale; import java.util.Objects; +import java.util.Optional; /** * Expose the ui interface to front-end rendering @@ -29,6 +30,7 @@ public class ExchangisDataSourceRenderRestfulApi { public Message partition(@PathVariable("elementType") String type, @RequestParam("dataSourceId") Long dataSourceId, @RequestParam("database") String database, + @RequestParam(value = "tableNotExist", required = false) Boolean tableNotExist, @RequestParam("table") String table, HttpServletRequest request){ String userName = UserUtils.getLoginUser(request); ElementUI.Type uiType; @@ -39,8 +41,11 @@ public Message partition(@PathVariable("elementType") String type, } Message result = Message.ok(); try{ - ElementUI elementUI = renderService.getPartitionAndRender(userName, dataSourceId, database, table, uiType); + boolean notExist = Optional.ofNullable(tableNotExist).orElse(false); + ElementUI elementUI = renderService.getPartitionAndRender(userName, dataSourceId, + database, table, uiType, notExist); result.data("type", uiType.name()); + result.data("customize", notExist); if (Objects.nonNull(elementUI)){ result.data("render", elementUI.getValue()); } diff --git a/exchangis-datasource/exchangis-datasource-server/src/main/java/com/webank/wedatasphere/exchangis/datasource/server/restful/api/ExchangisDataSourceRestfulApi.java b/exchangis-datasource/exchangis-datasource-server/src/main/java/com/webank/wedatasphere/exchangis/datasource/server/restful/api/ExchangisDataSourceRestfulApi.java index 474a94439..dbb0e2526 100644 --- a/exchangis-datasource/exchangis-datasource-server/src/main/java/com/webank/wedatasphere/exchangis/datasource/server/restful/api/ExchangisDataSourceRestfulApi.java +++ 
b/exchangis-datasource/exchangis-datasource-server/src/main/java/com/webank/wedatasphere/exchangis/datasource/server/restful/api/ExchangisDataSourceRestfulApi.java @@ -162,7 +162,7 @@ public Message create(/*@PathParam("type") String type, */@Valid @RequestBody Da Message message = new Message(); String loginUser = UserUtils.getLoginUser(request); String oringinUser = SecurityFilter.getLoginUsername(request); - LOG.info("dataSourceName: " + dataSourceCreateVO.getDataSourceName() + "dataSourceDesc: " + dataSourceCreateVO.getDataSourceDesc() + "label: " + dataSourceCreateVO.getLabels()); + LOG.info("dataSourceName: " + dataSourceCreateVO.getDataSourceName() + ", dataSourceDesc: " + dataSourceCreateVO.getDataSourceDesc() + ", label: " + dataSourceCreateVO.getLabels()); if(bindingResult.hasErrors()){ List fieldErrors = bindingResult.getFieldErrors(); for(int i=0;i - exchangis-datasource + exchangis com.webank.wedatasphere.exchangis - 1.1.2 + ${revision} + ../../pom.xml 4.0.0 @@ -21,19 +22,19 @@ com.webank.wedatasphere.exchangis exchangis-datasource-linkis - 1.1.2 + ${project.version} com.webank.wedatasphere.exchangis exchangis-job-common - 1.1.2 + ${project.version} com.webank.wedatasphere.exchangis exchangis-engine-core - 1.1.2 + ${project.version} compile diff --git a/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/Utils/RSAUtil.java b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/Utils/RSAUtil.java index d75996d99..b85130df5 100644 --- a/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/Utils/RSAUtil.java +++ b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/Utils/RSAUtil.java @@ -1,14 +1,13 @@ package com.webank.wedatasphere.exchangis.datasource.Utils; import org.apache.linkis.common.conf.CommonVars; -import 
sun.misc.BASE64Decoder; -import sun.misc.BASE64Encoder; import javax.crypto.Cipher; import java.io.IOException; import java.security.*; import java.security.spec.PKCS8EncodedKeySpec; import java.security.spec.X509EncodedKeySpec; +import java.util.Base64; /** * @author tikazhang @@ -78,13 +77,13 @@ public static byte[] privateDecrypt(byte[] content, PrivateKey privateKey) throw //字节数组转Base64编码 public static String byte2Base64(byte[] bytes){ - BASE64Encoder encoder = new BASE64Encoder(); - return encoder.encode(bytes); + Base64.Encoder encoder = Base64.getEncoder(); + return encoder.encodeToString(bytes); } //Base64编码转字节数组 public static byte[] base642Byte(String base64Key) throws IOException { - BASE64Decoder decoder = new BASE64Decoder(); - return decoder.decodeBuffer(base64Key); + Base64.Decoder decoder = Base64.getDecoder(); + return decoder.decode(base64Key); } } diff --git a/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/domain/ExchangisDsProject.java b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/domain/ExchangisDsProject.java new file mode 100644 index 000000000..322424cb9 --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/domain/ExchangisDsProject.java @@ -0,0 +1,8 @@ +package com.webank.wedatasphere.exchangis.datasource.domain; + +/** + * The relation between data source and project + */ +public class ExchangisDsProject { +// private String +} diff --git a/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/dto/DataSourceDTO.java b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/dto/DataSourceDTO.java deleted file mode 100644 index 2ee15e9ed..000000000 --- 
a/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/dto/DataSourceDTO.java +++ /dev/null @@ -1,169 +0,0 @@ -package com.webank.wedatasphere.exchangis.datasource.dto; - -import java.util.Date; - -public class DataSourceDTO { - - private Long id; - private String name; - private String type; - private Long dataSourceTypeId; - private String createIdentify; - private String createSystem; - private String desc; - private String createUser; - private String labels; - private String label; - private Long versionId; - private String modifyUser; - private Date modifyTime; - private boolean expire; - private boolean writeAble; - private boolean readAble; - private String authDbs; - private String authTbls; - - public boolean isExpire() { - return expire; - } - - public void setExpire(boolean expire) { - this.expire = expire; - } - - public Long getId() { - return id; - } - - public void setId(Long id) { - this.id = id; - } - - public String getName() { - return name; - } - - public void setName(String name) { - this.name = name; - } - - public String getType() { - return type; - } - - public void setType(String type) { - this.type = type; - } - - public String getCreateIdentify() { - return createIdentify; - } - - public void setCreateIdentify(String createIdentify) { - this.createIdentify = createIdentify; - } - - public Long getDataSourceTypeId() { - return dataSourceTypeId; - } - - public void setDataSourceTypeId(Long dataSourceTypeId) { - this.dataSourceTypeId = dataSourceTypeId; - } - - public String getDesc() { - return desc; - } - - public void setDesc(String desc) { - this.desc = desc; - } - - public String getCreateUser() { - return createUser; - } - - public void setCreateUser(String createUser) { - this.createUser = createUser; - } - - public String getLabels() { - return labels; - } - - public void setLabels(String labels) { - this.labels = labels; - } - - public String getLabel() { - return label; - } - 
- public void setLabel(String label) { - this.label = label; - } - - public Long getVersionId() { - return versionId; - } - - public void setVersionId(Long versionId) { - this.versionId = versionId; - } - - public String getModifyUser() { - return modifyUser; - } - - public void setModifyUser(String modifyUser) { - this.modifyUser = modifyUser; - } - - public Date getModifyTime() { - return modifyTime; - } - - public void setModifyTime(Date modifyTime) { - this.modifyTime = modifyTime; - } - - public String getCreateSystem() { - return createSystem; - } - - public void setCreateSystem(String createSystem) { - this.createSystem = createSystem; - } - - public boolean isWriteAble() { - return writeAble; - } - - public void setWriteAble(boolean writeAble) { - this.writeAble = writeAble; - } - - public boolean isReadAble() { - return readAble; - } - - public void setReadAble(boolean readAble) { - this.readAble = readAble; - } - - public String getAuthDbs() { - return authDbs; - } - - public void setAuthDbs(String authDbs) { - this.authDbs = authDbs; - } - - public String getAuthTbls() { - return authTbls; - } - - public void setAuthTbls(String authTbls) { - this.authTbls = authTbls; - } -} diff --git a/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/dto/ExchangisDataSourceDTO.java b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/dto/ExchangisDataSourceDTO.java index b6a2fff3a..4f2c06fe3 100644 --- a/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/dto/ExchangisDataSourceDTO.java +++ b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/dto/ExchangisDataSourceDTO.java @@ -1,62 +1,169 @@ package com.webank.wedatasphere.exchangis.datasource.dto; +import java.util.Date; + public class ExchangisDataSourceDTO { - private final String id; - 
private final String classifier; - private final String name; - private String option; - private String description; - private String icon; - private String struct_classifier; - - public ExchangisDataSourceDTO(String id, String classifier, String name, String struct_classifier) { - this.id = id; - this.classifier = classifier; - this.name = name; - this.struct_classifier = struct_classifier; + + private Long id; + private String name; + private String type; + private Long dataSourceTypeId; + private String createIdentify; + private String createSystem; + private String desc; + private String createUser; + private String labels; + private String label; + private Long versionId; + private String modifyUser; + private Date modifyTime; + private boolean expire; + private boolean writeAble; + private boolean readAble; + private String authDbs; + private String authTbls; + + public boolean isExpire() { + return expire; + } + + public void setExpire(boolean expire) { + this.expire = expire; } - public String getId() { + public Long getId() { return id; } - public String getClassifier() { - return classifier; + public void setId(Long id) { + this.id = id; } public String getName() { return name; } - public String getOption() { - return option; + public void setName(String name) { + this.name = name; + } + + public String getType() { + return type; + } + + public void setType(String type) { + this.type = type; + } + + public String getCreateIdentify() { + return createIdentify; + } + + public void setCreateIdentify(String createIdentify) { + this.createIdentify = createIdentify; + } + + public Long getDataSourceTypeId() { + return dataSourceTypeId; + } + + public void setDataSourceTypeId(Long dataSourceTypeId) { + this.dataSourceTypeId = dataSourceTypeId; + } + + public String getDesc() { + return desc; + } + + public void setDesc(String desc) { + this.desc = desc; + } + + public String getCreateUser() { + return createUser; + } + + public void setCreateUser(String 
createUser) { + this.createUser = createUser; + } + + public String getLabels() { + return labels; + } + + public void setLabels(String labels) { + this.labels = labels; + } + + public String getLabel() { + return label; + } + + public void setLabel(String label) { + this.label = label; + } + + public Long getVersionId() { + return versionId; + } + + public void setVersionId(Long versionId) { + this.versionId = versionId; + } + + public String getModifyUser() { + return modifyUser; + } + + public void setModifyUser(String modifyUser) { + this.modifyUser = modifyUser; + } + + public Date getModifyTime() { + return modifyTime; + } + + public void setModifyTime(Date modifyTime) { + this.modifyTime = modifyTime; + } + + public String getCreateSystem() { + return createSystem; + } + + public void setCreateSystem(String createSystem) { + this.createSystem = createSystem; + } + + public boolean isWriteAble() { + return writeAble; } - public void setOption(String option) { - this.option = option; + public void setWriteAble(boolean writeAble) { + this.writeAble = writeAble; } - public void setDescription(String description) { - this.description = description; + public boolean isReadAble() { + return readAble; } - public void setIcon(String icon) { - this.icon = icon; + public void setReadAble(boolean readAble) { + this.readAble = readAble; } - public String getDescription() { - return description; + public String getAuthDbs() { + return authDbs; } - public String getIcon() { - return icon; + public void setAuthDbs(String authDbs) { + this.authDbs = authDbs; } - public String getStruct_classifier() { - return struct_classifier; + public String getAuthTbls() { + return authTbls; } - public void setStruct_classifier(String struct_classifier) { - this.struct_classifier = struct_classifier; + public void setAuthTbls(String authTbls) { + this.authTbls = authTbls; } } diff --git 
a/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/dto/ExchangisDataSourceDefDTO.java b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/dto/ExchangisDataSourceDefDTO.java new file mode 100644 index 000000000..c71f60fd9 --- /dev/null +++ b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/dto/ExchangisDataSourceDefDTO.java @@ -0,0 +1,62 @@ +package com.webank.wedatasphere.exchangis.datasource.dto; + +public class ExchangisDataSourceDefDTO { + private final String id; + private final String classifier; + private final String name; + private String option; + private String description; + private String icon; + private String struct_classifier; + + public ExchangisDataSourceDefDTO(String id, String classifier, String name, String struct_classifier) { + this.id = id; + this.classifier = classifier; + this.name = name; + this.struct_classifier = struct_classifier; + } + + public String getId() { + return id; + } + + public String getClassifier() { + return classifier; + } + + public String getName() { + return name; + } + + public String getOption() { + return option; + } + + public void setOption(String option) { + this.option = option; + } + + public void setDescription(String description) { + this.description = description; + } + + public void setIcon(String icon) { + this.icon = icon; + } + + public String getDescription() { + return description; + } + + public String getIcon() { + return icon; + } + + public String getStruct_classifier() { + return struct_classifier; + } + + public void setStruct_classifier(String struct_classifier) { + this.struct_classifier = struct_classifier; + } +} diff --git a/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/service/AbstractDataSourceService.java 
b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/service/AbstractDataSourceService.java index 8688554be..f4ebaab69 100644 --- a/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/service/AbstractDataSourceService.java +++ b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/service/AbstractDataSourceService.java @@ -8,7 +8,7 @@ import com.webank.wedatasphere.exchangis.common.UserUtils; import com.webank.wedatasphere.exchangis.dao.domain.ExchangisJobParamConfig; import com.webank.wedatasphere.exchangis.dao.mapper.ExchangisJobParamConfigMapper; -import com.webank.wedatasphere.exchangis.datasource.core.ExchangisDataSource; +import com.webank.wedatasphere.exchangis.datasource.core.ExchangisDataSourceDefinition; import com.webank.wedatasphere.exchangis.datasource.core.context.ExchangisDataSourceContext; import com.webank.wedatasphere.exchangis.datasource.core.ui.*; import com.webank.wedatasphere.exchangis.datasource.core.ui.viewer.DefaultDataSourceUIViewer; @@ -84,7 +84,7 @@ private ExchangisDataSourceIdsUI buildDataSourceIdsUI(HttpServletRequest request source.setType(split[0]); source.setId(split[1]); Optional.ofNullable(loginUser).ifPresent(u -> { - Optional.ofNullable(this.context.getExchangisDataSource(split[0])).ifPresent(o -> { + Optional.ofNullable(this.context.getExchangisDsDefinition(split[0])).ifPresent(o -> { LinkisDataSourceRemoteClient dsClient = o.getDataSourceRemoteClient(); GetInfoByDataSourceIdAction action = GetInfoByDataSourceIdAction.builder() .setDataSourceId(Long.parseLong(split[1])) @@ -112,7 +112,7 @@ private ExchangisDataSourceIdsUI buildDataSourceIdsUI(HttpServletRequest request sink.setType(split[0]); sink.setId(split[1]); Optional.ofNullable(loginUser).ifPresent(u -> { - Optional.ofNullable(this.context.getExchangisDataSource(split[0])).ifPresent(o -> { + 
Optional.ofNullable(this.context.getExchangisDsDefinition(split[0])).ifPresent(o -> { LinkisDataSourceRemoteClient dsClient = o.getDataSourceRemoteClient(); GetInfoByDataSourceIdAction action = GetInfoByDataSourceIdAction.builder() .setDataSourceId(Long.parseLong(split[1])) @@ -146,7 +146,7 @@ protected ExchangisDataSourceParamsUI buildDataSourceParamsUI(ExchangisJobInfoCo ExchangisDataSourceIdUI source = dataSourceIdsUI.getSource(); if (null != source) { String type = source.getType(); - ExchangisDataSource exchangisSourceDataSource = this.context.getExchangisDataSource(type); + ExchangisDataSourceDefinition exchangisSourceDataSource = this.context.getExchangisDsDefinition(type); if (null != exchangisSourceDataSource) { sourceParamConfigs = exchangisSourceDataSource.getDataSourceParamConfigs().stream().filter( i -> i.getConfigDirection().equals(content.getEngine() + "-SOURCE") || "SOURCE".equalsIgnoreCase(i.getConfigDirection())).collect(Collectors.toList()); @@ -156,7 +156,7 @@ protected ExchangisDataSourceParamsUI buildDataSourceParamsUI(ExchangisJobInfoCo ExchangisDataSourceIdUI sink = dataSourceIdsUI.getSink(); if (null != sink) { String type = sink.getType(); - ExchangisDataSource exchangisSinkDataSource = this.context.getExchangisDataSource(type); + ExchangisDataSourceDefinition exchangisSinkDataSource = this.context.getExchangisDsDefinition(type); if (null != exchangisSinkDataSource) { sinkParamConfigs = exchangisSinkDataSource.getDataSourceParamConfigs().stream().filter(i -> i.getConfigDirection().equals(content.getEngine() + "-SINK") || "SINK".equalsIgnoreCase(i.getConfigDirection())).collect(Collectors.toList()); diff --git a/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/service/DataSourceRenderService.java b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/service/DataSourceRenderService.java index 8e237fa48..5df843cdc 100644 --- 
a/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/service/DataSourceRenderService.java +++ b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/service/DataSourceRenderService.java @@ -11,6 +11,7 @@ public interface DataSourceRenderService { * @return element ui */ ElementUI getPartitionAndRender(String userName, - Long dataSourceId, String database, String table, ElementUI.Type uiType) throws ExchangisDataSourceException; + Long dataSourceId, String database, + String table, ElementUI.Type uiType, boolean tableNotExist) throws ExchangisDataSourceException; } diff --git a/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/service/ExchangisDataSourceService.java b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/service/ExchangisDataSourceService.java index 40fcced7b..69d4f7d65 100644 --- a/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/service/ExchangisDataSourceService.java +++ b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/service/ExchangisDataSourceService.java @@ -11,7 +11,7 @@ import com.webank.wedatasphere.exchangis.dao.mapper.ExchangisJobParamConfigMapper; import com.webank.wedatasphere.exchangis.datasource.GetDataSourceInfoByIdAndVersionIdAction; import com.webank.wedatasphere.exchangis.datasource.Utils.RSAUtil; -import com.webank.wedatasphere.exchangis.datasource.core.ExchangisDataSource; +import com.webank.wedatasphere.exchangis.datasource.core.ExchangisDataSourceDefinition; import com.webank.wedatasphere.exchangis.datasource.core.context.ExchangisDataSourceContext; import com.webank.wedatasphere.exchangis.datasource.core.exception.ExchangisDataSourceException; import 
com.webank.wedatasphere.exchangis.datasource.core.exception.ExchangisDataSourceExceptionCode; @@ -112,7 +112,7 @@ public List getJobDataSourceUIs(HttpServletRequest @Override public List> getDataSourceParamsUI(String dsType, String engineAndDirection) { - ExchangisDataSource exchangisDataSource = this.context.getExchangisDataSource(dsType); + ExchangisDataSourceDefinition exchangisDataSource = this.context.getExchangisDsDefinition(dsType); List paramConfigs = exchangisDataSource.getDataSourceParamConfigs(); List filteredConfigs = new ArrayList<>(); String[] engineDirect = engineAndDirection.split("-"); @@ -138,8 +138,8 @@ public List> getJobEngineSettingsUI(String engineType) { * 做比较,筛选出可以给前端展示的数据源类型 */ public Message listDataSources(HttpServletRequest request, String engineType, String direct, String sourceType) throws Exception { - Collection all = this.context.all(); - List dtos = new ArrayList<>(); + Collection all = this.context.all(); + List dtos = new ArrayList<>(); List settingsList = this.settingsDao.getSettings(); List engineSettings = new ArrayList<>(); @@ -160,19 +160,22 @@ public Message listDataSources(HttpServletRequest request, String engineType, St Set directType = new HashSet<>(); for (EngineSettings engineSetting: engineSettings) { - if (StringUtils.isEmpty(direct)) { - for (int i = 0; i < engineSetting.getDirectionRules().size(); i++) { - directType.add(engineSetting.getDirectionRules().get(i).getSource()); - directType.add(engineSetting.getDirectionRules().get(i).getSink()); - } - } else { - for (int i = 0; i < engineSetting.getDirectionRules().size(); i++) { - if ((StringUtils.equals(direct.toLowerCase(), "source"))) { - directType.add(engineSetting.getDirectionRules().get(i).getSource()); + for (int i = 0; i < engineSetting.getDirectionRules().size(); i++) { + engineSetting.getDirectionRules().stream().forEach(item -> { + String source = item.getSource(); + String sink = item.getSink(); + if (StringUtils.isEmpty(direct)) { + 
directType.add(source); + directType.add(sink); + } else if (StringUtils.equals(direct, "source")) { + directType.add(source); } else { - directType.add(engineSetting.getDirectionRules().get(i).getSink()); + if ((StringUtils.isBlank(sourceType) || + (StringUtils.isNoneBlank(sourceType) && StringUtils.equals(source, sourceType.toLowerCase())))) { + directType.add(sink); + } } - } + }); } } @@ -180,7 +183,6 @@ public Message listDataSources(HttpServletRequest request, String engineType, St LOGGER.info("listDataSources userName: {}" + userName); // 通过 datasourcemanager 获取的数据源类型和context中的数据源通过 type 和 name 比较 // 以 exchangis 中注册了的数据源集合为准 - LinkisDataSourceRemoteClient linkisDataSourceRemoteClient = ExchangisLinkisRemoteClient.getLinkisDataSourceRemoteClient(); GetAllDataSourceTypesResult result; try { @@ -207,26 +209,19 @@ public Message listDataSources(HttpServletRequest request, String engineType, St for (DataSourceType type : allDataSourceType) { LOGGER.info("Current datasource Type is :{}", type.getName()); - for (ExchangisDataSource item : all) { + for (ExchangisDataSourceDefinition item : all) { if (item.name().equalsIgnoreCase(type.getName())) { - ExchangisDataSourceDTO dto = new ExchangisDataSourceDTO( + ExchangisDataSourceDefDTO dto = new ExchangisDataSourceDefDTO( type.getId(), type.getClassifier(), -// item.classifier(), item.name(), item.structClassifier() ); -// dto.setDescription(item.description()); -// dto.setIcon(item.icon()); -// dto.setOption(item.option()); // use linkis datasource table field to fill the dto bean dto.setIcon(type.getIcon()); dto.setDescription(type.getDescription()); dto.setOption(type.getOption()); - if (sourceType == null || !sourceType.toLowerCase().equals(type.getName())) { - //LOGGER.info("sourceType:{}, typename: {}", sourceType.toLowerCase(), type.getName()); - dtos.add(dto); - } + dtos.add(dto); } } } @@ -258,12 +253,12 @@ public Message create(HttpServletRequest request, /*String type, */DataSourceCre String user = 
UserUtils.getLoginUser(request); LOGGER.info("createDatasource userName:" + user); - ExchangisDataSource exchangisDataSource = context.getExchangisDataSource(vo.getDataSourceTypeId()); - if (Objects.isNull(exchangisDataSource)) { + ExchangisDataSourceDefinition dsType = context.getExchangisDsDefinition(vo.getDataSourceTypeId()); + if (Objects.isNull(dsType)) { throw new ExchangisDataSourceException(CONTEXT_GET_DATASOURCE_NULL.getCode(), "exchangis context get datasource null"); } - LinkisDataSourceRemoteClient client = exchangisDataSource.getDataSourceRemoteClient(); + LinkisDataSourceRemoteClient client = dsType.getDataSourceRemoteClient(); LOGGER.info("create datasource json as follows"); Set> entries = json.entrySet(); for (Map.Entry entry : entries) { @@ -341,12 +336,12 @@ public Message updateDataSource(HttpServletRequest request,/* String type,*/ Lon LOGGER.info("updateDataSource userName:" + user); LOGGER.info("DataSourceTypeId:" + vo.getDataSourceTypeId()); - ExchangisDataSource exchangisDataSource = context.getExchangisDataSource(vo.getDataSourceTypeId()); - if (Objects.isNull(exchangisDataSource)) { + ExchangisDataSourceDefinition dsType = context.getExchangisDsDefinition(vo.getDataSourceTypeId()); + if (Objects.isNull(dsType)) { throw new ExchangisDataSourceException(30401, "exchangis.datasource.null"); } - LinkisDataSourceRemoteClient client = exchangisDataSource.getDataSourceRemoteClient(); + LinkisDataSourceRemoteClient client = dsType.getDataSourceRemoteClient(); // UpdateDataSourceResult updateDataSourceResult; String responseBody; try { @@ -401,7 +396,7 @@ public Message deleteDataSource(HttpServletRequest request, /*String type,*/ Lon QueryWrapper condition = new QueryWrapper<>(); condition.eq("source_ds_id", id).or().eq("sink_ds_id", id); - Long inUseCount = this.exchangisJobDsBindMapper.selectCount(condition); + Long inUseCount = Long.valueOf(this.exchangisJobDsBindMapper.selectCount(condition)); if (inUseCount > 0) { throw new 
ExchangisDataSourceException(ExchangisDataSourceExceptionCode.CLIENT_DATASOURCE_DELETE_ERROR.getCode(), "目前存在引用依赖"); } @@ -441,8 +436,8 @@ public Message deleteDataSource(HttpServletRequest request, /*String type,*/ Lon } public Message queryDataSourceDBs(HttpServletRequest request, String type, Long id) throws Exception { - ExchangisDataSource exchangisDataSource = context.getExchangisDataSource(type); - LinkisMetaDataRemoteClient metaDataRemoteClient = exchangisDataSource.getMetaDataRemoteClient(); + ExchangisDataSourceDefinition definition = context.getExchangisDsDefinition(type); + LinkisMetaDataRemoteClient metaDataRemoteClient = definition.getMetaDataRemoteClient(); String userName = UserUtils.getLoginUser(request); LOGGER.info("queryDataSourceDBs userName:" + userName); @@ -471,10 +466,10 @@ public Message queryDataSourceDBTables(HttpServletRequest request, String type, String user = UserUtils.getLoginUser(request); LOGGER.info("queryDataSourceDBTables userName:" + user); - ExchangisDataSource exchangisDataSource = context.getExchangisDataSource(type); + ExchangisDataSourceDefinition definition = context.getExchangisDsDefinition(type); MetadataGetTablesResult tables; try { - LinkisMetaDataRemoteClient metaDataRemoteClient = exchangisDataSource.getMetaDataRemoteClient(); + LinkisMetaDataRemoteClient metaDataRemoteClient = definition.getMetaDataRemoteClient(); tables = metaDataRemoteClient.getTables(MetadataGetTablesAction.builder() .setSystem(type) .setDataSourceId(id) @@ -586,8 +581,8 @@ public Message getJobDataSourceSettingsUI(Long jobId, String jobName) throws Exc } public Message queryDataSourceDBTableFields(HttpServletRequest request, String type, Long id, String dbName, String tableName) throws Exception { - ExchangisDataSource exchangisDataSource = context.getExchangisDataSource(type); - LinkisMetaDataRemoteClient metaDataRemoteClient = exchangisDataSource.getMetaDataRemoteClient(); + ExchangisDataSourceDefinition definition = 
context.getExchangisDsDefinition(type); + LinkisMetaDataRemoteClient metaDataRemoteClient = definition.getMetaDataRemoteClient(); String user = UserUtils.getLoginUser(request); LOGGER.info("queryDataSourceDBTableFields userName:" + user); @@ -671,10 +666,10 @@ public Message queryDataSources(HttpServletRequest request, DataSourceQueryVO vo List allDataSource = result.getAllDataSource(); - List originDataSources = new ArrayList<>(); - List dataSources = new ArrayList<>(); + List originDataSources = new ArrayList<>(); + List dataSources = new ArrayList<>(); allDataSource.forEach(ds -> { - DataSourceDTO item = new DataSourceDTO(); + ExchangisDataSourceDTO item = new ExchangisDataSourceDTO(); item.setId(ds.getId()); item.setCreateIdentify(ds.getCreateIdentify()); item.setName(ds.getDataSourceName()); @@ -701,13 +696,13 @@ public Message queryDataSources(HttpServletRequest request, DataSourceQueryVO vo LOGGER.info("originDatasource is: {}", originDataSources); if (direct!=null) { if ("source".equals(direct)) { - for (DataSourceDTO originDataSource : originDataSources) { + for (ExchangisDataSourceDTO originDataSource : originDataSources) { if (originDataSource.isReadAble()) { dataSources.add(originDataSource); } } } else if ("sink".equals(direct)) { - for (DataSourceDTO originDataSource : originDataSources) { + for (ExchangisDataSourceDTO originDataSource : originDataSources) { if (originDataSource.isReadAble()) { dataSources.add(originDataSource); } @@ -758,10 +753,10 @@ public Message listAllDataSources(HttpServletRequest request, String typeName, L } catch (Exception e) { throw new ExchangisDataSourceException(ExchangisDataSourceExceptionCode.CLIENT_QUERY_DATASOURCE_ERROR.getCode(), e.getMessage()); } - List dataSources = new ArrayList<>(); + List dataSources = new ArrayList<>(); if (!Objects.isNull(allDataSource)) { allDataSource.forEach(ds -> { - DataSourceDTO item = new DataSourceDTO(); + ExchangisDataSourceDTO item = new ExchangisDataSourceDTO(); 
item.setId(ds.getId()); item.setCreateIdentify(ds.getCreateIdentify()); item.setName(ds.getDataSourceName()); diff --git a/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/service/impl/DefaultDataSourceRenderService.java b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/service/impl/DefaultDataSourceRenderService.java index ff870752b..6452c8071 100644 --- a/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/service/impl/DefaultDataSourceRenderService.java +++ b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/service/impl/DefaultDataSourceRenderService.java @@ -36,11 +36,13 @@ public class DefaultDataSourceRenderService implements DataSourceRenderService { @Override public ElementUI getPartitionAndRender(String userName, - Long dataSourceId, String database, String table, ElementUI.Type uiType) throws ExchangisDataSourceException { - List partitionKeys = metadataInfoService.getPartitionKeys(userName, dataSourceId, database, table); + Long dataSourceId, String database, String table, ElementUI.Type uiType, boolean tableNotExist) throws ExchangisDataSourceException { Map renderParams = new LinkedHashMap<>(); - List placeHolder = Arrays.asList(DEFAULT_PLACEHOLDER); - partitionKeys.forEach(partition -> renderParams.putIfAbsent(partition, placeHolder)); + if (!tableNotExist) { + List partitionKeys = metadataInfoService.getPartitionKeys(userName, dataSourceId, database, table); + List placeHolder = Arrays.asList(DEFAULT_PLACEHOLDER); + partitionKeys.forEach(partition -> renderParams.putIfAbsent(partition, placeHolder)); + } return elementUIFactory.createElement(uiType.name(), renderParams, Map.class); } } diff --git 
a/exchangis-datasource/exchangis-datasource-service/src/main/scala/com/webank/wedatasphere/exchangis/datasource/GetDataSourceInfoByIdAndVersionIdAction.scala b/exchangis-datasource/exchangis-datasource-service/src/main/scala/com/webank/wedatasphere/exchangis/datasource/GetDataSourceInfoByIdAndVersionIdAction.scala index b4329f88e..ac84231cd 100644 --- a/exchangis-datasource/exchangis-datasource-service/src/main/scala/com/webank/wedatasphere/exchangis/datasource/GetDataSourceInfoByIdAndVersionIdAction.scala +++ b/exchangis-datasource/exchangis-datasource-service/src/main/scala/com/webank/wedatasphere/exchangis/datasource/GetDataSourceInfoByIdAndVersionIdAction.scala @@ -49,7 +49,7 @@ object GetDataSourceInfoByIdAndVersionIdAction { } def build(): GetDataSourceInfoByIdAndVersionIdAction = { - if(dataSourceId == null) throw new DataSourceClientBuilderException("dataSourceId is needed!") + if(dataSourceId == 0L) throw new DataSourceClientBuilderException("dataSourceId is needed!") if(versionId == null) throw new DataSourceClientBuilderException("versionId is needed!") if(system == null) throw new DataSourceClientBuilderException("system is needed!") if(user == null) throw new DataSourceClientBuilderException("user is needed!") diff --git a/exchangis-datasource/exchangis-datasource-streamis/pom.xml b/exchangis-datasource/exchangis-datasource-streamis/pom.xml index 906e781a2..bf63d0db6 100644 --- a/exchangis-datasource/exchangis-datasource-streamis/pom.xml +++ b/exchangis-datasource/exchangis-datasource-streamis/pom.xml @@ -3,9 +3,10 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> - exchangis-datasource + exchangis com.webank.wedatasphere.exchangis - 1.1.2 + ${revision} + ../../pom.xml 4.0.0 @@ -20,9 +21,13 @@ com.webank.wedatasphere.exchangis exchangis-datasource-core - 1.1.2 + ${project.version} + + + com.webank.wedatasphere.exchangis + 
exchangis-datasource-linkis + ${project.version} - diff --git a/exchangis-datasource/exchangis-datasource-streamis/src/main/java/com/webank/wedatasphere/exchangis/datasource/streamis/ExchangisStreamisDataSource.java b/exchangis-datasource/exchangis-datasource-streamis/src/main/java/com/webank/wedatasphere/exchangis/datasource/streamis/ExchangisStreamisDataSource.java index a47a040ca..71b614f5a 100644 --- a/exchangis-datasource/exchangis-datasource-streamis/src/main/java/com/webank/wedatasphere/exchangis/datasource/streamis/ExchangisStreamisDataSource.java +++ b/exchangis-datasource/exchangis-datasource-streamis/src/main/java/com/webank/wedatasphere/exchangis/datasource/streamis/ExchangisStreamisDataSource.java @@ -1,40 +1,22 @@ package com.webank.wedatasphere.exchangis.datasource.streamis; -import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper; -import com.webank.wedatasphere.exchangis.dao.domain.ExchangisJobParamConfig; -import com.webank.wedatasphere.exchangis.dao.hook.MapperHook; -import com.webank.wedatasphere.exchangis.dao.mapper.ExchangisJobParamConfigMapper; -import com.webank.wedatasphere.exchangis.datasource.core.ExchangisDataSource; +import com.webank.wedatasphere.exchangis.datasource.core.AbstractExchangisDataSourceDefinition; +import com.webank.wedatasphere.exchangis.datasource.linkis.ExchangisLinkisRemoteClient; import org.apache.linkis.datasource.client.impl.LinkisDataSourceRemoteClient; import org.apache.linkis.datasource.client.impl.LinkisMetaDataRemoteClient; -import java.util.List; - -public abstract class ExchangisStreamisDataSource implements ExchangisDataSource { - - protected MapperHook mapperHook; - - @Override - public void setMapperHook(MapperHook mapperHook) { - this.mapperHook = mapperHook; - } - - protected List getDataSourceParamConfigs(String type) { - ExchangisJobParamConfigMapper exchangisJobParamConfigMapper = this.mapperHook.getExchangisJobParamConfigMapper(); - QueryWrapper queryWrapper = new QueryWrapper<>(); - 
queryWrapper.eq("type", type); - queryWrapper.eq("is_hidden", 0); - queryWrapper.eq("status", 1); - return exchangisJobParamConfigMapper.selectList(queryWrapper); - } +/** + * Exchangis streamis data source + */ +public abstract class ExchangisStreamisDataSource extends AbstractExchangisDataSourceDefinition { @Override public LinkisDataSourceRemoteClient getDataSourceRemoteClient() { - return ExchangisStreamisRemoteClient.getStreamisDataSourceRemoteClient(); + return ExchangisLinkisRemoteClient.getLinkisDataSourceRemoteClient(); } @Override public LinkisMetaDataRemoteClient getMetaDataRemoteClient() { - return ExchangisStreamisRemoteClient.getStreamisMetadataRemoteClient(); + return ExchangisLinkisRemoteClient.getLinkisMetadataRemoteClient(); } } diff --git a/exchangis-datasource/exchangis-datasource-streamis/src/main/scala/com/webank/wedatasphere/exchangis/datasource/streamis/ExchangisStreamisRemoteClient.scala b/exchangis-datasource/exchangis-datasource-streamis/src/main/scala/com/webank/wedatasphere/exchangis/datasource/streamis/ExchangisStreamisRemoteClient.scala deleted file mode 100644 index 0085608bb..000000000 --- a/exchangis-datasource/exchangis-datasource-streamis/src/main/scala/com/webank/wedatasphere/exchangis/datasource/streamis/ExchangisStreamisRemoteClient.scala +++ /dev/null @@ -1,172 +0,0 @@ -package com.webank.wedatasphere.exchangis.datasource.streamis - -import com.webank.wedatasphere.exchangis.datasource.core.ExchangisDataSourceConfiguration -import java.lang -import java.util.concurrent.TimeUnit - -import org.apache.linkis.datasource.client.impl.{LinkisDataSourceRemoteClient, LinkisMetaDataRemoteClient} -import org.apache.linkis.datasource.client.request.{GetAllDataSourceTypesAction, GetConnectParamsByDataSourceIdAction, MetadataGetColumnsAction, MetadataGetDatabasesAction, MetadataGetTablesAction, QueryDataSourceAction} -import org.apache.linkis.datasource.client.response.{GetConnectParamsByDataSourceIdResult, MetadataGetColumnsResult, 
MetadataGetDatabasesResult, MetadataGetTablesResult, QueryDataSourceResult} -import org.apache.linkis.datasourcemanager.common.domain.{DataSource, DataSourceType} -import org.apache.linkis.httpclient.dws.authentication.StaticAuthenticationStrategy -import org.apache.linkis.httpclient.dws.config.{DWSClientConfig, DWSClientConfigBuilder} - -object ExchangisStreamisRemoteClient { - //Linkis Datasource Client Config - val serverUrl: String = ExchangisDataSourceConfiguration.SERVER_URL.getValue - val connectionTimeout: lang.Long = ExchangisDataSourceConfiguration.CONNECTION_TIMEOUT.getValue - val discoveryEnabled: lang.Boolean = ExchangisDataSourceConfiguration.DISCOVERY_ENABLED.getValue - val discoveryFrequencyPeriod: lang.Long = ExchangisDataSourceConfiguration.DISCOVERY_FREQUENCY_PERIOD.getValue - val loadbalancerEnabled: lang.Boolean = ExchangisDataSourceConfiguration.LOAD_BALANCER_ENABLED.getValue - val maxConnectionSize: Integer = ExchangisDataSourceConfiguration.MAX_CONNECTION_SIZE.getValue - val retryEnabled: lang.Boolean = ExchangisDataSourceConfiguration.RETRY_ENABLED.getValue - val readTimeout: lang.Long = ExchangisDataSourceConfiguration.READ_TIMEOUT.getValue - val authTokenKey: String = ExchangisDataSourceConfiguration.AUTHTOKEN_KEY.getValue - val authTokenValue: String = ExchangisDataSourceConfiguration.AUTHTOKEN_VALUE.getValue - val dwsVersion: String = ExchangisDataSourceConfiguration.DWS_VERSION.getValue - - - // val clientConfig = DWSClientConfigBuilder.newBuilder() - // .addServerUrl(serverUrl) - // .connectionTimeout(connectionTimeout) - // .discoveryEnabled(discoveryEnabled) - // .discoveryFrequency(1,TimeUnit.MINUTES) - // .loadbalancerEnabled(loadbalancerEnabled) - // .maxConnectionSize(maxConnectionSize) - // .retryEnabled(retryEnabled) - // .readTimeout(readTimeout) - // .setAuthenticationStrategy(new StaticAuthenticationStrategy()) - // .setAuthTokenKey(authTokenKey) - // .setAuthTokenValue(authTokenValue) - // .setDWSVersion(dwsVersion) - // 
.build() - - val clientConfig: DWSClientConfig = DWSClientConfigBuilder.newBuilder() - .addServerUrl(serverUrl) - .connectionTimeout(connectionTimeout) - .discoveryEnabled(discoveryEnabled) - .discoveryFrequency(discoveryFrequencyPeriod, TimeUnit.MINUTES) - .loadbalancerEnabled(loadbalancerEnabled) - .maxConnectionSize(maxConnectionSize) - .retryEnabled(retryEnabled) - .readTimeout(readTimeout) - .setAuthenticationStrategy(new StaticAuthenticationStrategy()) - .setAuthTokenKey(authTokenKey) - .setAuthTokenValue(authTokenValue) - .setDWSVersion(dwsVersion) - .build() - - val dataSourceClient = new LinkisDataSourceRemoteClient(clientConfig) - - val metaDataClient = new LinkisMetaDataRemoteClient(clientConfig) - - def getStreamisDataSourceRemoteClient: LinkisDataSourceRemoteClient = { - dataSourceClient - } - - def getStreamisMetadataRemoteClient: LinkisMetaDataRemoteClient = { - metaDataClient - } - - def close(): Unit = { - dataSourceClient.close() - metaDataClient.close() - } - - def queryDataSource(linkisDatasourceName: String): QueryDataSourceResult = { - dataSourceClient.queryDataSource(QueryDataSourceAction.builder() - .setSystem("") - .setName(linkisDatasourceName) - .setTypeId(1) - .setIdentifies("") - .setCurrentPage(1) - .setUser("hadoop") - .setPageSize(1).build() - ) - } - - /** - * get datasourceConnect information - * - * @param dataSourceId id - * @param system dssSystem - * @param user username - * @return - */ - def queryConnectParams(dataSourceId: Long, system: String, user: String): GetConnectParamsByDataSourceIdResult = { - dataSourceClient.getConnectParams(GetConnectParamsByDataSourceIdAction.builder() - .setDataSourceId(dataSourceId) - .setSystem(system) - .setUser(user) - .build() - ) - } - - /** - * get all DataSourceTypes - * - * @param user user - * @return - */ - def queryDataSourceTypes(user: String): java.util.List[DataSourceType] = { - dataSourceClient.getAllDataSourceTypes(GetAllDataSourceTypesAction.builder() - .setUser(user) - 
.build() - ).getAllDataSourceType - } - - - def queryClusterByDataSourceType(system: String, name: String, typeId: Long, user: String): java.util.List[DataSource] = { - dataSourceClient.queryDataSource(QueryDataSourceAction.builder() - .setSystem(system) - .setName(name) - .setTypeId(typeId) - .setIdentifies("") - .setCurrentPage(1) - .setPageSize(10) - .setUser(user) - .build() - ).getAllDataSource - } - - - /** - * get DataBases list - * - * @param system - * @param dataSourceId - * @param user - * @return list - */ - def queryDataBasesByCuster(system: String, dataSourceId: Long, user: String): MetadataGetDatabasesResult = { - metaDataClient.getDatabases(MetadataGetDatabasesAction.builder() - .setSystem(system) - .setDataSourceId(dataSourceId) - .setUser(user) - .build() - ) - } - - def queryTablesByDataBase(system: String, dataSourceId: Long, dataBase: String, user: String): MetadataGetTablesResult = { - metaDataClient.getTables(MetadataGetTablesAction.builder() - .setSystem(system) - .setDataSourceId(dataSourceId) - .setDatabase(dataBase) - .setUser(user) - .build() - ) - } - - def queryColumnsByTable(system: String, dataSourceId: Long, dataBase: String, table: String, user: String): MetadataGetColumnsResult = { - metaDataClient.getColumns(MetadataGetColumnsAction.builder() - .setSystem(system) - .setDataSourceId(dataSourceId) - .setDatabase(dataBase) - .setTable(table) - .setUser(user) - .build() - ) - } - - -} diff --git a/exchangis-datasource/extension-datasources/exchangis-datasource-ext-elasticsearch/pom.xml b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-elasticsearch/pom.xml index a92fa201c..0ea794a53 100644 --- a/exchangis-datasource/extension-datasources/exchangis-datasource-ext-elasticsearch/pom.xml +++ b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-elasticsearch/pom.xml @@ -3,10 +3,10 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 
http://maven.apache.org/xsd/maven-4.0.0.xsd"> - exchangis-datasource + exchangis com.webank.wedatasphere.exchangis - 1.1.2 - ../../pom.xml + ${revision} + ../../../pom.xml 4.0.0 @@ -21,17 +21,17 @@ com.webank.wedatasphere.exchangis exchangis-dao - 1.1.2 + ${project.version} com.webank.wedatasphere.exchangis exchangis-datasource-linkis - 1.1.2 + ${project.version} com.webank.wedatasphere.exchangis exchangis-datasource-core - 1.1.2 + ${project.version} diff --git a/exchangis-datasource/extension-datasources/exchangis-datasource-ext-elasticsearch/src/main/java/com/webank/wedatasphere/exchangis/extension/datasource/mysql/ExchangisESDataSource.java b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-elasticsearch/src/main/java/com/webank/wedatasphere/exchangis/extension/datasource/mysql/ExchangisESDataSource.java index 607af79de..a049d07ce 100644 --- a/exchangis-datasource/extension-datasources/exchangis-datasource-ext-elasticsearch/src/main/java/com/webank/wedatasphere/exchangis/extension/datasource/mysql/ExchangisESDataSource.java +++ b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-elasticsearch/src/main/java/com/webank/wedatasphere/exchangis/extension/datasource/mysql/ExchangisESDataSource.java @@ -1,23 +1,17 @@ package com.webank.wedatasphere.exchangis.extension.datasource.mysql; -import com.webank.wedatasphere.exchangis.dao.domain.ExchangisJobParamConfig; -import com.webank.wedatasphere.exchangis.datasource.core.domain.Classifier; -import com.webank.wedatasphere.exchangis.datasource.core.domain.DataSourceType; +import com.webank.wedatasphere.exchangis.datasource.core.domain.ExchangisDataSourceType; import com.webank.wedatasphere.exchangis.datasource.core.domain.StructClassifier; import com.webank.wedatasphere.exchangis.datasource.linkis.ExchangisBatchDataSource; -import java.util.List; - +/** + * Note: ES data source + */ public class ExchangisESDataSource extends ExchangisBatchDataSource { @Override - public String name() { 
- return DataSourceType.ELASTICSEARCH.name; - } - - @Override - public String classifier() { - return Classifier.ELASTICSEARCH.name; + protected ExchangisDataSourceType type() { + return ExchangisDataSourceType.ELASTICSEARCH; } @Override @@ -40,8 +34,4 @@ public String icon() { return "icon-es"; } - @Override - public List getDataSourceParamConfigs() { - return super.getDataSourceParamConfigs(DataSourceType.ELASTICSEARCH.name); - } } \ No newline at end of file diff --git a/exchangis-datasource/extension-datasources/exchangis-datasource-ext-hive/pom.xml b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-hive/pom.xml index c1e258f85..3f4323ab1 100644 --- a/exchangis-datasource/extension-datasources/exchangis-datasource-ext-hive/pom.xml +++ b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-hive/pom.xml @@ -3,10 +3,10 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> - exchangis-datasource + exchangis com.webank.wedatasphere.exchangis - 1.1.2 - ../../pom.xml + ${revision} + ../../../pom.xml 4.0.0 @@ -22,17 +22,17 @@ com.webank.wedatasphere.exchangis exchangis-dao - 1.1.2 + ${project.version} com.webank.wedatasphere.exchangis exchangis-datasource-linkis - 1.1.2 + ${project.version} com.webank.wedatasphere.exchangis exchangis-dao - 1.1.2 + ${project.version} diff --git a/exchangis-datasource/extension-datasources/exchangis-datasource-ext-hive/src/main/java/com/webank/wedatasphere/exchangis/extension/datasource/hive/ExchangisHiveDataSource.java b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-hive/src/main/java/com/webank/wedatasphere/exchangis/extension/datasource/hive/ExchangisHiveDataSource.java index e9ce77738..c59cfe277 100644 --- 
a/exchangis-datasource/extension-datasources/exchangis-datasource-ext-hive/src/main/java/com/webank/wedatasphere/exchangis/extension/datasource/hive/ExchangisHiveDataSource.java +++ b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-hive/src/main/java/com/webank/wedatasphere/exchangis/extension/datasource/hive/ExchangisHiveDataSource.java @@ -1,23 +1,17 @@ package com.webank.wedatasphere.exchangis.extension.datasource.hive; -import com.webank.wedatasphere.exchangis.dao.domain.ExchangisJobParamConfig; -import com.webank.wedatasphere.exchangis.datasource.core.domain.Classifier; -import com.webank.wedatasphere.exchangis.datasource.core.domain.DataSourceType; +import com.webank.wedatasphere.exchangis.datasource.core.domain.ExchangisDataSourceType; import com.webank.wedatasphere.exchangis.datasource.core.domain.StructClassifier; import com.webank.wedatasphere.exchangis.datasource.linkis.ExchangisBatchDataSource; -import java.util.List; - +/** + * Note: Hive data source + */ public class ExchangisHiveDataSource extends ExchangisBatchDataSource { @Override - public String name() { - return DataSourceType.HIVE.name; - } - - @Override - public String classifier() { - return Classifier.HIVE.name; + protected ExchangisDataSourceType type() { + return ExchangisDataSourceType.HIVE; } @Override @@ -40,8 +34,4 @@ public String icon() { return "icon-hive"; } - @Override - public List getDataSourceParamConfigs() { - return super.getDataSourceParamConfigs(DataSourceType.HIVE.name); - } } diff --git a/exchangis-datasource/extension-datasources/exchangis-datasource-ext-mongodb/pom.xml b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-mongodb/pom.xml index a26312d6e..7bbd306c7 100644 --- a/exchangis-datasource/extension-datasources/exchangis-datasource-ext-mongodb/pom.xml +++ b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-mongodb/pom.xml @@ -3,10 +3,10 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" 
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> - exchangis-datasource + exchangis com.webank.wedatasphere.exchangis - 1.1.2 - ../../pom.xml + ${revision} + ../../../pom.xml 4.0.0 @@ -21,17 +21,17 @@ com.webank.wedatasphere.exchangis exchangis-dao - 1.1.2 + ${project.version} com.webank.wedatasphere.exchangis exchangis-datasource-linkis - 1.1.2 + ${project.version} com.webank.wedatasphere.exchangis exchangis-datasource-core - 1.1.2 + ${project.version} diff --git a/exchangis-datasource/extension-datasources/exchangis-datasource-ext-mongodb/src/main/java/com/webank/wedatasphere/exchangis/extension/datasource/mysql/ExchangisMongoDbDataSource.java b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-mongodb/src/main/java/com/webank/wedatasphere/exchangis/extension/datasource/mysql/ExchangisMongoDbDataSource.java index 269efce87..0fec6da36 100644 --- a/exchangis-datasource/extension-datasources/exchangis-datasource-ext-mongodb/src/main/java/com/webank/wedatasphere/exchangis/extension/datasource/mysql/ExchangisMongoDbDataSource.java +++ b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-mongodb/src/main/java/com/webank/wedatasphere/exchangis/extension/datasource/mysql/ExchangisMongoDbDataSource.java @@ -1,23 +1,17 @@ package com.webank.wedatasphere.exchangis.extension.datasource.mysql; -import com.webank.wedatasphere.exchangis.dao.domain.ExchangisJobParamConfig; -import com.webank.wedatasphere.exchangis.datasource.core.domain.Classifier; -import com.webank.wedatasphere.exchangis.datasource.core.domain.DataSourceType; +import com.webank.wedatasphere.exchangis.datasource.core.domain.ExchangisDataSourceType; import com.webank.wedatasphere.exchangis.datasource.core.domain.StructClassifier; import com.webank.wedatasphere.exchangis.datasource.linkis.ExchangisBatchDataSource; -import java.util.List; - +/** + * Note: MongoDB data source + */ public class ExchangisMongoDbDataSource extends 
ExchangisBatchDataSource { @Override - public String name() { - return DataSourceType.MONGODB.name; - } - - @Override - public String classifier() { - return Classifier.MONGODB.name; + protected ExchangisDataSourceType type() { + return ExchangisDataSourceType.MONGODB; } @Override @@ -40,8 +34,4 @@ public String icon() { return "icon-mongodb"; } - @Override - public List getDataSourceParamConfigs() { - return super.getDataSourceParamConfigs(DataSourceType.MONGODB.name); - } } \ No newline at end of file diff --git a/exchangis-datasource/extension-datasources/exchangis-datasource-ext-mysql/pom.xml b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-mysql/pom.xml index 71bb30072..6b8bc388f 100644 --- a/exchangis-datasource/extension-datasources/exchangis-datasource-ext-mysql/pom.xml +++ b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-mysql/pom.xml @@ -3,10 +3,10 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> - exchangis-datasource + exchangis com.webank.wedatasphere.exchangis - 1.1.2 - ../../pom.xml + ${revision} + ../../../pom.xml 4.0.0 @@ -21,17 +21,17 @@ com.webank.wedatasphere.exchangis exchangis-dao - 1.1.2 + ${project.version} com.webank.wedatasphere.exchangis exchangis-datasource-linkis - 1.1.2 + ${project.version} com.webank.wedatasphere.exchangis exchangis-datasource-core - 1.1.2 + ${project.version} diff --git a/exchangis-datasource/extension-datasources/exchangis-datasource-ext-mysql/src/main/java/com/webank/wedatasphere/exchangis/extension/datasource/mysql/ExchangisMySQLDataSource.java b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-mysql/src/main/java/com/webank/wedatasphere/exchangis/extension/datasource/mysql/ExchangisMySQLDataSource.java index e05e790da..d818a1f65 100644 --- 
a/exchangis-datasource/extension-datasources/exchangis-datasource-ext-mysql/src/main/java/com/webank/wedatasphere/exchangis/extension/datasource/mysql/ExchangisMySQLDataSource.java +++ b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-mysql/src/main/java/com/webank/wedatasphere/exchangis/extension/datasource/mysql/ExchangisMySQLDataSource.java @@ -1,23 +1,18 @@ package com.webank.wedatasphere.exchangis.extension.datasource.mysql; -import com.webank.wedatasphere.exchangis.dao.domain.ExchangisJobParamConfig; -import com.webank.wedatasphere.exchangis.datasource.core.domain.Classifier; -import com.webank.wedatasphere.exchangis.datasource.core.domain.DataSourceType; +import com.webank.wedatasphere.exchangis.datasource.core.domain.ExchangisDataSourceType; import com.webank.wedatasphere.exchangis.datasource.core.domain.StructClassifier; import com.webank.wedatasphere.exchangis.datasource.linkis.ExchangisBatchDataSource; -import java.util.List; +/** + * Note: MYSQL data source + */ public class ExchangisMySQLDataSource extends ExchangisBatchDataSource { @Override - public String name() { - return DataSourceType.MYSQL.name; - } - - @Override - public String classifier() { - return Classifier.MYSQL.name; + protected ExchangisDataSourceType type() { + return ExchangisDataSourceType.MYSQL; } @Override @@ -40,8 +35,5 @@ public String icon() { return "icon-mysql"; } - @Override - public List getDataSourceParamConfigs() { - return super.getDataSourceParamConfigs(DataSourceType.MYSQL.name); - } + } \ No newline at end of file diff --git a/exchangis-datasource/extension-datasources/exchangis-datasource-ext-oracle/pom.xml b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-oracle/pom.xml index e3cce71da..8741dfdd7 100644 --- a/exchangis-datasource/extension-datasources/exchangis-datasource-ext-oracle/pom.xml +++ b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-oracle/pom.xml @@ -3,10 +3,10 @@ 
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> - exchangis-datasource + exchangis com.webank.wedatasphere.exchangis - 1.1.2 - ../../pom.xml + ${revision} + ../../../pom.xml 4.0.0 @@ -21,17 +21,17 @@ com.webank.wedatasphere.exchangis exchangis-dao - 1.1.2 + ${project.version} com.webank.wedatasphere.exchangis exchangis-datasource-linkis - 1.1.2 + ${project.version} com.webank.wedatasphere.exchangis exchangis-datasource-core - 1.1.2 + ${project.version} diff --git a/exchangis-datasource/extension-datasources/exchangis-datasource-ext-oracle/src/main/java/com/webank/wedatasphere/exchangis/extension/datasource/oracle/ExchangisOracleDataSource.java b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-oracle/src/main/java/com/webank/wedatasphere/exchangis/extension/datasource/oracle/ExchangisOracleDataSource.java index 8d3ca3adb..38d0c1a8b 100644 --- a/exchangis-datasource/extension-datasources/exchangis-datasource-ext-oracle/src/main/java/com/webank/wedatasphere/exchangis/extension/datasource/oracle/ExchangisOracleDataSource.java +++ b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-oracle/src/main/java/com/webank/wedatasphere/exchangis/extension/datasource/oracle/ExchangisOracleDataSource.java @@ -2,7 +2,7 @@ import com.webank.wedatasphere.exchangis.dao.domain.ExchangisJobParamConfig; import com.webank.wedatasphere.exchangis.datasource.core.domain.Classifier; -import com.webank.wedatasphere.exchangis.datasource.core.domain.DataSourceType; +import com.webank.wedatasphere.exchangis.datasource.core.domain.ExchangisDataSourceType; import com.webank.wedatasphere.exchangis.datasource.core.domain.StructClassifier; import com.webank.wedatasphere.exchangis.datasource.linkis.ExchangisBatchDataSource; @@ -13,9 +13,10 @@ * @create 2022-09-14 **/ public class ExchangisOracleDataSource extends ExchangisBatchDataSource { + @Override - public 
String name() { - return DataSourceType.ORACLE.name; + protected ExchangisDataSourceType type() { + return ExchangisDataSourceType.ORACLE; } @Override @@ -28,10 +29,6 @@ public String option() { return "Oracle数据库"; } - @Override - public String classifier() { - return Classifier.ORACLE.name; - } @Override public String structClassifier() { @@ -43,9 +40,5 @@ public String icon() { return "icon-oracle"; } - @Override - public List getDataSourceParamConfigs() { - return super.getDataSourceParamConfigs(DataSourceType.ORACLE.name); - } } diff --git a/exchangis-datasource/extension-datasources/exchangis-datasource-ext-sftp/pom.xml b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-sftp/pom.xml index 47af538ab..f8b375288 100644 --- a/exchangis-datasource/extension-datasources/exchangis-datasource-ext-sftp/pom.xml +++ b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-sftp/pom.xml @@ -3,10 +3,10 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> - exchangis-datasource + exchangis com.webank.wedatasphere.exchangis - 1.1.2 - ../../pom.xml + ${revision} + ../../../pom.xml 4.0.0 @@ -21,17 +21,17 @@ com.webank.wedatasphere.exchangis exchangis-dao - 1.1.2 + ${project.version} com.webank.wedatasphere.exchangis exchangis-datasource-linkis - 1.1.2 + ${project.version} com.webank.wedatasphere.exchangis exchangis-datasource-core - 1.1.2 + ${project.version} diff --git a/exchangis-datasource/extension-datasources/exchangis-datasource-ext-sftp/src/main/java/com/webank/wedatasphere/exchangis/extension/datasource/sftp/ExchangisSftpDataSource.java b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-sftp/src/main/java/com/webank/wedatasphere/exchangis/extension/datasource/sftp/ExchangisSftpDataSource.java index c8256dddb..b9ecdfbb4 100644 --- 
a/exchangis-datasource/extension-datasources/exchangis-datasource-ext-sftp/src/main/java/com/webank/wedatasphere/exchangis/extension/datasource/sftp/ExchangisSftpDataSource.java +++ b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-sftp/src/main/java/com/webank/wedatasphere/exchangis/extension/datasource/sftp/ExchangisSftpDataSource.java @@ -2,7 +2,7 @@ import com.webank.wedatasphere.exchangis.dao.domain.ExchangisJobParamConfig; import com.webank.wedatasphere.exchangis.datasource.core.domain.Classifier; -import com.webank.wedatasphere.exchangis.datasource.core.domain.DataSourceType; +import com.webank.wedatasphere.exchangis.datasource.core.domain.ExchangisDataSourceType; import com.webank.wedatasphere.exchangis.datasource.core.domain.StructClassifier; import com.webank.wedatasphere.exchangis.datasource.linkis.ExchangisBatchDataSource; @@ -11,13 +11,8 @@ public class ExchangisSftpDataSource extends ExchangisBatchDataSource { @Override - public String name() { - return DataSourceType.SFTP.name; - } - - @Override - public String classifier() { - return Classifier.SFTP.name; + protected ExchangisDataSourceType type() { + return ExchangisDataSourceType.SFTP; } @Override @@ -27,7 +22,7 @@ public String structClassifier() { @Override public String description() { - return "This is Sftp"; + return "This is sftp"; } @Override @@ -42,6 +37,7 @@ public String icon() { @Override public List getDataSourceParamConfigs() { - return super.getDataSourceParamConfigs(DataSourceType.SFTP.name); + return super.getDataSourceParamConfigs(ExchangisDataSourceType.SFTP.name); } + } diff --git a/exchangis-datasource/extension-datasources/exchangis-datasource-ext-starrocks/pom.xml b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-starrocks/pom.xml new file mode 100644 index 000000000..d04f4d53f --- /dev/null +++ b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-starrocks/pom.xml @@ -0,0 +1,54 @@ + + + + exchangis + 
com.webank.wedatasphere.exchangis + ${revision} + ../../../pom.xml + + 4.0.0 + + exchangis-datasource-ext-starrocks + + + 8 + 8 + + + + + com.webank.wedatasphere.exchangis + exchangis-dao + ${project.version} + + + com.webank.wedatasphere.exchangis + exchangis-datasource-linkis + ${project.version} + + + com.webank.wedatasphere.exchangis + exchangis-datasource-core + ${project.version} + + + + + + + org.apache.maven.plugins + maven-deploy-plugin + + + net.alchim31.maven + scala-maven-plugin + + + org.apache.maven.plugins + maven-jar-plugin + + + + \ No newline at end of file diff --git a/exchangis-datasource/extension-datasources/exchangis-datasource-ext-starrocks/src/main/java/com/webank/wedatasphere/exchangis/extension/datasource/starrocks/ExchangisStarRocksDataSource.java b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-starrocks/src/main/java/com/webank/wedatasphere/exchangis/extension/datasource/starrocks/ExchangisStarRocksDataSource.java new file mode 100644 index 000000000..f0732ba53 --- /dev/null +++ b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-starrocks/src/main/java/com/webank/wedatasphere/exchangis/extension/datasource/starrocks/ExchangisStarRocksDataSource.java @@ -0,0 +1,46 @@ +package com.webank.wedatasphere.exchangis.extension.datasource.starrocks; + +import com.webank.wedatasphere.exchangis.dao.domain.ExchangisJobParamConfig; +import com.webank.wedatasphere.exchangis.datasource.core.domain.ExchangisDataSourceType; +import com.webank.wedatasphere.exchangis.datasource.core.domain.StructClassifier; +import com.webank.wedatasphere.exchangis.datasource.linkis.ExchangisBatchDataSource; + +import java.util.List; + +/** + * @author jefftlin + * @date 2024/5/14 + */ +public class ExchangisStarRocksDataSource extends ExchangisBatchDataSource { + + @Override + protected ExchangisDataSourceType type() { + return ExchangisDataSourceType.STARROCKS; + } + @Override + public String description() { + return "This is 
StarRocks DataSource"; + } + + @Override + public String option() { + return "StarRocks数据库"; + } + + @Override + public String structClassifier() { + return StructClassifier.STRUCTURED.name; + } + + @Override + public String icon() { + return "icon-starrocks"; + } + + @Override + public List getDataSourceParamConfigs() { + return super.getDataSourceParamConfigs(ExchangisDataSourceType.STARROCKS.name); + } + + +} diff --git a/exchangis-datasource/extension-datasources/exchangis-datasource-ext-tdsql/pom.xml b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-tdsql/pom.xml new file mode 100644 index 000000000..7ba9ba944 --- /dev/null +++ b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-tdsql/pom.xml @@ -0,0 +1,38 @@ + + + + exchangis + com.webank.wedatasphere.exchangis + ${revision} + ../../../pom.xml + + 4.0.0 + + exchangis-datasource-ext-tdsql + + + 8 + 8 + + + + + com.webank.wedatasphere.exchangis + exchangis-dao + ${revision} + + + com.webank.wedatasphere.exchangis + exchangis-datasource-linkis + ${revision} + + + com.webank.wedatasphere.exchangis + exchangis-datasource-core + ${revision} + + + + \ No newline at end of file diff --git a/exchangis-datasource/extension-datasources/exchangis-datasource-ext-tdsql/src/main/java/com/webank/wedatasphere/exchangis/extension/datasource/tdsql/ExchangisTdsqlDataSource.java b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-tdsql/src/main/java/com/webank/wedatasphere/exchangis/extension/datasource/tdsql/ExchangisTdsqlDataSource.java new file mode 100644 index 000000000..2f5908022 --- /dev/null +++ b/exchangis-datasource/extension-datasources/exchangis-datasource-ext-tdsql/src/main/java/com/webank/wedatasphere/exchangis/extension/datasource/tdsql/ExchangisTdsqlDataSource.java @@ -0,0 +1,36 @@ +package com.webank.wedatasphere.exchangis.extension.datasource.tdsql; + +import com.webank.wedatasphere.exchangis.datasource.core.domain.ExchangisDataSourceType; +import 
com.webank.wedatasphere.exchangis.datasource.core.domain.StructClassifier; +import com.webank.wedatasphere.exchangis.datasource.linkis.ExchangisBatchDataSource; + +/** + * @author jefftlin + * @date 2024/5/27 + */ +public class ExchangisTdsqlDataSource extends ExchangisBatchDataSource { + + @Override + protected ExchangisDataSourceType type() { + return ExchangisDataSourceType.TDSQL; + } + @Override + public String description() { + return "This is tdsql DataSource"; + } + + @Override + public String option() { + return "Tdsql数据库"; + } + + @Override + public String structClassifier() { + return StructClassifier.STRUCTURED.name; + } + + @Override + public String icon() { + return "icon-tdsql"; + } +} \ No newline at end of file diff --git a/exchangis-datasource/pom.xml b/exchangis-datasource/pom.xml index ad1bd9fae..b22e15b49 100644 --- a/exchangis-datasource/pom.xml +++ b/exchangis-datasource/pom.xml @@ -5,7 +5,8 @@ exchangis com.webank.wedatasphere.exchangis - 1.1.2 + ${revision} + ../pom.xml 4.0.0 @@ -15,8 +16,8 @@ exchangis-datasource-core exchangis-datasource-loader - exchangis-datasource-streamis exchangis-datasource-linkis + exchangis-datasource-streamis exchangis-datasource-service extension-datasources/exchangis-datasource-ext-mysql extension-datasources/exchangis-datasource-ext-hive @@ -24,6 +25,8 @@ extension-datasources/exchangis-datasource-ext-elasticsearch extension-datasources/exchangis-datasource-ext-mongodb extension-datasources/exchangis-datasource-ext-oracle + extension-datasources/exchangis-datasource-ext-starrocks + extension-datasources/exchangis-datasource-ext-tdsql exchangis-datasource-server diff --git a/exchangis-engines/engineconn-plugins/datax/pom.xml b/exchangis-engines/engineconn-plugins/datax/pom.xml index dd10e755f..cd9b069df 100644 --- a/exchangis-engines/engineconn-plugins/datax/pom.xml +++ b/exchangis-engines/engineconn-plugins/datax/pom.xml @@ -3,10 +3,10 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" 
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> - exchangis-engines + exchangis com.webank.wedatasphere.exchangis - 1.1.2 - ../../pom.xml + ${revision} + ../../../pom.xml 4.0.0 @@ -116,6 +116,27 @@ linkis-udf-client 1.4.0 + + org.apache.linkis + linkis-module + + + org.springframework.cloud + spring-cloud-commons + + + + + + org.jasypt + jasypt + ${jasypt.version} + + + com.fasterxml.jackson.core + jackson-core + ${jackson-core.version} + diff --git a/exchangis-engines/engineconn-plugins/datax/src/main/assembly/distribution.xml b/exchangis-engines/engineconn-plugins/datax/src/main/assembly/distribution.xml index 6c517ef5b..dee47b9d9 100644 --- a/exchangis-engines/engineconn-plugins/datax/src/main/assembly/distribution.xml +++ b/exchangis-engines/engineconn-plugins/datax/src/main/assembly/distribution.xml @@ -39,7 +39,6 @@ false false true - antlr:antlr:jar aopalliance:aopalliance:jar diff --git a/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/config/DataxCoreConfiguration.scala b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/config/DataxCoreConfiguration.scala index 01907434f..29aa81749 100644 --- a/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/config/DataxCoreConfiguration.scala +++ b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/config/DataxCoreConfiguration.scala @@ -91,7 +91,7 @@ object DataxCoreConfiguration { /** * Stream channel class */ - val CORE_TRANSPORT_STREAM_CHANNEL_CLASS: CommonVars[String] = CommonVars(CoreConstant.DATAX_CORE_TRANSPORT_STREAM_CHANNEL_CLASS, "com.webank.wedatasphere.exchangis.datax.core.transport.channel.memory.MemoryStreamChannel") + val CORE_TRANSPORT_STREAM_CHANNEL_CLASS: CommonVars[String] = CommonVars(CoreConstant.DATAX_CORE_TRANSPORT_STREAM_CHANNEL_CLASS, 
"com.alibaba.datax.core.transport.channel.memory.MemoryStreamChannel") /** * Block size of stream channel diff --git a/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/config/DataxSpringConfiguration.scala b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/config/DataxSpringConfiguration.scala new file mode 100644 index 000000000..06609c615 --- /dev/null +++ b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/config/DataxSpringConfiguration.scala @@ -0,0 +1,23 @@ +package org.apache.linkis.engineconnplugin.datax.config + +import org.apache.linkis.common.utils.Logging +import org.apache.linkis.engineconn.acessible.executor.info.NodeHeartbeatMsgManager +import org.apache.linkis.engineconnplugin.datax.service.DataxHeartbeatMsgManager +import org.springframework.context.annotation.{Bean, Configuration, Primary} + +/** + * Spring configuration for datax + */ +@Configuration +class DataxSpringConfiguration extends Logging { + + /** + * Override the heartbeat manager + * @return + */ + @Bean + @Primary + def nodeHeartbeatMsgManager(): NodeHeartbeatMsgManager = { + new DataxHeartbeatMsgManager() + } +} diff --git a/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/executor/DataxContainerOnceExecutor.scala b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/executor/DataxContainerOnceExecutor.scala index 9cec013ac..5b118110a 100644 --- a/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/executor/DataxContainerOnceExecutor.scala +++ b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/executor/DataxContainerOnceExecutor.scala @@ -26,7 +26,9 @@ import com.alibaba.datax.core.util.container.{CoreConstant, LoadUtil} import 
com.alibaba.datax.core.util.{ConfigurationValidate, ExceptionTracker, FrameworkErrorCode, SecretUtil} import org.apache.commons.lang3.StringUtils import org.apache.linkis.common.utils.{ClassUtils, Utils} +import org.apache.linkis.engineconn.acessible.executor.service.{ExecutorHeartbeatService, ExecutorHeartbeatServiceHolder} import org.apache.linkis.engineconn.common.creation.EngineCreationContext +import org.apache.linkis.engineconn.executor.service.ManagerService import org.apache.linkis.engineconn.once.executor.{OnceExecutorExecutionContext, OperableOnceExecutor} import org.apache.linkis.engineconnplugin.datax.config.DataxConfiguration import org.apache.linkis.engineconnplugin.datax.exception.{DataxJobExecutionException, DataxPluginLoadException} @@ -78,15 +80,17 @@ abstract class DataxContainerOnceExecutor extends DataxOnceExecutor with Operabl override def run(): Unit = { val params: util.Map[String, Object] = onceExecutorExecutionContext.getOnceExecutorContent.getJobContent val result = execute(params, onceExecutorExecutionContext.getEngineCreationContext) + info(s"The executor: [${getId}] has been finished, now to stop DataxEngineConn.") + closeDaemon() if (result._1 != 0) { isFailed = true - tryFailed() val message = s"Exec Datax engine conn occurred error, with exit code: [${result._1}]" setResponse(ErrorExecuteResponse(message, new DataxJobExecutionException(message, result._2))) + tryFailed() } - info(s"The executor: [${getId}] has been finished, now to stop DataxEngineConn.") - closeDaemon() if (!isFailed) { + // Try to heartbeat at last + tryToHeartbeat() trySucceed() } this synchronized notify() @@ -102,6 +106,8 @@ abstract class DataxContainerOnceExecutor extends DataxOnceExecutor with Operabl override def run(): Unit = { if (!(future.isDone || future.isCancelled)) { trace(s"The executor: [$getId] has been still running") + // Heartbeat action interval + tryToHeartbeat() } } }, DataxConfiguration.STATUS_FETCH_INTERVAL.getValue.toLong, @@ -143,6 
+149,10 @@ abstract class DataxContainerOnceExecutor extends DataxOnceExecutor with Operabl metrics } + def getMessage(key: String):util.Map[String, util.List[String]] = { + null + } + override def getDiagnosis: util.Map[String, Any] = { // Not support diagnosis new util.HashMap[String, Any]() @@ -156,6 +166,19 @@ abstract class DataxContainerOnceExecutor extends DataxOnceExecutor with Operabl // Option(this.container).foreach(_.shutdown()) super.tryFailed() } + + /** + * Try to send heartbeat message to ecm + */ + private def tryToHeartbeat(): Unit = { + logger.trace("heartbeat and record to linkis manager") + ExecutorHeartbeatServiceHolder.getDefaultHeartbeatService() match { + case heartbeatService: ExecutorHeartbeatService => + val heartbeatMsg = heartbeatService.generateHeartBeatMsg(this) + ManagerService.getManagerService.heartbeatReport(heartbeatMsg) + logger.trace(s"Succeed to report heartbeatMsg: [${heartbeatMsg}]") + } + } /** * Execute with job content * @param jobContent job content @@ -234,6 +257,10 @@ abstract class DataxContainerOnceExecutor extends DataxOnceExecutor with Operabl private def setPluginConfig(self: Configuration): Unit = { val plugins: util.Map[String, Configuration] = dataxEngineConnContext .getPluginDefinitions.asScala.map(define => (define.getPluginName, define.getPluginConf)).toMap.asJava + info(s"content is ${dataxEngineConnContext.toString}") + dataxEngineConnContext.getPluginDefinitions.asScala.foreach { definition => + info(s"PluginName: ${definition.getPluginName}, pluginConf: ${definition.getPluginConf}, pluginPath: ${definition.getPluginPath}") + } val pluginsNeed: util.Map[String, Configuration] = new util.HashMap() Option(self.getString(CoreConstant.DATAX_JOB_CONTENT_READER_NAME)).foreach(readerPlugin => pluginsNeed.put(readerPlugin, plugins.get(readerPlugin))) Option(self.getString(CoreConstant.DATAX_JOB_CONTENT_WRITER_NAME)).foreach(writerPlugin => pluginsNeed.put(writerPlugin, plugins.get(writerPlugin))) diff --git 
a/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/service/DataxHeartbeatMsgManager.scala b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/service/DataxHeartbeatMsgManager.scala new file mode 100644 index 000000000..26b26392c --- /dev/null +++ b/exchangis-engines/engineconn-plugins/datax/src/main/scala/org/apache/linkis/engineconnplugin/datax/service/DataxHeartbeatMsgManager.scala @@ -0,0 +1,30 @@ +package org.apache.linkis.engineconnplugin.datax.service + +import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.engineconn.acessible.executor.info.NodeHeartbeatMsgManager +import org.apache.linkis.engineconn.executor.entity.Executor +import org.apache.linkis.engineconnplugin.datax.executor.DataxContainerOnceExecutor +import org.apache.linkis.server.BDPJettyServerHelper + +import scala.collection.JavaConverters.mapAsScalaMapConverter + +/** + * Datax heartbeat message (include: metric, error message) + */ +class DataxHeartbeatMsgManager extends NodeHeartbeatMsgManager with Logging{ + override def getHeartBeatMsg(executor: Executor): String = { + executor match { + case dataxExecutor: DataxContainerOnceExecutor => + val metric = dataxExecutor.getMetrics + Utils.tryCatch(BDPJettyServerHelper.gson.toJson(metric)) { case e: Exception => + val mV = metric.asScala + .map { case (k, v) => if (null == v) s"${k}->null" else s"${k}->${v.toString}" } + .mkString(",") + val errMsg = e.getMessage + logger.error(s"Convert metric to json failed because : ${errMsg}, metric values : {${mV}}") + "{\"errorMsg\":\"Convert metric to json failed because : " + errMsg + "\"}" + } + case _ => "{}" + } + } +} diff --git a/exchangis-engines/engineconn-plugins/sqoop/pom.xml b/exchangis-engines/engineconn-plugins/sqoop/pom.xml index ee8398bf7..bd6afb7d2 100644 --- a/exchangis-engines/engineconn-plugins/sqoop/pom.xml +++ b/exchangis-engines/engineconn-plugins/sqoop/pom.xml 
@@ -20,10 +20,10 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> - exchangis-engines + exchangis com.webank.wedatasphere.exchangis - 1.1.2 - ../../pom.xml + ${revision} + ../../../pom.xml 4.0.0 diff --git a/exchangis-engines/engines/datax/datax-assembly/pom.xml b/exchangis-engines/engines/datax/datax-assembly/pom.xml index 74b755512..7f9873a45 100644 --- a/exchangis-engines/engines/datax/datax-assembly/pom.xml +++ b/exchangis-engines/engines/datax/datax-assembly/pom.xml @@ -5,7 +5,7 @@ exchangis-engine-datax com.webank.wedatasphere.exchangis - 1.1.2 + ${revision} ../pom.xml 4.0.0 diff --git a/exchangis-engines/engines/datax/datax-core/pom.xml b/exchangis-engines/engines/datax/datax-core/pom.xml index 48be143d3..d7d17f09d 100644 --- a/exchangis-engines/engines/datax/datax-core/pom.xml +++ b/exchangis-engines/engines/datax/datax-core/pom.xml @@ -5,7 +5,7 @@ exchangis-engine-datax com.webank.wedatasphere.exchangis - 1.1.2 + ${revision} ../pom.xml 4.0.0 @@ -147,7 +147,7 @@ org.apache.hadoop hadoop-common - ${hadoop.version} + 3.3.4 org.apache.commons @@ -198,6 +198,15 @@ core + + org.apache.maven.plugins + maven-compiler-plugin + 2.5.1 + + ${jdk.compile.version} + ${jdk.compile.version} + + \ No newline at end of file diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/element/BoolColumn.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/element/BoolColumn.java index ab4ba98fe..ee5fca453 100644 --- a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/element/BoolColumn.java +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/element/BoolColumn.java @@ -13,7 +13,7 @@ public class BoolColumn extends Column { public BoolColumn(Boolean bool) { - super(bool, Column.Type.BOOL, 1); + super(bool, Column.Type.BOOLEAN, 1); } 
public BoolColumn(final String data) { @@ -30,7 +30,7 @@ public BoolColumn(final String data) { } public BoolColumn() { - super(null, Column.Type.BOOL, 1); + super(null, Column.Type.BOOLEAN, 1); } @Override diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/element/Column.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/element/Column.java index c5a121bd3..829fd520c 100644 --- a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/element/Column.java +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/common/element/Column.java @@ -71,6 +71,6 @@ public String toString() { } public enum Type { - BAD, NULL, INT, LONG, DOUBLE, STRING, BOOL, DATE, BYTES + BAD, NULL, INT, LONG, DOUBLE, STRING, BOOLEAN, DATE, BYTES } } diff --git a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/job/JobContainer.java b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/job/JobContainer.java index c71523ece..73d1aceba 100644 --- a/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/job/JobContainer.java +++ b/exchangis-engines/engines/datax/datax-core/src/main/java/com/alibaba/datax/core/job/JobContainer.java @@ -37,8 +37,8 @@ import com.webank.wedatasphere.exchangis.datax.util.Json; import org.apache.commons.io.FileUtils; import org.apache.commons.io.filefilter.FileFileFilter; -import org.apache.commons.lang.StringUtils; -import org.apache.commons.lang.Validate; +import org.apache.commons.lang3.StringUtils; +import org.apache.commons.lang3.Validate; import org.apache.http.client.methods.HttpPost; import org.apache.http.entity.StringEntity; import org.slf4j.Logger; diff --git a/exchangis-engines/engines/datax/datax-elasticsearchwriter/pom.xml b/exchangis-engines/engines/datax/datax-elasticsearchwriter/pom.xml index 357a9b855..11adfd18a 100644 --- 
a/exchangis-engines/engines/datax/datax-elasticsearchwriter/pom.xml +++ b/exchangis-engines/engines/datax/datax-elasticsearchwriter/pom.xml @@ -5,7 +5,7 @@ exchangis-engine-datax com.webank.wedatasphere.exchangis - 1.1.2 + ${revision} ../pom.xml 4.0.0 diff --git a/exchangis-engines/engines/datax/datax-ftpreader/pom.xml b/exchangis-engines/engines/datax/datax-ftpreader/pom.xml index 8520f0145..8912e7745 100644 --- a/exchangis-engines/engines/datax/datax-ftpreader/pom.xml +++ b/exchangis-engines/engines/datax/datax-ftpreader/pom.xml @@ -5,7 +5,7 @@ exchangis-engine-datax com.webank.wedatasphere.exchangis - 1.1.2 + ${revision} ../pom.xml 4.0.0 diff --git a/exchangis-engines/engines/datax/datax-ftpwriter/pom.xml b/exchangis-engines/engines/datax/datax-ftpwriter/pom.xml index ec0635c29..d738c01d9 100644 --- a/exchangis-engines/engines/datax/datax-ftpwriter/pom.xml +++ b/exchangis-engines/engines/datax/datax-ftpwriter/pom.xml @@ -5,7 +5,7 @@ exchangis-engine-datax com.webank.wedatasphere.exchangis - 1.1.2 + ${revision} ../pom.xml 4.0.0 diff --git a/exchangis-engines/engines/datax/datax-hdfsreader/pom.xml b/exchangis-engines/engines/datax/datax-hdfsreader/pom.xml index 81789b25b..853f56e87 100644 --- a/exchangis-engines/engines/datax/datax-hdfsreader/pom.xml +++ b/exchangis-engines/engines/datax/datax-hdfsreader/pom.xml @@ -5,7 +5,7 @@ exchangis-engine-datax com.webank.wedatasphere.exchangis - 1.1.2 + ${revision} ../pom.xml 4.0.0 diff --git a/exchangis-engines/engines/datax/datax-hdfsreader/src/main/java/com/alibaba/datax/plugin/reader/hdfsreader/HdfsReaderUtil.java b/exchangis-engines/engines/datax/datax-hdfsreader/src/main/java/com/alibaba/datax/plugin/reader/hdfsreader/HdfsReaderUtil.java index 7738c9ce8..38e094578 100644 --- a/exchangis-engines/engines/datax/datax-hdfsreader/src/main/java/com/alibaba/datax/plugin/reader/hdfsreader/HdfsReaderUtil.java +++ 
b/exchangis-engines/engines/datax/datax-hdfsreader/src/main/java/com/alibaba/datax/plugin/reader/hdfsreader/HdfsReaderUtil.java @@ -603,7 +603,7 @@ private Record transportOneRecord(List columnConfigs, List } Type type = Type.valueOf(columnType.toUpperCase()); // it's all ok if nullFormat is null - if (StringUtils.equals(columnValue, nullFormat) || StringUtils.isBlank(columnValue)) { + if (StringUtils.equals(columnValue, nullFormat) || StringUtils.isEmpty(columnValue)) { columnValue = null; } switch (type) { diff --git a/exchangis-engines/engines/datax/datax-hdfswriter/pom.xml b/exchangis-engines/engines/datax/datax-hdfswriter/pom.xml index 11c814aec..69e76c86c 100644 --- a/exchangis-engines/engines/datax/datax-hdfswriter/pom.xml +++ b/exchangis-engines/engines/datax/datax-hdfswriter/pom.xml @@ -5,7 +5,7 @@ exchangis-engine-datax com.webank.wedatasphere.exchangis - 1.1.2 + ${revision} ../pom.xml 4.0.0 diff --git a/exchangis-engines/engines/datax/datax-mysqlreader/pom.xml b/exchangis-engines/engines/datax/datax-mysqlreader/pom.xml index 12389b823..483c78022 100644 --- a/exchangis-engines/engines/datax/datax-mysqlreader/pom.xml +++ b/exchangis-engines/engines/datax/datax-mysqlreader/pom.xml @@ -5,7 +5,7 @@ exchangis-engine-datax com.webank.wedatasphere.exchangis - 1.1.2 + ${revision} ../pom.xml 4.0.0 diff --git a/exchangis-engines/engines/datax/datax-mysqlwriter/pom.xml b/exchangis-engines/engines/datax/datax-mysqlwriter/pom.xml index 7339dbc27..df5898ab7 100644 --- a/exchangis-engines/engines/datax/datax-mysqlwriter/pom.xml +++ b/exchangis-engines/engines/datax/datax-mysqlwriter/pom.xml @@ -5,7 +5,7 @@ exchangis-engine-datax com.webank.wedatasphere.exchangis - 1.1.2 + ${revision} ../pom.xml 4.0.0 diff --git a/exchangis-engines/engines/datax/datax-oraclereader/pom.xml b/exchangis-engines/engines/datax/datax-oraclereader/pom.xml index fd6f86f07..3e87bfad4 100644 --- a/exchangis-engines/engines/datax/datax-oraclereader/pom.xml +++ 
b/exchangis-engines/engines/datax/datax-oraclereader/pom.xml @@ -5,7 +5,7 @@ exchangis-engine-datax com.webank.wedatasphere.exchangis - 1.1.2 + ${revision} ../pom.xml 4.0.0 diff --git a/exchangis-engines/engines/datax/datax-oraclewriter/pom.xml b/exchangis-engines/engines/datax/datax-oraclewriter/pom.xml index 054951019..d4f3813ff 100644 --- a/exchangis-engines/engines/datax/datax-oraclewriter/pom.xml +++ b/exchangis-engines/engines/datax/datax-oraclewriter/pom.xml @@ -5,7 +5,7 @@ exchangis-engine-datax com.webank.wedatasphere.exchangis - 1.1.2 + ${revision} ../pom.xml 4.0.0 diff --git a/exchangis-engines/engines/datax/datax-starrockswriter/pom.xml b/exchangis-engines/engines/datax/datax-starrockswriter/pom.xml new file mode 100644 index 000000000..c2f92f30c --- /dev/null +++ b/exchangis-engines/engines/datax/datax-starrockswriter/pom.xml @@ -0,0 +1,93 @@ + + + + exchangis-engine-datax + com.webank.wedatasphere.exchangis + ${revision} + ../pom.xml + + 4.0.0 + + 3.0.0-Plus-2 + datax-oraclewriter + jar + + + + com.webank.wedatasphere.exchangis + datax-core + provided + + + org.apache.hadoop + hadoop-common + + + slf4j-log4j12 + org.slf4j + + + ${datax.engine.version} + + + org.slf4j + slf4j-api + provided + + + ch.qos.logback + logback-classic + provided + + + commons-codec + commons-codec + 1.9 + + + org.apache.commons + commons-lang3 + 3.12.0 + + + commons-logging + commons-logging + 1.1.1 + + + org.apache.httpcomponents + httpcore + 4.4.6 + + + org.apache.httpcomponents + httpclient + 4.5.3 + + + com.alibaba.fastjson2 + fastjson2 + 2.0.51 + + + mysql + mysql-connector-java + 8.0.16 + + + + + + + org.apache.maven.plugins + maven-compiler-plugin + + 8 + 8 + + + + + \ No newline at end of file diff --git a/exchangis-engines/engines/datax/datax-starrockswriter/src/main/java/com/alibaba/datax/plugin/writer/starrockswriter/StarRocksWriter.java 
b/exchangis-engines/engines/datax/datax-starrockswriter/src/main/java/com/alibaba/datax/plugin/writer/starrockswriter/StarRocksWriter.java new file mode 100644 index 000000000..19238525b --- /dev/null +++ b/exchangis-engines/engines/datax/datax-starrockswriter/src/main/java/com/alibaba/datax/plugin/writer/starrockswriter/StarRocksWriter.java @@ -0,0 +1,152 @@ +package com.alibaba.datax.plugin.writer.starrockswriter; + +import com.alibaba.datax.common.element.Record; +import com.alibaba.datax.common.exception.DataXException; +import com.alibaba.datax.common.plugin.RecordReceiver; +import com.alibaba.datax.common.spi.Writer; +import com.alibaba.datax.common.util.Configuration; +import com.alibaba.datax.plugin.rdbms.util.DBUtil; +import com.alibaba.datax.plugin.rdbms.util.DBUtilErrorCode; +import com.alibaba.datax.plugin.rdbms.util.DataBaseType; +import com.alibaba.datax.plugin.writer.starrockswriter.manager.StarRocksWriterManager; +import com.alibaba.datax.plugin.writer.starrockswriter.row.StarRocksISerializer; +import com.alibaba.datax.plugin.writer.starrockswriter.row.StarRocksSerializerFactory; +import com.alibaba.datax.plugin.writer.starrockswriter.util.StarRocksWriterUtil; + +import org.apache.commons.lang3.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.sql.Connection; +import java.util.ArrayList; +import java.util.List; +import java.util.Objects; + +public class StarRocksWriter extends Writer { + + public static class Job extends Writer.Job { + + private static final Logger LOG = LoggerFactory.getLogger(Job.class); + private Configuration originalConfig = null; + private StarRocksWriterOptions options; + + @Override + public void init() { + this.originalConfig = super.getPluginJobConf(); + String selectedDatabase = super.getPluginJobConf().getString(StarRocksWriterOptions.KEY_SELECTED_DATABASE); + if(StringUtils.isBlank(this.originalConfig.getString(StarRocksWriterOptions.KEY_DATABASE)) && 
StringUtils.isNotBlank(selectedDatabase)){ + this.originalConfig.set(StarRocksWriterOptions.KEY_DATABASE, selectedDatabase); + } + options = new StarRocksWriterOptions(super.getPluginJobConf()); + options.doPretreatment(); + } + + @Override + public void preCheck(){ + this.init(); + StarRocksWriterUtil.preCheckPrePareSQL(options); + StarRocksWriterUtil.preCheckPostSQL(options); + } + + @Override + public void prepare() { + String username = options.getUsername(); + String password = options.getPassword(); + String jdbcUrl = options.getJdbcUrl(); + List renderedPreSqls = StarRocksWriterUtil.renderPreOrPostSqls(options.getPreSqlList(), options.getTable()); + if (null != renderedPreSqls && !renderedPreSqls.isEmpty()) { + Connection conn = DBUtil.getConnection(DataBaseType.MySql, jdbcUrl, username, password); + LOG.info("Begin to execute preSqls:[{}]. context info:{}.", String.join(";", renderedPreSqls), jdbcUrl); + StarRocksWriterUtil.executeSqls(conn, renderedPreSqls); + DBUtil.closeDBResources(null, null, conn); + } + } + + @Override + public List split(int mandatoryNumber) { + List configurations = new ArrayList<>(mandatoryNumber); + for (int i = 0; i < mandatoryNumber; i++) { + configurations.add(originalConfig); + } + return configurations; + } + + @Override + public void post() { + String username = options.getUsername(); + String password = options.getPassword(); + String jdbcUrl = options.getJdbcUrl(); + List renderedPostSqls = StarRocksWriterUtil.renderPreOrPostSqls(options.getPostSqlList(), options.getTable()); + if (null != renderedPostSqls && !renderedPostSqls.isEmpty()) { + Connection conn = DBUtil.getConnection(DataBaseType.MySql, jdbcUrl, username, password); + LOG.info("Begin to execute postSqls:[{}]. 
context info:{}.", String.join(";", renderedPostSqls), jdbcUrl); + StarRocksWriterUtil.executeSqls(conn, renderedPostSqls); + DBUtil.closeDBResources(null, null, conn); + } + } + + @Override + public void destroy() { + } + + } + + public static class Task extends Writer.Task { + private StarRocksWriterManager writerManager; + private StarRocksWriterOptions options; + private StarRocksISerializer rowSerializer; + + @Override + public void init() { + options = new StarRocksWriterOptions(super.getPluginJobConf()); + if (options.isWildcardColumn()) { + Connection conn = DBUtil.getConnection(DataBaseType.MySql, options.getJdbcUrl(), options.getUsername(), options.getPassword()); + List columns = StarRocksWriterUtil.getStarRocksColumns(conn, options.getDatabase(), options.getTable()); + options.setInfoCchemaColumns(columns); + } + writerManager = new StarRocksWriterManager(options, getTaskPluginCollector()); + rowSerializer = StarRocksSerializerFactory.createSerializer(options); + } + + @Override + public void prepare() { + } + + public void startWrite(RecordReceiver recordReceiver) { + try { + Record record; + while ((record = recordReceiver.getFromReader()) != null) { + if (record.getColumnNumber() != options.getColumns().size()) { + throw DataXException + .asDataXException( + DBUtilErrorCode.CONF_ERROR, + String.format( + "Column configuration error. 
The number of reader columns %d and the number of writer columns %d are not equal.", + record.getColumnNumber(), + options.getColumns().size())); + } + writerManager.writeRecord(rowSerializer.serialize(record)); + } + } catch (Exception e) { + throw DataXException.asDataXException(DBUtilErrorCode.WRITE_DATA_ERROR, e); + } + } + + @Override + public void post() { + try { + writerManager.close(); + } catch (Exception e) { + throw DataXException.asDataXException(DBUtilErrorCode.WRITE_DATA_ERROR, e); + } + } + + @Override + public void destroy() {} + + @Override + public boolean supportFailOver(){ + return false; + } + } +} diff --git a/exchangis-engines/engines/datax/datax-starrockswriter/src/main/java/com/alibaba/datax/plugin/writer/starrockswriter/StarRocksWriterOptions.java b/exchangis-engines/engines/datax/datax-starrockswriter/src/main/java/com/alibaba/datax/plugin/writer/starrockswriter/StarRocksWriterOptions.java new file mode 100644 index 000000000..c670c7643 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-starrockswriter/src/main/java/com/alibaba/datax/plugin/writer/starrockswriter/StarRocksWriterOptions.java @@ -0,0 +1,231 @@ +package com.alibaba.datax.plugin.writer.starrockswriter; + +import com.alibaba.datax.common.exception.DataXException; +import com.alibaba.datax.common.util.Configuration; +import com.alibaba.datax.plugin.rdbms.util.DBUtilErrorCode; +import com.webank.wedatasphere.exchangis.datax.common.CryptoUtils; +import org.apache.commons.lang3.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.Serializable; +import java.util.Map; +import java.util.List; +import java.util.Objects; +import java.util.stream.Collectors; + +public class StarRocksWriterOptions implements Serializable { + + private static final Logger LOG = LoggerFactory.getLogger(StarRocksWriterOptions.class); + + private static final long serialVersionUID = 1l; + private static final long KILO_BYTES_SCALE = 1024l; + private static final 
long MEGA_BYTES_SCALE = KILO_BYTES_SCALE * KILO_BYTES_SCALE; + private static final int MAX_RETRIES = 1; + private static final int BATCH_ROWS = 500000; + private static final long BATCH_BYTES = 5 * MEGA_BYTES_SCALE; + private static final long FLUSH_INTERVAL = 300000; + + private static final String KEY_LOAD_PROPS_FORMAT = "format"; + public enum StreamLoadFormat { + CSV, JSON; + } + + public static final String KEY_USERNAME = "username"; + public static final String KEY_PASSWORD = "password"; + public static final String KEY_DATABASE = "database"; + public static final String KEY_SELECTED_DATABASE = "selectedDatabase"; + public static final String KEY_TABLE = "table"; + public static final String KEY_COLUMN = "column"; + public static final String KEY_PRE_SQL = "preSql"; + public static final String KEY_POST_SQL = "postSql"; + public static final String KEY_JDBC_URL = "jdbcUrl"; + public static final String KEY_HOST = "host"; + public static final String KEY_PORT = "port"; + public static final String KEY_HTTP_PORT = "httpPort"; + public static final String KEY_LABEL_PREFIX = "labelPrefix"; + public static final String KEY_MAX_BATCH_ROWS = "maxBatchRows"; + public static final String KEY_MAX_BATCH_SIZE = "maxBatchSize"; + public static final String KEY_FLUSH_INTERVAL = "flushInterval"; + public static final String KEY_LOAD_URL = "loadUrl"; + public static final String KEY_FLUSH_QUEUE_LENGTH = "flushQueueLength"; + public static final String KEY_LOAD_PROPS = "loadProps"; + public static final String CONNECTION_JDBC_URL = "connection[0].jdbcUrl"; + public static final String CONNECTION_HOST = "connection[0].host"; + public static final String CONNECTION_PORT = "connection[0].port"; + public static final String CONNECTION_HTTP_PORT = "connection[0].httpPort"; + public static final String CONNECTION_TABLE_NAME = "connection[0].table[0]"; + public static final String CONNECTION_SELECTED_DATABASE = "connection[0].selectedDatabase"; + + private final Configuration 
options; + private List infoCchemaColumns; + private List userSetColumns; + private boolean isWildcardColumn; + + public StarRocksWriterOptions(Configuration options) { + this.options = options; + // database + String database = this.options.getString(KEY_DATABASE); + if (StringUtils.isNotBlank(database)) { + this.options.set(KEY_DATABASE, database); + } + // jdbcUrl + String jdbcUrl = null; + String host = this.options.getString(CONNECTION_HOST); + String port = this.options.getString(CONNECTION_PORT); + if (StringUtils.isNotBlank(host) && StringUtils.isNotBlank(port)) { + jdbcUrl = "jdbc:mysql://" + host + ":" + port + "/"; + this.options.set(KEY_JDBC_URL, jdbcUrl); + } + // table + String table = this.options.getString(CONNECTION_TABLE_NAME); + if (StringUtils.isNotBlank(table)) { + this.options.set(KEY_TABLE, table); + } + // column + List keyColumns = this.options.getList(KEY_COLUMN, Map.class); + if (Objects.nonNull(keyColumns) && keyColumns.size() > 0) { + this.userSetColumns = keyColumns.stream() + .map(map -> String.valueOf(map.getOrDefault("name", ""))) + .map(name -> name.replace("`", "")) + .collect(Collectors.toList()); + } + } + + public void doPretreatment() { + validateRequired(); + validateStreamLoadUrl(); + } + + public String getJdbcUrl() { + return options.getString(KEY_JDBC_URL); + } + + public String getDatabase() { + return options.getString(KEY_DATABASE); + } + + public String getTable() { + return options.getString(KEY_TABLE); + } + + public String getUsername() { + return options.getString(KEY_USERNAME); + } + + public String getPassword() { + if(StringUtils.isNotBlank(options.getString(KEY_PASSWORD))) { + try { + return (String) CryptoUtils.string2Object(options.getString(KEY_PASSWORD)); + } catch (Exception e) { + throw DataXException.asDataXException(DBUtilErrorCode.CONF_ERROR, + "Decrypt password failed."); + } + } + return ""; + } + + public String getLabelPrefix() { + return options.getString(KEY_LABEL_PREFIX); + } + + public List 
getLoadUrlList() { + return options.getList(KEY_LOAD_URL, String.class); + } + + public List getColumns() { + if (isWildcardColumn) { + return this.infoCchemaColumns; + } + return this.userSetColumns; + } + + public boolean isWildcardColumn() { + return this.isWildcardColumn; + } + + public void setInfoCchemaColumns(List cols) { + this.infoCchemaColumns = cols; + } + + public List getPreSqlList() { + return options.getList(KEY_PRE_SQL, String.class); + } + + public List getPostSqlList() { + return options.getList(KEY_POST_SQL, String.class); + } + + public Map getLoadProps() { + return options.getMap(KEY_LOAD_PROPS); + } + + public int getMaxRetries() { + return MAX_RETRIES; + } + + public int getBatchRows() { + Integer rows = options.getInt(KEY_MAX_BATCH_ROWS); + return null == rows ? BATCH_ROWS : rows; + } + + public long getBatchSize() { + Long size = options.getLong(KEY_MAX_BATCH_SIZE); + return null == size ? BATCH_BYTES : size; + } + + public long getFlushInterval() { + Long interval = options.getLong(KEY_FLUSH_INTERVAL); + return null == interval ? FLUSH_INTERVAL : interval; + } + + public int getFlushQueueLength() { + Integer len = options.getInt(KEY_FLUSH_QUEUE_LENGTH); + return null == len ? 
1 : len; + } + + public StreamLoadFormat getStreamLoadFormat() { + Map loadProps = getLoadProps(); + if (null == loadProps) { + return StreamLoadFormat.CSV; + } + if (loadProps.containsKey(KEY_LOAD_PROPS_FORMAT) + && StreamLoadFormat.JSON.name().equalsIgnoreCase(String.valueOf(loadProps.get(KEY_LOAD_PROPS_FORMAT)))) { + return StreamLoadFormat.JSON; + } + return StreamLoadFormat.CSV; + } + + private void validateStreamLoadUrl() { + List urlList = getLoadUrlList(); + for (String host : urlList) { + if (host.split(":").length < 2) { + throw DataXException.asDataXException(DBUtilErrorCode.CONF_ERROR, + "The format of loadUrl is illegal, please input `fe_ip:fe_http_ip;fe_ip:fe_http_ip`."); + } + } + } + + private void validateRequired() { + final String[] requiredOptionKeys = new String[]{ + KEY_USERNAME, + KEY_DATABASE, + KEY_TABLE, + KEY_COLUMN, + KEY_LOAD_URL + }; + for (String optionKey : requiredOptionKeys) { + options.getNecessaryValue(optionKey, DBUtilErrorCode.REQUIRED_VALUE); + } + } + + @Override + public String toString() { + return "StarRocksWriterOptions{" + + "options=" + options + + ", infoCchemaColumns=" + infoCchemaColumns + + ", userSetColumns=" + userSetColumns + + ", isWildcardColumn=" + isWildcardColumn + + '}'; + } +} diff --git a/exchangis-engines/engines/datax/datax-starrockswriter/src/main/java/com/alibaba/datax/plugin/writer/starrockswriter/manager/StarRocksFlushTuple.java b/exchangis-engines/engines/datax/datax-starrockswriter/src/main/java/com/alibaba/datax/plugin/writer/starrockswriter/manager/StarRocksFlushTuple.java new file mode 100644 index 000000000..105982195 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-starrockswriter/src/main/java/com/alibaba/datax/plugin/writer/starrockswriter/manager/StarRocksFlushTuple.java @@ -0,0 +1,21 @@ +package com.alibaba.datax.plugin.writer.starrockswriter.manager; + +import java.util.List; + +public class StarRocksFlushTuple { + + private String label; + private Long bytes; + private List 
rows; + + public StarRocksFlushTuple(String label, Long bytes, List rows) { + this.label = label; + this.bytes = bytes; + this.rows = rows; + } + + public String getLabel() { return label; } + public void setLabel(String label) { this.label = label; } + public Long getBytes() { return bytes; } + public List getRows() { return rows; } +} \ No newline at end of file diff --git a/exchangis-engines/engines/datax/datax-starrockswriter/src/main/java/com/alibaba/datax/plugin/writer/starrockswriter/manager/StarRocksStreamLoadFailedException.java b/exchangis-engines/engines/datax/datax-starrockswriter/src/main/java/com/alibaba/datax/plugin/writer/starrockswriter/manager/StarRocksStreamLoadFailedException.java new file mode 100644 index 000000000..859f5777f --- /dev/null +++ b/exchangis-engines/engines/datax/datax-starrockswriter/src/main/java/com/alibaba/datax/plugin/writer/starrockswriter/manager/StarRocksStreamLoadFailedException.java @@ -0,0 +1,32 @@ +package com.alibaba.datax.plugin.writer.starrockswriter.manager; + +import java.io.IOException; +import java.util.Map; + +public class StarRocksStreamLoadFailedException extends IOException { + + static final long serialVersionUID = 1L; + + private final Map response; + private boolean reCreateLabel; + + public StarRocksStreamLoadFailedException(String message, Map response) { + super(message); + this.response = response; + } + + public StarRocksStreamLoadFailedException(String message, Map response, boolean reCreateLabel) { + super(message); + this.response = response; + this.reCreateLabel = reCreateLabel; + } + + public Map getFailedResponse() { + return response; + } + + public boolean needReCreateLabel() { + return reCreateLabel; + } + +} diff --git a/exchangis-engines/engines/datax/datax-starrockswriter/src/main/java/com/alibaba/datax/plugin/writer/starrockswriter/manager/StarRocksStreamLoadVisitor.java 
b/exchangis-engines/engines/datax/datax-starrockswriter/src/main/java/com/alibaba/datax/plugin/writer/starrockswriter/manager/StarRocksStreamLoadVisitor.java new file mode 100644 index 000000000..138c5e6bf --- /dev/null +++ b/exchangis-engines/engines/datax/datax-starrockswriter/src/main/java/com/alibaba/datax/plugin/writer/starrockswriter/manager/StarRocksStreamLoadVisitor.java @@ -0,0 +1,319 @@ +package com.alibaba.datax.plugin.writer.starrockswriter.manager; + +import java.io.IOException; +import java.net.HttpURLConnection; +import java.net.URL; +import java.nio.ByteBuffer; +import java.nio.charset.StandardCharsets; + +import com.alibaba.datax.common.plugin.TaskPluginCollector; +import com.alibaba.datax.core.statistics.plugin.task.util.DirtyRecord; +import com.alibaba.fastjson2.JSON; +import com.alibaba.datax.plugin.writer.starrockswriter.StarRocksWriterOptions; +import com.alibaba.datax.plugin.writer.starrockswriter.row.StarRocksDelimiterParser; + +import org.apache.commons.codec.binary.Base64; +import org.apache.http.HttpEntity; +import org.apache.http.client.config.RequestConfig; +import org.apache.http.client.methods.CloseableHttpResponse; +import org.apache.http.client.methods.HttpGet; +import org.apache.http.client.methods.HttpPut; +import org.apache.http.entity.ByteArrayEntity; +import org.apache.http.impl.client.CloseableHttpClient; +import org.apache.http.impl.client.DefaultRedirectStrategy; +import org.apache.http.impl.client.HttpClientBuilder; +import org.apache.http.impl.client.HttpClients; +import org.apache.http.util.EntityUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; + +public class StarRocksStreamLoadVisitor { + + private static final Logger LOG = LoggerFactory.getLogger(StarRocksStreamLoadVisitor.class); + + private final StarRocksWriterOptions writerOptions; + private long 
pos; + private static final String RESULT_FAILED = "Fail"; + private static final String RESULT_LABEL_EXISTED = "Label Already Exists"; + private static final String LAEBL_STATE_VISIBLE = "VISIBLE"; + private static final String LAEBL_STATE_COMMITTED = "COMMITTED"; + private static final String RESULT_LABEL_PREPARE = "PREPARE"; + private static final String RESULT_LABEL_ABORTED = "ABORTED"; + private static final String RESULT_LABEL_UNKNOWN = "UNKNOWN"; + + public StarRocksStreamLoadVisitor(StarRocksWriterOptions writerOptions) { + this.writerOptions = writerOptions; + } + + public void doStreamLoad(StarRocksFlushTuple flushData, TaskPluginCollector taskPluginCollector) throws IOException { + String host = getAvailableHost(); + if (null == host) { + throw new IOException("None of the host in `load_url` could be connected."); + } + String loadUrl = new StringBuilder(host) + .append("/api/") + .append(writerOptions.getDatabase()) + .append("/") + .append(writerOptions.getTable()) + .append("/_stream_load") + .toString(); + if (LOG.isDebugEnabled()) { + LOG.debug(String.format("Start to join batch data: rows[%d] bytes[%d] label[%s].", flushData.getRows().size(), flushData.getBytes(), flushData.getLabel())); + } + Map loadResult = doHttpPut(loadUrl, flushData.getLabel(), joinRows(flushData.getRows(), flushData.getBytes().intValue())); + LOG.info("LoadResult is {}", loadResult.toString()); + final String keyStatus = "Status"; + if (null == loadResult || !loadResult.containsKey(keyStatus)) { + LOG.error("unknown result status. {}", loadResult); + throw new IOException("Unable to flush data to StarRocks: unknown result status. 
" + loadResult); + } + if (LOG.isDebugEnabled()) { + LOG.debug(new StringBuilder("StreamLoad response:\n").append(JSON.toJSONString(loadResult)).toString()); + } + int dirtyRecord = 0; + String errorMsg = null; + if (loadResult.containsKey("NumberFilteredRows")) { + Object numberFilteredRows = loadResult.get("NumberFilteredRows"); + dirtyRecord = Integer.parseInt(String.valueOf(numberFilteredRows)); + } + if (RESULT_FAILED.equals(loadResult.get(keyStatus))) { + StringBuilder errorBuilder = new StringBuilder("Failed to flush data to StarRocks.\n"); + if (loadResult.containsKey("Message")) { + errorBuilder.append(loadResult.get("Message")); + errorBuilder.append('\n'); + } + if (loadResult.containsKey("ErrorURL")) { + LOG.error("StreamLoad response: {}", loadResult); + try { + errorBuilder.append(doHttpGet(loadResult.get("ErrorURL").toString())); + errorBuilder.append('\n'); + } catch (IOException e) { + LOG.warn("Get Error URL failed. {} ", loadResult.get("ErrorURL"), e); + } + } else { + errorBuilder.append(JSON.toJSONString(loadResult)); + errorBuilder.append('\n'); + } + LOG.error(errorBuilder.toString()); + errorMsg = errorBuilder.toString(); + } else if (RESULT_LABEL_EXISTED.equals(loadResult.get(keyStatus))) { + LOG.debug(new StringBuilder("StreamLoad response:\n").append(JSON.toJSONString(loadResult)).toString()); + // has to block-checking the state to get the final result + checkLabelState(host, flushData.getLabel()); + } + if (dirtyRecord > 0) { + for (int i = 0; i < dirtyRecord; i++) { + taskPluginCollector.collectDirtyRecord(new DirtyRecord(), errorMsg); + } + } + } + + private String getAvailableHost() { + List hostList = writerOptions.getLoadUrlList(); + long tmp = pos + hostList.size(); + for (; pos < tmp; pos++) { + String host = new StringBuilder("http://").append(hostList.get((int) (pos % hostList.size()))).toString(); + if (tryHttpConnection(host)) { + return host; + } + } + return null; + } + + private boolean tryHttpConnection(String host) { + 
try { + URL url = new URL(host); + HttpURLConnection co = (HttpURLConnection) url.openConnection(); + co.setConnectTimeout(1000); + co.connect(); + co.disconnect(); + return true; + } catch (Exception e1) { + LOG.warn("Failed to connect to address:{}", host, e1); + return false; + } + } + + private byte[] joinRows(List rows, int totalBytes) { + if (StarRocksWriterOptions.StreamLoadFormat.CSV.equals(writerOptions.getStreamLoadFormat())) { + Map props = (writerOptions.getLoadProps() == null ? new HashMap<>() : writerOptions.getLoadProps()); + byte[] lineDelimiter = StarRocksDelimiterParser.parse((String)props.get("row_delimiter"), "\n").getBytes(StandardCharsets.UTF_8); + ByteBuffer bos = ByteBuffer.allocate(totalBytes + rows.size() * lineDelimiter.length); + for (byte[] row : rows) { + bos.put(row); + bos.put(lineDelimiter); + } + return bos.array(); + } + + if (StarRocksWriterOptions.StreamLoadFormat.JSON.equals(writerOptions.getStreamLoadFormat())) { + ByteBuffer bos = ByteBuffer.allocate(totalBytes + (rows.isEmpty() ? 
2 : rows.size() + 1)); + bos.put("[".getBytes(StandardCharsets.UTF_8)); + byte[] jsonDelimiter = ",".getBytes(StandardCharsets.UTF_8); + boolean isFirstElement = true; + for (byte[] row : rows) { + if (!isFirstElement) { + bos.put(jsonDelimiter); + } + bos.put(row); + isFirstElement = false; + } + bos.put("]".getBytes(StandardCharsets.UTF_8)); + return bos.array(); + } + throw new RuntimeException("Failed to join rows data, unsupported `format` from stream load properties:"); + } + + @SuppressWarnings("unchecked") + private void checkLabelState(String host, String label) throws IOException { + int idx = 0; + while(true) { + try { + TimeUnit.SECONDS.sleep(Math.min(++idx, 5)); + } catch (InterruptedException ex) { + break; + } + try (CloseableHttpClient httpclient = HttpClients.createDefault()) { + HttpGet httpGet = new HttpGet(new StringBuilder(host).append("/api/").append(writerOptions.getDatabase()).append("/get_load_state?label=").append(label).toString()); + httpGet.setHeader("Authorization", getBasicAuthHeader(writerOptions.getUsername(), writerOptions.getPassword())); + httpGet.setHeader("Connection", "close"); + + try (CloseableHttpResponse resp = httpclient.execute(httpGet)) { + HttpEntity respEntity = getHttpEntity(resp); + if (respEntity == null) { + throw new IOException(String.format("Failed to flush data to StarRocks, Error " + + "could not get the final state of label[%s].\n", label), null); + } + Map result = (Map)JSON.parse(EntityUtils.toString(respEntity)); + String labelState = (String)result.get("state"); + if (null == labelState) { + throw new IOException(String.format("Failed to flush data to StarRocks, Error " + + "could not get the final state of label[%s]. 
response[%s]\n", label, EntityUtils.toString(respEntity)), null); + } + LOG.info(String.format("Checking label[%s] state[%s]\n", label, labelState)); + switch(labelState) { + case LAEBL_STATE_VISIBLE: + case LAEBL_STATE_COMMITTED: + return; + case RESULT_LABEL_PREPARE: + continue; + case RESULT_LABEL_ABORTED: + throw new StarRocksStreamLoadFailedException(String.format("Failed to flush data to StarRocks, Error " + + "label[%s] state[%s]\n", label, labelState), null, true); + case RESULT_LABEL_UNKNOWN: + default: + throw new IOException(String.format("Failed to flush data to StarRocks, Error " + + "label[%s] state[%s]\n", label, labelState), null); + } + } + } + } + } + + @SuppressWarnings("unchecked") + private Map doHttpPut(String loadUrl, String label, byte[] data) throws IOException { + LOG.info(String.format("Executing stream load to: '%s', size: '%s'", loadUrl, data.length)); + final HttpClientBuilder httpClientBuilder = HttpClients.custom() + .setRedirectStrategy(new DefaultRedirectStrategy() { + @Override + protected boolean isRedirectable(String method) { + return true; + } + }); + try (CloseableHttpClient httpclient = httpClientBuilder.build()) { + HttpPut httpPut = new HttpPut(loadUrl); + List cols = writerOptions.getColumns(); + if (null != cols && !cols.isEmpty() && StarRocksWriterOptions.StreamLoadFormat.CSV.equals(writerOptions.getStreamLoadFormat())) { + httpPut.setHeader("columns", String.join(",", cols.stream().map(f -> String.format("`%s`", f)).collect(Collectors.toList()))); + } + if (null != writerOptions.getLoadProps()) { + for (Map.Entry entry : writerOptions.getLoadProps().entrySet()) { + httpPut.setHeader(entry.getKey(), String.valueOf(entry.getValue())); + } + } + httpPut.setHeader("strict_mode", "true"); + httpPut.setHeader("Expect", "100-continue"); + httpPut.setHeader("label", label); + httpPut.setHeader("Content-Type", "application/x-www-form-urlencoded"); + httpPut.setHeader("Authorization", 
getBasicAuthHeader(writerOptions.getUsername(), writerOptions.getPassword())); + httpPut.setEntity(new ByteArrayEntity(data)); + httpPut.setConfig(RequestConfig.custom().setRedirectsEnabled(true).build()); + try (CloseableHttpResponse resp = httpclient.execute(httpPut)) { + int code = resp.getStatusLine().getStatusCode(); + if (200 != code) { + String errorText; + try { + HttpEntity respEntity = resp.getEntity(); + errorText = EntityUtils.toString(respEntity); + } catch (Exception err) { + errorText = "find errorText failed: " + err.getMessage(); + } + LOG.warn("Request failed with code:{}, err:{}", code, errorText); + Map errorMap = new HashMap<>(); + errorMap.put("Status", "Fail"); + errorMap.put("Message", errorText); + return errorMap; + } + HttpEntity respEntity = resp.getEntity(); + if (null == respEntity) { + LOG.warn("Request failed with empty response."); + return null; + } + return (Map)JSON.parse(EntityUtils.toString(respEntity)); + } + } + } + + private String getBasicAuthHeader(String username, String password) { + String auth = username + ":" + password; + byte[] encodedAuth = Base64.encodeBase64(auth.getBytes(StandardCharsets.UTF_8)); + return new StringBuilder("Basic ").append(new String(encodedAuth)).toString(); + } + + private HttpEntity getHttpEntity(CloseableHttpResponse resp) { + int code = resp.getStatusLine().getStatusCode(); + if (200 != code) { + LOG.warn("Request failed with code:{}", code); + return null; + } + HttpEntity respEntity = resp.getEntity(); + if (null == respEntity) { + LOG.warn("Request failed with empty response."); + return null; + } + return respEntity; + } + + private String doHttpGet(String getUrl) throws IOException { + LOG.info("Executing GET from {}.", getUrl); + try (CloseableHttpClient httpclient = buildHttpClient()) { + HttpGet httpGet = new HttpGet(getUrl); + try (CloseableHttpResponse resp = httpclient.execute(httpGet)) { + HttpEntity respEntity = resp.getEntity(); + if (null == respEntity) { + LOG.warn("Request 
failed with empty response."); + return null; + } + return EntityUtils.toString(respEntity); + } + } + } + + private CloseableHttpClient buildHttpClient(){ + final HttpClientBuilder httpClientBuilder = HttpClients.custom() + .setRedirectStrategy(new DefaultRedirectStrategy() { + @Override + protected boolean isRedirectable(String method) { + return true; + } + }); + return httpClientBuilder.build(); + } + +} diff --git a/exchangis-engines/engines/datax/datax-starrockswriter/src/main/java/com/alibaba/datax/plugin/writer/starrockswriter/manager/StarRocksWriterManager.java b/exchangis-engines/engines/datax/datax-starrockswriter/src/main/java/com/alibaba/datax/plugin/writer/starrockswriter/manager/StarRocksWriterManager.java new file mode 100644 index 000000000..349712c70 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-starrockswriter/src/main/java/com/alibaba/datax/plugin/writer/starrockswriter/manager/StarRocksWriterManager.java @@ -0,0 +1,204 @@ +package com.alibaba.datax.plugin.writer.starrockswriter.manager; + +import com.alibaba.datax.common.plugin.TaskPluginCollector; +import org.apache.commons.lang3.StringUtils; +import org.apache.commons.lang3.concurrent.BasicThreadFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.List; +import java.util.UUID; +import java.util.concurrent.Executors; +import java.util.concurrent.LinkedBlockingDeque; +import java.util.concurrent.ScheduledExecutorService; +import java.util.concurrent.ScheduledFuture; +import java.util.concurrent.TimeUnit; + +import com.alibaba.datax.plugin.writer.starrockswriter.StarRocksWriterOptions; + +public class StarRocksWriterManager { + + private static final Logger LOG = LoggerFactory.getLogger(StarRocksWriterManager.class); + + private final StarRocksStreamLoadVisitor starrocksStreamLoadVisitor; + private final StarRocksWriterOptions writerOptions; + + 
private final List buffer = new ArrayList<>(); + private int batchCount = 0; + private long batchSize = 0; + private volatile boolean closed = false; + private volatile Exception flushException; + private final LinkedBlockingDeque flushQueue; + private ScheduledExecutorService scheduler; + private ScheduledFuture scheduledFuture; + + public StarRocksWriterManager(StarRocksWriterOptions writerOptions, TaskPluginCollector taskPluginCollector) { + this.writerOptions = writerOptions; + this.starrocksStreamLoadVisitor = new StarRocksStreamLoadVisitor(writerOptions); + flushQueue = new LinkedBlockingDeque<>(writerOptions.getFlushQueueLength()); + this.startScheduler(); + this.startAsyncFlushing(taskPluginCollector); + } + + public void startScheduler() { + stopScheduler(); + this.scheduler = Executors.newScheduledThreadPool(1, new BasicThreadFactory.Builder().namingPattern("starrocks-interval-flush").daemon(true).build()); + this.scheduledFuture = this.scheduler.schedule(() -> { + synchronized (StarRocksWriterManager.this) { + if (!closed) { + try { + String label = createBatchLabel(); + LOG.info(String.format("StarRocks interval Sinking triggered: label[%s].", label)); + if (batchCount == 0) { + startScheduler(); + } + flush(label, false); + } catch (Exception e) { + flushException = e; + } + } + } + }, writerOptions.getFlushInterval(), TimeUnit.MILLISECONDS); + } + + public void stopScheduler() { + if (this.scheduledFuture != null) { + scheduledFuture.cancel(false); + this.scheduler.shutdown(); + } + } + + public final synchronized void writeRecord(String record) throws IOException { + checkFlushException(); + try { + byte[] bts = record.getBytes(StandardCharsets.UTF_8); + buffer.add(bts); + batchCount++; + batchSize += bts.length; + if (batchCount >= writerOptions.getBatchRows() || batchSize >= writerOptions.getBatchSize()) { + String label = createBatchLabel(); + if (LOG.isDebugEnabled()) { + LOG.debug(String.format("StarRocks buffer Sinking triggered: rows[%d] 
label[%s].", batchCount, label)); + } + flush(label, false); + } + } catch (Exception e) { + throw new IOException("Writing records to StarRocks failed.", e); + } + } + + public synchronized void flush(String label, boolean waitUtilDone) throws Exception { + checkFlushException(); + if (batchCount == 0) { + if (waitUtilDone) { + waitAsyncFlushingDone(); + } + return; + } + flushQueue.put(new StarRocksFlushTuple(label, batchSize, new ArrayList<>(buffer))); + if (waitUtilDone) { + // wait the last flush + waitAsyncFlushingDone(); + } + buffer.clear(); + batchCount = 0; + batchSize = 0; + } + + public synchronized void close() { + if (!closed) { + closed = true; + try { + String label = createBatchLabel(); + if (batchCount > 0) { + if (LOG.isDebugEnabled()) { + LOG.debug(String.format("StarRocks Sink is about to close: label[%s].", label)); + } + } + flush(label, true); + } catch (Exception e) { + throw new RuntimeException("Writing records to StarRocks failed.", e); + } + } + checkFlushException(); + } + + public String createBatchLabel() { + StringBuilder sb = new StringBuilder(); + if (StringUtils.isNotBlank(writerOptions.getLabelPrefix())) { + sb.append(writerOptions.getLabelPrefix()); + } + return sb.append(UUID.randomUUID().toString()) + .toString(); + } + + private void startAsyncFlushing(TaskPluginCollector taskPluginCollector) { + // start flush thread + Thread flushThread = new Thread(new Runnable(){ + public void run() { + while(true) { + try { + asyncFlush(taskPluginCollector); + } catch (Exception e) { + flushException = e; + } + } + } + }); + flushThread.setDaemon(true); + flushThread.start(); + } + + private void waitAsyncFlushingDone() throws InterruptedException { + // wait previous flushings + for (int i = 0; i <= writerOptions.getFlushQueueLength(); i++) { + flushQueue.put(new StarRocksFlushTuple("", 0l, null)); + } + checkFlushException(); + } + + private void asyncFlush(TaskPluginCollector taskPluginCollector) throws Exception { + 
StarRocksFlushTuple flushData = flushQueue.take(); + if (StringUtils.isBlank(flushData.getLabel())) { + return; + } + stopScheduler(); + if (LOG.isDebugEnabled()) { + LOG.debug(String.format("Async stream load: rows[%d] bytes[%d] label[%s].", flushData.getRows().size(), flushData.getBytes(), flushData.getLabel())); + } + for (int i = 0; i <= writerOptions.getMaxRetries(); i++) { + try { + // flush to StarRocks with stream load + starrocksStreamLoadVisitor.doStreamLoad(flushData, taskPluginCollector); + LOG.info(String.format("Async stream load finished: label[%s].", flushData.getLabel())); + startScheduler(); + break; + } catch (Exception e) { + LOG.warn("Failed to flush batch data to StarRocks, retry times = {}", i, e); + if (i >= writerOptions.getMaxRetries()) { + throw new IOException(e); + } + if (e instanceof StarRocksStreamLoadFailedException && ((StarRocksStreamLoadFailedException)e).needReCreateLabel()) { + String newLabel = createBatchLabel(); + LOG.warn(String.format("Batch label changed from [%s] to [%s]", flushData.getLabel(), newLabel)); + flushData.setLabel(newLabel); + } + try { + Thread.sleep(1000l * Math.min(i + 1, 10)); + } catch (InterruptedException ex) { + Thread.currentThread().interrupt(); + throw new IOException("Unable to flush, interrupted while doing another attempt", e); + } + } + } + } + + private void checkFlushException() { + if (flushException != null) { + throw new RuntimeException("Writing records to StarRocks failed.", flushException); + } + } +} diff --git a/exchangis-engines/engines/datax/datax-starrockswriter/src/main/java/com/alibaba/datax/plugin/writer/starrockswriter/row/StarRocksBaseSerializer.java b/exchangis-engines/engines/datax/datax-starrockswriter/src/main/java/com/alibaba/datax/plugin/writer/starrockswriter/row/StarRocksBaseSerializer.java new file mode 100644 index 000000000..c2948b44a --- /dev/null +++ 
b/exchangis-engines/engines/datax/datax-starrockswriter/src/main/java/com/alibaba/datax/plugin/writer/starrockswriter/row/StarRocksBaseSerializer.java @@ -0,0 +1,26 @@ +package com.alibaba.datax.plugin.writer.starrockswriter.row; + +import com.alibaba.datax.common.element.Column; +import com.alibaba.datax.common.element.Column.Type; + +public class StarRocksBaseSerializer { + + protected String fieldConvertion(Column col) { + if (null == col.getRawData() || Type.NULL == col.getType()) { + return null; + } + if (Type.BOOLEAN == col.getType()) { + return String.valueOf(col.asLong()); + } + if (Type.BYTES == col.getType()) { + byte[] bts = (byte[])col.getRawData(); + long value = 0; + for (int i = 0; i < bts.length; i++) { + value += (bts[bts.length - i - 1] & 0xffL) << (8 * i); + } + return String.valueOf(value); + } + return col.asString(); + } + +} \ No newline at end of file diff --git a/exchangis-engines/engines/datax/datax-starrockswriter/src/main/java/com/alibaba/datax/plugin/writer/starrockswriter/row/StarRocksCsvSerializer.java b/exchangis-engines/engines/datax/datax-starrockswriter/src/main/java/com/alibaba/datax/plugin/writer/starrockswriter/row/StarRocksCsvSerializer.java new file mode 100644 index 000000000..55b429fea --- /dev/null +++ b/exchangis-engines/engines/datax/datax-starrockswriter/src/main/java/com/alibaba/datax/plugin/writer/starrockswriter/row/StarRocksCsvSerializer.java @@ -0,0 +1,28 @@ +package com.alibaba.datax.plugin.writer.starrockswriter.row; + +import com.alibaba.datax.common.element.Record; + +public class StarRocksCsvSerializer extends StarRocksBaseSerializer implements StarRocksISerializer { + + private static final long serialVersionUID = 1L; + + private final String columnSeparator; + + public StarRocksCsvSerializer(String sp) { + this.columnSeparator = StarRocksDelimiterParser.parse(sp, "\t"); + } + + @Override + public String serialize(Record row) { + StringBuilder sb = new StringBuilder(); + for (int i = 0; i < 
row.getColumnNumber(); i++) { + String value = fieldConvertion(row.getColumn(i)); + sb.append(null == value ? "\\N" : value); + if (i < row.getColumnNumber() - 1) { + sb.append(columnSeparator); + } + } + return sb.toString(); + } + +} diff --git a/exchangis-engines/engines/datax/datax-starrockswriter/src/main/java/com/alibaba/datax/plugin/writer/starrockswriter/row/StarRocksDelimiterParser.java b/exchangis-engines/engines/datax/datax-starrockswriter/src/main/java/com/alibaba/datax/plugin/writer/starrockswriter/row/StarRocksDelimiterParser.java new file mode 100644 index 000000000..523d7dcf7 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-starrockswriter/src/main/java/com/alibaba/datax/plugin/writer/starrockswriter/row/StarRocksDelimiterParser.java @@ -0,0 +1,55 @@ +package com.alibaba.datax.plugin.writer.starrockswriter.row; + +import org.apache.commons.lang3.StringUtils; + +import java.io.StringWriter; + +public class StarRocksDelimiterParser { + + private static final String HEX_STRING = "0123456789ABCDEF"; + + public static String parse(String sp, String dSp) throws RuntimeException { + if (StringUtils.isBlank(sp)) { + return dSp; + } + if (!sp.toUpperCase().startsWith("\\X")) { + return sp; + } + String hexStr = sp.substring(2); + // check hex str + if (hexStr.isEmpty()) { + throw new RuntimeException("Failed to parse delimiter: `Hex str is empty`"); + } + if (hexStr.length() % 2 != 0) { + throw new RuntimeException("Failed to parse delimiter: `Hex str length error`"); + } + for (char hexChar : hexStr.toUpperCase().toCharArray()) { + if (HEX_STRING.indexOf(hexChar) == -1) { + throw new RuntimeException("Failed to parse delimiter: `Hex str format error`"); + } + } + // transform to separator + StringWriter writer = new StringWriter(); + for (byte b : hexStrToBytes(hexStr)) { + writer.append((char) b); + } + return writer.toString(); + } + + private static byte[] hexStrToBytes(String hexStr) { + String upperHexStr = hexStr.toUpperCase(); + int length = 
upperHexStr.length() / 2; + char[] hexChars = upperHexStr.toCharArray(); + byte[] bytes = new byte[length]; + for (int i = 0; i < length; i++) { + int pos = i * 2; + bytes[i] = (byte) (charToByte(hexChars[pos]) << 4 | charToByte(hexChars[pos + 1])); + } + return bytes; + } + + private static byte charToByte(char c) { + return (byte) HEX_STRING.indexOf(c); + } + +} diff --git a/exchangis-engines/engines/datax/datax-starrockswriter/src/main/java/com/alibaba/datax/plugin/writer/starrockswriter/row/StarRocksISerializer.java b/exchangis-engines/engines/datax/datax-starrockswriter/src/main/java/com/alibaba/datax/plugin/writer/starrockswriter/row/StarRocksISerializer.java new file mode 100644 index 000000000..5924a4274 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-starrockswriter/src/main/java/com/alibaba/datax/plugin/writer/starrockswriter/row/StarRocksISerializer.java @@ -0,0 +1,11 @@ +package com.alibaba.datax.plugin.writer.starrockswriter.row; + +import java.io.Serializable; + +import com.alibaba.datax.common.element.Record; + +public interface StarRocksISerializer extends Serializable { + + String serialize(Record row); + +} diff --git a/exchangis-engines/engines/datax/datax-starrockswriter/src/main/java/com/alibaba/datax/plugin/writer/starrockswriter/row/StarRocksJsonSerializer.java b/exchangis-engines/engines/datax/datax-starrockswriter/src/main/java/com/alibaba/datax/plugin/writer/starrockswriter/row/StarRocksJsonSerializer.java new file mode 100644 index 000000000..4648a446f --- /dev/null +++ b/exchangis-engines/engines/datax/datax-starrockswriter/src/main/java/com/alibaba/datax/plugin/writer/starrockswriter/row/StarRocksJsonSerializer.java @@ -0,0 +1,34 @@ +package com.alibaba.datax.plugin.writer.starrockswriter.row; + +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import com.alibaba.datax.common.element.Record; +import com.alibaba.fastjson2.JSON; + +public class StarRocksJsonSerializer extends StarRocksBaseSerializer 
implements StarRocksISerializer { + + private static final long serialVersionUID = 1L; + + private final List fieldNames; + + public StarRocksJsonSerializer(List fieldNames) { + this.fieldNames = fieldNames; + } + + @Override + public String serialize(Record row) { + if (null == fieldNames) { + return ""; + } + Map rowMap = new HashMap<>(fieldNames.size()); + int idx = 0; + for (String fieldName : fieldNames) { + rowMap.put(fieldName, fieldConvertion(row.getColumn(idx))); + idx++; + } + return JSON.toJSONString(rowMap); + } + +} diff --git a/exchangis-engines/engines/datax/datax-starrockswriter/src/main/java/com/alibaba/datax/plugin/writer/starrockswriter/row/StarRocksSerializerFactory.java b/exchangis-engines/engines/datax/datax-starrockswriter/src/main/java/com/alibaba/datax/plugin/writer/starrockswriter/row/StarRocksSerializerFactory.java new file mode 100644 index 000000000..f5da30963 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-starrockswriter/src/main/java/com/alibaba/datax/plugin/writer/starrockswriter/row/StarRocksSerializerFactory.java @@ -0,0 +1,22 @@ +package com.alibaba.datax.plugin.writer.starrockswriter.row; + +import java.util.Map; + +import com.alibaba.datax.plugin.writer.starrockswriter.StarRocksWriterOptions; + +public class StarRocksSerializerFactory { + + private StarRocksSerializerFactory() {} + + public static StarRocksISerializer createSerializer(StarRocksWriterOptions writerOptions) { + if (StarRocksWriterOptions.StreamLoadFormat.CSV.equals(writerOptions.getStreamLoadFormat())) { + Map props = writerOptions.getLoadProps(); + return new StarRocksCsvSerializer(null == props || !props.containsKey("column_separator") ? 
null : String.valueOf(props.get("column_separator"))); + } + if (StarRocksWriterOptions.StreamLoadFormat.JSON.equals(writerOptions.getStreamLoadFormat())) { + return new StarRocksJsonSerializer(writerOptions.getColumns()); + } + throw new RuntimeException("Failed to create row serializer, unsupported `format` from stream load properties."); + } + +} diff --git a/exchangis-engines/engines/datax/datax-starrockswriter/src/main/java/com/alibaba/datax/plugin/writer/starrockswriter/util/StarRocksWriterUtil.java b/exchangis-engines/engines/datax/datax-starrockswriter/src/main/java/com/alibaba/datax/plugin/writer/starrockswriter/util/StarRocksWriterUtil.java new file mode 100644 index 000000000..279ce9fb3 --- /dev/null +++ b/exchangis-engines/engines/datax/datax-starrockswriter/src/main/java/com/alibaba/datax/plugin/writer/starrockswriter/util/StarRocksWriterUtil.java @@ -0,0 +1,93 @@ +package com.alibaba.datax.plugin.writer.starrockswriter.util; + +import com.alibaba.datax.plugin.rdbms.util.DBUtil; +import com.alibaba.datax.plugin.rdbms.util.DataBaseType; +import com.alibaba.datax.plugin.rdbms.util.RdbmsException; +import com.alibaba.datax.plugin.rdbms.writer.Constant; +import com.alibaba.datax.plugin.writer.starrockswriter.StarRocksWriterOptions; +import org.apache.commons.lang3.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.sql.Connection; +import java.sql.ResultSet; +import java.sql.Statement; +import java.util.*; + +public class StarRocksWriterUtil { + + private static final Logger LOG = LoggerFactory.getLogger(StarRocksWriterUtil.class); + + private StarRocksWriterUtil() {} + + public static List getStarRocksColumns(Connection conn, String databaseName, String tableName) { + String currentSql = String.format("SELECT COLUMN_NAME FROM `information_schema`.`COLUMNS` WHERE `TABLE_SCHEMA` = '%s' AND `TABLE_NAME` = '%s' ORDER BY `ORDINAL_POSITION` ASC;", databaseName, tableName); + List columns = new ArrayList<>(); + ResultSet rs = 
null; + try { + rs = DBUtil.query(conn, currentSql); + while (DBUtil.asyncResultSetNext(rs)) { + String colName = rs.getString("COLUMN_NAME"); + columns.add(colName); + } + return columns; + } catch (Exception e) { + throw RdbmsException.asQueryException(DataBaseType.MySql, e, currentSql, null, null); + } finally { + DBUtil.closeDBResources(rs, null, null); + } + } + + public static List renderPreOrPostSqls(List preOrPostSqls, String tableName) { + if (null == preOrPostSqls) { + return Collections.emptyList(); + } + List renderedSqls = new ArrayList<>(); + for (String sql : preOrPostSqls) { + if (StringUtils.isNotBlank(sql)) { + renderedSqls.add(sql.replace(Constant.TABLE_NAME_PLACEHOLDER, tableName)); + } + } + return renderedSqls; + } + + public static void executeSqls(Connection conn, List sqls) { + Statement stmt = null; + String currentSql = null; + try { + stmt = conn.createStatement(); + for (String sql : sqls) { + currentSql = sql; + DBUtil.executeSqlWithoutResultSet(stmt, sql); + } + } catch (Exception e) { + throw RdbmsException.asQueryException(DataBaseType.MySql, e, currentSql, null, null); + } finally { + DBUtil.closeDBResources(null, stmt, null); + } + } + + public static void preCheckPrePareSQL(StarRocksWriterOptions options) { + String table = options.getTable(); + List preSqls = options.getPreSqlList(); + List renderedPreSqls = StarRocksWriterUtil.renderPreOrPostSqls(preSqls, table); + if (null != renderedPreSqls && !renderedPreSqls.isEmpty()) { + LOG.info("Begin to preCheck preSqls:[{}].", String.join(";", renderedPreSqls)); + for (String sql : renderedPreSqls) { + DBUtil.sqlValid(sql, DataBaseType.MySql); + } + } + } + + public static void preCheckPostSQL(StarRocksWriterOptions options) { + String table = options.getTable(); + List postSqls = options.getPostSqlList(); + List renderedPostSqls = StarRocksWriterUtil.renderPreOrPostSqls(postSqls, table); + if (null != renderedPostSqls && !renderedPostSqls.isEmpty()) { + LOG.info("Begin to preCheck 
postSqls:[{}].", String.join(";", renderedPostSqls)); + for(String sql : renderedPostSqls) { + DBUtil.sqlValid(sql, DataBaseType.MySql); + } + } + } +} diff --git a/exchangis-engines/engines/datax/datax-textfilereader/pom.xml b/exchangis-engines/engines/datax/datax-textfilereader/pom.xml index 6e7c18313..e8c690ddf 100644 --- a/exchangis-engines/engines/datax/datax-textfilereader/pom.xml +++ b/exchangis-engines/engines/datax/datax-textfilereader/pom.xml @@ -5,7 +5,7 @@ exchangis-engine-datax com.webank.wedatasphere.exchangis - 1.1.2 + ${revision} ../pom.xml 4.0.0 diff --git a/exchangis-engines/engines/datax/datax-textfilewriter/pom.xml b/exchangis-engines/engines/datax/datax-textfilewriter/pom.xml index 1d0c9f1f3..7ce1bf54b 100644 --- a/exchangis-engines/engines/datax/datax-textfilewriter/pom.xml +++ b/exchangis-engines/engines/datax/datax-textfilewriter/pom.xml @@ -5,7 +5,7 @@ exchangis-engine-datax com.webank.wedatasphere.exchangis - 1.1.2 + ${revision} ../pom.xml 4.0.0 diff --git a/exchangis-engines/engines/datax/pom.xml b/exchangis-engines/engines/datax/pom.xml index 15d7f5879..59d8ac664 100644 --- a/exchangis-engines/engines/datax/pom.xml +++ b/exchangis-engines/engines/datax/pom.xml @@ -5,7 +5,7 @@ com.webank.wedatasphere.exchangis exchangis - 1.1.2 + ${revision} ../../../pom.xml @@ -44,7 +44,7 @@ datax-mysqlreader datax-mysqlwriter datax-oraclereader - datax-oraclewriter + datax-starrockswriter datax-assembly diff --git a/exchangis-engines/exchangis-engine-common/pom.xml b/exchangis-engines/exchangis-engine-common/pom.xml index d6cf14efa..16cf86da2 100644 --- a/exchangis-engines/exchangis-engine-common/pom.xml +++ b/exchangis-engines/exchangis-engine-common/pom.xml @@ -3,9 +3,10 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> - exchangis-engines + exchangis com.webank.wedatasphere.exchangis - 1.1.2 + ${revision} + ../../pom.xml 4.0.0 @@ -19,7 +20,7 @@ 
com.webank.wedatasphere.exchangis exchangis-dao - ${exchangis.version} + ${project.version} org.apache.linkis diff --git a/exchangis-engines/exchangis-engine-common/src/main/java/com/webank/wedatasphere/exchangis/engine/domain/EngineResource.java b/exchangis-engines/exchangis-engine-common/src/main/java/com/webank/wedatasphere/exchangis/engine/domain/EngineResource.java index ba74d2596..632614c91 100644 --- a/exchangis-engines/exchangis-engine-common/src/main/java/com/webank/wedatasphere/exchangis/engine/domain/EngineResource.java +++ b/exchangis-engines/exchangis-engine-common/src/main/java/com/webank/wedatasphere/exchangis/engine/domain/EngineResource.java @@ -124,4 +124,18 @@ public String getPath() { public void setPath(String path) { this.path = path; } + + @Override + public String toString() { + return "EngineResource{" + + "engineType='" + engineType + '\'' + + ", id='" + id + '\'' + + ", name='" + name + '\'' + + ", type='" + type + '\'' + + ", path='" + path + '\'' + + ", createTime=" + createTime + + ", modifyTime=" + modifyTime + + ", creator='" + creator + '\'' + + '}'; + } } diff --git a/exchangis-engines/exchangis-engine-core/pom.xml b/exchangis-engines/exchangis-engine-core/pom.xml index 5d040b341..379c1dbfd 100644 --- a/exchangis-engines/exchangis-engine-core/pom.xml +++ b/exchangis-engines/exchangis-engine-core/pom.xml @@ -3,9 +3,10 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> - exchangis-engines + exchangis com.webank.wedatasphere.exchangis - 1.1.2 + ${revision} + ../../pom.xml 4.0.0 @@ -20,12 +21,12 @@ com.webank.wedatasphere.exchangis exchangis-engine-common - ${exchangis.version} + ${project.version} com.webank.wedatasphere.exchangis exchangis-dao - ${exchangis.version} + ${project.version} diff --git 
a/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/resource/AbstractEngineResourceContainer.java b/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/resource/AbstractEngineResourceContainer.java index b7994bac4..8b353c837 100644 --- a/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/resource/AbstractEngineResourceContainer.java +++ b/exchangis-engines/exchangis-engine-core/src/main/java/com/webank/wedatasphere/exchangis/engine/resource/AbstractEngineResourceContainer.java @@ -245,6 +245,7 @@ private void flushResources(ResourcePathNode pathNode) throws ExchangisEngineRes // Try tp upload the node engine resource try { U uploadedRes = this.engineResourceUploader.upload(nodeEngineRes, pathNode.getRemoteResource()); + LOG.info("uploadedRes is {}", uploadedRes.toString()); if (Objects.nonNull(uploadedRes)) { // Store the uploaded remoted resource information if (Objects.nonNull(pathNode.getRemoteResource())) { diff --git a/exchangis-engines/exchangis-engine-server/pom.xml b/exchangis-engines/exchangis-engine-server/pom.xml index 5873fe538..5671e01bb 100644 --- a/exchangis-engines/exchangis-engine-server/pom.xml +++ b/exchangis-engines/exchangis-engine-server/pom.xml @@ -3,9 +3,10 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> - exchangis-engines + exchangis com.webank.wedatasphere.exchangis - 1.1.2 + ${revision} + ../../pom.xml 4.0.0 @@ -20,12 +21,12 @@ com.webank.wedatasphere.exchangis exchangis-dao - ${exchangis.version} + ${project.version} com.webank.wedatasphere.exchangis exchangis-engine-core - ${exchangis.version} + ${project.version} diff --git a/exchangis-engines/pom.xml b/exchangis-engines/pom.xml index 7dfac3c51..edec6ab46 100644 --- a/exchangis-engines/pom.xml +++ b/exchangis-engines/pom.xml @@ -5,13 +5,14 @@ 
exchangis com.webank.wedatasphere.exchangis - 1.1.2 + ${revision} + ../pom.xml 4.0.0 exchangis-engines pom - 1.1.2 + ${revision} exchangis-engine-common diff --git a/exchangis-job/exchangis-job-builder/pom.xml b/exchangis-job/exchangis-job-builder/pom.xml index acfbb460a..e2eec71ec 100644 --- a/exchangis-job/exchangis-job-builder/pom.xml +++ b/exchangis-job/exchangis-job-builder/pom.xml @@ -3,9 +3,10 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> - exchangis-job + exchangis com.webank.wedatasphere.exchangis - 1.1.2 + ${revision} + ../../pom.xml 4.0.0 @@ -15,7 +16,7 @@ com.webank.wedatasphere.exchangis exchangis-job-common - 1.1.2 + ${project.version} com.google.code.gson @@ -25,7 +26,7 @@ com.webank.wedatasphere.exchangis exchangis-datasource-service - 1.1.2 + ${project.version} compile diff --git a/exchangis-job/exchangis-job-common/pom.xml b/exchangis-job/exchangis-job-common/pom.xml index 1bae2fa82..aba4edd1a 100644 --- a/exchangis-job/exchangis-job-common/pom.xml +++ b/exchangis-job/exchangis-job-common/pom.xml @@ -3,9 +3,10 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> - exchangis-job + exchangis com.webank.wedatasphere.exchangis - 1.1.2 + ${revision} + ../../pom.xml 4.0.0 @@ -15,7 +16,7 @@ com.webank.wedatasphere.exchangis exchangis-dao - 1.1.2 + ${project.version} org.apache.linkis @@ -25,7 +26,7 @@ com.webank.wedatasphere.exchangis exchangis-engine-common - 1.1.2 + ${project.version} org.apache.linkis diff --git a/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/domain/ExchangisJob.java b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/domain/ExchangisJob.java index 3107b81ed..8afd21e49 100644 --- 
a/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/domain/ExchangisJob.java +++ b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/domain/ExchangisJob.java @@ -41,6 +41,7 @@ public interface ExchangisJob extends ExchangisBase{ * @param jobLabels */ void setJobLabels(Map jobLabels); + /** * Create user * @return user name diff --git a/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/domain/SubExchangisJob.java b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/domain/SubExchangisJob.java index 1679e38ff..7812212ac 100644 --- a/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/domain/SubExchangisJob.java +++ b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/domain/SubExchangisJob.java @@ -24,10 +24,10 @@ public class SubExchangisJob extends GenericExchangisJob { public static final String REALM_JOB_DATA_SOURCE = "job.realm.data-source"; - public static final String REALM_JOB_CONTENT_SINK = "job.realm.content.sink"; - public static final String REALM_JOB_CONTENT_SOURCE = "job.realm.content.source"; + public static final String REALM_JOB_CONTENT_SINK = "job.realm.content.sink"; + // public static final String REALM_JOB_COLUMN_MAPPING = "job.realm.column-mappings"; /** @@ -135,6 +135,12 @@ public static class ColumnDefine{ * Column type */ private String type; + + /** + * Raw column type + */ + private String rawType; + /** * Column index */ @@ -144,6 +150,11 @@ public ColumnDefine(){ } + public ColumnDefine(String name, String type){ + this.name = name; + this.type = type; + } + public ColumnDefine(String name, String type, Integer index){ this.name = name; this.type = type; @@ -172,8 +183,61 @@ public Integer getIndex() { public void setIndex(Integer index) { this.index = index; } + + public String getRawType() { + return rawType; + } + + 
public void setRawType(String rawType) { + this.rawType = rawType; + } } + /** + * Column define with precision and scale + */ + public static class DecimalColumnDefine extends ColumnDefine{ + + private static final int DEFAULT_PRECISION = 38; + + private static final int DEFAULT_SCALE = 18; + + /** + * Precision + */ + private int precision = DEFAULT_PRECISION; + + /** + * Scale + */ + private int scale = DEFAULT_SCALE; + + public DecimalColumnDefine(){ + + } + + public DecimalColumnDefine(String name, String type, Integer index, int precision, int scale){ + super(name, type, index); + this.precision = precision; + this.scale = scale; + } + + public int getPrecision() { + return precision; + } + + public void setPrecision(int precision) { + this.precision = precision; + } + + public int getScale() { + return scale; + } + + public void setScale(int scale) { + this.scale = scale; + } + } /** * Column function */ diff --git a/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/domain/params/DefaultJobParam.java b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/domain/params/DefaultJobParam.java index 78d6e3304..7d938ebcd 100644 --- a/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/domain/params/DefaultJobParam.java +++ b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/domain/params/DefaultJobParam.java @@ -19,7 +19,7 @@ public class DefaultJobParam implements JobParam { private Class sourceType = Object.class; - DefaultJobParam(){ + public DefaultJobParam(){ } diff --git a/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/log/LogQuery.java b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/log/LogQuery.java index 0ab7780b9..de19116c9 100644 --- 
a/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/log/LogQuery.java +++ b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/log/LogQuery.java @@ -21,6 +21,11 @@ public class LogQuery { private Integer lastRows; + /** + * Reverse the reader + */ + private boolean enableTail; + public LogQuery(){ } @@ -88,4 +93,13 @@ public List getOnlyKeywordsList(){ public void setOnlyKeywords(String onlyKeywords) { this.onlyKeywords = onlyKeywords; } + + + public boolean isEnableTail() { + return enableTail; + } + + public void setEnableTail(boolean enableTail) { + this.enableTail = enableTail; + } } diff --git a/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/utils/ColumnDefineUtils.java b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/utils/ColumnDefineUtils.java new file mode 100644 index 000000000..6c440f320 --- /dev/null +++ b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/utils/ColumnDefineUtils.java @@ -0,0 +1,40 @@ +package com.webank.wedatasphere.exchangis.job.utils; + +import com.webank.wedatasphere.exchangis.job.domain.SubExchangisJob; +import org.apache.commons.lang3.StringUtils; + +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +/** + * Utils to data column + */ +public class ColumnDefineUtils { + /** + * Pattern of decimal column + */ + public static final Pattern DECIMAL_PATTERN = Pattern.compile("^decimal[((](\\d+)[^,]*?,[^,]*?(\\d+)[))]$"); + + /** + * Get data column + * @param name column name + * @param type column type + * @param index index + * @return data column + */ + public static SubExchangisJob.ColumnDefine getColumn(String name, String type, Integer index){ + if (StringUtils.isNotBlank(type)) { + Matcher decimalMatch = DECIMAL_PATTERN.matcher(type.toLowerCase()); + if (decimalMatch.matches()) { + int precision = 
Integer.parseInt(decimalMatch.group(1)); + int scale = Integer.parseInt(decimalMatch.group(2)); + return new SubExchangisJob.DecimalColumnDefine(name, type, index, precision, scale); + } + } + return new SubExchangisJob.ColumnDefine(name, type, index); + } + + public static SubExchangisJob.ColumnDefine getColumn(String name, String type){ + return getColumn(name, type, null); + } +} diff --git a/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/vo/ExchangisJobVo.java b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/vo/ExchangisJobVo.java index 6bc16b717..bfbf1d957 100644 --- a/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/vo/ExchangisJobVo.java +++ b/exchangis-job/exchangis-job-common/src/main/java/com/webank/wedatasphere/exchangis/job/vo/ExchangisJobVo.java @@ -24,7 +24,6 @@ public class ExchangisJobVo { /** * Project id */ - @NotNull(groups = InsertGroup.class, message = "Project id cannot be null (工程ID不能为空)") private Long projectId; /** diff --git a/exchangis-job/exchangis-job-launcher/pom.xml b/exchangis-job/exchangis-job-launcher/pom.xml index 2375cdea3..b06b04516 100644 --- a/exchangis-job/exchangis-job-launcher/pom.xml +++ b/exchangis-job/exchangis-job-launcher/pom.xml @@ -3,9 +3,10 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> - exchangis-job + exchangis com.webank.wedatasphere.exchangis - 1.1.2 + ${revision} + ../../pom.xml 4.0.0 @@ -15,12 +16,12 @@ com.webank.wedatasphere.exchangis exchangis-job-common - 1.1.2 + ${project.version} com.webank.wedatasphere.exchangis exchangis-job-builder - 1.1.2 + ${project.version} org.apache.linkis @@ -29,4 +30,21 @@ + + + + org.apache.maven.plugins + maven-deploy-plugin + + + + net.alchim31.maven + scala-maven-plugin + + + org.apache.maven.plugins + maven-jar-plugin + + + \ No newline at end of 
file diff --git a/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/entity/LaunchedExchangisJobEntity.java b/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/entity/LaunchedExchangisJobEntity.java index 59b991cd5..bfc9d2a8e 100644 --- a/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/entity/LaunchedExchangisJobEntity.java +++ b/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/entity/LaunchedExchangisJobEntity.java @@ -1,5 +1,6 @@ package com.webank.wedatasphere.exchangis.job.launcher.entity; +import com.webank.wedatasphere.exchangis.common.EnvironmentUtils; import com.webank.wedatasphere.exchangis.job.domain.ExchangisJobEntity; import com.webank.wedatasphere.exchangis.job.launcher.domain.LaunchableExchangisJob; @@ -8,6 +9,7 @@ import java.text.SimpleDateFormat; import java.util.Calendar; import java.util.Date; +import java.util.Optional; /** * Entity to persist the launched job @@ -58,8 +60,10 @@ public LaunchedExchangisJobEntity(LaunchableExchangisJob job){ this.lastUpdateTime = job.getLastUpdateTime(); this.jobExecutionId = job.getJobExecutionId(); SimpleDateFormat simpleDateFormat = new SimpleDateFormat("yyyy-MM-dd"); - this.logPath = this.executeUser + IOUtils.DIR_SEPARATOR_UNIX + + String logPath = this.executeUser + IOUtils.DIR_SEPARATOR_UNIX + simpleDateFormat.format(new Date()) + IOUtils.DIR_SEPARATOR_UNIX + this.jobExecutionId; + logPath = EnvironmentUtils.getServerAddress() + "@" + logPath; + this.logPath = logPath; } public String getJobExecutionId() { return jobExecutionId; diff --git a/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/linkis/LinkisExchangisTaskLauncher.java b/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/linkis/LinkisExchangisTaskLauncher.java 
index d5ef7e8e0..807c1eaaf 100644 --- a/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/linkis/LinkisExchangisTaskLauncher.java +++ b/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/linkis/LinkisExchangisTaskLauncher.java @@ -1,6 +1,7 @@ package com.webank.wedatasphere.exchangis.job.launcher.linkis; -import com.webank.wedatasphere.exchangis.common.linkis.ClientConfiguration; +import com.webank.wedatasphere.exchangis.common.linkis.client.config.ExchangisClientConfig; +import com.webank.wedatasphere.exchangis.common.linkis.client.config.ExchangisClientConfigBuilder; import com.webank.wedatasphere.exchangis.job.enums.EngineTypeEnum; import com.webank.wedatasphere.exchangis.job.launcher.AccessibleLauncherTask; import com.webank.wedatasphere.exchangis.job.launcher.exception.ExchangisTaskLaunchException; @@ -8,22 +9,16 @@ import com.webank.wedatasphere.exchangis.job.launcher.ExchangisTaskLauncher; import com.webank.wedatasphere.exchangis.job.launcher.domain.LaunchableExchangisTask; import com.webank.wedatasphere.exchangis.job.launcher.domain.LaunchedExchangisTask; +import com.webank.wedatasphere.exchangis.job.launcher.linkis.client.ExchangisLaunchClient; import org.apache.commons.lang.StringUtils; -import org.apache.linkis.common.conf.Configuration; -import org.apache.linkis.common.conf.Configuration$; import org.apache.linkis.common.exception.LinkisRetryException; import org.apache.linkis.common.utils.DefaultRetryHandler; import org.apache.linkis.common.utils.RetryHandler; -import org.apache.linkis.computation.client.LinkisJobClient; import org.apache.linkis.computation.client.LinkisJobClient$; -import org.apache.linkis.httpclient.config.ClientConfig; -import org.apache.linkis.httpclient.dws.authentication.TokenAuthenticationStrategy; -import org.apache.linkis.httpclient.dws.config.DWSClientConfig; -import org.apache.linkis.httpclient.dws.config.DWSClientConfigBuilder; 
-import org.apache.linkis.httpclient.dws.config.DWSClientConfigBuilder$; +import org.apache.linkis.computation.client.once.simple.SimpleOnceJobBuilder$; +import java.lang.reflect.Field; import java.util.*; -import java.util.concurrent.TimeUnit; /** * Linkis task launcher @@ -47,23 +42,31 @@ public void init(ExchangisTaskLaunchManager jobLaunchManager) { this.engineVersions.put(EngineTypeEnum.DATAX.name().toLowerCase(), "3.0.0"); RetryHandler retryHandler = new DefaultRetryHandler(){}; retryHandler.addRetryException(LinkisRetryException.class); - ClientConfig clientConfig = DWSClientConfigBuilder$.MODULE$ - .newBuilder() - .setDWSVersion(Configuration.LINKIS_WEB_VERSION().getValue()) - .addServerUrl(ClientConfiguration.LINKIS_SERVER_URL.getValue()) - .connectionTimeout(45000) - .discoveryEnabled(false) - .discoveryFrequency(1, TimeUnit.MINUTES) - .loadbalancerEnabled(false) - .maxConnectionSize(ClientConfiguration.LINKIS_DEFAULT_MAX_CONNECTIONS.getValue()) + ExchangisClientConfigBuilder builder = (ExchangisClientConfigBuilder) ExchangisClientConfig.newBuilder().discoveryEnabled(false) .retryEnabled(true) - .setRetryHandler(retryHandler) - .readTimeout(90000) // We think 90s is enough, if SocketTimeoutException is throw, just set a new clientConfig to modify it. 
- .setAuthenticationStrategy(new TokenAuthenticationStrategy()) - .setAuthTokenKey(TokenAuthenticationStrategy.TOKEN_KEY()) - .setAuthTokenValue(ClientConfiguration.LINKIS_TOKEN_VALUE.getValue()) - .build(); - LinkisJobClient$.MODULE$.config().setDefaultClientConfig((DWSClientConfig) clientConfig); + .setRetryHandler(retryHandler); + ExchangisClientConfig clientConfig = builder.build(); + // Try to set the static method + Class clz = SimpleOnceJobBuilder$.MODULE$.getClass(); + Field field; + boolean setField = false; + try { + field = clz.getDeclaredField(SimpleOnceJobBuilder$.class.getName().replace(".", "$") + "$linkisManagerClient"); + field.setAccessible(true); + try { + ExchangisLaunchClient client = new ExchangisLaunchClient(clientConfig); + field.set(SimpleOnceJobBuilder$.MODULE$, client); + Runtime.getRuntime().addShutdownHook(new Thread(client::close)); + setField = true; + } catch (IllegalAccessException e) { + // Ignore + } + } catch (NoSuchFieldException e) { + // Ignore + } + if (!setField){ + LinkisJobClient$.MODULE$.config().setDefaultClientConfig(clientConfig); + } } @Override @@ -107,4 +110,6 @@ private Map convertJobInfoToStore(Map jobInfo){ }); return storeInfo; } + } + diff --git a/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/linkis/LinkisLauncherTask.java b/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/linkis/LinkisLauncherTask.java index 408fdc43b..c51a1293c 100644 --- a/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/linkis/LinkisLauncherTask.java +++ b/exchangis-job/exchangis-job-launcher/src/main/java/com/webank/wedatasphere/exchangis/job/launcher/linkis/LinkisLauncherTask.java @@ -1,5 +1,6 @@ package com.webank.wedatasphere.exchangis.job.launcher.linkis; +import com.webank.wedatasphere.exchangis.datasource.core.utils.Json; import 
com.webank.wedatasphere.exchangis.job.launcher.exception.ExchangisTaskLaunchException; import com.webank.wedatasphere.exchangis.job.launcher.exception.ExchangisTaskNotExistException; import com.webank.wedatasphere.exchangis.job.launcher.AccessibleLauncherTask; @@ -9,7 +10,6 @@ import com.webank.wedatasphere.exchangis.job.launcher.domain.task.TaskProgressInfo; import com.webank.wedatasphere.exchangis.job.launcher.domain.task.TaskStatus; import org.apache.commons.lang.StringUtils; -import org.apache.linkis.common.utils.Utils; import org.apache.linkis.computation.client.LinkisJobBuilder; import org.apache.linkis.computation.client.LinkisJobClient; import org.apache.linkis.computation.client.once.SubmittableOnceJob; @@ -35,6 +35,8 @@ public class LinkisLauncherTask implements AccessibleLauncherTask { private static final Logger LOG = LoggerFactory.getLogger(LinkisLauncherTask.class); + private static final String METRIC_NAME = "ecMetrics"; + /** * Engine versions */ @@ -162,6 +164,13 @@ public Map getMetricsInfo() throws ExchangisTaskLaunchException // Init the error count this.reqError.set(0); return metrics; + }else { + // Try to get metric from job info + Map jobInfo = getJobInfo(false); + Object metric = jobInfo.get(METRIC_NAME); + if (Objects.nonNull(metric)){ + return Json.fromJson(String.valueOf(metric), Map.class); + } } }catch(Exception e){ dealException(e); @@ -222,13 +231,14 @@ public LogResult queryLogs(LogQuery query) throws ExchangisTaskLaunchException { // The logOperator is not thread safe, so create it each time if (Objects.nonNull(this.onceJob)){ try{ - EngineConnLogOperator logOperator = (EngineConnLogOperator) this.onceJob.getOperator(EngineConnLogOperator.OPERATOR_NAME()); + LaunchTaskLogOperator logOperator = (LaunchTaskLogOperator) this.onceJob.getOperator(LaunchTaskLogOperator.OPERATOR_NAME()); logOperator.setFromLine(query.getFromLine()); logOperator.setPageSize(query.getPageSize()); logOperator.setEngineConnType(this.engineConn); 
logOperator.setECMServiceInstance(this.onceJob.getECMServiceInstance(this.jobInfo)); logOperator.setIgnoreKeywords(query.getIgnoreKeywords()); logOperator.setOnlyKeywords(query.getOnlyKeywords()); + logOperator.setEnableTail(query.isEnableTail()); if (Objects.nonNull(query.getLastRows())){ logOperator.setLastRows(query.getLastRows()); } diff --git a/exchangis-job/exchangis-job-launcher/src/main/scala/com/webank/wedatasphere/exchangis/job/launcher/linkis/LaunchTaskLogOperator.scala b/exchangis-job/exchangis-job-launcher/src/main/scala/com/webank/wedatasphere/exchangis/job/launcher/linkis/LaunchTaskLogOperator.scala new file mode 100644 index 000000000..c31911609 --- /dev/null +++ b/exchangis-job/exchangis-job-launcher/src/main/scala/com/webank/wedatasphere/exchangis/job/launcher/linkis/LaunchTaskLogOperator.scala @@ -0,0 +1,31 @@ +package com.webank.wedatasphere.exchangis.job.launcher.linkis + +import org.apache.linkis.computation.client.once.action.EngineConnOperateAction +import org.apache.linkis.computation.client.operator.impl.EngineConnLogOperator + +/** + * Enable to reverse read log file + */ +class LaunchTaskLogOperator extends EngineConnLogOperator{ + + private var enableTail: Boolean = false + + def setEnableTail(enableTail: Boolean): Unit = { + this.enableTail = enableTail + } + + def isEnableTail: Boolean = { + this.enableTail + } + + protected override def addParameters(builder: EngineConnOperateAction.Builder): Unit = { + super.addParameters(builder) + builder.operatorName(EngineConnLogOperator.OPERATOR_NAME) + builder.addParameter("enableTail", enableTail) + } + + override def getName: String = LaunchTaskLogOperator.OPERATOR_NAME +} +object LaunchTaskLogOperator { + val OPERATOR_NAME = "launchTaskLog" +} \ No newline at end of file diff --git a/exchangis-job/exchangis-job-launcher/src/main/scala/com/webank/wedatasphere/exchangis/job/launcher/linkis/client/ExchangisLaunchClient.scala 
b/exchangis-job/exchangis-job-launcher/src/main/scala/com/webank/wedatasphere/exchangis/job/launcher/linkis/client/ExchangisLaunchClient.scala new file mode 100644 index 000000000..e3558dfe9 --- /dev/null +++ b/exchangis-job/exchangis-job-launcher/src/main/scala/com/webank/wedatasphere/exchangis/job/launcher/linkis/client/ExchangisLaunchClient.scala @@ -0,0 +1,50 @@ +package com.webank.wedatasphere.exchangis.job.launcher.linkis.client + +import com.webank.wedatasphere.exchangis.common.linkis.client.ExchangisHttpClient +import com.webank.wedatasphere.exchangis.common.linkis.client.config.ExchangisClientConfig +import org.apache.linkis.common.utils.Utils +import org.apache.linkis.computation.client.once.LinkisManagerClient +import org.apache.linkis.computation.client.once.action.{AskEngineConnAction, CreateEngineConnAction, EngineConnOperateAction, GetEngineConnAction, KillEngineConnAction, LinkisManagerAction} +import org.apache.linkis.computation.client.once.result.{AskEngineConnResult, CreateEngineConnResult, EngineConnOperateResult, GetEngineConnResult, KillEngineConnResult, LinkisManagerResult} +import org.apache.linkis.httpclient.request.Action + +/** + * Exchangis launch client + */ +class ExchangisLaunchClient(clientConfig: ExchangisClientConfig) extends LinkisManagerClient{ + private val dwsHttpClient = new ExchangisHttpClient(clientConfig, "Linkis-Job-Execution-Thread") + + protected def execute[T <: LinkisManagerResult](linkisManagerAction: LinkisManagerAction): T = + linkisManagerAction match { + case action: Action => dwsHttpClient.execute(action).asInstanceOf[T] + } + + override def createEngineConn( + createEngineConnAction: CreateEngineConnAction + ): CreateEngineConnResult = execute(createEngineConnAction) + + override def getEngineConn(getEngineConnAction: GetEngineConnAction): GetEngineConnResult = + execute(getEngineConnAction) + + override def killEngineConn(killEngineConnAction: KillEngineConnAction): KillEngineConnResult = + 
execute(killEngineConnAction) + + override def executeEngineConnOperation( + engineConnOperateAction: EngineConnOperateAction + ): EngineConnOperateResult = { + Utils.tryCatch { + val rs = execute[EngineConnOperateResult](engineConnOperateAction) + rs + } { case e: Exception => + val rs = new EngineConnOperateResult + rs.setIsError(true) + rs.setErrorMsg(e.getMessage) + rs + } + } + + override def close(): Unit = dwsHttpClient.close() + + override def askEngineConn(askEngineConnAction: AskEngineConnAction): AskEngineConnResult = + execute(askEngineConnAction) +} diff --git a/exchangis-job/exchangis-job-metrics/pom.xml b/exchangis-job/exchangis-job-metrics/pom.xml index 9dcdd12fb..7da6f5992 100644 --- a/exchangis-job/exchangis-job-metrics/pom.xml +++ b/exchangis-job/exchangis-job-metrics/pom.xml @@ -3,9 +3,10 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> - exchangis-job + exchangis com.webank.wedatasphere.exchangis - 1.1.2 + ${revision} + ../../pom.xml 4.0.0 diff --git a/exchangis-job/exchangis-job-server/pom.xml b/exchangis-job/exchangis-job-server/pom.xml index 60c64165e..0c83c65d4 100644 --- a/exchangis-job/exchangis-job-server/pom.xml +++ b/exchangis-job/exchangis-job-server/pom.xml @@ -3,9 +3,10 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> - exchangis-job + exchangis com.webank.wedatasphere.exchangis - 1.1.2 + ${revision} + ../../pom.xml 4.0.0 @@ -20,22 +21,22 @@ com.webank.wedatasphere.exchangis exchangis-project-provider - 1.1.2 + ${project.version} com.webank.wedatasphere.exchangis exchangis-job-launcher - 1.1.2 + ${project.version} com.webank.wedatasphere.exchangis exchangis-datasource-service - 1.1.2 + ${project.version} com.webank.wedatasphere.exchangis exchangis-engine-core - ${exchangis.version} + ${project.version} @@ -49,6 +50,17 @@ 
mysql-connector-java 5.1.49 + + + org.apache.linkis + linkis-rpc + ${linkis.version} + + + org.modelmapper + modelmapper + 2.4.3 + @@ -56,15 +68,17 @@ org.apache.maven.plugins maven-deploy-plugin + ${maven-deploy-plugin.version} - net.alchim31.maven scala-maven-plugin + ${scala-maven-plugin.version} org.apache.maven.plugins maven-jar-plugin + ${maven-jar-plugin.version} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/ExchangisJobExecuteAutoConfiguration.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/ExchangisJobExecuteAutoConfiguration.java index 0c3391d94..3d2139c68 100644 --- a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/ExchangisJobExecuteAutoConfiguration.java +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/ExchangisJobExecuteAutoConfiguration.java @@ -20,7 +20,7 @@ import com.webank.wedatasphere.exchangis.job.server.execution.subscriber.TaskObserver; import com.webank.wedatasphere.exchangis.job.server.log.DefaultRpcJobLogger; import com.webank.wedatasphere.exchangis.job.server.log.JobLogService; -import com.webank.wedatasphere.exchangis.job.server.log.service.LocalSimpleJobLogService; +import com.webank.wedatasphere.exchangis.job.server.log.service.RpcJobLogService; import com.webank.wedatasphere.exchangis.job.server.utils.SpringContextHolder; import org.apache.linkis.scheduler.Scheduler; import org.apache.linkis.scheduler.executer.ExecutorManager; @@ -50,7 +50,7 @@ public JobLogListener logListener(){ @Bean @ConditionalOnMissingBean(JobLogService.class) public JobLogService jobLogService(){ - return new LocalSimpleJobLogService(); + return new RpcJobLogService(); } /** diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/JobParamConstraints.java 
b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/JobParamConstraints.java index dd79b28c2..64c48831c 100644 --- a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/JobParamConstraints.java +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/JobParamConstraints.java @@ -15,6 +15,12 @@ public class JobParamConstraints { public static final String PASSWORD = "password"; + public static final String APP_ID = "appid"; + + public static final String OBJECT_ID = "objectid"; + + public static final String DK = "dk"; + public static final String DATABASE = "database"; public static final String CONNECT_PARAMS = "params"; @@ -25,6 +31,10 @@ public class JobParamConstraints { public static final String PORT = "port"; + public static final String HTTP_PORT = "http_port"; + + public static final String LOAD_URL = "load_url"; + public static final String SERVICE_NAME = "instance"; public static final String WHERE = "where"; diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/engine/DataxExchangisEngineJobBuilder.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/engine/DataxExchangisEngineJobBuilder.java index 9132100cf..df0ec7131 100644 --- a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/engine/DataxExchangisEngineJobBuilder.java +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/engine/DataxExchangisEngineJobBuilder.java @@ -39,6 +39,7 @@ public class DataxExchangisEngineJobBuilder extends AbstractResourceEngineJobBui static{ //hive use hdfs plugin resource PLUGIN_NAME_MAPPER.put("hive", "hdfs"); + PLUGIN_NAME_MAPPER.put("tdsql", "mysql"); } /** @@ -47,10 +48,12 @@ public class 
DataxExchangisEngineJobBuilder extends AbstractResourceEngineJobBui private static final JobParamDefine COLUMN_MAPPINGS = JobParams.define("column.mappings", job -> { DataxMappingContext mappingContext = new DataxMappingContext(); job.getSourceColumns().forEach(columnDefine -> mappingContext.getSourceColumns().add( - new DataxMappingContext.Column(columnDefine.getName(), columnDefine.getType(), columnDefine.getIndex() + "") + new DataxMappingContext.Column(columnDefine.getName(), columnDefine.getType(), + columnDefine.getRawType(), columnDefine.getIndex() + "") )); job.getSinkColumns().forEach(columnDefine -> mappingContext.getSinkColumns().add( - new DataxMappingContext.Column(columnDefine.getName(), columnDefine.getType(), columnDefine.getIndex() + "") + new DataxMappingContext.Column(columnDefine.getName(), columnDefine.getType(), + columnDefine.getRawType(), columnDefine.getIndex() + "") )); job.getColumnFunctions().forEach(function -> { DataxMappingContext.Transformer.Parameter parameter = new DataxMappingContext.Transformer.Parameter(); diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/engine/DataxMappingContext.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/engine/DataxMappingContext.java index 5bc868162..56e90c863 100644 --- a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/engine/DataxMappingContext.java +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/engine/DataxMappingContext.java @@ -61,6 +61,11 @@ public static class Column{ */ private String type; + /** + * Raw column type + */ + private String rawType; + /** * Index name */ @@ -71,8 +76,13 @@ public Column(){ } public Column(String name, String type, String index){ + this(name, type, null, index); + } + + public Column(String name, String type, String rawType, 
String index){ this.name = name; this.type = type; + this.rawType = rawType; this.index = index; } public String getName() { @@ -98,6 +108,14 @@ public String getIndex() { public void setIndex(String index) { this.index = index; } + + public String getRawType() { + return rawType; + } + + public void setRawType(String rawType) { + this.rawType = rawType; + } } /** diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/TransformExchangisJob.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/TransformExchangisJob.java index f2fb0ce24..a9798b845 100644 --- a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/TransformExchangisJob.java +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/TransformExchangisJob.java @@ -168,7 +168,13 @@ private void convertContentToParams(ExchangisJobInfoContent content){ ExchangisJobParamsContent.ExchangisJobParamsItem::getConfigValue)); }); if(Objects.nonNull(paramSet)) { - this.sourceType = resolveDataSourceId(content.getDataSources().getSourceId(), paramSet); + String sourceId = content.getDataSources().getSourceId(); + if (StringUtils.isNotBlank(sourceId)){ + this.sourceType = resolveDataSourceId(content.getDataSources().getSourceId(), paramSet); + } else { + + } + } } @@ -183,7 +189,12 @@ private void convertContentToParams(ExchangisJobInfoContent content){ ExchangisJobParamsContent.ExchangisJobParamsItem::getConfigValue)); }); if(Objects.nonNull(paramSet)) { - this.sinkType = resolveDataSourceId(content.getDataSources().getSinkId(), paramSet); + String sinkId = content.getDataSources().getSinkId(); + if (StringUtils.isNotBlank(sinkId)){ + this.sinkType = resolveDataSourceId(content.getDataSources().getSinkId(), paramSet); + } else { + + } } } } diff --git 
a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/AbstractLoggingSubExchangisJobHandler.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/AbstractLoggingSubExchangisJobHandler.java index 1d190cbaa..568270548 100644 --- a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/AbstractLoggingSubExchangisJobHandler.java +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/AbstractLoggingSubExchangisJobHandler.java @@ -9,6 +9,7 @@ import com.webank.wedatasphere.exchangis.job.server.builder.SpringExchangisJobBuilderContext; import org.apache.linkis.common.exception.ErrorException; +import java.util.List; import java.util.Objects; import java.util.Optional; @@ -23,6 +24,7 @@ public abstract class AbstractLoggingSubExchangisJobHandler implements SubExchan public final void handleSource(SubExchangisJob subExchangisJob, ExchangisJobBuilderContext ctx) throws ErrorException { wrapFuncWithContext(ctx, () -> { try { + handleSrcColumns(subExchangisJob, ctx, subExchangisJob.getSourceColumns()); handleJobSource(subExchangisJob, ctx); }catch (ErrorException e){ throw new ExchangisJobException.Runtime(-1, "Exception in handling job source parameters", e); @@ -34,6 +36,7 @@ public final void handleSource(SubExchangisJob subExchangisJob, ExchangisJobBuil public final void handleSink(SubExchangisJob subExchangisJob, ExchangisJobBuilderContext ctx) throws ErrorException { wrapFuncWithContext(ctx, () -> { try { + handleSinkColumns(subExchangisJob, ctx, subExchangisJob.getSinkColumns()); handleJobSink(subExchangisJob, ctx); } catch (ErrorException e) { throw new ExchangisJobException.Runtime(-1, "Exception in handling job sink parameters", e); @@ -68,6 +71,27 @@ private void 
wrapFuncWithContext(ExchangisJobBuilderContext context, Runnable ru } } + + /** + * Handle source columns + * @param columns columns + */ + protected void handleSrcColumns(SubExchangisJob subExchangisJob, ExchangisJobBuilderContext ctx, + List columns) { + // Empty + } + + /** + * Handle sink columns + * @param columns columns + */ + protected void handleSinkColumns(SubExchangisJob subExchangisJob, ExchangisJobBuilderContext ctx, + List columns){ + // Empty + } + + + /** * handle job source params * @param subExchangisJob sub exchangis job @@ -82,6 +106,10 @@ private void wrapFuncWithContext(ExchangisJobBuilderContext context, Runnable ru */ public abstract void handleJobSink(SubExchangisJob subExchangisJob, ExchangisJobBuilderContext ctx) throws ErrorException; + public void preHandleJobParamSet(JobParamSet paramSet) { + // Empty + } + /** * Warn message * @param message message @@ -113,4 +141,5 @@ public static T getBean(Class clazz){ protected static SpringExchangisJobBuilderContext getJobBuilderContext(){ return springContext.get(); } + } diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/MySQLDataxSubExchangisJobHandler.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/MySQLDataxSubExchangisJobHandler.java index a7ec7b9cd..873cdf9b9 100644 --- a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/MySQLDataxSubExchangisJobHandler.java +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/MySQLDataxSubExchangisJobHandler.java @@ -80,6 +80,7 @@ public class MySQLDataxSubExchangisJobHandler extends AuthEnabledSubExchangisJob @Override public void handleJobSource(SubExchangisJob subExchangisJob, ExchangisJobBuilderContext ctx) throws ErrorException { JobParamSet 
paramSet = subExchangisJob.getRealmParams(SubExchangisJob.REALM_JOB_CONTENT_SOURCE); + preHandleJobParamSet(paramSet); if (Objects.nonNull(paramSet)){ Arrays.asList(sourceMappings()).forEach(define -> paramSet.addNonNull(define.get(paramSet))); paramSet.add(QUERY_SQL.newParam(subExchangisJob)); @@ -89,6 +90,7 @@ public void handleJobSource(SubExchangisJob subExchangisJob, ExchangisJobBuilder @Override public void handleJobSink(SubExchangisJob subExchangisJob, ExchangisJobBuilderContext ctx) throws ErrorException { JobParamSet paramSet = subExchangisJob.getRealmParams(SubExchangisJob.REALM_JOB_CONTENT_SINK); + preHandleJobParamSet(paramSet); if (Objects.nonNull(paramSet)){ Arrays.asList(sinkMappings()).forEach(define -> paramSet.addNonNull(define.get(paramSet))); paramSet.add(SQL_COLUMN.newParam(subExchangisJob)); @@ -105,13 +107,13 @@ public boolean acceptEngine(String engineType) { return "datax".equalsIgnoreCase(engineType); } - private JobParamDefine[] sourceMappings(){ + protected JobParamDefine[] sourceMappings(){ return new JobParamDefine[]{USERNAME, PASSWORD, SOURCE_DATABASE, SOURCE_HOST, SOURCE_PORT, SOURCE_PARAMS_MAP}; } - public JobParamDefine[] sinkMappings(){ - return new JobParamDefine[]{USERNAME, PASSWORD, SINK_DATABASE, SINK_TABLE, - SINK_HOST, SINK_PORT, SINK_PARAMS_MAP}; + protected JobParamDefine[] sinkMappings(){ + return new JobParamDefine[]{SINK_HOST, SINK_PORT, USERNAME, PASSWORD, + SINK_DATABASE, SINK_TABLE, SINK_PARAMS_MAP}; } } diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/StarRocksDataxSubExchangisJobHandler.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/StarRocksDataxSubExchangisJobHandler.java new file mode 100644 index 000000000..93e10e0f3 --- /dev/null +++ 
b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/StarRocksDataxSubExchangisJobHandler.java @@ -0,0 +1,107 @@ +package com.webank.wedatasphere.exchangis.job.server.builder.transform.handlers; + +import com.webank.wedatasphere.exchangis.datasource.core.utils.Json; +import com.webank.wedatasphere.exchangis.job.builder.ExchangisJobBuilderContext; +import com.webank.wedatasphere.exchangis.job.domain.SubExchangisJob; +import com.webank.wedatasphere.exchangis.job.domain.params.JobParam; +import com.webank.wedatasphere.exchangis.job.domain.params.JobParamDefine; +import com.webank.wedatasphere.exchangis.job.domain.params.JobParamSet; +import com.webank.wedatasphere.exchangis.job.domain.params.JobParams; +import com.webank.wedatasphere.exchangis.job.server.builder.JobParamConstraints; +import org.apache.commons.lang3.StringUtils; +import org.apache.linkis.common.exception.ErrorException; + +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.stream.Collectors; + +/** + * StarRocks in datax + */ +public class StarRocksDataxSubExchangisJobHandler extends AuthEnabledSubExchangisJobHandler { + + /** + * Host + */ + private static final JobParamDefine SINK_HOST = JobParams.define("connection[0].host", JobParamConstraints.HOST); + + /** + * TCP_Port + */ + private static final JobParamDefine SINK_PORT = JobParams.define("connection[0].port", JobParamConstraints.PORT); + + /** + * HTTP_Port + */ + private static final JobParamDefine SINK_LOAD_URL = JobParams.define("loadUrl[0]", paramSet -> { + JobParam host = paramSet.get("connection[0].host"); + JobParam httpPort = paramSet.get(JobParamConstraints.HTTP_PORT); + if (Objects.nonNull(host) && StringUtils.isNotBlank(host.getValue()) && + Objects.nonNull(httpPort) && StringUtils.isNotBlank(httpPort.getValue())) { + return host.getValue() + ":" + httpPort.getValue(); + } + return null; + }); + + 
/** + * Database + */ + private static final JobParamDefine SINK_DATABASE = JobParams.define("database", JobParamConstraints.DATABASE); + + /** + * Table + */ + private static final JobParamDefine SINK_TABLE = JobParams.define("table", JobParamConstraints.TABLE); + + /** + * Connect params + */ + private static final JobParamDefine> SINK_PARAMS_MAP = JobParams.define("connection[0].connParams", JobParamConstraints.CONNECT_PARAMS, + connectParams -> Json.fromJson(connectParams, Map.class), String.class); + + /** + * SQL column + */ + private static final JobParamDefine> SQL_COLUMN = JobParams.define("column", job -> { + List columns = job.getSinkColumns().stream().map(SubExchangisJob.ColumnDefine::getName).collect(Collectors.toList()); + if (columns.isEmpty()){ + columns.add("*"); + } + return columns; + }, SubExchangisJob.class); + + @Override + public void handleJobSource(SubExchangisJob subExchangisJob, ExchangisJobBuilderContext ctx) throws ErrorException { + } + + @Override + public void handleJobSink(SubExchangisJob subExchangisJob, ExchangisJobBuilderContext ctx) throws ErrorException { + JobParamSet paramSet = subExchangisJob.getRealmParams(SubExchangisJob.REALM_JOB_CONTENT_SINK); + if (Objects.nonNull(paramSet)){ + JobParamDefine[] jobParamDefines = sinkMappings(); + Arrays.asList(jobParamDefines).forEach( + define -> paramSet.addNonNull(define.get(paramSet)) + ); + } + } + + @Override + public String dataSourceType() { + return "starrocks"; + } + + @Override + public boolean acceptEngine(String engineType) { + return "datax".equalsIgnoreCase(engineType); + } + + private JobParamDefine[] sourceMappings(){ + return null; + } + + public JobParamDefine[] sinkMappings(){ + return new JobParamDefine[]{USERNAME, PASSWORD, SINK_HOST, SINK_PORT, SINK_LOAD_URL, SINK_DATABASE, SINK_TABLE, SINK_PARAMS_MAP}; + } +} diff --git 
a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/column/AutoColumnSubExchangisJobHandler.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/column/AutoColumnSubExchangisJobHandler.java new file mode 100644 index 000000000..416886004 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/column/AutoColumnSubExchangisJobHandler.java @@ -0,0 +1,148 @@ +package com.webank.wedatasphere.exchangis.job.server.builder.transform.handlers.column; + +import com.webank.wedatasphere.exchangis.datasource.core.domain.MetaColumn; +import com.webank.wedatasphere.exchangis.datasource.core.exception.ExchangisDataSourceException; +import com.webank.wedatasphere.exchangis.datasource.core.service.MetadataInfoService; +import com.webank.wedatasphere.exchangis.job.builder.ExchangisJobBuilderContext; +import com.webank.wedatasphere.exchangis.job.domain.SubExchangisJob; +import com.webank.wedatasphere.exchangis.job.domain.params.JobParam; +import com.webank.wedatasphere.exchangis.job.domain.params.JobParamDefine; +import com.webank.wedatasphere.exchangis.job.domain.params.JobParamSet; +import com.webank.wedatasphere.exchangis.job.domain.params.JobParams; +import com.webank.wedatasphere.exchangis.job.exception.ExchangisJobException; +import com.webank.wedatasphere.exchangis.job.server.builder.JobParamConstraints; +import com.webank.wedatasphere.exchangis.job.server.builder.transform.handlers.AbstractLoggingSubExchangisJobHandler; +import com.webank.wedatasphere.exchangis.job.utils.ColumnDefineUtils; +import org.apache.commons.lang.StringUtils; +import org.apache.linkis.common.exception.ErrorException; + +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.stream.Collectors; + +/** + * 
Provide method to autofill columns + */ +public abstract class AutoColumnSubExchangisJobHandler extends AbstractLoggingSubExchangisJobHandler { + /** + * Auto type name + */ + private static final String AUTO_TYPE = "[Auto]"; + + /** + * Database + */ + private static final JobParamDefine DATABASE = JobParams.define(JobParamConstraints.DATABASE); + + /** + * Table + */ + private static final JobParamDefine TABLE = JobParams.define(JobParamConstraints.TABLE); + + @Override + public void handleJobSource(SubExchangisJob subExchangisJob, ExchangisJobBuilderContext ctx) throws ErrorException { + // Ignore + } + + @Override + public void handleJobSink(SubExchangisJob subExchangisJob, ExchangisJobBuilderContext ctx) throws ErrorException { + // Ignore + } + + /** + * Handle source columns + * @param columns columns + */ + protected void handleSrcColumns(SubExchangisJob subExchangisJob, ExchangisJobBuilderContext ctx, + List columns) { + if (autoColumn()){ + boolean complete = Objects.nonNull(columns) && columns.size() > 0 && + columns.stream().noneMatch(column -> StringUtils.isBlank(column.getType()) || column.getType().equals(AUTO_TYPE) || null == column.getIndex()); + if (!complete){ + JobParamSet paramSet = subExchangisJob.getRealmParams(SubExchangisJob.REALM_JOB_CONTENT_SOURCE); + doFillColumns(paramSet, columns); + } + } + } + + /** + * Handle sink columns + * @param columns columns + */ + protected void handleSinkColumns(SubExchangisJob subExchangisJob, ExchangisJobBuilderContext ctx, + List columns){ + if (autoColumn()){ + boolean complete = Objects.nonNull(columns) && columns.size() > 0 && + columns.stream().noneMatch(column -> StringUtils.isBlank(column.getType()) || column.getType().equals(AUTO_TYPE)); + if (!complete){ + JobParamSet paramSet = subExchangisJob.getRealmParams(SubExchangisJob.REALM_JOB_CONTENT_SINK); + doFillColumns(paramSet, columns); + } + } + } + + + /** + * Do fill column + * @param columns columns + */ + protected void 
doFillColumns(JobParamSet paramSet, List columns){ + List metaColumns = getMetaColumns(paramSet); + if (Objects.nonNull(metaColumns) && !metaColumns.isEmpty()){ + if (columns.size() <= 0){ + for(MetaColumn metaColumn : metaColumns){ + SubExchangisJob.ColumnDefine columnDefine = ColumnDefineUtils + .getColumn(metaColumn.getName(), metaColumn.getType()); + columnDefine.setIndex(metaColumn.getIndex()); + columns.add(columnDefine); + } + } else { + completeColumns(columns, metaColumns); + } + } + } + + /** + * Get columns for metadata server + * @param paramSet param set + * @return columns + */ + protected List getMetaColumns(JobParamSet paramSet){ + String database = DATABASE.getValue(paramSet); + String table = TABLE.getValue(paramSet); + JobParam dataSourceId = paramSet.get(JobParamConstraints.DATA_SOURCE_ID); + try { + return Objects.requireNonNull(getBean(MetadataInfoService.class)).getColumns(getJobBuilderContext().getOriginalJob().getCreateUser(), + Long.valueOf(dataSourceId.getValue()), database, table); + } catch (ExchangisDataSourceException e) { + throw new ExchangisJobException.Runtime(e.getErrCode(), e.getMessage(), e.getCause()); + } + } + /** + * + * @param columns columns + * @param metaColumns meta columns + */ + protected final void completeColumns(List columns, List metaColumns){ + Map metaColumnMap = metaColumns.stream().collect(Collectors.toMap( + MetaColumn::getName, metaColumn -> metaColumn, (left, right) -> left + )); + for (int i = 0; i < columns.size(); i ++){ + SubExchangisJob.ColumnDefine column = columns.get(i); + String name = column.getName(); + MetaColumn metaColumn = metaColumnMap.get(name); + if (Objects.isNull(metaColumn)){ + throw new ExchangisJobException.Runtime(-1, "Unable to find match column: [" + name + "] (表中找不到对应的字段)", null); + } + columns.set(i, ColumnDefineUtils.getColumn(name, metaColumn.getType(), metaColumn.getIndex())); + } + } + + /** + * If auto fill column + * @return bool + */ + protected abstract boolean 
autoColumn(); +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/column/EsAutoColumnJobHandler.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/column/EsAutoColumnJobHandler.java new file mode 100644 index 000000000..dafab1ead --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/column/EsAutoColumnJobHandler.java @@ -0,0 +1,21 @@ +package com.webank.wedatasphere.exchangis.job.server.builder.transform.handlers.column; + +/** + * ES auto column handler + */ +public class EsAutoColumnJobHandler extends AutoColumnSubExchangisJobHandler{ + @Override + public String dataSourceType() { + return "elasticsearch"; + } + + @Override + public int order() { + return 0; + } + + @Override + protected boolean autoColumn() { + return true; + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/column/HiveAutoColumnJobHandler.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/column/HiveAutoColumnJobHandler.java new file mode 100644 index 000000000..2901fb07d --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/column/HiveAutoColumnJobHandler.java @@ -0,0 +1,26 @@ +package com.webank.wedatasphere.exchangis.job.server.builder.transform.handlers.column; + + + +/** + * Hive auto column handler + */ +public class HiveAutoColumnJobHandler extends AutoColumnSubExchangisJobHandler { + + + @Override + public String dataSourceType() { + return "hive"; + } + + @Override + public int order() { + return 0; + } + + @Override + protected boolean autoColumn() { + return true; + } + +} diff --git 
a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/column/MongoAutoColumnJobHandler.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/column/MongoAutoColumnJobHandler.java new file mode 100644 index 000000000..c3abba033 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/column/MongoAutoColumnJobHandler.java @@ -0,0 +1,22 @@ +package com.webank.wedatasphere.exchangis.job.server.builder.transform.handlers.column; + +/** + * Mongo auto column handler + */ +public class MongoAutoColumnJobHandler extends AutoColumnSubExchangisJobHandler{ + + @Override + public String dataSourceType() { + return "mongodb"; + } + + @Override + public int order() { + return 0; + } + + @Override + protected boolean autoColumn() { + return true; + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/column/MySQLAutoColumnJobHandler.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/column/MySQLAutoColumnJobHandler.java new file mode 100644 index 000000000..20ce2de16 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/column/MySQLAutoColumnJobHandler.java @@ -0,0 +1,22 @@ +package com.webank.wedatasphere.exchangis.job.server.builder.transform.handlers.column; + +/** + * Mysql auto column handler + */ +public class MySQLAutoColumnJobHandler extends AutoColumnSubExchangisJobHandler { + @Override + protected boolean autoColumn() { + return false; + } + + @Override + public String dataSourceType() { + return "mysql"; + } + + + @Override + public int order() { + return 0; + } +} diff --git 
a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/column/OracleAutoColumnJobHandler.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/column/OracleAutoColumnJobHandler.java new file mode 100644 index 000000000..22866c48e --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/column/OracleAutoColumnJobHandler.java @@ -0,0 +1,21 @@ +package com.webank.wedatasphere.exchangis.job.server.builder.transform.handlers.column; + +/** + * Oracle auto column handler + */ +public class OracleAutoColumnJobHandler extends AutoColumnSubExchangisJobHandler{ + @Override + public String dataSourceType() { + return "oracle"; + } + + @Override + public int order() { + return 0; + } + + @Override + protected boolean autoColumn() { + return true; + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/column/StarRocksAutoColumnJobHandler.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/column/StarRocksAutoColumnJobHandler.java new file mode 100644 index 000000000..43ea42989 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/handlers/column/StarRocksAutoColumnJobHandler.java @@ -0,0 +1,23 @@ +package com.webank.wedatasphere.exchangis.job.server.builder.transform.handlers.column; + +/** + * StarRocks auto column handler + */ +public class StarRocksAutoColumnJobHandler extends AutoColumnSubExchangisJobHandler{ + + @Override + public String dataSourceType() { + return "starrocks"; + } + + @Override + public int order() { + return 0; + } + + @Override + protected boolean autoColumn() { + return true; + } + +} diff --git 
a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/mappings/HiveDataxParamsMapping.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/mappings/HiveDataxParamsMapping.java index 031425be7..ca3e36cff 100644 --- a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/mappings/HiveDataxParamsMapping.java +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/builder/transform/mappings/HiveDataxParamsMapping.java @@ -34,7 +34,7 @@ private enum Type { /** * types that supported by DataX */ - STRING, LONG, BOOLEAN, DOUBLE, DATE + STRING, LONG, BOOLEAN, DOUBLE, DATE, BINARY, OBJECT } //hive type => dataX type static{ @@ -49,10 +49,10 @@ private enum Type { FIELD_MAP.put("CHAR", Type.STRING); FIELD_MAP.put("VARCHAR", Type.STRING); FIELD_MAP.put("STRUCT", Type.STRING); - FIELD_MAP.put("MAP", Type.STRING); - FIELD_MAP.put("ARRAY", Type.STRING); + FIELD_MAP.put("MAP", Type.OBJECT); + FIELD_MAP.put("ARRAY", Type.OBJECT); FIELD_MAP.put("UNION", Type.STRING); - FIELD_MAP.put("BINARY", Type.STRING); + FIELD_MAP.put("BINARY", Type.BINARY); FIELD_MAP.put("BOOLEAN", Type.BOOLEAN); FIELD_MAP.put("DATE", Type.DATE); FIELD_MAP.put("TIMESTAMP", Type.DATE); @@ -321,6 +321,10 @@ protected Consumer srcColumnMappingFunc() { Type t = FIELD_MAP.get(type.toUpperCase().replaceAll("[(<(][\\s\\S]+", "")); if (null != t){ columnDefine.setType(t.toString()); + if (t == Type.OBJECT){ + // Set the raw column type + columnDefine.setRawType(type); + } } else { columnDefine.setType(Type.STRING.toString()); } diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/AbstractTaskManager.java 
b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/AbstractTaskManager.java index b3a05b8af..e31d3f087 100644 --- a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/AbstractTaskManager.java +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/AbstractTaskManager.java @@ -14,6 +14,8 @@ import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.locks.ReentrantLock; +import java.util.stream.Collectors; /** * Launched task manager @@ -33,7 +35,7 @@ public abstract class AbstractTaskManager implements TaskManager Running task */ - private ConcurrentHashMap runningTasks = new ConcurrentHashMap<>(); + private ConcurrentHashMap runningTasks = new ConcurrentHashMap<>(); /** * job_execution_id => List(Running tasks) @@ -50,18 +52,22 @@ public List getJobExecutionIds(){ @Override public List getRunningTasks() { - return new ArrayList<>(runningTasks.values()); + return runningTasks.values().stream().map(ctx -> + ctx.task).collect(Collectors.toList()); } @Override public void cancelRunningTask(String taskId) { - LaunchedExchangisTask task = runningTasks.get(taskId); - if (Objects.nonNull(task)){ - onEvent(new TaskStatusUpdateEvent(task, TaskStatus.Cancelled)); - info(task, "Status of task: [name: {}, id: {}] change {} => {}", - task.getName(), task.getTaskId(), task.getStatus(), TaskStatus.Cancelled); - JobLogCacheUtils.flush(task.getJobExecutionId(), false); - runningTasks.remove(taskId); + TaskContext context = runningTasks.get(taskId); + if (Objects.nonNull(context)){ + LaunchedExchangisTask task = context.task; + context.access(() -> { + onEvent(new TaskStatusUpdateEvent(task, TaskStatus.Cancelled)); + info(task, "Status of task: [name: {}, id: {}] change {} => {}", + task.getName(), 
task.getTaskId(), task.getStatus(), TaskStatus.Cancelled); + JobLogCacheUtils.flush(task.getJobExecutionId(), false); + runningTasks.remove(taskId); + }); JobWrapper wrapper = jobWrappers.get(task.getJobExecutionId()); if (Objects.nonNull(wrapper)){ wrapper.removeTask(task); @@ -75,7 +81,7 @@ public void addRunningTask(LaunchedExchangisTask task) { task.setRunningTime(Calendar.getInstance().getTime()); onEvent(new TaskLaunchEvent(task)); info(task, "Status of task: [name: {}, id: {}] change to {}, info: [{}]", task.getName(), task.getTaskId(), task.getStatus(), ""); - if (Objects.isNull(runningTasks.putIfAbsent(task.getTaskId(), task))){ + if (Objects.isNull(runningTasks.putIfAbsent(task.getTaskId(), new TaskContext(task)))){ jobWrappers.compute(task.getJobExecutionId(), (jobExecutionId, jobWrapper) -> { if (Objects.nonNull(jobWrapper) && jobWrapper.addTask(task)){ return jobWrapper; @@ -95,12 +101,9 @@ public void removeRunningTask(String taskId) { @Override public boolean refreshRunningTaskMetrics(LaunchedExchangisTask task, Map metricsMap) { - task = runningTasks.get(task.getTaskId()); - if (Objects.nonNull(task)) { - onEvent(new TaskMetricsUpdateEvent(task, metricsMap)); - task.setMetrics(null); - task.setMetricsMap(metricsMap); - trace(task, "Metrics info of task: [{}]", Json.toJson(metricsMap, null)); + TaskContext context = runningTasks.get(task.getTaskId()); + if (Objects.nonNull(context)) { + refreshRunningTaskMetrics(context, metricsMap); return true; } return false; @@ -108,37 +111,53 @@ public boolean refreshRunningTaskMetrics(LaunchedExchangisTask task, Map metricsMap) { TaskStatus beforeStatus = task.getStatus(); - if (TaskStatus.isCompleted(status)){ - info(task, "Status of task: [name: {}, id: {}] change {} => {}", - task.getName(), task.getTaskId(), beforeStatus, status); - onEvent(new TaskStatusUpdateEvent(task, status)); - removeRunningTaskInner(task.getTaskId(), false); - return true; - } else { - task = runningTasks.get(task.getTaskId()); - if 
(Objects.nonNull(task) ) { - onEvent(new TaskStatusUpdateEvent(task, status)); - if (isTransition(task, status)) { - info(task, "Status of task: [name: {}, id: {}] change {} => {}", - task.getName(), task.getTaskId(), beforeStatus, status); + TaskContext context = runningTasks.get(task.getTaskId()); + if (Objects.nonNull(context)){ + task = context.task; + LaunchedExchangisTask finalTask = task; + context.access( () -> { + if (Objects.nonNull(metricsMap)){ + refreshRunningTaskMetrics(context, metricsMap); } - task.setStatus(status); - return true; - } - return false; + if (TaskStatus.isCompleted(status)){ + info(finalTask, "Status of task: [name: {}, id: {}] change {} => {}", + finalTask.getName(), finalTask.getTaskId(), beforeStatus, status); + onEvent(new TaskStatusUpdateEvent(finalTask, status)); + removeRunningTaskInner(finalTask.getTaskId(), false); + } else { + onEvent(new TaskStatusUpdateEvent(finalTask, status)); + if (isTransition(finalTask, status)) { + info(finalTask, "Status of task: [name: {}, id: {}] change {} => {}", + finalTask.getName(), finalTask.getTaskId(), beforeStatus, status); + } + } + finalTask.setStatus(status); + }); + return true; } + return false; } @Override public boolean refreshRunningTaskProgress(LaunchedExchangisTask task, TaskProgressInfo progressInfo) { - task = runningTasks.get(task.getTaskId()); - if (Objects.nonNull(task)){ - onEvent(new TaskProgressUpdateEvent(task, progressInfo)); - if (task.getProgress() != progressInfo.getProgress()){ - info(task, "Progress of task: [{}] change {} => {}", task.getTaskId(), task.getProgress(), progressInfo.getProgress()); - } - task.setProgress(progressInfo.getProgress()); + TaskContext context = runningTasks.get(task.getTaskId()); + if (Objects.nonNull(context)){ + task = context.task; + LaunchedExchangisTask finalTask = task; + context.access(() -> { + onEvent(new TaskProgressUpdateEvent(finalTask, progressInfo)); + if (finalTask.getProgress() != progressInfo.getProgress()){ + 
info(finalTask, "Progress of task: [{}] change {} => {}", + finalTask.getTaskId(), finalTask.getProgress(), progressInfo.getProgress()); + } + finalTask.setProgress(progressInfo.getProgress()); + }); return true; } return false; @@ -146,7 +165,8 @@ public boolean refreshRunningTaskProgress(LaunchedExchangisTask task, TaskProgre @Override public LaunchedExchangisTask getRunningTask(String taskId) { - return runningTasks.get(taskId); + TaskContext context = runningTasks.get(taskId); + return context != null ? context.task : null; } public TaskExecutionListener getExecutionListener() { @@ -157,18 +177,38 @@ public void setExecutionListener(TaskExecutionListener executionListener) { this.executionListener = executionListener; } + /** + * Refresh running task metrics + * @param context context + * @param metricsMap metric map + */ + private void refreshRunningTaskMetrics(TaskContext context, Map metricsMap){ + LaunchedExchangisTask finalTask = context.task; + context.access(() -> { + if (!TaskStatus.isCompleted(finalTask.getStatus())) { + onEvent(new TaskMetricsUpdateEvent(finalTask, metricsMap)); + finalTask.setMetrics(null); + finalTask.setMetricsMap(metricsMap); + trace(finalTask, "Metrics info of task: [{}]", Json.toJson(metricsMap, null)); + } + }); + } + /** * Remove inner * @param taskId task id * @param updateStatus if update status */ private void removeRunningTaskInner(String taskId, boolean updateStatus){ - LaunchedExchangisTask task = runningTasks.get(taskId); - if (Objects.nonNull(task)){ - if (updateStatus) { - onEvent(new TaskStatusUpdateEvent(task, task.getStatus())); - } - runningTasks.remove(taskId); + TaskContext context = runningTasks.get(taskId); + if (Objects.nonNull(context)){ + LaunchedExchangisTask task = context.task; + context.access(() -> { + if (updateStatus) { + onEvent(new TaskStatusUpdateEvent(task, task.getStatus())); + } + runningTasks.remove(taskId); + }); JobWrapper wrapper = jobWrappers.get(task.getJobExecutionId()); if 
(Objects.nonNull(wrapper)){ wrapper.removeTask(task); @@ -199,6 +239,30 @@ public JobLogEvent getJobLogEvent(JobLogEvent.Level level, LaunchedExchangisTask return new JobLogEvent(level, task.getExecuteUser(), task.getJobExecutionId(), message, args); } + private static class TaskContext{ + /** + * Access lock + */ + private final ReentrantLock accessLock = new ReentrantLock(); + + private final LaunchedExchangisTask task; + + public TaskContext(LaunchedExchangisTask task){ + this.task = task; + } + /** + * Access the process + * @param exec exec process + */ + private void access(Runnable exec){ + accessLock.lock(); + try{ + exec.run(); + }finally { + accessLock.unlock(); + } + } + } private class JobWrapper{ /** diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/TaskManager.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/TaskManager.java index 0ffecb43a..80d4bf9de 100644 --- a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/TaskManager.java +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/TaskManager.java @@ -41,6 +41,7 @@ public interface TaskManager extends JobServerLogging metricsMap); + /** * Refresh running task status * @param task @@ -49,6 +50,15 @@ public interface TaskManager extends JobServerLogging metricsMap); + /** * Refresh progress * @param task diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/ExchangisSchedulerTask.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/ExchangisSchedulerTask.java index 904906274..a0ec6eeb4 100644 --- 
a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/ExchangisSchedulerTask.java +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/ExchangisSchedulerTask.java @@ -1,11 +1,12 @@ package com.webank.wedatasphere.exchangis.job.server.execution.scheduler; +import com.webank.wedatasphere.exchangis.job.server.execution.scheduler.priority.PriorityRunnable; import org.apache.linkis.scheduler.queue.SchedulerEvent; /** * Exchangis scheduler task */ -public interface ExchangisSchedulerTask extends SchedulerEvent { +public interface ExchangisSchedulerTask extends PriorityRunnable, SchedulerEvent { /** * Tenancy diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/SchedulerThread.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/SchedulerThread.java index df9717029..b9da86213 100644 --- a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/SchedulerThread.java +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/SchedulerThread.java @@ -1,9 +1,11 @@ package com.webank.wedatasphere.exchangis.job.server.execution.scheduler; +import com.webank.wedatasphere.exchangis.job.server.execution.scheduler.priority.PriorityRunnable; + /** * Define the basic interface of thread in scheduler */ -public interface SchedulerThread extends Runnable{ +public interface SchedulerThread extends PriorityRunnable { /** * Start entrance */ diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/TenancyParallelConsumerManager.java 
b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/TenancyParallelConsumerManager.java index da52b214b..0e2bd5caf 100644 --- a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/TenancyParallelConsumerManager.java +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/TenancyParallelConsumerManager.java @@ -1,6 +1,8 @@ package com.webank.wedatasphere.exchangis.job.server.execution.scheduler; import com.webank.wedatasphere.exchangis.job.server.exception.ExchangisSchedulerException; +import com.webank.wedatasphere.exchangis.job.server.execution.scheduler.priority.PriorityOrderedQueue; +import com.webank.wedatasphere.exchangis.job.server.execution.scheduler.priority.PriorityRunnable; import org.apache.commons.lang.StringUtils; import org.apache.linkis.common.utils.Utils; import org.apache.linkis.scheduler.listener.ConsumerListener; @@ -9,14 +11,10 @@ import org.apache.linkis.scheduler.queue.fifoqueue.FIFOUserConsumer; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import org.springframework.stereotype.Component; - -import javax.annotation.PreDestroy; -import java.util.Map; -import java.util.Objects; -import java.util.Optional; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.ExecutorService; + +import java.util.*; +import java.util.concurrent.*; +import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.locks.ReentrantLock; /** @@ -53,7 +51,7 @@ public ExecutorService getOrCreateExecutorService() { try{ Group group = getSchedulerContext().getOrCreateGroupFactory().getOrCreateGroup(null); if (group instanceof FIFOGroup){ - defaultExecutorService = Utils.newCachedThreadPool(((FIFOGroup) group).getMaxRunningJobs() + + defaultExecutorService = newPriorityThreadPool(((FIFOGroup) group).getMaxRunningJobs() + 
this.initResidentThreads + 1, TenancyParallelGroupFactory.GROUP_NAME_PREFIX + TenancyParallelGroupFactory.DEFAULT_TENANCY + "-Executor-", true); tenancyExecutorServices.put(TenancyParallelGroupFactory.DEFAULT_TENANCY, defaultExecutorService); @@ -128,7 +126,7 @@ protected ExecutorService getOrCreateExecutorService(String groupName){ if (StringUtils.isNotBlank(tenancy)){ return tenancyExecutorServices.computeIfAbsent(tenancy, tenancyName -> { // Use the default value of max running jobs - return Utils.newCachedThreadPool(parallelGroupFactory.getDefaultMaxRunningJobs() + parallelGroupFactory.getParallelPerTenancy(), + return newPriorityThreadPool(parallelGroupFactory.getDefaultMaxRunningJobs() + parallelGroupFactory.getParallelPerTenancy(), TenancyParallelGroupFactory.GROUP_NAME_PREFIX + tenancy + "-Executor-", true); }); } @@ -151,4 +149,34 @@ public void setInitResidentThreads(int initResidentThreads) { public Map getTenancyExecutorServices() { return tenancyExecutorServices; } + + /** + * Create thread pool with priority for tenancy consumer + * @return + */ + private ExecutorService newPriorityThreadPool(int threadNum, String threadName, boolean isDaemon){ + ThreadPoolExecutor threadPool = new ThreadPoolExecutor( + threadNum, + threadNum, + 120L, + TimeUnit.SECONDS, + new PriorityBlockingQueue<>(10 * threadNum, (o1, o2) -> { + int left = o1 instanceof PriorityRunnable ? ((PriorityRunnable) o1).getPriority() : 0; + int right = o2 instanceof PriorityRunnable ? 
((PriorityRunnable) o2).getPriority() : 0; + return right - left; + }), + new ThreadFactory() { + final AtomicInteger num = new AtomicInteger(0); + @Override + public Thread newThread(Runnable r) { + Thread t = new Thread(r); + t.setDaemon(isDaemon); + t.setName(threadName + num.incrementAndGet()); + return t; + } + }); + threadPool.allowCoreThreadTimeOut(true); + return threadPool; + } + } diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/priority/PriorityOrderedQueue.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/priority/PriorityOrderedQueue.java new file mode 100644 index 000000000..4277d768a --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/priority/PriorityOrderedQueue.java @@ -0,0 +1,169 @@ +package com.webank.wedatasphere.exchangis.job.server.execution.scheduler.priority; + +import com.webank.wedatasphere.exchangis.job.utils.SnowFlake; + +import java.util.*; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.PriorityBlockingQueue; +import java.util.concurrent.TimeUnit; +import java.util.function.Consumer; +import java.util.stream.Collectors; + +/** + * Refer to 'PriorityBlockingQueue', + * Use snowflake to generate order number of elements + */ +public class PriorityOrderedQueue extends AbstractQueue + implements BlockingQueue, java.io.Serializable { + + /** + * Priority queue + */ + private final PriorityBlockingQueue priorityQueue; + + /** + /** + * Snowflake context + */ + private final SnowFlake snowFlake; + public PriorityOrderedQueue(int initialCapacity, + Comparator comparator){ + if (Objects.isNull(comparator)){ + this.priorityQueue = new PriorityBlockingQueue<>(initialCapacity, + (left, right) -> (int) (right.seq - left.seq)); + } else { + this.priorityQueue = new 
PriorityBlockingQueue<>(initialCapacity, + (left, right) -> { + int result = comparator.compare(left.element, right.element); + if (result == 0){ + return (int)(left.seq - right.seq); + } + return result; + }); + } + this.snowFlake = new SnowFlake(0, 0, System.currentTimeMillis()); + } + @Override + public Iterator iterator() { + return new Itr(priorityQueue.iterator()); + } + + @Override + public int size() { + return priorityQueue.size(); + } + + @Override + public void put(E e) throws InterruptedException { + offer(e); + } + + @Override + public boolean offer(E e, long timeout, TimeUnit unit) throws InterruptedException { + return offer(e); + } + + @Override + public E take() throws InterruptedException { + Ordered ordered = this.priorityQueue.take(); + return ordered.element; + } + + @Override + public E poll(long timeout, TimeUnit unit) throws InterruptedException { + Ordered ordered = this.priorityQueue.poll(timeout, unit); + if (null != ordered){ + return ordered.element; + } + return null; + } + + @Override + public int remainingCapacity() { + return this.priorityQueue.remainingCapacity(); + } + + @Override + public int drainTo(Collection c) { + return drainTo(c, Integer.MAX_VALUE); + } + + @Override + @SuppressWarnings("unchecked") + public int drainTo(Collection c, int maxElements) { + Collection collection = null; + if (null != c && c != this){ + collection = c.stream().map(e -> new Ordered((E) e)).collect(Collectors.toList()); + } + return this.priorityQueue.drainTo(collection); + } + + @Override + public boolean offer(E e) { + return this.priorityQueue.offer(new Ordered(e)); + } + + @Override + public E poll() { + Ordered ordered = this.priorityQueue.poll(); + if (null != ordered){ + return ordered.element; + } + return null; + } + + @Override + public E peek() { + Ordered ordered = this.priorityQueue.peek(); + if (null != ordered){ + return ordered.element; + } + return null; + } + + private class Ordered{ + /** + * Seq number + */ + private long seq; 
+ + /** + * Queue element + */ + private E element; + + public Ordered(E element){ + this.seq = snowFlake.nextId(); + this.element = element; + } + } + + private class Itr implements Iterator { + private Iterator innerItr; + public Itr(Iterator iterator){ + innerItr = iterator; + } + + + @Override + public boolean hasNext() { + return innerItr.hasNext(); + } + + @Override + public E next() { + return innerItr.next().element; + } + + @Override + public void remove() { + innerItr.remove(); + } + + @Override + public void forEachRemaining(Consumer action) { + innerItr.forEachRemaining(eOrdered -> + action.accept(eOrdered.element)); + } + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/priority/PriorityRunnable.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/priority/PriorityRunnable.java new file mode 100644 index 000000000..4d8ed3608 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/priority/PriorityRunnable.java @@ -0,0 +1,16 @@ +package com.webank.wedatasphere.exchangis.job.server.execution.scheduler.priority; + +/** + * Runnable with priority + */ +public interface PriorityRunnable extends Runnable{ + + /** + * Default: 1 + * @return value + */ + default int getPriority(){ + return 1; + } + +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/tasks/MetricUpdateSchedulerTask.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/tasks/MetricUpdateSchedulerTask.java index 4fded220d..941d36461 100644 --- a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/tasks/MetricUpdateSchedulerTask.java +++ 
b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/tasks/MetricUpdateSchedulerTask.java @@ -31,6 +31,15 @@ public MetricUpdateSchedulerTask(TaskManager taskManager) this.taskManager = taskManager; } + /** + * High priority to get schedule resource + * @return priority + */ + @Override + public int getPriority() { + return 2; + } + @Override protected void onPoll(LaunchedExchangisTask launchedExchangisTask) throws ExchangisSchedulerException, ExchangisSchedulerRetryException { LOG.trace("Metrics update task: [{}] in scheduler: [{}]", launchedExchangisTask.getTaskId(), getName()); diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/tasks/StatusUpdateSchedulerTask.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/tasks/StatusUpdateSchedulerTask.java index 3bfc64374..4772fdc22 100644 --- a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/tasks/StatusUpdateSchedulerTask.java +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/tasks/StatusUpdateSchedulerTask.java @@ -1,5 +1,6 @@ package com.webank.wedatasphere.exchangis.job.server.execution.scheduler.tasks; +import com.webank.wedatasphere.exchangis.job.launcher.domain.task.TaskStatus; import com.webank.wedatasphere.exchangis.job.launcher.exception.ExchangisTaskLaunchException; import com.webank.wedatasphere.exchangis.job.launcher.AccessibleLauncherTask; import com.webank.wedatasphere.exchangis.job.launcher.domain.LaunchedExchangisTask; @@ -26,6 +27,15 @@ public class StatusUpdateSchedulerTask extends AbstractLoadBalanceSchedulerTask< private TaskManager taskManager; + /** + * High priority to get schedule resource + * @return priority + */ + @Override + public int getPriority() { + 
return 2; + } + public StatusUpdateSchedulerTask(TaskManager taskManager){ this.taskManager = taskManager; } @@ -38,7 +48,13 @@ protected void onPoll(LaunchedExchangisTask launchedExchangisTask) throws Exchan if (Objects.nonNull(progressInfo)){ this.taskManager.refreshRunningTaskProgress(launchedExchangisTask, progressInfo); } - this.taskManager.refreshRunningTaskStatus(launchedExchangisTask, launcherTask.getLocalStatus()); + TaskStatus status = launcherTask.getLocalStatus(); + if (TaskStatus.isCompleted(status)){ + this.taskManager.refreshRunningTaskStatusAndMetrics(launchedExchangisTask, + status, launcherTask.getMetricsInfo()); + } else { + this.taskManager.refreshRunningTaskStatus(launchedExchangisTask, status); + } } catch (ExchangisTaskLaunchException e){ throw new ExchangisSchedulerException("Fail to update status(progress) for task: [" + launchedExchangisTask.getTaskId() + "]", e); } diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/tasks/SubmitSchedulerTask.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/tasks/SubmitSchedulerTask.java index 4a3df1e06..1bfd20dda 100644 --- a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/tasks/SubmitSchedulerTask.java +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/execution/scheduler/tasks/SubmitSchedulerTask.java @@ -34,6 +34,11 @@ public class SubmitSchedulerTask extends AbstractExchangisSchedulerTask implemen private static final Logger LOG = LoggerFactory.getLogger(SubmitSchedulerTask.class); + /** + * Submit parallel limit + */ + private static final AtomicInteger SUBMIT_PARALLEL = new AtomicInteger(0); + private LaunchableExchangisTask launchableExchangisTask; private TaskManager taskManager; @@ -72,6 +77,7 @@ public 
SubmitSchedulerTask(LaunchableExchangisTask task, Callable submi // Ignore } } + // Set max retry } @Override protected void schedule() throws ExchangisSchedulerException, ExchangisSchedulerRetryException { diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/log/rpc/FetchLogRequest.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/log/rpc/FetchLogRequest.java new file mode 100644 index 000000000..18d9a5f8b --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/log/rpc/FetchLogRequest.java @@ -0,0 +1,31 @@ +package com.webank.wedatasphere.exchangis.job.server.log.rpc; + +import com.webank.wedatasphere.exchangis.job.log.LogQuery; +import org.apache.linkis.protocol.message.RequestProtocol; + +/** + * Fetch log request + */ +public class FetchLogRequest extends LogQuery implements RequestProtocol { + + /** + * Log path + */ + private String logPath; + + public FetchLogRequest(LogQuery logQuery, String logPath){ + super(logQuery.getFromLine(), logQuery.getPageSize(), + logQuery.getIgnoreKeywords(), logQuery.getOnlyKeywords(), + logQuery.getLastRows()); + setEnableTail(logQuery.isEnableTail()); + this.logPath = logPath; + } + + public String getLogPath() { + return logPath; + } + + public void setLogPath(String logPath) { + this.logPath = logPath; + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/log/rpc/FetchLogResponse.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/log/rpc/FetchLogResponse.java new file mode 100644 index 000000000..b9dd399b4 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/log/rpc/FetchLogResponse.java @@ -0,0 +1,20 @@ +package com.webank.wedatasphere.exchangis.job.server.log.rpc; + +import 
com.webank.wedatasphere.exchangis.job.log.LogResult; +import org.apache.linkis.protocol.message.RequestProtocol; + +import java.util.List; + +/** + * Extend log result + */ +public class FetchLogResponse extends LogResult implements RequestProtocol { + + public FetchLogResponse(LogResult logResult){ + super(logResult.getEndLine(), logResult.isEnd(), logResult.getLogs()); + } + + public FetchLogResponse(int endLine, boolean isEnd, List logs) { + super(endLine, isEnd, logs); + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/log/rpc/SendLogRequest.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/log/rpc/SendLogRequest.java new file mode 100644 index 000000000..8310dfef0 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/log/rpc/SendLogRequest.java @@ -0,0 +1,57 @@ +package com.webank.wedatasphere.exchangis.job.server.log.rpc; + +import org.apache.linkis.protocol.message.RequestProtocol; + +import java.util.ArrayList; +import java.util.List; + +/** + * Send log request + */ +public class SendLogRequest implements RequestProtocol { + /** + * Exec id + */ + private String jobExecId; + + /** + * Is reached the end of log + */ + private boolean isEnd; + /** + * Log lines + */ + private List logLines = new ArrayList<>(); + + public SendLogRequest(String jobExecId, + boolean isEnd, + List logLines){ + this.jobExecId = jobExecId; + this.isEnd = isEnd; + this.logLines = logLines; + } + + public String getJobExecId() { + return jobExecId; + } + + public void setJobExecId(String jobExecId) { + this.jobExecId = jobExecId; + } + + public List getLogLines() { + return logLines; + } + + public void setLogLines(List logLines) { + this.logLines = logLines; + } + + public boolean isEnd() { + return isEnd; + } + + public void setEnd(boolean end) { + isEnd = end; + } +} diff --git 
a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/log/service/AbstractJobLogService.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/log/service/AbstractJobLogService.java new file mode 100644 index 000000000..cb47f18c4 --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/log/service/AbstractJobLogService.java @@ -0,0 +1,159 @@ +package com.webank.wedatasphere.exchangis.job.server.log.service; + +import com.google.common.cache.*; +import com.webank.wedatasphere.exchangis.job.launcher.entity.LaunchedExchangisJobEntity; +import com.webank.wedatasphere.exchangis.job.log.LogQuery; +import com.webank.wedatasphere.exchangis.job.log.LogResult; +import com.webank.wedatasphere.exchangis.job.server.mapper.LaunchedJobDao; +import com.webank.wedatasphere.exchangis.job.server.exception.ExchangisJobServerException; +import com.webank.wedatasphere.exchangis.job.server.execution.scheduler.AbstractExchangisSchedulerTask; +import com.webank.wedatasphere.exchangis.job.server.log.JobLogService; +import com.webank.wedatasphere.exchangis.job.server.log.cache.AbstractJobLogCache; +import com.webank.wedatasphere.exchangis.job.server.log.cache.JobLogCache; +import org.apache.commons.io.FileUtils; +import org.apache.commons.io.IOUtils; +import org.apache.commons.lang.StringUtils; +import org.apache.linkis.common.conf.CommonVars; +import org.apache.linkis.common.utils.Utils; +import org.apache.linkis.scheduler.Scheduler; +import org.apache.linkis.scheduler.queue.JobInfo; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import javax.annotation.PostConstruct; +import javax.annotation.PreDestroy; +import javax.annotation.Resource; +import java.io.File; +import java.io.IOException; +import java.io.RandomAccessFile; +import java.nio.charset.Charset; +import java.nio.charset.StandardCharsets; +import java.util.*; +import 
java.util.concurrent.ExecutionException; +import java.util.concurrent.TimeUnit; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +import static com.webank.wedatasphere.exchangis.job.exception.ExchangisJobExceptionCode.LOG_OP_ERROR; + +/** + * Abstract Job log service + */ +public abstract class AbstractJobLogService implements JobLogService { + + private static final Logger LOG = LoggerFactory.getLogger(AbstractJobLogService.class); + + protected Cache> cacheHolder; + + private AbstractExchangisSchedulerTask cleaner; + + private volatile boolean cleanerOn; + + protected static class Constraints{ + public static final CommonVars LOG_LOCAL_PATH = CommonVars.apply("wds.exchangis.job.log.local.path", "/data/bdp/dss/exchangis/main/logs"); + + public static final CommonVars lOG_CACHE_SIZE = CommonVars.apply("wds.exchangis.job.log.cache.size", 15); + + public static final CommonVars LOG_CACHE_EXPIRE_TIME_IN_SECONDS = CommonVars.apply("wds.exchangis.job.log.cache.expire.time-in-seconds", 5); + + public static final CommonVars LOG_MULTILINE_PATTERN = CommonVars.apply("wds.exchangis.log.multiline.pattern", "^\\d{4}-\\d{2}-\\d{2}\\s+\\d{2}:\\d{2}:\\d{2}\\.\\d{3}"); + } + + @Resource + protected Scheduler scheduler; + + @Resource + private LaunchedJobDao launchedJobDao; + @PostConstruct + public void init(){ + cleanerOn = true; + cacheHolder = CacheBuilder.newBuilder().maximumSize(Constraints.lOG_CACHE_SIZE.getValue()) + .expireAfterAccess(Constraints.LOG_CACHE_EXPIRE_TIME_IN_SECONDS.getValue(), TimeUnit.SECONDS) + .removalListener((RemovalListener>) removalNotification -> { + // Flush for expired + if (removalNotification.getCause() == RemovalCause.EXPIRED){ + removalNotification.getValue().flushCache(true); + } + }) + .build(); + cleaner = new AbstractExchangisSchedulerTask("Job-Log-Cache-Cleaner") { + @Override + public String getTenancy() { + return "log"; + } + + @Override + public String getName() { + return getId(); + } + + @Override + public JobInfo 
getJobInfo() { + return null; + } + + @Override + protected void schedule() { + while(cleanerOn){ + try { + Thread.sleep(Constraints.LOG_CACHE_EXPIRE_TIME_IN_SECONDS.getValue()); + //Just invoke the auto cleaner + cacheHolder.get("log", () -> null); + } catch (Exception e){ + //Ignore + } + } + } + }; + scheduler.submit(cleaner); + } + + @PreDestroy + public void destroy(){ + this.cleanerOn = false; + if (Objects.nonNull(this.cleaner.future())){ + this.cleaner.future().cancel(true); + } + } + + @Override + public LogResult logsFromPage( String jobExecId, LogQuery logQuery) { + LaunchedExchangisJobEntity launchedExchangisJob = launchedJobDao.searchLogPathInfo(jobExecId); + return logsFromPageAndPath(launchedExchangisJob.getLogPath(), logQuery); + } + + @Override + public void appendLog(String tenancy, String jobExecId, List logs) { + appendLog(jobExecId, logs); + } + + @Override + public void appendLog(String jobExecId, List logs) { + JobLogCache cache = getOrCreateLogCache(jobExecId); + logs.forEach(cache ::cacheLog); + } + + + @Override + public JobLogCache getOrCreateLogCache(String jobExecId){ + try { + return cacheHolder.get(jobExecId, () -> { + LaunchedExchangisJobEntity launchedExchangisJob = launchedJobDao.searchLogPathInfo(jobExecId); + if (Objects.nonNull(launchedExchangisJob)) { + return loadJobLogCache(jobExecId, launchedExchangisJob); + } + return null; + }); + } catch (ExecutionException e) { + throw new ExchangisJobServerException.Runtime(LOG_OP_ERROR.getCode(),"Fail to create the job log cache of [" + jobExecId +"]", e); + } + } + + /** + * Load job log cache + * @param launchedExchangisJob job + * @return log cache + */ + protected abstract AbstractJobLogCache loadJobLogCache(String jobExcId, LaunchedExchangisJobEntity launchedExchangisJob) + throws Exception; +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/log/service/LocalSimpleJobLogService.java 
b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/log/service/LocalSimpleJobLogService.java deleted file mode 100644 index 1eea657ce..000000000 --- a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/log/service/LocalSimpleJobLogService.java +++ /dev/null @@ -1,282 +0,0 @@ -package com.webank.wedatasphere.exchangis.job.server.log.service; - -import com.google.common.cache.*; -import com.webank.wedatasphere.exchangis.job.launcher.entity.LaunchedExchangisJobEntity; -import com.webank.wedatasphere.exchangis.job.log.LogQuery; -import com.webank.wedatasphere.exchangis.job.log.LogResult; -import com.webank.wedatasphere.exchangis.job.server.mapper.LaunchedJobDao; -import com.webank.wedatasphere.exchangis.job.server.exception.ExchangisJobServerException; -import com.webank.wedatasphere.exchangis.job.server.execution.scheduler.AbstractExchangisSchedulerTask; -import com.webank.wedatasphere.exchangis.job.server.log.JobLogService; -import com.webank.wedatasphere.exchangis.job.server.log.cache.AbstractJobLogCache; -import com.webank.wedatasphere.exchangis.job.server.log.cache.JobLogCache; -import org.apache.commons.io.FileUtils; -import org.apache.commons.io.IOUtils; -import org.apache.commons.lang.StringUtils; -import org.apache.linkis.common.conf.CommonVars; -import org.apache.linkis.common.utils.Utils; -import org.apache.linkis.scheduler.Scheduler; -import org.apache.linkis.scheduler.queue.JobInfo; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import javax.annotation.PostConstruct; -import javax.annotation.PreDestroy; -import javax.annotation.Resource; -import java.io.File; -import java.io.IOException; -import java.io.RandomAccessFile; -import java.nio.charset.Charset; -import java.nio.charset.StandardCharsets; -import java.util.*; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.TimeUnit; -import java.util.regex.Matcher; -import 
java.util.regex.Pattern; - -import static com.webank.wedatasphere.exchangis.job.exception.ExchangisJobExceptionCode.LOG_OP_ERROR; - -/** - * Just store the log into the local - */ -public class LocalSimpleJobLogService implements JobLogService { - - private static final Logger LOG = LoggerFactory.getLogger(LocalSimpleJobLogService.class); - - private Cache> cacheHolder; - - private AbstractExchangisSchedulerTask cleaner; - - private volatile boolean cleanerOn; - - private static class Constraints{ - public static final CommonVars LOG_LOCAL_PATH = CommonVars.apply("wds.exchangis.job.log.local.path", "/data/bdp/dss/exchangis/main/logs"); - - public static final CommonVars lOG_CACHE_SIZE = CommonVars.apply("wds.exchangis.job.log.cache.size", 15); - - public static final CommonVars LOG_CACHE_EXPIRE_TIME_IN_SECONDS = CommonVars.apply("wds.exchangis.job.log.cache.expire.time-in-seconds", 5); - - public static final CommonVars LOG_MULTILINE_PATTERN = CommonVars.apply("wds.exchangis.log.multiline.pattern", "^\\d{4}-\\d{2}-\\d{2}\\s+\\d{2}:\\d{2}:\\d{2}\\.\\d{3}"); - } - - @Resource - private Scheduler scheduler; - - @Resource - private LaunchedJobDao launchedJobDao; - @PostConstruct - public void init(){ - cleanerOn = true; - cacheHolder = CacheBuilder.newBuilder().maximumSize(Constraints.lOG_CACHE_SIZE.getValue()) - .expireAfterAccess(Constraints.LOG_CACHE_EXPIRE_TIME_IN_SECONDS.getValue(), TimeUnit.SECONDS) - .removalListener((RemovalListener>) removalNotification -> { - // Flush for expired - if (removalNotification.getCause() == RemovalCause.EXPIRED){ - removalNotification.getValue().flushCache(true); - } - }) - .build(); - cleaner = new AbstractExchangisSchedulerTask("Job-Log-Cache-Cleaner") { - @Override - public String getTenancy() { - return "log"; - } - - @Override - public String getName() { - return getId(); - } - - @Override - public JobInfo getJobInfo() { - return null; - } - - @Override - protected void schedule() { - while(cleanerOn){ - try { - 
Thread.sleep(Constraints.LOG_CACHE_EXPIRE_TIME_IN_SECONDS.getValue()); - //Just invoke the auto cleaner - cacheHolder.get("log", () -> null); - } catch (Exception e){ - //Ignore - } - } - } - }; - scheduler.submit(cleaner); - } - - @PreDestroy - public void destroy(){ - this.cleanerOn = false; - if (Objects.nonNull(this.cleaner.future())){ - this.cleaner.future().cancel(true); - } - } - - @Override - public LogResult logsFromPage( String jobExecId, LogQuery logQuery) { - LaunchedExchangisJobEntity launchedExchangisJob = launchedJobDao.searchLogPathInfo(jobExecId); - return logsFromPageAndPath(launchedExchangisJob.getLogPath(), logQuery); - } - - @Override - public LogResult logsFromPageAndPath(String logPath, LogQuery logQuery) { - String fullPath = Constraints.LOG_LOCAL_PATH.getValue() + IOUtils.DIR_SEPARATOR_UNIX + logPath; - LogResult result = new LogResult(0, false, Collections.emptyList()); - if (!new File(fullPath).exists()){ - return result; - } - if (logQuery.getLastRows() != null && logQuery.getLastRows() > 0){ - return getLastRows(fullPath, logQuery.getLastRows()); - } - RandomAccessFile logReader = null; - try { - logReader = new RandomAccessFile(fullPath, "rw"); - String patternValue = Constraints.LOG_MULTILINE_PATTERN.getValue(); - Pattern linePattern = StringUtils.isNotBlank(patternValue)? 
Pattern.compile(patternValue) : null; - int readLine = 0; - int lineNum = 0; - int skippedLine = 0; - int ignoreLine = 0; - int pageSize = logQuery.getPageSize(); - int fromLine = logQuery.getFromLine(); - List ignoreKeywords = logQuery.getIgnoreKeywordsList(); - List onlyKeywords = logQuery.getOnlyKeywordsList(); - boolean rowIgnore = false; - String line = logReader.readLine(); - List logs = new ArrayList<>(); - while (readLine < pageSize && line != null){ - lineNum += 1; - if (skippedLine < fromLine - 1){ - skippedLine += 1; - } else { - if (rowIgnore) { - if (Objects.nonNull(linePattern)){ - Matcher matcher = linePattern.matcher(line); - if (matcher.find()){ - ignoreLine = 0; - rowIgnore = !isIncludeLine(line, onlyKeywords, ignoreKeywords); - } else { - ignoreLine += 1; - // TODO limit the value of ignoreLine - } - }else{ - rowIgnore = !isIncludeLine(line, onlyKeywords, ignoreKeywords); - } - }else { - rowIgnore = !isIncludeLine(line, onlyKeywords, ignoreKeywords); - } - if (!rowIgnore) { - if (line.contains("password")) { - LOG.info("have error information"); - } - if (!line.contains("password")) { - logs.add(new String(line.getBytes(StandardCharsets.ISO_8859_1), StandardCharsets.UTF_8)); - } - readLine += 1; - } - } - line = logReader.readLine(); - } - result = new LogResult(lineNum, false, logs); - } catch (IOException e) { - throw new ExchangisJobServerException.Runtime(LOG_OP_ERROR.getCode(),"Unable to query the logs from path: [" + logPath + "]", e); - } finally { - if (Objects.nonNull(logReader)) { - try { - logReader.close(); - } catch (IOException e) { - //Ignore - } - } - } - return result; - } - - @Override - public void appendLog(String tenancy, String jobExecId, List logs) { - appendLog(jobExecId, logs); - } - - @Override - public void appendLog(String jobExecId, List logs) { - JobLogCache cache = getOrCreateLogCache(jobExecId); - logs.forEach(cache ::cacheLog); - } - - - private boolean isIncludeLine(String line, List onlyKeywordList, List 
ignoreKeywordList){ - boolean accept = ignoreKeywordList.isEmpty() || ignoreKeywordList.stream().noneMatch(line::contains); - if (accept){ - accept = onlyKeywordList.isEmpty() || onlyKeywordList.stream().anyMatch(line::contains); - } - return accept; - } - - /** - * Get last rows - * @param fullPath full path - * @param lastRows last rows - * @return - */ - private LogResult getLastRows(String fullPath, int lastRows){ - try { - List logs = Arrays.asList(Utils.exec(new String[]{"tail", "-n", lastRows + "", fullPath}, 5000L).split("\n")); - return new LogResult(0, true, logs); - }catch (Exception e){ - throw new ExchangisJobServerException.Runtime(LOG_OP_ERROR.getCode(), "Fail to get last rows from path: [" + fullPath + "]", e); - } - } - @Override - public JobLogCache getOrCreateLogCache(String jobExecId){ - try { - return cacheHolder.get(jobExecId, () -> { - LaunchedExchangisJobEntity launchedExchangisJob = launchedJobDao.searchLogPathInfo(jobExecId); - if (Objects.nonNull(launchedExchangisJob)) { - File logFile = new File(Constraints.LOG_LOCAL_PATH.getValue() + IOUtils.DIR_SEPARATOR_UNIX + - launchedExchangisJob.getLogPath()); - if (!logFile.exists()){ - // Write empty string to create new file - FileUtils.writeStringToFile(logFile, ""); - LOG.info("Create the new job log file: {}", logFile.getAbsolutePath()); - } - RandomAccessFile file = new RandomAccessFile(logFile, "rw"); - // Seek to the end of file - file.seek(file.length()); - return new AbstractJobLogCache(scheduler, 100, 2000) { - @Override - public synchronized void flushCache(boolean isEnd) { - // Store into local path - if (!cacheQueue().isEmpty()) { - try { - List logLines = new ArrayList<>(); - cacheQueue().drainTo(logLines); - for (Object line : logLines) { - file.write(String.valueOf(line).getBytes(Charset.defaultCharset())); - } - } catch (IOException ex) { - LOG.error("Fail to flush the log cache of [" + launchedExchangisJob.getJobExecutionId() + "]", ex); - } - } - if (isEnd) { - 
cacheHolder.invalidate(jobExecId); - try { - file.close(); - } catch (IOException e) { - //Ignore - } - } - } - }; - } - return null; - }); - } catch (ExecutionException e) { - throw new ExchangisJobServerException.Runtime(LOG_OP_ERROR.getCode(),"Fail to create the job log cache of [" + jobExecId +"]", e); - } - } - -} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/log/service/RpcJobLogService.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/log/service/RpcJobLogService.java new file mode 100644 index 000000000..a466d692e --- /dev/null +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/log/service/RpcJobLogService.java @@ -0,0 +1,289 @@ +package com.webank.wedatasphere.exchangis.job.server.log.service; + +import com.webank.wedatasphere.exchangis.common.EnvironmentUtils; +import com.webank.wedatasphere.exchangis.job.launcher.entity.LaunchedExchangisJobEntity; +import com.webank.wedatasphere.exchangis.job.log.LogQuery; +import com.webank.wedatasphere.exchangis.job.log.LogResult; +import com.webank.wedatasphere.exchangis.job.server.exception.ExchangisJobServerException; +import com.webank.wedatasphere.exchangis.job.server.log.cache.AbstractJobLogCache; +import com.webank.wedatasphere.exchangis.job.server.log.cache.JobLogCache; +import com.webank.wedatasphere.exchangis.job.server.log.rpc.FetchLogRequest; +import com.webank.wedatasphere.exchangis.job.server.log.rpc.FetchLogResponse; +import com.webank.wedatasphere.exchangis.job.server.log.rpc.SendLogRequest; +import org.apache.commons.io.FileUtils; +import org.apache.commons.io.IOUtils; +import org.apache.commons.io.input.ReversedLinesFileReader; +import org.apache.commons.lang.StringUtils; +import org.apache.linkis.common.ServiceInstance; +import org.apache.linkis.common.utils.Utils; +import org.apache.linkis.rpc.Sender; +import 
org.apache.linkis.rpc.message.annotation.Receiver; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.IOException; +import java.io.RandomAccessFile; +import java.nio.charset.Charset; +import java.nio.charset.StandardCharsets; +import java.util.*; +import java.util.function.Supplier; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +import static com.webank.wedatasphere.exchangis.job.exception.ExchangisJobExceptionCode.LOG_OP_ERROR; + +/** + * Rpc job log service + */ +public class RpcJobLogService extends AbstractJobLogService{ + + private static final Logger LOG = LoggerFactory.getLogger(RpcJobLogService.class); + + + @Receiver + public void appendLog(SendLogRequest sendLogRequest){ + String jobExecId = sendLogRequest.getJobExecId(); + List logLines = sendLogRequest.getLogLines(); + if (logLines.size() > 0) { + // Two level cache + JobLogCache cache = getOrCreateLogCache(jobExecId); + logLines.forEach(cache :: cacheLog); + if (sendLogRequest.isEnd()){ + cache.flushCache(true); + } + } else if (sendLogRequest.isEnd()){ + Optional.ofNullable(cacheHolder.getIfPresent(jobExecId)).ifPresent( cache -> { + cache.flushCache(true); + }); + } + } + + @Receiver + public FetchLogResponse logsFromPage(FetchLogRequest fetchLogRequest){ + return new FetchLogResponse( + logsFromPageAndPath(fetchLogRequest.getLogPath(), fetchLogRequest)); + } + @Override + protected AbstractJobLogCache loadJobLogCache(String jobExecId, + LaunchedExchangisJobEntity launchedExchangisJob) throws Exception{ + String logPath = launchedExchangisJob.getLogPath(); + int splitPos = logPath.indexOf("@"); + if (splitPos > 0){ + String logAddress = logPath.substring(0, splitPos); + if (!logAddress.equals(EnvironmentUtils.getServerAddress())){ + ServiceInstance instance = ServiceInstance.apply(EnvironmentUtils.getServerName(), logAddress); + return new AbstractJobLogCache(scheduler, 100, 2000) { + @Override + public void flushCache(boolean 
isEnd) { + // Send rpc + if (!cacheQueue().isEmpty()) { + try { + List logLines = new ArrayList<>(); + cacheQueue().drainTo(logLines); + Sender.getSender(instance).send(new SendLogRequest(jobExecId, isEnd, logLines)); + } catch (Exception ex) { + LOG.error("Fail to send the log cache of [" + launchedExchangisJob.getJobExecutionId() + + "] to remote rpc [" + logAddress + "]", ex); + } + } + if (isEnd) { + cacheHolder.invalidate(jobExecId); + } + } + }; + } + logPath = logPath.substring(splitPos + 1); + } + File logFile = new File(Constraints.LOG_LOCAL_PATH.getValue() + IOUtils.DIR_SEPARATOR_UNIX + + logPath); + + if (!logFile.exists()){ + // Write empty string to create new file + FileUtils.writeStringToFile(logFile, ""); + LOG.info("Create the new job log file: {}", logFile.getAbsolutePath()); + } + RandomAccessFile file = new RandomAccessFile(logFile, "rw"); + // Seek to the end of file + file.seek(file.length()); + return new AbstractJobLogCache(scheduler, 100, 2000) { + @Override + public synchronized void flushCache(boolean isEnd) { + // Store into local path + if (!cacheQueue().isEmpty()) { + try { + List logLines = new ArrayList<>(); + cacheQueue().drainTo(logLines); + for (Object line : logLines) { + file.write(String.valueOf(line).getBytes(Charset.defaultCharset())); + } + } catch (IOException ex) { + LOG.error("Fail to flush the log cache of [" + launchedExchangisJob.getJobExecutionId() + "]", ex); + } + } + if (isEnd) { + cacheHolder.invalidate(jobExecId); + try { + file.close(); + } catch (IOException e) { + //Ignore + } + } + } + }; + } + + @Override + public LogResult logsFromPageAndPath(String logPath, LogQuery logQuery) { + int splitPos = logPath.indexOf("@"); + if (splitPos > 0) { + String logAddress = logPath.substring(0, splitPos); + if (!logAddress.equals(EnvironmentUtils.getServerAddress())) { + Object response; + try { + response = Sender.getSender(ServiceInstance.apply(EnvironmentUtils.getServerName(), logAddress)) + .ask(new 
FetchLogRequest(logQuery, logPath)); + } catch (Exception e){ + throw new ExchangisJobServerException.Runtime(LOG_OP_ERROR.getCode(), + "Remote exception in fetching log from: [" + logPath + "]", e); + } + if (response instanceof FetchLogResponse){ + return (LogResult) response; + } + throw new ExchangisJobServerException.Runtime(LOG_OP_ERROR.getCode(),"Unable to fetch log from: [" + logPath + + "], unknown request protocol: [" + response + "]", null); + } + logPath = logPath.substring(splitPos + 1); + } + String fullPath = Constraints.LOG_LOCAL_PATH.getValue() + IOUtils.DIR_SEPARATOR_UNIX + logPath; + LogResult result = new LogResult(0, false, Collections.emptyList()); + if (!new File(fullPath).exists()){ + return result; + } + if (logQuery.getLastRows() != null && logQuery.getLastRows() > 0){ + return getLastRows(fullPath, logQuery.getLastRows()); + } + RandomAccessFile logReader = null; + ReversedLinesFileReader reverseReader = null; + try { + String patternValue = Constraints.LOG_MULTILINE_PATTERN.getValue(); + Pattern linePattern = StringUtils.isNotBlank(patternValue)? 
Pattern.compile(patternValue) : null; + int readLine = 0; + int lineNum = 0; + int skippedLine = 0; + int ignoreLine = 0; + int pageSize = logQuery.getPageSize(); + int fromLine = logQuery.getFromLine(); + List ignoreKeywords = logQuery.getIgnoreKeywordsList(); + List onlyKeywords = logQuery.getOnlyKeywordsList(); + boolean rowIgnore = false; + Supplier lineSupplier = null; + if (logQuery.isEnableTail()){ + reverseReader = new ReversedLinesFileReader(new File(fullPath), Charset.defaultCharset()); + LOG.trace("Enable reverse read the log: {}, fromLine: {}, pageSize: {}", fullPath, fromLine, pageSize); + ReversedLinesFileReader finalReverseReader = reverseReader; + lineSupplier = () -> { + try { + return finalReverseReader.readLine(); + } catch (IOException e) { + throw new ExchangisJobServerException.Runtime(LOG_OP_ERROR.getCode(), e.getMessage(), e); + } + }; + } else { + logReader = new RandomAccessFile(fullPath, "rw"); + RandomAccessFile finalLogReader = logReader; + lineSupplier = () -> { + try { + String line = finalLogReader.readLine(); + if (null != line){ + return new String(line.getBytes(StandardCharsets.ISO_8859_1), Charset.defaultCharset()); + } + return null; + } catch (IOException e) { + throw new ExchangisJobServerException.Runtime(LOG_OP_ERROR.getCode(), e.getMessage(), e); + } + }; + } + String line = lineSupplier.get(); + List logs = new ArrayList<>(); + while (readLine < pageSize && line != null){ + lineNum += 1; + if (skippedLine < fromLine - 1){ + skippedLine += 1; + } else { + if (rowIgnore) { + if (Objects.nonNull(linePattern)){ + Matcher matcher = linePattern.matcher(line); + if (matcher.find()){ + ignoreLine = 0; + rowIgnore = !isIncludeLine(line, onlyKeywords, ignoreKeywords); + } else { + ignoreLine += 1; + // TODO limit the value of ignoreLine + } + }else{ + rowIgnore = !isIncludeLine(line, onlyKeywords, ignoreKeywords); + } + }else { + rowIgnore = !isIncludeLine(line, onlyKeywords, ignoreKeywords); + } + if (!rowIgnore) { + if 
(line.contains("password")) { + LOG.info("have error information"); + } + if (!line.contains("password")) { + logs.add(line); + } + readLine += 1; + } + } + line = lineSupplier.get(); + } + if (logQuery.isEnableTail()){ + Collections.reverse(logs); + } + result = new LogResult(lineNum, false, logs); + } catch (IOException e) { + throw new ExchangisJobServerException.Runtime(LOG_OP_ERROR.getCode(),"Unable to query the logs from path: [" + logPath + "]", e); + } finally { + if (Objects.nonNull(logReader)) { + try { + logReader.close(); + } catch (IOException e) { + //Ignore + } + } + if (Objects.nonNull(reverseReader)) { + try { + reverseReader.close(); + } catch (IOException e) { + //Ignore + } + } + } + return result; + } + + /** + * Get last rows + * @param fullPath full path + * @param lastRows last rows + * @return + */ + private LogResult getLastRows(String fullPath, int lastRows){ + try { + List logs = Arrays.asList(Utils.exec(new String[]{"tail", "-n", lastRows + "", fullPath}, 5000L).split("\n")); + return new LogResult(0, true, logs); + }catch (Exception e){ + throw new ExchangisJobServerException.Runtime(LOG_OP_ERROR.getCode(), "Fail to get last rows from path: [" + fullPath + "]", e); + } + } + + private boolean isIncludeLine(String line, List onlyKeywordList, List ignoreKeywordList){ + boolean accept = ignoreKeywordList.isEmpty() || ignoreKeywordList.stream().noneMatch(line::contains); + if (accept){ + accept = onlyKeywordList.isEmpty() || onlyKeywordList.stream().anyMatch(line::contains); + } + return accept; + } +} diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/metrics/converter/DataxMetricConverter.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/metrics/converter/DataxMetricConverter.java index bdd66f552..59c38f5ac 100644 --- 
a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/metrics/converter/DataxMetricConverter.java +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/metrics/converter/DataxMetricConverter.java @@ -60,7 +60,9 @@ public ExchangisMetricsVo.ResourceUsed parseResourceUsed(String key, JsonEntity @Override public ExchangisMetricsVo.Traffic parseTraffic(String key, JsonEntity rawValue) { ExchangisMetricsVo.Traffic traffic = new ExchangisMetricsVo.Traffic(); - Double speed = rawValue.getDouble("recordSpeed"); + Double speed = Optional.ofNullable(rawValue.getDouble("recordSpeedPerSecond")).orElse( + rawValue.getDouble("recordSpeed") + ); if (Objects.nonNull(speed)){ traffic.setFlow(new BigDecimal(speed).setScale(2, RoundingMode.HALF_UP).doubleValue()); } @@ -70,8 +72,11 @@ public ExchangisMetricsVo.Traffic parseTraffic(String key, JsonEntity rawValue) @Override public ExchangisMetricsVo.Indicator parseIndicator(String key, JsonEntity rawValue) { ExchangisMetricsVo.Indicator indicator = new ExchangisMetricsVo.Indicator(); - Optional.ofNullable(rawValue.getLong("writeSucceedRecords")).ifPresent(indicator::setExchangedRecords); - Optional.ofNullable(rawValue.getLong("totalErrorRecords")).ifPresent(indicator::setErrorRecords); + long readSuccess = Optional.ofNullable(rawValue.getLong("readSucceedRecords")).orElse(0L); + long readFail = Optional.ofNullable(rawValue.getLong("readFailedRecords")).orElse(0L); + indicator.setExchangedRecords(readSuccess + readFail); + long writeFail = Optional.ofNullable(rawValue.getLong("writeFailedRecords")).orElse(0L); + indicator.setErrorRecords(readFail + writeFail); return indicator; } diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/TransformRequestVo.java 
b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/TransformRequestVo.java index 124f894eb..2bbc689f9 100644 --- a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/TransformRequestVo.java +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/TransformRequestVo.java @@ -32,6 +32,11 @@ public class TransformRequestVo { */ private String sourceTable; + /** + * Table (source) not exist + */ + private boolean srcTblNotExist = false; + /** * Sink type id */ @@ -54,6 +59,10 @@ public class TransformRequestVo { */ private String sinkTable; + /** + * Table (sink) not exist + */ + private boolean sinkTblNotExist = false; /** * Labels */ @@ -150,4 +159,20 @@ public String getOperator() { public void setOperator(String operator) { this.operator = operator; } + + public void setSrcTblNotExist(boolean srcTblNotExist) { + this.srcTblNotExist = srcTblNotExist; + } + + public void setSinkTblNotExist(boolean sinkTblNotExist) { + this.sinkTblNotExist = sinkTblNotExist; + } + + public boolean isSrcTblNotExist() { + return srcTblNotExist; + } + + public boolean isSinkTblNotExist() { + return sinkTblNotExist; + } } diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/field/mapping/FieldMappingTransformer.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/field/mapping/FieldMappingTransformer.java index 6cbbc64a9..19454a4f2 100644 --- a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/field/mapping/FieldMappingTransformer.java +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/field/mapping/FieldMappingTransformer.java @@ -84,31 +84,35 @@ 
private FieldMappingSettings getFieldMappingSettings(FieldMappingRule rule, Tran settings.setTransformEnable(rule.isFieldTransformEnable()); // Get raw meta columns List sourceColumns = new ArrayList<>(); - try { - List metaColumns = getOrLoadMetadataInfoService(). - getColumns(requestVo.getOperator(), requestVo.getSourceDataSourceId(), - requestVo.getSourceDataBase(), requestVo.getSourceTable()); - boolean editable = rule.getFieldEditEnableRuleItem().getOrDefault(TransformRule.Direction.SOURCE.name(), true); - for (int i = 0; i < metaColumns.size(); i++) { - MetaColumn metaColumn = metaColumns.get(i); - sourceColumns.add(new FieldColumnWrapper(metaColumn.getName(), metaColumn.getType(), i, editable)); + if (!requestVo.isSrcTblNotExist()) { + try { + List metaColumns = getOrLoadMetadataInfoService(). + getColumns(requestVo.getOperator(), requestVo.getSourceDataSourceId(), + requestVo.getSourceDataBase(), requestVo.getSourceTable()); + boolean editable = rule.getFieldEditEnableRuleItem().getOrDefault(TransformRule.Direction.SOURCE.name(), true); + for (int i = 0; i < metaColumns.size(); i++) { + MetaColumn metaColumn = metaColumns.get(i); + sourceColumns.add(new FieldColumnWrapper(metaColumn.getName(), metaColumn.getType(), i, editable)); + } + } catch (ExchangisDataSourceException e) { + throw new ExchangisJobException.Runtime(ExchangisJobExceptionCode.RENDER_TRANSFORM_ERROR.getCode(), "Fail to get source meta columns in generating field mapping settings", e); } - } catch (ExchangisDataSourceException e) { - throw new ExchangisJobException.Runtime(ExchangisJobExceptionCode.RENDER_TRANSFORM_ERROR.getCode(), "Fail to get source meta columns in generating field mapping settings", e); } settings.setSourceFields(sourceColumns); List sinkColumns = new ArrayList<>(); - try { - List metaColumns = getOrLoadMetadataInfoService(). 
- getColumns(requestVo.getOperator(), requestVo.getSinkDataSourceId(), - requestVo.getSinkDataBase(), requestVo.getSinkTable()); - boolean editable = rule.getFieldEditEnableRuleItem().getOrDefault(TransformRule.Direction.SINK.name(), true); - for (int i = 0; i < metaColumns.size(); i++) { - MetaColumn metaColumn = metaColumns.get(i); - sinkColumns.add(new FieldColumnWrapper(metaColumn.getName(), metaColumn.getType(), i, editable)); + if (!requestVo.isSinkTblNotExist()) { + try { + List metaColumns = getOrLoadMetadataInfoService(). + getColumns(requestVo.getOperator(), requestVo.getSinkDataSourceId(), + requestVo.getSinkDataBase(), requestVo.getSinkTable()); + boolean editable = rule.getFieldEditEnableRuleItem().getOrDefault(TransformRule.Direction.SINK.name(), true); + for (int i = 0; i < metaColumns.size(); i++) { + MetaColumn metaColumn = metaColumns.get(i); + sinkColumns.add(new FieldColumnWrapper(metaColumn.getName(), metaColumn.getType(), i, editable)); + } + } catch (ExchangisDataSourceException e) { + throw new ExchangisJobException.Runtime(ExchangisJobExceptionCode.RENDER_TRANSFORM_ERROR.getCode(), "Fail to get sink meta columns in generating field mapping settings", e); } - } catch (ExchangisDataSourceException e) { - throw new ExchangisJobException.Runtime(ExchangisJobExceptionCode.RENDER_TRANSFORM_ERROR.getCode(), "Fail to get sink meta columns in generating field mapping settings", e); } settings.setSinkFields(sinkColumns); FieldMatchStrategy matchStrategy = rule.getFieldMatchStrategy(); diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/field/mapping/match/AbstractFieldMatchStrategy.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/field/mapping/match/AbstractFieldMatchStrategy.java index 66be28d00..883936bb3 100644 --- 
a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/field/mapping/match/AbstractFieldMatchStrategy.java +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/render/transform/field/mapping/match/AbstractFieldMatchStrategy.java @@ -25,7 +25,9 @@ public List match(List dependColumns, List 0) { + fieldColumnMatches.add(new FieldColumnMatch(dependColumn, searchColumns.get(i % searchColumns.size()))); + } } } } diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/restful/execute/ExchangisJobExecuteRestfulApi.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/restful/execute/ExchangisJobExecuteRestfulApi.java index 5ed4cfb4a..62d74e884 100644 --- a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/restful/execute/ExchangisJobExecuteRestfulApi.java +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/restful/execute/ExchangisJobExecuteRestfulApi.java @@ -76,7 +76,7 @@ public Message executeJob(@RequestBody(required = false) Map per jobInfo.getExecuteUser() : loginUser); result.data("jobExecutionId", jobExecutionId); } catch (Exception e) { - String message; + String message; if (Objects.nonNull(jobInfo)) { message = "Error occur while executing job: [id: " + jobInfo.getId() + " name: " + jobInfo.getName() + "]"; result = Message.error(message + "(执行任务出错), reason: " + e.getMessage()); @@ -157,11 +157,15 @@ public Message getJobExecutionLogs(@PathVariable(value = "jobExecutionId") Strin @RequestParam(value = "pageSize", required = false) Integer pageSize, @RequestParam(value = "ignoreKeywords", required = false) String ignoreKeywords, @RequestParam(value = "onlyKeywords", required = false) String onlyKeywords, + @RequestParam(value = "enableTail", required = false) Boolean 
enableTail, @RequestParam(value = "lastRows", required = false) Integer lastRows, HttpServletRequest request) { Message result = Message.ok("Submitted succeed(提交成功)!"); LogQuery logQuery = new LogQuery(fromLine, pageSize, ignoreKeywords, onlyKeywords, lastRows); + if (null != enableTail) { + logQuery.setEnableTail(enableTail); + } String loginUser = UserUtils.getLoginUser(request); try { if(!JobAuthorityUtils.hasJobExecuteSituationAuthority(loginUser, jobExecutionId, OperationType.JOB_QUERY)) { diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/restful/execute/ExchangisTaskExecuteRestfulApi.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/restful/execute/ExchangisTaskExecuteRestfulApi.java index 003c24c1e..aa2a45e1b 100644 --- a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/restful/execute/ExchangisTaskExecuteRestfulApi.java +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/restful/execute/ExchangisTaskExecuteRestfulApi.java @@ -77,10 +77,14 @@ public Message getTaskExecutionLogs(@PathVariable(value = "taskId") String taskI @RequestParam(value = "pageSize", required = false) Integer pageSize, @RequestParam(value = "ignoreKeywords", required = false) String ignoreKeywords, @RequestParam(value = "onlyKeywords", required = false) String onlyKeywords, + @RequestParam(value = "enableTail", required = false) Boolean enableTail, @RequestParam(value = "lastRows", required = false) Integer lastRows, HttpServletRequest request) { Message result = Message.ok("Submitted succeed(提交成功)!"); LogQuery logQuery = new LogQuery(fromLine, pageSize, ignoreKeywords, onlyKeywords, lastRows); + if (null != enableTail) { + logQuery.setEnableTail(enableTail); + } String userName = UserUtils.getLoginUser(request); try { if (!JobAuthorityUtils.hasJobExecuteSituationAuthority(userName, 
jobExecutionId, OperationType.JOB_QUERY)) { diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/restful/external/ExchangisJobDssAppConnRestfulApi.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/restful/external/ExchangisJobDssAppConnRestfulApi.java index 9b91a5814..72ae0dc09 100644 --- a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/restful/external/ExchangisJobDssAppConnRestfulApi.java +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/restful/external/ExchangisJobDssAppConnRestfulApi.java @@ -82,10 +82,10 @@ public Message createJob( } catch (Exception e){ String message = "Fail to create dss job: " + exchangisJobVo.getJobName() +" (创建DSS任务失败)"; LOG.error(message, e); - response = Message.error(message); + return Message.error(message); } assert id != null; - AuditLogUtils.printLog(oringinUser, loginUser, TargetTypeEnum.JOB, id.toString(), "Job name is: " + exchangisJobVo.getJobName(), OperateTypeEnum.CREATE, request); + AuditLogUtils.printLog(oringinUser, loginUser, TargetTypeEnum.JOB, String.valueOf(id), "Job name is: " + exchangisJobVo.getJobName(), OperateTypeEnum.CREATE, request); return response; } @@ -115,9 +115,9 @@ public Message deleteJob(@PathVariable("id") Long id, HttpServletRequest request } catch (Exception e){ String message = "Fail to delete dss job [ id: " + id + "] (删除DSS任务失败)"; LOG.error(message, e); - response = Message.error(message); + return Message.error(message); } - AuditLogUtils.printLog(oringinUser, loginUser, TargetTypeEnum.JOB, id.toString(), "Job", OperateTypeEnum.DELETE, request); + AuditLogUtils.printLog(oringinUser, loginUser, TargetTypeEnum.JOB, String.valueOf(id), "Job", OperateTypeEnum.DELETE, request); return response; } @@ -155,9 +155,9 @@ public Message updateJob(@PathVariable("id") Long id, } catch (Exception e){ String 
message = "Fail to update dss job: " + exchangisJobVo.getJobName() +" (更新DSS任务失败)"; LOG.error(message, e); - response = Message.error(message); + return Message.error(message); } - AuditLogUtils.printLog(oringinUser, loginUser, TargetTypeEnum.JOB, id.toString(), "Job name is: " + exchangisJobVo.getJobName(), OperateTypeEnum.UPDATE, request); + AuditLogUtils.printLog(oringinUser, loginUser, TargetTypeEnum.JOB, String.valueOf(id), "Job name is: " + exchangisJobVo.getJobName(), OperateTypeEnum.UPDATE, request); return response; } @@ -178,7 +178,7 @@ public Message executeJob(@PathVariable("id") Long id, HttpServletRequest reques String submitUser = params.get("submitUser").toString(); String oringinUser = SecurityFilter.getLoginUsername(request); String loginUser = UserUtils.getLoginUser(request); - Message result = Message.ok(); + Message response = Message.ok(); ExchangisJobInfo jobInfo = null; LOG.info("wds execute user: {}", loginUser); try { @@ -203,7 +203,7 @@ public Message executeJob(@PathVariable("id") Long id, HttpServletRequest reques // Send to execute service String jobExecutionId = executeService.executeJob(jobInfo, StringUtils.isNotBlank(jobInfo.getExecuteUser()) ? 
jobInfo.getExecuteUser() : loginUser); - result.data("jobExecutionId", jobExecutionId); + response.data("jobExecutionId", jobExecutionId); LOG.info("Prepare to get job status"); /*while (true) { @@ -223,15 +223,16 @@ public Message executeJob(@PathVariable("id") Long id, HttpServletRequest reques String message; if (Objects.nonNull(jobInfo)) { message = "Error occur while executing job: [id: " + jobInfo.getId() + " name: " + jobInfo.getName() + "]"; - result = Message.error(message + "(执行任务出错), reason: " + e.getMessage()); + response = Message.error(message + "(执行任务出错), reason: " + e.getMessage()); } else { message = "Error to get the job detail (获取任务信息出错)"; - result = Message.error(message); + response = Message.error(message); } LOG.error(message, e); + return response; } assert jobInfo != null; - AuditLogUtils.printLog(oringinUser, loginUser, TargetTypeEnum.JOB, id.toString(), "Execute task is: " + jobInfo.getName(), OperateTypeEnum.EXECUTE, request); - return result; + AuditLogUtils.printLog(oringinUser, loginUser, TargetTypeEnum.JOB, String.valueOf(id), "Execute task is: " + jobInfo.getName(), OperateTypeEnum.EXECUTE, request); + return response; } } diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/service/impl/DefaultJobExecuteService.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/service/impl/DefaultJobExecuteService.java index 8719c5284..6d02407a0 100644 --- a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/service/impl/DefaultJobExecuteService.java +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/service/impl/DefaultJobExecuteService.java @@ -32,6 +32,7 @@ import com.webank.wedatasphere.exchangis.job.server.service.JobExecuteService; import com.webank.wedatasphere.exchangis.job.server.vo.*; import org.apache.commons.lang.StringUtils; +import 
org.apache.linkis.common.conf.CommonVars; import org.modelmapper.ModelMapper; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -39,6 +40,7 @@ import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; +import javax.annotation.PostConstruct; import javax.annotation.Resource; import javax.servlet.http.HttpServletRequest; import java.util.*; @@ -49,6 +51,10 @@ public class DefaultJobExecuteService implements JobExecuteService { private final static Logger LOG = LoggerFactory.getLogger(DefaultJobExecuteService.class); + private static final CommonVars TASK_LOG_IGNORE_KEYS = CommonVars.apply( + "wds.exchangis.job.task.log.ignore-keys", + "service.DefaultManagerService,info.DefaultNodeHealthyInfoManager"); + @Autowired private LaunchedTaskDao launchedTaskDao; @@ -89,6 +95,18 @@ public class DefaultJobExecuteService implements JobExecuteService { @Resource private MetricConverterFactory metricConverterFactory; + /** + * Log ignore key set + */ + private final Set logIgnoreKeySet = new HashSet<>(); + + @PostConstruct + public void init(){ + String defaultIgnoreKeys = TASK_LOG_IGNORE_KEYS.getValue(); + if (StringUtils.isNotBlank(defaultIgnoreKeys)){ + logIgnoreKeySet.addAll(Arrays.asList(defaultIgnoreKeys.split(","))); + } + } @Override public List getExecutedJobTaskList(String jobExecutionId) throws ExchangisJobServerException{ List launchedExchangisTaskEntities = launchedTaskDao.selectTaskListByJobExecutionId(jobExecutionId); @@ -189,6 +207,16 @@ public ExchangisCategoryLogVo getJobLogInfo(String jobExecutionId, LogQuery logQ public ExchangisCategoryLogVo getTaskLogInfo(String taskId, String jobExecutionId, LogQuery logQuery) throws ExchangisJobServerException, ExchangisTaskLaunchException { LaunchedExchangisTaskEntity launchedTaskEntity = this.launchedTaskDao.getLaunchedTaskEntity(taskId); + if (logIgnoreKeySet.size() > 0){ + String ignoreKeys = logQuery.getIgnoreKeywords(); + if 
(StringUtils.isNotBlank(ignoreKeys)){ + Set ignores = new HashSet<>(Arrays.asList(ignoreKeys.split(","))); + ignores.addAll(logIgnoreKeySet); + logQuery.setIgnoreKeywords(StringUtils.join(ignores, ",")); + } else { + logQuery.setIgnoreKeywords(StringUtils.join(logIgnoreKeySet, ",")); + } + } if (Objects.isNull(launchedTaskEntity)){ return resultToCategoryLog(logQuery, new LogResult(0, false, new ArrayList<>()), TaskStatus.Inited); } @@ -321,7 +349,7 @@ private ExchangisCategoryLogVo resultToCategoryLog(LogQuery logQuery, LogResult } if (Objects.nonNull(logQuery.getLastRows())){ logResult.setEnd(true); - }else if (noLogs){ + }else if (noLogs || logQuery.isEnableTail()){ // logResult.getLogs().add("<>"); if (TaskStatus.isCompleted(status)){ logResult.setEnd(true); diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/service/impl/DefaultTaskObserverService.java b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/service/impl/DefaultTaskObserverService.java index d346bd8c5..6a4e58822 100644 --- a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/service/impl/DefaultTaskObserverService.java +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/service/impl/DefaultTaskObserverService.java @@ -45,6 +45,8 @@ public boolean subscribe(LaunchableExchangisTask task) { if (Objects.isNull(jobEntity) || TaskStatus.isCompleted(jobEntity.getStatus())){ taskEntity.setStatus(jobEntity.getStatus()); this.launchedTaskDao.insertLaunchedTaskOrUpdate(taskEntity); + // TODO delete the launch able task + return false; } else { return this.launchedTaskDao.insertLaunchedTaskOrUpdate(taskEntity) == 1; diff --git a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/service/impl/ExchangisJobDsBindServiceImpl.java 
b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/service/impl/ExchangisJobDsBindServiceImpl.java index 567895e08..7ec485afd 100644 --- a/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/service/impl/ExchangisJobDsBindServiceImpl.java +++ b/exchangis-job/exchangis-job-server/src/main/java/com/webank/wedatasphere/exchangis/job/server/service/impl/ExchangisJobDsBindServiceImpl.java @@ -32,7 +32,7 @@ public void updateJobDsBind(Long jobId, List dsBinds) { public boolean inUse(Long datasourceId) { QueryWrapper condition = new QueryWrapper<>(); condition.eq("source_ds_id", datasourceId).or().eq("sink_ds_id", datasourceId); - Long count = Optional.ofNullable(this.dsBindMapper.selectCount(condition)).orElse(0L); + Long count = Optional.ofNullable(this.dsBindMapper.selectCount(condition)).orElse(0l); return count > 0; } } diff --git a/exchangis-job/exchangis-job-server/src/main/scala/com/webank/wedatasphere/exchangis/job/server/log/cache/JobLogCache.scala b/exchangis-job/exchangis-job-server/src/main/scala/com/webank/wedatasphere/exchangis/job/server/log/cache/JobLogCache.scala index 1986373ae..75721f8ee 100644 --- a/exchangis-job/exchangis-job-server/src/main/scala/com/webank/wedatasphere/exchangis/job/server/log/cache/JobLogCache.scala +++ b/exchangis-job/exchangis-job-server/src/main/scala/com/webank/wedatasphere/exchangis/job/server/log/cache/JobLogCache.scala @@ -35,7 +35,7 @@ abstract class AbstractJobLogCache[V](scheduler: Scheduler, maxSize: Int = 100, var lastFlush: Long = -1L - var cacheQueue: util.concurrent.ArrayBlockingQueue[Any] = new ArrayBlockingQueue[Any](maxSize) + var cacheQueue: util.concurrent.ArrayBlockingQueue[V] = new ArrayBlockingQueue[V](maxSize) var isShutdown: Boolean = false @@ -65,7 +65,7 @@ abstract class AbstractJobLogCache[V](scheduler: Scheduler, maxSize: Int = 100, override def cacheLog(log: V): Unit = { val element: Any = getCacheQueueElement(log) 
- if (!cacheQueue.offer(element)) { + if (!cacheQueue.offer(element.asInstanceOf[V])) { warn("The cache queue is full, should flush the cache immediately") flushCache(false) } else if (lastFlush + flushInterval < System.currentTimeMillis){ diff --git a/exchangis-job/exchangis-job-server/src/main/test/com/webank/wedatasphere/exchangis/job/server/builder/JobBuilderTest.java b/exchangis-job/exchangis-job-server/src/main/test/com/webank/wedatasphere/exchangis/job/server/builder/JobBuilderTest.java index e2472d34e..5a30a1231 100644 --- a/exchangis-job/exchangis-job-server/src/main/test/com/webank/wedatasphere/exchangis/job/server/builder/JobBuilderTest.java +++ b/exchangis-job/exchangis-job-server/src/main/test/com/webank/wedatasphere/exchangis/job/server/builder/JobBuilderTest.java @@ -57,7 +57,7 @@ public static void main(String[] args) throws ExchangisJobException, JsonProcess " \"preSql\": [], \n" + " \"connection\": [\n" + " {\n" + - " \"jdbcUrl\":\"jdbc:mysql://172.24.2.61:3306/test\", \n" + + " \"jdbcUrl\":\"jdbc:mysql://127.0.0.1:3306/test\", \n" + " \"table\": [\"testtab\"]\n" + " }\n" + " ]\n" + diff --git a/exchangis-job/exchangis-job-service/pom.xml b/exchangis-job/exchangis-job-service/pom.xml index 44ffac3db..10c0afb92 100644 --- a/exchangis-job/exchangis-job-service/pom.xml +++ b/exchangis-job/exchangis-job-service/pom.xml @@ -5,7 +5,7 @@ exchangis com.webank.wedatasphere.exchangis - 1.1.2 + ${revision} ../../pom.xml 4.0.0 @@ -21,12 +21,12 @@ com.webank.wedatasphere.exchangis exchangis-job-common - 1.1.2 + ${revision} com.webank.wedatasphere.exchangis exchangis-job-launcher - 1.1.2 + ${revision} diff --git a/exchangis-job/pom.xml b/exchangis-job/pom.xml index e28c390ac..2ccba4766 100644 --- a/exchangis-job/pom.xml +++ b/exchangis-job/pom.xml @@ -5,13 +5,14 @@ exchangis com.webank.wedatasphere.exchangis - 1.1.2 + ${revision} + ../pom.xml 4.0.0 exchangis-job pom - 1.1.2 + ${revision} exchangis-job-common @@ -26,11 +27,4 @@ 8 - - - org.modelmapper - 
modelmapper - 2.4.3 - - \ No newline at end of file diff --git a/exchangis-plugins/exchangis-appconn/pom.xml b/exchangis-plugins/exchangis-appconn/pom.xml index 0c1212479..aa84a4261 100644 --- a/exchangis-plugins/exchangis-appconn/pom.xml +++ b/exchangis-plugins/exchangis-appconn/pom.xml @@ -3,10 +3,10 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> - exchangis-plugins + exchangis com.webank.wedatasphere.exchangis - 1.1.2 - ../pom.xml + ${revision} + ../../pom.xml 4.0.0 @@ -25,8 +25,24 @@ provided - httpclient org.apache.httpcomponents + httpclient + + + org.springframework + spring-core + + + org.springframework.boot + spring-boot + + + org.springframework.boot + spring-boot-starter-cache + + + org.springframework.cloud + spring-cloud-starter-netflix-eureka-client true @@ -48,8 +64,8 @@ provided - linkis-common org.apache.linkis + linkis-common @@ -73,6 +89,28 @@ dss-common ${dss.version} provided + + + org.springframework + spring-aop + + + org.springframework + spring-context-support + + + org.springframework + spring-webmvc + + + org.springframework + spring-jdbc + + + org.springframework + spring-tx + + org.reflections @@ -86,15 +124,12 @@ org.apache.maven.plugins maven-deploy-plugin - - - - net.alchim31.maven - scala-maven-plugin + ${maven-deploy-plugin.version} org.apache.maven.plugins maven-jar-plugin + ${maven-jar-plugin.version} org.apache.maven.plugins diff --git a/exchangis-plugins/exchangis-appconn/src/main/assembly/distribution.xml b/exchangis-plugins/exchangis-appconn/src/main/assembly/distribution.xml index 9174d8a49..8a2f5187d 100644 --- a/exchangis-plugins/exchangis-appconn/src/main/assembly/distribution.xml +++ b/exchangis-plugins/exchangis-appconn/src/main/assembly/distribution.xml @@ -72,6 +72,7 @@ ${basedir}/src/main/resources + datax.icon sqoop.icon 0777 diff --git a/exchangis-plugins/exchangis-appconn/src/main/resources/init.sql 
b/exchangis-plugins/exchangis-appconn/src/main/resources/init.sql index 55105578c..0b98053c0 100644 --- a/exchangis-plugins/exchangis-appconn/src/main/resources/init.sql +++ b/exchangis-plugins/exchangis-appconn/src/main/resources/init.sql @@ -1,51 +1,42 @@ --- TODO 这里只适用于第一次安装时。如果是更新的话dss_appconn表不能先删除再插入,因为其他表如dss_workspace_appconn_role关联了appconn_id(不能变),需要使用update、alter语句更新 - -- 删除exchangis关联的数据 -- delete from `dss_appconn_instance` where `appconn_id` in (select `id` from `dss_appconn` where `appconn_name` = 'exchangis'); delete from `dss_workspace_menu_appconn` where `appconn_id` in (select `id` from `dss_appconn` where `appconn_name` = 'exchangis'); - delete from `dss_appconn` where `appconn_name`='exchangis'; + INSERT INTO `dss_appconn` (`appconn_name`, `is_user_need_init`, `level`, `if_iframe`, `is_external`, `reference`, `class_name`, `appconn_class_path`, `resource`) -VALUES ('exchangis', 0, 1, 1, 1, NULL, 'com.webank.wedatasphere.exchangis.dss.appconn.ExchangisAppConn', 'DSS_INSTALL_HOME_VAL/dss-appconns/exchangis', ''); -select @dss_appconn_exchangis_id:=id from `dss_appconn` where `appconn_name` = "exchangis"; +VALUES ('exchangis', 0, 1, 1, 1, NULL, 'com.webank.wedatasphere.exchangis.dss.appconn.ExchangisAppConn', '/appcom/Install/dss/dss-appconns/exchangis', ''); INSERT INTO `dss_appconn_instance` (`appconn_id`, `label`, `url`, `enhance_json`, `homepage_uri`) -VALUES (@dss_appconn_exchangis_id, 'DEV', 'http://APPCONN_INSTALL_IP:APPCONN_INSTALL_PORT/', '', '#/projectManage'); +VALUES ((select id from `dss_appconn` where `appconn_name` = "exchangis" limit 1), 'DEV', 'http://APPCONN_INSTALL_IP:APPCONN_INSTALL_PORT/', '', '#/projectManage'); -- 看appconn组件是要归属于哪个菜单 -select @exchangis_menuId:=id from `dss_workspace_menu` where `name` = "数据交换"; INSERT INTO `dss_workspace_menu_appconn` (`appconn_id`, `menu_id`, `title_en`, `title_cn`, `desc_en`, `desc_cn`, `labels_en`, `labels_cn`, `is_active`, `access_button_en`, `access_button_cn`, `manual_button_en`, 
`manual_button_cn`, `manual_button_url`, `icon`, `order`, `create_by`, `create_time`, `last_update_time`, `last_update_user`, `image`) - VALUES(@dss_appconn_exchangis_id,@exchangis_menuId,'Exchangis','Exchangis','Exchangis','' - ,'exchangis, statement','数据交换,数据源','1','enter Exchangis','进入Exchangis','user manual','用户手册','http://APPCONN_INSTALL_IP:APPCONN_INSTALL_PORT/#/projectManage','shujukeshihua-logo',NULL,NULL,NULL,NULL,NULL,'shujukeshihua-icon'); +VALUES((select id from `dss_appconn` where `appconn_name` = "exchangis" limit 1), (select id from `dss_workspace_menu` where `name` = "数据交换") +,'Exchangis','Exchangis','Exchangis','Exchangis数据交换平台','exchangis, statement','数据交换,数据源','1','enter Exchangis','进入Exchangis','user manual','用户手册','http://APPCONN_INSTALL_IP:APPCONN_INSTALL_PORT/#/projectManage','shujujiaohuan-logo',NULL,NULL,NULL,NULL,NULL,'shujujiaohuan-icon'); --- Sqoop节点安装 -select @old_dss_exchangis_sqoopId:=id from `dss_workflow_node` where `node_type` = 'linkis.appconn.exchangis.sqoop'; -select @old_dss_exchangis_dataxId:=id from `dss_workflow_node` where `node_type` = 'linkis.appconn.exchangis.datax'; +-- 卸载节点 +delete from `dss_workflow_node_to_group` where `node_id` in (select id from `dss_workflow_node` where `node_type` = 'linkis.appconn.exchangis.sqoop' or `node_type` = 'linkis.appconn.exchangis.datax'); +delete from `dss_workflow_node_to_ui` where `workflow_node_id` in (select id from `dss_workflow_node` where `node_type` = 'linkis.appconn.exchangis.sqoop' or `node_type` = 'linkis.appconn.exchangis.datax'); delete from `dss_workflow_node` where `node_type` like '%exchangis%'; -delete from `dss_workflow_node_to_group` where `node_id`=@old_dss_exchangis_sqoopId or `node_id`=@old_dss_exchangis_dataxId; -delete from `dss_workflow_node_to_ui` where `workflow_node_id`=@old_dss_exchangis_sqoopId or `workflow_node_id`=@old_dss_exchangis_dataxId; -- 节点表dss_workflow_node insert into `dss_workflow_node` (`name`, `appconn_name`, `node_type`, `jump_type`, 
`support_jump`, `submit_to_scheduler`, `enable_copy`, `should_creation_before_node`, `icon_path`) values('sqoop','exchangis','linkis.appconn.exchangis.sqoop',1,'1','1','0','1','icons/sqoop.icon'); -select @dss_exchangis_sqoopId:=id from `dss_workflow_node` where `node_type` = 'linkis.appconn.exchangis.sqoop'; insert into `dss_workflow_node` (`name`, `appconn_name`, `node_type`, `jump_type`, `support_jump`, `submit_to_scheduler`, `enable_copy`, `should_creation_before_node`, `icon_path`) values('datax','exchangis','linkis.appconn.exchangis.datax',1,'1','1','0','1','icons/datax.icon'); -select @dss_exchangis_dataxId:=id from `dss_workflow_node` where `node_type` = 'linkis.appconn.exchangis.datax'; -- 节点组表dss_workflow_node_to_group -select @dss_workflow_node_group_id:=id from `dss_workflow_node_group` where `name` = '数据交换'; -INSERT INTO `dss_workflow_node_to_group`(`node_id`,`group_id`) values (@dss_exchangis_sqoopId, @dss_workflow_node_group_id); -INSERT INTO `dss_workflow_node_to_group`(`node_id`,`group_id`) values (@dss_exchangis_dataxId, @dss_workflow_node_group_id); +INSERT INTO `dss_workflow_node_to_group`(`node_id`,`group_id`) values ((select id from `dss_workflow_node` where `node_type` = 'linkis.appconn.exchangis.sqoop' limit 1), (select id from `dss_workflow_node_group` where `name` = '数据交换')); +INSERT INTO `dss_workflow_node_to_group`(`node_id`,`group_id`) values ((select id from `dss_workflow_node` where `node_type` = 'linkis.appconn.exchangis.datax' limit 1), (select id from `dss_workflow_node_group` where `name` = '数据交换')); -- 节点UI表dss_workflow_node_to_ui -INSERT INTO `dss_workflow_node_to_ui`(`workflow_node_id`,`ui_id`) values (@dss_exchangis_sqoopId, 1); -INSERT INTO `dss_workflow_node_to_ui`(`workflow_node_id`,`ui_id`) values (@dss_exchangis_sqoopId, 2); -INSERT INTO `dss_workflow_node_to_ui`(`workflow_node_id`,`ui_id`) values (@dss_exchangis_sqoopId, 3); -INSERT INTO `dss_workflow_node_to_ui`(`workflow_node_id`,`ui_id`) values 
(@dss_exchangis_sqoopId, 4); -INSERT INTO `dss_workflow_node_to_ui`(`workflow_node_id`,`ui_id`) values (@dss_exchangis_sqoopId, 5); -INSERT INTO `dss_workflow_node_to_ui`(`workflow_node_id`,`ui_id`) values (@dss_exchangis_dataxId, 1); -INSERT INTO `dss_workflow_node_to_ui`(`workflow_node_id`,`ui_id`) values (@dss_exchangis_dataxId, 2); -INSERT INTO `dss_workflow_node_to_ui`(`workflow_node_id`,`ui_id`) values (@dss_exchangis_dataxId, 3); -INSERT INTO `dss_workflow_node_to_ui`(`workflow_node_id`,`ui_id`) values (@dss_exchangis_dataxId, 4); -INSERT INTO `dss_workflow_node_to_ui`(`workflow_node_id`,`ui_id`) values (@dss_exchangis_dataxId, 5); +INSERT INTO `dss_workflow_node_to_ui`(`workflow_node_id`,`ui_id`) values ((select id from `dss_workflow_node` where `node_type` = 'linkis.appconn.exchangis.sqoop' limit 1), 1); +INSERT INTO `dss_workflow_node_to_ui`(`workflow_node_id`,`ui_id`) values ((select id from `dss_workflow_node` where `node_type` = 'linkis.appconn.exchangis.sqoop' limit 1), 2); +INSERT INTO `dss_workflow_node_to_ui`(`workflow_node_id`,`ui_id`) values ((select id from `dss_workflow_node` where `node_type` = 'linkis.appconn.exchangis.sqoop' limit 1), 3); +INSERT INTO `dss_workflow_node_to_ui`(`workflow_node_id`,`ui_id`) values ((select id from `dss_workflow_node` where `node_type` = 'linkis.appconn.exchangis.sqoop' limit 1), 4); +INSERT INTO `dss_workflow_node_to_ui`(`workflow_node_id`,`ui_id`) values ((select id from `dss_workflow_node` where `node_type` = 'linkis.appconn.exchangis.sqoop' limit 1), 5); +INSERT INTO `dss_workflow_node_to_ui`(`workflow_node_id`,`ui_id`) values ((select id from `dss_workflow_node` where `node_type` = 'linkis.appconn.exchangis.datax' limit 1), 1); +INSERT INTO `dss_workflow_node_to_ui`(`workflow_node_id`,`ui_id`) values ((select id from `dss_workflow_node` where `node_type` = 'linkis.appconn.exchangis.datax' limit 1), 2); +INSERT INTO `dss_workflow_node_to_ui`(`workflow_node_id`,`ui_id`) values ((select id from 
`dss_workflow_node` where `node_type` = 'linkis.appconn.exchangis.datax' limit 1), 3); +INSERT INTO `dss_workflow_node_to_ui`(`workflow_node_id`,`ui_id`) values ((select id from `dss_workflow_node` where `node_type` = 'linkis.appconn.exchangis.datax' limit 1), 4); +INSERT INTO `dss_workflow_node_to_ui`(`workflow_node_id`,`ui_id`) values ((select id from `dss_workflow_node` where `node_type` = 'linkis.appconn.exchangis.datax' limit 1), 5); diff --git a/exchangis-plugins/pom.xml b/exchangis-plugins/pom.xml index 8b290ca55..2a93dfb19 100644 --- a/exchangis-plugins/pom.xml +++ b/exchangis-plugins/pom.xml @@ -5,13 +5,14 @@ exchangis com.webank.wedatasphere.exchangis - 1.1.2 + ${revision} + ../pom.xml 4.0.0 exchangis-plugins pom - 1.1.2 + ${revision} exchangis-appconn diff --git a/exchangis-project/exchangis-project-entity/pom.xml b/exchangis-project/exchangis-project-entity/pom.xml index 198497921..44bba43b6 100644 --- a/exchangis-project/exchangis-project-entity/pom.xml +++ b/exchangis-project/exchangis-project-entity/pom.xml @@ -3,9 +3,10 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> - exchangis-project + exchangis com.webank.wedatasphere.exchangis - 1.1.2 + ${revision} + ../../pom.xml 4.0.0 @@ -20,7 +21,7 @@ com.webank.wedatasphere.exchangis exchangis-dao - 1.1.2 + ${project.version} org.apache.commons diff --git a/exchangis-project/exchangis-project-provider/pom.xml b/exchangis-project/exchangis-project-provider/pom.xml index b658e8b07..d87a4d29a 100644 --- a/exchangis-project/exchangis-project-provider/pom.xml +++ b/exchangis-project/exchangis-project-provider/pom.xml @@ -3,9 +3,10 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> - exchangis-project + exchangis com.webank.wedatasphere.exchangis - 1.1.2 + ${revision} + ../../pom.xml 4.0.0 @@ -20,7 
+21,7 @@ com.webank.wedatasphere.exchangis exchangis-project-entity - 1.1.2 + ${project.version} diff --git a/exchangis-project/exchangis-project-server/pom.xml b/exchangis-project/exchangis-project-server/pom.xml index 0853557d1..d19247918 100644 --- a/exchangis-project/exchangis-project-server/pom.xml +++ b/exchangis-project/exchangis-project-server/pom.xml @@ -3,9 +3,10 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> - exchangis-project + exchangis com.webank.wedatasphere.exchangis - 1.1.2 + ${revision} + ../../pom.xml 4.0.0 @@ -20,22 +21,22 @@ com.webank.wedatasphere.exchangis exchangis-project-provider - 1.1.2 + ${project.version} com.webank.wedatasphere.exchangis exchangis-job-server - 1.1.2 + ${project.version} com.webank.wedatasphere.exchangis exchangis-dao - 1.1.2 + ${project.version} com.webank.wedatasphere.exchangis exchangis-job-common - 1.1.2 + ${project.version} diff --git a/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/restful/ExchangisProjectRestfulApi.java b/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/restful/ExchangisProjectRestfulApi.java index b6661f1f5..fd4c49a77 100644 --- a/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/restful/ExchangisProjectRestfulApi.java +++ b/exchangis-project/exchangis-project-server/src/main/java/com/webank/wedatasphere/exchangis/project/server/restful/ExchangisProjectRestfulApi.java @@ -64,15 +64,16 @@ public class ExchangisProjectRestfulApi { * @param queryVo query vo * @param current current page * @param size size + * @param name name * @return message */ - @RequestMapping( value = "projects", method = {RequestMethod.POST, RequestMethod.GET}) + @RequestMapping( value = "projects", method = RequestMethod.POST) public Message 
queryProjects(HttpServletRequest request, @RequestBody ProjectQueryVo queryVo, @RequestParam(value = "current", required = false) Integer current, - @RequestParam(value = "size", required = false) Integer size) { + @RequestParam(value = "size", required = false) Integer size, + @RequestParam(value = "name", required = false) String name) { String username = UserUtils.getLoginUser(request); - String name = queryVo.getName(); if (StringUtils.isNotBlank(name)) { name = name.replaceAll("_", "/_"); } diff --git a/exchangis-project/pom.xml b/exchangis-project/pom.xml index 99a3c3a17..f4decd1d6 100644 --- a/exchangis-project/pom.xml +++ b/exchangis-project/pom.xml @@ -5,16 +5,17 @@ exchangis com.webank.wedatasphere.exchangis - 1.1.2 + ${revision} + ../pom.xml 4.0.0 exchangis-project pom + exchangis-project-entity exchangis-project-server exchangis-project-provider - exchangis-project-entity diff --git a/exchangis-server/pom.xml b/exchangis-server/pom.xml index 2b16d972d..e8d5a5619 100644 --- a/exchangis-server/pom.xml +++ b/exchangis-server/pom.xml @@ -5,7 +5,8 @@ exchangis com.webank.wedatasphere.exchangis - 1.1.2 + ${revision} + ../pom.xml 4.0.0 @@ -20,46 +21,36 @@ com.webank.wedatasphere.exchangis exchangis-datasource-server - 1.1.2 + ${project.version} com.webank.wedatasphere.exchangis exchangis-engine-server - ${exchangis.version} - - - org.apache.linkis - linkis-module - - - spring-jdbc - org.springframework - - + ${project.version} com.webank.wedatasphere.exchangis exchangis-job-server - 1.1.2 + ${project.version} com.webank.wedatasphere.exchangis exchangis-project-server - 1.1.2 + ${project.version} - org.apache.linkis - linkis-module + com.webank.wedatasphere.exchangis + exchangis-dao + ${project.version} - com.fasterxml classmate diff --git a/exchangis-server/src/main/assembly/distribution.xml b/exchangis-server/src/main/assembly/distribution.xml index 5f7ffc96c..5145471d5 100644 --- a/exchangis-server/src/main/assembly/distribution.xml +++ 
b/exchangis-server/src/main/assembly/distribution.xml @@ -22,7 +22,6 @@ exchangis-server tar.gz - false diff --git a/exchangis-server/src/main/java/com/webank/wedatasphere/exchangis/queue/BinlogArrayLockFreeQueue.java b/exchangis-server/src/main/java/com/webank/wedatasphere/exchangis/queue/BinlogArrayLockFreeQueue.java new file mode 100644 index 000000000..8a40a0c05 --- /dev/null +++ b/exchangis-server/src/main/java/com/webank/wedatasphere/exchangis/queue/BinlogArrayLockFreeQueue.java @@ -0,0 +1,465 @@ +package com.webank.wedatasphere.exchangis.queue; + +import sun.misc.Unsafe; + +import java.lang.reflect.Field; +import java.security.AccessController; +import java.security.PrivilegedActionException; +import java.security.PrivilegedExceptionAction; +import java.util.ArrayList; +import java.util.List; +import java.util.Objects; +import java.util.concurrent.ArrayBlockingQueue; +import java.util.concurrent.Executors; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.locks.Condition; +import java.util.concurrent.locks.LockSupport; +import java.util.concurrent.locks.ReentrantLock; + +public class BinlogArrayLockFreeQueue { + /** + * Allocation 64 MB buffer default + */ + private static final int DEFAULT_QUEUE_BUFFER = 1024 * 1024 * 64; + + private static final long DEFAULT_MEASURE_INTERVAL = 2 * 1000L; + + /** + * We should reduce the cpu usage + */ + private static final long DEFAULT_SPIN_TIMES = 10; + + private final Unsafe unsafe = UnsafeUtil.unsafe; + + final Object[] items; + + /** + * take index + */ + private volatile long takeIndex; + + /** + * put index + */ + private volatile long putIndex; + + /** + * max take index + */ + private volatile long maxTakeIndex; + + /** + * Memory bytes accumulated + */ + private volatile long memoryBytes; + + /** + * Wait take time + */ + private volatile long waitTake; + + /** + * Wait put time + */ + private volatile long waitPut; + + /** + * Flag to measure + */ + private volatile long measureFlag; + + /** + 
* Buffer size limit + */ + private long bufferSize; + /** + * Measure interval + */ + private long measureInterval; + + + private final ReentrantLock waitLock = new ReentrantLock(false); + + private final Condition notEmpty = waitLock.newCondition(); + + public BinlogArrayLockFreeQueue(int capacity, long bufferSize, long measureInterval){ + // Init the array size as ring buffer, left one chunk + if ((capacity & (capacity - 1)) != 0){ + throw new IllegalArgumentException("the value of capacity must equal to 2^N and greater than 1"); + } + items = new Object[capacity]; + if (bufferSize <= 0){ + bufferSize = Integer.MAX_VALUE; + } + this.bufferSize = bufferSize; + this.measureInterval = measureInterval; + } + + public BinlogArrayLockFreeQueue(int capacity){ + this(capacity, DEFAULT_QUEUE_BUFFER, DEFAULT_MEASURE_INTERVAL); + } + + public void put(T message) throws InterruptedException { + if (Objects.nonNull(message)){ + long curTakeIndex; + long curPutIndex; + long nextPutIndex; + long waitTime = 0; + long clock = 0; + try { + do { + int counter = -1; + do { + counter ++; + // Lock and wait the queue not full + if (counter > 0) { + LockSupport.parkNanos(1L); + } + curPutIndex = this.putIndex; + curTakeIndex = this.takeIndex; + nextPutIndex = curPutIndex + 1; + clock = System.nanoTime(); + } while(toIndex(nextPutIndex) == toIndex(curTakeIndex)); + if (counter > 0){ + waitTime += (System.nanoTime() - clock); + } + } while (!unsafe.compareAndSwapLong(this, Offsets.putIndexOffset, curPutIndex, nextPutIndex)); + // Accumulate the memory + accumulateMemory(1); + // Write the circle + this.items[toIndex(curPutIndex)] = message; +// if (waitTime > 0) { +// unsafe.getAndAddLong(this, Offsets.waitTakeOffset, waitTime); +// } + while (!unsafe.compareAndSwapLong(this, Offsets.maxTakeIndexOffset, curPutIndex, nextPutIndex)){ + // Notify the older producer to update the max take index + Thread.yield(); + } + + }finally { + // Notify the waiter + waitLock.lock(); + try { + 
notEmpty.signalAll(); + } finally { + waitLock.unlock(); + } + // Try to measure the queue indicator +// measureIndicator(); + } + } + } + + + @SuppressWarnings("unchecked") + public T take(long timeout, TimeUnit unit) throws InterruptedException { + long nanos = unit.toNanos(timeout); + long curMaxTakeIndex; + long curTakeIndex; + long nextTakeIndex; + T element; + int takePos; + int iterator = 0; + long waitTime = 0; + do { + curMaxTakeIndex = this.maxTakeIndex; + curTakeIndex = this.takeIndex; + long clock = System.nanoTime(); + while (toIndex(curTakeIndex) == toIndex(curMaxTakeIndex)) { + // Wrap as wait strategy + ++ iterator; + // Enable to iterator times + if (iterator > DEFAULT_SPIN_TIMES && iterator <= DEFAULT_SPIN_TIMES * 2){ + // Try to park to release cpu + LockSupport.parkNanos(1L); + } else if (iterator > DEFAULT_SPIN_TIMES * 2){ + waitLock.lockInterruptibly(); + curTakeIndex = this.takeIndex; + curMaxTakeIndex = this.maxTakeIndex; + try { + if (toIndex(curTakeIndex) == toIndex(curMaxTakeIndex)) { + if (nanos <= 0) { + return null; + } + nanos = notEmpty.awaitNanos(nanos); + iterator = 0; + } + } finally { + waitLock.unlock(); + } + } + curTakeIndex = this.takeIndex; + curMaxTakeIndex = this.maxTakeIndex; + } + if (iterator > 0){ + waitTime += (System.nanoTime() - clock); + } + nextTakeIndex = curTakeIndex + 1; + takePos = toIndex(curTakeIndex); + element = (T) this.items[takePos]; + } while(!unsafe.compareAndSwapLong(this, Offsets.takeIndexOffset, curTakeIndex, nextTakeIndex)); + // Empty the cache and release the memory + if (null != element) { + this.items[takePos] = null; +// unsafe.getAndAddInt(this, Offsets.memoryBytesOffset, -1); + } +// if (waitTime > 0){ +// unsafe.getAndAddLong(this, Offsets.waitPutOffset, waitTime); +// } + this.items[takePos] = null; + // Try to measure the queue indicator + measureIndicator(); + return element; + } + + + @SuppressWarnings("unchecked") + public int drainTo(List elements, int maxElements) { + long 
curMaxTakeIndex = this.maxTakeIndex; + long curTakeIndex = this.takeIndex; + long nextTakeIndex; + int takePos; + int count = 0; + int bytesCnt = 0; + // Break if queue is empty + while(toIndex(curTakeIndex) != toIndex(curMaxTakeIndex)) { + nextTakeIndex = curTakeIndex + 1; + takePos = toIndex(curTakeIndex); + if (unsafe.compareAndSwapLong(this, Offsets.takeIndexOffset, curTakeIndex, nextTakeIndex)){ + T element = (T) this.items[takePos]; + elements.add(element); + count ++; + // Empty the cache + this.items[takePos] = null; + bytesCnt = bytesCnt + 1; + if (count >= maxElements){ + break; + } + } + curTakeIndex = this.takeIndex; + curMaxTakeIndex = this.maxTakeIndex; + } + if (bytesCnt > 0) { + unsafe.getAndAddInt(this, Offsets.memoryBytesOffset, -bytesCnt); + } + measureIndicator(); + return count; + } + + + + + public void adjustBuffer(long bufferSize) { + // Just update buffer size limit + this.bufferSize = bufferSize; + } + + /** + * Accumulate memory bytes + * @param byteSize byte Size + */ + private void accumulateMemory(int byteSize){ + // Add memory count + unsafe.getAndAddInt(this, Offsets.memoryBytesOffset, byteSize); + while(memoryBytes >= this.bufferSize){ + // Optimize the park strategy + LockSupport.parkNanos(1L); + } + } + /** + * Convert the long sequence to index + * @param sequence sequenceId + * @return position + */ + private int toIndex(long sequence){ + return (int) (sequence & (items.length - 1)); + } + + /** + * Measure method + */ + private void measureIndicator(){ +// long clock = System.currentTimeMillis(); +// long measureTime = this.measureFlag; +// if (clock >= measureTime){ +// // Only use the wait take time to measure pressure +// long waitTime = this.waitTake; +// if (unsafe.compareAndSwapLong(this, Offsets.measureFlagOffset, +// measureTime, clock + this.measureInterval)){ +// // decrease the wait take time +// indicator.setBufferUsed(memoryBytes); +// indicator.setPressure((double)waitTime/ ((double)(clock - measureTime) * 
Math.pow(10, 6))); +// long time = unsafe.getAndAddLong(this, Offsets.waitTakeOffset, -waitTime); +// if (time < waitTime){ +// // Occur some error? init to zero +// this.waitTake = 0; +// } +// this.waitPut = 0; +// //Invoke the listener +// try { +// listeners.forEach(listener -> listener.onMeasure(indicator)); +// }catch(Exception e){ +// LOG.warn("Error occurred while measuring the queue indicator", e); +// // Not to throw exception +// } +// } +// } + } + private static class UnsafeUtil{ + + private static final Unsafe unsafe; + + static { + final PrivilegedExceptionAction action = () -> { + Field theUnsafe = Unsafe.class.getDeclaredField("theUnsafe"); + theUnsafe.setAccessible(true); + return (Unsafe) theUnsafe.get(null); + }; + try { + unsafe = AccessController.doPrivileged(action); + } catch (PrivilegedActionException e) { + // Throw error + throw new Error(e); + } + } + } + + /** + * Queue field offsets + */ + private static class Offsets{ + /** + * Take index field offset + */ + private static final long takeIndexOffset; + + /** + * Put index field offset + */ + private static final long putIndexOffset; + + /** + * Max take index field offset + */ + private static final long maxTakeIndexOffset; + + /** + * Memory bytes field offset + */ + private static final long memoryBytesOffset; + + /** + * Wait put field offset + */ + private static final long waitPutOffset; + + /** + * Wait take field offset + */ + private static final long waitTakeOffset; + + /** + * Measure flag field offset + */ + private static final long measureFlagOffset; + + static { + Unsafe unsafe = UnsafeUtil.unsafe; + try { + takeIndexOffset = unsafe.objectFieldOffset + (BinlogArrayLockFreeQueue.class.getDeclaredField("takeIndex")); + putIndexOffset = unsafe.objectFieldOffset + (BinlogArrayLockFreeQueue.class.getDeclaredField("putIndex")); + maxTakeIndexOffset = unsafe.objectFieldOffset + (BinlogArrayLockFreeQueue.class.getDeclaredField("maxTakeIndex")); + memoryBytesOffset = 
unsafe.objectFieldOffset + (BinlogArrayLockFreeQueue.class.getDeclaredField("memoryBytes")); + waitPutOffset = unsafe.objectFieldOffset + (BinlogArrayLockFreeQueue.class.getDeclaredField("waitPut")); + waitTakeOffset = unsafe.objectFieldOffset + (BinlogArrayLockFreeQueue.class.getDeclaredField("waitTake")); + measureFlagOffset = unsafe.objectFieldOffset + (BinlogArrayLockFreeQueue.class.getDeclaredField("measureFlag")); + }catch (Exception e){ + throw new Error(e); + } + } + } + + public static void main(String[] args) { + ArrayBlockingQueue queue1 = new ArrayBlockingQueue<>((int)Math.pow(2, 10)); + Executors.newSingleThreadExecutor().submit(() -> { + int count = 0; + while(true) { + String value = null; + // value = queue1.poll(1, TimeUnit.SECONDS); +// queue1.drainTo(new ArrayList<>()); + queue1.take(); + // if (Objects.nonNull(value)){ +// count ++; +// } else { +// System.out.println("blockingQueue(num)" + count); +// break; +// } + } + }); + for (int j = 0; j < 1; j++){ + final int finalJ = j; + new Thread(new Runnable() { + + public void run() { + long time = System.currentTimeMillis(); + for(int i = 0; i < 6000000; i ++){ + try { + long clock = System.currentTimeMillis(); + queue1.put("hello"); + if (System.currentTimeMillis() - clock >= 3){ +// System.out.println("spend1: " + (System.currentTimeMillis() - clock)); + } + } catch (InterruptedException e) { + e.printStackTrace(); + } + } + System.out.println("blockingQueue" + finalJ + ": " + (System.currentTimeMillis() - time)); + } + }).start(); + } + BinlogArrayLockFreeQueue queue = new BinlogArrayLockFreeQueue<>((int)Math.pow(2, 10)); + Executors.newSingleThreadExecutor().submit(() -> { + int count = 0; + while(true) { + // value = queue.take(1, TimeUnit.SECONDS); + int size = queue.drainTo(new ArrayList<>(), Integer.MAX_VALUE); + +// if (Objects.nonNull(value)){ +// count = count + 1; +// } else { +// System.out.println("lockFreeQueue(num)" + count); +// break; +// } + } + }); + for (int j = 0; j < 1; 
j++){ + final int finalJ = j; + new Thread(new Runnable() { + + public void run() { + long time = System.currentTimeMillis(); + for(int i = 0; i < 6000000; i ++){ + long clock = System.currentTimeMillis(); + try { + queue.put("hello"); + } catch (InterruptedException e) { + e.printStackTrace(); + } + if (System.currentTimeMillis() - clock >= 3){ +// System.out.println("spend2: " + i + ":" + (System.currentTimeMillis() - clock)); + } + } + System.out.println("lockFreeQueue" + finalJ + ": " + (System.currentTimeMillis() - time)); + } + }).start(); + } + } + +} diff --git a/images/en_US/ch1/code.png b/images/en_US/ch1/code.png new file mode 100644 index 000000000..cec5ef68c Binary files /dev/null and b/images/en_US/ch1/code.png differ diff --git a/images/zh_CN/ch1/code.png b/images/zh_CN/ch1/code.png new file mode 100644 index 000000000..cec5ef68c Binary files /dev/null and b/images/zh_CN/ch1/code.png differ diff --git a/pom.xml b/pom.xml index 85cd79e9d..f4415ef7c 100644 --- a/pom.xml +++ b/pom.xml @@ -22,7 +22,7 @@ com.webank.wedatasphere.exchangis exchangis - 1.1.2 + ${revision} pom exchangis @@ -37,17 +37,23 @@ - 1.1.2 + 1.1.3 1.1.2 1.4.0 1.4.0 0.1.0-SNAPSHOT - 2.12.12 + 1.3.0 + 3.0.0 + 3.0.0 + 2.11.12 4.7.1 1.8 - 3.3.3 + 3.8.1 + 3.8.2 + 2.6 2.8.5 - 2.11.3 + 2.13.4 + 2.13.4.2 1.9.13 3.1.1 4.5.4 @@ -64,8 +70,9 @@ 0.9.10 2.21 1.9.5 - 1.4.19 0.1.0-SNAPSHOT + 1.9.3 + 1.4.20 @@ -109,12 +116,6 @@ org.apache.linkis linkis-mybatis ${linkis.version} - - - org.springframework - spring-orm - - org.apache.linkis @@ -131,6 +132,12 @@ + + + org.apache.linkis + linkis-gateway-httpclient-support + ${linkis.version} + org.apache.linkis linkis-common @@ -146,6 +153,11 @@ linkis-datasource-client ${linkis.datasource.version} + + org.apache.linkis + linkis-metadata-query-common + ${linkis.datasource.version} + com.google.code.gson gson @@ -154,7 +166,7 @@ com.fasterxml.jackson.core jackson-databind - ${fasterxml.jackson.version} + ${jackson-databind.version} org.codehaus.jackson @@ 
-171,11 +183,6 @@ com.thoughtworks.xstream ${xstream.version} - - org.springframework - spring-orm - 5.2.15.RELEASE - @@ -185,12 +192,12 @@ org.apache.maven.plugins maven-deploy-plugin - 2.8.2 + ${maven-deploy-plugin.version} org.apache.maven.plugins maven-enforcer-plugin - 1.4.1 + ${maven-enforcer-plugin.version} enforce-versions @@ -223,6 +230,10 @@ ${jdk.compile.version} ${jdk.compile.version} + + + + @@ -234,6 +245,17 @@ net.alchim31.maven scala-maven-plugin ${scala-maven-plugin.version} + + + + + + + + + + + eclipse-add-source @@ -263,18 +285,45 @@ - - ${scala.version} - incremental - org.apache.maven.plugins maven-jar-plugin - 2.6 + ${maven-jar-plugin.version} + + + org.codehaus.mojo + flatten-maven-plugin + ${flatten-maven-plugin.version} + + true + resolveCiFriendliesOnly + + + + flatten + + flatten + + process-resources + + + flatten.clean + + clean + + clean + + + + + org.codehaus.mojo + flatten-maven-plugin + + diff --git a/web/src/app.js b/web/src/app.js index fd20a1e6f..6bb5903f0 100644 --- a/web/src/app.js +++ b/web/src/app.js @@ -72,3 +72,10 @@ if (localStr !== 'zh-CN') { localStorage.setItem('fes_locale', 'zh-CN') document.location = '/' } + + +window.addEventListener('beforeunload', function () { + if (localStorage.getItem('exchangis_environment')) { + localStorage.removeItem('exchangis_environment'); + } +}); \ No newline at end of file diff --git a/web/src/common/service.js b/web/src/common/service.js index 719a61378..f524a5662 100644 --- a/web/src/common/service.js +++ b/web/src/common/service.js @@ -503,7 +503,8 @@ export const getJobExecLog = (params) => { pageSize: params.pageSize || 50, onlyKeywords: params.onlyKeywords, ignoreKeywords: params.ignoreKeywords, - lastRows: params.lastRows + lastRows: params.lastRows, + enableTail: true }, { method: "GET", @@ -521,7 +522,8 @@ export const getTaskExecLog = (params) => { jobExecutionId: params.id, onlyKeywords: params.onlyKeywords, ignoreKeywords: params.ignoreKeywords, - lastRows: 
params.lastRows + lastRows: params.lastRows, + enableTail: true }, { method: "GET", @@ -534,7 +536,7 @@ export const getPartitionInfo = (params) => { if (!params.source) return const url = params.source.split(BASE_URL)[1] return request( - `${url}?labels=${getEnvironment()}&dataSourceId=${params.dataSourceId}&database=${params.database}&table=${params.table}&_=${Math.random()}`, + `${url}?labels=${getEnvironment()}&dataSourceId=${params.dataSourceId}&database=${params.database}&table=${params.table}&tableNotExist=${params.tableNotExist}&_=${Math.random()}`, {}, { method: "GET", diff --git a/web/src/images/dataSourceTypeIcon/STARROCKS.png b/web/src/images/dataSourceTypeIcon/STARROCKS.png new file mode 100644 index 000000000..50269cf08 Binary files /dev/null and b/web/src/images/dataSourceTypeIcon/STARROCKS.png differ diff --git a/web/src/images/dataSourceTypeIcon/TDSQL.png b/web/src/images/dataSourceTypeIcon/TDSQL.png new file mode 100644 index 000000000..03b2815ab Binary files /dev/null and b/web/src/images/dataSourceTypeIcon/TDSQL.png differ diff --git a/web/src/pages/dataSourceManage/components/datasourceForm/index.vue b/web/src/pages/dataSourceManage/components/datasourceForm/index.vue index 8134077a7..d526d4e6d 100644 --- a/web/src/pages/dataSourceManage/components/datasourceForm/index.vue +++ b/web/src/pages/dataSourceManage/components/datasourceForm/index.vue @@ -1,6 +1,7 @@