diff --git a/README.md b/README.md index 121e70faec..232e5c3152 100644 --- a/README.md +++ b/README.md @@ -99,7 +99,7 @@ Dinky(原 Dlink): 抢先体验( main 主支):dlink-0.6.0-SNAPSHOT -稳定版本( 0.5.0 分支):dlink-0.5.0 +稳定版本( 0.5.1 分支):dlink-0.5.1 ### 从安装包开始 diff --git a/dlink-admin/pom.xml b/dlink-admin/pom.xml index 327d3dc79d..c78624e92d 100644 --- a/dlink-admin/pom.xml +++ b/dlink-admin/pom.xml @@ -5,7 +5,7 @@ dlink com.dlink - 0.5.0 + 0.5.1 4.0.0 @@ -66,7 +66,7 @@ - - - org.apache.logging.log4j - log4j-api - - - org.apache.logging.log4j - log4j-core - - - org.slf4j - slf4j-log4j12 mysql @@ -136,7 +124,7 @@ \ No newline at end of file diff --git a/dlink-client/dlink-client-hadoop/pom.xml b/dlink-client/dlink-client-hadoop/pom.xml index 792f125d8b..72897383ad 100644 --- a/dlink-client/dlink-client-hadoop/pom.xml +++ b/dlink-client/dlink-client-hadoop/pom.xml @@ -5,7 +5,7 @@ dlink-client com.dlink - 0.5.0 + 0.5.1 4.0.0 @@ -13,7 +13,7 @@ UTF-8 - 2.7.7 + 3.1.0 compile diff --git a/dlink-client/pom.xml b/dlink-client/pom.xml index 9a499e964c..371b42192d 100644 --- a/dlink-client/pom.xml +++ b/dlink-client/pom.xml @@ -5,7 +5,7 @@ dlink com.dlink - 0.5.0 + 0.5.1 4.0.0 diff --git a/dlink-common/pom.xml b/dlink-common/pom.xml index 22cc37c475..6f958a3ddd 100644 --- a/dlink-common/pom.xml +++ b/dlink-common/pom.xml @@ -5,7 +5,7 @@ dlink com.dlink - 0.5.0 + 0.5.1 4.0.0 diff --git a/dlink-common/src/main/java/com/dlink/constant/CommonConstant.java b/dlink-common/src/main/java/com/dlink/constant/CommonConstant.java index 8482081208..115e9e124e 100644 --- a/dlink-common/src/main/java/com/dlink/constant/CommonConstant.java +++ b/dlink-common/src/main/java/com/dlink/constant/CommonConstant.java @@ -10,7 +10,7 @@ public interface CommonConstant { /** * 项目版本号(banner使用) */ - String PROJECT_VERSION = "0.5.0"; + String PROJECT_VERSION = "0.5.1"; /** * 实例健康 */ diff --git a/dlink-connectors/dlink-connector-jdbc-1.12/pom.xml b/dlink-connectors/dlink-connector-jdbc-1.12/pom.xml index da3afb2b48..a015ced853 100644 --- a/dlink-connectors/dlink-connector-jdbc-1.12/pom.xml +++ b/dlink-connectors/dlink-connector-jdbc-1.12/pom.xml @@ -5,7 +5,7 @@ dlink-connectors com.dlink - 0.5.0 + 0.5.1 4.0.0 diff --git a/dlink-connectors/dlink-connector-jdbc-1.13/pom.xml b/dlink-connectors/dlink-connector-jdbc-1.13/pom.xml index 23af2b61b6..a738c96cc8 100644 --- a/dlink-connectors/dlink-connector-jdbc-1.13/pom.xml +++ b/dlink-connectors/dlink-connector-jdbc-1.13/pom.xml @@ -5,7 +5,7 @@ dlink-connectors com.dlink - 0.5.0 + 0.5.1 4.0.0 diff --git a/dlink-connectors/pom.xml b/dlink-connectors/pom.xml index 2eb74ead16..dd416c67ce 100644 --- a/dlink-connectors/pom.xml +++ b/dlink-connectors/pom.xml @@ -5,7 +5,7 @@ dlink com.dlink - 0.5.0 + 0.5.1 4.0.0 pom diff --git a/dlink-core/pom.xml b/dlink-core/pom.xml index 546bb23011..78150e3181 100644 --- a/dlink-core/pom.xml +++ b/dlink-core/pom.xml @@ -5,7 +5,7 @@ dlink com.dlink - 0.5.0 + 0.5.1 4.0.0 jar @@ -16,6 +16,8 @@ UTF-8 1.8 1.8 + + provided @@ -70,5 +72,10 @@ dlink-gateway ${scope.runtime} + + com.dlink + dlink-client-hadoop + ${scope.runtime} + \ No newline at end of file diff --git a/dlink-core/src/main/java/com/dlink/job/JobConfig.java b/dlink-core/src/main/java/com/dlink/job/JobConfig.java index 7bd03fa710..5edf1e64ca 100644 --- a/dlink-core/src/main/java/com/dlink/job/JobConfig.java +++ b/dlink-core/src/main/java/com/dlink/job/JobConfig.java @@ -159,9 +159,15 @@ public void setSessionConfig(SessionConfig sessionConfig){ public void buildGatewayConfig(Map config){ gatewayConfig = new 
GatewayConfig(); - gatewayConfig.setClusterConfig(ClusterConfig.build(config.get("flinkConfigPath").toString(), - config.get("flinkLibPath").toString(), - config.get("hadoopConfigPath").toString())); + if(config.containsKey("hadoopConfigPath")) { + gatewayConfig.setClusterConfig(ClusterConfig.build(config.get("flinkConfigPath").toString(), + config.get("flinkLibPath").toString(), + config.get("hadoopConfigPath").toString())); + }else { + gatewayConfig.setClusterConfig(ClusterConfig.build(config.get("flinkConfigPath").toString(), + config.get("flinkLibPath").toString(), + "")); + } AppConfig appConfig = new AppConfig(); if(config.containsKey("userJarPath") && Asserts.isNotNullString((String) config.get("userJarPath"))){ appConfig.setUserJarPath(config.get("userJarPath").toString()); @@ -176,6 +182,18 @@ public void buildGatewayConfig(Map config){ if(config.containsKey("flinkConfig") && Asserts.isNotNullMap((Map) config.get("flinkConfig"))){ gatewayConfig.setFlinkConfig(FlinkConfig.build((Map)config.get("flinkConfig"))); } + if(config.containsKey("kubernetesConfig")){ + Map kubernetesConfig = (Map) config.get("kubernetesConfig"); + if(kubernetesConfig.containsKey("kubernetes.namespace")) { + gatewayConfig.getFlinkConfig().getConfiguration().put("kubernetes.namespace", kubernetesConfig.get("kubernetes.namespace").toString()); + } + if(kubernetesConfig.containsKey("kubernetes.cluster-id")) { + gatewayConfig.getFlinkConfig().getConfiguration().put("kubernetes.cluster-id", kubernetesConfig.get("kubernetes.cluster-id").toString()); + } + if(kubernetesConfig.containsKey("kubernetes.container.image")) { + gatewayConfig.getFlinkConfig().getConfiguration().put("kubernetes.container.image", kubernetesConfig.get("kubernetes.container.image").toString()); + } + } } public void addGatewayConfig(List> configList){ diff --git a/dlink-doc/sql/dlink.sql b/dlink-doc/sql/dlink.sql index 06e82fa5a2..38835962c4 100644 --- a/dlink-doc/sql/dlink.sql +++ b/dlink-doc/sql/dlink.sql @@ -114,8 +114,8 @@ CREATE TABLE `dlink_flink_document` ( `type` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL COMMENT '类型', `subtype` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL COMMENT '子类型', `name` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL COMMENT '信息', - `description` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL COMMENT '描述', - `fill_value` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL COMMENT '填充值', + `description` LONGTEXT CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL COMMENT '描述', + `fill_value` LONGTEXT CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL COMMENT '填充值', `version` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL COMMENT '版本号', `like_num` int(255) NULL DEFAULT 0 COMMENT '喜爱值', `enabled` tinyint(1) NOT NULL DEFAULT 0 COMMENT '是否启用', @@ -124,6 +124,28 @@ CREATE TABLE `dlink_flink_document` ( PRIMARY KEY (`id`) USING BTREE ) ENGINE = InnoDB AUTO_INCREMENT = 264 CHARACTER SET = utf8 COLLATE = utf8_general_ci COMMENT = '文档管理' ROW_FORMAT = Dynamic; +INSERT INTO `dlink_flink_document` VALUES (1, 'Variable', '优化参数', 'Batch/Streaming', 'set table.exec.async-lookup.buffer-capacity', '异步查找连接可以触发的最大异步操作的操作数。 \nThe max number of async i/o operation that the async lookup join can trigger.', 'Set \'table.exec.async-lookup.buffer-capacity\'=\'100\';', '1.14', 0, 1, '2022-01-20 15:00:00', '2022-01-20 15:00:00'); +INSERT INTO 
`dlink_flink_document` VALUES (2, 'Variable', '优化参数', 'Batch/Streaming', 'set table.exec.async-lookup.timeout', '异步操作完成的超时时间。 \nThe async timeout for the asynchronous operation to complete.', 'Set \'table.exec.async-lookup.timeout\'=\'3 min\';', '1.14', 0, 1, '2022-01-20 15:00:00', '2022-01-20 15:00:00'); +INSERT INTO `dlink_flink_document` VALUES (3, 'Variable', '优化参数', 'Batch', 'set table.exec.disabled-operators', '禁用指定operators,用逗号分隔 \nMainly for testing. A comma-separated list of operator names, each name represents a kind of disabled operator. Operators that can be disabled include \"NestedLoopJoin\", \"ShuffleHashJoin\", \"BroadcastHashJoin\", \"SortMergeJoin\", \"HashAgg\", \"SortAgg\". By default no operator is disabled.', 'Set \'table.exec.disabled-operators\'=\'SortMergeJoin\';', '1.14', 0, 1, '2022-01-20 15:00:00', '2022-01-20 15:00:00'); +INSERT INTO `dlink_flink_document` VALUES (4, 'Variable', '优化参数', 'Streaming', 'set table.exec.mini-batch.allow-latency', '最大等待时间可用于MiniBatch缓冲输入记录。 MiniBatch是用于缓冲输入记录以减少状态访问的优化。MiniBatch以允许的等待时间间隔以及达到最大缓冲记录数触发。注意:如果将table.exec.mini-batch.enabled设置为true,则其值必须大于零.', 'Set \'table.exec.mini-batch.allow-latency\'=\'-1 ms\';', '1.14', 0, 1, '2022-01-20 15:00:00', '2022-01-20 15:00:00'); +INSERT INTO `dlink_flink_document` VALUES (5, 'Variable', '优化参数', 'Streaming', 'set table.exec.mini-batch.enabled', '指定是否启用MiniBatch优化。 MiniBatch是用于缓冲输入记录以减少状态访问的优化。默认情况下禁用此功能。 要启用此功能,用户应将此配置设置为true。注意:如果启用了mini batch 处理,则必须设置“ table.exec.mini-batch.allow-latency”和“ table.exec.mini-batch.size”.', 'Set \'table.exec.mini-batch.enabled\'=\'false\';', '1.14', 0, 1, '2022-01-20 15:00:00', '2022-01-20 15:00:00'); +INSERT INTO `dlink_flink_document` VALUES (6, 'Variable', '优化参数', 'Streaming', 'set table.exec.mini-batch.size', '可以为MiniBatch缓冲最大输入记录数。 MiniBatch是用于缓冲输入记录以减少状态访问的优化。MiniBatch以允许的等待时间间隔以及达到最大缓冲记录数触发。 注意:MiniBatch当前仅适用于非窗口聚合。如果将table.exec.mini-batch.enabled设置为true,则其值必须为正.', 'Set \'table.exec.mini-batch.size\'=\'-1\';', '1.14', 0, 1, '2022-01-20 15:00:00', '2022-01-20 15:00:00'); +INSERT INTO `dlink_flink_document` VALUES (7, 'Variable', '优化参数', 'Batch/Streaming', 'set table.exec.resource.default-parallelism', '设置所有Operator的默认并行度。 \nSets default parallelism for all operators (such as aggregate, join, filter) to run with parallel instances. This config has a higher priority than parallelism of StreamExecutionEnvironment (actually, this config overrides the parallelism of StreamExecutionEnvironment). A value of -1 indicates that no default parallelism is set, then it will fallback to use the parallelism of StreamExecutionEnvironment.', 'Set \'table.exec.resource.default-parallelism\'=\'1\';', '1.14', 0, 1, '2022-01-20 15:00:00', '2022-01-20 15:00:00'); +INSERT INTO `dlink_flink_document` VALUES (8, 'Variable', '优化参数', 'Batch/Streaming', 'set table.exec.sink.not-null-enforcer', '对表的NOT NULL列约束强制执行不能将空值插入到表中。Flink支持“error”(默认)和“drop”强制行为 \nThe NOT NULL column constraint on a table enforces that null values can\'t be inserted into the table. Flink supports \'error\' (default) and \'drop\' enforcement behavior. By default, Flink will check values and throw runtime exception when null values writing into NOT NULL columns. 
Users can change the behavior to \'drop\' to silently drop such records without throwing exception.\nPossible values:\n\"ERROR\" \n\"DROP\"', 'Set \'table.exec.sink.not-null-enforcer\'=\'ERROR\';', '1.14', 0, 1, '2022-01-20 15:00:00', '2022-01-20 15:00:00'); +INSERT INTO `dlink_flink_document` VALUES (9, 'Variable', '优化参数', 'Streaming', 'set table.exec.sink.upsert-materialize', '由于分布式系统中 Shuffle 导致 ChangeLog 数据混乱,Sink 接收到的数据可能不是全局 upsert 的顺序。因此,在 upsert sink 之前添加 upsert materialize 运算符。它接收上游的变更日志记录并为下游生成一个 upsert 视图。默认情况下,当唯一键出现分布式无序时,会添加具体化操作符。您也可以选择不实现(NONE)或强制实现(FORCE)。\nPossible values:\n\"NONE\" \n\"FORCE\" \n\"AUTO\"', 'Set \'table.exec.sink.upsert-materialize\'=\'AUTO\';', '1.14', 0, 1, '2022-01-20 15:00:00', '2022-01-20 15:00:00'); +INSERT INTO `dlink_flink_document` VALUES (10, 'Module', '建表语句', NULL, 'create.table.kafka', 'kafka快速建表格式', 'CREATE TABLE Kafka_Table (\n `event_time` TIMESTAMP(3) METADATA FROM \'timestamp\',\n `partition` BIGINT METADATA VIRTUAL,\n `offset` BIGINT METADATA VIRTUAL,\n `user_id` BIGINT,\n `item_id` BIGINT,\n `behavior` STRING\n) WITH (\n \'connector\' = \'kafka\',\n \'topic\' = \'user_behavior\',\n \'properties.bootstrap.servers\' = \'localhost:9092\',\n \'properties.group.id\' = \'testGroup\',\n \'scan.startup.mode\' = \'earliest-offset\',\n \'format\' = \'csv\'\n);\n--可选: \'value.fields-include\' = \'ALL\',\n--可选: \'json.ignore-parse-errors\' = \'true\',\n--可选: \'key.fields-prefix\' = \'k_\',', '1.14', 0, 1, '2022-01-20 16:59:18', '2022-01-20 17:57:32'); +INSERT INTO `dlink_flink_document` VALUES (11, 'Module', '建表语句', NULL, 'create.table.doris', 'Doris快速建表', 'CREATE TABLE doris_table (\n cid INT,\n sid INT,\n name STRING,\n cls STRING,\n score INT,\n PRIMARY KEY (cid) NOT ENFORCED\n) WITH ( \n\'connector\' = \'doris\',\n\'fenodes\' = \'127.0.0.1:8030\' ,\n\'table.identifier\' = \'test.scoreinfo\',\n\'username\' = \'root\',\n\'password\'=\'\'\n);', '1.14', 0, 1, '2022-01-20 17:08:00', '2022-01-20 17:57:26'); +INSERT INTO `dlink_flink_document` VALUES (12, 'Module', '建表语句', NULL, 'create.table.jdbc', 'JDBC建表语句', 'CREATE TABLE JDBC_table (\n id BIGINT,\n name STRING,\n age INT,\n status BOOLEAN,\n PRIMARY KEY (id) NOT ENFORCED\n) WITH (\n \'connector\' = \'jdbc\',\n \'url\' = \'jdbc:mysql://localhost:3306/mydatabase\',\n \'table-name\' = \'users\',\n \'username\' = \'root\',\n \'password\' = \'123456\'\n);\n--可选: \'sink.parallelism\'=\'1\',\n--可选: \'lookup.cache.ttl\'=\'1000s\',', '1.14', 0, 1, '2022-01-20 17:15:26', '2022-01-20 17:57:20'); +INSERT INTO `dlink_flink_document` VALUES (13, 'Module', '创建catalog模块', NULL, 'create.catalog.hive', '创建HIVE的catalog', 'CREATE CATALOG hive WITH ( \n \'type\' = \'hive\',\n \'default-database\' = \'default\',\n \'hive-conf-dir\' = \'/app/wwwroot/MBDC/hive/conf/\', --hive配置文件\n \'hadoop-conf-dir\'=\'/app/wwwroot/MBDC/hadoop/etc/hadoop/\' --hadoop配置文件,配了环境变量则不需要。\n);', '1.14', 0, 1, '2022-01-20 17:18:54', '2022-01-20 17:18:54'); +INSERT INTO `dlink_flink_document` VALUES (14, 'Operator', '', NULL, 'use.catalog.hive', '使用hive的catalog', 'USE CATALOG hive;', '1.14', 0, 1, '2022-01-20 17:22:53', '2022-01-20 17:22:53'); +INSERT INTO `dlink_flink_document` VALUES (15, 'Operator', NULL, NULL, 'use.catalog.default', '使用default的catalog', 'USE CATALOG default_catalog; \n', '1.14', 0, 1, '2022-01-20 17:23:48', '2022-01-20 17:24:23'); +INSERT INTO `dlink_flink_document` VALUES (16, 'Variable', '设置参数', NULL, 'set dialect.hive', '使用hive方言', 'Set table.sql-dialect=hive;', '1.14', 0, 1, '2022-01-20 17:25:37', '2022-01-20 17:27:23'); 
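-- Editor's note: an assumed upgrade sketch, not part of this patch. The schema change at
-- the top of this hunk widens `description` and `fill_value` from VARCHAR(255) to
-- LONGTEXT, which only takes effect on fresh installs of dlink.sql; an existing 0.5.0
-- database would need an equivalent manual migration, e.g.:
--   ALTER TABLE `dlink_flink_document`
--     MODIFY COLUMN `description` LONGTEXT CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL COMMENT '描述',
--     MODIFY COLUMN `fill_value` LONGTEXT CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL COMMENT '填充值';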
+INSERT INTO `dlink_flink_document` VALUES (17, 'Variable', '设置参数', NULL, 'set dialect.default', '使用default方言', 'Set table.sql-dialect=default;', '1.14', 0, 1, '2022-01-20 17:26:19', '2022-01-20 17:27:20'); +INSERT INTO `dlink_flink_document` VALUES (18, 'Module', '建表语句', NULL, 'create.stream.table.hive', '创建流式HIVE表', 'CREATE CATALOG hive WITH ( --创建hive的catalog\n \'type\' = \'hive\',\n \'hive-conf-dir\' = \'/app/wwwroot/MBDC/hive/conf/\',\n \'hadoop-conf-dir\'=\'/app/wwwroot/MBDC/hadoop/etc/hadoop/\'\n);\n\nUSE CATALOG hive; \nUSE offline_db; --选择库\nset table.sql-dialect=hive; --设置方言\n\nCREATE TABLE hive_stream_table (\n user_id STRING,\n order_amount DOUBLE\n) PARTITIONED BY (dt STRING, hr STRING) STORED AS parquet TBLPROPERTIES (\n \'partition.time-extractor.timestamp-pattern\'=\'$dt $hr:00:00\',\n \'sink.partition-commit.trigger\'=\'partition-time\',\n \'sink.partition-commit.delay\'=\'1min\',\n \'sink.semantic\' = \'exactly-once\',\n \'sink.rolling-policy.rollover-interval\' =\'1min\',\n \'sink.rolling-policy.check-interval\'=\'1min\',\n \'sink.partition-commit.policy.kind\'=\'metastore,success-file\'\n);', '1.14', 0, 1, '2022-01-20 17:34:06', '2022-01-20 17:46:41'); +INSERT INTO `dlink_flink_document` VALUES (19, 'Module', '建表语句', NULL, 'create.table.mysql_cdc', '创建Mysql_CDC表', 'CREATE TABLE mysql_cdc_table(\n cid INT,\n sid INT,\n cls STRING,\n score INT,\n PRIMARY KEY (cid) NOT ENFORCED\n) WITH (\n\'connector\' = \'mysql-cdc\',\n\'hostname\' = \'127.0.0.1\',\n\'port\' = \'3306\',\n\'username\' = \'test\',\n\'password\' = \'123456\',\n\'database-name\' = \'test\',\n\'server-time-zone\' = \'UTC\',\n\'scan.incremental.snapshot.enabled\' = \'true\',\n\'debezium.snapshot.mode\'=\'latest-offset\' ,-- 或者key是scan.startup.mode,initial表示要历史数据,latest-offset表示不要历史数据\n\'debezium.datetime.format.date\'=\'yyyy-MM-dd\',\n\'debezium.datetime.format.time\'=\'HH-mm-ss\',\n\'debezium.datetime.format.datetime\'=\'yyyy-MM-dd HH-mm-ss\',\n\'debezium.datetime.format.timestamp\'=\'yyyy-MM-dd HH-mm-ss\',\n\'debezium.datetime.format.timestamp.zone\'=\'UTC+8\',\n\'table-name\' = \'mysql_cdc_table\');', '1.14', 0, 1, '2022-01-20 17:49:14', '2022-01-20 17:52:20'); +INSERT INTO `dlink_flink_document` VALUES (20, 'Module', '建表语句', NULL, 'create.table.hudi', '创建hudi表', 'CREATE TABLE hudi_table\n(\n `goods_order_id` bigint COMMENT \'自增主键id\',\n `goods_order_uid` string COMMENT \'订单uid\',\n `customer_uid` string COMMENT \'客户uid\',\n `customer_name` string COMMENT \'客户name\',\n `create_time` timestamp(3) COMMENT \'创建时间\',\n `update_time` timestamp(3) COMMENT \'更新时间\',\n `create_by` string COMMENT \'创建人uid(唯一标识)\',\n `update_by` string COMMENT \'更新人uid(唯一标识)\',\n PRIMARY KEY (goods_order_id) NOT ENFORCED\n) COMMENT \'hudi_table\'\nWITH (\n\'connector\' = \'hudi\',\n\'path\' = \'hdfs://cluster1/data/bizdata/cdc/mysql/order/goods_order\', -- 路径会自动创建\n\'hoodie.datasource.write.recordkey.field\' = \'goods_order_id\', -- 主键\n\'write.precombine.field\' = \'update_time\', -- 相同的键值时,取此字段最大值,默认ts字段\n\'read.streaming.skip_compaction\' = \'true\', -- 避免重复消费问题\n\'write.bucket_assign.tasks\' = \'2\', -- 并发写的 bucekt 数\n\'write.tasks\' = \'2\',\n\'compaction.tasks\' = \'1\',\n\'write.operation\' = \'upsert\', -- UPSERT(插入更新)\\INSERT(插入)\\BULK_INSERT(批插入)(upsert性能会低些,不适合埋点上报)\n\'write.rate.limit\' = \'20000\', -- 限制每秒多少条\n\'table.type\' = \'COPY_ON_WRITE\', -- 默认COPY_ON_WRITE ,\n\'compaction.async.enabled\' = \'true\', -- 在线压缩\n\'compaction.trigger.strategy\' = \'num_or_time\', -- 按次数压缩\n\'compaction.delta_commits\' = \'20\', -- 
默认为5\n\'compaction.delta_seconds\' = \'60\', -- 默认为1小时\n\'hive_sync.enable\' = \'true\', -- 启用hive同步\n\'hive_sync.mode\' = \'hms\', -- 启用hive hms同步,默认jdbc\n\'hive_sync.metastore.uris\' = \'thrift://cdh2.vision.com:9083\', -- required, metastore的端口\n\'hive_sync.jdbc_url\' = \'jdbc:hive2://cdh1.vision.com:10000\', -- required, hiveServer地址\n\'hive_sync.table\' = \'order_mysql_goods_order\', -- required, hive 新建的表名 会自动同步hudi的表结构和数据到hive\n\'hive_sync.db\' = \'cdc_ods\', -- required, hive 新建的数据库名\n\'hive_sync.username\' = \'hive\', -- required, HMS 用户名\n\'hive_sync.password\' = \'123456\', -- required, HMS 密码\n\'hive_sync.skip_ro_suffix\' = \'true\' -- 去除ro后缀\n);', '1.14', 0, 1, '2022-01-20 17:56:50', '2022-01-20 17:56:50'); + + -- ---------------------------- -- Table structure for dlink_history -- ---------------------------- diff --git a/dlink-executor/pom.xml b/dlink-executor/pom.xml index f0db475718..1ea08ed43d 100644 --- a/dlink-executor/pom.xml +++ b/dlink-executor/pom.xml @@ -5,7 +5,7 @@ dlink com.dlink - 0.5.0 + 0.5.1 4.0.0 diff --git a/dlink-extends/pom.xml b/dlink-extends/pom.xml index 9dd696fe44..3d9fcdd831 100644 --- a/dlink-extends/pom.xml +++ b/dlink-extends/pom.xml @@ -5,7 +5,7 @@ dlink com.dlink - 0.5.0 + 0.5.1 4.0.0 diff --git a/dlink-function/pom.xml b/dlink-function/pom.xml index 2bfc296f0a..fbe25bc23d 100644 --- a/dlink-function/pom.xml +++ b/dlink-function/pom.xml @@ -5,7 +5,7 @@ dlink com.dlink - 0.5.0 + 0.5.1 4.0.0 diff --git a/dlink-gateway/pom.xml b/dlink-gateway/pom.xml index 86a566deaf..58cf823d6c 100644 --- a/dlink-gateway/pom.xml +++ b/dlink-gateway/pom.xml @@ -5,7 +5,7 @@ dlink com.dlink - 0.5.0 + 0.5.1 4.0.0 diff --git a/dlink-metadata/dlink-metadata-base/pom.xml b/dlink-metadata/dlink-metadata-base/pom.xml index fab839d19c..50b15c0558 100644 --- a/dlink-metadata/dlink-metadata-base/pom.xml +++ b/dlink-metadata/dlink-metadata-base/pom.xml @@ -5,7 +5,7 @@ dlink-metadata com.dlink - 0.5.0 + 0.5.1 4.0.0 diff --git a/dlink-metadata/dlink-metadata-clickhouse/pom.xml b/dlink-metadata/dlink-metadata-clickhouse/pom.xml index 53b394d2d9..bdbd3cb295 100644 --- a/dlink-metadata/dlink-metadata-clickhouse/pom.xml +++ b/dlink-metadata/dlink-metadata-clickhouse/pom.xml @@ -5,7 +5,7 @@ dlink-metadata com.dlink - 0.5.0 + 0.5.1 4.0.0 diff --git a/dlink-metadata/dlink-metadata-mysql/pom.xml b/dlink-metadata/dlink-metadata-mysql/pom.xml index 1933999d18..059650e95f 100644 --- a/dlink-metadata/dlink-metadata-mysql/pom.xml +++ b/dlink-metadata/dlink-metadata-mysql/pom.xml @@ -5,7 +5,7 @@ dlink-metadata com.dlink - 0.5.0 + 0.5.1 4.0.0 diff --git a/dlink-metadata/dlink-metadata-oracle/pom.xml b/dlink-metadata/dlink-metadata-oracle/pom.xml index bf2c1e4ac6..01fbc368b0 100644 --- a/dlink-metadata/dlink-metadata-oracle/pom.xml +++ b/dlink-metadata/dlink-metadata-oracle/pom.xml @@ -5,7 +5,7 @@ dlink-metadata com.dlink - 0.5.0 + 0.5.1 4.0.0 diff --git a/dlink-metadata/dlink-metadata-postgresql/pom.xml b/dlink-metadata/dlink-metadata-postgresql/pom.xml index d95d1ba354..c27fdaa5a7 100644 --- a/dlink-metadata/dlink-metadata-postgresql/pom.xml +++ b/dlink-metadata/dlink-metadata-postgresql/pom.xml @@ -5,7 +5,7 @@ dlink-metadata com.dlink - 0.5.0 + 0.5.1 4.0.0 diff --git a/dlink-metadata/pom.xml b/dlink-metadata/pom.xml index 12277fccaa..ec060fb662 100644 --- a/dlink-metadata/pom.xml +++ b/dlink-metadata/pom.xml @@ -5,7 +5,7 @@ dlink com.dlink - 0.5.0 + 0.5.1 4.0.0 diff --git a/dlink-web/pom.xml b/dlink-web/pom.xml index a65173f24d..998b231d42 100644 --- a/dlink-web/pom.xml +++ 
b/dlink-web/pom.xml @@ -5,7 +5,7 @@ dlink com.dlink - 0.5.0 + 0.5.1 4.0.0 pom diff --git a/dlink-web/src/components/Chart/index.tsx b/dlink-web/src/components/Chart/index.tsx index 36ba6e5958..cb469c0929 100644 --- a/dlink-web/src/components/Chart/index.tsx +++ b/dlink-web/src/components/Chart/index.tsx @@ -28,7 +28,7 @@ const Chart = (props:any) => { const toRebuild = () => { if(!isSql(current.task.dialect)){ - showJobData(current.console.result.jobId,dispatch); + showJobData(current.key,current.console.result.jobId,dispatch); } }; diff --git a/dlink-web/src/components/Studio/StudioConsole/StudioHistory/index.less b/dlink-web/src/components/Studio/StudioConsole/StudioHistory/index.less index 773013f4c4..070fc3ef4e 100644 --- a/dlink-web/src/components/Studio/StudioConsole/StudioHistory/index.less +++ b/dlink-web/src/components/Studio/StudioConsole/StudioHistory/index.less @@ -1,9 +1,9 @@ .code{ - width: 100%; - max-height: 500px; + //width: 100%; + //max-height: 400px; display: block; padding: 16px; - overflow: auto; + //overflow: auto; font-size: 85%; line-height: 1.45; background-color: rgb(246, 248, 250); diff --git a/dlink-web/src/components/Studio/StudioConsole/StudioHistory/index.tsx b/dlink-web/src/components/Studio/StudioConsole/StudioHistory/index.tsx index 6824df16a9..206c1912a4 100644 --- a/dlink-web/src/components/Studio/StudioConsole/StudioHistory/index.tsx +++ b/dlink-web/src/components/Studio/StudioConsole/StudioHistory/index.tsx @@ -10,7 +10,7 @@ import { ModalForm, } from '@ant-design/pro-form'; import styles from "./index.less"; -import {showJobData} from "@/components/Studio/StudioEvent/DQL"; +import {Scrollbars} from 'react-custom-scrollbars'; import StudioPreview from "../StudioPreview"; import {getJobData} from "@/pages/FlinkSqlStudio/service"; @@ -39,19 +39,24 @@ type HistoryItem = { }; type HistoryConfig={ - useSession:boolean; - session:string; - useRemote:boolean; - clusterId:number; - host:string; - useResult:boolean; - maxRowNum:number; - taskId:number; - jobName:string; - useSqlFragment:boolean; - checkpoint:number; - parallelism:number; - savePointPath:string; + useSession: boolean; + session: string; + useRemote: boolean; + type: string; + clusterId: number; + clusterConfigurationId: number; + host: string; + useResult: boolean; + useChangeLog: boolean; + maxRowNum: number; + useAutoCancel: boolean; + taskId: number; + jobName: string; + useSqlFragment: boolean; + useStatementSet: boolean; + checkpoint: number; + parallelism: number; + savePointPath: string; }; const url = '/api/history'; @@ -289,15 +294,27 @@ const StudioHistory = (props: any) => { {config.useRemote?'远程':'本地'} + + {config.type} + {config.clusterId} + + {config.clusterConfigurationId} + {config.useResult?'启用':'禁用'} + + {config.useChangeLog?'启用':'禁用'} + {config.maxRowNum} + + {config.useAutoCancel?'启用':'禁用'} + {row.jobManagerAddress} @@ -310,12 +327,18 @@ const StudioHistory = (props: any) => { {config.useSqlFragment?'启用':'禁用'} + + {config.useStatementSet?'启用':'禁用'} + {config.parallelism} {config.checkpoint} + + {config.savePointStrategy} + {config.savePointPath} @@ -361,8 +384,10 @@ const StudioHistory = (props: any) => { {row.jobId} - + +
{row.error}
+
)} diff --git a/dlink-web/src/components/Studio/StudioConsole/StudioTable/index.tsx b/dlink-web/src/components/Studio/StudioConsole/StudioTable/index.tsx index 65f195467b..3d3f367fde 100644 --- a/dlink-web/src/components/Studio/StudioConsole/StudioTable/index.tsx +++ b/dlink-web/src/components/Studio/StudioConsole/StudioTable/index.tsx @@ -21,7 +21,7 @@ const StudioTable = (props: any) => { }; const showDetail=()=>{ - showJobData(current.console.result.jobId,dispatch) + showJobData(current.key,current.console.result.jobId,dispatch) }; const renderFlinkSQLContent = () => { diff --git a/dlink-web/src/components/Studio/StudioConsole/index.tsx b/dlink-web/src/components/Studio/StudioConsole/index.tsx index 9fdded3474..b5513fda74 100644 --- a/dlink-web/src/components/Studio/StudioConsole/index.tsx +++ b/dlink-web/src/components/Studio/StudioConsole/index.tsx @@ -64,19 +64,6 @@ const StudioConsole = (props: any) => { {current? :} - - - 指标 - - } - key="StudioMetrics" - > - - - - @@ -129,19 +116,6 @@ const StudioConsole = (props: any) => { - - - 文档 - - } - key="StudioDocument" - > - - - - ); }; diff --git a/dlink-web/src/components/Studio/StudioEdit/index.tsx b/dlink-web/src/components/Studio/StudioEdit/index.tsx index 309088a337..2dddff4ee5 100644 --- a/dlink-web/src/components/Studio/StudioEdit/index.tsx +++ b/dlink-web/src/components/Studio/StudioEdit/index.tsx @@ -6,7 +6,7 @@ import {connect,Dispatch} from "umi"; import {DocumentStateType} from "@/pages/Document/model"; import {DocumentTableListItem} from "@/pages/Document/data"; import {parseSqlMetaData} from "@/components/Studio/StudioEvent/Utils"; -import {Column, MetaData} from "@/components/Studio/StudioEvent/data"; +import {Column, MetaData, SqlMetaData} from "@/components/Studio/StudioEvent/data"; import StudioExplain from "@/components/Studio/StudioConsole/StudioExplain"; import {format} from "sql-formatter"; @@ -23,45 +23,35 @@ interface ISuggestions { const FlinkSqlEditor = (props:any) => { const { - tabsKey, + tabsKey, height = '100%', width = '100%', language = 'sql', - onChange=(val: string, event: { changes: { text: any }[] })=>{}, + onChange=(val: string, event: any)=>{}, options = { selectOnLineNumbers: true, renderSideBySide: false, }, - tabs, + sql, + monaco, + // sqlMetaData, fillDocuments, } = props; - const editorInstance:any = useRef(); + const editorInstance: any = useRef(); const monacoInstance: any = useRef(); const [modalVisible, handleModalVisible] = useState(false); - - const getTabIndex = ():number=>{ - for(let i=0;i({}); + const [code, setCode] = useState(sql); useEffect( () => () => { - provider.dispose(); - }, - [] - ); + reloadCompletion(); + }, [code]); useImperativeHandle(editorInstance, () => ({ handleSetEditorVal, - getEditorData: () => cache.current, + getEditorData: () => code, })); const handleSetEditorVal = (value: string): void => { @@ -82,23 +72,34 @@ const FlinkSqlEditor = (props:any) => { } }; - const onChangeHandle = (val: string, event: { changes: { text: any }[] }) => { - let sqlMetaData = parseSqlMetaData(val); - props.saveMetaData(sqlMetaData,tabs,tabIndex); + const onChangeHandle = (val: string, event: any) => { + setCode(val); onChange(val,event); - /*const curWord = event.changes[0].text; - if (curWord === ';') { - cache.current = val +'\r\n'; - setRefresh(!refresh); // 刷新页面 - return; - } - cache.current = val;*/ + /*let newSqlMetaData = parseSqlMetaData(val); + setMetaData(newSqlMetaData); + props.saveSqlMetaData(newSqlMetaData,tabsKey);*/ props.saveSql(val); }; + const reloadCompletion = 
() =>{ + let newSqlMetaData = parseSqlMetaData(code); + setMetaData({...newSqlMetaData}); + provider.dispose();// 清空提示项 + provider = monacoInstance.current.languages.registerCompletionItemProvider('sql', { + provideCompletionItems() { + return { + suggestions:buildSuggestions(), + }; + }, + // quickSuggestions: false, + // triggerCharacters: ['$', '.', '='], + }); + }; + const buildSuggestions = () => { let suggestions: ISuggestions[] = []; - tabs.panes[tabIndex].sqlMetaData?.metaData?.forEach((item:MetaData) => { + console.log(metaData); + metaData.metaData?.forEach((item: MetaData) => { suggestions.push({ label: item.table, kind: _monaco.languages.CompletionItemKind.Constant, @@ -141,9 +142,6 @@ const FlinkSqlEditor = (props:any) => { const editorDidMountHandle = (editor: any, monaco: any) => { monacoInstance.current = monaco; editorInstance.current = editor; - editor.addCommand(monaco.KeyMod.Alt|monaco.KeyCode.KEY_1,function (){ - props.saveText(tabs,tabIndex); - }) editor.addCommand(monaco.KeyMod.Alt|monaco.KeyCode.KEY_2,function (){ handleModalVisible(true); @@ -151,17 +149,9 @@ const FlinkSqlEditor = (props:any) => { editor.addCommand(monaco.KeyMod.Alt|monaco.KeyCode.KEY_3,function (){ editor.getAction(['editor.action.formatDocument'])._run(); }) - provider.dispose();// 清空提示项 - // 提示项设值 - provider = monaco.languages.registerCompletionItemProvider('sql', { - provideCompletionItems() { - return { - suggestions:buildSuggestions(), - }; - }, - // quickSuggestions: false, - // triggerCharacters: ['$', '.', '='], - }); + + reloadCompletion(); + monaco.languages.registerDocumentRangeFormattingEditProvider('sql', { provideDocumentRangeFormattingEdits(model, range, options) { var formatted = format(model.getValueInRange(range), { @@ -181,11 +171,11 @@ const FlinkSqlEditor = (props:any) => { return ( { } const mapDispatchToProps = (dispatch:Dispatch)=>({ - saveText:(tabs:any,tabIndex:any)=>dispatch({ + /*saveText:(tabs:any,tabIndex:any)=>dispatch({ type: "Studio/saveTask", payload: tabs.panes[tabIndex].task, - }), - saveSql:(val:any)=>dispatch({ + }),*/ + saveSql:(val: any)=>dispatch({ type: "Studio/saveSql", payload: val, + }),saveSqlMetaData:(sqlMetaData: any,key: number)=>dispatch({ + type: "Studio/saveSqlMetaData", + payload: { + activeKey:key, + sqlMetaData, + isModified: true, + } }), - saveMetaData:(sqlMetaData:any,tabs:any,tabIndex:any)=>dispatch({ - type: "Studio/saveSqlMetaData", - payload: { - activeKey:tabs.panes[tabIndex].key, - sqlMetaData, - isModified: true, - } - }) }) -export default connect(({ Studio,Document }: { Studio: StateType,Document: DocumentStateType }) => ({ - current: Studio.current, - sql: Studio.sql, - tabs: Studio.tabs, - monaco: Studio.monaco, +export default connect(({ Document }: { Document: DocumentStateType }) => ({ fillDocuments: Document.fillDocuments, }),mapDispatchToProps)(FlinkSqlEditor); diff --git a/dlink-web/src/components/Studio/StudioEvent/DQL.ts b/dlink-web/src/components/Studio/StudioEvent/DQL.ts index 26c53bddbf..4657dbd205 100644 --- a/dlink-web/src/components/Studio/StudioEvent/DQL.ts +++ b/dlink-web/src/components/Studio/StudioEvent/DQL.ts @@ -1,6 +1,6 @@ import {getJobData} from "@/pages/FlinkSqlStudio/service"; -export function showJobData(jobId:string,dispatch:any) { +export function showJobData(key: number,jobId: string,dispatch: any) { if(!jobId){ return; } @@ -8,7 +8,10 @@ export function showJobData(jobId:string,dispatch:any) { res.then((result)=>{ dispatch&&dispatch({ type: "Studio/saveResult", - payload: result.datas, + payload: { 
+ key, + datas: result.datas + }, }); }); } diff --git a/dlink-web/src/components/Studio/StudioEvent/data.d.ts b/dlink-web/src/components/Studio/StudioEvent/data.d.ts index c92dfc60ea..57fc012403 100644 --- a/dlink-web/src/components/Studio/StudioEvent/data.d.ts +++ b/dlink-web/src/components/Studio/StudioEvent/data.d.ts @@ -1,6 +1,6 @@ export type SqlMetaData = { - statement: string, - metaData: MetaData[], + statement?: string, + metaData?: MetaData[], }; export type MetaData = { table: string, diff --git a/dlink-web/src/components/Studio/StudioHome/index.tsx b/dlink-web/src/components/Studio/StudioHome/index.tsx index e81bb38c63..216b065eb3 100644 --- a/dlink-web/src/components/Studio/StudioHome/index.tsx +++ b/dlink-web/src/components/Studio/StudioHome/index.tsx @@ -12,17 +12,17 @@ const StudioHome = (props: any) => { return ( - 欢迎使用 Dinky v0.5.0 + 欢迎使用 Dinky v0.5.1
实时即未来,Dinky 为 Apache Flink 而生,让 Flink SQL 纵享丝滑,并致力于实时计算平台建设。
快捷键 Ctrl + s 保存 - Alt + 1 保存 Alt + 2 校验 Alt + 3 美化 - Esc 关闭弹框 + F2 全屏 + Esc 关闭弹框及全屏 F1 更多快捷键 diff --git a/dlink-web/src/components/Studio/StudioMenu/index.tsx b/dlink-web/src/components/Studio/StudioMenu/index.tsx index cece7c536b..1db4c52717 100644 --- a/dlink-web/src/components/Studio/StudioMenu/index.tsx +++ b/dlink-web/src/components/Studio/StudioMenu/index.tsx @@ -2,7 +2,7 @@ import styles from "./index.less"; import {Menu, Dropdown, Tooltip, Row, Col, Popconfirm, notification, Modal, message} from "antd"; import { PauseCircleTwoTone, CopyTwoTone, DeleteTwoTone, PlayCircleTwoTone, DiffTwoTone,SnippetsTwoTone, - FileAddTwoTone, FolderOpenTwoTone, SafetyCertificateTwoTone, SaveTwoTone, FlagTwoTone, + FileAddTwoTone, FolderOpenTwoTone, SafetyCertificateTwoTone, SaveTwoTone, FlagTwoTone,CodeTwoTone, EnvironmentOutlined, SmileOutlined, RocketTwoTone, QuestionCircleTwoTone, MessageOutlined, ClusterOutlined } from "@ant-design/icons"; import Space from "antd/es/space"; @@ -16,7 +16,7 @@ import {executeSql, getJobPlan} from "@/pages/FlinkSqlStudio/service"; import StudioHelp from "./StudioHelp"; import StudioGraph from "./StudioGraph"; import {showCluster, showTables} from "@/components/Studio/StudioEvent/DDL"; -import {useCallback, useEffect, useState} from "react"; +import React, {useCallback, useEffect, useState} from "react"; import StudioExplain from "../StudioConsole/StudioExplain"; import {DIALECT, isOnline, isSql} from "@/components/Studio/conf"; import { @@ -24,6 +24,7 @@ import { } from '@ant-design/pro-form'; import SqlExport from "@/pages/FlinkSqlStudio/SqlExport"; import {Dispatch} from "@@/plugin-dva/connect"; +import StudioTabs from "@/components/Studio/StudioTabs"; const menu = ( @@ -34,10 +35,11 @@ const menu = ( const StudioMenu = (props: any) => { - const {tabs, current, currentPath, form, refs, dispatch, currentSession} = props; + const {isFullScreen, tabs, current, currentPath, form,width,height, refs, dispatch, currentSession} = props; const [modalVisible, handleModalVisible] = useState(false); const [exportModalVisible, handleExportModalVisible] = useState(false); const [graphModalVisible, handleGraphModalVisible] = useState(false); + // const [editModalVisible, handleEditModalVisible] = useState(false); const [graphData, setGraphData] = useState(); const onKeyDown = useCallback((e) => { @@ -47,6 +49,13 @@ const StudioMenu = (props: any) => { props.saveTask(current); } } + if(e.keyCode === 113){ + e.preventDefault(); + if(current) { + // handleEditModalVisible(true); + props.changeFullScreen(true); + } + } }, [current]); useEffect(() => { @@ -237,6 +246,12 @@ const StudioMenu = (props: any) => { return str.replace(/&(lt|gt|nbsp|amp|quot);/ig,function(all,t){return arrEntities[t];}); } + const toFullScreen = () => { + if(current) { + props.changeFullScreen(true); + } + }; + const saveSqlAndSettingToTask = () => { props.saveTask(current); }; @@ -327,6 +342,13 @@ const StudioMenu = (props: any) => { {current? 
+ + + + + )} + + + :undefined} Flink 配置 = (props {...formLayout} form={form} initialValues={getConfigFormValues(formVals)} + onValuesChange={onValuesChange} > {renderContent(formVals)} diff --git a/dlink-web/src/pages/ClusterConfiguration/conf.ts b/dlink-web/src/pages/ClusterConfiguration/conf.ts index 0d32dbdef2..b86709c1a6 100644 --- a/dlink-web/src/pages/ClusterConfiguration/conf.ts +++ b/dlink-web/src/pages/ClusterConfiguration/conf.ts @@ -9,6 +9,19 @@ export const HADOOP_CONFIG_LIST: Config[] = [{ lable: 'ha.zookeeper.quorum', placeholder: '192.168.123.1:2181,192.168.123.2:2181,192.168.123.3:2181', }]; +export const KUBERNETES_CONFIG_LIST: Config[] = [{ + name: 'kubernetes.namespace', + lable: 'kubernetes.namespace', + placeholder: 'dlink', +},{ + name: 'kubernetes.cluster-id', + lable: 'kubernetes.cluster-id', + placeholder: 'dlink', +},{ + name: 'kubernetes.container.image', + lable: 'kubernetes.container.image', + placeholder: 'dlink', +}]; export const FLINK_CONFIG_LIST: Config[] = [{ name: 'jobmanager.memory.process.size', lable: 'jobmanager.memory.process.size', @@ -44,6 +57,14 @@ export function HADOOP_CONFIG_NAME_LIST () { return list; } +export function KUBERNETES_CONFIG_NAME_LIST () { + const list: string[] = []; + KUBERNETES_CONFIG_LIST.forEach(item => { + list.push(item.name); + }); + return list; +} + export function FLINK_CONFIG_NAME_LIST() { const list: string[] = []; FLINK_CONFIG_LIST.forEach(item => { diff --git a/dlink-web/src/pages/ClusterConfiguration/function.ts b/dlink-web/src/pages/ClusterConfiguration/function.ts index 89533efed0..af6c1f6c71 100644 --- a/dlink-web/src/pages/ClusterConfiguration/function.ts +++ b/dlink-web/src/pages/ClusterConfiguration/function.ts @@ -1,17 +1,32 @@ -import {FLINK_CONFIG_NAME_LIST, HADOOP_CONFIG_NAME_LIST} from "@/pages/ClusterConfiguration/conf"; +import { + FLINK_CONFIG_NAME_LIST, + HADOOP_CONFIG_NAME_LIST, + KUBERNETES_CONFIG_NAME_LIST +} from "@/pages/ClusterConfiguration/conf"; export function getConfig(values:any) { - let hadoopConfig = addValueToMap(values,HADOOP_CONFIG_NAME_LIST()); - addListToMap(values.hadoopConfigList,hadoopConfig); let flinkConfig = addValueToMap(values,FLINK_CONFIG_NAME_LIST()); addListToMap(values.flinkConfigList,flinkConfig); - return { - hadoopConfigPath:values.hadoopConfigPath, - flinkLibPath:values.flinkLibPath, - flinkConfigPath:values.flinkConfigPath, - hadoopConfig:hadoopConfig, - flinkConfig:flinkConfig, - }; + if(values.type=='Yarn') { + let hadoopConfig = addValueToMap(values, HADOOP_CONFIG_NAME_LIST()); + addListToMap(values.hadoopConfigList, hadoopConfig); + return { + hadoopConfigPath:values.hadoopConfigPath, + flinkLibPath:values.flinkLibPath, + flinkConfigPath:values.flinkConfigPath, + hadoopConfig, + flinkConfig, + }; + }else if(values.type=='Kubernetes') { + let kubernetesConfig = addValueToMap(values, KUBERNETES_CONFIG_NAME_LIST()); + addListToMap(values.kubernetesConfigList, kubernetesConfig); + return { + flinkLibPath:values.flinkLibPath, + flinkConfigPath:values.flinkConfigPath, + kubernetesConfig, + flinkConfig, + }; + } } type ConfigItem = { @@ -27,6 +42,9 @@ function addListToMap(list:[ConfigItem],config:{}){ function addValueToMap(values:{},keys: string []){ let config = {}; + if(!values){ + return config; + } for(let i in keys){ config[keys[i]]=values[keys[i]]; } @@ -53,16 +71,20 @@ export function getConfigFormValues(values:any) { 'flinkConfigPath', ]); let hadoopConfig = addValueToMap(config.hadoopConfig,HADOOP_CONFIG_NAME_LIST()); + let kubernetesConfig = 
addValueToMap(config.kubernetesConfig,KUBERNETES_CONFIG_NAME_LIST()); let flinkConfig = addValueToMap(config.flinkConfig,FLINK_CONFIG_NAME_LIST()); let hadoopConfigList = addMapToList(config.hadoopConfig,HADOOP_CONFIG_NAME_LIST()); + let kubernetesConfigList = addMapToList(config.kubernetesConfig,KUBERNETES_CONFIG_NAME_LIST()); let flinkConfigList = addMapToList(config.flinkConfig,FLINK_CONFIG_NAME_LIST()); return { ...formValues, ...configValues, ...hadoopConfig, - hadoopConfigList:hadoopConfigList, + ...kubernetesConfig, + hadoopConfigList, + kubernetesConfigList, ...flinkConfig, - flinkConfigList:flinkConfigList + flinkConfigList } } diff --git a/dlink-web/src/pages/FlinkSqlStudio/model.ts b/dlink-web/src/pages/FlinkSqlStudio/model.ts index a346336dd6..87da9c4ffc 100644 --- a/dlink-web/src/pages/FlinkSqlStudio/model.ts +++ b/dlink-web/src/pages/FlinkSqlStudio/model.ts @@ -136,6 +136,7 @@ export type SessionType = { } export type StateType = { + isFullScreen: boolean; toolHeight?: number; toolRightWidth?: number; toolLeftWidth?: number; @@ -147,7 +148,7 @@ export type StateType = { currentSession?: SessionType; current?: TabsItemType; sql?: string; - monaco?: any; + // monaco?: any; currentPath?: string[]; tabs?: TabsType; session?: SessionType[]; @@ -165,12 +166,13 @@ export type ModelType = { saveTask: Effect; }; reducers: { + changeFullScreen: Reducer; saveToolHeight: Reducer; saveToolRightWidth: Reducer; saveToolLeftWidth: Reducer; saveSql: Reducer; saveCurrentPath: Reducer; - saveMonaco: Reducer; + // saveMonaco: Reducer; saveSqlMetaData: Reducer; saveTabs: Reducer; closeTabs: Reducer; @@ -193,6 +195,7 @@ export type ModelType = { const Model: ModelType = { namespace: 'Studio', state: { + isFullScreen: false, toolHeight: 400, toolRightWidth: 300, toolLeftWidth: 300, @@ -206,7 +209,7 @@ const Model: ModelType = { }, current: undefined, sql: '', - monaco: {}, + // monaco: {}, currentPath: ['引导页'], tabs: { activeKey: 0, @@ -233,6 +236,12 @@ const Model: ModelType = { }, reducers: { + changeFullScreen(state, {payload}) { + return { + ...state, + isFullScreen: payload, + }; + }, saveToolHeight(state, {payload}) { return { ...state, @@ -250,19 +259,19 @@ const Model: ModelType = { }; }, saveSql(state, {payload}) { - const {tabs} = state; + const newTabs = state.tabs; const newCurrent = state.current; newCurrent.value = payload; - for (let i = 0; i < tabs.panes.length; i++) { - if (tabs.panes[i].key == tabs.activeKey) { - tabs.panes[i].value = payload; - tabs.panes[i].task && (tabs.panes[i].task.statement = payload); + for (let i = 0; i < newTabs.panes.length; i++) { + if (newTabs.panes[i].key == newTabs.activeKey) { + newTabs.panes[i].value = payload; + newTabs.panes[i].task && (newTabs.panes[i].task.statement = payload); } } return { ...state, - current: newCurrent, - tabs, + current: {...newCurrent}, + tabs:{...newTabs}, }; }, saveCurrentPath(state, {payload}) { @@ -271,32 +280,30 @@ const Model: ModelType = { currentPath: payload, }; }, - saveMonaco(state, {payload}) { + /*saveMonaco(state, {payload}) { return { ...state, - monaco: { - ...payload - }, + monaco:payload, }; - }, + },*/ saveSqlMetaData(state, {payload}) { const newCurrent = state.current; const newTabs = state.tabs; if(newCurrent.key == payload.activeKey){ - newCurrent.sqlMetaData = payload.sqlMetaData; + newCurrent.sqlMetaData = {...payload.sqlMetaData}; newCurrent.isModified = payload.isModified; } for (let i = 0; i < newTabs.panes.length; i++) { if (newTabs.panes[i].key == payload.activeKey) { - 
newTabs.panes[i].sqlMetaData = payload.sqlMetaData; + newTabs.panes[i].sqlMetaData = {...payload.sqlMetaData}; newTabs.panes[i].isModified = payload.isModified; break; } } return { ...state, - current: newCurrent, - tabs: newTabs, + current: {...newCurrent}, + tabs: {...newTabs}, }; }, saveTabs(state, {payload}) { @@ -320,7 +327,7 @@ const Model: ModelType = { ...newCurrent, isModified:false, }, - tabs: payload, + tabs: {...payload}, currentPath: newCurrent.path, }; }, @@ -341,8 +348,8 @@ const Model: ModelType = { } return { ...state, - current: newCurrent, - tabs: newTabs, + current: {...newCurrent}, + tabs: {...newTabs}, }; }, closeTabs(state, {payload}) { @@ -362,23 +369,23 @@ const Model: ModelType = { return { ...state, - current: newCurrent, - tabs: newTabs + current: {...newCurrent}, + tabs: {...newTabs} }; }, changeActiveKey(state, {payload}) { - const newTabs = state.tabs; - let newCurrent = state.current; - newTabs.activeKey = payload; + const newTabs = state?.tabs; + let newCurrent = state?.current; for (let i = 0; i < newTabs.panes.length; i++) { if (newTabs.panes[i].key == payload) { + newTabs.activeKey = payload; newCurrent = newTabs.panes[i]; } } return { ...state, - current: newCurrent, - tabs: newTabs, + current: {...newCurrent}, + tabs: {...newTabs}, currentPath: newCurrent.path, }; }, @@ -396,14 +403,14 @@ const Model: ModelType = { } return { ...state, - current: newCurrent, - tabs: newTabs, + current: {...newCurrent}, + tabs: {...newTabs}, }; }, saveSession(state, {payload}) { return { ...state, - session: payload, + session: [...payload], }; }, showRightClickMenu(state, {payload}) { @@ -431,44 +438,46 @@ const Model: ModelType = { }, saveResult(state, {payload}) { const newTabs = state?.tabs; - let newCurrent = state?.current; + const newCurrent = state?.current; for (let i = 0; i < newTabs.panes.length; i++) { - if (newTabs.panes[i].key == newTabs.activeKey) { - newTabs.panes[i].console.result.result = payload; - newCurrent = newTabs.panes[i]; + if (newTabs.panes[i].key == payload.key) { + newTabs.panes[i].console.result.result = payload.datas; + if(newCurrent.key == payload.key){ + newCurrent.console = newTabs.panes[i].console; + } break; } } return { ...state, - current: newCurrent, - tabs: newTabs, + current: {...newCurrent}, + tabs: {...newTabs}, }; }, saveCluster(state, {payload}) { return { ...state, - cluster: payload, + cluster: [...payload], }; },saveSessionCluster(state, {payload}) { return { ...state, - sessionCluster: payload, + sessionCluster: [...payload], }; },saveClusterConfiguration(state, {payload}) { return { ...state, - clusterConfiguration: payload, + clusterConfiguration: [...payload], }; },saveDataBase(state, {payload}) { return { ...state, - database: payload, + database: [...payload], }; },saveEnv(state, {payload}) { return { ...state, - env: payload, + env: [...payload], }; },saveChart(state, {payload}) { let newTabs = state?.tabs; @@ -482,8 +491,8 @@ const Model: ModelType = { } return { ...state, - current: newCurrent, - tabs: newTabs, + current: {...newCurrent}, + tabs: {...newTabs}, }; }, }, diff --git a/dlink-web/src/pages/Welcome.tsx b/dlink-web/src/pages/Welcome.tsx index b3b529bb94..f7001a297d 100644 --- a/dlink-web/src/pages/Welcome.tsx +++ b/dlink-web/src/pages/Welcome.tsx @@ -19,7 +19,7 @@ export default (): React.ReactNode => { { - 0.5.1 2022-01-? + 0.5.1 2022-01-24

@@ -586,6 +586,42 @@ export default (): React.ReactNode => {
   • 修复 前端多处bug
+  • 新增 F2 全屏开发
+  • 升级 SpringBoot 至 2.6.3
+  • 优化 日志依赖
+  • 修复 前端 state 赋值 bug
+  • 修复 异常预览内容溢出 bug
+  • 修复 数据预览特殊条件下无法获取数据的 bug
+  • 优化 SQL编辑器性能
+  • 修复 全屏开发退出后 sql 不同步
+  • 升级 Flink 1.14.2 到 1.14.3
+  • 修复 Flink 1.14 提交任务报错缺类 bug
+  • 优化 作业配置查看及全屏开发按钮
+  • 新增 K8S集群配置
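// Editor's note: a minimal sketch, not part of the patch verbatim, of how the
// "新增 F2 全屏开发" entry above is wired. Inside StudioMenu/index.tsx (see its hunk
// earlier in this diff), F2 (keyCode 113) dispatches the new Studio/changeFullScreen
// reducer added in model.ts, which flips state.isFullScreen; `current` (the active tab)
// and `dispatch` are assumed to come from the component's dva props.
import {useCallback} from 'react';

const onKeyDown = useCallback((e: any) => {
  if (e.keyCode === 113) {              // F2: enter full-screen editing
    e.preventDefault();
    if (current) {
      dispatch({type: 'Studio/changeFullScreen', payload: true}); // sets isFullScreen
    }
  }
}, [current]);
// Esc dispatches payload: false again, matching the updated shortcut text
// "Esc 关闭弹框及全屏" in StudioHome/index.tsx.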
diff --git a/dlink-web/src/pages/document.ejs b/dlink-web/src/pages/document.ejs
index b7e222f610..6779ed597a 100644
--- a/dlink-web/src/pages/document.ejs
+++ b/dlink-web/src/pages/document.ejs
@@ -14,15 +14,14 @@
-    Ant Design Pro
+    Dinky
@@ -201,11 +200,11 @@
-    Ant Design
+    Dinky
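// Editor's note: a condensed sketch (near-verbatim from the StudioEvent/DQL.ts hunk
// earlier in this diff) of the data-preview fix listed in the changelog
// ("修复 数据预览特殊条件下无法获取数据的 bug"). showJobData now carries the originating
// tab key, and the saveResult reducer in FlinkSqlStudio/model.ts writes the result into
// the pane with that key instead of whichever tab is active when the async call returns:
import {getJobData} from '@/pages/FlinkSqlStudio/service';

export function showJobData(key: number, jobId: string, dispatch: any) {
  if (!jobId) {
    return;
  }
  getJobData(jobId).then((result) => {
    dispatch && dispatch({
      type: 'Studio/saveResult',
      payload: {key, datas: result.datas},  // keyed by tab, not by activeKey
    });
  });
}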
diff --git a/pom.xml b/pom.xml index 4e259b4661..e5632b7bbd 100644 --- a/pom.xml +++ b/pom.xml @@ -7,7 +7,7 @@ com.dlink dlink pom - 0.5.0 + 0.5.1 dlink-common @@ -31,14 +31,14 @@ 1.8 UTF-8 UTF-8 - 2.3.8.RELEASE + 2.6.3 5.1.4 1.2.8 3.4.0 1.18.16 2.11.4 21.0 - 2.17.1 + 8.0.22 12.2.0.1 0.2.6 @@ -102,7 +102,7 @@ guava ${guava.version} - + + mysql mysql-connector-java
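<!-- Editor's note: the dlink-core/pom.xml hunk near the top of this diff lost its XML
     markup in rendering. An assumed reconstruction of the addition: a scope.runtime
     property (defaulting to "provided") plus a dlink-client-hadoop dependency bound to
     it, so the Hadoop client can be swapped in or out at packaging time. -->
<properties>
    <scope.runtime>provided</scope.runtime>
</properties>

<dependencies>
    <dependency>
        <groupId>com.dlink</groupId>
        <artifactId>dlink-client-hadoop</artifactId>
        <scope>${scope.runtime}</scope>
    </dependency>
</dependencies>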