Fix errors in the init SQL script that were missed earlier, and fix the failure to create relational-database tasks. (#1140)
Errors in the init SQL script were missed in the earlier fix: the JSON strings in two rows were malformed.

---------

Co-authored-by: feng.du <[email protected]>
Co-authored-by: xjs1983boy <[email protected]>
3 people authored Nov 21, 2023
1 parent 49a8c67 commit 181410c
Showing 3 changed files with 19 additions and 3 deletions.
4 changes: 2 additions & 2 deletions sql/init.sql
@@ -1572,12 +1572,12 @@ INSERT INTO `dict` VALUES (85, 'ResourceManager', 'ResourceManager', '3', '资
INSERT INTO `dict` VALUES (87, 'TaskManager', 'TaskManager', '1', '任务管理', 32, 1, 'STRING', '', 1, '2022-02-11 10:42:19', '2022-02-11 10:42:19', 0);
INSERT INTO `dict` VALUES (89, 'CustomFunction', 'CustomFunction', '6', '自定义函数', 33, 4, 'STRING', '', 1, '2022-02-11 10:42:57', '2022-02-11 10:42:57', 0);
INSERT INTO `dict` VALUES (91, 'SystemFunction', 'SystemFunction', '6', '系统函数', 33, 2, 'STRING', '', 1, '2022-02-11 10:42:57', '2022-02-11 10:42:57', 0);
-INSERT INTO `dict` VALUES (95,'component_model_config', 'Apache Hadoop 2.x', '{"HDFS": {"HDFS": "yarn2-hdfs2-hadoop2", "FLINK": [{"112": "yarn2-hdfs2-flink112"}], "SPARK": [{"320": "yarn2-hdfs2-spark320"}, {"210": "yarn2-hdfs2-spark210"], "SCRIPT": "yarn2-hdfs2-script"}, "YARN": "yarn2"}', null, 14, 1, 'STRING', 'YARN', 0, '2021-12-28 11:01:55', '2021-12-28 11:01:55', 0);
+INSERT INTO `dict` VALUES (95,'component_model_config', 'Apache Hadoop 2.x', '{"HDFS": {"HDFS": "yarn2-hdfs2-hadoop2", "FLINK": [{"112": "yarn2-hdfs2-flink112"}], "SPARK": [{"320": "yarn2-hdfs2-spark320"}, {"210": "yarn2-hdfs2-spark210"}], "SCRIPT": "yarn2-hdfs2-script"}, "YARN": "yarn2"}', null, 14, 1, 'STRING', 'YARN', 0, '2021-12-28 11:01:55', '2021-12-28 11:01:55', 0);
INSERT INTO `dict` VALUES (97,'component_model_config', 'Apache Hadoop 3.x', '{"HDFS": {"HDFS": "yarn3-hdfs3-hadoop3", "FLINK": [{"112": "yarn3-hdfs3-flink112"}], "SPARK": [{"320": "yarn3-hdfs3-spark320"}, {"210": "yarn3-hdfs3-spark210"}], "SCRIPT": "yarn3-hdfs3-script"}, "YARN": "yarn3"}', null, 14, 1, 'STRING', 'YARN', 0, '2021-12-28 11:03:45', '2021-12-28 11:03:45', 0);
INSERT INTO `dict` VALUES (99,'component_model_config', 'HDP 3.0.x', '{"HDFS": {"HDFS": "yarn3-hdfs3-hadoop3", "FLINK": [{"112": "yarn3-hdfs3-flink112"}], "SPARK": [{"320": "yarn3-hdfs3-spark320"}, {"210": "yarn3-hdfs3-spark210"}], "SCRIPT": "yarn3-hdfs3-script"}, "YARN": "yarn3"}', null, 14, 1, 'STRING', 'YARN', 0, '2021-12-28 11:04:23', '2021-12-28 11:04:23', 0);
INSERT INTO `dict` VALUES (101,'component_model_config', 'CDH 6.0.x', '{"HDFS": {"HDFS": "yarn3-hdfs3-hadoop3", "FLINK": [{"112": "yarn3-hdfs3-flink112"}], "SPARK": [{"320": "yarn3-hdfs3-spark320"}, {"210": "yarn3-hdfs3-spark210"}], "SCRIPT": "yarn3-hdfs3-script"}, "YARN": "yarn3"}', null, 14, 1, 'STRING', 'YARN', 0, '2021-12-28 11:04:40', '2021-12-28 11:04:40', 0);
INSERT INTO `dict` VALUES (103,'component_model_config', 'CDH 6.1.x', '{"HDFS": {"HDFS": "yarn3-hdfs3-hadoop3", "FLINK": [{"112": "yarn3-hdfs3-flink112"}], "SPARK": [{"320": "yarn3-hdfs3-spark320"}, {"210": "yarn3-hdfs3-spark210"}], "SCRIPT": "yarn3-hdfs3-script"}, "YARN": "yarn3"}', null, 14, 1, 'STRING', 'YARN', 0, '2021-12-28 11:04:55', '2021-12-28 11:04:55', 0);
-INSERT INTO `dict` VALUES (105,'component_model_config', 'CDH 6.2.x', '{"HDFS": {"HDFS": "yarn3-hdfs3-hadoop3", "FLINK": [{"112": "yarn3-hdfs3-flink112"}], "SPARK": [{"320": "yarn3-hdfs3-spark320"}, {"210": "yarn3-hdfs3-spark210"], "SCRIPT": "yarn3-hdfs3-script"}, "YARN": "yarn3"}', null, 14, 1, 'STRING', 'YARN', 0, '2021-12-28 11:05:06', '2021-12-28 11:05:06', 0);
+INSERT INTO `dict` VALUES (105,'component_model_config', 'CDH 6.2.x', '{"HDFS": {"HDFS": "yarn3-hdfs3-hadoop3", "FLINK": [{"112": "yarn3-hdfs3-flink112"}], "SPARK": [{"320": "yarn3-hdfs3-spark320"}, {"210": "yarn3-hdfs3-spark210"}], "SCRIPT": "yarn3-hdfs3-script"}, "YARN": "yarn3"}', null, 14, 1, 'STRING', 'YARN', 0, '2021-12-28 11:05:06', '2021-12-28 11:05:06', 0);
INSERT INTO `dict` VALUES (107,'component_model_config', 'HDP 2.6.x', '{"HDFS": {"HDFS": "yarn2-hdfs2-hadoop2", "FLINK": [{"112": "yarn2-hdfs2-flink112"}], "SPARK": [{"320": "yarn2-hdfs2-spark320"}, {"210": "yarn2-hdfs2-spark210"}], "SCRIPT": "yarn2-hdfs2-script"}, "YARN": "yarn2"}', null, 14, 1, 'STRING', 'YARN', 0, '2021-12-28 11:06:38', '2021-12-28 11:06:38', 0);
INSERT INTO `dict` VALUES (109,'component_model_config', 'CDH 5.x', '{"HDFS": {"HDFS": "yarn2-hdfs2-hadoop2", "FLINK": [{"112": "yarn2-hdfs2-flink112"}], "SPARK": [{"320": "yarn2-hdfs2-spark320"}, {"210": "yarn2-hdfs2-spark210"}], "SCRIPT": "yarn2-hdfs2-script"}, "YARN": "yarn2"}', null, 14, 1, 'STRING', 'YARN', 0, '2021-12-28 11:07:19', '2021-12-28 11:07:19', 0);
INSERT INTO `dict` VALUES (111,'component_model_config', 'HDP 3.x', '{"HDFS": {"HDFS": "yarn3-hdfs3-hadoop3", "FLINK": [{"112": "yarn3-hdfs3-flink112"}], "SPARK": [{"320": "yarn3-hdfs3-spark320"}, {"210": "yarn3-hdfs3-spark210"}], "SCRIPT": "yarn3-hdfs3-script"}, "YARN": "yarn3"}', null, 14, 1, 'STRING', 'YARN', 0, '2021-12-28 11:43:05', '2021-12-28 11:43:05', 0);
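
In both corrected rows (ids 95 and 105) the defect is identical: the second entry of the SPARK array is closed with a bare "]", so the inner object is never terminated and the whole value is invalid JSON. Isolating the fragment from row 95:

    broken: "SPARK": [{"320": "yarn2-hdfs2-spark320"}, {"210": "yarn2-hdfs2-spark210"]
    fixed:  "SPARK": [{"320": "yarn2-hdfs2-spark320"}, {"210": "yarn2-hdfs2-spark210"}]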
17 changes: 16 additions & 1 deletion DefaultTaskSaver.java
@@ -22,6 +22,7 @@
import com.dtstack.taier.common.enums.EScheduleJobType;
import com.dtstack.taier.common.exception.ErrorCode;
import com.dtstack.taier.common.exception.TaierDefineException;
+import com.dtstack.taier.dao.domain.TaskParamTemplate;
import com.dtstack.taier.develop.dto.devlop.TaskResourceParam;
import com.dtstack.taier.develop.dto.devlop.TaskVO;
import com.dtstack.taier.develop.service.develop.impl.DevelopTaskTaskService;
@@ -56,7 +57,21 @@ public class DefaultTaskSaver extends AbstractTaskSaver {
public TaskResourceParam beforeProcessing(TaskResourceParam taskResourceParam) {
// SQL tasks must have a data source selected
EScheduleJobType scheduleJobType = EScheduleJobType.getByTaskType(taskResourceParam.getTaskType());
-taskResourceParam.setTaskParams(taskResourceParam.getTaskParams() == null ? taskTemplateService.getTaskTemplate(taskResourceParam.getTaskType(), taskResourceParam.getComponentVersion()).getParams() : taskResourceParam.getTaskParams());

+// 2023-11-21 Modified by ddwolf715
+// The previous code is shown below. Reason for the change: relational databases have no entry in task_param_template, so the lookup finds no record and calling getParams() directly throws a null-pointer error.
+// taskResourceParam.setTaskParams(taskResourceParam.getTaskParams() == null ? taskTemplateService.getTaskTemplate(taskResourceParam.getTaskType(), taskResourceParam.getComponentVersion()).getParams() : taskResourceParam.getTaskParams());
+// New code: query task_param_template first, and call setTaskParams only when the result is non-null.
+if (taskResourceParam.getTaskParams() != null) {
+    taskResourceParam.setTaskParams(taskResourceParam.getTaskParams());
+} else {
+    TaskParamTemplate taskParamTemplate = taskTemplateService.getTaskTemplate(taskResourceParam.getTaskType(), taskResourceParam.getComponentVersion());
+    if (taskParamTemplate != null) {
+        taskResourceParam.setTaskParams(taskParamTemplate.getParams());
+    }
+}
+// 2023-11-21 Modified by ddwolf715

taskResourceParam.setComputeType(ComputeType.BATCH.getType());
if (EComputeType.BATCH.getType() == scheduleJobType.getComputeType().getType() && EJobType.SQL.getType() == scheduleJobType.getEngineJobType()) {
if (null == taskResourceParam.getDatasourceId()) {
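
The fix avoids the null dereference by checking the template lookup before calling getParams(). Note that the non-null branch, which re-assigns getTaskParams() to itself, is a no-op, so the change reduces to a single null-guarded fallback. A minimal equivalent sketch, illustrative only and not part of the commit, using the same names as the diff:

    // Fall back to the task_param_template entry only when no params were supplied,
    // and tolerate a missing template row (e.g., relational-database task types).
    if (taskResourceParam.getTaskParams() == null) {
        TaskParamTemplate template = taskTemplateService.getTaskTemplate(
                taskResourceParam.getTaskType(), taskResourceParam.getComponentVersion());
        if (template != null) {
            taskResourceParam.setTaskParams(template.getParams());
        }
    }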
1 change: 1 addition & 0 deletions pom.xml
@@ -17,6 +17,7 @@
<maven.compiler.source>1.8</maven.compiler.source>
<maven.compiler.target>1.8</maven.compiler.target>
<jar.package.name>mysql5</jar.package.name>
+<jar.name>mysql5</jar.name>
</properties>

<dependencies>
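
The new jar.name property mirrors the existing jar.package.name value. Its consumer is not shown in this diff; presumably it is referenced elsewhere in the build as ${jar.name}. A typical Maven use, hypothetical here, would be naming the final artifact:

    <!-- hypothetical consumer elsewhere in the pom -->
    <build>
        <finalName>${jar.name}</finalName>
    </build>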
