[feat_spark320][taier-all] support spark 3.2.0 version
vainhope committed Oct 31, 2023
1 parent c96e3bd commit adaae24
Showing 67 changed files with 5,481 additions and 4,706 deletions.
15 changes: 14 additions & 1 deletion pom.xml
@@ -206,6 +206,19 @@
<id>oss-sonatype</id>
<url>https://oss.sonatype.org/content/repositories/releases/</url>
</repository>

<repository>
<id>pentaho</id>
<name>public.nexus.pentaho.org</name>
<url>https://public.nexus.pentaho.org/repository/proxy-public-3rd-party-release</url>
<layout>default</layout>
<releases>
<enabled>true</enabled>
</releases>
<snapshots>
<enabled>false</enabled>
</snapshots>
</repository>
</repositories>

<build>
@@ -283,7 +296,7 @@
<configuration>
<aggregate>true</aggregate>
<reportOutputDirectory>javadocs</reportOutputDirectory>
<destDir>engine-java-docs</destDir>
<destDir>taier-java-docs</destDir>
</configuration>
</plugin>
</plugins>
95 changes: 95 additions & 0 deletions sql/1.5/1.5_increment.sql

Large diffs are not rendered by default.

99 changes: 84 additions & 15 deletions sql/init.sql

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion taier-common/pom.xml
@@ -222,7 +222,7 @@
<version>3.0.1</version>
<configuration>
<reportOutputDirectory>../javadocs</reportOutputDirectory>
<destDir>engine-api-client</destDir>
<destDir>taier-api-client</destDir>
<additionalJOptions>
<additionalJOption>-Xdoclint:none</additionalJOption>
</additionalJOptions>

This file was deleted.

@@ -150,6 +150,7 @@ public String sendSqlTask(String sql, String taskParams, String jobId, Task task
paramActionExt.setTenantId(task.getTenantId());
paramActionExt.setQueueName(task.getQueueName());
paramActionExt.setDatasourceId(task.getDatasourceId());
paramActionExt.setComponentVersion(task.getComponentVersion());
actionService.start(paramActionExt);
return jobId;
}
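A note on the setComponentVersion line above: the submit request now carries the Spark component version, presumably so the scheduling side can pick the matching Spark plugin bundle for the task. The sketch below is a minimal, hypothetical illustration of that kind of routing; the map contents and method names are assumptions, not Taier's actual API.

import java.util.HashMap;
import java.util.Map;

// Hypothetical sketch: routes a task to a Spark plugin bundle by component version.
public class ComponentVersionRouting {

    private static final Map<String, String> SPARK_PLUGIN_BY_VERSION = new HashMap<>();

    static {
        // Assumed mapping; the real plugin module names live under taier-worker-plugin.
        SPARK_PLUGIN_BY_VERSION.put("2.1", "spark-2.1-plugin");
        SPARK_PLUGIN_BY_VERSION.put("3.2", "spark-3.2-plugin");
    }

    public static String resolvePlugin(String componentVersion) {
        String plugin = SPARK_PLUGIN_BY_VERSION.get(componentVersion);
        if (plugin == null) {
            throw new IllegalArgumentException("Unsupported Spark version: " + componentVersion);
        }
        return plugin;
    }

    public static void main(String[] args) {
        // Without setComponentVersion(...) on the request, a Spark 2.x task and a
        // Spark 3.2.0 task would be indistinguishable at this point.
        System.out.println(resolvePlugin("3.2"));
    }
}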
@@ -21,6 +21,7 @@
import com.alibaba.fastjson.JSONObject;
import com.baomidou.mybatisplus.core.toolkit.Wrappers;
import com.dtstack.taier.common.enums.EComponentType;
import com.dtstack.taier.common.enums.EComputeType;
import com.dtstack.taier.common.enums.EScheduleJobType;
import com.dtstack.taier.common.enums.EScheduleStatus;
import com.dtstack.taier.common.enums.ESubmitStatus;
@@ -183,6 +184,9 @@ public TaskVO updateTaskInfo(TaskResourceParam taskResourceParam) {
Task task = developTaskService.getOne(Wrappers.lambdaQuery(Task.class)
.eq(Task::getName, taskVO.getName())
.eq(Task::getTenantId, taskVO.getTenantId()));
if(EComputeType.BATCH.getType() == taskVO.getComputeType()){
taskVO.setJobId(null);
}

if (taskVO.getId() != null && taskVO.getId() > 0) {
//update
@@ -26,6 +26,7 @@
import com.dtstack.taier.develop.dto.devlop.TaskVO;
import com.dtstack.taier.develop.service.develop.impl.DevelopTaskTaskService;
import com.dtstack.taier.develop.service.user.UserService;
import com.dtstack.taier.pluginapi.enums.ComputeType;
import com.dtstack.taier.pluginapi.enums.EJobType;
import org.apache.commons.lang.BooleanUtils;
import org.apache.commons.lang.StringUtils;
@@ -50,13 +51,13 @@ public class DefaultTaskSaver extends AbstractTaskSaver {
@Autowired
private UserService userService;

@Autowired
private DevelopTaskTaskService developTaskTaskService;

@Override
public TaskResourceParam beforeProcessing(TaskResourceParam taskResourceParam) {
// SQL tasks must have a data source selected
EScheduleJobType scheduleJobType = EScheduleJobType.getByTaskType(taskResourceParam.getTaskType());
taskResourceParam.setTaskParams(taskResourceParam.getTaskParams() == null ? taskTemplateService.getTaskTemplate(taskResourceParam.getTaskType(), taskResourceParam.getComponentVersion()).getParams() : taskResourceParam.getTaskParams());
taskResourceParam.setComputeType(ComputeType.BATCH.getType());
if (EComputeType.BATCH.getType() == scheduleJobType.getComputeType().getType() && EJobType.SQL.getType() == scheduleJobType.getEngineJobType()) {
if (null == taskResourceParam.getDatasourceId()) {
throw new TaierDefineException(ErrorCode.DATA_SOURCE_NOT_SET);
2 changes: 1 addition & 1 deletion taier-datasource/pom.xml
@@ -30,7 +30,7 @@
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<version>1.7.21</version>
<scope>provided</scope>
<!-- <scope>provided</scope>-->
</dependency>

<dependency>
@@ -25,11 +25,15 @@
import com.dtstack.taier.datasource.api.client.IRestful;
import com.dtstack.taier.datasource.api.client.ITable;
import com.dtstack.taier.datasource.api.client.IYarn;
import com.dtstack.taier.datasource.api.config.Configuration;
import com.dtstack.taier.datasource.api.context.ClientEnvironment;
import com.dtstack.taier.datasource.api.exception.InitializeException;
import com.dtstack.taier.datasource.api.manager.ManagerFactory;
import com.dtstack.taier.datasource.api.manager.list.ClientManager;
import com.dtstack.taier.datasource.api.source.DataSourceType;
import lombok.extern.slf4j.Slf4j;

import java.util.HashMap;
import java.util.Objects;

/**
@@ -78,6 +82,18 @@ public static IClient getClient(Integer dataSourceType) {
return getClientByType(IClient.class, dataSourceType);
}

public static void main(String[] args) {
Configuration configuration = new Configuration(new HashMap<>());
ClientEnvironment clientEnvironment = new ClientEnvironment(configuration);
clientEnvironment.start();
ClientCache.setEnv(clientEnvironment.getManagerFactory().getManager(ClientManager.class));
ClientManager clientManager = new ClientManager();
clientManager.setManagerFactory(new ManagerFactory());
setEnv(clientManager);
IClient client = getClient(DataSourceType.KAFKA.getVal());
System.out.println(client);
}

/**
* Get the HDFS file client
*
2 changes: 1 addition & 1 deletion taier-ui/src/components/scaffolds/create.tsx
@@ -70,7 +70,7 @@ const ComponentVersion = ({ onChange }: ICreateFormProps) => {
const [versions, setVersions] = useState<{ label: string; value: string }[]>([]);

useEffect(() => {
if (taskType) {
if (taskType !== undefined) {
api.getComponentVersionByTaskType<{ componentVersion: string; default: boolean; componentName: string }[]>({
taskType,
}).then((res) => {
@@ -113,4 +113,11 @@ public class ConfigConstant {

public static final String DATAX_PYTHON_BIN = "DataX.python.path";


public static final String SPARK_KERBEROS_REMOTE_KRB5 = "spark.kerberos.remotekrb5";

public static final String SPARK_KERBEROS_REMOTE_KEYTAB = "spark.kerberos.remotekeytab";

public static final String SPARK_HADOOP_CONF_REMOTE_DIR = "spark.hadoopconf.remotedir";

}
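The three new keys above point Spark submissions at Kerberos artifacts and a Hadoop conf directory that live on remote storage. Below is a minimal sketch of reading them, assuming they arrive through an ordinary key/value parameter set; the helper class, method names, and sample values are hypothetical and not the plugin's actual submit path.

import java.util.Properties;

// Hypothetical helper: looks up the remote Kerberos/Hadoop-conf locations from task params.
public class SparkRemoteSecurityConf {

    public static final String SPARK_KERBEROS_REMOTE_KRB5 = "spark.kerberos.remotekrb5";
    public static final String SPARK_KERBEROS_REMOTE_KEYTAB = "spark.kerberos.remotekeytab";
    public static final String SPARK_HADOOP_CONF_REMOTE_DIR = "spark.hadoopconf.remotedir";

    public static String remoteKrb5(Properties taskParams) {
        // Returns null when the task relies on the cluster-level krb5.conf instead.
        return taskParams.getProperty(SPARK_KERBEROS_REMOTE_KRB5);
    }

    public static String remoteKeytab(Properties taskParams) {
        return taskParams.getProperty(SPARK_KERBEROS_REMOTE_KEYTAB);
    }

    public static void main(String[] args) {
        Properties params = new Properties();
        params.setProperty(SPARK_KERBEROS_REMOTE_KRB5, "hdfs://ns1/kerberos/krb5.conf");
        params.setProperty(SPARK_KERBEROS_REMOTE_KEYTAB, "hdfs://ns1/kerberos/hive.keytab");
        System.out.println(remoteKrb5(params) + " / " + remoteKeytab(params));
    }
}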
@@ -432,7 +432,7 @@ public static synchronized String[] getKerberosFile(BaseConfig config, String lo
public static String getKeytabPath(BaseConfig config) {
String fileName = config.getPrincipalFile();
String remoteDir = config.getRemoteDir();
String localDir = USER_DIR + remoteDir;
String localDir = ConfigConstant.LOCAL_KEYTAB_DIR_PARENT + remoteDir;

File path = new File(localDir);
if (!path.exists()) {
@@ -252,6 +252,21 @@

</dependencies>

<repositories>
<repository>
<id>pentaho</id>
<name>public.nexus.pentaho.org</name>
<url>https://public.nexus.pentaho.org/repository/proxy-public-3rd-party-release</url>
<layout>default</layout>
<releases>
<enabled>true</enabled>
</releases>
<snapshots>
<enabled>false</enabled>
</snapshots>
</repository>
</repositories>

<build>
<plugins>
<plugin>
2 changes: 1 addition & 1 deletion taier-worker/taier-worker-plugin/pom.xml
@@ -164,7 +164,7 @@
<configuration>
<aggregate>true</aggregate>
<reportOutputDirectory>javadocs</reportOutputDirectory>
<destDir>engine-java-docs</destDir>
<destDir>taier-java-docs</destDir>
</configuration>
</plugin>
</plugins>
@@ -31,12 +31,6 @@
<version>1.0.0</version>
</dependency>

<!--<dependency>-->
<!-- <groupId>com.dtstack.engine</groupId>-->
<!-- <artifactId>native-hadoop2</artifactId>-->
<!-- <version>1.0.0</version>-->
<!--</dependency>-->

<!-- hadoop2 -->
<dependency>
<groupId>org.apache.hadoop</groupId>
@@ -31,11 +31,6 @@
<version>1.0.0</version>
</dependency>

<!--<dependency>-->
<!-- <groupId>com.dtstack.engine</groupId>-->
<!-- <artifactId>native-hadoop3</artifactId>-->
<!-- <version>1.0.0</version>-->
<!--</dependency>-->

<dependency>
<groupId>org.apache.hadoop</groupId>