Dataset columns (name: dtype, value statistics):
status: stringclasses (1 value)
repo_name: stringclasses (31 values)
repo_url: stringclasses (31 values)
issue_id: int64 (1 to 104k)
title: stringlengths (4 to 233)
body: stringlengths (0 to 186k)
issue_url: stringlengths (38 to 56)
pull_url: stringlengths (37 to 54)
before_fix_sha: stringlengths (40 to 40)
after_fix_sha: stringlengths (40 to 40)
report_datetime: unknown
language: stringclasses (5 values)
commit_datetime: unknown
updated_file: stringlengths (7 to 188)
chunk_content: stringlengths (1 to 1.03M)
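The column list above maps onto a flat record type. As a rough sketch only, one row could be represented in Java as below; the class name IssueChunkRecord and the plain-String treatment of the two datetime columns are assumptions for illustration, not part of the dataset itself.

```java
/**
 * Minimal sketch of a single row of this dataset, one field per column listed above.
 * Field names follow the column names; types follow the dtypes in the schema
 * (the two datetime columns have dtype "unknown" and are kept as strings here).
 */
public class IssueChunkRecord {
    public String status;          // e.g. "closed"
    public String repoName;        // e.g. "apache/dolphinscheduler"
    public String repoUrl;         // e.g. "https://github.com/apache/dolphinscheduler"
    public long issueId;           // int64, e.g. 3364
    public String title;           // issue title
    public String body;            // issue body, may contain markdown
    public String issueUrl;
    public String pullUrl;         // pull request that fixed the issue
    public String beforeFixSha;    // 40-character commit SHA before the fix
    public String afterFixSha;     // 40-character commit SHA after the fix
    public String reportDatetime;  // e.g. "2020-07-31T03:56:02Z"
    public String language;        // e.g. "java"
    public String commitDatetime;  // e.g. "2020-08-03T02:31:27Z"
    public String updatedFile;     // path of the file changed by the fixing PR
    public String chunkContent;    // one chunk of that file's source text
}
```

Every row reproduced below fills these fields for the same issue (apache/dolphinscheduler #3364); only updated_file and chunk_content differ from row to row.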
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,364
After updating the version from 1.2.0 to 1.3.1, running the Spark task fails because it can't find the jar
*For better global communication, please give priority to using English description, thx!* *Please review https://dolphinscheduler.apache.org/en-us/docs/development/issue.html when describing an issue.* **Describe the question** After upgrading from 1.2.0 to 1.3.1, the change in the task JSON format leaves Spark tasks unable to find their jar packages, and the tasks fail with ClassNotFound. Even after applying the MySQL upgrade script, the JSON is not converted into a format that 1.3 can recognize. Did the upgrade script skip the migration for Spark tasks, or is 1.3.1 not compatible with 1.2.0 tasks? As a result, I have to re-edit and re-save every Spark task so that it is refreshed into the 1.3.1 JSON format before it can run. **Which version of DolphinScheduler:** -[1.3.1-preview] **Additional context** **Requirement or improvement** - Please describe your requirements or improvement suggestions.
https://github.com/apache/dolphinscheduler/issues/3364
https://github.com/apache/dolphinscheduler/pull/3378
ff99ef383f647627dc13cfb71f5dd3c9b26ea76c
3744167d52a52892b5ff98d3b07ab250970fdb5d
"2020-07-31T03:56:02Z"
java
"2020-08-03T02:31:27Z"
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/consumer/TaskPriorityQueueConsumer.java
} return TaskExecutionContextBuilder.get() .buildTaskInstanceRelatedInfo(taskInstance) .buildProcessInstanceRelatedInfo(taskInstance.getProcessInstance()) .buildProcessDefinitionRelatedInfo(taskInstance.getProcessDefine()) .buildSQLTaskRelatedInfo(sqlTaskExecutionContext) .buildDataxTaskRelatedInfo(dataxTaskExecutionContext) .buildProcedureTaskRelatedInfo(procedureTaskExecutionContext) .buildSqoopTaskRelatedInfo(sqoopTaskExecutionContext) .create(); } /** * set procedure task relation * @param procedureTaskExecutionContext procedureTaskExecutionContext * @param taskNode taskNode */ private void setProcedureTaskRelation(ProcedureTaskExecutionContext procedureTaskExecutionContext, TaskNode taskNode) { ProcedureParameters procedureParameters = JSONObject.parseObject(taskNode.getParams(), ProcedureParameters.class); int datasourceId = procedureParameters.getDatasource(); DataSource datasource = processService.findDataSourceById(datasourceId); procedureTaskExecutionContext.setConnectionParams(datasource.getConnectionParams()); } /** * set datax task relation * @param dataxTaskExecutionContext dataxTaskExecutionContext * @param taskNode taskNode */ private void setDataxTaskRelation(DataxTaskExecutionContext dataxTaskExecutionContext, TaskNode taskNode) { DataxParameters dataxParameters = JSONObject.parseObject(taskNode.getParams(), DataxParameters.class); DataSource dataSource = processService.findDataSourceById(dataxParameters.getDataSource());
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,364
After updating the version from 1.2.0 to 1.3.1, running the Spark task fails because it can't find the jar
*For better global communication, please give priority to using English description, thx!* *Please review https://dolphinscheduler.apache.org/en-us/docs/development/issue.html when describing an issue.* **Describe the question** After upgrading from 1.2.0 to 1.3.1, the change in the task JSON format leaves Spark tasks unable to find their jar packages, and the tasks fail with ClassNotFound. Even after applying the MySQL upgrade script, the JSON is not converted into a format that 1.3 can recognize. Did the upgrade script skip the migration for Spark tasks, or is 1.3.1 not compatible with 1.2.0 tasks? As a result, I have to re-edit and re-save every Spark task so that it is refreshed into the 1.3.1 JSON format before it can run. **Which version of DolphinScheduler:** -[1.3.1-preview] **Additional context** **Requirement or improvement** - Please describe your requirements or improvement suggestions.
https://github.com/apache/dolphinscheduler/issues/3364
https://github.com/apache/dolphinscheduler/pull/3378
ff99ef383f647627dc13cfb71f5dd3c9b26ea76c
3744167d52a52892b5ff98d3b07ab250970fdb5d
"2020-07-31T03:56:02Z"
java
"2020-08-03T02:31:27Z"
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/consumer/TaskPriorityQueueConsumer.java
DataSource dataTarget = processService.findDataSourceById(dataxParameters.getDataTarget()); if (dataSource != null){ dataxTaskExecutionContext.setDataSourceId(dataxParameters.getDataSource()); dataxTaskExecutionContext.setSourcetype(dataSource.getType().getCode()); dataxTaskExecutionContext.setSourceConnectionParams(dataSource.getConnectionParams()); } if (dataTarget != null){ dataxTaskExecutionContext.setDataTargetId(dataxParameters.getDataTarget()); dataxTaskExecutionContext.setTargetType(dataTarget.getType().getCode()); dataxTaskExecutionContext.setTargetConnectionParams(dataTarget.getConnectionParams()); } } /** * set datax task relation * @param sqoopTaskExecutionContext sqoopTaskExecutionContext * @param taskNode taskNode */ private void setSqoopTaskRelation(SqoopTaskExecutionContext sqoopTaskExecutionContext, TaskNode taskNode) { SqoopParameters sqoopParameters = JSONObject.parseObject(taskNode.getParams(), SqoopParameters.class); SourceMysqlParameter sourceMysqlParameter = JSONUtils.parseObject(sqoopParameters.getSourceParams(), SourceMysqlParameter.class); TargetMysqlParameter targetMysqlParameter = JSONUtils.parseObject(sqoopParameters.getTargetParams(), TargetMysqlParameter.class); DataSource dataSource = processService.findDataSourceById(sourceMysqlParameter.getSrcDatasource()); DataSource dataTarget = processService.findDataSourceById(targetMysqlParameter.getTargetDatasource()); if (dataSource != null){ sqoopTaskExecutionContext.setDataSourceId(dataSource.getId()); sqoopTaskExecutionContext.setSourcetype(dataSource.getType().getCode()); sqoopTaskExecutionContext.setSourceConnectionParams(dataSource.getConnectionParams()); } if (dataTarget != null){ sqoopTaskExecutionContext.setDataTargetId(dataTarget.getId());
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,364
After updating the version from 1.2.0 to 1.3.1, running the Spark task fails because it can't find the jar
*For better global communication, please give priority to using English description, thx!* *Please review https://dolphinscheduler.apache.org/en-us/docs/development/issue.html when describing an issue.* **Describe the question** After upgrading from 1.2.0 to 1.3.1, the change in the task JSON format leaves Spark tasks unable to find their jar packages, and the tasks fail with ClassNotFound. Even after applying the MySQL upgrade script, the JSON is not converted into a format that 1.3 can recognize. Did the upgrade script skip the migration for Spark tasks, or is 1.3.1 not compatible with 1.2.0 tasks? As a result, I have to re-edit and re-save every Spark task so that it is refreshed into the 1.3.1 JSON format before it can run. **Which version of DolphinScheduler:** -[1.3.1-preview] **Additional context** **Requirement or improvement** - Please describe your requirements or improvement suggestions.
https://github.com/apache/dolphinscheduler/issues/3364
https://github.com/apache/dolphinscheduler/pull/3378
ff99ef383f647627dc13cfb71f5dd3c9b26ea76c
3744167d52a52892b5ff98d3b07ab250970fdb5d
"2020-07-31T03:56:02Z"
java
"2020-08-03T02:31:27Z"
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/consumer/TaskPriorityQueueConsumer.java
sqoopTaskExecutionContext.setTargetType(dataTarget.getType().getCode()); sqoopTaskExecutionContext.setTargetConnectionParams(dataTarget.getConnectionParams()); } } /** * set SQL task relation * @param sqlTaskExecutionContext sqlTaskExecutionContext * @param taskNode taskNode */ private void setSQLTaskRelation(SQLTaskExecutionContext sqlTaskExecutionContext, TaskNode taskNode) { SqlParameters sqlParameters = JSONObject.parseObject(taskNode.getParams(), SqlParameters.class); int datasourceId = sqlParameters.getDatasource(); DataSource datasource = processService.findDataSourceById(datasourceId); sqlTaskExecutionContext.setConnectionParams(datasource.getConnectionParams()); // whether udf type boolean udfTypeFlag = EnumUtils.isValidEnum(UdfType.class, sqlParameters.getType()) && StringUtils.isNotEmpty(sqlParameters.getUdfs()); if (udfTypeFlag){ String[] udfFunIds = sqlParameters.getUdfs().split(","); int[] udfFunIdsArray = new int[udfFunIds.length]; for(int i = 0 ; i < udfFunIds.length;i++){ udfFunIdsArray[i]=Integer.parseInt(udfFunIds[i]); } List<UdfFunc> udfFuncList = processService.queryUdfFunListByids(udfFunIdsArray); Map<UdfFunc,String> udfFuncMap = new HashMap<>(); for(UdfFunc udfFunc : udfFuncList) { String tenantCode = processService.queryTenantCodeByResName(udfFunc.getResourceName(), ResourceType.UDF); udfFuncMap.put(udfFunc,tenantCode); } sqlTaskExecutionContext.setUdfFuncTenantCodeMap(udfFuncMap);
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,364
After updating the version from 1.2.0 to 1.3.1, running the Spark task fails because it can't find the jar
*For better global communication, please give priority to using English description, thx!* *Please review https://dolphinscheduler.apache.org/en-us/docs/development/issue.html when describing an issue.* **Describe the question** After upgrading from 1.2.0 to 1.3.1, the change in the task JSON format leaves Spark tasks unable to find their jar packages, and the tasks fail with ClassNotFound. Even after applying the MySQL upgrade script, the JSON is not converted into a format that 1.3 can recognize. Did the upgrade script skip the migration for Spark tasks, or is 1.3.1 not compatible with 1.2.0 tasks? As a result, I have to re-edit and re-save every Spark task so that it is refreshed into the 1.3.1 JSON format before it can run. **Which version of DolphinScheduler:** -[1.3.1-preview] **Additional context** **Requirement or improvement** - Please describe your requirements or improvement suggestions.
https://github.com/apache/dolphinscheduler/issues/3364
https://github.com/apache/dolphinscheduler/pull/3378
ff99ef383f647627dc13cfb71f5dd3c9b26ea76c
3744167d52a52892b5ff98d3b07ab250970fdb5d
"2020-07-31T03:56:02Z"
java
"2020-08-03T02:31:27Z"
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/consumer/TaskPriorityQueueConsumer.java
} } /** * get execute local path * * @return execute local path */ private String getExecLocalPath(TaskInstance taskInstance){ return FileUtils.getProcessExecDir(taskInstance.getProcessDefine().getProjectId(), taskInstance.getProcessDefine().getId(), taskInstance.getProcessInstance().getId(), taskInstance.getId()); } /** * whehter tenant is null * @param tenant tenant * @param taskInstance taskInstance * @return result */ private boolean verifyTenantIsNull(Tenant tenant, TaskInstance taskInstance) { if(tenant == null){ logger.error("tenant not exists,process instance id : {},task instance id : {}", taskInstance.getProcessInstance().getId(), taskInstance.getId()); return true; } return false; } /** * get resource map key is full name and value is tenantCode
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,364
After updating the version from 1.2.0 to 1.3.1, running the Spark task fails because it can't find the jar
*For better global communication, please give priority to using English description, thx!* *Please review https://dolphinscheduler.apache.org/en-us/docs/development/issue.html when describing an issue.* **Describe the question** After upgrading from 1.2.0 to 1.3.1, the change in the task JSON format leaves Spark tasks unable to find their jar packages, and the tasks fail with ClassNotFound. Even after applying the MySQL upgrade script, the JSON is not converted into a format that 1.3 can recognize. Did the upgrade script skip the migration for Spark tasks, or is 1.3.1 not compatible with 1.2.0 tasks? As a result, I have to re-edit and re-save every Spark task so that it is refreshed into the 1.3.1 JSON format before it can run. **Which version of DolphinScheduler:** -[1.3.1-preview] **Additional context** **Requirement or improvement** - Please describe your requirements or improvement suggestions.
https://github.com/apache/dolphinscheduler/issues/3364
https://github.com/apache/dolphinscheduler/pull/3378
ff99ef383f647627dc13cfb71f5dd3c9b26ea76c
3744167d52a52892b5ff98d3b07ab250970fdb5d
"2020-07-31T03:56:02Z"
java
"2020-08-03T02:31:27Z"
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/consumer/TaskPriorityQueueConsumer.java
*/ private Map<String,String> getResourceFullNames(TaskNode taskNode) { Map<String,String> resourceMap = new HashMap<>(); AbstractParameters baseParam = TaskParametersUtils.getParameters(taskNode.getType(), taskNode.getParams()); if (baseParam != null) { List<ResourceInfo> projectResourceFiles = baseParam.getResourceFilesList(); if (CollectionUtils.isNotEmpty(projectResourceFiles)) { // filter Set<ResourceInfo> oldVersionResources = projectResourceFiles.stream().filter(t -> t.getId() == 0).collect(Collectors.toSet()); if (CollectionUtils.isNotEmpty(oldVersionResources)) { oldVersionResources.forEach( (t)->resourceMap.put(t.getRes(), processService.queryTenantCodeByResName(t.getRes(), ResourceType.FILE)) ); } // get the Stream<Integer> resourceIdStream = projectResourceFiles.stream().map(resourceInfo -> resourceInfo.getId()); Set<Integer> resourceIdsSet = resourceIdStream.collect(Collectors.toSet()); if (CollectionUtils.isNotEmpty(resourceIdsSet)) { Integer[] resourceIds = resourceIdsSet.toArray(new Integer[resourceIdsSet.size()]); List<Resource> resources = processService.listResourceByIds(resourceIds); resources.forEach( (t)->resourceMap.put(t.getFullName(),processService.queryTenantCodeByResName(t.getFullName(), ResourceType.FILE)) ); } } } return resourceMap; } }
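The chunk above resolves the resource references carried in a task's params: entries whose id is 0 are treated as older, name-only references and are resolved by resource name, while the remaining entries are resolved by id, which lines up with the compatibility gap described in the issue for pre-1.3 Spark task JSON. The sketch below is a condensed, hedged restatement of that idea only; ResourceLookup and ResourceNameResolver are hypothetical stand-ins for the lookups that processService performs, and the trimmed ResourceInfo keeps just the two fields the chunk uses.

```java
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/** Hypothetical stand-in for the lookups performed via processService in the real code. */
interface ResourceLookup {
    /** Tenant code for a resource identified by its (full) name. */
    String tenantCodeByResourceName(String resourceName);
    /** Authoritative full name for a resource identified by id (1.3.x-style reference). */
    String fullNameById(int resourceId);
}

/** Trimmed-down ResourceInfo, keeping only the two fields used in the chunk above. */
class ResourceInfo {
    int id;       // 0 means the task JSON still carries an old, name-only reference
    String res;   // resource name as stored in the task params

    ResourceInfo(int id, String res) {
        this.id = id;
        this.res = res;
    }
}

class ResourceNameResolver {
    /**
     * Same idea as getResourceFullNames: map each resource's full name to its tenant code,
     * falling back to the stored name when no id is available (pre-1.3 task JSON).
     */
    static Map<String, String> resolve(List<ResourceInfo> resources, ResourceLookup lookup) {
        Map<String, String> fullNameToTenantCode = new HashMap<>();
        for (ResourceInfo info : resources) {
            if (info.id == 0) {
                // old-style reference: only the resource name is known
                fullNameToTenantCode.put(info.res, lookup.tenantCodeByResourceName(info.res));
            } else {
                // new-style reference: resolve the full name from the id first
                String fullName = lookup.fullNameById(info.id);
                fullNameToTenantCode.put(fullName, lookup.tenantCodeByResourceName(fullName));
            }
        }
        return fullNameToTenantCode;
    }
}
```

The real method batches the id-based lookups (collecting the ids and calling listResourceByIds once) rather than resolving them one at a time; the per-entry loop here is only to keep the sketch short.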
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,364
After updating the version from 1.2.0 to 1.3.1, running the Spark task fails because it can't find the jar
*For better global communication, please give priority to using English description, thx!* *Please review https://dolphinscheduler.apache.org/en-us/docs/development/issue.html when describing an issue.* **Describe the question** After upgrading from 1.2.0 to 1.3.1, the change in the task JSON format leaves Spark tasks unable to find their jar packages, and the tasks fail with ClassNotFound. Even after applying the MySQL upgrade script, the JSON is not converted into a format that 1.3 can recognize. Did the upgrade script skip the migration for Spark tasks, or is 1.3.1 not compatible with 1.2.0 tasks? As a result, I have to re-edit and re-save every Spark task so that it is refreshed into the 1.3.1 JSON format before it can run. **Which version of DolphinScheduler:** -[1.3.1-preview] **Additional context** **Requirement or improvement** - Please describe your requirements or improvement suggestions.
https://github.com/apache/dolphinscheduler/issues/3364
https://github.com/apache/dolphinscheduler/pull/3378
ff99ef383f647627dc13cfb71f5dd3c9b26ea76c
3744167d52a52892b5ff98d3b07ab250970fdb5d
"2020-07-31T03:56:02Z"
java
"2020-08-03T02:31:27Z"
dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessService.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.service.process; import com.alibaba.fastjson.JSON;
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,364
After updating the version from 1.2.0 to 1.3.1, running the Spark task fails because it can't find the jar
*For better global communication, please give priority to using English description, thx!* *Please review https://dolphinscheduler.apache.org/en-us/docs/development/issue.html when describing an issue.* **Describe the question** After upgrading from 1.2.0 to 1.3.1, the change in the task JSON format leaves Spark tasks unable to find their jar packages, and the tasks fail with ClassNotFound. Even after applying the MySQL upgrade script, the JSON is not converted into a format that 1.3 can recognize. Did the upgrade script skip the migration for Spark tasks, or is 1.3.1 not compatible with 1.2.0 tasks? As a result, I have to re-edit and re-save every Spark task so that it is refreshed into the 1.3.1 JSON format before it can run. **Which version of DolphinScheduler:** -[1.3.1-preview] **Additional context** **Requirement or improvement** - Please describe your requirements or improvement suggestions.
https://github.com/apache/dolphinscheduler/issues/3364
https://github.com/apache/dolphinscheduler/pull/3378
ff99ef383f647627dc13cfb71f5dd3c9b26ea76c
3744167d52a52892b5ff98d3b07ab250970fdb5d
"2020-07-31T03:56:02Z"
java
"2020-08-03T02:31:27Z"
dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessService.java
import com.alibaba.fastjson.JSONObject; import com.cronutils.model.Cron; import org.apache.commons.lang.ArrayUtils; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.*; import org.apache.dolphinscheduler.common.model.DateInterval; import org.apache.dolphinscheduler.common.model.TaskNode; import org.apache.dolphinscheduler.common.process.Property; import org.apache.dolphinscheduler.common.task.subprocess.SubProcessParameters; import org.apache.dolphinscheduler.common.utils.*; import org.apache.dolphinscheduler.dao.entity.*; import org.apache.dolphinscheduler.dao.mapper.*; import org.apache.dolphinscheduler.remote.utils.Host; import org.apache.dolphinscheduler.service.log.LogClientService; import org.apache.dolphinscheduler.service.quartz.cron.CronUtils; import org.quartz.CronExpression; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; import org.springframework.transaction.annotation.Transactional; import java.io.File; import java.util.*; import java.util.stream.Collectors; import static java.util.stream.Collectors.toSet; import static org.apache.dolphinscheduler.common.Constants.*; /** * process relative dao that some mappers in this. */ @Component
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,364
After updating the version from 1.2.0 to 1.3.1, running the Spark task fails because it can't find the jar
*For better global communication, please give priority to using English description, thx!* *Please review https://dolphinscheduler.apache.org/en-us/docs/development/issue.html when describing an issue.* **Describe the question** After upgrading from 1.2.0 to 1.3.1, the change in the task JSON format leaves Spark tasks unable to find their jar packages, and the tasks fail with ClassNotFound. Even after applying the MySQL upgrade script, the JSON is not converted into a format that 1.3 can recognize. Did the upgrade script skip the migration for Spark tasks, or is 1.3.1 not compatible with 1.2.0 tasks? As a result, I have to re-edit and re-save every Spark task so that it is refreshed into the 1.3.1 JSON format before it can run. **Which version of DolphinScheduler:** -[1.3.1-preview] **Additional context** **Requirement or improvement** - Please describe your requirements or improvement suggestions.
https://github.com/apache/dolphinscheduler/issues/3364
https://github.com/apache/dolphinscheduler/pull/3378
ff99ef383f647627dc13cfb71f5dd3c9b26ea76c
3744167d52a52892b5ff98d3b07ab250970fdb5d
"2020-07-31T03:56:02Z"
java
"2020-08-03T02:31:27Z"
dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessService.java
public class ProcessService { private final Logger logger = LoggerFactory.getLogger(getClass()); private final int[] stateArray = new int[]{ExecutionStatus.SUBMITTED_SUCCESS.ordinal(), ExecutionStatus.RUNNING_EXEUTION.ordinal(), ExecutionStatus.READY_PAUSE.ordinal(), ExecutionStatus.READY_STOP.ordinal()}; @Autowired private UserMapper userMapper; @Autowired private ProcessDefinitionMapper processDefineMapper; @Autowired private ProcessInstanceMapper processInstanceMapper; @Autowired private DataSourceMapper dataSourceMapper; @Autowired private ProcessInstanceMapMapper processInstanceMapMapper; @Autowired private TaskInstanceMapper taskInstanceMapper; @Autowired private CommandMapper commandMapper; @Autowired private ScheduleMapper scheduleMapper; @Autowired
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,364
After updating the version from 1.2.0 to 1.3.1, running the Spark task fails because it can't find the jar
*For better global communication, please give priority to using English description, thx!* *Please review https://dolphinscheduler.apache.org/en-us/docs/development/issue.html when describing an issue.* **Describe the question** After upgrading from 1.2.0 to 1.3.1, the change in the task JSON format leaves Spark tasks unable to find their jar packages, and the tasks fail with ClassNotFound. Even after applying the MySQL upgrade script, the JSON is not converted into a format that 1.3 can recognize. Did the upgrade script skip the migration for Spark tasks, or is 1.3.1 not compatible with 1.2.0 tasks? As a result, I have to re-edit and re-save every Spark task so that it is refreshed into the 1.3.1 JSON format before it can run. **Which version of DolphinScheduler:** -[1.3.1-preview] **Additional context** **Requirement or improvement** - Please describe your requirements or improvement suggestions.
https://github.com/apache/dolphinscheduler/issues/3364
https://github.com/apache/dolphinscheduler/pull/3378
ff99ef383f647627dc13cfb71f5dd3c9b26ea76c
3744167d52a52892b5ff98d3b07ab250970fdb5d
"2020-07-31T03:56:02Z"
java
"2020-08-03T02:31:27Z"
dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessService.java
private UdfFuncMapper udfFuncMapper; @Autowired private ResourceMapper resourceMapper; @Autowired private ErrorCommandMapper errorCommandMapper; @Autowired private TenantMapper tenantMapper; @Autowired private ProjectMapper projectMapper; /** * handle Command (construct ProcessInstance from Command) , wrapped in transaction * @param logger logger * @param host host * @param validThreadNum validThreadNum * @param command found command * @return process instance */ @Transactional(rollbackFor = Exception.class) public ProcessInstance handleCommand(Logger logger, String host, int validThreadNum, Command command) { ProcessInstance processInstance = constructProcessInstance(command, host); if(processInstance == null){ logger.error("scan command, command parameter is error: {}", command); moveToErrorCommand(command, "process instance is null"); return null; } if(!checkThreadNum(command, validThreadNum)){ logger.info("there is not enough thread for this command: {}", command); return setWaitingThreadProcess(command, processInstance); }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,364
After updating the version from 1.2.0 to 1.3.1, running the Spark task fails because it can't find the jar
*For better global communication, please give priority to using English description, thx!* *Please review https://dolphinscheduler.apache.org/en-us/docs/development/issue.html when describing an issue.* **Describe the question** After upgrading from 1.2.0 to 1.3.1, the change in the task JSON format leaves Spark tasks unable to find their jar packages, and the tasks fail with ClassNotFound. Even after applying the MySQL upgrade script, the JSON is not converted into a format that 1.3 can recognize. Did the upgrade script skip the migration for Spark tasks, or is 1.3.1 not compatible with 1.2.0 tasks? As a result, I have to re-edit and re-save every Spark task so that it is refreshed into the 1.3.1 JSON format before it can run. **Which version of DolphinScheduler:** -[1.3.1-preview] **Additional context** **Requirement or improvement** - Please describe your requirements or improvement suggestions.
https://github.com/apache/dolphinscheduler/issues/3364
https://github.com/apache/dolphinscheduler/pull/3378
ff99ef383f647627dc13cfb71f5dd3c9b26ea76c
3744167d52a52892b5ff98d3b07ab250970fdb5d
"2020-07-31T03:56:02Z"
java
"2020-08-03T02:31:27Z"
dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessService.java
processInstance.setCommandType(command.getCommandType()); processInstance.addHistoryCmd(command.getCommandType()); saveProcessInstance(processInstance); this.setSubProcessParam(processInstance); delCommandByid(command.getId()); return processInstance; } /** * save error command, and delete original command * @param command command * @param message message */ @Transactional(rollbackFor = Exception.class) public void moveToErrorCommand(Command command, String message) { ErrorCommand errorCommand = new ErrorCommand(command, message); this.errorCommandMapper.insert(errorCommand); delCommandByid(command.getId()); } /** * set process waiting thread * @param command command * @param processInstance processInstance * @return process instance */ private ProcessInstance setWaitingThreadProcess(Command command, ProcessInstance processInstance) { processInstance.setState(ExecutionStatus.WAITTING_THREAD); if(command.getCommandType() != CommandType.RECOVER_WAITTING_THREAD){ processInstance.addHistoryCmd(command.getCommandType()); } saveProcessInstance(processInstance);
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,364
After updating the version from 1.2.0 to 1.3.1, running the Spark task fails because it can't find the jar
*For better global communication, please give priority to using English description, thx!* *Please review https://dolphinscheduler.apache.org/en-us/docs/development/issue.html when describing an issue.* **Describe the question** After upgrading from 1.2.0 to 1.3.1, the change in the task JSON format leaves Spark tasks unable to find their jar packages, and the tasks fail with ClassNotFound. Even after applying the MySQL upgrade script, the JSON is not converted into a format that 1.3 can recognize. Did the upgrade script skip the migration for Spark tasks, or is 1.3.1 not compatible with 1.2.0 tasks? As a result, I have to re-edit and re-save every Spark task so that it is refreshed into the 1.3.1 JSON format before it can run. **Which version of DolphinScheduler:** -[1.3.1-preview] **Additional context** **Requirement or improvement** - Please describe your requirements or improvement suggestions.
https://github.com/apache/dolphinscheduler/issues/3364
https://github.com/apache/dolphinscheduler/pull/3378
ff99ef383f647627dc13cfb71f5dd3c9b26ea76c
3744167d52a52892b5ff98d3b07ab250970fdb5d
"2020-07-31T03:56:02Z"
java
"2020-08-03T02:31:27Z"
dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessService.java
this.setSubProcessParam(processInstance); createRecoveryWaitingThreadCommand(command, processInstance); return null; } /** * check thread num * @param command command * @param validThreadNum validThreadNum * @return if thread is enough */ private boolean checkThreadNum(Command command, int validThreadNum) { int commandThreadCount = this.workProcessThreadNumCount(command.getProcessDefinitionId()); return validThreadNum >= commandThreadCount; } /** * insert one command * @param command command * @return create result */ public int createCommand(Command command) { int result = 0; if (command != null){ result = commandMapper.insert(command); } return result; } /** * find one command from queue list * @return command */
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,364
After updating the version from 1.2.0 to 1.3.1, running the Spark task fails because it can't find the jar
*For better global communication, please give priority to using English description, thx!* *Please review https://dolphinscheduler.apache.org/en-us/docs/development/issue.html when describing an issue.* **Describe the question** After upgrading from 1.2.0 to 1.3.1, the change in the task JSON format leaves Spark tasks unable to find their jar packages, and the tasks fail with ClassNotFound. Even after applying the MySQL upgrade script, the JSON is not converted into a format that 1.3 can recognize. Did the upgrade script skip the migration for Spark tasks, or is 1.3.1 not compatible with 1.2.0 tasks? As a result, I have to re-edit and re-save every Spark task so that it is refreshed into the 1.3.1 JSON format before it can run. **Which version of DolphinScheduler:** -[1.3.1-preview] **Additional context** **Requirement or improvement** - Please describe your requirements or improvement suggestions.
https://github.com/apache/dolphinscheduler/issues/3364
https://github.com/apache/dolphinscheduler/pull/3378
ff99ef383f647627dc13cfb71f5dd3c9b26ea76c
3744167d52a52892b5ff98d3b07ab250970fdb5d
"2020-07-31T03:56:02Z"
java
"2020-08-03T02:31:27Z"
dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessService.java
public Command findOneCommand(){ return commandMapper.getOneToRun(); } /** * check the input command exists in queue list * @param command command * @return create command result */ public Boolean verifyIsNeedCreateCommand(Command command){ Boolean isNeedCreate = true; Map<CommandType,Integer> cmdTypeMap = new HashMap<CommandType,Integer>(); cmdTypeMap.put(CommandType.REPEAT_RUNNING,1); cmdTypeMap.put(CommandType.RECOVER_SUSPENDED_PROCESS,1); cmdTypeMap.put(CommandType.START_FAILURE_TASK_PROCESS,1); CommandType commandType = command.getCommandType(); if(cmdTypeMap.containsKey(commandType)){ JSONObject cmdParamObj = (JSONObject) JSON.parse(command.getCommandParam()); JSONObject tempObj; int processInstanceId = cmdParamObj.getInteger(CMDPARAM_RECOVER_PROCESS_ID_STRING); List<Command> commands = commandMapper.selectList(null); for (Command tmpCommand:commands){ if(cmdTypeMap.containsKey(tmpCommand.getCommandType())){ tempObj = (JSONObject) JSON.parse(tmpCommand.getCommandParam()); if(tempObj != null && processInstanceId == tempObj.getInteger(CMDPARAM_RECOVER_PROCESS_ID_STRING)){ isNeedCreate = false; break; } } }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,364
After updating the version from 1.2.0 to 1.3.1, running the Spark task fails because it can't find the jar
*For better global communication, please give priority to using English description, thx!* *Please review https://dolphinscheduler.apache.org/en-us/docs/development/issue.html when describing an issue.* **Describe the question** After upgrading from 1.2.0 to 1.3.1, the change in the task JSON format leaves Spark tasks unable to find their jar packages, and the tasks fail with ClassNotFound. Even after applying the MySQL upgrade script, the JSON is not converted into a format that 1.3 can recognize. Did the upgrade script skip the migration for Spark tasks, or is 1.3.1 not compatible with 1.2.0 tasks? As a result, I have to re-edit and re-save every Spark task so that it is refreshed into the 1.3.1 JSON format before it can run. **Which version of DolphinScheduler:** -[1.3.1-preview] **Additional context** **Requirement or improvement** - Please describe your requirements or improvement suggestions.
https://github.com/apache/dolphinscheduler/issues/3364
https://github.com/apache/dolphinscheduler/pull/3378
ff99ef383f647627dc13cfb71f5dd3c9b26ea76c
3744167d52a52892b5ff98d3b07ab250970fdb5d
"2020-07-31T03:56:02Z"
java
"2020-08-03T02:31:27Z"
dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessService.java
} return isNeedCreate; } /** * find process instance detail by id * @param processId processId * @return process instance */ public ProcessInstance findProcessInstanceDetailById(int processId){ return processInstanceMapper.queryDetailById(processId); } /** * get task node list by definitionId * @param defineId * @return */ public List<TaskNode> getTaskNodeListByDefinitionId(Integer defineId){ ProcessDefinition processDefinition = processDefineMapper.selectById(defineId); if (processDefinition == null) { logger.info("process define not exists"); return null; } String processDefinitionJson = processDefinition.getProcessDefinitionJson(); ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class); if (null == processData) { logger.error("process data is null"); return new ArrayList<>(); } return processData.getTasks();
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,364
After updating the version from 1.2.0 to 1.3.1, running the Spark task fails because it can't find the jar
*For better global communication, please give priority to using English description, thx!* *Please review https://dolphinscheduler.apache.org/en-us/docs/development/issue.html when describing an issue.* **Describe the question** After upgrading from 1.2.0 to 1.3.1, the change in the task JSON format leaves Spark tasks unable to find their jar packages, and the tasks fail with ClassNotFound. Even after applying the MySQL upgrade script, the JSON is not converted into a format that 1.3 can recognize. Did the upgrade script skip the migration for Spark tasks, or is 1.3.1 not compatible with 1.2.0 tasks? As a result, I have to re-edit and re-save every Spark task so that it is refreshed into the 1.3.1 JSON format before it can run. **Which version of DolphinScheduler:** -[1.3.1-preview] **Additional context** **Requirement or improvement** - Please describe your requirements or improvement suggestions.
https://github.com/apache/dolphinscheduler/issues/3364
https://github.com/apache/dolphinscheduler/pull/3378
ff99ef383f647627dc13cfb71f5dd3c9b26ea76c
3744167d52a52892b5ff98d3b07ab250970fdb5d
"2020-07-31T03:56:02Z"
java
"2020-08-03T02:31:27Z"
dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessService.java
} /** * find process instance by id * @param processId processId * @return process instance */ public ProcessInstance findProcessInstanceById(int processId){ return processInstanceMapper.selectById(processId); } /** * find process define by id. * @param processDefinitionId processDefinitionId * @return process definition */ public ProcessDefinition findProcessDefineById(int processDefinitionId) { return processDefineMapper.selectById(processDefinitionId); } /** * delete work process instance by id * @param processInstanceId processInstanceId * @return delete process instance result */ public int deleteWorkProcessInstanceById(int processInstanceId){ return processInstanceMapper.deleteById(processInstanceId); } /** * delete all sub process by parent instance id * @param processInstanceId processInstanceId * @return delete all sub process instance result */
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,364
After updating the version from 1.2.0 to 1.3.1, running the Spark task fails because it can't find the jar
*For better global communication, please give priority to using English description, thx!* *Please review https://dolphinscheduler.apache.org/en-us/docs/development/issue.html when describing an issue.* **Describe the question** After upgrading from 1.2.0 to 1.3.1, the change in the task JSON format leaves Spark tasks unable to find their jar packages, and the tasks fail with ClassNotFound. Even after applying the MySQL upgrade script, the JSON is not converted into a format that 1.3 can recognize. Did the upgrade script skip the migration for Spark tasks, or is 1.3.1 not compatible with 1.2.0 tasks? As a result, I have to re-edit and re-save every Spark task so that it is refreshed into the 1.3.1 JSON format before it can run. **Which version of DolphinScheduler:** -[1.3.1-preview] **Additional context** **Requirement or improvement** - Please describe your requirements or improvement suggestions.
https://github.com/apache/dolphinscheduler/issues/3364
https://github.com/apache/dolphinscheduler/pull/3378
ff99ef383f647627dc13cfb71f5dd3c9b26ea76c
3744167d52a52892b5ff98d3b07ab250970fdb5d
"2020-07-31T03:56:02Z"
java
"2020-08-03T02:31:27Z"
dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessService.java
public int deleteAllSubWorkProcessByParentId(int processInstanceId){ List<Integer> subProcessIdList = processInstanceMapMapper.querySubIdListByParentId(processInstanceId); for(Integer subId : subProcessIdList){ deleteAllSubWorkProcessByParentId(subId); deleteWorkProcessMapByParentId(subId); removeTaskLogFile(subId); deleteWorkProcessInstanceById(subId); } return 1; } /** * remove task log file * @param processInstanceId processInstanceId */ public void removeTaskLogFile(Integer processInstanceId){ LogClientService logClient = new LogClientService(); List<TaskInstance> taskInstanceList = findValidTaskListByProcessId(processInstanceId); if (CollectionUtils.isEmpty(taskInstanceList)){ return; } for (TaskInstance taskInstance : taskInstanceList){ String taskLogPath = taskInstance.getLogPath(); if (StringUtils.isEmpty(taskInstance.getHost())){ continue; } int port = Constants.RPC_PORT; String ip = ""; try { ip = Host.of(taskInstance.getHost()).getIp(); }catch (Exception e){
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,364
After updating the version from 1.2.0 to 1.3.1, running the Spark task fails because it can't find the jar
*For better global communication, please give priority to using English description, thx!* *Please review https://dolphinscheduler.apache.org/en-us/docs/development/issue.html when describing an issue.* **Describe the question** After upgrading from 1.2.0 to 1.3.1, the change in the task JSON format leaves Spark tasks unable to find their jar packages, and the tasks fail with ClassNotFound. Even after applying the MySQL upgrade script, the JSON is not converted into a format that 1.3 can recognize. Did the upgrade script skip the migration for Spark tasks, or is 1.3.1 not compatible with 1.2.0 tasks? As a result, I have to re-edit and re-save every Spark task so that it is refreshed into the 1.3.1 JSON format before it can run. **Which version of DolphinScheduler:** -[1.3.1-preview] **Additional context** **Requirement or improvement** - Please describe your requirements or improvement suggestions.
https://github.com/apache/dolphinscheduler/issues/3364
https://github.com/apache/dolphinscheduler/pull/3378
ff99ef383f647627dc13cfb71f5dd3c9b26ea76c
3744167d52a52892b5ff98d3b07ab250970fdb5d
"2020-07-31T03:56:02Z"
java
"2020-08-03T02:31:27Z"
dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessService.java
ip = taskInstance.getHost(); } logClient.removeTaskLog(ip,port,taskLogPath); } } /** * calculate sub process number in the process define. * @param processDefinitionId processDefinitionId * @return process thread num count */ private Integer workProcessThreadNumCount(Integer processDefinitionId){ List<Integer> ids = new ArrayList<>(); recurseFindSubProcessId(processDefinitionId, ids); return ids.size()+1; } /** * recursive query sub process definition id by parent id. * @param parentId parentId * @param ids ids */ public void recurseFindSubProcessId(int parentId, List<Integer> ids){ ProcessDefinition processDefinition = processDefineMapper.selectById(parentId); String processDefinitionJson = processDefinition.getProcessDefinitionJson(); ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class); List<TaskNode> taskNodeList = processData.getTasks(); if (taskNodeList != null && taskNodeList.size() > 0){ for (TaskNode taskNode : taskNodeList){ String parameter = taskNode.getParams();
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,364
After updating the version from 1.2.0 to 1.3.1, running the Spark task fails because it can't find the jar
*For better global communication, please give priority to using English description, thx!* *Please review https://dolphinscheduler.apache.org/en-us/docs/development/issue.html when describing an issue.* **Describe the question** After upgrading from 1.2.0 to 1.3.1, the change in the task JSON format leaves Spark tasks unable to find their jar packages, and the tasks fail with ClassNotFound. Even after applying the MySQL upgrade script, the JSON is not converted into a format that 1.3 can recognize. Did the upgrade script skip the migration for Spark tasks, or is 1.3.1 not compatible with 1.2.0 tasks? As a result, I have to re-edit and re-save every Spark task so that it is refreshed into the 1.3.1 JSON format before it can run. **Which version of DolphinScheduler:** -[1.3.1-preview] **Additional context** **Requirement or improvement** - Please describe your requirements or improvement suggestions.
https://github.com/apache/dolphinscheduler/issues/3364
https://github.com/apache/dolphinscheduler/pull/3378
ff99ef383f647627dc13cfb71f5dd3c9b26ea76c
3744167d52a52892b5ff98d3b07ab250970fdb5d
"2020-07-31T03:56:02Z"
java
"2020-08-03T02:31:27Z"
dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessService.java
JSONObject parameterJson = JSONObject.parseObject(parameter); if (parameterJson.getInteger(CMDPARAM_SUB_PROCESS_DEFINE_ID) != null){ SubProcessParameters subProcessParam = JSON.parseObject(parameter, SubProcessParameters.class); ids.add(subProcessParam.getProcessDefinitionId()); recurseFindSubProcessId(subProcessParam.getProcessDefinitionId(),ids); } } } } /** * create recovery waiting thread command when thread pool is not enough for the process instance. * sub work process instance need not to create recovery command. * create recovery waiting thread command and delete origin command at the same time. * if the recovery command is exists, only update the field update_time * @param originCommand originCommand * @param processInstance processInstance */ public void createRecoveryWaitingThreadCommand(Command originCommand, ProcessInstance processInstance) { if(processInstance.getIsSubProcess() == Flag.YES){ if(originCommand != null){ commandMapper.deleteById(originCommand.getId()); } return; } Map<String, String> cmdParam = new HashMap<>(); cmdParam.put(Constants.CMDPARAM_RECOVERY_WAITTING_THREAD, String.valueOf(processInstance.getId())); if(originCommand == null){ Command command = new Command(
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,364
After updating the version from 1.2.0 to 1.3.1, running the Spark task fails because it can't find the jar
*For better global communication, please give priority to using English description, thx!* *Please review https://dolphinscheduler.apache.org/en-us/docs/development/issue.html when describing an issue.* **Describe the question** After upgrading from 1.2.0 to 1.3.1, the change in the task JSON format leaves Spark tasks unable to find their jar packages, and the tasks fail with ClassNotFound. Even after applying the MySQL upgrade script, the JSON is not converted into a format that 1.3 can recognize. Did the upgrade script skip the migration for Spark tasks, or is 1.3.1 not compatible with 1.2.0 tasks? As a result, I have to re-edit and re-save every Spark task so that it is refreshed into the 1.3.1 JSON format before it can run. **Which version of DolphinScheduler:** -[1.3.1-preview] **Additional context** **Requirement or improvement** - Please describe your requirements or improvement suggestions.
https://github.com/apache/dolphinscheduler/issues/3364
https://github.com/apache/dolphinscheduler/pull/3378
ff99ef383f647627dc13cfb71f5dd3c9b26ea76c
3744167d52a52892b5ff98d3b07ab250970fdb5d
"2020-07-31T03:56:02Z"
java
"2020-08-03T02:31:27Z"
dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessService.java
CommandType.RECOVER_WAITTING_THREAD, processInstance.getTaskDependType(), processInstance.getFailureStrategy(), processInstance.getExecutorId(), processInstance.getProcessDefinitionId(), JSONUtils.toJson(cmdParam), processInstance.getWarningType(), processInstance.getWarningGroupId(), processInstance.getScheduleTime(), processInstance.getProcessInstancePriority() ); saveCommand(command); return ; } if(originCommand.getCommandType() == CommandType.RECOVER_WAITTING_THREAD){ originCommand.setUpdateTime(new Date()); saveCommand(originCommand); }else{ commandMapper.deleteById(originCommand.getId()); originCommand.setId(0); originCommand.setCommandType(CommandType.RECOVER_WAITTING_THREAD); originCommand.setUpdateTime(new Date()); originCommand.setCommandParam(JSONUtils.toJson(cmdParam)); originCommand.setProcessInstancePriority(processInstance.getProcessInstancePriority()); saveCommand(originCommand); } } /**
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,364
After updating the version from 1.2.0 to 1.3.1, running the Spark task fails because it can't find the jar
*For better global communication, please give priority to using English description, thx!* *Please review https://dolphinscheduler.apache.org/en-us/docs/development/issue.html when describing an issue.* **Describe the question** After upgrading from 1.2.0 to 1.3.1, the change in the task JSON format leaves Spark tasks unable to find their jar packages, and the tasks fail with ClassNotFound. Even after applying the MySQL upgrade script, the JSON is not converted into a format that 1.3 can recognize. Did the upgrade script skip the migration for Spark tasks, or is 1.3.1 not compatible with 1.2.0 tasks? As a result, I have to re-edit and re-save every Spark task so that it is refreshed into the 1.3.1 JSON format before it can run. **Which version of DolphinScheduler:** -[1.3.1-preview] **Additional context** **Requirement or improvement** - Please describe your requirements or improvement suggestions.
https://github.com/apache/dolphinscheduler/issues/3364
https://github.com/apache/dolphinscheduler/pull/3378
ff99ef383f647627dc13cfb71f5dd3c9b26ea76c
3744167d52a52892b5ff98d3b07ab250970fdb5d
"2020-07-31T03:56:02Z"
java
"2020-08-03T02:31:27Z"
dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessService.java
* get schedule time from command * @param command command * @param cmdParam cmdParam map * @return date */ private Date getScheduleTime(Command command, Map<String, String> cmdParam){ Date scheduleTime = command.getScheduleTime(); if(scheduleTime == null){ if(cmdParam != null && cmdParam.containsKey(CMDPARAM_COMPLEMENT_DATA_START_DATE)){ scheduleTime = DateUtils.stringToDate(cmdParam.get(CMDPARAM_COMPLEMENT_DATA_START_DATE)); } } return scheduleTime; } /** * generate a new work process instance from command. * @param processDefinition processDefinition * @param command command * @param cmdParam cmdParam map * @return process instance */ private ProcessInstance generateNewProcessInstance(ProcessDefinition processDefinition, Command command, Map<String, String> cmdParam){ ProcessInstance processInstance = new ProcessInstance(processDefinition); processInstance.setState(ExecutionStatus.RUNNING_EXEUTION); processInstance.setRecovery(Flag.NO); processInstance.setStartTime(new Date()); processInstance.setRunTimes(1); processInstance.setMaxTryTimes(0);
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,364
After updating the version from 1.2.0 to 1.3.1, running the Spark task fails because it can't find the jar
*For better global communication, please give priority to using English description, thx!* *Please review https://dolphinscheduler.apache.org/en-us/docs/development/issue.html when describing an issue.* **Describe the question** After upgrading from 1.2.0 to 1.3.1, the change in the task JSON format leaves Spark tasks unable to find their jar packages, and the tasks fail with ClassNotFound. Even after applying the MySQL upgrade script, the JSON is not converted into a format that 1.3 can recognize. Did the upgrade script skip the migration for Spark tasks, or is 1.3.1 not compatible with 1.2.0 tasks? As a result, I have to re-edit and re-save every Spark task so that it is refreshed into the 1.3.1 JSON format before it can run. **Which version of DolphinScheduler:** -[1.3.1-preview] **Additional context** **Requirement or improvement** - Please describe your requirements or improvement suggestions.
https://github.com/apache/dolphinscheduler/issues/3364
https://github.com/apache/dolphinscheduler/pull/3378
ff99ef383f647627dc13cfb71f5dd3c9b26ea76c
3744167d52a52892b5ff98d3b07ab250970fdb5d
"2020-07-31T03:56:02Z"
java
"2020-08-03T02:31:27Z"
dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessService.java
processInstance.setProcessDefinitionId(command.getProcessDefinitionId()); processInstance.setCommandParam(command.getCommandParam()); processInstance.setCommandType(command.getCommandType()); processInstance.setIsSubProcess(Flag.NO); processInstance.setTaskDependType(command.getTaskDependType()); processInstance.setFailureStrategy(command.getFailureStrategy()); processInstance.setExecutorId(command.getExecutorId()); WarningType warningType = command.getWarningType() == null ? WarningType.NONE : command.getWarningType(); processInstance.setWarningType(warningType); Integer warningGroupId = command.getWarningGroupId() == null ? 0 : command.getWarningGroupId(); processInstance.setWarningGroupId(warningGroupId); Date scheduleTime = getScheduleTime(command, cmdParam); if(scheduleTime != null){ processInstance.setScheduleTime(scheduleTime); } processInstance.setCommandStartTime(command.getStartTime()); processInstance.setLocations(processDefinition.getLocations()); processInstance.setConnects(processDefinition.getConnects()); processInstance.setGlobalParams(ParameterUtils.curingGlobalParams( processDefinition.getGlobalParamMap(), processDefinition.getGlobalParamList(), getCommandTypeIfComplement(processInstance, command), processInstance.getScheduleTime())); processInstance.setProcessInstanceJson(processDefinition.getProcessDefinitionJson()); processInstance.setProcessInstancePriority(command.getProcessInstancePriority()); String workerGroup = StringUtils.isBlank(command.getWorkerGroup()) ? Constants.DEFAULT_WORKER_GROUP : command.getWorkerGroup();
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,364
After updating the version from 1.2.0 to 1.3.1, running the Spark task fails because it can't find the jar
*For better global communication, please give priority to using English description, thx!* *Please review https://dolphinscheduler.apache.org/en-us/docs/development/issue.html when describing an issue.* **Describe the question** After upgrading from 1.2.0 to 1.3.1, the change in the task JSON format leaves Spark tasks unable to find their jar packages, and the tasks fail with ClassNotFound. Even after applying the MySQL upgrade script, the JSON is not converted into a format that 1.3 can recognize. Did the upgrade script skip the migration for Spark tasks, or is 1.3.1 not compatible with 1.2.0 tasks? As a result, I have to re-edit and re-save every Spark task so that it is refreshed into the 1.3.1 JSON format before it can run. **Which version of DolphinScheduler:** -[1.3.1-preview] **Additional context** **Requirement or improvement** - Please describe your requirements or improvement suggestions.
https://github.com/apache/dolphinscheduler/issues/3364
https://github.com/apache/dolphinscheduler/pull/3378
ff99ef383f647627dc13cfb71f5dd3c9b26ea76c
3744167d52a52892b5ff98d3b07ab250970fdb5d
"2020-07-31T03:56:02Z"
java
"2020-08-03T02:31:27Z"
dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessService.java
processInstance.setWorkerGroup(workerGroup); processInstance.setTimeout(processDefinition.getTimeout()); processInstance.setTenantId(processDefinition.getTenantId()); return processInstance; } /** * get process tenant * there is tenant id in definition, use the tenant of the definition. * if there is not tenant id in the definiton or the tenant not exist * use definition creator's tenant. * @param tenantId tenantId * @param userId userId * @return tenant */ public Tenant getTenantForProcess(int tenantId, int userId){ Tenant tenant = null; if(tenantId >= 0){ tenant = tenantMapper.queryById(tenantId); } if (userId == 0){ return null; } if(tenant == null){ User user = userMapper.selectById(userId); tenant = tenantMapper.queryById(user.getTenantId()); } return tenant; } /** * check command parameters is valid
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,364
After updating the version from 1.2.0 to 1.3.1, running the Spark task fails because it can't find the jar
*For better global communication, please give priority to using English description, thx!* *Please review https://dolphinscheduler.apache.org/en-us/docs/development/issue.html when describing an issue.* **Describe the question** After upgrading from 1.2.0 to 1.3.1, the change in the task JSON format leaves Spark tasks unable to find their jar packages, and the tasks fail with ClassNotFound. Even after applying the MySQL upgrade script, the JSON is not converted into a format that 1.3 can recognize. Did the upgrade script skip the migration for Spark tasks, or is 1.3.1 not compatible with 1.2.0 tasks? As a result, I have to re-edit and re-save every Spark task so that it is refreshed into the 1.3.1 JSON format before it can run. **Which version of DolphinScheduler:** -[1.3.1-preview] **Additional context** **Requirement or improvement** - Please describe your requirements or improvement suggestions.
https://github.com/apache/dolphinscheduler/issues/3364
https://github.com/apache/dolphinscheduler/pull/3378
ff99ef383f647627dc13cfb71f5dd3c9b26ea76c
3744167d52a52892b5ff98d3b07ab250970fdb5d
"2020-07-31T03:56:02Z"
java
"2020-08-03T02:31:27Z"
dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessService.java
* @param command command * @param cmdParam cmdParam map * @return whether command param is valid */ private Boolean checkCmdParam(Command command, Map<String, String> cmdParam){ if(command.getTaskDependType() == TaskDependType.TASK_ONLY || command.getTaskDependType()== TaskDependType.TASK_PRE){ if(cmdParam == null || !cmdParam.containsKey(Constants.CMDPARAM_START_NODE_NAMES) || cmdParam.get(Constants.CMDPARAM_START_NODE_NAMES).isEmpty()){ logger.error("command node depend type is {}, but start nodes is null ", command.getTaskDependType()); return false; } } return true; } /** * construct process instance according to one command. * @param command command * @param host host * @return process instance */ private ProcessInstance constructProcessInstance(Command command, String host){ ProcessInstance processInstance = null; CommandType commandType = command.getCommandType(); Map<String, String> cmdParam = JSONUtils.toMap(command.getCommandParam()); ProcessDefinition processDefinition = null; if(command.getProcessDefinitionId() != 0){ processDefinition = processDefineMapper.selectById(command.getProcessDefinitionId()); if(processDefinition == null){ logger.error("cannot find the work process define! define id : {}", command.getProcessDefinitionId());
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,364
After updating the version from 1.2.0 to 1.3.1, running the Spark task fails because it can't find the jar
*For better global communication, please give priority to using English description, thx!* *Please review https://dolphinscheduler.apache.org/en-us/docs/development/issue.html when describing an issue.* **Describe the question** After upgrading from 1.2.0 to 1.3.1, the change in the task JSON format leaves Spark tasks unable to find their jar packages, and the tasks fail with ClassNotFound. Even after applying the MySQL upgrade script, the JSON is not converted into a format that 1.3 can recognize. Did the upgrade script skip the migration for Spark tasks, or is 1.3.1 not compatible with 1.2.0 tasks? As a result, I have to re-edit and re-save every Spark task so that it is refreshed into the 1.3.1 JSON format before it can run. **Which version of DolphinScheduler:** -[1.3.1-preview] **Additional context** **Requirement or improvement** - Please describe your requirements or improvement suggestions.
https://github.com/apache/dolphinscheduler/issues/3364
https://github.com/apache/dolphinscheduler/pull/3378
ff99ef383f647627dc13cfb71f5dd3c9b26ea76c
3744167d52a52892b5ff98d3b07ab250970fdb5d
"2020-07-31T03:56:02Z"
java
"2020-08-03T02:31:27Z"
dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessService.java
return null; } } if(cmdParam != null ){ Integer processInstanceId = 0; if(cmdParam.containsKey(Constants.CMDPARAM_RECOVER_PROCESS_ID_STRING)) { String processId = cmdParam.get(Constants.CMDPARAM_RECOVER_PROCESS_ID_STRING); processInstanceId = Integer.parseInt(processId); if (processInstanceId == 0) { logger.error("command parameter is error, [ ProcessInstanceId ] is 0"); return null; } }else if(cmdParam.containsKey(Constants.CMDPARAM_SUB_PROCESS)){ String pId = cmdParam.get(Constants.CMDPARAM_SUB_PROCESS); processInstanceId = Integer.parseInt(pId); }else if(cmdParam.containsKey(Constants.CMDPARAM_RECOVERY_WAITTING_THREAD)){ String pId = cmdParam.get(Constants.CMDPARAM_RECOVERY_WAITTING_THREAD); processInstanceId = Integer.parseInt(pId); } if(processInstanceId ==0){ processInstance = generateNewProcessInstance(processDefinition, command, cmdParam); }else{ processInstance = this.findProcessInstanceDetailById(processInstanceId); } processDefinition = processDefineMapper.selectById(processInstance.getProcessDefinitionId()); processInstance.setProcessDefinition(processDefinition);
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,364
After updating the version from 1.2.0 to 1.3.1, running the Spark task fails because it can't find the jar
*For better global communication, please give priority to using English description, thx!* *Please review https://dolphinscheduler.apache.org/en-us/docs/development/issue.html when describing an issue.* **Describe the question** After upgrading from 1.2.0 to 1.3.1, the change in the task JSON format leaves Spark tasks unable to find their jar packages, and the tasks fail with ClassNotFound. Even after applying the MySQL upgrade script, the JSON is not converted into a format that 1.3 can recognize. Did the upgrade script skip the migration for Spark tasks, or is 1.3.1 not compatible with 1.2.0 tasks? As a result, I have to re-edit and re-save every Spark task so that it is refreshed into the 1.3.1 JSON format before it can run. **Which version of DolphinScheduler:** -[1.3.1-preview] **Additional context** **Requirement or improvement** - Please describe your requirements or improvement suggestions.
https://github.com/apache/dolphinscheduler/issues/3364
https://github.com/apache/dolphinscheduler/pull/3378
ff99ef383f647627dc13cfb71f5dd3c9b26ea76c
3744167d52a52892b5ff98d3b07ab250970fdb5d
"2020-07-31T03:56:02Z"
java
"2020-08-03T02:31:27Z"
dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessService.java
if(processInstance.getCommandParam() != null){ Map<String, String> processCmdParam = JSONUtils.toMap(processInstance.getCommandParam()); for(Map.Entry<String, String> entry: processCmdParam.entrySet()) { if(!cmdParam.containsKey(entry.getKey())){ cmdParam.put(entry.getKey(), entry.getValue()); } } } if(cmdParam.containsKey(Constants.CMDPARAM_SUB_PROCESS)){ processInstance.setCommandParam(command.getCommandParam()); } }else{ processInstance = generateNewProcessInstance(processDefinition, command, cmdParam); } if(!checkCmdParam(command, cmdParam)){ logger.error("command parameter check failed!"); return null; } if(command.getScheduleTime() != null){ processInstance.setScheduleTime(command.getScheduleTime()); } processInstance.setHost(host); ExecutionStatus runStatus = ExecutionStatus.RUNNING_EXEUTION; int runTime = processInstance.getRunTimes(); switch (commandType){ case START_PROCESS: break; case START_FAILURE_TASK_PROCESS:
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,364
After updating the version from 1.2.0 to 1.3.1, running the Spark task fails because it can't find the jar
*For better global communication, please give priority to using English description, thx!* *Please review https://dolphinscheduler.apache.org/en-us/docs/development/issue.html when describing an issue.* **Describe the question** After upgrading from 1.2.0 to 1.3.1, the change in the task JSON format leaves Spark tasks unable to find their jar packages, and the tasks fail with ClassNotFound. Even after applying the MySQL upgrade script, the JSON is not converted into a format that 1.3 can recognize. Did the upgrade script skip the migration for Spark tasks, or is 1.3.1 not compatible with 1.2.0 tasks? As a result, I have to re-edit and re-save every Spark task so that it is refreshed into the 1.3.1 JSON format before it can run. **Which version of DolphinScheduler:** -[1.3.1-preview] **Additional context** **Requirement or improvement** - Please describe your requirements or improvement suggestions.
https://github.com/apache/dolphinscheduler/issues/3364
https://github.com/apache/dolphinscheduler/pull/3378
ff99ef383f647627dc13cfb71f5dd3c9b26ea76c
3744167d52a52892b5ff98d3b07ab250970fdb5d
"2020-07-31T03:56:02Z"
java
"2020-08-03T02:31:27Z"
dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessService.java
List<Integer> failedList = this.findTaskIdByInstanceState(processInstance.getId(), ExecutionStatus.FAILURE); List<Integer> toleranceList = this.findTaskIdByInstanceState(processInstance.getId(), ExecutionStatus.NEED_FAULT_TOLERANCE); List<Integer> killedList = this.findTaskIdByInstanceState(processInstance.getId(), ExecutionStatus.KILL); cmdParam.remove(Constants.CMDPARAM_RECOVERY_START_NODE_STRING); failedList.addAll(killedList); failedList.addAll(toleranceList); for(Integer taskId : failedList){ initTaskInstance(this.findTaskInstanceById(taskId)); } cmdParam.put(Constants.CMDPARAM_RECOVERY_START_NODE_STRING, String.join(Constants.COMMA, convertIntListToString(failedList))); processInstance.setCommandParam(JSONUtils.toJson(cmdParam)); processInstance.setRunTimes(runTime +1 ); break; case START_CURRENT_TASK_PROCESS: break; case RECOVER_WAITTING_THREAD: break; case RECOVER_SUSPENDED_PROCESS: cmdParam.remove(Constants.CMDPARAM_RECOVERY_START_NODE_STRING); List<Integer> suspendedNodeList = this.findTaskIdByInstanceState(processInstance.getId(), ExecutionStatus.PAUSE); List<Integer> stopNodeList = findTaskIdByInstanceState(processInstance.getId(), ExecutionStatus.KILL); suspendedNodeList.addAll(stopNodeList); for(Integer taskId : suspendedNodeList){ initTaskInstance(this.findTaskInstanceById(taskId)); }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,364
After updating the version from 1.2.0 to 1.3.1, running the Spark task fails because it can't find the jar
*For better global communication, please give priority to using English description, thx!* *Please review https://dolphinscheduler.apache.org/en-us/docs/development/issue.html when describing an issue.* **Describe the question** After upgrading from 1.2.0 to 1.3.1, the change in the task JSON format leaves Spark tasks unable to find their jar packages, and the tasks fail with ClassNotFound. Even after applying the MySQL upgrade script, the JSON is not converted into a format that 1.3 can recognize. Did the upgrade script skip the migration for Spark tasks, or is 1.3.1 not compatible with 1.2.0 tasks? As a result, I have to re-edit and re-save every Spark task so that it is refreshed into the 1.3.1 JSON format before it can run. **Which version of DolphinScheduler:** -[1.3.1-preview] **Additional context** **Requirement or improvement** - Please describe your requirements or improvement suggestions.
https://github.com/apache/dolphinscheduler/issues/3364
https://github.com/apache/dolphinscheduler/pull/3378
ff99ef383f647627dc13cfb71f5dd3c9b26ea76c
3744167d52a52892b5ff98d3b07ab250970fdb5d
"2020-07-31T03:56:02Z"
java
"2020-08-03T02:31:27Z"
dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessService.java
cmdParam.put(Constants.CMDPARAM_RECOVERY_START_NODE_STRING, String.join(",", convertIntListToString(suspendedNodeList))); processInstance.setCommandParam(JSONUtils.toJson(cmdParam)); processInstance.setRunTimes(runTime +1); break; case RECOVER_TOLERANCE_FAULT_PROCESS: processInstance.setRecovery(Flag.YES); runStatus = processInstance.getState(); break; case COMPLEMENT_DATA: List<TaskInstance> taskInstanceList = this.findValidTaskListByProcessId(processInstance.getId()); for(TaskInstance taskInstance : taskInstanceList){ taskInstance.setFlag(Flag.NO); this.updateTaskInstance(taskInstance); } initComplementDataParam(processDefinition, processInstance, cmdParam); break; case REPEAT_RUNNING: if(cmdParam.containsKey(Constants.CMDPARAM_RECOVERY_START_NODE_STRING)){ cmdParam.remove(Constants.CMDPARAM_RECOVERY_START_NODE_STRING); processInstance.setCommandParam(JSONUtils.toJson(cmdParam)); } List<TaskInstance> validTaskList = findValidTaskListByProcessId(processInstance.getId()); for(TaskInstance taskInstance : validTaskList){ taskInstance.setFlag(Flag.NO); updateTaskInstance(taskInstance); }
processInstance.setStartTime(new Date()); processInstance.setEndTime(null); processInstance.setRunTimes(runTime +1); initComplementDataParam(processDefinition, processInstance, cmdParam); break; case SCHEDULER: break; default: break; } processInstance.setState(runStatus); return processInstance; } /** * return complement data if the process start with complement data * @param processInstance processInstance * @param command command * @return command type */ private CommandType getCommandTypeIfComplement(ProcessInstance processInstance, Command command){ if(CommandType.COMPLEMENT_DATA == processInstance.getCmdTypeIfComplement()){ return CommandType.COMPLEMENT_DATA; }else{ return command.getCommandType(); } } /** * initialize complement data parameters * @param processDefinition processDefinition * @param processInstance processInstance
* @param cmdParam cmdParam */ private void initComplementDataParam(ProcessDefinition processDefinition, ProcessInstance processInstance, Map<String, String> cmdParam) { if(!processInstance.isComplementData()){ return; } Date startComplementTime = DateUtils.parse(cmdParam.get(CMDPARAM_COMPLEMENT_DATA_START_DATE), YYYY_MM_DD_HH_MM_SS); if(Flag.NO == processInstance.getIsSubProcess()) { processInstance.setScheduleTime(startComplementTime); } processInstance.setGlobalParams(ParameterUtils.curingGlobalParams( processDefinition.getGlobalParamMap(), processDefinition.getGlobalParamList(), CommandType.COMPLEMENT_DATA, processInstance.getScheduleTime())); } /** * set sub work process parameters. * handle sub work process instance, update relation table and command parameters * set sub work process flag, extends parent work process command parameters * @param subProcessInstance subProcessInstance * @return process instance */ public ProcessInstance setSubProcessParam(ProcessInstance subProcessInstance){ String cmdParam = subProcessInstance.getCommandParam(); if(StringUtils.isEmpty(cmdParam)){ return subProcessInstance; }
Map<String, String> paramMap = JSONUtils.toMap(cmdParam); if(paramMap.containsKey(CMDPARAM_SUB_PROCESS) && CMDPARAM_EMPTY_SUB_PROCESS.equals(paramMap.get(CMDPARAM_SUB_PROCESS))){ paramMap.remove(CMDPARAM_SUB_PROCESS); paramMap.put(CMDPARAM_SUB_PROCESS, String.valueOf(subProcessInstance.getId())); subProcessInstance.setCommandParam(JSONUtils.toJson(paramMap)); subProcessInstance.setIsSubProcess(Flag.YES); this.saveProcessInstance(subProcessInstance); } String parentInstanceId = paramMap.get(CMDPARAM_SUB_PROCESS_PARENT_INSTANCE_ID); if(StringUtils.isNotEmpty(parentInstanceId)){ ProcessInstance parentInstance = findProcessInstanceDetailById(Integer.parseInt(parentInstanceId)); if(parentInstance != null){ subProcessInstance.setGlobalParams( joinGlobalParams(parentInstance.getGlobalParams(), subProcessInstance.getGlobalParams())); this.saveProcessInstance(subProcessInstance); }else{ logger.error("sub process command params error, cannot find parent instance: {} ", cmdParam); } } ProcessInstanceMap processInstanceMap = JSONUtils.parseObject(cmdParam, ProcessInstanceMap.class); if(processInstanceMap == null || processInstanceMap.getParentProcessInstanceId() == 0){ return subProcessInstance; } processInstanceMap.setProcessInstanceId(subProcessInstance.getId()); this.updateWorkProcessInstanceMap(processInstanceMap); return subProcessInstance;
} /** * join parent global params into sub process. * only keys that are not already in the sub process global params are joined. * @param parentGlobalParams parentGlobalParams * @param subGlobalParams subGlobalParams * @return global params join */ private String joinGlobalParams(String parentGlobalParams, String subGlobalParams){ List<Property> parentPropertyList = JSONUtils.toList(parentGlobalParams, Property.class); List<Property> subPropertyList = JSONUtils.toList(subGlobalParams, Property.class); Map<String,String> subMap = subPropertyList.stream().collect(Collectors.toMap(Property::getProp, Property::getValue)); for(Property parent : parentPropertyList){ if(!subMap.containsKey(parent.getProp())){ subPropertyList.add(parent); } } return JSONUtils.toJson(subPropertyList); } /** * initialize task instance * @param taskInstance taskInstance */ private void initTaskInstance(TaskInstance taskInstance){ if(!taskInstance.isSubProcess()){ if(taskInstance.getState().typeIsCancel() || taskInstance.getState().typeIsFailure()){ taskInstance.setFlag(Flag.NO); updateTaskInstance(taskInstance); return; }
} taskInstance.setState(ExecutionStatus.SUBMITTED_SUCCESS); updateTaskInstance(taskInstance); } /** * submit task to db * submit sub process to command * @param taskInstance taskInstance * @return task instance */ @Transactional(rollbackFor = Exception.class) public TaskInstance submitTask(TaskInstance taskInstance){ ProcessInstance processInstance = this.findProcessInstanceDetailById(taskInstance.getProcessInstanceId()); logger.info("start submit task : {}, instance id:{}, state: {}", taskInstance.getName(), taskInstance.getProcessInstanceId(), processInstance.getState()); TaskInstance task = submitTaskInstanceToDB(taskInstance, processInstance); if(task == null){ logger.error("end submit task to db error, task name:{}, process id:{} state: {} ", taskInstance.getName(), taskInstance.getProcessInstance(), processInstance.getState()); return task; } if(!task.getState().typeIsFinished()){ createSubWorkProcessCommand(processInstance, task); } logger.info("end submit task to db successfully:{} state:{} complete, instance id:{} state: {} ", taskInstance.getName(), task.getState(), processInstance.getId(), processInstance.getState()); return task; } /**
* set work process instance map * @param parentInstance parentInstance * @param parentTask parentTask * @return process instance map */ private ProcessInstanceMap setProcessInstanceMap(ProcessInstance parentInstance, TaskInstance parentTask){ ProcessInstanceMap processMap = findWorkProcessMapByParent(parentInstance.getId(), parentTask.getId()); if(processMap != null){ return processMap; }else if(parentInstance.getCommandType() == CommandType.REPEAT_RUNNING || parentInstance.isComplementData()){ processMap = findPreviousTaskProcessMap(parentInstance, parentTask); if(processMap!= null){ processMap.setParentTaskInstanceId(parentTask.getId()); updateWorkProcessInstanceMap(processMap); return processMap; } } processMap = new ProcessInstanceMap(); processMap.setParentProcessInstanceId(parentInstance.getId()); processMap.setParentTaskInstanceId(parentTask.getId()); createWorkProcessInstanceMap(processMap); return processMap; } /** * find previous task work process map. * @param parentProcessInstance parentProcessInstance
* @param parentTask parentTask * @return process instance map */ private ProcessInstanceMap findPreviousTaskProcessMap(ProcessInstance parentProcessInstance, TaskInstance parentTask) { Integer preTaskId = 0; List<TaskInstance> preTaskList = this.findPreviousTaskListByWorkProcessId(parentProcessInstance.getId()); for(TaskInstance task : preTaskList){ if(task.getName().equals(parentTask.getName())){ preTaskId = task.getId(); ProcessInstanceMap map = findWorkProcessMapByParent(parentProcessInstance.getId(), preTaskId); if(map!=null){ return map; } } } logger.info("sub process instance is not found,parent task:{},parent instance:{}", parentTask.getId(), parentProcessInstance.getId()); return null; } /** * create sub work process command * @param parentProcessInstance parentProcessInstance * @param task task */ private void createSubWorkProcessCommand(ProcessInstance parentProcessInstance, TaskInstance task){ if(!task.isSubProcess()){ return; }
ProcessInstanceMap instanceMap = setProcessInstanceMap(parentProcessInstance, task); TaskNode taskNode = JSONUtils.parseObject(task.getTaskJson(), TaskNode.class); Map<String, String> subProcessParam = JSONUtils.toMap(taskNode.getParams()); Integer childDefineId = Integer.parseInt(subProcessParam.get(Constants.CMDPARAM_SUB_PROCESS_DEFINE_ID)); ProcessInstance childInstance = findSubProcessInstance(parentProcessInstance.getId(), task.getId()); CommandType fatherType = parentProcessInstance.getCommandType(); CommandType commandType = fatherType; if(childInstance == null){ String fatherHistoryCommand = parentProcessInstance.getHistoryCmd(); if(fatherHistoryCommand.startsWith(CommandType.SCHEDULER.toString()) || fatherHistoryCommand.startsWith(CommandType.COMPLEMENT_DATA.toString())){ commandType = CommandType.valueOf(fatherHistoryCommand.split(Constants.COMMA)[0]); } } if(childInstance != null){ childInstance.setState(ExecutionStatus.SUBMITTED_SUCCESS); updateProcessInstance(childInstance); } String processMapStr = JSONUtils.toJson(instanceMap); Map<String, String> cmdParam = JSONUtils.toMap(processMapStr); if(commandType == CommandType.COMPLEMENT_DATA || (childInstance != null && childInstance.isComplementData())){ Map<String, String> parentParam = JSONUtils.toMap(parentProcessInstance.getCommandParam()); String endTime = parentParam.get(CMDPARAM_COMPLEMENT_DATA_END_DATE); String startTime = parentParam.get(CMDPARAM_COMPLEMENT_DATA_START_DATE); cmdParam.put(CMDPARAM_COMPLEMENT_DATA_END_DATE, endTime); cmdParam.put(CMDPARAM_COMPLEMENT_DATA_START_DATE, startTime);
processMapStr = JSONUtils.toJson(cmdParam); } updateSubProcessDefinitionByParent(parentProcessInstance, childDefineId); Command command = new Command(); command.setWarningType(parentProcessInstance.getWarningType()); command.setWarningGroupId(parentProcessInstance.getWarningGroupId()); command.setFailureStrategy(parentProcessInstance.getFailureStrategy()); command.setProcessDefinitionId(childDefineId); command.setScheduleTime(parentProcessInstance.getScheduleTime()); command.setExecutorId(parentProcessInstance.getExecutorId()); command.setCommandParam(processMapStr); command.setCommandType(commandType); command.setProcessInstancePriority(parentProcessInstance.getProcessInstancePriority()); command.setWorkerGroup(parentProcessInstance.getWorkerGroup()); createCommand(command); logger.info("sub process command created: {} ", command.toString()); } /** * update sub process definition * @param parentProcessInstance parentProcessInstance * @param childDefinitionId childDefinitionId */ private void updateSubProcessDefinitionByParent(ProcessInstance parentProcessInstance, int childDefinitionId) { ProcessDefinition fatherDefinition = this.findProcessDefineById(parentProcessInstance.getProcessDefinitionId()); ProcessDefinition childDefinition = this.findProcessDefineById(childDefinitionId); if(childDefinition != null && fatherDefinition != null){ childDefinition.setReceivers(fatherDefinition.getReceivers()); childDefinition.setReceiversCc(fatherDefinition.getReceiversCc()); processDefineMapper.updateById(childDefinition); }
} /** * submit task to mysql * @param taskInstance taskInstance * @param processInstance processInstance * @return task instance */ public TaskInstance submitTaskInstanceToDB(TaskInstance taskInstance, ProcessInstance processInstance){ ExecutionStatus processInstanceState = processInstance.getState(); if(taskInstance.getState().typeIsFailure()){ if(taskInstance.isSubProcess()){ taskInstance.setRetryTimes(taskInstance.getRetryTimes() + 1 ); }else { if( processInstanceState != ExecutionStatus.READY_STOP && processInstanceState != ExecutionStatus.READY_PAUSE){ taskInstance.setFlag(Flag.NO); updateTaskInstance(taskInstance); if(taskInstance.getState() != ExecutionStatus.NEED_FAULT_TOLERANCE){ taskInstance.setRetryTimes(taskInstance.getRetryTimes() + 1 ); } taskInstance.setEndTime(null); taskInstance.setStartTime(new Date()); taskInstance.setFlag(Flag.YES); taskInstance.setHost(null); taskInstance.setId(0); } } }
taskInstance.setExecutorId(processInstance.getExecutorId()); taskInstance.setProcessInstancePriority(processInstance.getProcessInstancePriority()); taskInstance.setState(getSubmitTaskState(taskInstance, processInstanceState)); taskInstance.setSubmitTime(new Date()); boolean saveResult = saveTaskInstance(taskInstance); if(!saveResult){ return null; } return taskInstance; } /** * ${processInstancePriority}_${processInstanceId}_${taskInstancePriority}_${taskInstanceId}_${task executed by ip1},${ip2}... * The tasks with the highest priority are selected by comparing the priorities of the above four levels from high to low. * @param taskInstance taskInstance * @return task zk queue str */ public String taskZkInfo(TaskInstance taskInstance) { String taskWorkerGroup = getTaskWorkerGroup(taskInstance); ProcessInstance processInstance = this.findProcessInstanceById(taskInstance.getProcessInstanceId()); if(processInstance == null){ logger.error("process instance is null. please check the task info, task id: " + taskInstance.getId()); return ""; } StringBuilder sb = new StringBuilder(100); sb.append(processInstance.getProcessInstancePriority().ordinal()).append(Constants.UNDERLINE) .append(taskInstance.getProcessInstanceId()).append(Constants.UNDERLINE) .append(taskInstance.getTaskInstancePriority().ordinal()).append(Constants.UNDERLINE) .append(taskInstance.getId()).append(Constants.UNDERLINE) .append(taskInstance.getWorkerGroup()); return sb.toString();
} /** * get submit task instance state by the work process state * the task state cannot be modified when it is running/killed/submitted successfully, or when this * task instance already exists in the task queue. * return pause if work process state is ready pause * return stop if work process state is ready stop * if none of the above applies, return submit success * * @param taskInstance taskInstance * @param processInstanceState processInstanceState * @return process instance state */ public ExecutionStatus getSubmitTaskState(TaskInstance taskInstance, ExecutionStatus processInstanceState){ ExecutionStatus state = taskInstance.getState(); if( state == ExecutionStatus.RUNNING_EXEUTION || state == ExecutionStatus.KILL || checkTaskExistsInTaskQueue(taskInstance) ){ return state; } if( processInstanceState == ExecutionStatus.READY_PAUSE){ state = ExecutionStatus.PAUSE; }else if(processInstanceState == ExecutionStatus.READY_STOP
|| !checkProcessStrategy(taskInstance)) { state = ExecutionStatus.KILL; }else{ state = ExecutionStatus.SUBMITTED_SUCCESS; } return state; } /** * check process instance strategy * @param taskInstance taskInstance * @return check strategy result */ private boolean checkProcessStrategy(TaskInstance taskInstance){ ProcessInstance processInstance = this.findProcessInstanceById(taskInstance.getProcessInstanceId()); FailureStrategy failureStrategy = processInstance.getFailureStrategy(); if(failureStrategy == FailureStrategy.CONTINUE){ return true; } List<TaskInstance> taskInstances = this.findValidTaskListByProcessId(taskInstance.getProcessInstanceId()); for(TaskInstance task : taskInstances){ if(task.getState() == ExecutionStatus.FAILURE){ return false; } } return true; } /** * check the task instance existing in queue * @param taskInstance taskInstance * @return whether taskinstance exists queue
*/ public boolean checkTaskExistsInTaskQueue(TaskInstance taskInstance){ if(taskInstance.isSubProcess()){ return false; } String taskZkInfo = taskZkInfo(taskInstance); /* note: taskZkInfo is built but never used; this check always returns false in this version */ return false; } /** * create a new process instance * @param processInstance processInstance */ public void createProcessInstance(ProcessInstance processInstance){ if (processInstance != null){ processInstanceMapper.insert(processInstance); } } /** * insert or update work process instance to database * @param processInstance processInstance */ public void saveProcessInstance(ProcessInstance processInstance){ if (processInstance == null){ logger.error("save error, process instance is null!"); return ; } if(processInstance.getId() != 0){ processInstanceMapper.updateById(processInstance); }else{ createProcessInstance(processInstance);
} } /** * insert or update command * @param command command * @return save command result */ public int saveCommand(Command command){ if(command.getId() != 0){ return commandMapper.updateById(command); }else{ return commandMapper.insert(command); } } /** * insert or update task instance * @param taskInstance taskInstance * @return save task instance result */ public boolean saveTaskInstance(TaskInstance taskInstance){ if(taskInstance.getId() != 0){ return updateTaskInstance(taskInstance); }else{ return createTaskInstance(taskInstance); } } /** * insert task instance * @param taskInstance taskInstance * @return create task instance result
*/ public boolean createTaskInstance(TaskInstance taskInstance) { int count = taskInstanceMapper.insert(taskInstance); return count > 0; } /** * update task instance * @param taskInstance taskInstance * @return update task instance result */ public boolean updateTaskInstance(TaskInstance taskInstance){ int count = taskInstanceMapper.updateById(taskInstance); return count > 0; } /** * delete a command by id * @param id id */ public void delCommandByid(int id) { commandMapper.deleteById(id); } /** * find task instance by id * @param taskId task id * @return task instance */ public TaskInstance findTaskInstanceById(Integer taskId){ return taskInstanceMapper.selectById(taskId); } /**
* package task instance, associate processInstance and processDefine * @param taskInstId taskInstId * @return task instance */ public TaskInstance getTaskInstanceDetailByTaskId(int taskInstId){ /* get task instance */ TaskInstance taskInstance = findTaskInstanceById(taskInstId); if(taskInstance == null){ return taskInstance; } /* get its process instance */ ProcessInstance processInstance = findProcessInstanceDetailById(taskInstance.getProcessInstanceId()); /* get its process definition */ ProcessDefinition processDefine = findProcessDefineById(taskInstance.getProcessDefinitionId()); taskInstance.setProcessInstance(processInstance); taskInstance.setProcessDefine(processDefine); return taskInstance; } /** * get id list by task state * @param instanceId instanceId * @param state state * @return task instance states */ public List<Integer> findTaskIdByInstanceState(int instanceId, ExecutionStatus state){ return taskInstanceMapper.queryTaskByProcessIdAndState(instanceId, state.ordinal()); } /** * find valid task list by process definition id * @param processInstanceId processInstanceId
* @return task instance list */ public List<TaskInstance> findValidTaskListByProcessId(Integer processInstanceId){ return taskInstanceMapper.findValidTaskListByProcessId(processInstanceId, Flag.YES); } /** * find previous task list by work process id * @param processInstanceId processInstanceId * @return task instance list */ public List<TaskInstance> findPreviousTaskListByWorkProcessId(Integer processInstanceId){ return taskInstanceMapper.findValidTaskListByProcessId(processInstanceId, Flag.NO); } /** * update work process instance map * @param processInstanceMap processInstanceMap * @return update process instance result */ public int updateWorkProcessInstanceMap(ProcessInstanceMap processInstanceMap){ return processInstanceMapMapper.updateById(processInstanceMap); } /** * create work process instance map * @param processInstanceMap processInstanceMap * @return create process instance result */ public int createWorkProcessInstanceMap(ProcessInstanceMap processInstanceMap){ Integer count = 0; if(processInstanceMap !=null){ return processInstanceMapMapper.insert(processInstanceMap);
} return count; } /** * find work process map by parent process id and parent task id. * @param parentWorkProcessId parentWorkProcessId * @param parentTaskId parentTaskId * @return process instance map */ public ProcessInstanceMap findWorkProcessMapByParent(Integer parentWorkProcessId, Integer parentTaskId){ return processInstanceMapMapper.queryByParentId(parentWorkProcessId, parentTaskId); } /** * delete work process map by parent process id * @param parentWorkProcessId parentWorkProcessId * @return delete process map result */ public int deleteWorkProcessMapByParentId(int parentWorkProcessId){ return processInstanceMapMapper.deleteByParentProcessId(parentWorkProcessId); } /** * find sub process instance * @param parentProcessId parentProcessId * @param parentTaskId parentTaskId * @return process instance */ public ProcessInstance findSubProcessInstance(Integer parentProcessId, Integer parentTaskId){ ProcessInstance processInstance = null; ProcessInstanceMap processInstanceMap = processInstanceMapMapper.queryByParentId(parentProcessId, parentTaskId); if(processInstanceMap == null || processInstanceMap.getProcessInstanceId() == 0){
return processInstance; } processInstance = findProcessInstanceById(processInstanceMap.getProcessInstanceId()); return processInstance; } /** * find parent process instance * @param subProcessId subProcessId * @return process instance */ public ProcessInstance findParentProcessInstance(Integer subProcessId) { ProcessInstance processInstance = null; ProcessInstanceMap processInstanceMap = processInstanceMapMapper.queryBySubProcessId(subProcessId); if(processInstanceMap == null || processInstanceMap.getProcessInstanceId() == 0){ return processInstance; } processInstance = findProcessInstanceById(processInstanceMap.getParentProcessInstanceId()); return processInstance; } /** * change task state * @param state state * @param startTime startTime * @param host host * @param executePath executePath * @param logPath logPath * @param taskInstId taskInstId */ public void changeTaskState(ExecutionStatus state, Date startTime, String host, String executePath,
String logPath, int taskInstId) { TaskInstance taskInstance = taskInstanceMapper.selectById(taskInstId); taskInstance.setState(state); taskInstance.setStartTime(startTime); taskInstance.setHost(host); taskInstance.setExecutePath(executePath); taskInstance.setLogPath(logPath); saveTaskInstance(taskInstance); } /** * update process instance * @param processInstance processInstance * @return update process instance result */ public int updateProcessInstance(ProcessInstance processInstance){ return processInstanceMapper.updateById(processInstance); } /** * update the process instance * @param processInstanceId processInstanceId * @param processJson processJson * @param globalParams globalParams * @param scheduleTime scheduleTime * @param flag flag * @param locations locations * @param connects connects * @return update process instance result */ public int updateProcessInstance(Integer processInstanceId, String processJson,
String globalParams, Date scheduleTime, Flag flag, String locations, String connects){ ProcessInstance processInstance = processInstanceMapper.queryDetailById(processInstanceId); if(processInstance!= null){ processInstance.setProcessInstanceJson(processJson); processInstance.setGlobalParams(globalParams); processInstance.setScheduleTime(scheduleTime); processInstance.setLocations(locations); processInstance.setConnects(connects); return processInstanceMapper.updateById(processInstance); } return 0; } /** * change task state * @param state state * @param endTime endTime * @param taskInstId taskInstId */ public void changeTaskState(ExecutionStatus state, Date endTime, int processId, String appIds, int taskInstId) { TaskInstance taskInstance = taskInstanceMapper.selectById(taskInstId); taskInstance.setPid(processId); taskInstance.setAppLink(appIds); taskInstance.setState(state); taskInstance.setEndTime(endTime); saveTaskInstance(taskInstance);
} /** * convert integer list to string list * @param intList intList * @return string list */ public List<String> convertIntListToString(List<Integer> intList){ if(intList == null){ return new ArrayList<>(); } List<String> result = new ArrayList<String>(intList.size()); for(Integer intVar : intList){ result.add(String.valueOf(intVar)); } return result; } /** * update pid and app links field by task instance id * @param taskInstId taskInstId * @param pid pid * @param appLinks appLinks */ public void updatePidByTaskInstId(int taskInstId, int pid,String appLinks) { TaskInstance taskInstance = taskInstanceMapper.selectById(taskInstId); taskInstance.setPid(pid); taskInstance.setAppLink(appLinks); saveTaskInstance(taskInstance); } /** * query schedule by id
* @param id id * @return schedule */ public Schedule querySchedule(int id) { return scheduleMapper.selectById(id); } /** * query Schedule by processDefinitionId * @param processDefinitionId processDefinitionId * @see Schedule */ public List<Schedule> queryReleaseSchedulerListByProcessDefinitionId(int processDefinitionId) { return scheduleMapper.queryReleaseSchedulerListByProcessDefinitionId(processDefinitionId); } /** * query need failover process instance * @param host host * @return process instance list */ public List<ProcessInstance> queryNeedFailoverProcessInstances(String host){ return processInstanceMapper.queryByHostAndStatus(host, stateArray); } /** * process need failover process instance * @param processInstance processInstance */ @Transactional(rollbackFor = Exception.class) public void processNeedFailoverProcessInstances(ProcessInstance processInstance){ /* clear the host of the process instance that needs failover */ processInstance.setHost(Constants.NULL);
processInstanceMapper.updateById(processInstance); /* insert a recover-tolerance-fault command for the failed-over instance */ Command cmd = new Command(); cmd.setProcessDefinitionId(processInstance.getProcessDefinitionId()); cmd.setCommandParam(String.format("{\"%s\":%d}", Constants.CMDPARAM_RECOVER_PROCESS_ID_STRING, processInstance.getId())); cmd.setExecutorId(processInstance.getExecutorId()); cmd.setCommandType(CommandType.RECOVER_TOLERANCE_FAULT_PROCESS); createCommand(cmd); } /** * query all need failover task instances by host * @param host host * @return task instance list */ public List<TaskInstance> queryNeedFailoverTaskInstances(String host){ return taskInstanceMapper.queryByHostAndStatus(host, stateArray); } /** * find data source by id * @param id id * @return datasource */ public DataSource findDataSourceById(int id){ return dataSourceMapper.selectById(id); } /** * update process instance state by id * @param processInstanceId processInstanceId * @param executionStatus executionStatus
* @return update process result */ public int updateProcessInstanceState(Integer processInstanceId, ExecutionStatus executionStatus) { ProcessInstance instance = processInstanceMapper.selectById(processInstanceId); instance.setState(executionStatus); return processInstanceMapper.updateById(instance); } /** * find process instance by the task id * @param taskId taskId * @return process instance */ public ProcessInstance findProcessInstanceByTaskId(int taskId){ TaskInstance taskInstance = taskInstanceMapper.selectById(taskId); if(taskInstance!= null){ return processInstanceMapper.selectById(taskInstance.getProcessInstanceId()); } return null; } /** * find udf function list by id list string * @param ids ids * @return udf function list */ public List<UdfFunc> queryUdfFunListByids(int[] ids){ return udfFuncMapper.queryUdfByIdStr(ids, null); } /** * find tenant code by resource name * @param resName resource name
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,364
After upgrading from 1.2.0 to 1.3.1, running a Spark task fails because it can't find the jar
*For better global communication, please give priority to using English description, thx! * *Please review https://dolphinscheduler.apache.org/en-us/docs/development/issue.html when describing an issue.* **Describe the question** After upgrading from 1.2.0 to 1.3.1, the task JSON format changed, so Spark tasks can no longer find their jar and fail with ClassNotFound. Even after applying the MySQL upgrade script, the task JSON was not converted into a format that 1.3 recognizes. Does the upgrade script skip migrating Spark task definitions? Or is 1.3.1 not compatible with 1.2.0 tasks? As a result I have to re-edit and save every Spark task so it is refreshed into the 1.3.1 JSON format before it can run. **Which version of DolphinScheduler:** -[1.3.1-preview] **Additional context** **Requirement or improvement** - Please describe your requirements or improvement suggestions.
https://github.com/apache/dolphinscheduler/issues/3364
https://github.com/apache/dolphinscheduler/pull/3378
ff99ef383f647627dc13cfb71f5dd3c9b26ea76c
3744167d52a52892b5ff98d3b07ab250970fdb5d
"2020-07-31T03:56:02Z"
java
"2020-08-03T02:31:27Z"
dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessService.java
* @param resourceType resource type * @return tenant code */ public String queryTenantCodeByResName(String resName,ResourceType resourceType){ return resourceMapper.queryTenantCodeByResourceName(resName, resourceType.ordinal()); } /** * find schedule list by process define id. * @param ids ids * @return schedule list */ public List<Schedule> selectAllByProcessDefineId(int[] ids){ return scheduleMapper.selectAllByProcessDefineArray( ids); } /** * get dependency cycle by work process define id and scheduler fire time * @param masterId masterId * @param processDefinitionId processDefinitionId * @param scheduledFireTime the time the task schedule is expected to trigger * @return CycleDependency * @throws Exception if error throws Exception */ public CycleDependency getCycleDependency(int masterId, int processDefinitionId, Date scheduledFireTime) throws Exception { List<CycleDependency> list = getCycleDependencies(masterId,new int[]{processDefinitionId},scheduledFireTime); return list.size()>0 ? list.get(0) : null; } /** * get dependency cycle list by work process define id list and scheduler fire time * @param masterId masterId
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,364
After upgrading from 1.2.0 to 1.3.1, running a Spark task fails because it can't find the jar
*For better global communication, please give priority to using English description, thx! * *Please review https://dolphinscheduler.apache.org/en-us/docs/development/issue.html when describing an issue.* **Describe the question** After upgrading from 1.2.0 to 1.3.1, the task JSON format changed, so Spark tasks can no longer find their jar and fail with ClassNotFound. Even after applying the MySQL upgrade script, the task JSON was not converted into a format that 1.3 recognizes. Does the upgrade script skip migrating Spark task definitions? Or is 1.3.1 not compatible with 1.2.0 tasks? As a result I have to re-edit and save every Spark task so it is refreshed into the 1.3.1 JSON format before it can run. **Which version of DolphinScheduler:** -[1.3.1-preview] **Additional context** **Requirement or improvement** - Please describe your requirements or improvement suggestions.
https://github.com/apache/dolphinscheduler/issues/3364
https://github.com/apache/dolphinscheduler/pull/3378
ff99ef383f647627dc13cfb71f5dd3c9b26ea76c
3744167d52a52892b5ff98d3b07ab250970fdb5d
"2020-07-31T03:56:02Z"
java
"2020-08-03T02:31:27Z"
dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessService.java
* @param ids ids * @param scheduledFireTime the time the task schedule is expected to trigger * @return CycleDependency list * @throws Exception if error throws Exception */ public List<CycleDependency> getCycleDependencies(int masterId,int[] ids,Date scheduledFireTime) throws Exception { List<CycleDependency> cycleDependencyList = new ArrayList<CycleDependency>(); if(ArrayUtils.isEmpty(ids)){ logger.warn("ids[] is empty!is invalid!"); return cycleDependencyList; } if(scheduledFireTime == null){ logger.warn("scheduledFireTime is null!is invalid!"); return cycleDependencyList; } String strCrontab = ""; CronExpression depCronExpression; Cron depCron; List<Date> list; List<Schedule> schedules = this.selectAllByProcessDefineId(ids); // for(Schedule depSchedule:schedules){ strCrontab = depSchedule.getCrontab(); depCronExpression = CronUtils.parse2CronExpression(strCrontab); depCron = CronUtils.parse2Cron(strCrontab); CycleEnum cycleEnum = CronUtils.getMiniCycle(depCron); if(cycleEnum == null){ logger.error("{} is not valid",strCrontab); continue; }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,364
After upgrading from 1.2.0 to 1.3.1, running a Spark task fails because it can't find the jar
*For better global communication, please give priority to using English description, thx! * *Please review https://dolphinscheduler.apache.org/en-us/docs/development/issue.html when describing an issue.* **Describe the question** After upgrading from 1.2.0 to 1.3.1, the task JSON format changed, so Spark tasks can no longer find their jar and fail with ClassNotFound. Even after applying the MySQL upgrade script, the task JSON was not converted into a format that 1.3 recognizes. Does the upgrade script skip migrating Spark task definitions? Or is 1.3.1 not compatible with 1.2.0 tasks? As a result I have to re-edit and save every Spark task so it is refreshed into the 1.3.1 JSON format before it can run. **Which version of DolphinScheduler:** -[1.3.1-preview] **Additional context** **Requirement or improvement** - Please describe your requirements or improvement suggestions.
https://github.com/apache/dolphinscheduler/issues/3364
https://github.com/apache/dolphinscheduler/pull/3378
ff99ef383f647627dc13cfb71f5dd3c9b26ea76c
3744167d52a52892b5ff98d3b07ab250970fdb5d
"2020-07-31T03:56:02Z"
java
"2020-08-03T02:31:27Z"
dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessService.java
Calendar calendar = Calendar.getInstance(); switch (cycleEnum){ /*case MINUTE: calendar.add(Calendar.MINUTE,-61);*/ case HOUR: calendar.add(Calendar.HOUR,-25); break; case DAY: calendar.add(Calendar.DATE,-32); break; case WEEK: calendar.add(Calendar.DATE,-32); break; case MONTH: calendar.add(Calendar.MONTH,-13); break; default: logger.warn("Dependent process definition's cycleEnum is {},not support!!", cycleEnum.name()); continue; } Date start = calendar.getTime(); if(depSchedule.getProcessDefinitionId() == masterId){ list = CronUtils.getSelfFireDateList(start, scheduledFireTime, depCronExpression); }else { list = CronUtils.getFireDateList(start, scheduledFireTime, depCronExpression); } if(list.size()>=1){ start = list.get(list.size()-1); CycleDependency dependency = new CycleDependency(depSchedule.getProcessDefinitionId(),start, CronUtils.getExpirationTime(start, cycleEnum), cycleEnum); cycleDependencyList.add(dependency);
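The dependency-cycle logic above asks CronUtils for the fire times of a crontab inside a look-back window sized by the schedule's cycle. A hedged sketch of the same idea using the Quartz CronExpression API directly (which CronUtils wraps); the crontab and the 32-day window are illustrative assumptions.

```java
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.List;

import org.quartz.CronExpression;

public class FireDateSketch {

    // enumerate every fire time of a crontab strictly after start and before end
    public static List<Date> fireDates(String crontab, Date start, Date end) throws Exception {
        CronExpression cron = new CronExpression(crontab);
        List<Date> result = new ArrayList<>();
        Date next = cron.getNextValidTimeAfter(start);
        while (next != null && next.before(end)) {
            result.add(next);
            next = cron.getNextValidTimeAfter(next);
        }
        return result;
    }

    public static void main(String[] args) throws Exception {
        Calendar lookBack = Calendar.getInstance();
        lookBack.add(Calendar.DATE, -32);   // daily-cycle look-back, mirroring the switch above
        System.out.println(fireDates("0 0 2 * * ?", lookBack.getTime(), new Date()));
    }
}
```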
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,364
After upgrading from 1.2.0 to 1.3.1, running a Spark task fails because it can't find the jar
*For better global communication, please give priority to using English description, thx! * *Please review https://dolphinscheduler.apache.org/en-us/docs/development/issue.html when describing an issue.* **Describe the question** After upgrading from 1.2.0 to 1.3.1, the task JSON format changed, so Spark tasks can no longer find their jar and fail with ClassNotFound. Even after applying the MySQL upgrade script, the task JSON was not converted into a format that 1.3 recognizes. Does the upgrade script skip migrating Spark task definitions? Or is 1.3.1 not compatible with 1.2.0 tasks? As a result I have to re-edit and save every Spark task so it is refreshed into the 1.3.1 JSON format before it can run. **Which version of DolphinScheduler:** -[1.3.1-preview] **Additional context** **Requirement or improvement** - Please describe your requirements or improvement suggestions.
https://github.com/apache/dolphinscheduler/issues/3364
https://github.com/apache/dolphinscheduler/pull/3378
ff99ef383f647627dc13cfb71f5dd3c9b26ea76c
3744167d52a52892b5ff98d3b07ab250970fdb5d
"2020-07-31T03:56:02Z"
java
"2020-08-03T02:31:27Z"
dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessService.java
} } return cycleDependencyList; } /** * find last scheduler process instance in the date interval * @param definitionId definitionId * @param dateInterval dateInterval * @return process instance */ public ProcessInstance findLastSchedulerProcessInterval(int definitionId, DateInterval dateInterval) { return processInstanceMapper.queryLastSchedulerProcess(definitionId, dateInterval.getStartTime(), dateInterval.getEndTime()); } /** * find last manual process instance interval * @param definitionId process definition id * @param dateInterval dateInterval * @return process instance */ public ProcessInstance findLastManualProcessInterval(int definitionId, DateInterval dateInterval) { return processInstanceMapper.queryLastManualProcess(definitionId, dateInterval.getStartTime(), dateInterval.getEndTime()); } /** * find last running process instance * @param definitionId process definition id * @param startTime start time
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,364
After upgrading from 1.2.0 to 1.3.1, running a Spark task fails because it can't find the jar
*For better global communication, please give priority to using English description, thx! * *Please review https://dolphinscheduler.apache.org/en-us/docs/development/issue.html when describing an issue.* **Describe the question** After upgrading from 1.2.0 to 1.3.1, the task JSON format changed, so Spark tasks can no longer find their jar and fail with ClassNotFound. Even after applying the MySQL upgrade script, the task JSON was not converted into a format that 1.3 recognizes. Does the upgrade script skip migrating Spark task definitions? Or is 1.3.1 not compatible with 1.2.0 tasks? As a result I have to re-edit and save every Spark task so it is refreshed into the 1.3.1 JSON format before it can run. **Which version of DolphinScheduler:** -[1.3.1-preview] **Additional context** **Requirement or improvement** - Please describe your requirements or improvement suggestions.
https://github.com/apache/dolphinscheduler/issues/3364
https://github.com/apache/dolphinscheduler/pull/3378
ff99ef383f647627dc13cfb71f5dd3c9b26ea76c
3744167d52a52892b5ff98d3b07ab250970fdb5d
"2020-07-31T03:56:02Z"
java
"2020-08-03T02:31:27Z"
dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessService.java
* @param endTime end time * @return process instance */ public ProcessInstance findLastRunningProcess(int definitionId, Date startTime, Date endTime) { return processInstanceMapper.queryLastRunningProcess(definitionId, startTime, endTime, stateArray); } /** * query user queue by process instance id * @param processInstanceId processInstanceId * @return queue */ public String queryUserQueueByProcessInstanceId(int processInstanceId){ String queue = ""; ProcessInstance processInstance = processInstanceMapper.selectById(processInstanceId); if(processInstance == null){ return queue; } User executor = userMapper.selectById(processInstance.getExecutorId()); if(executor != null){ queue = executor.getQueue(); } return queue; } /** * get task worker group * @param taskInstance taskInstance * @return workerGroupId
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,364
After upgrading from 1.2.0 to 1.3.1, running a Spark task fails because it can't find the jar
*For better global communication, please give priority to using English description, thx! * *Please review https://dolphinscheduler.apache.org/en-us/docs/development/issue.html when describing an issue.* **Describe the question** After upgrading from 1.2.0 to 1.3.1, the task JSON format changed, so Spark tasks can no longer find their jar and fail with ClassNotFound. Even after applying the MySQL upgrade script, the task JSON was not converted into a format that 1.3 recognizes. Does the upgrade script skip migrating Spark task definitions? Or is 1.3.1 not compatible with 1.2.0 tasks? As a result I have to re-edit and save every Spark task so it is refreshed into the 1.3.1 JSON format before it can run. **Which version of DolphinScheduler:** -[1.3.1-preview] **Additional context** **Requirement or improvement** - Please describe your requirements or improvement suggestions.
https://github.com/apache/dolphinscheduler/issues/3364
https://github.com/apache/dolphinscheduler/pull/3378
ff99ef383f647627dc13cfb71f5dd3c9b26ea76c
3744167d52a52892b5ff98d3b07ab250970fdb5d
"2020-07-31T03:56:02Z"
java
"2020-08-03T02:31:27Z"
dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessService.java
*/ public String getTaskWorkerGroup(TaskInstance taskInstance) { String workerGroup = taskInstance.getWorkerGroup(); if(StringUtils.isNotBlank(workerGroup)){ return workerGroup; } int processInstanceId = taskInstance.getProcessInstanceId(); ProcessInstance processInstance = findProcessInstanceById(processInstanceId); if(processInstance != null){ return processInstance.getWorkerGroup(); } logger.info("task : {} will use default worker group", taskInstance.getId()); return Constants.DEFAULT_WORKER_GROUP; } /** * get have perm project list * @param userId userId * @return project list */ public List<Project> getProjectListHavePerm(int userId){ List<Project> createProjects = projectMapper.queryProjectCreatedByUser(userId); List<Project> authedProjects = projectMapper.queryAuthedProjectListByUserId(userId); if(createProjects == null){ createProjects = new ArrayList<>(); } if(authedProjects != null){ createProjects.addAll(authedProjects); } return createProjects; }
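For context, a hedged caller-side sketch of the fallback implemented by getTaskWorkerGroup above: the task's own group wins, then the process instance's group, and finally the default group. The wrapper class is an assumption for illustration, not project code.

```java
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.dao.entity.TaskInstance;
import org.apache.dolphinscheduler.service.process.ProcessService;

public class WorkerGroupSketch {

    // hypothetical caller: decide which worker group should execute the task
    public static String resolve(ProcessService processService, TaskInstance taskInstance) {
        String group = processService.getTaskWorkerGroup(taskInstance);
        // getTaskWorkerGroup already falls back to Constants.DEFAULT_WORKER_GROUP,
        // so the null guard here is purely defensive
        return group != null ? group : Constants.DEFAULT_WORKER_GROUP;
    }
}
```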
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,364
After upgrading from 1.2.0 to 1.3.1, running a Spark task fails because it can't find the jar
*For better global communication, please give priority to using English description, thx! * *Please review https://dolphinscheduler.apache.org/en-us/docs/development/issue.html when describing an issue.* **Describe the question** After upgrading from 1.2.0 to 1.3.1, the task JSON format changed, so Spark tasks can no longer find their jar and fail with ClassNotFound. Even after applying the MySQL upgrade script, the task JSON was not converted into a format that 1.3 recognizes. Does the upgrade script skip migrating Spark task definitions? Or is 1.3.1 not compatible with 1.2.0 tasks? As a result I have to re-edit and save every Spark task so it is refreshed into the 1.3.1 JSON format before it can run. **Which version of DolphinScheduler:** -[1.3.1-preview] **Additional context** **Requirement or improvement** - Please describe your requirements or improvement suggestions.
https://github.com/apache/dolphinscheduler/issues/3364
https://github.com/apache/dolphinscheduler/pull/3378
ff99ef383f647627dc13cfb71f5dd3c9b26ea76c
3744167d52a52892b5ff98d3b07ab250970fdb5d
"2020-07-31T03:56:02Z"
java
"2020-08-03T02:31:27Z"
dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessService.java
/** * get have perm project ids * @param userId userId * @return project ids */ public List<Integer> getProjectIdListHavePerm(int userId){ List<Integer> projectIdList = new ArrayList<>(); for(Project project : getProjectListHavePerm(userId)){ projectIdList.add(project.getId()); } return projectIdList; } /** * list unauthorized udf function * @param userId user id * @param needChecks data source id array * @return unauthorized udf function list */ public <T> List<T> listUnauthorized(int userId,T[] needChecks,AuthorizationType authorizationType){ List<T> resultList = new ArrayList<T>(); if (!ArrayUtils.isEmpty(needChecks)) { Set<T> originResSet = new HashSet<T>(Arrays.asList(needChecks)); switch (authorizationType){ case RESOURCE_FILE_ID: Set<Integer> authorizedResourceFiles = resourceMapper.listAuthorizedResourceById(userId, needChecks).stream().map(t -> t.getId()).collect(toSet()); originResSet.removeAll(authorizedResourceFiles); break; case RESOURCE_FILE_NAME: Set<String> authorizedResources = resourceMapper.listAuthorizedResource(userId, needChecks).stream().map(t -> t.getFullName()).collect(toSet()); originResSet.removeAll(authorizedResources);
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,364
After upgrading from 1.2.0 to 1.3.1, running a Spark task fails because it can't find the jar
*For better global communication, please give priority to using English description, thx! * *Please review https://dolphinscheduler.apache.org/en-us/docs/development/issue.html when describing an issue.* **Describe the question** After upgrading from 1.2.0 to 1.3.1, the task JSON format changed, so Spark tasks can no longer find their jar and fail with ClassNotFound. Even after applying the MySQL upgrade script, the task JSON was not converted into a format that 1.3 recognizes. Does the upgrade script skip migrating Spark task definitions? Or is 1.3.1 not compatible with 1.2.0 tasks? As a result I have to re-edit and save every Spark task so it is refreshed into the 1.3.1 JSON format before it can run. **Which version of DolphinScheduler:** -[1.3.1-preview] **Additional context** **Requirement or improvement** - Please describe your requirements or improvement suggestions.
https://github.com/apache/dolphinscheduler/issues/3364
https://github.com/apache/dolphinscheduler/pull/3378
ff99ef383f647627dc13cfb71f5dd3c9b26ea76c
3744167d52a52892b5ff98d3b07ab250970fdb5d
"2020-07-31T03:56:02Z"
java
"2020-08-03T02:31:27Z"
dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessService.java
break; case UDF_FILE: Set<Integer> authorizedUdfFiles = resourceMapper.listAuthorizedResourceById(userId, needChecks).stream().map(t -> t.getId()).collect(toSet()); originResSet.removeAll(authorizedUdfFiles); break; case DATASOURCE: Set<Integer> authorizedDatasources = dataSourceMapper.listAuthorizedDataSource(userId,needChecks).stream().map(t -> t.getId()).collect(toSet()); originResSet.removeAll(authorizedDatasources); break; case UDF: Set<Integer> authorizedUdfs = udfFuncMapper.listAuthorizedUdfFunc(userId, needChecks).stream().map(t -> t.getId()).collect(toSet()); originResSet.removeAll(authorizedUdfs); break; } resultList.addAll(originResSet); } return resultList; } /** * get user by user id * @param userId user id * @return User */ public User getUserById(int userId){ return userMapper.selectById(userId); } /** * get resource by resoruce id * @param resoruceId resource id * @return Resource
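A small usage sketch of listUnauthorized from the chunk above, checking datasource permissions; the ids and the wrapper class are illustrative assumptions.

```java
import java.util.List;

import org.apache.dolphinscheduler.common.enums.AuthorizationType;
import org.apache.dolphinscheduler.service.process.ProcessService;

public class PermCheckSketch {

    // hypothetical caller: which of these datasource ids is the user NOT allowed to use?
    public static List<Integer> unauthorizedDatasources(ProcessService processService, int userId) {
        Integer[] datasourceIds = {1, 2, 3};   // made-up ids
        // an empty result means every id is either created by or authorized to the user
        return processService.listUnauthorized(userId, datasourceIds, AuthorizationType.DATASOURCE);
    }
}
```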
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,364
After upgrading from 1.2.0 to 1.3.1, running a Spark task fails because it can't find the jar
*For better global communication, please give priority to using English description, thx! * *Please review https://dolphinscheduler.apache.org/en-us/docs/development/issue.html when describing an issue.* **Describe the question** After upgrading from 1.2.0 to 1.3.1, the task JSON format changed, so Spark tasks can no longer find their jar and fail with ClassNotFound. Even after applying the MySQL upgrade script, the task JSON was not converted into a format that 1.3 recognizes. Does the upgrade script skip migrating Spark task definitions? Or is 1.3.1 not compatible with 1.2.0 tasks? As a result I have to re-edit and save every Spark task so it is refreshed into the 1.3.1 JSON format before it can run. **Which version of DolphinScheduler:** -[1.3.1-preview] **Additional context** **Requirement or improvement** - Please describe your requirements or improvement suggestions.
https://github.com/apache/dolphinscheduler/issues/3364
https://github.com/apache/dolphinscheduler/pull/3378
ff99ef383f647627dc13cfb71f5dd3c9b26ea76c
3744167d52a52892b5ff98d3b07ab250970fdb5d
"2020-07-31T03:56:02Z"
java
"2020-08-03T02:31:27Z"
dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessService.java
*/ public Resource getResourceById(int resoruceId){ return resourceMapper.selectById(resoruceId); } /** * list resources by ids * @param resIds resIds * @return resource list */ public List<Resource> listResourceByIds(Integer[] resIds){ return resourceMapper.listResourceByIds(resIds); } /** * format task app id in task instance * @param taskInstance * @return */ public String formatTaskAppId(TaskInstance taskInstance){ ProcessDefinition definition = this.findProcessDefineById(taskInstance.getProcessDefinitionId()); ProcessInstance processInstanceById = this.findProcessInstanceById(taskInstance.getProcessInstanceId()); if(definition == null || processInstanceById == null){ return ""; } return String.format("%s_%s_%s", definition.getId(), processInstanceById.getId(), taskInstance.getId()); } }
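A trivial illustration of what formatTaskAppId above produces; the ids are made up.

```java
public class TaskAppIdSketch {
    public static void main(String[] args) {
        // process definition 12, process instance 345, task instance 6789
        System.out.println(String.format("%s_%s_%s", 12, 345, 6789));   // prints 12_345_6789
    }
}
```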
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,262
[Bug][dolphinscheduler-common] When you request the URL through applicationID to get the application status, you cannot get it if Kerberos authentication is enabled
When you request the URL through applicationID to get the application status, you cannot get it if Kerberos authentication is enabled eg: ![image](https://user-images.githubusercontent.com/59079269/88059613-b0818580-cb97-11ea-892b-3c3f94c32652.png) yarn.application.status.address=http://ark1:8088/ws/v1/cluster/apps/%s
https://github.com/apache/dolphinscheduler/issues/3262
https://github.com/apache/dolphinscheduler/pull/3264
232a24441bbb559c18739a74abb69ecdb718e158
5584f0cb4d27e3ae64ec28c65a8669b38c75d188
"2020-07-21T13:20:54Z"
java
"2020-08-10T07:18:01Z"
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HadoopUtils.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.common.utils; import com.fasterxml.jackson.databind.node.ObjectNode; import com.google.common.cache.CacheBuilder; import com.google.common.cache.CacheLoader; import com.google.common.cache.LoadingCache; import org.apache.commons.io.IOUtils; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; import org.apache.dolphinscheduler.common.enums.ResUploadType; import org.apache.dolphinscheduler.common.enums.ResourceType; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem;
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,262
[Bug][dolphinscheduler-common] When you request the URL through applicationID to get the application status, you cannot get it if Kerberos authentication is enabled
When you request the URL through applicationID to get the application status, you cannot get it if Kerberos authentication is enabled eg: ![image](https://user-images.githubusercontent.com/59079269/88059613-b0818580-cb97-11ea-892b-3c3f94c32652.png) yarn.application.status.address=http://ark1:8088/ws/v1/cluster/apps/%s
https://github.com/apache/dolphinscheduler/issues/3262
https://github.com/apache/dolphinscheduler/pull/3264
232a24441bbb559c18739a74abb69ecdb718e158
5584f0cb4d27e3ae64ec28c65a8669b38c75d188
"2020-07-21T13:20:54Z"
java
"2020-08-10T07:18:01Z"
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HadoopUtils.java
import org.apache.hadoop.fs.*; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.yarn.client.cli.RMAdminCLI; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.*; import java.nio.file.Files; import java.security.PrivilegedExceptionAction; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; import java.util.stream.Stream; import static org.apache.dolphinscheduler.common.Constants.RESOURCE_UPLOAD_PATH; /** * hadoop utils * single instance */ public class HadoopUtils implements Closeable { private static final Logger logger = LoggerFactory.getLogger(HadoopUtils.class); private static String hdfsUser = PropertyUtils.getString(Constants.HDFS_ROOT_USER); public static final String resourceUploadPath = PropertyUtils.getString(RESOURCE_UPLOAD_PATH, "/dolphinscheduler"); public static final String rmHaIds = PropertyUtils.getString(Constants.YARN_RESOURCEMANAGER_HA_RM_IDS); public static final String appAddress = PropertyUtils.getString(Constants.YARN_APPLICATION_STATUS_ADDRESS); public static final String jobHistoryAddress = PropertyUtils.getString(Constants.YARN_JOB_HISTORY_STATUS_ADDRESS); private static final String HADOOP_UTILS_KEY = "HADOOP_UTILS_KEY"; private static final LoadingCache<String, HadoopUtils> cache = CacheBuilder .newBuilder() .expireAfterWrite(PropertyUtils.getInt(Constants.KERBEROS_EXPIRE_TIME, 2), TimeUnit.HOURS)
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,262
[Bug][dolphinscheduler-common] When you request the URL through applicationID to get the application status, you cannot get it if Kerberos authentication is enabled
When you request the URL through applicationID to get the application status, you cannot get it if Kerberos authentication is enabled eg: ![image](https://user-images.githubusercontent.com/59079269/88059613-b0818580-cb97-11ea-892b-3c3f94c32652.png) yarn.application.status.address=http://ark1:8088/ws/v1/cluster/apps/%s
https://github.com/apache/dolphinscheduler/issues/3262
https://github.com/apache/dolphinscheduler/pull/3264
232a24441bbb559c18739a74abb69ecdb718e158
5584f0cb4d27e3ae64ec28c65a8669b38c75d188
"2020-07-21T13:20:54Z"
java
"2020-08-10T07:18:01Z"
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HadoopUtils.java
.build(new CacheLoader<String, HadoopUtils>() { @Override public HadoopUtils load(String key) throws Exception { return new HadoopUtils(); } }); private static volatile boolean yarnEnabled = false; private Configuration configuration; private FileSystem fs; private HadoopUtils() { init(); initHdfsPath(); } public static HadoopUtils getInstance() { return cache.getUnchecked(HADOOP_UTILS_KEY); } /** * init dolphinscheduler root path in hdfs */ private void initHdfsPath() { Path path = new Path(resourceUploadPath); try { if (!fs.exists(path)) { fs.mkdirs(path); } } catch (Exception e) { logger.error(e.getMessage(), e); } } /**
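The HadoopUtils singleton is held in a Guava LoadingCache so that the instance (and with it the Kerberos login performed in init) is rebuilt after the configured expiry. A standalone sketch of that pattern; the class name and the two-hour expiry are assumptions for illustration.

```java
import java.util.concurrent.TimeUnit;

import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;

public class ExpiringSingletonSketch {

    private static final LoadingCache<String, ExpiringSingletonSketch> CACHE = CacheBuilder.newBuilder()
            .expireAfterWrite(2, TimeUnit.HOURS)   // same idea as the KERBEROS_EXPIRE_TIME setting above
            .build(new CacheLoader<String, ExpiringSingletonSketch>() {
                @Override
                public ExpiringSingletonSketch load(String key) {
                    return new ExpiringSingletonSketch();   // expensive init work re-runs after expiry
                }
            });

    public static ExpiringSingletonSketch getInstance() {
        return CACHE.getUnchecked("singleton");
    }
}
```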
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,262
[Bug][dolphinscheduler-common] When you request the URL through applicationID to get the application status, you cannot get it if Kerberos authentication is enabled
When you request the URL through applicationID to get the application status, you cannot get it if Kerberos authentication is enabled eg: ![image](https://user-images.githubusercontent.com/59079269/88059613-b0818580-cb97-11ea-892b-3c3f94c32652.png) yarn.application.status.address=http://ark1:8088/ws/v1/cluster/apps/%s
https://github.com/apache/dolphinscheduler/issues/3262
https://github.com/apache/dolphinscheduler/pull/3264
232a24441bbb559c18739a74abb69ecdb718e158
5584f0cb4d27e3ae64ec28c65a8669b38c75d188
"2020-07-21T13:20:54Z"
java
"2020-08-10T07:18:01Z"
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HadoopUtils.java
* init hadoop configuration */ private void init() { try { configuration = new Configuration(); String resourceStorageType = PropertyUtils.getUpperCaseString(Constants.RESOURCE_STORAGE_TYPE); ResUploadType resUploadType = ResUploadType.valueOf(resourceStorageType); if (resUploadType == ResUploadType.HDFS) { if (PropertyUtils.getBoolean(Constants.HADOOP_SECURITY_AUTHENTICATION_STARTUP_STATE, false)) { System.setProperty(Constants.JAVA_SECURITY_KRB5_CONF, PropertyUtils.getString(Constants.JAVA_SECURITY_KRB5_CONF_PATH)); configuration.set(Constants.HADOOP_SECURITY_AUTHENTICATION, "kerberos"); hdfsUser = ""; UserGroupInformation.setConfiguration(configuration); UserGroupInformation.loginUserFromKeytab(PropertyUtils.getString(Constants.LOGIN_USER_KEY_TAB_USERNAME), PropertyUtils.getString(Constants.LOGIN_USER_KEY_TAB_PATH)); } String defaultFS = configuration.get(Constants.FS_DEFAULTFS); if (defaultFS.startsWith("file")) { String defaultFSProp = PropertyUtils.getString(Constants.FS_DEFAULTFS); if (StringUtils.isNotBlank(defaultFSProp)) { Map<String, String> fsRelatedProps = PropertyUtils.getPrefixedProperties("fs."); configuration.set(Constants.FS_DEFAULTFS, defaultFSProp); fsRelatedProps.forEach((key, value) -> configuration.set(key, value)); } else { logger.error("property:{} can not to be empty, please set!", Constants.FS_DEFAULTFS); throw new RuntimeException( String.format("property: %s can not to be empty, please set!", Constants.FS_DEFAULTFS)
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,262
[Bug][dolphinscheduler-common] When you request the URL through applicationID to get the application status, you cannot get it if Kerberos authentication is enabled
When you request the URL through applicationID to get the application status, you cannot get it if Kerberos authentication is enabled eg: ![image](https://user-images.githubusercontent.com/59079269/88059613-b0818580-cb97-11ea-892b-3c3f94c32652.png) yarn.application.status.address=http://ark1:8088/ws/v1/cluster/apps/%s
https://github.com/apache/dolphinscheduler/issues/3262
https://github.com/apache/dolphinscheduler/pull/3264
232a24441bbb559c18739a74abb69ecdb718e158
5584f0cb4d27e3ae64ec28c65a8669b38c75d188
"2020-07-21T13:20:54Z"
java
"2020-08-10T07:18:01Z"
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HadoopUtils.java
); } } else { logger.info("get property:{} -> {}, from core-site.xml hdfs-site.xml ", Constants.FS_DEFAULTFS, defaultFS); } if (fs == null) { if (StringUtils.isNotEmpty(hdfsUser)) { UserGroupInformation ugi = UserGroupInformation.createRemoteUser(hdfsUser); ugi.doAs(new PrivilegedExceptionAction<Boolean>() { @Override public Boolean run() throws Exception { fs = FileSystem.get(configuration); return true; } }); } else { logger.warn("hdfs.root.user is not set value!"); fs = FileSystem.get(configuration); } } } else if (resUploadType == ResUploadType.S3) { System.setProperty(Constants.AWS_S3_V4, Constants.STRING_TRUE); configuration.set(Constants.FS_DEFAULTFS, PropertyUtils.getString(Constants.FS_DEFAULTFS)); configuration.set(Constants.FS_S3A_ENDPOINT, PropertyUtils.getString(Constants.FS_S3A_ENDPOINT)); configuration.set(Constants.FS_S3A_ACCESS_KEY, PropertyUtils.getString(Constants.FS_S3A_ACCESS_KEY)); configuration.set(Constants.FS_S3A_SECRET_KEY, PropertyUtils.getString(Constants.FS_S3A_SECRET_KEY)); fs = FileSystem.get(configuration); } } catch (Exception e) { logger.error(e.getMessage(), e);
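A hedged, standalone sketch of the Kerberos branch of init() shown in the chunks above, using the plain Hadoop UserGroupInformation API; the krb5.conf path, principal and keytab are placeholders, not values from the project.

```java
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.security.UserGroupInformation;

public class KerberosHdfsSketch {

    public static FileSystem kerberizedFileSystem() throws IOException {
        System.setProperty("java.security.krb5.conf", "/etc/krb5.conf");   // placeholder path

        Configuration conf = new Configuration();
        conf.set("hadoop.security.authentication", "kerberos");

        UserGroupInformation.setConfiguration(conf);
        // placeholder principal and keytab
        UserGroupInformation.loginUserFromKeytab("dolphin@EXAMPLE.COM",
                "/etc/security/keytabs/dolphin.keytab");

        // subsequent HDFS (and SPNEGO HTTP) calls now run as the logged-in principal
        return FileSystem.get(conf);
    }
}
```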
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,262
[Bug][dolphinscheduler-common] When you request the URL through applicationID to get the application status, you cannot get it if Kerberos authentication is enabled
When you request the URL through applicationID to get the application status, you cannot get it if Kerberos authentication is enabled eg: ![image](https://user-images.githubusercontent.com/59079269/88059613-b0818580-cb97-11ea-892b-3c3f94c32652.png) yarn.application.status.address=http://ark1:8088/ws/v1/cluster/apps/%s
https://github.com/apache/dolphinscheduler/issues/3262
https://github.com/apache/dolphinscheduler/pull/3264
232a24441bbb559c18739a74abb69ecdb718e158
5584f0cb4d27e3ae64ec28c65a8669b38c75d188
"2020-07-21T13:20:54Z"
java
"2020-08-10T07:18:01Z"
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HadoopUtils.java
} } /** * @return Configuration */ public Configuration getConfiguration() { return configuration; } /** * get application url * * @param applicationId application id * @return url of application */ public String getApplicationUrl(String applicationId) throws Exception { /** * if rmHaIds contains xx, it signs not use resourcemanager * otherwise: * if rmHaIds is empty, single resourcemanager enabled * if rmHaIds not empty: resourcemanager HA enabled */ String appUrl = ""; if (StringUtils.isEmpty(rmHaIds)){ appUrl = appAddress; yarnEnabled = true; } else { appUrl = getAppAddress(appAddress, rmHaIds); yarnEnabled = true;
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,262
[Bug][dolphinscheduler-common] When you request the URL through applicationID to get the application status, you cannot get it if Kerberos authentication is enabled
When you request the URL through applicationID to get the application status, you cannot get it if Kerberos authentication is enabled eg: ![image](https://user-images.githubusercontent.com/59079269/88059613-b0818580-cb97-11ea-892b-3c3f94c32652.png) yarn.application.status.address=http://ark1:8088/ws/v1/cluster/apps/%s
https://github.com/apache/dolphinscheduler/issues/3262
https://github.com/apache/dolphinscheduler/pull/3264
232a24441bbb559c18739a74abb69ecdb718e158
5584f0cb4d27e3ae64ec28c65a8669b38c75d188
"2020-07-21T13:20:54Z"
java
"2020-08-10T07:18:01Z"
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HadoopUtils.java
logger.info("application url : {}", appUrl); } if(StringUtils.isBlank(appUrl)){ throw new Exception("application url is blank"); } return String.format(appUrl, applicationId); } public String getJobHistoryUrl(String applicationId) { String jobId = applicationId.replace("application", "job"); return String.format(jobHistoryAddress, jobId); } /** * cat file on hdfs * * @param hdfsFilePath hdfs file path * @return byte[] byte array * @throws IOException errors */ public byte[] catFile(String hdfsFilePath) throws IOException { if (StringUtils.isBlank(hdfsFilePath)) { logger.error("hdfs file path:{} is blank", hdfsFilePath); return new byte[0]; } FSDataInputStream fsDataInputStream = fs.open(new Path(hdfsFilePath)); return IOUtils.toByteArray(fsDataInputStream); } /** * cat file on hdfs *
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,262
[Bug][dolphinscheduler-common] When you request the URL through applicationID to get the application status, you cannot get it if Kerberos authentication is enabled
When you request the URL through applicationID to get the application status, you cannot get it if Kerberos authentication is enabled eg: ![image](https://user-images.githubusercontent.com/59079269/88059613-b0818580-cb97-11ea-892b-3c3f94c32652.png) yarn.application.status.address=http://ark1:8088/ws/v1/cluster/apps/%s
https://github.com/apache/dolphinscheduler/issues/3262
https://github.com/apache/dolphinscheduler/pull/3264
232a24441bbb559c18739a74abb69ecdb718e158
5584f0cb4d27e3ae64ec28c65a8669b38c75d188
"2020-07-21T13:20:54Z"
java
"2020-08-10T07:18:01Z"
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HadoopUtils.java
* @param hdfsFilePath hdfs file path * @param skipLineNums skip line numbers * @param limit read how many lines * @return content of file * @throws IOException errors */ public List<String> catFile(String hdfsFilePath, int skipLineNums, int limit) throws IOException { if (StringUtils.isBlank(hdfsFilePath)) { logger.error("hdfs file path:{} is blank", hdfsFilePath); return Collections.emptyList(); } try (FSDataInputStream in = fs.open(new Path(hdfsFilePath))) { BufferedReader br = new BufferedReader(new InputStreamReader(in)); Stream<String> stream = br.lines().skip(skipLineNums).limit(limit); return stream.collect(Collectors.toList()); } } /** * make the given file and all non-existent parents into * directories. Has the semantics of Unix 'mkdir -p'. * Existence of the directory hierarchy is not an error. * * @param hdfsPath path to create * @return mkdir result * @throws IOException errors */ public boolean mkdir(String hdfsPath) throws IOException { return fs.mkdirs(new Path(hdfsPath)); } /**
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,262
[Bug][dolphinscheduler-common] When you request the URL through applicationID to get the application status, you cannot get it if Kerberos authentication is enabled
When you request the URL through applicationID to get the application status, you cannot get it if Kerberos authentication is enabled eg: ![image](https://user-images.githubusercontent.com/59079269/88059613-b0818580-cb97-11ea-892b-3c3f94c32652.png) yarn.application.status.address=http://ark1:8088/ws/v1/cluster/apps/%s
https://github.com/apache/dolphinscheduler/issues/3262
https://github.com/apache/dolphinscheduler/pull/3264
232a24441bbb559c18739a74abb69ecdb718e158
5584f0cb4d27e3ae64ec28c65a8669b38c75d188
"2020-07-21T13:20:54Z"
java
"2020-08-10T07:18:01Z"
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HadoopUtils.java
* copy files between FileSystems * * @param srcPath source hdfs path * @param dstPath destination hdfs path * @param deleteSource whether to delete the src * @param overwrite whether to overwrite an existing file * @return if success or not * @throws IOException errors */ public boolean copy(String srcPath, String dstPath, boolean deleteSource, boolean overwrite) throws IOException { return FileUtil.copy(fs, new Path(srcPath), fs, new Path(dstPath), deleteSource, overwrite, fs.getConf()); } /** * the src file is on the local disk. Add it to FS at * the given dst name. * * @param srcFile local file * @param dstHdfsPath destination hdfs path * @param deleteSource whether to delete the src * @param overwrite whether to overwrite an existing file * @return if success or not * @throws IOException errors */ public boolean copyLocalToHdfs(String srcFile, String dstHdfsPath, boolean deleteSource, boolean overwrite) throws IOException { Path srcPath = new Path(srcFile); Path dstPath = new Path(dstHdfsPath); fs.copyFromLocalFile(deleteSource, overwrite, srcPath, dstPath); return true; } /**
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,262
[Bug][dolphinscheduler-common] When you request the URL through applicationID to get the application status, you cannot get it if Kerberos authentication is enabled
When you request the URL through applicationID to get the application status, you cannot get it if Kerberos authentication is enabled eg: ![image](https://user-images.githubusercontent.com/59079269/88059613-b0818580-cb97-11ea-892b-3c3f94c32652.png) yarn.application.status.address=http://ark1:8088/ws/v1/cluster/apps/%s
https://github.com/apache/dolphinscheduler/issues/3262
https://github.com/apache/dolphinscheduler/pull/3264
232a24441bbb559c18739a74abb69ecdb718e158
5584f0cb4d27e3ae64ec28c65a8669b38c75d188
"2020-07-21T13:20:54Z"
java
"2020-08-10T07:18:01Z"
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HadoopUtils.java
* copy hdfs file to local * * @param srcHdfsFilePath source hdfs file path * @param dstFile destination file * @param deleteSource delete source * @param overwrite overwrite * @return result of copy hdfs file to local * @throws IOException errors */ public boolean copyHdfsToLocal(String srcHdfsFilePath, String dstFile, boolean deleteSource, boolean overwrite) throws IOException { Path srcPath = new Path(srcHdfsFilePath); File dstPath = new File(dstFile); if (dstPath.exists()) { if (dstPath.isFile()) { if (overwrite) { Files.delete(dstPath.toPath()); } } else { logger.error("destination file must be a file"); } } if (!dstPath.getParentFile().exists()) { dstPath.getParentFile().mkdirs(); } return FileUtil.copy(fs, srcPath, dstPath, deleteSource, fs.getConf()); } /** * delete a file * * @param hdfsFilePath the path to delete.
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,262
[Bug][dolphinscheduler-common] When you request the URL through applicationID to get the application status, you cannot get it if Kerberos authentication is enabled
When you request the URL through applicationID to get the application status, you cannot get it if Kerberos authentication is enabled eg: ![image](https://user-images.githubusercontent.com/59079269/88059613-b0818580-cb97-11ea-892b-3c3f94c32652.png) yarn.application.status.address=http://ark1:8088/ws/v1/cluster/apps/%s
https://github.com/apache/dolphinscheduler/issues/3262
https://github.com/apache/dolphinscheduler/pull/3264
232a24441bbb559c18739a74abb69ecdb718e158
5584f0cb4d27e3ae64ec28c65a8669b38c75d188
"2020-07-21T13:20:54Z"
java
"2020-08-10T07:18:01Z"
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HadoopUtils.java
* @param recursive if path is a directory and set to * true, the directory is deleted else throws an exception. In * case of a file the recursive can be set to either true or false. * @return true if delete is successful else false. * @throws IOException errors */ public boolean delete(String hdfsFilePath, boolean recursive) throws IOException { return fs.delete(new Path(hdfsFilePath), recursive); } /** * check if exists * * @param hdfsFilePath source file path * @return result of exists or not * @throws IOException errors */ public boolean exists(String hdfsFilePath) throws IOException { return fs.exists(new Path(hdfsFilePath)); } /** * Gets a list of files in the directory * * @param filePath file path * @return {@link FileStatus} file status * @throws Exception errors */ public FileStatus[] listFileStatus(String filePath) throws Exception { try { return fs.listStatus(new Path(filePath)); } catch (IOException e) {
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,262
[Bug][dolphinscheduler-common] When you request the URL through applicationID to get the application status, you cannot get it if Kerberos authentication is enabled
When you request the URL through applicationID to get the application status, you cannot get it if Kerberos authentication is enabled eg: ![image](https://user-images.githubusercontent.com/59079269/88059613-b0818580-cb97-11ea-892b-3c3f94c32652.png) yarn.application.status.address=http://ark1:8088/ws/v1/cluster/apps/%s
https://github.com/apache/dolphinscheduler/issues/3262
https://github.com/apache/dolphinscheduler/pull/3264
232a24441bbb559c18739a74abb69ecdb718e158
5584f0cb4d27e3ae64ec28c65a8669b38c75d188
"2020-07-21T13:20:54Z"
java
"2020-08-10T07:18:01Z"
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HadoopUtils.java
logger.error("Get file list exception", e); throw new Exception("Get file list exception", e); } } /** * Renames Path src to Path dst. Can take place on local fs * or remote DFS. * * @param src path to be renamed * @param dst new path after rename * @return true if rename is successful * @throws IOException on failure */ public boolean rename(String src, String dst) throws IOException { return fs.rename(new Path(src), new Path(dst)); } /** * hadoop resourcemanager enabled or not * * @return result */ public boolean isYarnEnabled() { return yarnEnabled; } /** * get the state of an application * * @param applicationId application id * @return the return may be null or there may be other parse exceptions */
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,262
[Bug][dolphinscheduler-common] When you request the URL through applicationID to get the application status, you cannot get it if Kerberos authentication is enabled
When you request the URL through applicationID to get the application status, you cannot get it if Kerberos authentication is enabled eg: ![image](https://user-images.githubusercontent.com/59079269/88059613-b0818580-cb97-11ea-892b-3c3f94c32652.png) yarn.application.status.address=http://ark1:8088/ws/v1/cluster/apps/%s
https://github.com/apache/dolphinscheduler/issues/3262
https://github.com/apache/dolphinscheduler/pull/3264
232a24441bbb559c18739a74abb69ecdb718e158
5584f0cb4d27e3ae64ec28c65a8669b38c75d188
"2020-07-21T13:20:54Z"
java
"2020-08-10T07:18:01Z"
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HadoopUtils.java
public ExecutionStatus getApplicationStatus(String applicationId) throws Exception { if (StringUtils.isEmpty(applicationId)) { return null; } String result = Constants.FAILED; String applicationUrl = getApplicationUrl(applicationId); logger.info("applicationUrl={}", applicationUrl); String responseContent = HttpUtils.get(applicationUrl); if (responseContent != null) { ObjectNode jsonObject = JSONUtils.parseObject(responseContent); result = jsonObject.path("app").path("finalStatus").asText(); } else { String jobHistoryUrl = getJobHistoryUrl(applicationId); logger.info("jobHistoryUrl={}", jobHistoryUrl); responseContent = HttpUtils.get(jobHistoryUrl); ObjectNode jsonObject = JSONUtils.parseObject(responseContent); if (!jsonObject.has("job")){ return ExecutionStatus.FAILURE; } result = jsonObject.path("job").path("state").asText(); } switch (result) { case Constants.ACCEPTED: return ExecutionStatus.SUBMITTED_SUCCESS; case Constants.SUCCEEDED: return ExecutionStatus.SUCCESS; case Constants.NEW: case Constants.NEW_SAVING: case Constants.SUBMITTED:
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,262
[Bug][dolphinscheduler-common] When you request the URL through applicationID to get the application status, you cannot get it if Kerberos authentication is enabled
When you request the URL through applicationID to get the application status, you cannot get it if Kerberos authentication is enabled eg: ![image](https://user-images.githubusercontent.com/59079269/88059613-b0818580-cb97-11ea-892b-3c3f94c32652.png) yarn.application.status.address=http://ark1:8088/ws/v1/cluster/apps/%s
https://github.com/apache/dolphinscheduler/issues/3262
https://github.com/apache/dolphinscheduler/pull/3264
232a24441bbb559c18739a74abb69ecdb718e158
5584f0cb4d27e3ae64ec28c65a8669b38c75d188
"2020-07-21T13:20:54Z"
java
"2020-08-10T07:18:01Z"
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HadoopUtils.java
case Constants.FAILED: return ExecutionStatus.FAILURE; case Constants.KILLED: return ExecutionStatus.KILL; case Constants.RUNNING: default: return ExecutionStatus.RUNNING_EXECUTION; } } /** * get data hdfs path * * @return data hdfs path */ public static String getHdfsDataBasePath() { if ("/".equals(resourceUploadPath)) { return ""; } else { return resourceUploadPath; } } /** * hdfs resource dir * * @param tenantCode tenant code * @param resourceType resource type * @return hdfs resource dir */ public static String getHdfsDir(ResourceType resourceType, String tenantCode) {
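For reference, a sketch of parsing the two REST payloads that getApplicationStatus reads above, using plain Jackson instead of the project's JSONUtils wrapper; the JSON strings are abbreviated examples of the standard ResourceManager and JobHistory response shapes.

```java
import com.fasterxml.jackson.databind.ObjectMapper;

public class YarnStatusParseSketch {

    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();

        // ResourceManager: /ws/v1/cluster/apps/{applicationId}
        String rmBody = "{\"app\":{\"finalStatus\":\"SUCCEEDED\"}}";
        System.out.println(mapper.readTree(rmBody).path("app").path("finalStatus").asText());

        // JobHistory fallback: /ws/v1/history/mapreduce/jobs/{jobId}
        String historyBody = "{\"job\":{\"state\":\"SUCCEEDED\"}}";
        System.out.println(mapper.readTree(historyBody).path("job").path("state").asText());
    }
}
```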
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,262
[Bug][dolphinscheduler-common] When you request the URL through applicationID to get the application status, you cannot get it if Kerberos authentication is enabled
When you request the URL through applicationID to get the application status, you cannot get it if Kerberos authentication is enabled eg: ![image](https://user-images.githubusercontent.com/59079269/88059613-b0818580-cb97-11ea-892b-3c3f94c32652.png) yarn.application.status.address=http://ark1:8088/ws/v1/cluster/apps/%s
https://github.com/apache/dolphinscheduler/issues/3262
https://github.com/apache/dolphinscheduler/pull/3264
232a24441bbb559c18739a74abb69ecdb718e158
5584f0cb4d27e3ae64ec28c65a8669b38c75d188
"2020-07-21T13:20:54Z"
java
"2020-08-10T07:18:01Z"
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HadoopUtils.java
String hdfsDir = ""; if (resourceType.equals(ResourceType.FILE)) { hdfsDir = getHdfsResDir(tenantCode); } else if (resourceType.equals(ResourceType.UDF)) { hdfsDir = getHdfsUdfDir(tenantCode); } return hdfsDir; } /** * hdfs resource dir * * @param tenantCode tenant code * @return hdfs resource dir */ public static String getHdfsResDir(String tenantCode) { return String.format("%s/resources", getHdfsTenantDir(tenantCode)); } /** * hdfs user dir * * @param tenantCode tenant code * @param userId user id * @return hdfs resource dir */ public static String getHdfsUserDir(String tenantCode, int userId) { return String.format("%s/home/%d", getHdfsTenantDir(tenantCode), userId); } /** * hdfs udf dir *
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,262
[Bug][dolphinscheduler-common] When you request the URL through applicationID to get the application status, you cannot get it if Kerberos authentication is enabled
When you request the URL through applicationID to get the application status, you cannot get it if Kerberos authentication is enabled eg: ![image](https://user-images.githubusercontent.com/59079269/88059613-b0818580-cb97-11ea-892b-3c3f94c32652.png) yarn.application.status.address=http://ark1:8088/ws/v1/cluster/apps/%s
https://github.com/apache/dolphinscheduler/issues/3262
https://github.com/apache/dolphinscheduler/pull/3264
232a24441bbb559c18739a74abb69ecdb718e158
5584f0cb4d27e3ae64ec28c65a8669b38c75d188
"2020-07-21T13:20:54Z"
java
"2020-08-10T07:18:01Z"
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HadoopUtils.java
* @param tenantCode tenant code * @return get udf dir on hdfs */ public static String getHdfsUdfDir(String tenantCode) { return String.format("%s/udfs", getHdfsTenantDir(tenantCode)); } /** * get hdfs file name * * @param resourceType resource type * @param tenantCode tenant code * @param fileName file name * @return hdfs file name */ public static String getHdfsFileName(ResourceType resourceType, String tenantCode, String fileName) { if (fileName.startsWith("/")) { fileName = fileName.replaceFirst("/", ""); } return String.format("%s/%s", getHdfsDir(resourceType, tenantCode), fileName); } /** * get absolute path and name for resource file on hdfs * * @param tenantCode tenant code * @param fileName file name * @return get absolute path and name for file on hdfs */ public static String getHdfsResourceFileName(String tenantCode, String fileName) { if (fileName.startsWith("/")) { fileName = fileName.replaceFirst("/", "");
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,262
[Bug][dolphinscheduler-common] When you request the URL through applicationID to get the application status, you cannot get it if Kerberos authentication is enabled
When you request the URL through applicationID to get the application status, you cannot get it if Kerberos authentication is enabled eg: ![image](https://user-images.githubusercontent.com/59079269/88059613-b0818580-cb97-11ea-892b-3c3f94c32652.png) yarn.application.status.address=http://ark1:8088/ws/v1/cluster/apps/%s
https://github.com/apache/dolphinscheduler/issues/3262
https://github.com/apache/dolphinscheduler/pull/3264
232a24441bbb559c18739a74abb69ecdb718e158
5584f0cb4d27e3ae64ec28c65a8669b38c75d188
"2020-07-21T13:20:54Z"
java
"2020-08-10T07:18:01Z"
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HadoopUtils.java
} return String.format("%s/%s", getHdfsResDir(tenantCode), fileName); } /** * get absolute path and name for udf file on hdfs * * @param tenantCode tenant code * @param fileName file name * @return get absolute path and name for udf file on hdfs */ public static String getHdfsUdfFileName(String tenantCode, String fileName) { if (fileName.startsWith("/")) { fileName = fileName.replaceFirst("/", ""); } return String.format("%s/%s", getHdfsUdfDir(tenantCode), fileName); } /** * @param tenantCode tenant code * @return file directory of tenants on hdfs */ public static String getHdfsTenantDir(String tenantCode) { return String.format("%s/%s", getHdfsDataBasePath(), tenantCode); } /** * getAppAddress * * @param appAddress app address * @param rmHa resource manager ha * @return app address */
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,262
[Bug][dolphinscheduler-common] When you request the URL through applicationID to get the application status, you cannot get it if Kerberos authentication is enabled
When you request the URL through applicationID to get the application status, you cannot get it if Kerberos authentication is enabled eg: ![image](https://user-images.githubusercontent.com/59079269/88059613-b0818580-cb97-11ea-892b-3c3f94c32652.png) yarn.application.status.address=http://ark1:8088/ws/v1/cluster/apps/%s
https://github.com/apache/dolphinscheduler/issues/3262
https://github.com/apache/dolphinscheduler/pull/3264
232a24441bbb559c18739a74abb69ecdb718e158
5584f0cb4d27e3ae64ec28c65a8669b38c75d188
"2020-07-21T13:20:54Z"
java
"2020-08-10T07:18:01Z"
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HadoopUtils.java
public static String getAppAddress(String appAddress, String rmHa) { String activeRM = YarnHAAdminUtils.getAcitveRMName(rmHa); String[] split1 = appAddress.split(Constants.DOUBLE_SLASH); if (split1.length != 2) { return null; } String start = split1[0] + Constants.DOUBLE_SLASH; String[] split2 = split1[1].split(Constants.COLON); if (split2.length != 2) { return null; } String end = Constants.COLON + split2[1]; return start + activeRM + end; } @Override public void close() throws IOException { if (fs != null) { try { fs.close(); } catch (IOException e) { logger.error("Close HadoopUtils instance failed", e); throw new IOException("Close HadoopUtils instance failed", e); } } } /** * yarn ha admin utils */ private static final class YarnHAAdminUtils extends RMAdminCLI {
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,262
[Bug][dolphinscheduler-common] When you request the URL through applicationID to get the application status, you cannot get it if Kerberos authentication is enabled
When you request the URL through applicationID to get the application status, you cannot get it if Kerberos authentication is enabled eg: ![image](https://user-images.githubusercontent.com/59079269/88059613-b0818580-cb97-11ea-892b-3c3f94c32652.png) yarn.application.status.address=http://ark1:8088/ws/v1/cluster/apps/%s
https://github.com/apache/dolphinscheduler/issues/3262
https://github.com/apache/dolphinscheduler/pull/3264
232a24441bbb559c18739a74abb69ecdb718e158
5584f0cb4d27e3ae64ec28c65a8669b38c75d188
"2020-07-21T13:20:54Z"
java
"2020-08-10T07:18:01Z"
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HadoopUtils.java
/** * get active resourcemanager * * @param rmIds * @return */ public static String getAcitveRMName(String rmIds) { String[] rmIdArr = rmIds.split(Constants.COMMA); int activeResourceManagerPort = PropertyUtils.getInt(Constants.HADOOP_RESOURCE_MANAGER_HTTPADDRESS_PORT, 8088); String yarnUrl = "http://%s:" + activeResourceManagerPort + "/ws/v1/cluster/info"; String state = null; try { /** * send http get request to rm1 */ state = getRMState(String.format(yarnUrl, rmIdArr[0])); if (Constants.HADOOP_RM_STATE_ACTIVE.equals(state)) { return rmIdArr[0]; } else if (Constants.HADOOP_RM_STATE_STANDBY.equals(state)) { state = getRMState(String.format(yarnUrl, rmIdArr[1])); if (Constants.HADOOP_RM_STATE_ACTIVE.equals(state)) { return rmIdArr[1]; } } else { return null; }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,262
[Bug][dolphinscheduler-common] When you request the URL through applicationID to get the application status, you cannot get it if Kerberos authentication is enabled
When you request the URL through applicationID to get the application status, you cannot get it if Kerberos authentication is enabled eg: ![image](https://user-images.githubusercontent.com/59079269/88059613-b0818580-cb97-11ea-892b-3c3f94c32652.png) yarn.application.status.address=http://ark1:8088/ws/v1/cluster/apps/%s
https://github.com/apache/dolphinscheduler/issues/3262
https://github.com/apache/dolphinscheduler/pull/3264
232a24441bbb559c18739a74abb69ecdb718e158
5584f0cb4d27e3ae64ec28c65a8669b38c75d188
"2020-07-21T13:20:54Z"
java
"2020-08-10T07:18:01Z"
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HadoopUtils.java
} catch (Exception e) { state = getRMState(String.format(yarnUrl, rmIdArr[1])); if (Constants.HADOOP_RM_STATE_ACTIVE.equals(state)) { return rmIdArr[0]; } } return null; } /** * get ResourceManager state * * @param url * @return */ public static String getRMState(String url) { String retStr = HttpUtils.get(url); if (StringUtils.isEmpty(retStr)) { return null; } ObjectNode jsonObject = JSONUtils.parseObject(retStr); if (!jsonObject.has("clusterInfo")){ return null; } return jsonObject.get("clusterInfo").path("haState").asText(); } } }
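A hedged sketch of the active-ResourceManager probe that YarnHAAdminUtils performs above, written with java.net and Jackson instead of the project's HttpUtils/JSONUtils helpers; the host names and port are placeholders.

```java
import java.io.InputStream;
import java.net.URL;

import com.fasterxml.jackson.databind.ObjectMapper;

public class ActiveRmProbeSketch {

    // return the first RM whose /ws/v1/cluster/info reports haState == ACTIVE, or null if none does
    public static String activeRm(String[] rmIds, int port) {
        ObjectMapper mapper = new ObjectMapper();
        for (String rmId : rmIds) {
            try (InputStream in = new URL("http://" + rmId + ":" + port + "/ws/v1/cluster/info").openStream()) {
                String haState = mapper.readTree(in).path("clusterInfo").path("haState").asText();
                if ("ACTIVE".equals(haState)) {
                    return rmId;
                }
                // a standby RM answers with STANDBY, so the loop simply moves on
            } catch (Exception e) {
                // unreachable RM: try the next candidate
            }
        }
        return null;
    }

    public static void main(String[] args) {
        System.out.println(activeRm(new String[]{"ark1", "ark2"}, 8088));   // placeholder hosts
    }
}
```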
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,262
[Bug][dolphinscheduler-common] When you request the URL through applicationID to get the application status, you cannot get it if Kerberos authentication is enabled
When you request the URL through applicationID to get the application status, you cannot get it if Kerberos authentication is enabled eg: ![image](https://user-images.githubusercontent.com/59079269/88059613-b0818580-cb97-11ea-892b-3c3f94c32652.png) yarn.application.status.address=http://ark1:8088/ws/v1/cluster/apps/%s
https://github.com/apache/dolphinscheduler/issues/3262
https://github.com/apache/dolphinscheduler/pull/3264
232a24441bbb559c18739a74abb69ecdb718e158
5584f0cb4d27e3ae64ec28c65a8669b38c75d188
"2020-07-21T13:20:54Z"
java
"2020-08-10T07:18:01Z"
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HttpUtils.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.dolphinscheduler.common.utils;

import org.apache.dolphinscheduler.common.Constants;
import org.apache.http.HttpEntity;
import org.apache.http.client.config.AuthSchemes;
import org.apache.http.client.config.CookieSpecs;
import org.apache.http.client.config.RequestConfig;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.config.Registry;
import org.apache.http.config.RegistryBuilder;
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,262
[Bug][dolphinscheduler-common] When you request the URL through applicationID to get the application status, you cannot get it if Kerberos authentication is enabled
When you request the URL through applicationID to get the application status, you cannot get it if Kerberos authentication is enabled eg: ![image](https://user-images.githubusercontent.com/59079269/88059613-b0818580-cb97-11ea-892b-3c3f94c32652.png) yarn.application.status.address=http://ark1:8088/ws/v1/cluster/apps/%s
https://github.com/apache/dolphinscheduler/issues/3262
https://github.com/apache/dolphinscheduler/pull/3264
232a24441bbb559c18739a74abb69ecdb718e158
5584f0cb4d27e3ae64ec28c65a8669b38c75d188
"2020-07-21T13:20:54Z"
java
"2020-08-10T07:18:01Z"
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HttpUtils.java
import org.apache.http.conn.socket.ConnectionSocketFactory;
import org.apache.http.conn.socket.PlainConnectionSocketFactory;
import org.apache.http.conn.ssl.NoopHostnameVerifier;
import org.apache.http.conn.ssl.SSLConnectionSocketFactory;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
import org.apache.http.util.EntityUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.net.ssl.SSLContext;
import javax.net.ssl.TrustManager;
import javax.net.ssl.X509TrustManager;
import java.io.IOException;
import java.security.KeyManagementException;
import java.security.NoSuchAlgorithmException;
import java.security.cert.X509Certificate;
import java.util.Arrays;

/**
 * http utils
 */
public class HttpUtils {

    public static final Logger logger = LoggerFactory.getLogger(HttpUtils.class);

    private HttpUtils() {
    }

    public static CloseableHttpClient getInstance(){
        return HttpClientInstance.httpClient;
    }

    private static class HttpClientInstance{
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,262
[Bug][dolphinscheduler-common] When you request the URL through applicationID to get the application status, you cannot get it if Kerberos authentication is enabled
When you request the URL through applicationID to get the application status, you cannot get it if Kerberos authentication is enabled eg: ![image](https://user-images.githubusercontent.com/59079269/88059613-b0818580-cb97-11ea-892b-3c3f94c32652.png) yarn.application.status.address=http://ark1:8088/ws/v1/cluster/apps/%s
https://github.com/apache/dolphinscheduler/issues/3262
https://github.com/apache/dolphinscheduler/pull/3264
232a24441bbb559c18739a74abb69ecdb718e158
5584f0cb4d27e3ae64ec28c65a8669b38c75d188
"2020-07-21T13:20:54Z"
java
"2020-08-10T07:18:01Z"
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HttpUtils.java
        private static final CloseableHttpClient httpClient = HttpClients.custom().setConnectionManager(cm).setDefaultRequestConfig(requestConfig).build();
    }

    private static PoolingHttpClientConnectionManager cm;
    private static SSLContext ctx = null;
    private static SSLConnectionSocketFactory socketFactory;
    private static RequestConfig requestConfig;
    private static Registry<ConnectionSocketFactory> socketFactoryRegistry;
    private static X509TrustManager xtm = new X509TrustManager() {
        @Override
        public void checkClientTrusted(X509Certificate[] chain, String authType) {
        }

        @Override
        public void checkServerTrusted(X509Certificate[] chain, String authType) {
        }

        @Override
        public X509Certificate[] getAcceptedIssuers() {
            return null;
        }
    };

    static {
        try {
            ctx = SSLContext.getInstance(SSLConnectionSocketFactory.TLS);
            ctx.init(null, new TrustManager[] { xtm }, null);
        } catch (NoSuchAlgorithmException e) {
            logger.error("SSLContext init with NoSuchAlgorithmException", e);
        } catch (KeyManagementException e) {
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,262
[Bug][dolphinscheduler-common] When you request the URL through applicationID to get the application status, you cannot get it if Kerberos authentication is enabled
When you request the URL through applicationID to get the application status, you cannot get it if Kerberos authentication is enabled eg: ![image](https://user-images.githubusercontent.com/59079269/88059613-b0818580-cb97-11ea-892b-3c3f94c32652.png) yarn.application.status.address=http://ark1:8088/ws/v1/cluster/apps/%s
https://github.com/apache/dolphinscheduler/issues/3262
https://github.com/apache/dolphinscheduler/pull/3264
232a24441bbb559c18739a74abb69ecdb718e158
5584f0cb4d27e3ae64ec28c65a8669b38c75d188
"2020-07-21T13:20:54Z"
java
"2020-08-10T07:18:01Z"
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HttpUtils.java
logger.error("SSLContext init with KeyManagementException", e); } socketFactory = new SSLConnectionSocketFactory(ctx, NoopHostnameVerifier.INSTANCE); /** set timeout、request time、socket timeout */ requestConfig = RequestConfig.custom().setCookieSpec(CookieSpecs.IGNORE_COOKIES) .setExpectContinueEnabled(Boolean.TRUE) .setTargetPreferredAuthSchemes(Arrays.asList(AuthSchemes.NTLM, AuthSchemes.DIGEST)) .setProxyPreferredAuthSchemes(Arrays.asList(AuthSchemes.BASIC)) .setConnectTimeout(Constants.HTTP_CONNECT_TIMEOUT).setSocketTimeout(Constants.SOCKET_TIMEOUT) .setConnectionRequestTimeout(Constants.HTTP_CONNECTION_REQUEST_TIMEOUT).setRedirectsEnabled(true) .build(); socketFactoryRegistry = RegistryBuilder.<ConnectionSocketFactory>create() .register("http", PlainConnectionSocketFactory.INSTANCE).register("https", socketFactory).build(); cm = new PoolingHttpClientConnectionManager(socketFactoryRegistry); cm.setDefaultMaxPerRoute(60); cm.setMaxTotal(100); } /** * get http request content * @param url url * @return http get request response content */ public static String get(String url){ CloseableHttpClient httpclient = HttpUtils.getInstance(); HttpGet httpget = new HttpGet(url); String responseContent = null; CloseableHttpResponse response = null; try { response = httpclient.execute(httpget); //ch
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,262
[Bug][dolphinscheduler-common] When you request the URL through applicationID to get the application status, you cannot get it if Kerberos authentication is enabled
When you request the URL through applicationID to get the application status, you cannot get it if Kerberos authentication is enabled eg: ![image](https://user-images.githubusercontent.com/59079269/88059613-b0818580-cb97-11ea-892b-3c3f94c32652.png) yarn.application.status.address=http://ark1:8088/ws/v1/cluster/apps/%s
https://github.com/apache/dolphinscheduler/issues/3262
https://github.com/apache/dolphinscheduler/pull/3264
232a24441bbb559c18739a74abb69ecdb718e158
5584f0cb4d27e3ae64ec28c65a8669b38c75d188
"2020-07-21T13:20:54Z"
java
"2020-08-10T07:18:01Z"
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HttpUtils.java
            if (response.getStatusLine().getStatusCode() == 200) {
                HttpEntity entity = response.getEntity();
                if (entity != null) {
                    responseContent = EntityUtils.toString(entity, Constants.UTF_8);
                }else{
                    logger.warn("http entity is null");
                }
            }else{
                logger.error("http get:{} response status code is not 200!", response.getStatusLine().getStatusCode());
            }
        }catch (Exception e){
            logger.error(e.getMessage(),e);
        }finally {
            try {
                if (response != null) {
                    EntityUtils.consume(response.getEntity());
                    response.close();
                }
            } catch (IOException e) {
                logger.error(e.getMessage(),e);
            }

            if (!httpget.isAborted()) {
                httpget.releaseConnection();
                httpget.abort();
            }
        }
        return responseContent;
    }
}
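Once the GET against `yarn.application.status.address` (the `ws/v1/cluster/apps/{appId}` endpoint) succeeds, the application state still has to be pulled out of the JSON payload. The fragment below is a small, hedged sketch of that parsing step using Jackson directly; the `app`/`state` field names follow the public YARN REST API documentation, and the class itself is illustrative rather than part of HttpUtils.

```java
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

public class YarnAppStatusParserSketch {

    private static final ObjectMapper MAPPER = new ObjectMapper();

    /**
     * Extract the application state (e.g. RUNNING, FINISHED) from the
     * ws/v1/cluster/apps/{appId} JSON payload; returns null when the
     * response is empty or the expected "app" node is missing.
     */
    public static String parseState(String responseBody) throws Exception {
        if (responseBody == null || responseBody.isEmpty()) {
            return null;
        }
        JsonNode root = MAPPER.readTree(responseBody);
        JsonNode app = root.path("app");
        if (app.isMissingNode()) {
            return null;
        }
        // for finished applications, app.path("finalStatus") is also of interest
        return app.path("state").asText(null);
    }
}
```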
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,262
[Bug][dolphinscheduler-common] When you request the URL through applicationID to get the application status, you cannot get it if Kerberos authentication is enabled
When you request the URL through applicationID to get the application status, you cannot get it if Kerberos authentication is enabled eg: ![image](https://user-images.githubusercontent.com/59079269/88059613-b0818580-cb97-11ea-892b-3c3f94c32652.png) yarn.application.status.address=http://ark1:8088/ws/v1/cluster/apps/%s
https://github.com/apache/dolphinscheduler/issues/3262
https://github.com/apache/dolphinscheduler/pull/3264
232a24441bbb559c18739a74abb69ecdb718e158
5584f0cb4d27e3ae64ec28c65a8669b38c75d188
"2020-07-21T13:20:54Z"
java
"2020-08-10T07:18:01Z"
dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/HttpUtilsTest.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.dolphinscheduler.common.utils;

import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.http.impl.client.CloseableHttpClient;
import org.junit.Assert;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * HttpClient utils test
 */
public class HttpUtilsTest {
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,262
[Bug][dolphinscheduler-common] When you request the URL through applicationID to get the application status, you cannot get it if Kerberos authentication is enabled
When you request the URL through applicationID to get the application status, you cannot get it if Kerberos authentication is enabled eg: ![image](https://user-images.githubusercontent.com/59079269/88059613-b0818580-cb97-11ea-892b-3c3f94c32652.png) yarn.application.status.address=http://ark1:8088/ws/v1/cluster/apps/%s
https://github.com/apache/dolphinscheduler/issues/3262
https://github.com/apache/dolphinscheduler/pull/3264
232a24441bbb559c18739a74abb69ecdb718e158
5584f0cb4d27e3ae64ec28c65a8669b38c75d188
"2020-07-21T13:20:54Z"
java
"2020-08-10T07:18:01Z"
dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/HttpUtilsTest.java
    public static final Logger logger = LoggerFactory.getLogger(HttpUtilsTest.class);

    @Test
    public void testGetTest(){
        String result = HttpUtils.get("https://github.com/manifest.json");
        Assert.assertNotNull(result);
        ObjectNode jsonObject = JSONUtils.parseObject(result);
        Assert.assertEquals("GitHub", jsonObject.path("name").asText());

        result = HttpUtils.get("https://123.333.111.33/ccc");
        Assert.assertNull(result);
    }

    @Test
    public void testGetHttpClient() {
        CloseableHttpClient httpClient1 = HttpUtils.getInstance();
        CloseableHttpClient httpClient2 = HttpUtils.getInstance();
        Assert.assertEquals(httpClient1, httpClient2);
    }
}
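`testGetTest` above depends on github.com being reachable and on an unroutable address timing out, which makes it slow and flaky offline. As an illustration only, here is a hedged sketch of a self-contained variant that serves a fixed JSON document from the JDK's built-in `HttpServer` on an ephemeral port and exercises `HttpUtils.get` against it; it is not the project's actual test.

```java
package org.apache.dolphinscheduler.common.utils;

import java.io.OutputStream;
import java.net.InetSocketAddress;
import java.nio.charset.StandardCharsets;

import com.sun.net.httpserver.HttpServer;
import org.junit.Assert;
import org.junit.Test;

public class HttpUtilsLocalServerSketchTest {

    @Test
    public void testGetAgainstLocalServer() throws Exception {
        // serve a fixed JSON document on an ephemeral port
        HttpServer server = HttpServer.create(new InetSocketAddress(0), 0);
        byte[] payload = "{\"name\":\"GitHub\"}".getBytes(StandardCharsets.UTF_8);
        server.createContext("/manifest.json", exchange -> {
            exchange.sendResponseHeaders(200, payload.length);
            try (OutputStream os = exchange.getResponseBody()) {
                os.write(payload);
            }
        });
        server.start();
        try {
            int port = server.getAddress().getPort();
            String result = HttpUtils.get("http://127.0.0.1:" + port + "/manifest.json");
            Assert.assertNotNull(result);
            Assert.assertTrue(result.contains("GitHub"));
        } finally {
            server.stop(0);
        }
    }
}
```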
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,466
[Bug][Worker] ZooKeeper path must not end with / character
**Describe the bug** the worker registry path ends with the / character, so it throws an exception **To Reproduce** **Expected behavior** **Screenshots** ![image](https://user-images.githubusercontent.com/10829956/89892178-de8f3e00-dc08-11ea-9e60-20eaf7dd2273.png) **Which version of Dolphin Scheduler:** -[dev]
https://github.com/apache/dolphinscheduler/issues/3466
https://github.com/apache/dolphinscheduler/pull/3470
d6a32ac65225fa4f8d9a80c09c5ea8a29e392dac
a7a1156ff1002031c87c29bd737d8f9f0488cea1
"2020-08-11T11:49:59Z"
java
"2020-08-12T09:16:43Z"
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/registry/WorkerRegistry.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.dolphinscheduler.server.worker.registry;

import java.util.Date;
import java.util.Set;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

import javax.annotation.PostConstruct;

import org.apache.curator.framework.CuratorFramework;
import org.apache.curator.framework.state.ConnectionState;
import org.apache.curator.framework.state.ConnectionStateListener;
import org.apache.dolphinscheduler.common.utils.DateUtils;
import org.apache.dolphinscheduler.common.utils.NetUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.remote.utils.NamedThreadFactory;
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,466
[Bug][Worker] ZooKeeper path must not end with / character
**Describe the bug** the worker registry path ends with the / character, so it throws an exception **To Reproduce** **Expected behavior** **Screenshots** ![image](https://user-images.githubusercontent.com/10829956/89892178-de8f3e00-dc08-11ea-9e60-20eaf7dd2273.png) **Which version of Dolphin Scheduler:** -[dev]
https://github.com/apache/dolphinscheduler/issues/3466
https://github.com/apache/dolphinscheduler/pull/3470
d6a32ac65225fa4f8d9a80c09c5ea8a29e392dac
a7a1156ff1002031c87c29bd737d8f9f0488cea1
"2020-08-11T11:49:59Z"
java
"2020-08-12T09:16:43Z"
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/registry/WorkerRegistry.java
import org.apache.dolphinscheduler.server.registry.HeartBeatTask;
import org.apache.dolphinscheduler.server.registry.ZookeeperRegistryCenter;
import org.apache.dolphinscheduler.server.worker.config.WorkerConfig;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import com.google.common.collect.Sets;

import static org.apache.dolphinscheduler.common.Constants.*;

/**
 * worker registry
 */
@Service
public class WorkerRegistry {

    private final Logger logger = LoggerFactory.getLogger(WorkerRegistry.class);

    /**
     * zookeeper registry center
     */
    @Autowired
    private ZookeeperRegistryCenter zookeeperRegistryCenter;

    /**
     * worker config
     */
    @Autowired
    private WorkerConfig workerConfig;

    /**
     * heartbeat executor
     */
    private ScheduledExecutorService heartBeatExecutor;

    /**
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,466
[Bug][Worker] ZooKeeper path must not end with / character
**Describe the bug** the worker registry path ends with the / character, so it throws an exception **To Reproduce** **Expected behavior** **Screenshots** ![image](https://user-images.githubusercontent.com/10829956/89892178-de8f3e00-dc08-11ea-9e60-20eaf7dd2273.png) **Which version of Dolphin Scheduler:** -[dev]
https://github.com/apache/dolphinscheduler/issues/3466
https://github.com/apache/dolphinscheduler/pull/3470
d6a32ac65225fa4f8d9a80c09c5ea8a29e392dac
a7a1156ff1002031c87c29bd737d8f9f0488cea1
"2020-08-11T11:49:59Z"
java
"2020-08-12T09:16:43Z"
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/registry/WorkerRegistry.java
     * worker start time
     */
    private String startTime;

    private Set<String> workerGroups;

    @PostConstruct
    public void init() {
        this.workerGroups = workerConfig.getWorkerGroups();
        this.startTime = DateUtils.dateToString(new Date());
        this.heartBeatExecutor = Executors.newSingleThreadScheduledExecutor(new NamedThreadFactory("HeartBeatExecutor"));
    }

    /**
     * registry
     */
    public void registry() {
        String address = NetUtils.getHost();
        Set<String> workerZkPaths = getWorkerZkPaths();
        int workerHeartbeatInterval = workerConfig.getWorkerHeartbeatInterval();

        for (String workerZKPath : workerZkPaths) {
            zookeeperRegistryCenter.getZookeeperCachedOperator().persistEphemeral(workerZKPath, "");
            zookeeperRegistryCenter.getZookeeperCachedOperator().getZkClient().getConnectionStateListenable().addListener(new ConnectionStateListener() {
                @Override
                public void stateChanged(CuratorFramework client, ConnectionState newState) {
                    if (newState == ConnectionState.LOST) {
                        logger.error("worker : {} connection lost from zookeeper", address);
                    } else if (newState == ConnectionState.RECONNECTED) {
                        logger.info("worker : {} reconnected to zookeeper", address);
                        zookeeperRegistryCenter.getZookeeperCachedOperator().persistEphemeral(workerZKPath, "");
                    } else if (newState == ConnectionState.SUSPENDED) {
                        logger.warn("worker : {} connection SUSPENDED ", address);
                    }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,466
[Bug][Worker] ZooKeeper path must not end with / character
**Describe the bug** the worker registry path ends with the / character, so it throws an exception **To Reproduce** **Expected behavior** **Screenshots** ![image](https://user-images.githubusercontent.com/10829956/89892178-de8f3e00-dc08-11ea-9e60-20eaf7dd2273.png) **Which version of Dolphin Scheduler:** -[dev]
https://github.com/apache/dolphinscheduler/issues/3466
https://github.com/apache/dolphinscheduler/pull/3470
d6a32ac65225fa4f8d9a80c09c5ea8a29e392dac
a7a1156ff1002031c87c29bd737d8f9f0488cea1
"2020-08-11T11:49:59Z"
java
"2020-08-12T09:16:43Z"
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/registry/WorkerRegistry.java
                }
            });
            logger.info("worker node : {} registry to ZK {} successfully", address, workerZKPath);
        }

        HeartBeatTask heartBeatTask = new HeartBeatTask(this.startTime,
                this.workerConfig.getWorkerReservedMemory(),
                this.workerConfig.getWorkerMaxCpuloadAvg(),
                workerZkPaths,
                this.zookeeperRegistryCenter);

        this.heartBeatExecutor.scheduleAtFixedRate(heartBeatTask, workerHeartbeatInterval, workerHeartbeatInterval, TimeUnit.SECONDS);
        logger.info("worker node : {} heartbeat interval {} s", address, workerHeartbeatInterval);
    }

    /**
     * remove registry info
     */
    public void unRegistry() {
        String address = getLocalAddress();
        Set<String> workerZkPaths = getWorkerZkPaths();
        for (String workerZkPath : workerZkPaths) {
            zookeeperRegistryCenter.getZookeeperCachedOperator().remove(workerZkPath);
            logger.info("worker node : {} unRegistry from ZK {}.", address, workerZkPath);
        }
        this.heartBeatExecutor.shutdownNow();
    }

    /**
     * get worker path
     */
    private Set<String> getWorkerZkPaths() {
        Set<String> workerZkPaths = Sets.newHashSet();

        String address = getLocalAddress();
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,466
[Bug][Worker] ZooKeeper path must not end with / character
**Describe the bug** the worker registry path ends with the / character, so it throws an exception **To Reproduce** **Expected behavior** **Screenshots** ![image](https://user-images.githubusercontent.com/10829956/89892178-de8f3e00-dc08-11ea-9e60-20eaf7dd2273.png) **Which version of Dolphin Scheduler:** -[dev]
https://github.com/apache/dolphinscheduler/issues/3466
https://github.com/apache/dolphinscheduler/pull/3470
d6a32ac65225fa4f8d9a80c09c5ea8a29e392dac
a7a1156ff1002031c87c29bd737d8f9f0488cea1
"2020-08-11T11:49:59Z"
java
"2020-08-12T09:16:43Z"
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/registry/WorkerRegistry.java
        String workerZkPathPrefix = this.zookeeperRegistryCenter.getWorkerPath();
        String weight = getWorkerWeight();

        for (String workGroup : this.workerGroups) {
            StringBuilder workerZkPathBuilder = new StringBuilder(100);
            workerZkPathBuilder.append(workerZkPathPrefix).append(SLASH);
            if (StringUtils.isEmpty(workGroup)) {
                workGroup = DEFAULT_WORKER_GROUP;
            }
            workerZkPathBuilder.append(workGroup.trim().toLowerCase()).append(SLASH);
            workerZkPathBuilder.append(address);
            workerZkPathBuilder.append(weight).append(SLASH);
            workerZkPaths.add(workerZkPathBuilder.toString());
        }
        return workerZkPaths;
    }

    /**
     * get local address
     */
    private String getLocalAddress() {
        return NetUtils.getHost() + ":" + workerConfig.getListenPort();
    }

    /**
     * get Worker Weight
     */
    private String getWorkerWeight() {
        return ":" + workerConfig.getWeight();
    }
}
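This chunk also shows where the reported `IllegalArgumentException: Path must not end with / character` can come from: the builder appends `SLASH` after the weight, so the registered node ends up as `.../workerGroup/host:port:weight/`. The sketch below is one hedged way to assemble the path so that nothing follows the weight; the `SLASH` and `DEFAULT_WORKER_GROUP` constants are redefined locally to keep the example standalone, and this is not the project's actual patch.

```java
import java.util.HashSet;
import java.util.Set;

public class WorkerZkPathSketch {

    private static final String SLASH = "/";
    private static final String DEFAULT_WORKER_GROUP = "default";

    /**
     * Build worker registry paths without a trailing slash.
     * address is expected as "host:port" and weight as ":weight",
     * mirroring getLocalAddress() and getWorkerWeight() above.
     */
    public static Set<String> buildWorkerZkPaths(String workerPathPrefix,
                                                 Set<String> workerGroups,
                                                 String address,
                                                 String weight) {
        Set<String> paths = new HashSet<>();
        for (String group : workerGroups) {
            String normalized = (group == null || group.trim().isEmpty())
                    ? DEFAULT_WORKER_GROUP
                    : group.trim().toLowerCase();
            // nothing is appended after the weight, so the path never ends with "/"
            paths.add(workerPathPrefix + SLASH + normalized + SLASH + address + weight);
        }
        return paths;
    }
}
```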
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,463
[Bug][api] renaming the udf resource file associated with the udf function causes the hive task to fail
1. Rename the udf resource file associated with the udf function ![image](https://user-images.githubusercontent.com/55787491/89854573-ceef0580-dbc6-11ea-90d4-2b4587d37f0f.png) ![image](https://user-images.githubusercontent.com/55787491/89854587-d9a99a80-dbc6-11ea-8b1a-f0b189824287.png) 2. Executing the hive task fails ![image](https://user-images.githubusercontent.com/55787491/89854496-9bac7680-dbc6-11ea-8ab4-e6ab2bd08ed2.png) **Which version of Dolphin Scheduler:** -[1.3.2-release]
https://github.com/apache/dolphinscheduler/issues/3463
https://github.com/apache/dolphinscheduler/pull/3482
a678c827600d44623f30311574b8226c1c59ace2
c8322482bbd021a89c407809abfcdd50cf3b2dc6
"2020-08-11T03:37:09Z"
java
"2020-08-13T08:19:11Z"
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ResourcesService.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.dolphinscheduler.api.service;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.serializer.SerializerFeature;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import org.apache.commons.collections.BeanMap;
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,463
[Bug][api] renaming the udf resource file associated with the udf function causes the hive task to fail
1. Rename the udf resource file associated with the udf function ![image](https://user-images.githubusercontent.com/55787491/89854573-ceef0580-dbc6-11ea-90d4-2b4587d37f0f.png) ![image](https://user-images.githubusercontent.com/55787491/89854587-d9a99a80-dbc6-11ea-8b1a-f0b189824287.png) 2. Executing the hive task fails ![image](https://user-images.githubusercontent.com/55787491/89854496-9bac7680-dbc6-11ea-8ab4-e6ab2bd08ed2.png) **Which version of Dolphin Scheduler:** -[1.3.2-release]
https://github.com/apache/dolphinscheduler/issues/3463
https://github.com/apache/dolphinscheduler/pull/3482
a678c827600d44623f30311574b8226c1c59ace2
c8322482bbd021a89c407809abfcdd50cf3b2dc6
"2020-08-11T03:37:09Z"
java
"2020-08-13T08:19:11Z"
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ResourcesService.java
import org.apache.dolphinscheduler.api.dto.resources.ResourceComponent;
import org.apache.dolphinscheduler.api.dto.resources.filter.ResourceFilter;
import org.apache.dolphinscheduler.api.dto.resources.visitor.ResourceTreeVisitor;
import org.apache.dolphinscheduler.api.dto.resources.visitor.Visitor;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.exceptions.ServiceException;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.ResourceType;
import org.apache.dolphinscheduler.common.utils.*;
import org.apache.dolphinscheduler.dao.entity.*;
import org.apache.dolphinscheduler.dao.mapper.*;
import org.apache.dolphinscheduler.dao.utils.ResourceProcessDefinitionUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.web.multipart.MultipartFile;

import java.io.IOException;
import java.text.MessageFormat;
import java.util.*;
import java.util.regex.Matcher;
import java.util.stream.Collectors;

import static org.apache.dolphinscheduler.common.Constants.*;

/**
 * resources service
 */
@Service
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,463
[Bug][api] renaming the udf resource file associated with the udf function causes the hive task to fail
1. Rename the udf resource file associated with the udf function ![image](https://user-images.githubusercontent.com/55787491/89854573-ceef0580-dbc6-11ea-90d4-2b4587d37f0f.png) ![image](https://user-images.githubusercontent.com/55787491/89854587-d9a99a80-dbc6-11ea-8b1a-f0b189824287.png) 2. Executing the hive task fails ![image](https://user-images.githubusercontent.com/55787491/89854496-9bac7680-dbc6-11ea-8ab4-e6ab2bd08ed2.png) **Which version of Dolphin Scheduler:** -[1.3.2-release]
https://github.com/apache/dolphinscheduler/issues/3463
https://github.com/apache/dolphinscheduler/pull/3482
a678c827600d44623f30311574b8226c1c59ace2
c8322482bbd021a89c407809abfcdd50cf3b2dc6
"2020-08-11T03:37:09Z"
java
"2020-08-13T08:19:11Z"
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ResourcesService.java
public class ResourcesService extends BaseService {

    private static final Logger logger = LoggerFactory.getLogger(ResourcesService.class);

    @Autowired
    private ResourceMapper resourcesMapper;

    @Autowired
    private UdfFuncMapper udfFunctionMapper;

    @Autowired
    private TenantMapper tenantMapper;

    @Autowired
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,463
[Bug][api] renaming the udf resource file associated with the udf function causes the hive task to fail
1. Rename the udf resource file associated with the udf function ![image](https://user-images.githubusercontent.com/55787491/89854573-ceef0580-dbc6-11ea-90d4-2b4587d37f0f.png) ![image](https://user-images.githubusercontent.com/55787491/89854587-d9a99a80-dbc6-11ea-8b1a-f0b189824287.png) 2. Executing the hive task fails ![image](https://user-images.githubusercontent.com/55787491/89854496-9bac7680-dbc6-11ea-8ab4-e6ab2bd08ed2.png) **Which version of Dolphin Scheduler:** -[1.3.2-release]
https://github.com/apache/dolphinscheduler/issues/3463
https://github.com/apache/dolphinscheduler/pull/3482
a678c827600d44623f30311574b8226c1c59ace2
c8322482bbd021a89c407809abfcdd50cf3b2dc6
"2020-08-11T03:37:09Z"
java
"2020-08-13T08:19:11Z"
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ResourcesService.java
    private UserMapper userMapper;

    @Autowired
    private ResourceUserMapper resourceUserMapper;

    @Autowired
    private ProcessDefinitionMapper processDefinitionMapper;

    /**
     * create directory
     *
     * @param loginUser login user
     * @param name alias
     * @param description description
     * @param type type
     * @param pid parent id
     * @param currentDir current directory
     * @return create directory result
     */
    @Transactional(rollbackFor = Exception.class)
    public Result createDirectory(User loginUser, String name, String description, ResourceType type, int pid, String currentDir) {
        Result result = new Result();
        if (!PropertyUtils.getResUploadStartupState()){
            logger.error("resource upload startup state: {}", PropertyUtils.getResUploadStartupState());
            putMsg(result, Status.HDFS_NOT_STARTUP);
            return result;
        }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,463
[Bug][api] renaming the udf resource file associated with the udf function causes the hive task to fail
1. Rename the udf resource file associated with the udf function ![image](https://user-images.githubusercontent.com/55787491/89854573-ceef0580-dbc6-11ea-90d4-2b4587d37f0f.png) ![image](https://user-images.githubusercontent.com/55787491/89854587-d9a99a80-dbc6-11ea-8b1a-f0b189824287.png) 2. Executing the hive task fails ![image](https://user-images.githubusercontent.com/55787491/89854496-9bac7680-dbc6-11ea-8ab4-e6ab2bd08ed2.png) **Which version of Dolphin Scheduler:** -[1.3.2-release]
https://github.com/apache/dolphinscheduler/issues/3463
https://github.com/apache/dolphinscheduler/pull/3482
a678c827600d44623f30311574b8226c1c59ace2
c8322482bbd021a89c407809abfcdd50cf3b2dc6
"2020-08-11T03:37:09Z"
java
"2020-08-13T08:19:11Z"
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ResourcesService.java
        String fullName = currentDir.equals("/") ? String.format("%s%s",currentDir,name):String.format("%s/%s",currentDir,name);

        if (pid != -1) {
            Resource parentResource = resourcesMapper.selectById(pid);

            if (parentResource == null) {
                putMsg(result, Status.PARENT_RESOURCE_NOT_EXIST);
                return result;
            }

            if (!hasPerm(loginUser, parentResource.getUserId())) {
                putMsg(result, Status.USER_NO_OPERATION_PERM);
                return result;
            }
        }

        if (checkResourceExists(fullName, 0, type.ordinal())) {
            logger.error("resource directory {} has exist, can't recreate", fullName);
            putMsg(result, Status.RESOURCE_EXIST);
            return result;
        }

        Date now = new Date();

        Resource resource = new Resource(pid,name,fullName,true,description,name,loginUser.getId(),type,0,now,now);

        try {
            resourcesMapper.insert(resource);

            putMsg(result, Status.SUCCESS);
            Map<Object, Object> dataMap = new BeanMap(resource);
            Map<String, Object> resultMap = new HashMap<String, Object>();
            for (Map.Entry<Object, Object> entry: dataMap.entrySet()) {
                if (!"class".equalsIgnoreCase(entry.getKey().toString())) {
                    resultMap.put(entry.getKey().toString(), entry.getValue());
                }
            }
            result.setData(resultMap);
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,463
[Bug][api] renaming the udf resource file associated with the udf function causes the hive task to fail
1. Rename the udf resource file associated with the udf function ![image](https://user-images.githubusercontent.com/55787491/89854573-ceef0580-dbc6-11ea-90d4-2b4587d37f0f.png) ![image](https://user-images.githubusercontent.com/55787491/89854587-d9a99a80-dbc6-11ea-8b1a-f0b189824287.png) 2. Executing the hive task fails ![image](https://user-images.githubusercontent.com/55787491/89854496-9bac7680-dbc6-11ea-8ab4-e6ab2bd08ed2.png) **Which version of Dolphin Scheduler:** -[1.3.2-release]
https://github.com/apache/dolphinscheduler/issues/3463
https://github.com/apache/dolphinscheduler/pull/3482
a678c827600d44623f30311574b8226c1c59ace2
c8322482bbd021a89c407809abfcdd50cf3b2dc6
"2020-08-11T03:37:09Z"
java
"2020-08-13T08:19:11Z"
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ResourcesService.java
        } catch (Exception e) {
            logger.error("resource already exists, can't recreate ", e);
            throw new RuntimeException("resource already exists, can't recreate");
        }
        createDirecotry(loginUser,fullName,type,result);
        return result;
    }

    /**
     * create resource
     *
     * @param loginUser login user
     * @param name alias
     * @param desc description
     * @param file file
     * @param type type
     * @param pid parent id
     * @param currentDir current directory
     * @return create result code
     */
    @Transactional(rollbackFor = Exception.class)
    public Result createResource(User loginUser, String name, String desc, ResourceType type, MultipartFile file, int pid, String currentDir) {
        Result result = new Result();
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,463
[Bug][api] renaming the udf resource file associated with the udf function causes the hive task to fail
1. Rename the udf resource file associated with the udf function ![image](https://user-images.githubusercontent.com/55787491/89854573-ceef0580-dbc6-11ea-90d4-2b4587d37f0f.png) ![image](https://user-images.githubusercontent.com/55787491/89854587-d9a99a80-dbc6-11ea-8b1a-f0b189824287.png) 2. Executing the hive task fails ![image](https://user-images.githubusercontent.com/55787491/89854496-9bac7680-dbc6-11ea-8ab4-e6ab2bd08ed2.png) **Which version of Dolphin Scheduler:** -[1.3.2-release]
https://github.com/apache/dolphinscheduler/issues/3463
https://github.com/apache/dolphinscheduler/pull/3482
a678c827600d44623f30311574b8226c1c59ace2
c8322482bbd021a89c407809abfcdd50cf3b2dc6
"2020-08-11T03:37:09Z"
java
"2020-08-13T08:19:11Z"
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ResourcesService.java
        if (!PropertyUtils.getResUploadStartupState()){
            logger.error("resource upload startup state: {}", PropertyUtils.getResUploadStartupState());
            putMsg(result, Status.HDFS_NOT_STARTUP);
            return result;
        }

        if (pid != -1) {
            Resource parentResource = resourcesMapper.selectById(pid);

            if (parentResource == null) {
                putMsg(result, Status.PARENT_RESOURCE_NOT_EXIST);
                return result;
            }

            if (!hasPerm(loginUser, parentResource.getUserId())) {
                putMsg(result, Status.USER_NO_OPERATION_PERM);
                return result;
            }
        }

        if (file.isEmpty()) {
            logger.error("file is empty: {}", file.getOriginalFilename());
            putMsg(result, Status.RESOURCE_FILE_IS_EMPTY);
            return result;
        }

        String fileSuffix = FileUtils.suffix(file.getOriginalFilename());
        String nameSuffix = FileUtils.suffix(name);

        if (!(StringUtils.isNotEmpty(fileSuffix) && fileSuffix.equalsIgnoreCase(nameSuffix))) {
            /**
             * rename file suffix and original suffix must be consistent
             */
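For the UDF issue that opens this group of records, the visible symptom is that renaming a resource leaves the hive task failing with `ClassNotFound`, most likely because the UDF functions (and the HQL they generate) still point at the old file name. The sketch below is a hedged illustration of how a rename handler might push the new name into dependent `UdfFunc` rows; `listUdfByResourceId` is an assumed query method (the real mapper may expose a different signature), and this is not the actual fix that shipped.

```java
import java.util.List;

import org.apache.dolphinscheduler.dao.entity.UdfFunc;
import org.apache.dolphinscheduler.dao.mapper.UdfFuncMapper;

public class UdfRenameSketch {

    /**
     * Keep UDF function definitions in sync after a resource file rename.
     * If the stored resourceName is not rewritten, the generated
     * "create function ... using jar 'hdfs://...'" statement still points at
     * the old HDFS path and hive fails with ClassNotFound.
     */
    public static void propagateResourceRename(UdfFuncMapper udfFuncMapper,
                                               int resourceId,
                                               String oldFullName,
                                               String newFullName) {
        // listUdfByResourceId is an assumption about the mapper API, used here
        // only to illustrate the idea of locating dependent UDF functions
        List<UdfFunc> dependents = udfFuncMapper.listUdfByResourceId(new int[]{resourceId});
        for (UdfFunc udfFunc : dependents) {
            if (oldFullName.equals(udfFunc.getResourceName())) {
                udfFunc.setResourceName(newFullName);
                udfFuncMapper.updateById(udfFunc);
            }
        }
    }
}
```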