status: stringclasses (1 value)
repo_name: stringclasses (31 values)
repo_url: stringclasses (31 values)
issue_id: int64 (1 to 104k)
title: stringlengths (4 to 233)
body: stringlengths (0 to 186k)
issue_url: stringlengths (38 to 56)
pull_url: stringlengths (37 to 54)
before_fix_sha: stringlengths (40 to 40)
after_fix_sha: stringlengths (40 to 40)
report_datetime: unknown
language: stringclasses (5 values)
commit_datetime: unknown
updated_file: stringlengths (7 to 188)
chunk_content: stringlengths (1 to 1.03M)
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,985
[Feature][Datax] Datax supports setting up running memory
DataX default memory is 1g, when the amount of data is large, memory will not be enough So you need to support setting runtime JVM memory ![image](https://user-images.githubusercontent.com/39816903/97070670-d8f0a400-160c-11eb-89ca-50b3244f6a61.png)
https://github.com/apache/dolphinscheduler/issues/3985
https://github.com/apache/dolphinscheduler/pull/3986
89f1e93bcf936b527856f658e33fe38ead5ec8b9
fe3026627fc2d38da08ae396724cc61bc922374a
"2020-10-24T07:25:07Z"
java
"2020-11-16T02:55:20Z"
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/datax/DataxTask.java
sql = String.format("SELECT t.* FROM ( %s ) t WHERE 0 = 1", sql); sql = sql.replace(";", ""); try ( Connection connection = DriverManager.getConnection(baseDataSource.getJdbcUrl(), baseDataSource.getUser(), baseDataSource.getPassword()); PreparedStatement stmt = connection.prepareStatement(sql); ResultSet resultSet = stmt.executeQuery()) { ResultSetMetaData md = resultSet.getMetaData(); int num = md.getColumnCount(); columnNames = new String[num]; for (int i = 1; i <= num; i++ ) { columnNames[i - 1] = md.getColumnName(i); } } catch (SQLException e) { logger.warn(e.getMessage(), e); return null; } return columnNames; } @Override public AbstractParameters getParameters() { return dataXParameters; } private void notNull(Object obj, String message) { if (obj == null) { throw new RuntimeException(message); } } }
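The feature request in this row asks for a configurable DataX JVM heap, since DataX's default of 1g runs out of memory on large transfers. As a hedged illustration only — the class name, parameter handling, and command layout below are assumptions for this sketch, not the code merged in the linked pull request — injecting an -Xms/-Xmx pair into the generated datax.py launch command could look roughly like this:

```java
// Illustrative sketch only: one way a user-supplied heap size could be passed through to
// DataX, which launches as a Python script and (in recent versions) accepts a --jvm option.
// The method name and the "xms"/"xmx" parameters are hypothetical, not DolphinScheduler API.
public class DataxCommandSketch {

    private static final int DEFAULT_MEMORY_GB = 1; // DataX ships with a 1g default heap

    /**
     * Builds the shell command that launches DataX, optionally overriding the JVM heap.
     *
     * @param jsonFilePath path of the generated DataX job JSON
     * @param xms          requested initial heap in GB (0 or negative keeps the default)
     * @param xmx          requested maximum heap in GB (0 or negative keeps the default)
     */
    public static String buildCommand(String jsonFilePath, int xms, int xmx) {
        StringBuilder sb = new StringBuilder();
        sb.append("python ${DATAX_HOME}/bin/datax.py");
        // Only emit the --jvm option when the task explicitly asks for more than the default.
        if (xms > 0 || xmx > 0) {
            int initial = xms > 0 ? xms : DEFAULT_MEMORY_GB;
            int max = xmx > 0 ? xmx : DEFAULT_MEMORY_GB;
            sb.append(" --jvm=\"-Xms").append(initial).append("G")
              .append(" -Xmx").append(max).append("G\"");
        }
        sb.append(" ").append(jsonFilePath);
        return sb.toString();
    }

    public static void main(String[] args) {
        // e.g. python ${DATAX_HOME}/bin/datax.py --jvm="-Xms4G -Xmx4G" /tmp/job.json
        System.out.println(buildCommand("/tmp/job.json", 4, 4));
    }
}
```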
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,985
[Feature][Datax] Datax supports setting up running memory
DataX default memory is 1g, when the amount of data is large, memory will not be enough So you need to support setting runtime JVM memory ![image](https://user-images.githubusercontent.com/39816903/97070670-d8f0a400-160c-11eb-89ca-50b3244f6a61.png)
https://github.com/apache/dolphinscheduler/issues/3985
https://github.com/apache/dolphinscheduler/pull/3986
89f1e93bcf936b527856f658e33fe38ead5ec8b9
fe3026627fc2d38da08ae396724cc61bc922374a
"2020-10-24T07:25:07Z"
java
"2020-11-16T02:55:20Z"
dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/datax/DataxTaskTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.server.worker.task.datax; import java.lang.reflect.Method; import java.util.Arrays; import java.util.Date; import java.util.List; import java.util.Map;
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,985
[Feature][Datax] Datax supports setting up running memory
DataX default memory is 1g, when the amount of data is large, memory will not be enough So you need to support setting runtime JVM memory ![image](https://user-images.githubusercontent.com/39816903/97070670-d8f0a400-160c-11eb-89ca-50b3244f6a61.png)
https://github.com/apache/dolphinscheduler/issues/3985
https://github.com/apache/dolphinscheduler/pull/3986
89f1e93bcf936b527856f658e33fe38ead5ec8b9
fe3026627fc2d38da08ae396724cc61bc922374a
"2020-10-24T07:25:07Z"
java
"2020-11-16T02:55:20Z"
dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/datax/DataxTaskTest.java
import java.util.UUID; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ObjectNode; import org.apache.dolphinscheduler.common.enums.DbType; import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.dao.datasource.BaseDataSource; import org.apache.dolphinscheduler.dao.datasource.DataSourceFactory; import org.apache.dolphinscheduler.dao.entity.DataSource; import org.apache.dolphinscheduler.dao.entity.ProcessInstance; import org.apache.dolphinscheduler.server.entity.DataxTaskExecutionContext; import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; import org.apache.dolphinscheduler.server.utils.DataxUtils; import org.apache.dolphinscheduler.server.worker.task.ShellCommandExecutor; import org.apache.dolphinscheduler.server.worker.task.TaskProps; import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; import org.apache.dolphinscheduler.service.process.ProcessService; import org.junit.After; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.mockito.Mockito; import org.powermock.api.mockito.PowerMockito; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.context.ApplicationContext; import static org.apache.dolphinscheduler.common.enums.CommandType.START_PROCESS; /** * DataxTask Tester. */ public class DataxTaskTest {
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,985
[Feature][Datax] Datax supports setting up running memory
DataX default memory is 1g, when the amount of data is large, memory will not be enough So you need to support setting runtime JVM memory ![image](https://user-images.githubusercontent.com/39816903/97070670-d8f0a400-160c-11eb-89ca-50b3244f6a61.png)
https://github.com/apache/dolphinscheduler/issues/3985
https://github.com/apache/dolphinscheduler/pull/3986
89f1e93bcf936b527856f658e33fe38ead5ec8b9
fe3026627fc2d38da08ae396724cc61bc922374a
"2020-10-24T07:25:07Z"
java
"2020-11-16T02:55:20Z"
dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/datax/DataxTaskTest.java
private static final Logger logger = LoggerFactory.getLogger(DataxTaskTest.class); private static final String CONNECTION_PARAMS = "{\"user\":\"root\",\"password\":\"123456\",\"address\":\"jdbc:mysql://127.0.0.1:3306\",\"database\":\"test\",\"jdbcUrl\":\"jdbc:mysql://127.0.0.1:3306/test\"}"; private DataxTask dataxTask; private ProcessService processService; private ShellCommandExecutor shellCommandExecutor; private ApplicationContext applicationContext; private TaskExecutionContext taskExecutionContext; private TaskProps props = new TaskProps(); @Before public void before() throws Exception { setTaskParems(0); } private void setTaskParems(Integer customConfig) { processService = Mockito.mock(ProcessService.class); shellCommandExecutor = Mockito.mock(ShellCommandExecutor.class); applicationContext = Mockito.mock(ApplicationContext.class); SpringApplicationContext springApplicationContext = new SpringApplicationContext(); springApplicationContext.setApplicationContext(applicationContext); Mockito.when(applicationContext.getBean(ProcessService.class)).thenReturn(processService); TaskProps props = new TaskProps(); props.setExecutePath("/tmp"); props.setTaskAppId(String.valueOf(System.currentTimeMillis())); props.setTaskInstanceId(1); props.setTenantCode("1"); props.setEnvFile(".dolphinscheduler_env.sh"); props.setTaskStartTime(new Date()); props.setTaskTimeout(0);
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,985
[Feature][Datax] Datax supports setting up running memory
DataX default memory is 1g, when the amount of data is large, memory will not be enough So you need to support setting runtime JVM memory ![image](https://user-images.githubusercontent.com/39816903/97070670-d8f0a400-160c-11eb-89ca-50b3244f6a61.png)
https://github.com/apache/dolphinscheduler/issues/3985
https://github.com/apache/dolphinscheduler/pull/3986
89f1e93bcf936b527856f658e33fe38ead5ec8b9
fe3026627fc2d38da08ae396724cc61bc922374a
"2020-10-24T07:25:07Z"
java
"2020-11-16T02:55:20Z"
dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/datax/DataxTaskTest.java
if (customConfig == 1) { props.setTaskParams( "{\"customConfig\":1, \"localParams\":[{\"prop\":\"test\",\"value\":\"38294729\"}],\"json\":\"{\\\"job\\\":{\\\"setting\\\":{\\\"speed\\\":{\\\"byte\\\":1048576},\\\"errorLimit\\\":{\\\"record\\\":0,\\\"percentage\\\":0.02}},\\\"content\\\":[{\\\"reader\\\":{\\\"name\\\":\\\"rdbmsreader\\\",\\\"parameter\\\":{\\\"username\\\":\\\"xxx\\\",\\\"password\\\":\\\"${test}\\\",\\\"column\\\":[\\\"id\\\",\\\"name\\\"],\\\"splitPk\\\":\\\"pk\\\",\\\"connection\\\":[{\\\"querySql\\\":[\\\"SELECT * from dual\\\"],\\\"jdbcUrl\\\":[\\\"jdbc:dm://ip:port/database\\\"]}],\\\"fetchSize\\\":1024,\\\"where\\\":\\\"1 = 1\\\"}},\\\"writer\\\":{\\\"name\\\":\\\"streamwriter\\\",\\\"parameter\\\":{\\\"print\\\":true}}}]}}\"}"); } else { props.setTaskParams( "{\"customConfig\":0,\"targetTable\":\"test\",\"postStatements\":[],\"jobSpeedByte\":1024,\"jobSpeedRecord\":1000,\"dtType\":\"MYSQL\",\"dataSource\":1,\"dsType\":\"MYSQL\",\"dataTarget\":2,\"jobSpeedByte\":0,\"sql\":\"select 1 as test from dual\",\"preStatements\":[\"delete from test\"],\"postStatements\":[\"delete from test\"]}"); } taskExecutionContext = Mockito.mock(TaskExecutionContext.class); Mockito.when(taskExecutionContext.getTaskParams()).thenReturn(props.getTaskParams()); Mockito.when(taskExecutionContext.getExecutePath()).thenReturn("/tmp"); Mockito.when(taskExecutionContext.getTaskAppId()).thenReturn(UUID.randomUUID().toString()); Mockito.when(taskExecutionContext.getTenantCode()).thenReturn("root"); Mockito.when(taskExecutionContext.getStartTime()).thenReturn(new Date()); Mockito.when(taskExecutionContext.getTaskTimeout()).thenReturn(10000); Mockito.when(taskExecutionContext.getLogPath()).thenReturn("/tmp/dx"); DataxTaskExecutionContext dataxTaskExecutionContext = new DataxTaskExecutionContext(); dataxTaskExecutionContext.setSourcetype(0); dataxTaskExecutionContext.setTargetType(0); dataxTaskExecutionContext.setSourceConnectionParams(CONNECTION_PARAMS); dataxTaskExecutionContext.setTargetConnectionParams(CONNECTION_PARAMS); Mockito.when(taskExecutionContext.getDataxTaskExecutionContext()).thenReturn(dataxTaskExecutionContext); dataxTask = PowerMockito.spy(new DataxTask(taskExecutionContext, logger)); dataxTask.init(); props.setCmdTypeIfComplement(START_PROCESS); Mockito.when(processService.findDataSourceById(1)).thenReturn(getDataSource()); Mockito.when(processService.findDataSourceById(2)).thenReturn(getDataSource()); Mockito.when(processService.findProcessInstanceByTaskId(1)).thenReturn(getProcessInstance()); String fileName = String.format("%s/%s_node.sh", props.getExecutePath(), props.getTaskAppId()); try { Mockito.when(shellCommandExecutor.run(fileName)).thenReturn(null);
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,985
[Feature][Datax] Datax supports setting up running memory
DataX default memory is 1g, when the amount of data is large, memory will not be enough So you need to support setting runtime JVM memory ![image](https://user-images.githubusercontent.com/39816903/97070670-d8f0a400-160c-11eb-89ca-50b3244f6a61.png)
https://github.com/apache/dolphinscheduler/issues/3985
https://github.com/apache/dolphinscheduler/pull/3986
89f1e93bcf936b527856f658e33fe38ead5ec8b9
fe3026627fc2d38da08ae396724cc61bc922374a
"2020-10-24T07:25:07Z"
java
"2020-11-16T02:55:20Z"
dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/datax/DataxTaskTest.java
} catch (Exception e) { e.printStackTrace(); } dataxTask = PowerMockito.spy(new DataxTask(taskExecutionContext, logger)); dataxTask.init(); } private DataSource getDataSource() { DataSource dataSource = new DataSource(); dataSource.setType(DbType.MYSQL); dataSource.setConnectionParams(CONNECTION_PARAMS); dataSource.setUserId(1); return dataSource; } private ProcessInstance getProcessInstance() { ProcessInstance processInstance = new ProcessInstance(); processInstance.setCommandType(START_PROCESS); processInstance.setScheduleTime(new Date()); return processInstance; } @After public void after() throws Exception { } /** * Method: DataxTask() */ @Test public void testDataxTask() throws Exception { TaskProps props = new TaskProps();
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,985
[Feature][Datax] Datax supports setting up running memory
DataX default memory is 1g, when the amount of data is large, memory will not be enough So you need to support setting runtime JVM memory ![image](https://user-images.githubusercontent.com/39816903/97070670-d8f0a400-160c-11eb-89ca-50b3244f6a61.png)
https://github.com/apache/dolphinscheduler/issues/3985
https://github.com/apache/dolphinscheduler/pull/3986
89f1e93bcf936b527856f658e33fe38ead5ec8b9
fe3026627fc2d38da08ae396724cc61bc922374a
"2020-10-24T07:25:07Z"
java
"2020-11-16T02:55:20Z"
dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/datax/DataxTaskTest.java
props.setExecutePath("/tmp"); props.setTaskAppId(String.valueOf(System.currentTimeMillis())); props.setTaskInstanceId(1); props.setTenantCode("1"); Assert.assertNotNull(new DataxTask(null, logger)); } /** * Method: init */ @Test public void testInit() throws Exception { try { dataxTask.init(); } catch (Exception e) { Assert.fail(e.getMessage()); } } /** * Method: handle() */ @Test public void testHandle() throws Exception { } /** * Method: cancelApplication() */ @Test public void testCancelApplication()
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,985
[Feature][Datax] Datax supports setting up running memory
DataX default memory is 1g, when the amount of data is large, memory will not be enough So you need to support setting runtime JVM memory ![image](https://user-images.githubusercontent.com/39816903/97070670-d8f0a400-160c-11eb-89ca-50b3244f6a61.png)
https://github.com/apache/dolphinscheduler/issues/3985
https://github.com/apache/dolphinscheduler/pull/3986
89f1e93bcf936b527856f658e33fe38ead5ec8b9
fe3026627fc2d38da08ae396724cc61bc922374a
"2020-10-24T07:25:07Z"
java
"2020-11-16T02:55:20Z"
dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/datax/DataxTaskTest.java
throws Exception { try { dataxTask.cancelApplication(true); } catch (Exception e) { Assert.fail(e.getMessage()); } } /** * Method: parsingSqlColumnNames(DbType dsType, DbType dtType, BaseDataSource * dataSourceCfg, String sql) */ @Test public void testParsingSqlColumnNames() throws Exception { try { BaseDataSource dataSource = DataSourceFactory.getDatasource(getDataSource().getType(), getDataSource().getConnectionParams()); Method method = DataxTask.class.getDeclaredMethod("parsingSqlColumnNames", DbType.class, DbType.class, BaseDataSource.class, String.class); method.setAccessible(true); String[] columns = (String[]) method.invoke(dataxTask, DbType.MYSQL, DbType.MYSQL, dataSource, "select 1 as a, 2 as `table` from dual"); Assert.assertNotNull(columns); Assert.assertTrue(columns.length == 2); Assert.assertEquals("[`a`, `table`]", Arrays.toString(columns)); } catch (Exception e) { Assert.fail(e.getMessage()); } } /** * Method: tryGrammaticalParsingSqlColumnNames(DbType dbType, String sql) */
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,985
[Feature][Datax] Datax supports setting up running memory
DataX default memory is 1g, when the amount of data is large, memory will not be enough So you need to support setting runtime JVM memory ![image](https://user-images.githubusercontent.com/39816903/97070670-d8f0a400-160c-11eb-89ca-50b3244f6a61.png)
https://github.com/apache/dolphinscheduler/issues/3985
https://github.com/apache/dolphinscheduler/pull/3986
89f1e93bcf936b527856f658e33fe38ead5ec8b9
fe3026627fc2d38da08ae396724cc61bc922374a
"2020-10-24T07:25:07Z"
java
"2020-11-16T02:55:20Z"
dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/datax/DataxTaskTest.java
@Test public void testTryGrammaticalAnalysisSqlColumnNames() throws Exception { try { Method method = DataxTask.class.getDeclaredMethod("tryGrammaticalAnalysisSqlColumnNames", DbType.class, String.class); method.setAccessible(true); String[] columns = (String[]) method.invoke(dataxTask, DbType.MYSQL, "select t1.a, t1.b from test t1 union all select a, t2.b from (select a, b from test) t2"); Assert.assertNotNull(columns); Assert.assertTrue(columns.length == 2); Assert.assertEquals("[a, b]", Arrays.toString(columns)); } catch (Exception e) { Assert.fail(e.getMessage()); } } /** * Method: tryExecuteSqlResolveColumnNames(BaseDataSource baseDataSource, * String sql) */ @Test public void testTryExecuteSqlResolveColumnNames() throws Exception { } /** * Method: buildDataxJsonFile() */ @Test public void testBuildDataxJsonFile() throws Exception { try {
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,985
[Feature][Datax] Datax supports setting up running memory
DataX default memory is 1g, when the amount of data is large, memory will not be enough So you need to support setting runtime JVM memory ![image](https://user-images.githubusercontent.com/39816903/97070670-d8f0a400-160c-11eb-89ca-50b3244f6a61.png)
https://github.com/apache/dolphinscheduler/issues/3985
https://github.com/apache/dolphinscheduler/pull/3986
89f1e93bcf936b527856f658e33fe38ead5ec8b9
fe3026627fc2d38da08ae396724cc61bc922374a
"2020-10-24T07:25:07Z"
java
"2020-11-16T02:55:20Z"
dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/datax/DataxTaskTest.java
setTaskParems(1); Method method = DataxTask.class.getDeclaredMethod("buildDataxJsonFile"); method.setAccessible(true); String filePath = (String) method.invoke(dataxTask, null); Assert.assertNotNull(filePath); } catch (Exception e) { Assert.fail(e.getMessage()); } } /** * Method: buildDataxJsonFile() */ @Test public void testBuildDataxJsonFile0() throws Exception { try { setTaskParems(0); Method method = DataxTask.class.getDeclaredMethod("buildDataxJsonFile"); method.setAccessible(true); String filePath = (String) method.invoke(dataxTask, null); Assert.assertNotNull(filePath); } catch (Exception e) { Assert.fail(e.getMessage()); } } /** * Method: buildDataxJobContentJson() */ @Test public void testBuildDataxJobContentJson()
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,985
[Feature][Datax] Datax supports setting up running memory
DataX default memory is 1g, when the amount of data is large, memory will not be enough So you need to support setting runtime JVM memory ![image](https://user-images.githubusercontent.com/39816903/97070670-d8f0a400-160c-11eb-89ca-50b3244f6a61.png)
https://github.com/apache/dolphinscheduler/issues/3985
https://github.com/apache/dolphinscheduler/pull/3986
89f1e93bcf936b527856f658e33fe38ead5ec8b9
fe3026627fc2d38da08ae396724cc61bc922374a
"2020-10-24T07:25:07Z"
java
"2020-11-16T02:55:20Z"
dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/datax/DataxTaskTest.java
throws Exception { try { Method method = DataxTask.class.getDeclaredMethod("buildDataxJobContentJson"); method.setAccessible(true); List<ObjectNode> contentList = (List<ObjectNode>) method.invoke(dataxTask, null); Assert.assertNotNull(contentList); ObjectNode content = contentList.get(0); JsonNode reader = JSONUtils.parseObject(content.path("reader").asText()); Assert.assertNotNull(reader); String readerPluginName = reader.path("name").asText(); Assert.assertEquals(DataxUtils.DATAX_READER_PLUGIN_MYSQL, readerPluginName); JsonNode writer = JSONUtils.parseObject(content.path("writer").asText()); Assert.assertNotNull(writer); String writerPluginName = writer.path("name").asText(); Assert.assertEquals(DataxUtils.DATAX_WRITER_PLUGIN_MYSQL, writerPluginName); } catch (Exception e) { Assert.fail(e.getMessage()); } } /** * Method: buildDataxJobSettingJson() */ @Test public void testBuildDataxJobSettingJson() throws Exception { try { Method method = DataxTask.class.getDeclaredMethod("buildDataxJobSettingJson"); method.setAccessible(true); JsonNode setting = (JsonNode) method.invoke(dataxTask, null); Assert.assertNotNull(setting);
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,985
[Feature][Datax] Datax supports setting up running memory
DataX default memory is 1g, when the amount of data is large, memory will not be enough So you need to support setting runtime JVM memory ![image](https://user-images.githubusercontent.com/39816903/97070670-d8f0a400-160c-11eb-89ca-50b3244f6a61.png)
https://github.com/apache/dolphinscheduler/issues/3985
https://github.com/apache/dolphinscheduler/pull/3986
89f1e93bcf936b527856f658e33fe38ead5ec8b9
fe3026627fc2d38da08ae396724cc61bc922374a
"2020-10-24T07:25:07Z"
java
"2020-11-16T02:55:20Z"
dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/datax/DataxTaskTest.java
Assert.assertNotNull(setting.get("speed")); Assert.assertNotNull(setting.get("errorLimit")); } catch (Exception e) { Assert.fail(e.getMessage()); } } /** * Method: buildDataxCoreJson() */ @Test public void testBuildDataxCoreJson() throws Exception { try { Method method = DataxTask.class.getDeclaredMethod("buildDataxCoreJson"); method.setAccessible(true); ObjectNode coreConfig = (ObjectNode) method.invoke(dataxTask, null); Assert.assertNotNull(coreConfig); Assert.assertNotNull(coreConfig.get("transport")); } catch (Exception e) { Assert.fail(e.getMessage()); } } /** * Method: buildShellCommandFile(String jobConfigFilePath) */ @Test public void testBuildShellCommandFile() throws Exception { try { Method method = DataxTask.class.getDeclaredMethod("buildShellCommandFile", String.class);
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,985
[Feature][Datax] Datax supports setting up running memory
DataX default memory is 1g, when the amount of data is large, memory will not be enough So you need to support setting runtime JVM memory ![image](https://user-images.githubusercontent.com/39816903/97070670-d8f0a400-160c-11eb-89ca-50b3244f6a61.png)
https://github.com/apache/dolphinscheduler/issues/3985
https://github.com/apache/dolphinscheduler/pull/3986
89f1e93bcf936b527856f658e33fe38ead5ec8b9
fe3026627fc2d38da08ae396724cc61bc922374a
"2020-10-24T07:25:07Z"
java
"2020-11-16T02:55:20Z"
dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/datax/DataxTaskTest.java
method.setAccessible(true); Assert.assertNotNull(method.invoke(dataxTask, "test.json")); } catch (Exception e) { Assert.fail(e.getMessage()); } } /** * Method: getParameters */ @Test public void testGetParameters() throws Exception { Assert.assertTrue(dataxTask.getParameters() != null); } /** * Method: notNull(Object obj, String message) */ @Test public void testNotNull() throws Exception { try { Method method = DataxTask.class.getDeclaredMethod("notNull", Object.class, String.class); method.setAccessible(true); method.invoke(dataxTask, "abc", "test throw RuntimeException"); } catch (Exception e) { Assert.fail(e.getMessage()); } } }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
4,104
[Bug][Sqoop] when jdbc connect string contains special characters, import fail
**Describe the bug** when jdbc connect string contains special characters, import fail **To Reproduce** Steps to reproduce the behavior, for example: 1. sqoop import data source string contains special characters like "&" 2. run sqoop task 3. find command not found error **Expected behavior** sqoop import RDBMS success **Which version of Dolphin Scheduler:** -[ds-1.3.3]
https://github.com/apache/dolphinscheduler/issues/4104
https://github.com/apache/dolphinscheduler/pull/4105
656ec295b9e09468fc6a871df821ba42436c5e57
1cf40e1d1e4379e6b50a92871987d59291ccfd50
"2020-11-25T15:22:52Z"
java
"2020-11-27T01:46:23Z"
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/sources/MysqlSourceGenerator.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.server.worker.task.sqoop.generator.sources; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.DbType;
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
4,104
[Bug][Sqoop] when jdbc connect string contains special characters, import fail
**Describe the bug** when jdbc connect string contains special characters, import fail **To Reproduce** Steps to reproduce the behavior, for example: 1. sqoop import data source string contains special characters like "&" 2. run sqoop task 3. find command not found error **Expected behavior** sqoop import RDBMS success **Which version of Dolphin Scheduler:** -[ds-1.3.3]
https://github.com/apache/dolphinscheduler/issues/4104
https://github.com/apache/dolphinscheduler/pull/4105
656ec295b9e09468fc6a871df821ba42436c5e57
1cf40e1d1e4379e6b50a92871987d59291ccfd50
"2020-11-25T15:22:52Z"
java
"2020-11-27T01:46:23Z"
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/sources/MysqlSourceGenerator.java
import org.apache.dolphinscheduler.common.enums.SqoopQueryType; import org.apache.dolphinscheduler.common.process.Property; import org.apache.dolphinscheduler.common.task.sqoop.SqoopParameters; import org.apache.dolphinscheduler.common.task.sqoop.sources.SourceMysqlParameter; import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.common.utils.StringUtils; import org.apache.dolphinscheduler.dao.datasource.BaseDataSource; import org.apache.dolphinscheduler.dao.datasource.DataSourceFactory; import org.apache.dolphinscheduler.server.entity.SqoopTaskExecutionContext; import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; import org.apache.dolphinscheduler.server.worker.task.sqoop.SqoopConstants; import org.apache.dolphinscheduler.server.worker.task.sqoop.generator.ISourceGenerator; import java.util.List; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * mysql source generator */ public class MysqlSourceGenerator implements ISourceGenerator { private static final Logger logger = LoggerFactory.getLogger(MysqlSourceGenerator.class); @Override public String generate(SqoopParameters sqoopParameters, TaskExecutionContext taskExecutionContext) { StringBuilder mysqlSourceSb = new StringBuilder(); try { SourceMysqlParameter sourceMysqlParameter = JSONUtils.parseObject(sqoopParameters.getSourceParams(), SourceMysqlParameter.class); SqoopTaskExecutionContext sqoopTaskExecutionContext = taskExecutionContext.getSqoopTaskExecutionContext(); if (null != sourceMysqlParameter) { BaseDataSource baseDataSource = DataSourceFactory.getDatasource(DbType.of(sqoopTaskExecutionContext.getSourcetype()), sqoopTaskExecutionContext.getSourceConnectionParams()); if (null != baseDataSource) {
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
4,104
[Bug][Sqoop] when jdbc connect string contains special characters, import fail
**Describe the bug** when jdbc connect string contains special characters, import fail **To Reproduce** Steps to reproduce the behavior, for example: 1. sqoop import data source string contains special characters like "&" 2. run sqoop task 3. find command not found error **Expected behavior** sqoop import RDBMS success **Which version of Dolphin Scheduler:** -[ds-1.3.3]
https://github.com/apache/dolphinscheduler/issues/4104
https://github.com/apache/dolphinscheduler/pull/4105
656ec295b9e09468fc6a871df821ba42436c5e57
1cf40e1d1e4379e6b50a92871987d59291ccfd50
"2020-11-25T15:22:52Z"
java
"2020-11-27T01:46:23Z"
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/sources/MysqlSourceGenerator.java
mysqlSourceSb.append(Constants.SPACE).append(SqoopConstants.DB_CONNECT) .append(Constants.SPACE).append(baseDataSource.getJdbcUrl()) .append(Constants.SPACE).append(SqoopConstants.DB_USERNAME) .append(Constants.SPACE).append(baseDataSource.getUser()) .append(Constants.SPACE).append(SqoopConstants.DB_PWD) .append(Constants.SPACE).append(Constants.DOUBLE_QUOTES).append(baseDataSource.getPassword()).append(Constants.DOUBLE_QUOTES); if (sourceMysqlParameter.getSrcQueryType() == SqoopQueryType.FORM.getCode()) { if (StringUtils.isNotEmpty(sourceMysqlParameter.getSrcTable())) { mysqlSourceSb.append(Constants.SPACE).append(SqoopConstants.TABLE) .append(Constants.SPACE).append(sourceMysqlParameter.getSrcTable()); } if (StringUtils.isNotEmpty(sourceMysqlParameter.getSrcColumns())) { mysqlSourceSb.append(Constants.SPACE).append(SqoopConstants.COLUMNS) .append(Constants.SPACE).append(sourceMysqlParameter.getSrcColumns()); } } else if (sourceMysqlParameter.getSrcQueryType() == SqoopQueryType.SQL.getCode() && StringUtils.isNotEmpty(sourceMysqlParameter.getSrcQuerySql())) { String srcQuery = sourceMysqlParameter.getSrcQuerySql(); mysqlSourceSb.append(Constants.SPACE).append(SqoopConstants.QUERY) .append(Constants.SPACE).append(Constants.DOUBLE_QUOTES).append(srcQuery); if (srcQuery.toLowerCase().contains(SqoopConstants.QUERY_WHERE)) { mysqlSourceSb.append(Constants.SPACE).append(SqoopConstants.QUERY_CONDITION).append(Constants.DOUBLE_QUOTES); } else { mysqlSourceSb.append(Constants.SPACE).append(SqoopConstants.QUERY_WITHOUT_CONDITION).append(Constants.DOUBLE_QUOTES); } } List<Property> mapColumnHive = sourceMysqlParameter.getMapColumnHive(); if (null != mapColumnHive && !mapColumnHive.isEmpty()) {
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
4,104
[Bug][Sqoop] when jdbc connect string contains special characters, import fail
**Describe the bug** when jdbc connect string contains special characters, import fail **To Reproduce** Steps to reproduce the behavior, for example: 1. sqoop import data source string contains special characters like "&" 2. run sqoop task 3. find command not found error **Expected behavior** sqoop import RDBMS success **Which version of Dolphin Scheduler:** -[ds-1.3.3]
https://github.com/apache/dolphinscheduler/issues/4104
https://github.com/apache/dolphinscheduler/pull/4105
656ec295b9e09468fc6a871df821ba42436c5e57
1cf40e1d1e4379e6b50a92871987d59291ccfd50
"2020-11-25T15:22:52Z"
java
"2020-11-27T01:46:23Z"
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/sources/MysqlSourceGenerator.java
StringBuilder columnMap = new StringBuilder(); for (Property item : mapColumnHive) { columnMap.append(item.getProp()).append(Constants.EQUAL_SIGN).append(item.getValue()).append(Constants.COMMA); } if (StringUtils.isNotEmpty(columnMap.toString())) { mysqlSourceSb.append(Constants.SPACE).append(SqoopConstants.MAP_COLUMN_HIVE) .append(Constants.SPACE).append(columnMap.substring(0, columnMap.length() - 1)); } } List<Property> mapColumnJava = sourceMysqlParameter.getMapColumnJava(); if (null != mapColumnJava && !mapColumnJava.isEmpty()) { StringBuilder columnMap = new StringBuilder(); for (Property item : mapColumnJava) { columnMap.append(item.getProp()).append(Constants.EQUAL_SIGN).append(item.getValue()).append(Constants.COMMA); } if (StringUtils.isNotEmpty(columnMap.toString())) { mysqlSourceSb.append(Constants.SPACE).append(SqoopConstants.MAP_COLUMN_JAVA) .append(Constants.SPACE).append(columnMap.substring(0, columnMap.length() - 1)); } } } } } catch (Exception e) { logger.error(String.format("Sqoop task mysql source params build failed: [%s]", e.getMessage())); } return mysqlSourceSb.toString(); } }
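The bug report in this row traces the "command not found" error to shell metacharacters such as "&" inside the JDBC connect string that the generator embeds, unquoted, into the sqoop command line. As a hedged sketch only — the helper name and the exact quoting strategy are assumptions and may differ from the change made in the linked pull request — the underlying idea is to pass the URL to the shell as a single quoted argument:

```java
// Illustrative sketch only: shows why an unquoted JDBC URL breaks the generated sqoop
// command and how quoting avoids it. This is not the exact MysqlSourceGenerator change.
public class SqoopConnectQuotingSketch {

    /** Wraps the connect string in double quotes so shell metacharacters such as '&' are not interpreted. */
    public static String quoteConnectionString(String jdbcUrl) {
        return "\"" + jdbcUrl + "\"";
    }

    public static void main(String[] args) {
        String jdbcUrl = "jdbc:mysql://192.168.0.111:3306/test?useUnicode=true&characterEncoding=utf8";

        // Without quoting, the shell treats everything after '&' as a separate background
        // command, which is what produces the "command not found" error from the issue.
        String broken = "sqoop import --connect " + jdbcUrl + " --username kylo";

        // With quoting, the whole URL reaches sqoop as one argument.
        String fixed = "sqoop import --connect " + quoteConnectionString(jdbcUrl) + " --username kylo";

        System.out.println(broken);
        System.out.println(fixed);
    }
}
```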
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
4,104
[Bug][Sqoop] when jdbc connect string contains special characters, import fail
**Describe the bug** when jdbc connect string contains special characters, import fail **To Reproduce** Steps to reproduce the behavior, for example: 1. sqoop import data source string contains special characters like "&" 2. run sqoop task 3. find command not found error **Expected behavior** sqoop import RDBMS success **Which version of Dolphin Scheduler:** -[ds-1.3.3]
https://github.com/apache/dolphinscheduler/issues/4104
https://github.com/apache/dolphinscheduler/pull/4105
656ec295b9e09468fc6a871df821ba42436c5e57
1cf40e1d1e4379e6b50a92871987d59291ccfd50
"2020-11-25T15:22:52Z"
java
"2020-11-27T01:46:23Z"
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/targets/MysqlTargetGenerator.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
4,104
[Bug][Sqoop] when jdbc connect string contains special characters, import fail
**Describe the bug** when jdbc connect string contains special characters, import fail **To Reproduce** Steps to reproduce the behavior, for example: 1. sqoop import data source string contains special characters like "&" 2. run sqoop task 3. find command not found error **Expected behavior** sqoop import RDBMS success **Which version of Dolphin Scheduler:** -[ds-1.3.3]
https://github.com/apache/dolphinscheduler/issues/4104
https://github.com/apache/dolphinscheduler/pull/4105
656ec295b9e09468fc6a871df821ba42436c5e57
1cf40e1d1e4379e6b50a92871987d59291ccfd50
"2020-11-25T15:22:52Z"
java
"2020-11-27T01:46:23Z"
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/targets/MysqlTargetGenerator.java
* (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.server.worker.task.sqoop.generator.targets; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.DbType; import org.apache.dolphinscheduler.common.task.sqoop.SqoopParameters; import org.apache.dolphinscheduler.common.task.sqoop.targets.TargetMysqlParameter; import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.common.utils.StringUtils; import org.apache.dolphinscheduler.dao.datasource.BaseDataSource; import org.apache.dolphinscheduler.dao.datasource.DataSourceFactory; import org.apache.dolphinscheduler.server.entity.SqoopTaskExecutionContext; import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; import org.apache.dolphinscheduler.server.worker.task.sqoop.SqoopConstants; import org.apache.dolphinscheduler.server.worker.task.sqoop.generator.ITargetGenerator; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * mysql target generator */ public class MysqlTargetGenerator implements ITargetGenerator {
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
4,104
[Bug][Sqoop] when jdbc connect string contains special characters, import fail
**Describe the bug** when jdbc connect string contains special characters, import fail **To Reproduce** Steps to reproduce the behavior, for example: 1. sqoop import data source string contains special characters like "&" 2. run sqoop task 3. find command not found error **Expected behavior** sqoop import RDBMS success **Which version of Dolphin Scheduler:** -[ds-1.3.3]
https://github.com/apache/dolphinscheduler/issues/4104
https://github.com/apache/dolphinscheduler/pull/4105
656ec295b9e09468fc6a871df821ba42436c5e57
1cf40e1d1e4379e6b50a92871987d59291ccfd50
"2020-11-25T15:22:52Z"
java
"2020-11-27T01:46:23Z"
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/targets/MysqlTargetGenerator.java
private static final Logger logger = LoggerFactory.getLogger(MysqlTargetGenerator.class); @Override public String generate(SqoopParameters sqoopParameters, TaskExecutionContext taskExecutionContext) { StringBuilder mysqlTargetSb = new StringBuilder(); try { TargetMysqlParameter targetMysqlParameter = JSONUtils.parseObject(sqoopParameters.getTargetParams(), TargetMysqlParameter.class); SqoopTaskExecutionContext sqoopTaskExecutionContext = taskExecutionContext.getSqoopTaskExecutionContext(); if (null != targetMysqlParameter && targetMysqlParameter.getTargetDatasource() != 0) { BaseDataSource baseDataSource = DataSourceFactory.getDatasource(DbType.of(sqoopTaskExecutionContext.getTargetType()), sqoopTaskExecutionContext.getTargetConnectionParams()); if (null != baseDataSource) { mysqlTargetSb.append(Constants.SPACE).append(SqoopConstants.DB_CONNECT) .append(Constants.SPACE).append(baseDataSource.getJdbcUrl()) .append(Constants.SPACE).append(SqoopConstants.DB_USERNAME) .append(Constants.SPACE).append(baseDataSource.getUser()) .append(Constants.SPACE).append(SqoopConstants.DB_PWD) .append(Constants.SPACE).append(Constants.DOUBLE_QUOTES).append(baseDataSource.getPassword()).append(Constants.DOUBLE_QUOTES) .append(Constants.SPACE).append(SqoopConstants.TABLE) .append(Constants.SPACE).append(targetMysqlParameter.getTargetTable()); if (StringUtils.isNotEmpty(targetMysqlParameter.getTargetColumns())) { mysqlTargetSb.append(Constants.SPACE).append(SqoopConstants.COLUMNS) .append(Constants.SPACE).append(targetMysqlParameter.getTargetColumns()); } if (StringUtils.isNotEmpty(targetMysqlParameter.getFieldsTerminated())) { mysqlTargetSb.append(Constants.SPACE).append(SqoopConstants.FIELDS_TERMINATED_BY); if (targetMysqlParameter.getFieldsTerminated().contains("'")) {
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
4,104
[Bug][Sqoop] when jdbc connect string contains special characters, import fail
**Describe the bug** when jdbc connect string contains special characters, import fail **To Reproduce** Steps to reproduce the behavior, for example: 1. sqoop import data source string contains special characters like "&" 2. run sqoop task 3. find command not found error **Expected behavior** sqoop import RDBMS success **Which version of Dolphin Scheduler:** -[ds-1.3.3]
https://github.com/apache/dolphinscheduler/issues/4104
https://github.com/apache/dolphinscheduler/pull/4105
656ec295b9e09468fc6a871df821ba42436c5e57
1cf40e1d1e4379e6b50a92871987d59291ccfd50
"2020-11-25T15:22:52Z"
java
"2020-11-27T01:46:23Z"
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/targets/MysqlTargetGenerator.java
mysqlTargetSb.append(Constants.SPACE).append(targetMysqlParameter.getFieldsTerminated()); } else { mysqlTargetSb.append(Constants.SPACE).append(Constants.SINGLE_QUOTES).append(targetMysqlParameter.getFieldsTerminated()).append(Constants.SINGLE_QUOTES); } } if (StringUtils.isNotEmpty(targetMysqlParameter.getLinesTerminated())) { mysqlTargetSb.append(Constants.SPACE).append(SqoopConstants.LINES_TERMINATED_BY); if (targetMysqlParameter.getLinesTerminated().contains(Constants.SINGLE_QUOTES)) { mysqlTargetSb.append(Constants.SPACE).append(targetMysqlParameter.getLinesTerminated()); } else { mysqlTargetSb.append(Constants.SPACE).append(Constants.SINGLE_QUOTES).append(targetMysqlParameter.getLinesTerminated()).append(Constants.SINGLE_QUOTES); } } if (targetMysqlParameter.getIsUpdate() && StringUtils.isNotEmpty(targetMysqlParameter.getTargetUpdateKey()) && StringUtils.isNotEmpty(targetMysqlParameter.getTargetUpdateMode())) { mysqlTargetSb.append(Constants.SPACE).append(SqoopConstants.UPDATE_KEY) .append(Constants.SPACE).append(targetMysqlParameter.getTargetUpdateKey()) .append(Constants.SPACE).append(SqoopConstants.UPDATE_MODE) .append(Constants.SPACE).append(targetMysqlParameter.getTargetUpdateMode()); } } } } catch (Exception e) { logger.error(String.format("Sqoop mysql target params build failed: [%s]", e.getMessage())); } return mysqlTargetSb.toString(); } }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
4,104
[Bug][Sqoop] when jdbc connect string contains special characters, import fail
**Describe the bug** when jdbc connect string contains special characters, import fail **To Reproduce** Steps to reproduce the behavior, for example: 1. sqoop import data source string contains special characters like "&" 2. run sqoop task 3. find command not found error **Expected behavior** sqoop import RDBMS success **Which version of Dolphin Scheduler:** -[ds-1.3.3]
https://github.com/apache/dolphinscheduler/issues/4104
https://github.com/apache/dolphinscheduler/pull/4105
656ec295b9e09468fc6a871df821ba42436c5e57
1cf40e1d1e4379e6b50a92871987d59291ccfd50
"2020-11-25T15:22:52Z"
java
"2020-11-27T01:46:23Z"
dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/sqoop/SqoopTaskTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.server.worker.task.sqoop; import org.apache.dolphinscheduler.common.task.sqoop.SqoopParameters; import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.server.entity.SqoopTaskExecutionContext; import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; import org.apache.dolphinscheduler.server.worker.task.sqoop.generator.SqoopJobGenerator; import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; import org.apache.dolphinscheduler.service.process.ProcessService; import java.util.Date; import org.junit.Assert; import org.junit.Before; import org.junit.Test;
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
4,104
[Bug][Sqoop] when jdbc connect string contains special characters, import fail
**Describe the bug** when jdbc connect string contains special characters, import fail **To Reproduce** Steps to reproduce the behavior, for example: 1. sqoop import data source string contains special characters like "&" 2. run sqoop task 3. find command not found error **Expected behavior** sqoop import RDBMS success **Which version of Dolphin Scheduler:** -[ds-1.3.3]
https://github.com/apache/dolphinscheduler/issues/4104
https://github.com/apache/dolphinscheduler/pull/4105
656ec295b9e09468fc6a871df821ba42436c5e57
1cf40e1d1e4379e6b50a92871987d59291ccfd50
"2020-11-25T15:22:52Z"
java
"2020-11-27T01:46:23Z"
dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/sqoop/SqoopTaskTest.java
import org.junit.runner.RunWith; import org.mockito.Mockito; import org.mockito.junit.MockitoJUnitRunner; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.context.ApplicationContext; /** * sqoop task test */ @RunWith(MockitoJUnitRunner.Silent.class) public class SqoopTaskTest { private static final Logger logger = LoggerFactory.getLogger(SqoopTaskTest.class); private SqoopTask sqoopTask; @Before public void before() { ProcessService processService = Mockito.mock(ProcessService.class); ApplicationContext applicationContext = Mockito.mock(ApplicationContext.class); SpringApplicationContext springApplicationContext = new SpringApplicationContext(); springApplicationContext.setApplicationContext(applicationContext); Mockito.when(applicationContext.getBean(ProcessService.class)).thenReturn(processService); TaskExecutionContext taskExecutionContext = new TaskExecutionContext(); taskExecutionContext.setTaskAppId(String.valueOf(System.currentTimeMillis())); taskExecutionContext.setTenantCode("1"); taskExecutionContext.setEnvFile(".dolphinscheduler_env.sh"); taskExecutionContext.setStartTime(new Date()); taskExecutionContext.setTaskTimeout(0); taskExecutionContext.setTaskParams("{\"jobName\":\"sqoop_import\",\"jobType\":\"TEMPLATE\",\"concurrency\":1," + "\"modelType\":\"import\",\"sourceType\":\"MYSQL\",\"targetType\":\"HIVE\",\"sourceParams\":\"{\\\"srcDatasource\\\":2,\\\"srcTable\\\":\\\"person_2\\\"," + "\\\"srcQueryType\\\":\\\"1\\\",\\\"srcQuerySql\\\":\\\"SELECT * FROM person_2\\\",\\\"srcColumnType\\\":\\\"0\\\"," + "\\\"srcColumns\\\":\\\"\\\",\\\"srcConditionList\\\":[],\\\"mapColumnHive\\\":[],"
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
4,104
[Bug][Sqoop] when jdbc connect string contains special characters, import fail
**Describe the bug** when jdbc connect string contains special characters, import fail **To Reproduce** Steps to reproduce the behavior, for example: 1. sqoop import data source string contains special characters like "&" 2. run sqoop task 3. find command not found error **Expected behavior** sqoop import RDBMS success **Which version of Dolphin Scheduler:** -[ds-1.3.3]
https://github.com/apache/dolphinscheduler/issues/4104
https://github.com/apache/dolphinscheduler/pull/4105
656ec295b9e09468fc6a871df821ba42436c5e57
1cf40e1d1e4379e6b50a92871987d59291ccfd50
"2020-11-25T15:22:52Z"
java
"2020-11-27T01:46:23Z"
dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/sqoop/SqoopTaskTest.java
+ "\\\"mapColumnJava\\\":[{\\\"prop\\\":\\\"id\\\",\\\"direct\\\":\\\"IN\\\",\\\"type\\\":\\\"VARCHAR\\\",\\\"value\\\":\\\"Integer\\\"}]}\"" + ",\"targetParams\":\"{\\\"hiveDatabase\\\":\\\"stg\\\",\\\"hiveTable\\\":\\\"person_internal_2\\\",\\\"createHiveTable\\\":true," + "\\\"dropDelimiter\\\":false,\\\"hiveOverWrite\\\":true,\\\"replaceDelimiter\\\":\\\"\\\",\\\"hivePartitionKey\\\":\\\"date\\\"," + "\\\"hivePartitionValue\\\":\\\"2020-02-16\\\"}\",\"localParams\":[]}"); sqoopTask = new SqoopTask(taskExecutionContext, logger); sqoopTask.init(); } /** * test SqoopJobGenerator */ @Test public void testGenerator() { TaskExecutionContext mysqlTaskExecutionContext = getMysqlTaskExecutionContext(); String mysqlToHdfs = "{\"jobName\":\"sqoop_import\",\"hadoopCustomParams\":[{\"prop\":\"mapreduce.map.memory.mb\",\"direct\":\"IN\",\"type\":\"VARCHAR\",\"value\":\"4096\"}]," + "\"sqoopAdvancedParams\":[{\"prop\":\"--direct\",\"direct\":\"IN\",\"type\":\"VARCHAR\",\"value\":\"\"}],\"jobType\":\"TEMPLATE\",\"concurrency\":1," + "\"modelType\":\"import\",\"sourceType\":\"MYSQL\",\"targetType\":\"HDFS\"," + "\"sourceParams\":\"{\\\"srcDatasource\\\":2,\\\"srcTable\\\":\\\"person_2\\\",\\\"srcQueryType\\\":\\\"0\\\",\\\"srcQuerySql\\\":\\\"\\\",\\\"srcColumnType\\\":\\\"0\\\"," + "\\\"srcColumns\\\":\\\"\\\",\\\"srcConditionList\\\":[],\\\"mapColumnHive\\\":[],\\\"mapColumnJava\\\":[]}\",\"targetParams\":\"{\\\"targetPath\\\":\\\"/ods/tmp/test/person7\\\"," + "\\\"deleteTargetDir\\\":true,\\\"fileType\\\":\\\"--as-textfile\\\",\\\"compressionCodec\\\":\\\"\\\",\\\"fieldsTerminated\\\":\\\"@\\\"," + "\\\"linesTerminated\\\":\\\"\\\\\\\\n\\\"}\",\"localParams\":[]}"; SqoopParameters mysqlToHdfsParams = JSONUtils.parseObject(mysqlToHdfs, SqoopParameters.class); SqoopJobGenerator generator = new SqoopJobGenerator(); String mysqlToHdfsScript = generator.generateSqoopJob(mysqlToHdfsParams, mysqlTaskExecutionContext); String mysqlToHdfsExpected = "sqoop import -D mapred.job.name=sqoop_import -D mapreduce.map.memory.mb=4096 --direct -m 1 --connect jdbc:mysql://192.168.0.111:3306/test " + "--username kylo --password \"123456\" --table person_2 --target-dir /ods/tmp/test/person7 --as-textfile "
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
4,104
[Bug][Sqoop] when jdbc connect string contains special characters, import fail
**Describe the bug** when jdbc connect string contains special characters, import fail **To Reproduce** Steps to reproduce the behavior, for example: 1. sqoop import data source string contains special characters like "&" 2. run sqoop task 3. find command not found error **Expected behavior** sqoop import RDBMS success **Which version of Dolphin Scheduler:** -[ds-1.3.3]
https://github.com/apache/dolphinscheduler/issues/4104
https://github.com/apache/dolphinscheduler/pull/4105
656ec295b9e09468fc6a871df821ba42436c5e57
1cf40e1d1e4379e6b50a92871987d59291ccfd50
"2020-11-25T15:22:52Z"
java
"2020-11-27T01:46:23Z"
dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/sqoop/SqoopTaskTest.java
+ "--delete-target-dir --fields-terminated-by '@' --lines-terminated-by '\\n' --null-non-string 'NULL' --null-string 'NULL'"; Assert.assertEquals(mysqlToHdfsExpected, mysqlToHdfsScript); String hdfsToMysql = "{\"jobName\":\"sqoop_import\",\"jobType\":\"TEMPLATE\",\"concurrency\":1,\"modelType\":\"export\",\"sourceType\":\"HDFS\"," + "\"targetType\":\"MYSQL\",\"sourceParams\":\"{\\\"exportDir\\\":\\\"/ods/tmp/test/person7\\\"}\"," + "\"targetParams\":\"{\\\"targetDatasource\\\":2,\\\"targetTable\\\":\\\"person_3\\\",\\\"targetColumns\\\":\\\"id,name,age,sex,create_time\\\"," + "\\\"preQuery\\\":\\\"\\\",\\\"isUpdate\\\":true,\\\"targetUpdateKey\\\":\\\"id\\\",\\\"targetUpdateMode\\\":\\\"allowinsert\\\"," + "\\\"fieldsTerminated\\\":\\\"@\\\",\\\"linesTerminated\\\":\\\"\\\\\\\\n\\\"}\",\"localParams\":[]}"; SqoopParameters hdfsToMysqlParams = JSONUtils.parseObject(hdfsToMysql, SqoopParameters.class); String hdfsToMysqlScript = generator.generateSqoopJob(hdfsToMysqlParams, mysqlTaskExecutionContext); String hdfsToMysqlScriptExpected = "sqoop export -D mapred.job.name=sqoop_import -m 1 --export-dir /ods/tmp/test/person7 --connect jdbc:mysql://192.168.0.111:3306/test " + "--username kylo --password \"123456\" --table person_3 --columns id,name,age,sex,create_time --fields-terminated-by '@' " + "--lines-terminated-by '\\n' --update-key id --update-mode allowinsert"; Assert.assertEquals(hdfsToMysqlScriptExpected, hdfsToMysqlScript); String hiveToMysql = "{\"jobName\":\"sqoop_import\",\"jobType\":\"TEMPLATE\",\"concurrency\":1,\"modelType\":\"export\",\"sourceType\":\"HIVE\"," + "\"targetType\":\"MYSQL\",\"sourceParams\":\"{\\\"hiveDatabase\\\":\\\"stg\\\",\\\"hiveTable\\\":\\\"person_internal\\\"," + "\\\"hivePartitionKey\\\":\\\"date\\\",\\\"hivePartitionValue\\\":\\\"2020-02-17\\\"}\"," + "\"targetParams\":\"{\\\"targetDatasource\\\":2,\\\"targetTable\\\":\\\"person_3\\\",\\\"targetColumns\\\":\\\"\\\",\\\"preQuery\\\":\\\"\\\"," + "\\\"isUpdate\\\":false,\\\"targetUpdateKey\\\":\\\"\\\",\\\"targetUpdateMode\\\":\\\"allowinsert\\\",\\\"fieldsTerminated\\\":\\\"@\\\"," + "\\\"linesTerminated\\\":\\\"\\\\\\\\n\\\"}\",\"localParams\":[]}"; SqoopParameters hiveToMysqlParams = JSONUtils.parseObject(hiveToMysql, SqoopParameters.class); String hiveToMysqlScript = generator.generateSqoopJob(hiveToMysqlParams, mysqlTaskExecutionContext); String hiveToMysqlExpected = "sqoop export -D mapred.job.name=sqoop_import -m 1 --hcatalog-database stg --hcatalog-table person_internal --hcatalog-partition-keys date " + "--hcatalog-partition-values 2020-02-17 --connect jdbc:mysql://192.168.0.111:3306/test --username kylo --password \"123456\" --table person_3 " + "--fields-terminated-by '@' --lines-terminated-by '\\n'"; Assert.assertEquals(hiveToMysqlExpected, hiveToMysqlScript);
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
4,104
[Bug][Sqoop] when jdbc connect string contains special characters, import fail
**Describe the bug** when jdbc connect string contains special characters, import fail **To Reproduce** Steps to reproduce the behavior, for example: 1. sqoop import data source string contains special characters like "&" 2. run sqoop task 3. find command not found error **Expected behavior** sqoop import RDBMS success **Which version of Dolphin Scheduler:** -[ds-1.3.3]
https://github.com/apache/dolphinscheduler/issues/4104
https://github.com/apache/dolphinscheduler/pull/4105
656ec295b9e09468fc6a871df821ba42436c5e57
1cf40e1d1e4379e6b50a92871987d59291ccfd50
"2020-11-25T15:22:52Z"
java
"2020-11-27T01:46:23Z"
dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/sqoop/SqoopTaskTest.java
String mysqlToHive = "{\"jobName\":\"sqoop_import\",\"jobType\":\"TEMPLATE\",\"concurrency\":1,\"modelType\":\"import\",\"sourceType\":\"MYSQL\",\"targetType\":\"HIVE\"," + "\"sourceParams\":\"{\\\"srcDatasource\\\":2,\\\"srcTable\\\":\\\"person_2\\\",\\\"srcQueryType\\\":\\\"1\\\"," + "\\\"srcQuerySql\\\":\\\"SELECT * FROM person_2\\\",\\\"srcColumnType\\\":\\\"0\\\",\\\"srcColumns\\\":\\\"\\\",\\\"srcConditionList\\\":[]," + "\\\"mapColumnHive\\\":[],\\\"mapColumnJava\\\":[{\\\"prop\\\":\\\"id\\\",\\\"direct\\\":\\\"IN\\\",\\\"type\\\":\\\"VARCHAR\\\",\\\"value\\\":\\\"Integer\\\"}]}\"," + "\"targetParams\":\"{\\\"hiveDatabase\\\":\\\"stg\\\",\\\"hiveTable\\\":\\\"person_internal_2\\\",\\\"createHiveTable\\\":true,\\\"dropDelimiter\\\":false," + "\\\"hiveOverWrite\\\":true,\\\"replaceDelimiter\\\":\\\"\\\",\\\"hivePartitionKey\\\":\\\"date\\\",\\\"hivePartitionValue\\\":\\\"2020-02-16\\\"}\",\"localParams\":[]}"; SqoopParameters mysqlToHiveParams = JSONUtils.parseObject(mysqlToHive, SqoopParameters.class); String mysqlToHiveScript = generator.generateSqoopJob(mysqlToHiveParams, mysqlTaskExecutionContext); String mysqlToHiveExpected = "sqoop import -D mapred.job.name=sqoop_import -m 1 --connect jdbc:mysql://192.168.0.111:3306/test --username kylo --password \"123456\" " + "--query \"SELECT * FROM person_2 WHERE \\$CONDITIONS\" --map-column-java id=Integer --hive-import --hive-table stg.person_internal_2 " + "--create-hive-table --hive-overwrite --delete-target-dir --hive-partition-key date --hive-partition-value 2020-02-16"; Assert.assertEquals(mysqlToHiveExpected, mysqlToHiveScript); String sqoopCustomString = "{\"jobType\":\"CUSTOM\",\"localParams\":[],\"customShell\":\"sqoop import\"}"; SqoopParameters sqoopCustomParams = JSONUtils.parseObject(sqoopCustomString, SqoopParameters.class); String sqoopCustomScript = generator.generateSqoopJob(sqoopCustomParams, new TaskExecutionContext()); String sqoopCustomExpected = "sqoop import"; Assert.assertEquals(sqoopCustomExpected, sqoopCustomScript); } /** * get taskExecutionContext include mysql * * @return TaskExecutionContext */ private TaskExecutionContext getMysqlTaskExecutionContext() { TaskExecutionContext taskExecutionContext = new TaskExecutionContext(); SqoopTaskExecutionContext sqoopTaskExecutionContext = new SqoopTaskExecutionContext();
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
4,104
[Bug][Sqoop] when jdbc connect string contains special characters, import fail
**Describe the bug** when jdbc connect string contains special characters, import fail **To Reproduce** Steps to reproduce the behavior, for example: 1. sqoop import data source string contains special characters like "&" 2. run sqoop task 3. find command not found error **Expected behavior** sqoop import RDBMS success **Which version of Dolphin Scheduler:** -[ds-1.3.3]
https://github.com/apache/dolphinscheduler/issues/4104
https://github.com/apache/dolphinscheduler/pull/4105
656ec295b9e09468fc6a871df821ba42436c5e57
1cf40e1d1e4379e6b50a92871987d59291ccfd50
"2020-11-25T15:22:52Z"
java
"2020-11-27T01:46:23Z"
dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/sqoop/SqoopTaskTest.java
String mysqlSourceConnectionParams = "{\"address\":\"jdbc:mysql://192.168.0.111:3306\",\"database\":\"test\",\"jdbcUrl\":\"jdbc:mysql://192.168.0.111:3306/test\",\"user\":\"kylo\",\"password\":\"123456\"}"; String mysqlTargetConnectionParams = "{\"address\":\"jdbc:mysql://192.168.0.111:3306\",\"database\":\"test\",\"jdbcUrl\":\"jdbc:mysql://192.168.0.111:3306/test\",\"user\":\"kylo\",\"password\":\"123456\"}"; sqoopTaskExecutionContext.setDataSourceId(2); sqoopTaskExecutionContext.setDataTargetId(2); sqoopTaskExecutionContext.setSourcetype(0); sqoopTaskExecutionContext.setTargetConnectionParams(mysqlTargetConnectionParams); sqoopTaskExecutionContext.setSourceConnectionParams(mysqlSourceConnectionParams); sqoopTaskExecutionContext.setTargetType(0); taskExecutionContext.setSqoopTaskExecutionContext(sqoopTaskExecutionContext); return taskExecutionContext; } @Test public void testGetParameters() { Assert.assertNotNull(sqoopTask.getParameters()); } /** * Method: init */ @Test public void testInit() { try { sqoopTask.init(); } catch (Exception e) { Assert.fail(e.getMessage()); } } }
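The connection parameters in this fixture travel as a JSON string inside the task execution context before the generator turns them into `--connect`, `--username` and `--password` arguments. A standalone sketch of reading those fields back with Jackson (the project uses its own JSONUtils wrapper; this version only illustrates the shape of the data and assumes jackson-databind on the classpath):

```java
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

public class ConnectionParamsSketch {
    public static void main(String[] args) throws Exception {
        String json = "{\"address\":\"jdbc:mysql://127.0.0.1:3306\",\"database\":\"test\","
                + "\"jdbcUrl\":\"jdbc:mysql://127.0.0.1:3306/test\",\"user\":\"kylo\",\"password\":\"123456\"}";
        JsonNode node = new ObjectMapper().readTree(json);
        // A jdbcUrl like ...?useUnicode=true&characterEncoding=UTF-8 is where the
        // troublesome '&' enters the generated shell command.
        System.out.println(node.get("jdbcUrl").asText());
        System.out.println(node.get("user").asText());
    }
}
```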
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,478
[Bug][Common] read the file garbled
When reading the file, no encoding is specified, resulting in garbled code ![image](https://user-images.githubusercontent.com/39816903/90090148-e3153d00-dd55-11ea-9eef-0a60784a153f.png) ![image](https://user-images.githubusercontent.com/39816903/90090344-5919a400-dd56-11ea-8366-64690029847e.png)
https://github.com/apache/dolphinscheduler/issues/3478
https://github.com/apache/dolphinscheduler/pull/3479
1cf40e1d1e4379e6b50a92871987d59291ccfd50
bb13f2eae78e4b829816068e8ca3d337d97a0fd2
"2020-08-13T03:13:35Z"
java
"2020-11-27T13:55:28Z"
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ResourcesServiceTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.api.service; import com.baomidou.mybatisplus.core.metadata.IPage; import com.baomidou.mybatisplus.extension.plugins.pagination.Page; import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.exceptions.ServiceException; import org.apache.dolphinscheduler.api.utils.PageInfo; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.ResourceType; import org.apache.dolphinscheduler.common.enums.UserType; import org.apache.dolphinscheduler.common.utils.CollectionUtils; import org.apache.dolphinscheduler.common.utils.FileUtils; import org.apache.dolphinscheduler.common.utils.HadoopUtils;
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,478
[Bug][Common] read the file garbled
When reading the file, no encoding is specified, resulting in garbled code ![image](https://user-images.githubusercontent.com/39816903/90090148-e3153d00-dd55-11ea-9eef-0a60784a153f.png) ![image](https://user-images.githubusercontent.com/39816903/90090344-5919a400-dd56-11ea-8366-64690029847e.png)
https://github.com/apache/dolphinscheduler/issues/3478
https://github.com/apache/dolphinscheduler/pull/3479
1cf40e1d1e4379e6b50a92871987d59291ccfd50
bb13f2eae78e4b829816068e8ca3d337d97a0fd2
"2020-08-13T03:13:35Z"
java
"2020-11-27T13:55:28Z"
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ResourcesServiceTest.java
import org.apache.dolphinscheduler.common.utils.PropertyUtils; import org.apache.dolphinscheduler.dao.entity.Resource; import org.apache.dolphinscheduler.dao.entity.Tenant; import org.apache.dolphinscheduler.dao.entity.UdfFunc; import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.mapper.*; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.InjectMocks; import org.mockito.Mock; import org.mockito.Mockito; import org.powermock.api.mockito.PowerMockito; import org.powermock.core.classloader.annotations.PowerMockIgnore; import org.powermock.core.classloader.annotations.PrepareForTest; import org.powermock.modules.junit4.PowerMockRunner; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.mock.web.MockMultipartFile; import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.Map; @RunWith(PowerMockRunner.class) @PowerMockIgnore({"sun.security.*", "javax.net.*"}) @PrepareForTest({HadoopUtils.class,PropertyUtils.class, FileUtils.class,org.apache.dolphinscheduler.api.utils.FileUtils.class}) public class ResourcesServiceTest { private static final Logger logger = LoggerFactory.getLogger(ResourcesServiceTest.class); @InjectMocks
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,478
[Bug][Common] read the file garbled
When reading the file, no encoding is specified, resulting in garbled code ![image](https://user-images.githubusercontent.com/39816903/90090148-e3153d00-dd55-11ea-9eef-0a60784a153f.png) ![image](https://user-images.githubusercontent.com/39816903/90090344-5919a400-dd56-11ea-8366-64690029847e.png)
https://github.com/apache/dolphinscheduler/issues/3478
https://github.com/apache/dolphinscheduler/pull/3479
1cf40e1d1e4379e6b50a92871987d59291ccfd50
bb13f2eae78e4b829816068e8ca3d337d97a0fd2
"2020-08-13T03:13:35Z"
java
"2020-11-27T13:55:28Z"
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ResourcesServiceTest.java
private ResourcesService resourcesService; @Mock private ResourceMapper resourcesMapper; @Mock private TenantMapper tenantMapper; @Mock private ResourceUserMapper resourceUserMapper; @Mock private HadoopUtils hadoopUtils; @Mock private UserMapper userMapper; @Mock private UdfFuncMapper udfFunctionMapper; @Mock private ProcessDefinitionMapper processDefinitionMapper; @Before public void setUp() { PowerMockito.mockStatic(HadoopUtils.class); PowerMockito.mockStatic(FileUtils.class); PowerMockito.mockStatic(org.apache.dolphinscheduler.api.utils.FileUtils.class); try { PowerMockito.whenNew(HadoopUtils.class).withNoArguments().thenReturn(hadoopUtils); } catch (Exception e) { e.printStackTrace(); } PowerMockito.when(HadoopUtils.getInstance()).thenReturn(hadoopUtils); PowerMockito.mockStatic(PropertyUtils.class); } @Test
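The scaffolding above stubs static calls such as HadoopUtils.getInstance() and PropertyUtils.getResUploadStartupState() through PowerMock. A stripped-down illustration of that pattern with an invented utility class, shown only to make the mechanism easier to follow:

```java
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.powermock.api.mockito.PowerMockito;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;

@RunWith(PowerMockRunner.class)
@PrepareForTest(StaticMockSketchTest.SystemConfig.class) // class whose statics get stubbed
public class StaticMockSketchTest {

    // Hypothetical utility standing in for PropertyUtils in this sketch.
    public static class SystemConfig {
        public static boolean uploadEnabled() {
            return false;
        }
    }

    @Test
    public void stubbedStaticValueIsReturned() {
        PowerMockito.mockStatic(SystemConfig.class);
        PowerMockito.when(SystemConfig.uploadEnabled()).thenReturn(true);
        Assert.assertTrue(SystemConfig.uploadEnabled());
    }
}
```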
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,478
[Bug][Common] read the file garbled
When reading the file, no encoding is specified, resulting in garbled code ![image](https://user-images.githubusercontent.com/39816903/90090148-e3153d00-dd55-11ea-9eef-0a60784a153f.png) ![image](https://user-images.githubusercontent.com/39816903/90090344-5919a400-dd56-11ea-8366-64690029847e.png)
https://github.com/apache/dolphinscheduler/issues/3478
https://github.com/apache/dolphinscheduler/pull/3479
1cf40e1d1e4379e6b50a92871987d59291ccfd50
bb13f2eae78e4b829816068e8ca3d337d97a0fd2
"2020-08-13T03:13:35Z"
java
"2020-11-27T13:55:28Z"
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ResourcesServiceTest.java
public void testCreateResource(){ PowerMockito.when(PropertyUtils.getResUploadStartupState()).thenReturn(false); User user = new User(); Result result = resourcesService.createResource(user,"ResourcesServiceTest","ResourcesServiceTest",ResourceType.FILE,null,-1,"/"); logger.info(result.toString()); Assert.assertEquals(Status.HDFS_NOT_STARTUP.getMsg(),result.getMsg()); MockMultipartFile mockMultipartFile = new MockMultipartFile("test.pdf",new String().getBytes()); PowerMockito.when(PropertyUtils.getResUploadStartupState()).thenReturn(true); result = resourcesService.createResource(user,"ResourcesServiceTest","ResourcesServiceTest",ResourceType.FILE,mockMultipartFile,-1,"/"); logger.info(result.toString()); Assert.assertEquals(Status.RESOURCE_FILE_IS_EMPTY.getMsg(),result.getMsg()); mockMultipartFile = new MockMultipartFile("test.pdf","test.pdf","pdf",new String("test").getBytes()); PowerMockito.when(FileUtils.suffix("test.pdf")).thenReturn("pdf"); PowerMockito.when(FileUtils.suffix("ResourcesServiceTest.jar")).thenReturn("jar"); result = resourcesService.createResource(user,"ResourcesServiceTest.jar","ResourcesServiceTest",ResourceType.FILE,mockMultipartFile,-1,"/"); logger.info(result.toString()); Assert.assertEquals(Status.RESOURCE_SUFFIX_FORBID_CHANGE.getMsg(),result.getMsg()); mockMultipartFile = new MockMultipartFile("ResourcesServiceTest.pdf","ResourcesServiceTest.pdf","pdf",new String("test").getBytes()); PowerMockito.when(FileUtils.suffix("ResourcesServiceTest.pdf")).thenReturn("pdf"); result = resourcesService.createResource(user,"ResourcesServiceTest.pdf","ResourcesServiceTest",ResourceType.UDF,mockMultipartFile,-1,"/"); logger.info(result.toString()); Assert.assertEquals(Status.UDF_RESOURCE_SUFFIX_NOT_JAR.getMsg(),result.getMsg()); } @Test public void testCreateDirecotry(){ PowerMockito.when(PropertyUtils.getResUploadStartupState()).thenReturn(false);
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,478
[Bug][Common] read the file garbled
When reading the file, no encoding is specified, resulting in garbled code ![image](https://user-images.githubusercontent.com/39816903/90090148-e3153d00-dd55-11ea-9eef-0a60784a153f.png) ![image](https://user-images.githubusercontent.com/39816903/90090344-5919a400-dd56-11ea-8366-64690029847e.png)
https://github.com/apache/dolphinscheduler/issues/3478
https://github.com/apache/dolphinscheduler/pull/3479
1cf40e1d1e4379e6b50a92871987d59291ccfd50
bb13f2eae78e4b829816068e8ca3d337d97a0fd2
"2020-08-13T03:13:35Z"
java
"2020-11-27T13:55:28Z"
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ResourcesServiceTest.java
User user = new User(); Result result = resourcesService.createDirectory(user,"directoryTest","directory test",ResourceType.FILE,-1,"/"); logger.info(result.toString()); Assert.assertEquals(Status.HDFS_NOT_STARTUP.getMsg(),result.getMsg()); user.setId(1); user.setTenantId(1); Mockito.when(userMapper.selectById(1)).thenReturn(getUser()); Mockito.when(tenantMapper.queryById(1)).thenReturn(getTenant()); PowerMockito.when(PropertyUtils.getResUploadStartupState()).thenReturn(true); Mockito.when(resourcesMapper.selectById(Mockito.anyInt())).thenReturn(null); result = resourcesService.createDirectory(user,"directoryTest","directory test",ResourceType.FILE,1,"/"); logger.info(result.toString()); Assert.assertEquals(Status.PARENT_RESOURCE_NOT_EXIST.getMsg(),result.getMsg()); PowerMockito.when(PropertyUtils.getResUploadStartupState()).thenReturn(true); Mockito.when(resourcesMapper.queryResourceList("/directoryTest", 0, 0)).thenReturn(getResourceList()); result = resourcesService.createDirectory(user,"directoryTest","directory test",ResourceType.FILE,-1,"/"); logger.info(result.toString()); Assert.assertEquals(Status.RESOURCE_EXIST.getMsg(),result.getMsg()); } @Test public void testUpdateResource(){ PowerMockito.when(PropertyUtils.getResUploadStartupState()).thenReturn(false); User user = new User(); Result result = resourcesService.updateResource(user,1,"ResourcesServiceTest","ResourcesServiceTest",ResourceType.FILE,null); logger.info(result.toString()); Assert.assertEquals(Status.HDFS_NOT_STARTUP.getMsg(),result.getMsg());
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,478
[Bug][Common] read the file garbled
When reading the file, no encoding is specified, resulting in garbled code ![image](https://user-images.githubusercontent.com/39816903/90090148-e3153d00-dd55-11ea-9eef-0a60784a153f.png) ![image](https://user-images.githubusercontent.com/39816903/90090344-5919a400-dd56-11ea-8366-64690029847e.png)
https://github.com/apache/dolphinscheduler/issues/3478
https://github.com/apache/dolphinscheduler/pull/3479
1cf40e1d1e4379e6b50a92871987d59291ccfd50
bb13f2eae78e4b829816068e8ca3d337d97a0fd2
"2020-08-13T03:13:35Z"
java
"2020-11-27T13:55:28Z"
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ResourcesServiceTest.java
Mockito.when(resourcesMapper.selectById(1)).thenReturn(getResource()); PowerMockito.when(PropertyUtils.getResUploadStartupState()).thenReturn(true); result = resourcesService.updateResource(user,0,"ResourcesServiceTest","ResourcesServiceTest",ResourceType.FILE,null); logger.info(result.toString()); Assert.assertEquals(Status.RESOURCE_NOT_EXIST.getMsg(),result.getMsg()); result = resourcesService.updateResource(user,1,"ResourcesServiceTest","ResourcesServiceTest",ResourceType.FILE,null); logger.info(result.toString()); Assert.assertEquals(Status.USER_NO_OPERATION_PERM.getMsg(),result.getMsg()); user.setId(1); Mockito.when(userMapper.selectById(1)).thenReturn(getUser()); Mockito.when(tenantMapper.queryById(1)).thenReturn(getTenant()); PowerMockito.when(HadoopUtils.getHdfsFileName(Mockito.any(), Mockito.any(),Mockito.anyString())).thenReturn("test1"); try { Mockito.when(HadoopUtils.getInstance().exists(Mockito.any())).thenReturn(false); } catch (IOException e) { logger.error(e.getMessage(),e); } result = resourcesService.updateResource(user, 1, "ResourcesServiceTest1.jar", "ResourcesServiceTest", ResourceType.UDF,null); Assert.assertEquals(Status.RESOURCE_NOT_EXIST.getMsg(),result.getMsg()); user.setId(1); Mockito.when(userMapper.queryDetailsById(1)).thenReturn(getUser()); Mockito.when(tenantMapper.queryById(1)).thenReturn(getTenant()); try { Mockito.when(HadoopUtils.getInstance().exists(Mockito.any())).thenReturn(true); } catch (IOException e) { logger.error(e.getMessage(),e);
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,478
[Bug][Common] read the file garbled
When reading the file, no encoding is specified, resulting in garbled code ![image](https://user-images.githubusercontent.com/39816903/90090148-e3153d00-dd55-11ea-9eef-0a60784a153f.png) ![image](https://user-images.githubusercontent.com/39816903/90090344-5919a400-dd56-11ea-8366-64690029847e.png)
https://github.com/apache/dolphinscheduler/issues/3478
https://github.com/apache/dolphinscheduler/pull/3479
1cf40e1d1e4379e6b50a92871987d59291ccfd50
bb13f2eae78e4b829816068e8ca3d337d97a0fd2
"2020-08-13T03:13:35Z"
java
"2020-11-27T13:55:28Z"
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ResourcesServiceTest.java
} result = resourcesService.updateResource(user,1,"ResourcesServiceTest.jar","ResourcesServiceTest",ResourceType.FILE,null); logger.info(result.toString()); Assert.assertEquals(Status.SUCCESS.getMsg(),result.getMsg()); Mockito.when(resourcesMapper.queryResourceList("/ResourcesServiceTest1.jar", 0, 0)).thenReturn(getResourceList()); result = resourcesService.updateResource(user,1,"ResourcesServiceTest1.jar","ResourcesServiceTest",ResourceType.FILE,null); logger.info(result.toString()); Assert.assertEquals(Status.RESOURCE_EXIST.getMsg(),result.getMsg()); Mockito.when(userMapper.selectById(Mockito.anyInt())).thenReturn(null); result = resourcesService.updateResource(user,1,"ResourcesServiceTest1.jar","ResourcesServiceTest",ResourceType.UDF,null); logger.info(result.toString()); Assert.assertTrue(Status.USER_NOT_EXIST.getCode() == result.getCode()); Mockito.when(userMapper.selectById(1)).thenReturn(getUser()); Mockito.when(tenantMapper.queryById(Mockito.anyInt())).thenReturn(null); result = resourcesService.updateResource(user,1,"ResourcesServiceTest1.jar","ResourcesServiceTest",ResourceType.UDF,null); logger.info(result.toString()); Assert.assertEquals(Status.TENANT_NOT_EXIST.getMsg(),result.getMsg()); Mockito.when(tenantMapper.queryById(1)).thenReturn(getTenant()); PowerMockito.when(HadoopUtils.getHdfsResourceFileName(Mockito.any(), Mockito.any())).thenReturn("test"); try { PowerMockito.when(HadoopUtils.getInstance().copy(Mockito.anyString(),Mockito.anyString(),true,true)).thenReturn(true); } catch (Exception e) { logger.error(e.getMessage(),e); } result = resourcesService.updateResource(user,1,"ResourcesServiceTest1.jar","ResourcesServiceTest1.jar",ResourceType.UDF,null); logger.info(result.toString());
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,478
[Bug][Common] read the file garbled
When reading the file, no encoding is specified, resulting in garbled code ![image](https://user-images.githubusercontent.com/39816903/90090148-e3153d00-dd55-11ea-9eef-0a60784a153f.png) ![image](https://user-images.githubusercontent.com/39816903/90090344-5919a400-dd56-11ea-8366-64690029847e.png)
https://github.com/apache/dolphinscheduler/issues/3478
https://github.com/apache/dolphinscheduler/pull/3479
1cf40e1d1e4379e6b50a92871987d59291ccfd50
bb13f2eae78e4b829816068e8ca3d337d97a0fd2
"2020-08-13T03:13:35Z"
java
"2020-11-27T13:55:28Z"
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ResourcesServiceTest.java
Assert.assertEquals(Status.SUCCESS.getMsg(),result.getMsg()); } @Test public void testQueryResourceListPaging(){ User loginUser = new User(); loginUser.setUserType(UserType.ADMIN_USER); IPage<Resource> resourcePage = new Page<>(1,10); resourcePage.setTotal(1); resourcePage.setRecords(getResourceList()); Mockito.when(resourcesMapper.queryResourcePaging(Mockito.any(Page.class), Mockito.eq(0),Mockito.eq(-1), Mockito.eq(0), Mockito.eq("test"))).thenReturn(resourcePage); Map<String, Object> result = resourcesService.queryResourceListPaging(loginUser,-1,ResourceType.FILE,"test",1,10); logger.info(result.toString()); Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); PageInfo pageInfo = (PageInfo) result.get(Constants.DATA_LIST); Assert.assertTrue(CollectionUtils.isNotEmpty(pageInfo.getLists())); } @Test public void testQueryResourceList(){ User loginUser = new User(); loginUser.setId(0); loginUser.setUserType(UserType.ADMIN_USER); Mockito.when(resourcesMapper.queryResourceListAuthored(0, 0,0)).thenReturn(getResourceList()); Map<String, Object> result = resourcesService.queryResourceList(loginUser, ResourceType.FILE); logger.info(result.toString()); Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); List<Resource> resourceList = (List<Resource>) result.get(Constants.DATA_LIST); Assert.assertTrue(CollectionUtils.isNotEmpty(resourceList)); } @Test
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,478
[Bug][Common] read the file garbled
When reading the file, no encoding is specified, resulting in garbled code ![image](https://user-images.githubusercontent.com/39816903/90090148-e3153d00-dd55-11ea-9eef-0a60784a153f.png) ![image](https://user-images.githubusercontent.com/39816903/90090344-5919a400-dd56-11ea-8366-64690029847e.png)
https://github.com/apache/dolphinscheduler/issues/3478
https://github.com/apache/dolphinscheduler/pull/3479
1cf40e1d1e4379e6b50a92871987d59291ccfd50
bb13f2eae78e4b829816068e8ca3d337d97a0fd2
"2020-08-13T03:13:35Z"
java
"2020-11-27T13:55:28Z"
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ResourcesServiceTest.java
public void testDelete(){ User loginUser = new User(); loginUser.setId(0); PowerMockito.when(PropertyUtils.getResUploadStartupState()).thenReturn(false); Mockito.when(tenantMapper.queryById(1)).thenReturn(getTenant()); try { Result result = resourcesService.delete(loginUser,1); logger.info(result.toString()); Assert.assertEquals(Status.HDFS_NOT_STARTUP.getMsg(), result.getMsg()); PowerMockito.when(PropertyUtils.getResUploadStartupState()).thenReturn(true); Mockito.when(resourcesMapper.selectById(1)).thenReturn(getResource()); result = resourcesService.delete(loginUser,2); logger.info(result.toString()); Assert.assertEquals(Status.RESOURCE_NOT_EXIST.getMsg(), result.getMsg()); result = resourcesService.delete(loginUser,2); logger.info(result.toString()); Assert.assertEquals(Status.RESOURCE_NOT_EXIST.getMsg(), result.getMsg()); loginUser.setUserType(UserType.ADMIN_USER); loginUser.setTenantId(2); Mockito.when(userMapper.selectById(Mockito.anyInt())).thenReturn(loginUser); result = resourcesService.delete(loginUser,1); logger.info(result.toString()); Assert.assertEquals(Status.TENANT_NOT_EXIST.getMsg(), result.getMsg()); loginUser.setTenantId(1); Mockito.when(hadoopUtils.delete(Mockito.anyString(), Mockito.anyBoolean())).thenReturn(true);
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,478
[Bug][Common] read the file garbled
When reading the file, no encoding is specified, resulting in garbled code ![image](https://user-images.githubusercontent.com/39816903/90090148-e3153d00-dd55-11ea-9eef-0a60784a153f.png) ![image](https://user-images.githubusercontent.com/39816903/90090344-5919a400-dd56-11ea-8366-64690029847e.png)
https://github.com/apache/dolphinscheduler/issues/3478
https://github.com/apache/dolphinscheduler/pull/3479
1cf40e1d1e4379e6b50a92871987d59291ccfd50
bb13f2eae78e4b829816068e8ca3d337d97a0fd2
"2020-08-13T03:13:35Z"
java
"2020-11-27T13:55:28Z"
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ResourcesServiceTest.java
result = resourcesService.delete(loginUser,1); logger.info(result.toString()); Assert.assertEquals(Status.SUCCESS.getMsg(), result.getMsg()); } catch (Exception e) { logger.error("delete error",e); Assert.assertTrue(false); } } @Test public void testVerifyResourceName(){ User user = new User(); user.setId(1); Mockito.when(resourcesMapper.queryResourceList("/ResourcesServiceTest.jar", 0, 0)).thenReturn(getResourceList()); Result result = resourcesService.verifyResourceName("/ResourcesServiceTest.jar",ResourceType.FILE,user); logger.info(result.toString()); Assert.assertEquals(Status.RESOURCE_EXIST.getMsg(), result.getMsg()); Mockito.when(tenantMapper.queryById(1)).thenReturn(getTenant()); String unExistFullName = "/test.jar"; try { Mockito.when(hadoopUtils.exists(unExistFullName)).thenReturn(false); } catch (IOException e) { logger.error("hadoop error",e); } result = resourcesService.verifyResourceName("/test.jar",ResourceType.FILE,user); logger.info(result.toString()); Assert.assertEquals(Status.TENANT_NOT_EXIST.getMsg(), result.getMsg()); user.setTenantId(1); try {
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,478
[Bug][Common] read the file garbled
When reading the file, no encoding is specified, resulting in garbled code ![image](https://user-images.githubusercontent.com/39816903/90090148-e3153d00-dd55-11ea-9eef-0a60784a153f.png) ![image](https://user-images.githubusercontent.com/39816903/90090344-5919a400-dd56-11ea-8366-64690029847e.png)
https://github.com/apache/dolphinscheduler/issues/3478
https://github.com/apache/dolphinscheduler/pull/3479
1cf40e1d1e4379e6b50a92871987d59291ccfd50
bb13f2eae78e4b829816068e8ca3d337d97a0fd2
"2020-08-13T03:13:35Z"
java
"2020-11-27T13:55:28Z"
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ResourcesServiceTest.java
Mockito.when(hadoopUtils.exists("test")).thenReturn(true); } catch (IOException e) { logger.error("hadoop error",e); } PowerMockito.when(HadoopUtils.getHdfsResourceFileName("123", "test1")).thenReturn("test"); result = resourcesService.verifyResourceName("/ResourcesServiceTest.jar",ResourceType.FILE,user); logger.info(result.toString()); Assert.assertTrue(Status.RESOURCE_EXIST.getCode()==result.getCode()); result = resourcesService.verifyResourceName("test2",ResourceType.FILE,user); logger.info(result.toString()); Assert.assertEquals(Status.SUCCESS.getMsg(), result.getMsg()); } @Test public void testReadResource(){ PowerMockito.when(PropertyUtils.getResUploadStartupState()).thenReturn(false); Result result = resourcesService.readResource(1,1,10); logger.info(result.toString()); Assert.assertEquals(Status.HDFS_NOT_STARTUP.getMsg(),result.getMsg()); Mockito.when(resourcesMapper.selectById(1)).thenReturn(getResource()); PowerMockito.when(PropertyUtils.getResUploadStartupState()).thenReturn(true); result = resourcesService.readResource(2,1,10); logger.info(result.toString()); Assert.assertEquals(Status.RESOURCE_NOT_EXIST.getMsg(),result.getMsg()); PowerMockito.when(FileUtils.getResourceViewSuffixs()).thenReturn("class"); PowerMockito.when(PropertyUtils.getResUploadStartupState()).thenReturn(true); result = resourcesService.readResource(1,1,10);
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,478
[Bug][Common] read the file garbled
When reading the file, no encoding is specified, resulting in garbled code ![image](https://user-images.githubusercontent.com/39816903/90090148-e3153d00-dd55-11ea-9eef-0a60784a153f.png) ![image](https://user-images.githubusercontent.com/39816903/90090344-5919a400-dd56-11ea-8366-64690029847e.png)
https://github.com/apache/dolphinscheduler/issues/3478
https://github.com/apache/dolphinscheduler/pull/3479
1cf40e1d1e4379e6b50a92871987d59291ccfd50
bb13f2eae78e4b829816068e8ca3d337d97a0fd2
"2020-08-13T03:13:35Z"
java
"2020-11-27T13:55:28Z"
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ResourcesServiceTest.java
logger.info(result.toString()); Assert.assertEquals(Status.RESOURCE_SUFFIX_NOT_SUPPORT_VIEW.getMsg(),result.getMsg()); PowerMockito.when(FileUtils.getResourceViewSuffixs()).thenReturn("jar"); PowerMockito.when(FileUtils.suffix("ResourcesServiceTest.jar")).thenReturn("jar"); result = resourcesService.readResource(1,1,10); logger.info(result.toString()); Assert.assertTrue(Status.USER_NOT_EXIST.getCode()==result.getCode()); Mockito.when(userMapper.selectById(1)).thenReturn(getUser()); result = resourcesService.readResource(1,1,10); logger.info(result.toString()); Assert.assertEquals(Status.TENANT_NOT_EXIST.getMsg(),result.getMsg()); Mockito.when(tenantMapper.queryById(1)).thenReturn(getTenant()); try { Mockito.when(hadoopUtils.exists(Mockito.anyString())).thenReturn(false); } catch (IOException e) { logger.error("hadoop error",e); } result = resourcesService.readResource(1,1,10); logger.info(result.toString()); Assert.assertTrue(Status.RESOURCE_FILE_NOT_EXIST.getCode()==result.getCode()); try { Mockito.when(hadoopUtils.exists(null)).thenReturn(true); Mockito.when(hadoopUtils.catFile(null,1,10)).thenReturn(getContent()); } catch (IOException e) { logger.error("hadoop error",e); }
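These assertions walk the read path that the issue is about: content fetched without an explicit encoding comes back garbled whenever the platform default is not UTF-8. A minimal standalone sketch of reading with an explicit charset, with an invented file path:

```java
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.List;

public class ReadWithCharset {
    public static void main(String[] args) throws IOException {
        // Passing the charset explicitly keeps multi-byte characters intact
        // regardless of the JVM's file.encoding setting.
        List<String> lines = Files.readAllLines(Paths.get("/tmp/demo.txt"), StandardCharsets.UTF_8);
        lines.forEach(System.out::println);
    }
}
```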
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,478
[Bug][Common] read the file garbled
When reading the file, no encoding is specified, resulting in garbled code ![image](https://user-images.githubusercontent.com/39816903/90090148-e3153d00-dd55-11ea-9eef-0a60784a153f.png) ![image](https://user-images.githubusercontent.com/39816903/90090344-5919a400-dd56-11ea-8366-64690029847e.png)
https://github.com/apache/dolphinscheduler/issues/3478
https://github.com/apache/dolphinscheduler/pull/3479
1cf40e1d1e4379e6b50a92871987d59291ccfd50
bb13f2eae78e4b829816068e8ca3d337d97a0fd2
"2020-08-13T03:13:35Z"
java
"2020-11-27T13:55:28Z"
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ResourcesServiceTest.java
result = resourcesService.readResource(1,1,10); logger.info(result.toString()); Assert.assertEquals(Status.SUCCESS.getMsg(),result.getMsg()); } @Test public void testOnlineCreateResource() { PowerMockito.when(PropertyUtils.getResUploadStartupState()).thenReturn(false); PowerMockito.when(HadoopUtils.getHdfsResDir("hdfsdDir")).thenReturn("hdfsDir"); PowerMockito.when(HadoopUtils.getHdfsUdfDir("udfDir")).thenReturn("udfDir"); User user = getUser(); Result result = resourcesService.onlineCreateResource(user,ResourceType.FILE,"test","jar","desc","content",-1,"/"); logger.info(result.toString()); Assert.assertEquals(Status.HDFS_NOT_STARTUP.getMsg(),result.getMsg()); PowerMockito.when(PropertyUtils.getResUploadStartupState()).thenReturn(true); PowerMockito.when(FileUtils.getResourceViewSuffixs()).thenReturn("class"); result = resourcesService.onlineCreateResource(user,ResourceType.FILE,"test","jar","desc","content",-1,"/"); logger.info(result.toString()); Assert.assertEquals(Status.RESOURCE_SUFFIX_NOT_SUPPORT_VIEW.getMsg(),result.getMsg()); try { PowerMockito.when(FileUtils.getResourceViewSuffixs()).thenReturn("jar"); Mockito.when(tenantMapper.queryById(1)).thenReturn(getTenant()); result = resourcesService.onlineCreateResource(user, ResourceType.FILE, "test", "jar", "desc", "content",-1,"/"); }catch (RuntimeException ex){ logger.info(result.toString()); Assert.assertEquals(Status.RESOURCE_NOT_EXIST.getMsg(), ex.getMessage()); }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,478
[Bug][Common] read the file garbled
When reading the file, no encoding is specified, resulting in garbled code ![image](https://user-images.githubusercontent.com/39816903/90090148-e3153d00-dd55-11ea-9eef-0a60784a153f.png) ![image](https://user-images.githubusercontent.com/39816903/90090344-5919a400-dd56-11ea-8366-64690029847e.png)
https://github.com/apache/dolphinscheduler/issues/3478
https://github.com/apache/dolphinscheduler/pull/3479
1cf40e1d1e4379e6b50a92871987d59291ccfd50
bb13f2eae78e4b829816068e8ca3d337d97a0fd2
"2020-08-13T03:13:35Z"
java
"2020-11-27T13:55:28Z"
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ResourcesServiceTest.java
Mockito.when(FileUtils.getUploadFilename(Mockito.anyString(), Mockito.anyString())).thenReturn("test"); PowerMockito.when(FileUtils.writeContent2File(Mockito.anyString(), Mockito.anyString())).thenReturn(true); result = resourcesService.onlineCreateResource(user,ResourceType.FILE,"test","jar","desc","content",-1,"/"); logger.info(result.toString()); Assert.assertEquals(Status.SUCCESS.getMsg(),result.getMsg()); } @Test public void testUpdateResourceContent(){ User loginUser = new User(); loginUser.setId(0); PowerMockito.when(PropertyUtils.getResUploadStartupState()).thenReturn(false); Result result = resourcesService.updateResourceContent(1,"content"); logger.info(result.toString()); Assert.assertEquals(Status.HDFS_NOT_STARTUP.getMsg(), result.getMsg()); PowerMockito.when(PropertyUtils.getResUploadStartupState()).thenReturn(true); Mockito.when(resourcesMapper.selectById(1)).thenReturn(getResource()); result = resourcesService.updateResourceContent(2,"content"); logger.info(result.toString()); Assert.assertEquals(Status.RESOURCE_NOT_EXIST.getMsg(), result.getMsg()); PowerMockito.when(PropertyUtils.getResUploadStartupState()).thenReturn(true); PowerMockito.when(FileUtils.getResourceViewSuffixs()).thenReturn("class"); result = resourcesService.updateResourceContent(1,"content"); logger.info(result.toString()); Assert.assertEquals(Status.RESOURCE_SUFFIX_NOT_SUPPORT_VIEW.getMsg(),result.getMsg()); PowerMockito.when(FileUtils.getResourceViewSuffixs()).thenReturn("jar"); PowerMockito.when(FileUtils.suffix("ResourcesServiceTest.jar")).thenReturn("jar");
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,478
[Bug][Common] read the file garbled
When reading the file, no encoding is specified, resulting in garbled code ![image](https://user-images.githubusercontent.com/39816903/90090148-e3153d00-dd55-11ea-9eef-0a60784a153f.png) ![image](https://user-images.githubusercontent.com/39816903/90090344-5919a400-dd56-11ea-8366-64690029847e.png)
https://github.com/apache/dolphinscheduler/issues/3478
https://github.com/apache/dolphinscheduler/pull/3479
1cf40e1d1e4379e6b50a92871987d59291ccfd50
bb13f2eae78e4b829816068e8ca3d337d97a0fd2
"2020-08-13T03:13:35Z"
java
"2020-11-27T13:55:28Z"
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ResourcesServiceTest.java
result = resourcesService.updateResourceContent(1,"content"); logger.info(result.toString()); Assert.assertTrue(Status.USER_NOT_EXIST.getCode() == result.getCode()); Mockito.when(userMapper.selectById(1)).thenReturn(getUser()); result = resourcesService.updateResourceContent(1,"content"); logger.info(result.toString()); Assert.assertTrue(Status.TENANT_NOT_EXIST.getCode() == result.getCode()); Mockito.when(tenantMapper.queryById(1)).thenReturn(getTenant()); Mockito.when(FileUtils.getUploadFilename(Mockito.anyString(), Mockito.anyString())).thenReturn("test"); PowerMockito.when(FileUtils.writeContent2File(Mockito.anyString(), Mockito.anyString())).thenReturn(true); result = resourcesService.updateResourceContent(1,"content"); logger.info(result.toString()); Assert.assertEquals(Status.SUCCESS.getMsg(), result.getMsg()); } @Test public void testDownloadResource(){ PowerMockito.when(PropertyUtils.getResUploadStartupState()).thenReturn(true); Mockito.when(tenantMapper.queryById(1)).thenReturn(getTenant()); Mockito.when(userMapper.selectById(1)).thenReturn(getUser()); org.springframework.core.io.Resource resourceMock = Mockito.mock(org.springframework.core.io.Resource.class); try { org.springframework.core.io.Resource resource = resourcesService.downloadResource(1); Assert.assertNull(resource); Mockito.when(resourcesMapper.selectById(1)).thenReturn(getResource()); PowerMockito.when(org.apache.dolphinscheduler.api.utils.FileUtils.file2Resource(Mockito.any())).thenReturn(resourceMock); resource = resourcesService.downloadResource(1); Assert.assertNotNull(resource);
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,478
[Bug][Common] read the file garbled
When reading the file, no encoding is specified, resulting in garbled code ![image](https://user-images.githubusercontent.com/39816903/90090148-e3153d00-dd55-11ea-9eef-0a60784a153f.png) ![image](https://user-images.githubusercontent.com/39816903/90090344-5919a400-dd56-11ea-8366-64690029847e.png)
https://github.com/apache/dolphinscheduler/issues/3478
https://github.com/apache/dolphinscheduler/pull/3479
1cf40e1d1e4379e6b50a92871987d59291ccfd50
bb13f2eae78e4b829816068e8ca3d337d97a0fd2
"2020-08-13T03:13:35Z"
java
"2020-11-27T13:55:28Z"
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ResourcesServiceTest.java
} catch (Exception e) { logger.error("DownloadResource error",e); Assert.assertTrue(false); } } @Test public void testUnauthorizedFile(){ User user = getUser(); Map<String, Object> result = resourcesService.unauthorizedFile(user,1); logger.info(result.toString()); Assert.assertEquals(Status.USER_NO_OPERATION_PERM,result.get(Constants.STATUS)); user.setUserType(UserType.ADMIN_USER); Mockito.when(resourcesMapper.queryResourceExceptUserId(1)).thenReturn(getResourceList()); result = resourcesService.unauthorizedFile(user,1); logger.info(result.toString()); Assert.assertEquals(Status.SUCCESS,result.get(Constants.STATUS)); List<Resource> resources = (List<Resource>) result.get(Constants.DATA_LIST); Assert.assertTrue(CollectionUtils.isNotEmpty(resources)); } @Test public void testUnauthorizedUDFFunction(){ User user = getUser(); Map<String, Object> result = resourcesService.unauthorizedUDFFunction(user,1); logger.info(result.toString()); Assert.assertEquals(Status.USER_NO_OPERATION_PERM,result.get(Constants.STATUS)); user.setUserType(UserType.ADMIN_USER);
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,478
[Bug][Common] read the file garbled
When reading the file, no encoding is specified, resulting in garbled code ![image](https://user-images.githubusercontent.com/39816903/90090148-e3153d00-dd55-11ea-9eef-0a60784a153f.png) ![image](https://user-images.githubusercontent.com/39816903/90090344-5919a400-dd56-11ea-8366-64690029847e.png)
https://github.com/apache/dolphinscheduler/issues/3478
https://github.com/apache/dolphinscheduler/pull/3479
1cf40e1d1e4379e6b50a92871987d59291ccfd50
bb13f2eae78e4b829816068e8ca3d337d97a0fd2
"2020-08-13T03:13:35Z"
java
"2020-11-27T13:55:28Z"
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ResourcesServiceTest.java
Mockito.when(udfFunctionMapper.queryUdfFuncExceptUserId(1)).thenReturn(getUdfFuncList()); result = resourcesService.unauthorizedUDFFunction(user,1); logger.info(result.toString()); Assert.assertEquals(Status.SUCCESS,result.get(Constants.STATUS)); List<UdfFunc> udfFuncs = (List<UdfFunc>) result.get(Constants.DATA_LIST); Assert.assertTrue(CollectionUtils.isNotEmpty(udfFuncs)); } @Test public void testAuthorizedUDFFunction(){ User user = getUser(); Map<String, Object> result = resourcesService.authorizedUDFFunction(user,1); logger.info(result.toString()); Assert.assertEquals(Status.USER_NO_OPERATION_PERM,result.get(Constants.STATUS)); user.setUserType(UserType.ADMIN_USER); Mockito.when(udfFunctionMapper.queryAuthedUdfFunc(1)).thenReturn(getUdfFuncList()); result = resourcesService.authorizedUDFFunction(user,1); logger.info(result.toString()); Assert.assertEquals(Status.SUCCESS,result.get(Constants.STATUS)); List<UdfFunc> udfFuncs = (List<UdfFunc>) result.get(Constants.DATA_LIST); Assert.assertTrue(CollectionUtils.isNotEmpty(udfFuncs)); } @Test public void testAuthorizedFile(){ User user = getUser(); Map<String, Object> result = resourcesService.authorizedFile(user,1); logger.info(result.toString()); Assert.assertEquals(Status.USER_NO_OPERATION_PERM,result.get(Constants.STATUS));
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,478
[Bug][Common] read the file garbled
When reading the file, no encoding is specified, resulting in garbled code ![image](https://user-images.githubusercontent.com/39816903/90090148-e3153d00-dd55-11ea-9eef-0a60784a153f.png) ![image](https://user-images.githubusercontent.com/39816903/90090344-5919a400-dd56-11ea-8366-64690029847e.png)
https://github.com/apache/dolphinscheduler/issues/3478
https://github.com/apache/dolphinscheduler/pull/3479
1cf40e1d1e4379e6b50a92871987d59291ccfd50
bb13f2eae78e4b829816068e8ca3d337d97a0fd2
"2020-08-13T03:13:35Z"
java
"2020-11-27T13:55:28Z"
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ResourcesServiceTest.java
user.setUserType(UserType.ADMIN_USER); Mockito.when(resourcesMapper.queryAuthorizedResourceList(1)).thenReturn(getResourceList()); result = resourcesService.authorizedFile(user,1); logger.info(result.toString()); Assert.assertEquals(Status.SUCCESS,result.get(Constants.STATUS)); List<Resource> resources = (List<Resource>) result.get(Constants.DATA_LIST); Assert.assertTrue(CollectionUtils.isNotEmpty(resources)); } private List<Resource> getResourceList(){ List<Resource> resources = new ArrayList<>(); resources.add(getResource()); return resources; } private Tenant getTenant() { Tenant tenant = new Tenant(); tenant.setTenantCode("123"); return tenant; } private Resource getResource(){ Resource resource = new Resource(); resource.setPid(-1); resource.setUserId(1); resource.setDescription("ResourcesServiceTest.jar"); resource.setAlias("ResourcesServiceTest.jar"); resource.setFullName("/ResourcesServiceTest.jar"); resource.setType(ResourceType.FILE); return resource; } private Resource getUdfResource(){
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,478
[Bug][Common] read the file garbled
When reading the file, no encoding is specified, resulting in garbled code ![image](https://user-images.githubusercontent.com/39816903/90090148-e3153d00-dd55-11ea-9eef-0a60784a153f.png) ![image](https://user-images.githubusercontent.com/39816903/90090344-5919a400-dd56-11ea-8366-64690029847e.png)
https://github.com/apache/dolphinscheduler/issues/3478
https://github.com/apache/dolphinscheduler/pull/3479
1cf40e1d1e4379e6b50a92871987d59291ccfd50
bb13f2eae78e4b829816068e8ca3d337d97a0fd2
"2020-08-13T03:13:35Z"
java
"2020-11-27T13:55:28Z"
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ResourcesServiceTest.java
Resource resource = new Resource(); resource.setUserId(1); resource.setDescription("udfTest"); resource.setAlias("udfTest.jar"); resource.setFullName("/udfTest.jar"); resource.setType(ResourceType.UDF); return resource; } private UdfFunc getUdfFunc(){ UdfFunc udfFunc = new UdfFunc(); udfFunc.setId(1); return udfFunc; } private List<UdfFunc> getUdfFuncList(){ List<UdfFunc> udfFuncs = new ArrayList<>(); udfFuncs.add(getUdfFunc()); return udfFuncs; } private User getUser(){ User user = new User(); user.setId(1); user.setTenantId(1); return user; } private List<String> getContent(){ List<String> contentList = new ArrayList<>(); contentList.add("test"); return contentList; } }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,478
[Bug][Common] read the file garbled
When reading the file, no encoding is specified, resulting in garbled code ![image](https://user-images.githubusercontent.com/39816903/90090148-e3153d00-dd55-11ea-9eef-0a60784a153f.png) ![image](https://user-images.githubusercontent.com/39816903/90090344-5919a400-dd56-11ea-8366-64690029847e.png)
https://github.com/apache/dolphinscheduler/issues/3478
https://github.com/apache/dolphinscheduler/pull/3479
1cf40e1d1e4379e6b50a92871987d59291ccfd50
bb13f2eae78e4b829816068e8ca3d337d97a0fd2
"2020-08-13T03:13:35Z"
java
"2020-11-27T13:55:28Z"
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/FileUtils.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,478
[Bug][Common] read the file garbled
When reading the file, no encoding is specified, resulting in garbled code ![image](https://user-images.githubusercontent.com/39816903/90090148-e3153d00-dd55-11ea-9eef-0a60784a153f.png) ![image](https://user-images.githubusercontent.com/39816903/90090344-5919a400-dd56-11ea-8366-64690029847e.png)
https://github.com/apache/dolphinscheduler/issues/3478
https://github.com/apache/dolphinscheduler/pull/3479
1cf40e1d1e4379e6b50a92871987d59291ccfd50
bb13f2eae78e4b829816068e8ca3d337d97a0fd2
"2020-08-13T03:13:35Z"
java
"2020-11-27T13:55:28Z"
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/FileUtils.java
* See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.common.utils; import static org.apache.dolphinscheduler.common.Constants.DATA_BASEDIR_PATH; import static org.apache.dolphinscheduler.common.Constants.RESOURCE_VIEW_SUFFIXS; import static org.apache.dolphinscheduler.common.Constants.RESOURCE_VIEW_SUFFIXS_DEFAULT_VALUE; import static org.apache.dolphinscheduler.common.Constants.YYYYMMDDHHMMSS; import org.apache.commons.io.Charsets; import org.apache.commons.io.IOUtils; import java.io.BufferedReader; import java.io.BufferedWriter; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.FileOutputStream; import java.io.FileWriter; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.io.StringReader; import java.io.UnsupportedEncodingException; import java.nio.charset.Charset; import java.nio.charset.UnsupportedCharsetException; import java.util.Optional; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * file utils */ public class FileUtils {
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,478
[Bug][Common] read the file garbled
When reading the file, no encoding is specified, resulting in garbled code ![image](https://user-images.githubusercontent.com/39816903/90090148-e3153d00-dd55-11ea-9eef-0a60784a153f.png) ![image](https://user-images.githubusercontent.com/39816903/90090344-5919a400-dd56-11ea-8366-64690029847e.png)
https://github.com/apache/dolphinscheduler/issues/3478
https://github.com/apache/dolphinscheduler/pull/3479
1cf40e1d1e4379e6b50a92871987d59291ccfd50
bb13f2eae78e4b829816068e8ca3d337d97a0fd2
"2020-08-13T03:13:35Z"
java
"2020-11-27T13:55:28Z"
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/FileUtils.java
public static final Logger logger = LoggerFactory.getLogger(FileUtils.class); public static final String DATA_BASEDIR = PropertyUtils.getString(DATA_BASEDIR_PATH, "/tmp/dolphinscheduler"); public static final ThreadLocal<Logger> taskLoggerThreadLocal = new ThreadLocal<>(); private FileUtils() { throw new UnsupportedOperationException("Construct FileUtils"); } /** * get file suffix * * @param filename file name * @return file suffix */ public static String suffix(String filename) { String fileSuffix = ""; if (StringUtils.isNotEmpty(filename)) { int lastIndex = filename.lastIndexOf('.'); if (lastIndex > 0) { fileSuffix = filename.substring(lastIndex + 1); } } return fileSuffix; }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,478
[Bug][Common] read the file garbled
When reading the file, no encoding is specified, resulting in garbled code ![image](https://user-images.githubusercontent.com/39816903/90090148-e3153d00-dd55-11ea-9eef-0a60784a153f.png) ![image](https://user-images.githubusercontent.com/39816903/90090344-5919a400-dd56-11ea-8366-64690029847e.png)
https://github.com/apache/dolphinscheduler/issues/3478
https://github.com/apache/dolphinscheduler/pull/3479
1cf40e1d1e4379e6b50a92871987d59291ccfd50
bb13f2eae78e4b829816068e8ca3d337d97a0fd2
"2020-08-13T03:13:35Z"
java
"2020-11-27T13:55:28Z"
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/FileUtils.java
/** * get download file absolute path and name * * @param filename file name * @return download file name */ public static String getDownloadFilename(String filename) { String fileName = String.format("%s/download/%s/%s", DATA_BASEDIR, DateUtils.getCurrentTime(YYYYMMDDHHMMSS), filename); File file = new File(fileName); if (!file.getParentFile().exists()) { file.getParentFile().mkdirs(); } return fileName; } /** * get upload file absolute path and name * * @param tenantCode tenant code * @param filename file name * @return local file path */ public static String getUploadFilename(String tenantCode, String filename) { String fileName = String.format("%s/%s/resources/%s", DATA_BASEDIR, tenantCode, filename); File file = new File(fileName); if (!file.getParentFile().exists()) { file.getParentFile().mkdirs(); } return fileName; } /**
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,478
[Bug][Common] read the file garbled
When reading the file, no encoding is specified, resulting in garbled code ![image](https://user-images.githubusercontent.com/39816903/90090148-e3153d00-dd55-11ea-9eef-0a60784a153f.png) ![image](https://user-images.githubusercontent.com/39816903/90090344-5919a400-dd56-11ea-8366-64690029847e.png)
https://github.com/apache/dolphinscheduler/issues/3478
https://github.com/apache/dolphinscheduler/pull/3479
1cf40e1d1e4379e6b50a92871987d59291ccfd50
bb13f2eae78e4b829816068e8ca3d337d97a0fd2
"2020-08-13T03:13:35Z"
java
"2020-11-27T13:55:28Z"
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/FileUtils.java
* directory of process execution * * @param projectId project id * @param processDefineId process definition id * @param processInstanceId process instance id * @param taskInstanceId task instance id * @return directory of process execution */ public static String getProcessExecDir(int projectId, int processDefineId, int processInstanceId, int taskInstanceId) { String fileName = String.format("%s/exec/process/%s/%s/%s/%s", DATA_BASEDIR, Integer.toString(projectId), Integer.toString(processDefineId), Integer.toString(processInstanceId), Integer.toString(taskInstanceId)); File file = new File(fileName); if (!file.getParentFile().exists()) { file.getParentFile().mkdirs(); } return fileName; } /** * directory of process instances * * @param projectId project id * @param processDefineId process definition id * @param processInstanceId process instance id * @return directory of process instances */ public static String getProcessExecDir(int projectId, int processDefineId, int processInstanceId) { String fileName = String.format("%s/exec/process/%s/%s/%s", DATA_BASEDIR, Integer.toString(projectId), Integer.toString(processDefineId), Integer.toString(processInstanceId)); File file = new File(fileName); if (!file.getParentFile().exists()) {
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,478
[Bug][Common] read the file garbled
When reading the file, no encoding is specified, resulting in garbled code ![image](https://user-images.githubusercontent.com/39816903/90090148-e3153d00-dd55-11ea-9eef-0a60784a153f.png) ![image](https://user-images.githubusercontent.com/39816903/90090344-5919a400-dd56-11ea-8366-64690029847e.png)
https://github.com/apache/dolphinscheduler/issues/3478
https://github.com/apache/dolphinscheduler/pull/3479
1cf40e1d1e4379e6b50a92871987d59291ccfd50
bb13f2eae78e4b829816068e8ca3d337d97a0fd2
"2020-08-13T03:13:35Z"
java
"2020-11-27T13:55:28Z"
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/FileUtils.java
file.getParentFile().mkdirs(); } return fileName; } /** * @return get suffixes for resource files that support online viewing */ public static String getResourceViewSuffixs() { return PropertyUtils.getString(RESOURCE_VIEW_SUFFIXS, RESOURCE_VIEW_SUFFIXS_DEFAULT_VALUE); } /** * create directory and user * * @param execLocalPath execute local path * @param userName user name * @throws IOException errors */ public static void createWorkDirAndUserIfAbsent(String execLocalPath, String userName) throws IOException { File execLocalPathFile = new File(execLocalPath); if (execLocalPathFile.exists()) { org.apache.commons.io.FileUtils.forceDelete(execLocalPathFile); } org.apache.commons.io.FileUtils.forceMkdir(execLocalPathFile); String mkdirLog = "create dir success " + execLocalPath; LoggerUtils.logInfo(Optional.ofNullable(logger), mkdirLog); LoggerUtils.logInfo(Optional.ofNullable(taskLoggerThreadLocal.get()), mkdirLog); OSUtils.taskLoggerThreadLocal.set(taskLoggerThreadLocal.get());
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,478
[Bug][Common] read the file garbled
When reading the file, no encoding is specified, resulting in garbled code ![image](https://user-images.githubusercontent.com/39816903/90090148-e3153d00-dd55-11ea-9eef-0a60784a153f.png) ![image](https://user-images.githubusercontent.com/39816903/90090344-5919a400-dd56-11ea-8366-64690029847e.png)
https://github.com/apache/dolphinscheduler/issues/3478
https://github.com/apache/dolphinscheduler/pull/3479
1cf40e1d1e4379e6b50a92871987d59291ccfd50
bb13f2eae78e4b829816068e8ca3d337d97a0fd2
"2020-08-13T03:13:35Z"
java
"2020-11-27T13:55:28Z"
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/FileUtils.java
try { if (!OSUtils.getUserList().contains(userName)) { boolean isSuccessCreateUser = OSUtils.createUser(userName); String infoLog; if (isSuccessCreateUser) { infoLog = String.format("create user name success %s", userName); } else { infoLog = String.format("create user name fail %s", userName); } LoggerUtils.logInfo(Optional.ofNullable(logger), infoLog); LoggerUtils.logInfo(Optional.ofNullable(taskLoggerThreadLocal.get()), infoLog); } } catch (Throwable e) { LoggerUtils.logError(Optional.ofNullable(logger), e); LoggerUtils.logError(Optional.ofNullable(taskLoggerThreadLocal.get()), e); } OSUtils.taskLoggerThreadLocal.remove(); } /** * write content to file ,if parent path not exists, it will do one's utmost to mkdir * * @param content content * @param filePath target file path * @return true if write success */ public static boolean writeContent2File(String content, String filePath) { boolean flag = true; BufferedReader bufferedReader = null; BufferedWriter bufferedWriter = null; try {
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,478
[Bug][Common] read the file garbled
When reading the file, no encoding is specified, resulting in garbled code ![image](https://user-images.githubusercontent.com/39816903/90090148-e3153d00-dd55-11ea-9eef-0a60784a153f.png) ![image](https://user-images.githubusercontent.com/39816903/90090344-5919a400-dd56-11ea-8366-64690029847e.png)
https://github.com/apache/dolphinscheduler/issues/3478
https://github.com/apache/dolphinscheduler/pull/3479
1cf40e1d1e4379e6b50a92871987d59291ccfd50
bb13f2eae78e4b829816068e8ca3d337d97a0fd2
"2020-08-13T03:13:35Z"
java
"2020-11-27T13:55:28Z"
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/FileUtils.java
File distFile = new File(filePath); if (!distFile.getParentFile().exists() && !distFile.getParentFile().mkdirs()) { FileUtils.logger.error("mkdir parent failed"); return false; } bufferedReader = new BufferedReader(new StringReader(content)); bufferedWriter = new BufferedWriter(new FileWriter(distFile)); char[] buf = new char[1024]; int len; while ((len = bufferedReader.read(buf)) != -1) { bufferedWriter.write(buf, 0, len); } bufferedWriter.flush(); bufferedReader.close(); bufferedWriter.close(); } catch (IOException e) { FileUtils.logger.error(e.getMessage(), e); flag = false; return flag; } finally { IOUtils.closeQuietly(bufferedWriter); IOUtils.closeQuietly(bufferedReader); } return flag; } /** * Writes a String to a file creating the file if it does not exist. * <p> * NOTE: As from v1.3, the parent directories of the file will be created * if they do not exist.
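writeContent2File in this chunk still writes through new FileWriter(distFile), which silently uses the platform default charset; that is exactly the kind of call the issue flags. A hedged sketch of the same write done with an explicit encoding, not the project's final code:

```java
import java.io.BufferedWriter;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;

public class WriteWithCharset {

    // Write content to a file, creating parent directories, with the charset
    // stated explicitly instead of inherited from the platform default.
    static void writeContent(String content, String filePath) throws IOException {
        Path target = Paths.get(filePath);
        Files.createDirectories(target.getParent());
        try (BufferedWriter writer = Files.newBufferedWriter(target, StandardCharsets.UTF_8)) {
            writer.write(content);
        }
    }

    public static void main(String[] args) throws IOException {
        writeContent("中文内容 survives an explicit UTF-8 write", "/tmp/demo-utf8.txt");
    }
}
```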
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,478
[Bug][Common] read the file garbled
When reading the file, no encoding is specified, resulting in garbled code ![image](https://user-images.githubusercontent.com/39816903/90090148-e3153d00-dd55-11ea-9eef-0a60784a153f.png) ![image](https://user-images.githubusercontent.com/39816903/90090344-5919a400-dd56-11ea-8366-64690029847e.png)
https://github.com/apache/dolphinscheduler/issues/3478
https://github.com/apache/dolphinscheduler/pull/3479
1cf40e1d1e4379e6b50a92871987d59291ccfd50
bb13f2eae78e4b829816068e8ca3d337d97a0fd2
"2020-08-13T03:13:35Z"
java
"2020-11-27T13:55:28Z"
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/FileUtils.java
* * @param file the file to write * @param data the content to write to the file * @param encoding the encoding to use, {@code null} means platform default * @throws IOException in case of an I/O error * @throws java.io.UnsupportedEncodingException if the encoding is not supported by the VM * @since 2.4 */ public static void writeStringToFile(File file, String data, Charset encoding) throws IOException { writeStringToFile(file, data, encoding, false); } /** * Writes a String to a file creating the file if it does not exist. * <p> * NOTE: As from v1.3, the parent directories of the file will be created * if they do not exist. * * @param file the file to write * @param data the content to write to the file * @param encoding the encoding to use, {@code null} means platform default * @throws IOException in case of an I/O error * @throws java.io.UnsupportedEncodingException if the encoding is not supported by the VM */ public static void writeStringToFile(File file, String data, String encoding) throws IOException { writeStringToFile(file, data, encoding, false); } /** * Writes a String to a file creating the file if it does not exist. * * @param file the file to write
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,478
[Bug][Common] read the file garbled
When reading the file, no encoding is specified, resulting in garbled code ![image](https://user-images.githubusercontent.com/39816903/90090148-e3153d00-dd55-11ea-9eef-0a60784a153f.png) ![image](https://user-images.githubusercontent.com/39816903/90090344-5919a400-dd56-11ea-8366-64690029847e.png)
https://github.com/apache/dolphinscheduler/issues/3478
https://github.com/apache/dolphinscheduler/pull/3479
1cf40e1d1e4379e6b50a92871987d59291ccfd50
bb13f2eae78e4b829816068e8ca3d337d97a0fd2
"2020-08-13T03:13:35Z"
java
"2020-11-27T13:55:28Z"
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/FileUtils.java
* @param data the content to write to the file * @param encoding the encoding to use, {@code null} means platform default * @param append if {@code true}, then the String will be added to the * end of the file rather than overwriting * @throws IOException in case of an I/O error * @since 2.3 */ public static void writeStringToFile(File file, String data, Charset encoding, boolean append) throws IOException { OutputStream out = null; try { out = openOutputStream(file, append); IOUtils.write(data, out, encoding); out.close(); } finally { IOUtils.closeQuietly(out); } } /** * Writes a String to a file creating the file if it does not exist. * * @param file the file to write * @param data the content to write to the file * @param encoding the encoding to use, {@code null} means platform default * @param append if {@code true}, then the String will be added to the * end of the file rather than overwriting * @throws IOException in case of an I/O error * @throws UnsupportedCharsetException thrown instead of {@link UnsupportedEncodingException} in version 2.2 if the encoding is not * supported by the VM * @since 2.1 */
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,478
[Bug][Common] read the file garbled
When reading the file, no encoding is specified, resulting in garbled code ![image](https://user-images.githubusercontent.com/39816903/90090148-e3153d00-dd55-11ea-9eef-0a60784a153f.png) ![image](https://user-images.githubusercontent.com/39816903/90090344-5919a400-dd56-11ea-8366-64690029847e.png)
https://github.com/apache/dolphinscheduler/issues/3478
https://github.com/apache/dolphinscheduler/pull/3479
1cf40e1d1e4379e6b50a92871987d59291ccfd50
bb13f2eae78e4b829816068e8ca3d337d97a0fd2
"2020-08-13T03:13:35Z"
java
"2020-11-27T13:55:28Z"
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/FileUtils.java
public static void writeStringToFile(File file, String data, String encoding, boolean append) throws IOException { writeStringToFile(file, data, Charsets.toCharset(encoding), append); } /** * Writes a String to a file creating the file if it does not exist using the default encoding for the VM. * * @param file the file to write * @param data the content to write to the file * @throws IOException in case of an I/O error */ public static void writeStringToFile(File file, String data) throws IOException { writeStringToFile(file, data, Charset.defaultCharset(), false); } /** * Writes a String to a file creating the file if it does not exist using the default encoding for the VM. * * @param file the file to write * @param data the content to write to the file * @param append if {@code true}, then the String will be added to the * end of the file rather than overwriting * @throws IOException in case of an I/O error * @since 2.1 */ public static void writeStringToFile(File file, String data, boolean append) throws IOException { writeStringToFile(file, data, Charset.defaultCharset(), append); } /** * Opens a {@link FileOutputStream} for the specified file, checking and * creating the parent directory if it does not exist. * <p>
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,478
[Bug][Common] read the file garbled
When reading the file, no encoding is specified, resulting in garbled code ![image](https://user-images.githubusercontent.com/39816903/90090148-e3153d00-dd55-11ea-9eef-0a60784a153f.png) ![image](https://user-images.githubusercontent.com/39816903/90090344-5919a400-dd56-11ea-8366-64690029847e.png)
https://github.com/apache/dolphinscheduler/issues/3478
https://github.com/apache/dolphinscheduler/pull/3479
1cf40e1d1e4379e6b50a92871987d59291ccfd50
bb13f2eae78e4b829816068e8ca3d337d97a0fd2
"2020-08-13T03:13:35Z"
java
"2020-11-27T13:55:28Z"
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/FileUtils.java
* At the end of the method either the stream will be successfully opened, * or an exception will have been thrown. * <p> * The parent directory will be created if it does not exist. * The file will be created if it does not exist. * An exception is thrown if the file object exists but is a directory. * An exception is thrown if the file exists but cannot be written to. * An exception is thrown if the parent directory cannot be created. * * @param file the file to open for output, must not be {@code null} * @return a new {@link FileOutputStream} for the specified file * @throws IOException if the file object is a directory * @throws IOException if the file cannot be written to * @throws IOException if a parent directory needs creating but that fails * @since 1.3 */ public static FileOutputStream openOutputStream(File file) throws IOException { return openOutputStream(file, false); } /** * Opens a {@link FileOutputStream} for the specified file, checking and * creating the parent directory if it does not exist. * <p> * At the end of the method either the stream will be successfully opened, * or an exception will have been thrown. * <p> * The parent directory will be created if it does not exist. * The file will be created if it does not exist. * An exception is thrown if the file object exists but is a directory. * An exception is thrown if the file exists but cannot be written to.
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,478
[Bug][Common] read the file garbled
When reading the file, no encoding is specified, resulting in garbled code ![image](https://user-images.githubusercontent.com/39816903/90090148-e3153d00-dd55-11ea-9eef-0a60784a153f.png) ![image](https://user-images.githubusercontent.com/39816903/90090344-5919a400-dd56-11ea-8366-64690029847e.png)
https://github.com/apache/dolphinscheduler/issues/3478
https://github.com/apache/dolphinscheduler/pull/3479
1cf40e1d1e4379e6b50a92871987d59291ccfd50
bb13f2eae78e4b829816068e8ca3d337d97a0fd2
"2020-08-13T03:13:35Z"
java
"2020-11-27T13:55:28Z"
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/FileUtils.java
* An exception is thrown if the parent directory cannot be created. * * @param file the file to open for output, must not be {@code null} * @param append if {@code true}, then bytes will be added to the * end of the file rather than overwriting * @return a new {@link FileOutputStream} for the specified file * @throws IOException if the file object is a directory * @throws IOException if the file cannot be written to * @throws IOException if a parent directory needs creating but that fails * @since 2.1 */ public static FileOutputStream openOutputStream(File file, boolean append) throws IOException { if (file.exists()) { if (file.isDirectory()) { throw new IOException("File '" + file + "' exists but is a directory"); } if (!file.canWrite()) { throw new IOException("File '" + file + "' cannot be written to"); } } else { File parent = file.getParentFile(); if (parent != null && !parent.mkdirs() && !parent.isDirectory()) { throw new IOException("Directory '" + parent + "' could not be created"); } } return new FileOutputStream(file, append); } /** * deletes a directory recursively *
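The `openOutputStream(File, boolean)` shown above creates missing parent directories before opening the stream in append mode. A simplified, self-contained rendition of the same pattern; the path and log line are illustrative only and not taken from the project:

```java
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;

public class OpenOutputStreamExample {
    // Create missing parents, then open the target file for appending.
    static OutputStream open(File file, boolean append) throws IOException {
        File parent = file.getParentFile();
        if (parent != null && !parent.isDirectory() && !parent.mkdirs()) {
            throw new IOException("Directory '" + parent + "' could not be created");
        }
        return new FileOutputStream(file, append);
    }

    public static void main(String[] args) throws IOException {
        File target = new File(System.getProperty("java.io.tmpdir"), "open-demo/nested/out.log");
        try (OutputStream out = open(target, true)) {
            out.write("appended line\n".getBytes(StandardCharsets.UTF_8));
        }
        System.out.println("size now: " + target.length());
    }
}
```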
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,478
[Bug][Common] read the file garbled
When reading the file, no encoding is specified, resulting in garbled code ![image](https://user-images.githubusercontent.com/39816903/90090148-e3153d00-dd55-11ea-9eef-0a60784a153f.png) ![image](https://user-images.githubusercontent.com/39816903/90090344-5919a400-dd56-11ea-8366-64690029847e.png)
https://github.com/apache/dolphinscheduler/issues/3478
https://github.com/apache/dolphinscheduler/pull/3479
1cf40e1d1e4379e6b50a92871987d59291ccfd50
bb13f2eae78e4b829816068e8ca3d337d97a0fd2
"2020-08-13T03:13:35Z"
java
"2020-11-27T13:55:28Z"
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/FileUtils.java
* @param dir directory * @throws IOException in case deletion is unsuccessful */ public static void deleteDir(String dir) throws IOException { org.apache.commons.io.FileUtils.deleteDirectory(new File(dir)); } /** * Deletes a file. If file is a directory, delete it and all sub-directories. * <p> * The difference between File.delete() and this method are: * <ul> * <li>A directory to be deleted does not have to be empty.</li> * <li>You get exceptions when a file or directory cannot be deleted. * (java.io.File methods returns a boolean)</li> * </ul> * * @param filename file name * @throws IOException in case deletion is unsuccessful */ public static void deleteFile(String filename) throws IOException { org.apache.commons.io.FileUtils.forceDelete(new File(filename)); } /** * Gets all the parent subdirectories of the parentDir directory * * @param parentDir parent dir * @return all dirs */ public static File[] getAllDir(String parentDir) { if (parentDir == null || "".equals(parentDir)) {
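The `deleteDir` above delegates to commons-io. For context only, a dependency-free sketch of the same recursive delete using `java.nio.file`; this is not how the project implements it, just an equivalent operation:

```java
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Comparator;
import java.util.stream.Stream;

public class DeleteDirExample {
    // Walk the tree and remove children before their parents.
    static void deleteDir(Path dir) throws IOException {
        if (!Files.exists(dir)) {
            return;
        }
        try (Stream<Path> walk = Files.walk(dir)) {
            walk.sorted(Comparator.reverseOrder())
                .forEach(p -> p.toFile().delete());
        }
    }

    public static void main(String[] args) throws IOException {
        Path root = Files.createTempDirectory("delete-demo");
        Files.createDirectories(root.resolve("a/b"));
        Files.write(root.resolve("a/b/file.txt"), "x".getBytes());
        deleteDir(root);
        System.out.println("still exists? " + Files.exists(root)); // false
    }
}
```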
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,478
[Bug][Common] read the file garbled
When reading the file, no encoding is specified, resulting in garbled code ![image](https://user-images.githubusercontent.com/39816903/90090148-e3153d00-dd55-11ea-9eef-0a60784a153f.png) ![image](https://user-images.githubusercontent.com/39816903/90090344-5919a400-dd56-11ea-8366-64690029847e.png)
https://github.com/apache/dolphinscheduler/issues/3478
https://github.com/apache/dolphinscheduler/pull/3479
1cf40e1d1e4379e6b50a92871987d59291ccfd50
bb13f2eae78e4b829816068e8ca3d337d97a0fd2
"2020-08-13T03:13:35Z"
java
"2020-11-27T13:55:28Z"
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/FileUtils.java
throw new RuntimeException("parentDir can not be empty");
        }
        File file = new File(parentDir);
        if (!file.exists() || !file.isDirectory()) {
            throw new RuntimeException("parentDir not exist, or is not a directory:" + parentDir);
        }
        return file.listFiles(File::isDirectory);
    }

    /**
     * Get Content
     *
     * @param inputStream input stream
     * @return string of input stream
     */
    public static String readFile2Str(InputStream inputStream) {
        try {
            ByteArrayOutputStream output = new ByteArrayOutputStream();
            byte[] buffer = new byte[1024];
            int length;
            while ((length = inputStream.read(buffer)) != -1) {
                output.write(buffer, 0, length);
            }
            return output.toString();
        } catch (Exception e) {
            logger.error(e.getMessage(), e);
            throw new RuntimeException(e);
        }
    }
}
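`readFile2Str` above decodes with the no-argument `ByteArrayOutputStream.toString()`, which uses the platform default charset; that is exactly the kind of implicit-encoding call issue 3478 flags. A sketch of the same copy loop decoding explicitly (illustrative only, not the merged fix):

```java
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;

public class ReadStreamExample {
    // Same buffer-copy pattern, but the final decode names its charset.
    static String readToString(InputStream in) throws IOException {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        byte[] buffer = new byte[1024];
        int length;
        while ((length = in.read(buffer)) != -1) {
            out.write(buffer, 0, length);
        }
        return new String(out.toByteArray(), StandardCharsets.UTF_8);
    }

    public static void main(String[] args) throws IOException {
        byte[] utf8 = "调度任务日志".getBytes(StandardCharsets.UTF_8);
        System.out.println(readToString(new ByteArrayInputStream(utf8)));
    }
}
```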
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,478
[Bug][Common] read the file garbled
When reading the file, no encoding is specified, resulting in garbled code ![image](https://user-images.githubusercontent.com/39816903/90090148-e3153d00-dd55-11ea-9eef-0a60784a153f.png) ![image](https://user-images.githubusercontent.com/39816903/90090344-5919a400-dd56-11ea-8366-64690029847e.png)
https://github.com/apache/dolphinscheduler/issues/3478
https://github.com/apache/dolphinscheduler/pull/3479
1cf40e1d1e4379e6b50a92871987d59291ccfd50
bb13f2eae78e4b829816068e8ca3d337d97a0fd2
"2020-08-13T03:13:35Z"
java
"2020-11-27T13:55:28Z"
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HadoopUtils.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.common.utils; import static org.apache.dolphinscheduler.common.Constants.RESOURCE_UPLOAD_PATH; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; import org.apache.dolphinscheduler.common.enums.ResUploadType; import org.apache.dolphinscheduler.common.enums.ResourceType; import org.apache.commons.io.IOUtils; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream;
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,478
[Bug][Common] read the file garbled
When reading the file, no encoding is specified, resulting in garbled code ![image](https://user-images.githubusercontent.com/39816903/90090148-e3153d00-dd55-11ea-9eef-0a60784a153f.png) ![image](https://user-images.githubusercontent.com/39816903/90090344-5919a400-dd56-11ea-8366-64690029847e.png)
https://github.com/apache/dolphinscheduler/issues/3478
https://github.com/apache/dolphinscheduler/pull/3479
1cf40e1d1e4379e6b50a92871987d59291ccfd50
bb13f2eae78e4b829816068e8ca3d337d97a0fd2
"2020-08-13T03:13:35Z"
java
"2020-11-27T13:55:28Z"
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HadoopUtils.java
import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.fs.Path; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.yarn.client.cli.RMAdminCLI; import java.io.BufferedReader; import java.io.Closeable; import java.io.File; import java.io.IOException; import java.io.InputStreamReader; import java.nio.file.Files; import java.security.PrivilegedExceptionAction; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; import java.util.stream.Stream; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.fasterxml.jackson.databind.node.ObjectNode; import com.google.common.cache.CacheBuilder; import com.google.common.cache.CacheLoader; import com.google.common.cache.LoadingCache; /** * hadoop utils * single instance */ public class HadoopUtils implements Closeable {
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,478
[Bug][Common] read the file garbled
When reading the file, no encoding is specified, resulting in garbled code ![image](https://user-images.githubusercontent.com/39816903/90090148-e3153d00-dd55-11ea-9eef-0a60784a153f.png) ![image](https://user-images.githubusercontent.com/39816903/90090344-5919a400-dd56-11ea-8366-64690029847e.png)
https://github.com/apache/dolphinscheduler/issues/3478
https://github.com/apache/dolphinscheduler/pull/3479
1cf40e1d1e4379e6b50a92871987d59291ccfd50
bb13f2eae78e4b829816068e8ca3d337d97a0fd2
"2020-08-13T03:13:35Z"
java
"2020-11-27T13:55:28Z"
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HadoopUtils.java
private static final Logger logger = LoggerFactory.getLogger(HadoopUtils.class); private static String hdfsUser = PropertyUtils.getString(Constants.HDFS_ROOT_USER); public static final String resourceUploadPath = PropertyUtils.getString(RESOURCE_UPLOAD_PATH, "/dolphinscheduler"); public static final String rmHaIds = PropertyUtils.getString(Constants.YARN_RESOURCEMANAGER_HA_RM_IDS); public static final String appAddress = PropertyUtils.getString(Constants.YARN_APPLICATION_STATUS_ADDRESS); public static final String jobHistoryAddress = PropertyUtils.getString(Constants.YARN_JOB_HISTORY_STATUS_ADDRESS); private static final String HADOOP_UTILS_KEY = "HADOOP_UTILS_KEY"; private static final LoadingCache<String, HadoopUtils> cache = CacheBuilder .newBuilder() .expireAfterWrite(PropertyUtils.getInt(Constants.KERBEROS_EXPIRE_TIME, 2), TimeUnit.HOURS) .build(new CacheLoader<String, HadoopUtils>() { @Override public HadoopUtils load(String key) throws Exception { return new HadoopUtils(); } }); private static volatile boolean yarnEnabled = false; private Configuration configuration; private FileSystem fs; private HadoopUtils() { init(); initHdfsPath(); }
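The singleton above is held in a Guava `LoadingCache` so the instance can be rebuilt transparently after the Kerberos expiry window. A toy version of the same pattern, assuming Guava is on the classpath (the chunk already imports it); the `Long` payload merely stands in for `new HadoopUtils()`:

```java
import java.util.concurrent.TimeUnit;

import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;

public class ExpiringSingletonExample {
    private static final LoadingCache<String, Long> CACHE = CacheBuilder
            .newBuilder()
            .expireAfterWrite(2, TimeUnit.HOURS)
            .build(new CacheLoader<String, Long>() {
                @Override
                public Long load(String key) {
                    // Rebuilt automatically once the cached entry expires.
                    return System.nanoTime();
                }
            });

    public static void main(String[] args) {
        Long first = CACHE.getUnchecked("KEY");
        Long second = CACHE.getUnchecked("KEY");
        System.out.println(first.equals(second)); // true until the entry expires
    }
}
```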
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,478
[Bug][Common] read the file garbled
When reading the file, no encoding is specified, resulting in garbled code ![image](https://user-images.githubusercontent.com/39816903/90090148-e3153d00-dd55-11ea-9eef-0a60784a153f.png) ![image](https://user-images.githubusercontent.com/39816903/90090344-5919a400-dd56-11ea-8366-64690029847e.png)
https://github.com/apache/dolphinscheduler/issues/3478
https://github.com/apache/dolphinscheduler/pull/3479
1cf40e1d1e4379e6b50a92871987d59291ccfd50
bb13f2eae78e4b829816068e8ca3d337d97a0fd2
"2020-08-13T03:13:35Z"
java
"2020-11-27T13:55:28Z"
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HadoopUtils.java
public static HadoopUtils getInstance() { return cache.getUnchecked(HADOOP_UTILS_KEY); } /** * init dolphinscheduler root path in hdfs */ private void initHdfsPath() { Path path = new Path(resourceUploadPath); try { if (!fs.exists(path)) { fs.mkdirs(path); } } catch (Exception e) { logger.error(e.getMessage(), e); } } /** * init hadoop configuration */ private void init() { try { configuration = new Configuration(); String resourceStorageType = PropertyUtils.getUpperCaseString(Constants.RESOURCE_STORAGE_TYPE); ResUploadType resUploadType = ResUploadType.valueOf(resourceStorageType); if (resUploadType == ResUploadType.HDFS) { if (PropertyUtils.getBoolean(Constants.HADOOP_SECURITY_AUTHENTICATION_STARTUP_STATE, false)) { System.setProperty(Constants.JAVA_SECURITY_KRB5_CONF, PropertyUtils.getString(Constants.JAVA_SECURITY_KRB5_CONF_PATH)); configuration.set(Constants.HADOOP_SECURITY_AUTHENTICATION, "kerberos"); hdfsUser = "";
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,478
[Bug][Common] read the file garbled
When reading the file, no encoding is specified, resulting in garbled code ![image](https://user-images.githubusercontent.com/39816903/90090148-e3153d00-dd55-11ea-9eef-0a60784a153f.png) ![image](https://user-images.githubusercontent.com/39816903/90090344-5919a400-dd56-11ea-8366-64690029847e.png)
https://github.com/apache/dolphinscheduler/issues/3478
https://github.com/apache/dolphinscheduler/pull/3479
1cf40e1d1e4379e6b50a92871987d59291ccfd50
bb13f2eae78e4b829816068e8ca3d337d97a0fd2
"2020-08-13T03:13:35Z"
java
"2020-11-27T13:55:28Z"
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HadoopUtils.java
UserGroupInformation.setConfiguration(configuration); UserGroupInformation.loginUserFromKeytab(PropertyUtils.getString(Constants.LOGIN_USER_KEY_TAB_USERNAME), PropertyUtils.getString(Constants.LOGIN_USER_KEY_TAB_PATH)); } String defaultFS = configuration.get(Constants.FS_DEFAULTFS); if (defaultFS.startsWith("file")) { String defaultFSProp = PropertyUtils.getString(Constants.FS_DEFAULTFS); if (StringUtils.isNotBlank(defaultFSProp)) { Map<String, String> fsRelatedProps = PropertyUtils.getPrefixedProperties("fs."); configuration.set(Constants.FS_DEFAULTFS, defaultFSProp); fsRelatedProps.forEach((key, value) -> configuration.set(key, value)); } else { logger.error("property:{} can not to be empty, please set!", Constants.FS_DEFAULTFS); throw new RuntimeException( String.format("property: %s can not to be empty, please set!", Constants.FS_DEFAULTFS) ); } } else { logger.info("get property:{} -> {}, from core-site.xml hdfs-site.xml ", Constants.FS_DEFAULTFS, defaultFS); } if (fs == null) { if (StringUtils.isNotEmpty(hdfsUser)) { UserGroupInformation ugi = UserGroupInformation.createRemoteUser(hdfsUser); ugi.doAs(new PrivilegedExceptionAction<Boolean>() { @Override public Boolean run() throws Exception { fs = FileSystem.get(configuration); return true;
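`getPrefixedProperties("fs.")` used above is the project's own helper. For orientation, a JDK-only stand-in showing the idea of collecting every `fs.`-prefixed property before copying it into the Hadoop configuration; the property names and values below are made up for the example:

```java
import java.util.Map;
import java.util.Properties;
import java.util.TreeMap;

public class PrefixedPropertiesExample {
    // Collect every property whose key starts with the given prefix.
    static Map<String, String> prefixed(Properties props, String prefix) {
        Map<String, String> result = new TreeMap<>();
        for (String name : props.stringPropertyNames()) {
            if (name.startsWith(prefix)) {
                result.put(name, props.getProperty(name));
            }
        }
        return result;
    }

    public static void main(String[] args) {
        Properties props = new Properties();
        props.setProperty("fs.defaultFS", "hdfs://mycluster:8020");
        props.setProperty("fs.s3a.endpoint", "http://s3.example.com");
        props.setProperty("yarn.resourcemanager.ha.rm.ids", "rm1,rm2");

        System.out.println(prefixed(props, "fs."));
        // {fs.defaultFS=hdfs://mycluster:8020, fs.s3a.endpoint=http://s3.example.com}
    }
}
```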
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,478
[Bug][Common] read the file garbled
When reading the file, no encoding is specified, resulting in garbled code ![image](https://user-images.githubusercontent.com/39816903/90090148-e3153d00-dd55-11ea-9eef-0a60784a153f.png) ![image](https://user-images.githubusercontent.com/39816903/90090344-5919a400-dd56-11ea-8366-64690029847e.png)
https://github.com/apache/dolphinscheduler/issues/3478
https://github.com/apache/dolphinscheduler/pull/3479
1cf40e1d1e4379e6b50a92871987d59291ccfd50
bb13f2eae78e4b829816068e8ca3d337d97a0fd2
"2020-08-13T03:13:35Z"
java
"2020-11-27T13:55:28Z"
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HadoopUtils.java
} }); } else { logger.warn("hdfs.root.user is not set value!"); fs = FileSystem.get(configuration); } } } else if (resUploadType == ResUploadType.S3) { System.setProperty(Constants.AWS_S3_V4, Constants.STRING_TRUE); configuration.set(Constants.FS_DEFAULTFS, PropertyUtils.getString(Constants.FS_DEFAULTFS)); configuration.set(Constants.FS_S3A_ENDPOINT, PropertyUtils.getString(Constants.FS_S3A_ENDPOINT)); configuration.set(Constants.FS_S3A_ACCESS_KEY, PropertyUtils.getString(Constants.FS_S3A_ACCESS_KEY)); configuration.set(Constants.FS_S3A_SECRET_KEY, PropertyUtils.getString(Constants.FS_S3A_SECRET_KEY)); fs = FileSystem.get(configuration); } } catch (Exception e) { logger.error(e.getMessage(), e); } } /** * @return Configuration */ public Configuration getConfiguration() { return configuration; } /** * get application url * * @param applicationId application id * @return url of application
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,478
[Bug][Common] read the file garbled
When reading the file, no encoding is specified, resulting in garbled code ![image](https://user-images.githubusercontent.com/39816903/90090148-e3153d00-dd55-11ea-9eef-0a60784a153f.png) ![image](https://user-images.githubusercontent.com/39816903/90090344-5919a400-dd56-11ea-8366-64690029847e.png)
https://github.com/apache/dolphinscheduler/issues/3478
https://github.com/apache/dolphinscheduler/pull/3479
1cf40e1d1e4379e6b50a92871987d59291ccfd50
bb13f2eae78e4b829816068e8ca3d337d97a0fd2
"2020-08-13T03:13:35Z"
java
"2020-11-27T13:55:28Z"
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HadoopUtils.java
*/ public String getApplicationUrl(String applicationId) throws Exception { /** * if rmHaIds contains xx, it signs not use resourcemanager * otherwise: * if rmHaIds is empty, single resourcemanager enabled * if rmHaIds not empty: resourcemanager HA enabled */ String appUrl = ""; if (StringUtils.isEmpty(rmHaIds)) { appUrl = appAddress; yarnEnabled = true; } else { appUrl = getAppAddress(appAddress, rmHaIds); yarnEnabled = true; logger.info("application url : {}", appUrl); } if (StringUtils.isBlank(appUrl)) { throw new Exception("application url is blank"); } return String.format(appUrl, applicationId); } public String getJobHistoryUrl(String applicationId) { String jobId = applicationId.replace("application", "job"); return String.format(jobHistoryAddress, jobId); } /**
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,478
[Bug][Common] read the file garbled
When reading the file, no encoding is specified, resulting in garbled code ![image](https://user-images.githubusercontent.com/39816903/90090148-e3153d00-dd55-11ea-9eef-0a60784a153f.png) ![image](https://user-images.githubusercontent.com/39816903/90090344-5919a400-dd56-11ea-8366-64690029847e.png)
https://github.com/apache/dolphinscheduler/issues/3478
https://github.com/apache/dolphinscheduler/pull/3479
1cf40e1d1e4379e6b50a92871987d59291ccfd50
bb13f2eae78e4b829816068e8ca3d337d97a0fd2
"2020-08-13T03:13:35Z"
java
"2020-11-27T13:55:28Z"
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HadoopUtils.java
* cat file on hdfs * * @param hdfsFilePath hdfs file path * @return byte[] byte array * @throws IOException errors */ public byte[] catFile(String hdfsFilePath) throws IOException { if (StringUtils.isBlank(hdfsFilePath)) { logger.error("hdfs file path:{} is blank", hdfsFilePath); return new byte[0]; } try (FSDataInputStream fsDataInputStream = fs.open(new Path(hdfsFilePath))) { return IOUtils.toByteArray(fsDataInputStream); } } /** * cat file on hdfs * * @param hdfsFilePath hdfs file path * @param skipLineNums skip line numbers * @param limit read how many lines * @return content of file * @throws IOException errors */ public List<String> catFile(String hdfsFilePath, int skipLineNums, int limit) throws IOException { if (StringUtils.isBlank(hdfsFilePath)) { logger.error("hdfs file path:{} is blank", hdfsFilePath); return Collections.emptyList(); } try (FSDataInputStream in = fs.open(new Path(hdfsFilePath))) {
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,478
[Bug][Common] read the file garbled
When reading the file, no encoding is specified, resulting in garbled code ![image](https://user-images.githubusercontent.com/39816903/90090148-e3153d00-dd55-11ea-9eef-0a60784a153f.png) ![image](https://user-images.githubusercontent.com/39816903/90090344-5919a400-dd56-11ea-8366-64690029847e.png)
https://github.com/apache/dolphinscheduler/issues/3478
https://github.com/apache/dolphinscheduler/pull/3479
1cf40e1d1e4379e6b50a92871987d59291ccfd50
bb13f2eae78e4b829816068e8ca3d337d97a0fd2
"2020-08-13T03:13:35Z"
java
"2020-11-27T13:55:28Z"
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HadoopUtils.java
BufferedReader br = new BufferedReader(new InputStreamReader(in)); Stream<String> stream = br.lines().skip(skipLineNums).limit(limit); return stream.collect(Collectors.toList()); } } /** * make the given file and all non-existent parents into * directories. Has the semantics of Unix 'mkdir -p'. * Existence of the directory hierarchy is not an error. * * @param hdfsPath path to create * @return mkdir result * @throws IOException errors */ public boolean mkdir(String hdfsPath) throws IOException { return fs.mkdirs(new Path(hdfsPath)); } /** * copy files between FileSystems * * @param srcPath source hdfs path * @param dstPath destination hdfs path * @param deleteSource whether to delete the src * @param overwrite whether to overwrite an existing file * @return if success or not * @throws IOException errors */ public boolean copy(String srcPath, String dstPath, boolean deleteSource, boolean overwrite) throws IOException { return FileUtil.copy(fs, new Path(srcPath), fs, new Path(dstPath), deleteSource, overwrite, fs.getConf()); }
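The skip/limit tail of `catFile` above works the same way on a local file, and note that `new InputStreamReader(in)` without a charset is another implicit-encoding spot of the kind issue 3478 describes. A runnable local-filesystem sketch with the charset made explicit; only the way the reader is opened differs from the HDFS version:

```java
import java.io.BufferedReader;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List;
import java.util.stream.Collectors;

public class SkipLimitReadExample {
    static List<String> readLines(Path file, int skip, int limit) throws IOException {
        try (BufferedReader br = Files.newBufferedReader(file, StandardCharsets.UTF_8)) {
            // Skip the first `skip` lines, then keep at most `limit` lines.
            return br.lines().skip(skip).limit(limit).collect(Collectors.toList());
        }
    }

    public static void main(String[] args) throws IOException {
        Path file = Files.createTempFile("lines-demo", ".log");
        Files.write(file, "line1\nline2\nline3\nline4\n".getBytes(StandardCharsets.UTF_8));
        System.out.println(readLines(file, 1, 2)); // [line2, line3]
        Files.delete(file);
    }
}
```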
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,478
[Bug][Common] read the file garbled
When reading the file, no encoding is specified, resulting in garbled code ![image](https://user-images.githubusercontent.com/39816903/90090148-e3153d00-dd55-11ea-9eef-0a60784a153f.png) ![image](https://user-images.githubusercontent.com/39816903/90090344-5919a400-dd56-11ea-8366-64690029847e.png)
https://github.com/apache/dolphinscheduler/issues/3478
https://github.com/apache/dolphinscheduler/pull/3479
1cf40e1d1e4379e6b50a92871987d59291ccfd50
bb13f2eae78e4b829816068e8ca3d337d97a0fd2
"2020-08-13T03:13:35Z"
java
"2020-11-27T13:55:28Z"
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HadoopUtils.java
/** * the src file is on the local disk. Add it to FS at * the given dst name. * * @param srcFile local file * @param dstHdfsPath destination hdfs path * @param deleteSource whether to delete the src * @param overwrite whether to overwrite an existing file * @return if success or not * @throws IOException errors */ public boolean copyLocalToHdfs(String srcFile, String dstHdfsPath, boolean deleteSource, boolean overwrite) throws IOException { Path srcPath = new Path(srcFile); Path dstPath = new Path(dstHdfsPath); fs.copyFromLocalFile(deleteSource, overwrite, srcPath, dstPath); return true; } /** * copy hdfs file to local * * @param srcHdfsFilePath source hdfs file path * @param dstFile destination file * @param deleteSource delete source * @param overwrite overwrite * @return result of copy hdfs file to local * @throws IOException errors */ public boolean copyHdfsToLocal(String srcHdfsFilePath, String dstFile, boolean deleteSource, boolean overwrite) throws IOException { Path srcPath = new Path(srcHdfsFilePath); File dstPath = new File(dstFile);
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,478
[Bug][Common] read the file garbled
When reading the file, no encoding is specified, resulting in garbled code ![image](https://user-images.githubusercontent.com/39816903/90090148-e3153d00-dd55-11ea-9eef-0a60784a153f.png) ![image](https://user-images.githubusercontent.com/39816903/90090344-5919a400-dd56-11ea-8366-64690029847e.png)
https://github.com/apache/dolphinscheduler/issues/3478
https://github.com/apache/dolphinscheduler/pull/3479
1cf40e1d1e4379e6b50a92871987d59291ccfd50
bb13f2eae78e4b829816068e8ca3d337d97a0fd2
"2020-08-13T03:13:35Z"
java
"2020-11-27T13:55:28Z"
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HadoopUtils.java
if (dstPath.exists()) { if (dstPath.isFile()) { if (overwrite) { Files.delete(dstPath.toPath()); } } else { logger.error("destination file must be a file"); } } if (!dstPath.getParentFile().exists()) { dstPath.getParentFile().mkdirs(); } return FileUtil.copy(fs, srcPath, dstPath, deleteSource, fs.getConf()); } /** * delete a file * * @param hdfsFilePath the path to delete. * @param recursive if path is a directory and set to * true, the directory is deleted else throws an exception. In * case of a file the recursive can be set to either true or false. * @return true if delete is successful else false. * @throws IOException errors */ public boolean delete(String hdfsFilePath, boolean recursive) throws IOException { return fs.delete(new Path(hdfsFilePath), recursive); } /** * check if exists *
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,478
[Bug][Common] read the file garbled
When reading the file, no encoding is specified, resulting in garbled code ![image](https://user-images.githubusercontent.com/39816903/90090148-e3153d00-dd55-11ea-9eef-0a60784a153f.png) ![image](https://user-images.githubusercontent.com/39816903/90090344-5919a400-dd56-11ea-8366-64690029847e.png)
https://github.com/apache/dolphinscheduler/issues/3478
https://github.com/apache/dolphinscheduler/pull/3479
1cf40e1d1e4379e6b50a92871987d59291ccfd50
bb13f2eae78e4b829816068e8ca3d337d97a0fd2
"2020-08-13T03:13:35Z"
java
"2020-11-27T13:55:28Z"
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HadoopUtils.java
* @param hdfsFilePath source file path * @return result of exists or not * @throws IOException errors */ public boolean exists(String hdfsFilePath) throws IOException { return fs.exists(new Path(hdfsFilePath)); } /** * Gets a list of files in the directory * * @param filePath file path * @return {@link FileStatus} file status * @throws Exception errors */ public FileStatus[] listFileStatus(String filePath) throws Exception { try { return fs.listStatus(new Path(filePath)); } catch (IOException e) { logger.error("Get file list exception", e); throw new Exception("Get file list exception", e); } } /** * Renames Path src to Path dst. Can take place on local fs * or remote DFS. * * @param src path to be renamed * @param dst new path after rename * @return true if rename is successful * @throws IOException on failure
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,478
[Bug][Common] read the file garbled
When reading the file, no encoding is specified, resulting in garbled code ![image](https://user-images.githubusercontent.com/39816903/90090148-e3153d00-dd55-11ea-9eef-0a60784a153f.png) ![image](https://user-images.githubusercontent.com/39816903/90090344-5919a400-dd56-11ea-8366-64690029847e.png)
https://github.com/apache/dolphinscheduler/issues/3478
https://github.com/apache/dolphinscheduler/pull/3479
1cf40e1d1e4379e6b50a92871987d59291ccfd50
bb13f2eae78e4b829816068e8ca3d337d97a0fd2
"2020-08-13T03:13:35Z"
java
"2020-11-27T13:55:28Z"
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HadoopUtils.java
*/ public boolean rename(String src, String dst) throws IOException { return fs.rename(new Path(src), new Path(dst)); } /** * hadoop resourcemanager enabled or not * * @return result */ public boolean isYarnEnabled() { return yarnEnabled; } /** * get the state of an application * * @param applicationId application id * @return the return may be null or there may be other parse exceptions */ public ExecutionStatus getApplicationStatus(String applicationId) throws Exception { if (StringUtils.isEmpty(applicationId)) { return null; } String result = Constants.FAILED; String applicationUrl = getApplicationUrl(applicationId); logger.info("applicationUrl={}", applicationUrl); String responseContent; if (PropertyUtils.getBoolean(Constants.HADOOP_SECURITY_AUTHENTICATION_STARTUP_STATE, false)) { responseContent = KerberosHttpClient.get(applicationUrl); } else { responseContent = HttpUtils.get(applicationUrl);
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,478
[Bug][Common] read the file garbled
When reading the file, no encoding is specified, resulting in garbled code ![image](https://user-images.githubusercontent.com/39816903/90090148-e3153d00-dd55-11ea-9eef-0a60784a153f.png) ![image](https://user-images.githubusercontent.com/39816903/90090344-5919a400-dd56-11ea-8366-64690029847e.png)
https://github.com/apache/dolphinscheduler/issues/3478
https://github.com/apache/dolphinscheduler/pull/3479
1cf40e1d1e4379e6b50a92871987d59291ccfd50
bb13f2eae78e4b829816068e8ca3d337d97a0fd2
"2020-08-13T03:13:35Z"
java
"2020-11-27T13:55:28Z"
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HadoopUtils.java
} if (responseContent != null) { ObjectNode jsonObject = JSONUtils.parseObject(responseContent); if (!jsonObject.has("app")) { return ExecutionStatus.FAILURE; } result = jsonObject.path("app").path("finalStatus").asText(); } else { String jobHistoryUrl = getJobHistoryUrl(applicationId); logger.info("jobHistoryUrl={}", jobHistoryUrl); responseContent = HttpUtils.get(jobHistoryUrl); if (null != responseContent) { ObjectNode jsonObject = JSONUtils.parseObject(responseContent); if (!jsonObject.has("job")) { return ExecutionStatus.FAILURE; } result = jsonObject.path("job").path("state").asText(); } else { return ExecutionStatus.FAILURE; } } switch (result) { case Constants.ACCEPTED: return ExecutionStatus.SUBMITTED_SUCCESS; case Constants.SUCCEEDED: return ExecutionStatus.SUCCESS; case Constants.NEW: case Constants.NEW_SAVING: case Constants.SUBMITTED:
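The status mapping above walks a YARN REST payload with Jackson. A stand-alone sketch of the same traversal over a hard-coded, simplified response; it assumes jackson-databind on the classpath (the chunk already uses it via `ObjectNode`) and is not the project's `JSONUtils` wrapper:

```java
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

public class YarnStatusParseExample {
    public static void main(String[] args) throws Exception {
        // Simplified shape of the YARN "app" status payload.
        String response = "{\"app\":{\"finalStatus\":\"SUCCEEDED\",\"state\":\"FINISHED\"}}";

        JsonNode root = new ObjectMapper().readTree(response);
        if (!root.has("app")) {
            System.out.println("FAILURE");
            return;
        }
        String finalStatus = root.path("app").path("finalStatus").asText();
        System.out.println("finalStatus=" + finalStatus); // SUCCEEDED
    }
}
```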
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,478
[Bug][Common] read the file garbled
When reading the file, no encoding is specified, resulting in garbled code ![image](https://user-images.githubusercontent.com/39816903/90090148-e3153d00-dd55-11ea-9eef-0a60784a153f.png) ![image](https://user-images.githubusercontent.com/39816903/90090344-5919a400-dd56-11ea-8366-64690029847e.png)
https://github.com/apache/dolphinscheduler/issues/3478
https://github.com/apache/dolphinscheduler/pull/3479
1cf40e1d1e4379e6b50a92871987d59291ccfd50
bb13f2eae78e4b829816068e8ca3d337d97a0fd2
"2020-08-13T03:13:35Z"
java
"2020-11-27T13:55:28Z"
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HadoopUtils.java
case Constants.FAILED: return ExecutionStatus.FAILURE; case Constants.KILLED: return ExecutionStatus.KILL; case Constants.RUNNING: default: return ExecutionStatus.RUNNING_EXECUTION; } } /** * get data hdfs path * * @return data hdfs path */ public static String getHdfsDataBasePath() { if ("/".equals(resourceUploadPath)) { return ""; } else { return resourceUploadPath; } } /** * hdfs resource dir * * @param tenantCode tenant code * @param resourceType resource type * @return hdfs resource dir */ public static String getHdfsDir(ResourceType resourceType, String tenantCode) {
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,478
[Bug][Common] read the file garbled
When reading the file, no encoding is specified, resulting in garbled code ![image](https://user-images.githubusercontent.com/39816903/90090148-e3153d00-dd55-11ea-9eef-0a60784a153f.png) ![image](https://user-images.githubusercontent.com/39816903/90090344-5919a400-dd56-11ea-8366-64690029847e.png)
https://github.com/apache/dolphinscheduler/issues/3478
https://github.com/apache/dolphinscheduler/pull/3479
1cf40e1d1e4379e6b50a92871987d59291ccfd50
bb13f2eae78e4b829816068e8ca3d337d97a0fd2
"2020-08-13T03:13:35Z"
java
"2020-11-27T13:55:28Z"
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HadoopUtils.java
String hdfsDir = ""; if (resourceType.equals(ResourceType.FILE)) { hdfsDir = getHdfsResDir(tenantCode); } else if (resourceType.equals(ResourceType.UDF)) { hdfsDir = getHdfsUdfDir(tenantCode); } return hdfsDir; } /** * hdfs resource dir * * @param tenantCode tenant code * @return hdfs resource dir */ public static String getHdfsResDir(String tenantCode) { return String.format("%s/resources", getHdfsTenantDir(tenantCode)); } /** * hdfs user dir * * @param tenantCode tenant code * @param userId user id * @return hdfs resource dir */ public static String getHdfsUserDir(String tenantCode, int userId) { return String.format("%s/home/%d", getHdfsTenantDir(tenantCode), userId); } /** * hdfs udf dir *
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,478
[Bug][Common] read the file garbled
When reading the file, no encoding is specified, resulting in garbled code ![image](https://user-images.githubusercontent.com/39816903/90090148-e3153d00-dd55-11ea-9eef-0a60784a153f.png) ![image](https://user-images.githubusercontent.com/39816903/90090344-5919a400-dd56-11ea-8366-64690029847e.png)
https://github.com/apache/dolphinscheduler/issues/3478
https://github.com/apache/dolphinscheduler/pull/3479
1cf40e1d1e4379e6b50a92871987d59291ccfd50
bb13f2eae78e4b829816068e8ca3d337d97a0fd2
"2020-08-13T03:13:35Z"
java
"2020-11-27T13:55:28Z"
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HadoopUtils.java
* @param tenantCode tenant code * @return get udf dir on hdfs */ public static String getHdfsUdfDir(String tenantCode) { return String.format("%s/udfs", getHdfsTenantDir(tenantCode)); } /** * get hdfs file name * * @param resourceType resource type * @param tenantCode tenant code * @param fileName file name * @return hdfs file name */ public static String getHdfsFileName(ResourceType resourceType, String tenantCode, String fileName) { if (fileName.startsWith("/")) { fileName = fileName.replaceFirst("/", ""); } return String.format("%s/%s", getHdfsDir(resourceType, tenantCode), fileName); } /** * get absolute path and name for resource file on hdfs * * @param tenantCode tenant code * @param fileName file name * @return get absolute path and name for file on hdfs */ public static String getHdfsResourceFileName(String tenantCode, String fileName) { if (fileName.startsWith("/")) { fileName = fileName.replaceFirst("/", "");
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,478
[Bug][Common] read the file garbled
When reading the file, no encoding is specified, resulting in garbled code ![image](https://user-images.githubusercontent.com/39816903/90090148-e3153d00-dd55-11ea-9eef-0a60784a153f.png) ![image](https://user-images.githubusercontent.com/39816903/90090344-5919a400-dd56-11ea-8366-64690029847e.png)
https://github.com/apache/dolphinscheduler/issues/3478
https://github.com/apache/dolphinscheduler/pull/3479
1cf40e1d1e4379e6b50a92871987d59291ccfd50
bb13f2eae78e4b829816068e8ca3d337d97a0fd2
"2020-08-13T03:13:35Z"
java
"2020-11-27T13:55:28Z"
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HadoopUtils.java
} return String.format("%s/%s", getHdfsResDir(tenantCode), fileName); } /** * get absolute path and name for udf file on hdfs * * @param tenantCode tenant code * @param fileName file name * @return get absolute path and name for udf file on hdfs */ public static String getHdfsUdfFileName(String tenantCode, String fileName) { if (fileName.startsWith("/")) { fileName = fileName.replaceFirst("/", ""); } return String.format("%s/%s", getHdfsUdfDir(tenantCode), fileName); } /** * @param tenantCode tenant code * @return file directory of tenants on hdfs */ public static String getHdfsTenantDir(String tenantCode) { return String.format("%s/%s", getHdfsDataBasePath(), tenantCode); } /** * getAppAddress * * @param appAddress app address * @param rmHa resource manager ha * @return app address */
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,478
[Bug][Common] read the file garbled
When reading the file, no encoding is specified, resulting in garbled code ![image](https://user-images.githubusercontent.com/39816903/90090148-e3153d00-dd55-11ea-9eef-0a60784a153f.png) ![image](https://user-images.githubusercontent.com/39816903/90090344-5919a400-dd56-11ea-8366-64690029847e.png)
https://github.com/apache/dolphinscheduler/issues/3478
https://github.com/apache/dolphinscheduler/pull/3479
1cf40e1d1e4379e6b50a92871987d59291ccfd50
bb13f2eae78e4b829816068e8ca3d337d97a0fd2
"2020-08-13T03:13:35Z"
java
"2020-11-27T13:55:28Z"
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HadoopUtils.java
public static String getAppAddress(String appAddress, String rmHa) { String activeRM = YarnHAAdminUtils.getAcitveRMName(rmHa); String[] split1 = appAddress.split(Constants.DOUBLE_SLASH); if (split1.length != 2) { return null; } String start = split1[0] + Constants.DOUBLE_SLASH; String[] split2 = split1[1].split(Constants.COLON); if (split2.length != 2) { return null; } String end = Constants.COLON + split2[1]; return start + activeRM + end; } @Override public void close() throws IOException { if (fs != null) { try { fs.close(); } catch (IOException e) { logger.error("Close HadoopUtils instance failed", e); throw new IOException("Close HadoopUtils instance failed", e); } } } /** * yarn ha admin utils */ private static final class YarnHAAdminUtils extends RMAdminCLI {
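`getAppAddress` above rebuilds the application URL around the active resourcemanager by splitting on `//` and `:` and keeping the scheme and port. The same string surgery rehearsed on a sample address; the hostnames here are made up:

```java
public class AppAddressExample {
    // Keep the scheme and port of the configured address, swap in the active RM host.
    static String withActiveRm(String appAddress, String activeRm) {
        String[] schemeAndRest = appAddress.split("//");
        if (schemeAndRest.length != 2) {
            return null;
        }
        String[] hostAndPort = schemeAndRest[1].split(":");
        if (hostAndPort.length != 2) {
            return null;
        }
        return schemeAndRest[0] + "//" + activeRm + ":" + hostAndPort[1];
    }

    public static void main(String[] args) {
        String template = "http://ds1:8088/ws/v1/cluster/apps/%s";
        System.out.println(withActiveRm(template, "rm2"));
        // http://rm2:8088/ws/v1/cluster/apps/%s
    }
}
```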
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,478
[Bug][Common] read the file garbled
When reading the file, no encoding is specified, resulting in garbled code ![image](https://user-images.githubusercontent.com/39816903/90090148-e3153d00-dd55-11ea-9eef-0a60784a153f.png) ![image](https://user-images.githubusercontent.com/39816903/90090344-5919a400-dd56-11ea-8366-64690029847e.png)
https://github.com/apache/dolphinscheduler/issues/3478
https://github.com/apache/dolphinscheduler/pull/3479
1cf40e1d1e4379e6b50a92871987d59291ccfd50
bb13f2eae78e4b829816068e8ca3d337d97a0fd2
"2020-08-13T03:13:35Z"
java
"2020-11-27T13:55:28Z"
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HadoopUtils.java
/** * get active resourcemanager * * @param rmIds * @return */ public static String getAcitveRMName(String rmIds) { String[] rmIdArr = rmIds.split(Constants.COMMA); int activeResourceManagerPort = PropertyUtils.getInt(Constants.HADOOP_RESOURCE_MANAGER_HTTPADDRESS_PORT, 8088); String yarnUrl = "http://%s:" + activeResourceManagerPort + "/ws/v1/cluster/info"; String state = null; try { /** * send http get request to rm1 */ state = getRMState(String.format(yarnUrl, rmIdArr[0])); if (Constants.HADOOP_RM_STATE_ACTIVE.equals(state)) { return rmIdArr[0]; } else if (Constants.HADOOP_RM_STATE_STANDBY.equals(state)) { state = getRMState(String.format(yarnUrl, rmIdArr[1])); if (Constants.HADOOP_RM_STATE_ACTIVE.equals(state)) { return rmIdArr[1]; } } else { return null; }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,478
[Bug][Common] read the file garbled
When reading the file, no encoding is specified, resulting in garbled code ![image](https://user-images.githubusercontent.com/39816903/90090148-e3153d00-dd55-11ea-9eef-0a60784a153f.png) ![image](https://user-images.githubusercontent.com/39816903/90090344-5919a400-dd56-11ea-8366-64690029847e.png)
https://github.com/apache/dolphinscheduler/issues/3478
https://github.com/apache/dolphinscheduler/pull/3479
1cf40e1d1e4379e6b50a92871987d59291ccfd50
bb13f2eae78e4b829816068e8ca3d337d97a0fd2
"2020-08-13T03:13:35Z"
java
"2020-11-27T13:55:28Z"
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HadoopUtils.java
} catch (Exception e) { state = getRMState(String.format(yarnUrl, rmIdArr[1])); if (Constants.HADOOP_RM_STATE_ACTIVE.equals(state)) { return rmIdArr[0]; } } return null; } /** * get ResourceManager state * * @param url * @return */ public static String getRMState(String url) { String retStr = HttpUtils.get(url); if (StringUtils.isEmpty(retStr)) { return null; } ObjectNode jsonObject = JSONUtils.parseObject(retStr); if (!jsonObject.has("clusterInfo")) { return null; } return jsonObject.get("clusterInfo").path("haState").asText(); } } }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,478
[Bug][Common] read the file garbled
When reading the file, no encoding is specified, resulting in garbled code ![image](https://user-images.githubusercontent.com/39816903/90090148-e3153d00-dd55-11ea-9eef-0a60784a153f.png) ![image](https://user-images.githubusercontent.com/39816903/90090344-5919a400-dd56-11ea-8366-64690029847e.png)
https://github.com/apache/dolphinscheduler/issues/3478
https://github.com/apache/dolphinscheduler/pull/3479
1cf40e1d1e4379e6b50a92871987d59291ccfd50
bb13f2eae78e4b829816068e8ca3d337d97a0fd2
"2020-08-13T03:13:35Z"
java
"2020-11-27T13:55:28Z"
dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/FileUtilsTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.common.utils; import org.apache.dolphinscheduler.common.Constants; import org.junit.Assert; import org.junit.Test; import org.junit.runner.RunWith; import org.powermock.api.mockito.PowerMockito; import org.powermock.core.classloader.annotations.PrepareForTest; import org.powermock.modules.junit4.PowerMockRunner; import static org.apache.dolphinscheduler.common.Constants.YYYYMMDDHHMMSS; @RunWith(PowerMockRunner.class)
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,478
[Bug][Common] read the file garbled
When reading the file, no encoding is specified, resulting in garbled code ![image](https://user-images.githubusercontent.com/39816903/90090148-e3153d00-dd55-11ea-9eef-0a60784a153f.png) ![image](https://user-images.githubusercontent.com/39816903/90090344-5919a400-dd56-11ea-8366-64690029847e.png)
https://github.com/apache/dolphinscheduler/issues/3478
https://github.com/apache/dolphinscheduler/pull/3479
1cf40e1d1e4379e6b50a92871987d59291ccfd50
bb13f2eae78e4b829816068e8ca3d337d97a0fd2
"2020-08-13T03:13:35Z"
java
"2020-11-27T13:55:28Z"
dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/FileUtilsTest.java
@PrepareForTest(DateUtils.class) public class FileUtilsTest { @Test public void suffix() { Assert.assertEquals("java", FileUtils.suffix("ninfor.java")); Assert.assertEquals("", FileUtils.suffix(null)); Assert.assertEquals("", FileUtils.suffix("")); Assert.assertEquals("", FileUtils.suffix("ninfor-java")); } @Test public void testGetDownloadFilename() { PowerMockito.mockStatic(DateUtils.class); PowerMockito.when(DateUtils.getCurrentTime(YYYYMMDDHHMMSS)).thenReturn("20190101101059"); Assert.assertEquals("/tmp/dolphinscheduler/download/20190101101059/test", FileUtils.getDownloadFilename("test")); } @Test public void testGetUploadFilename() { Assert.assertEquals("/tmp/dolphinscheduler/aaa/resources/bbb", FileUtils.getUploadFilename("aaa","bbb"));
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,478
[Bug][Common] read the file garbled
When reading the file, no encoding is specified, resulting in garbled code ![image](https://user-images.githubusercontent.com/39816903/90090148-e3153d00-dd55-11ea-9eef-0a60784a153f.png) ![image](https://user-images.githubusercontent.com/39816903/90090344-5919a400-dd56-11ea-8366-64690029847e.png)
https://github.com/apache/dolphinscheduler/issues/3478
https://github.com/apache/dolphinscheduler/pull/3479
1cf40e1d1e4379e6b50a92871987d59291ccfd50
bb13f2eae78e4b829816068e8ca3d337d97a0fd2
"2020-08-13T03:13:35Z"
java
"2020-11-27T13:55:28Z"
dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/FileUtilsTest.java
} @Test public void testGetProcessExecDir() { String dir = FileUtils.getProcessExecDir(1,2,3, 4); Assert.assertEquals("/tmp/dolphinscheduler/exec/process/1/2/3/4", dir); dir = FileUtils.getProcessExecDir(1,2,3); Assert.assertEquals("/tmp/dolphinscheduler/exec/process/1/2/3", dir); } @Test public void testCreateWorkDirAndUserIfAbsent() { try { FileUtils.createWorkDirAndUserIfAbsent("/tmp/createWorkDirAndUserIfAbsent", "test123"); Assert.assertTrue(true); } catch (Exception e) { Assert.assertTrue(false); } } @Test public void testSetValue() { try { PropertyUtils.setValue(Constants.DATASOURCE_ENCRYPTION_ENABLE,"true"); Assert.assertTrue(PropertyUtils.getBoolean(Constants.DATASOURCE_ENCRYPTION_ENABLE)); PropertyUtils.setValue(Constants.DATASOURCE_ENCRYPTION_ENABLE,"false"); Assert.assertFalse(PropertyUtils.getBoolean(Constants.DATASOURCE_ENCRYPTION_ENABLE)); } catch (Exception e) { Assert.assertTrue(false); } } }
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,457
[bug] flink args build problem
**Describe the bug** There was a BUG in the Dolphin Scheduler-1.3.1 that Dolphin set Flink launch parameters **To Reproduce** 1. If the slot number and taskManager number are set, Jar package cannot be found when flink task is executed 2. Among other parameters, specifying -yqu (the queue name of Flink on Yarn) or -ynm (Flink appName) is invalid !!!! Setting other boot parameters, such as -yqu and -ynm, can cause confusion in the order in which the flink boot parameters are set 2020-08-10 21:03:31.400 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - Hadoop version: 3.0.0-cdh6.3.2 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - JVM Options: 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -Dlog.file=/opt/flink-1.10.1/log/flink-dscheduler-client-cdh-05.log 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -Dlog4j.configuration=file:/opt/flink-1.10.1/conf/log4j-cli.properties 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -Dlogback.configurationFile=file:/opt/flink-1.10.1/conf/logback.xml 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - Program Arguments: 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - run 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -m 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - yarn-cluster 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -yjm 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - 1G 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -ytm 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - 6G 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -d 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -c 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - cn.~~.analysis.DurationAndMileage 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - analysis-assembly-2.3.jar 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - --qu 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - default 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -yqu 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - test 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -ynm 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - DurationAndMileage
https://github.com/apache/dolphinscheduler/issues/3457
https://github.com/apache/dolphinscheduler/pull/4166
68541f281d0b0908f605ad49847d3e7acdd5a302
cbc30b4900215424dcbbfb49539259d32273efc3
"2020-08-10T12:56:16Z"
java
"2020-12-10T14:37:21Z"
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/Constants.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.common; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; import org.apache.dolphinscheduler.common.utils.OSUtils; import java.util.regex.Pattern; /** * Constants */ public final class Constants {
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,457
[bug] flink args build problem
**Describe the bug** There was a BUG in the Dolphin Scheduler-1.3.1 that Dolphin set Flink launch parameters **To Reproduce** 1. If the slot number and taskManager number are set, Jar package cannot be found when flink task is executed 2. Among other parameters, specifying -yqu (the queue name of Flink on Yarn) or -ynm (Flink appName) is invalid !!!! Setting other boot parameters, such as -yqu and -ynm, can cause confusion in the order in which the flink boot parameters are set 2020-08-10 21:03:31.400 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - Hadoop version: 3.0.0-cdh6.3.2 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - JVM Options: 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -Dlog.file=/opt/flink-1.10.1/log/flink-dscheduler-client-cdh-05.log 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -Dlog4j.configuration=file:/opt/flink-1.10.1/conf/log4j-cli.properties 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -Dlogback.configurationFile=file:/opt/flink-1.10.1/conf/logback.xml 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - Program Arguments: 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - run 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -m 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - yarn-cluster 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -yjm 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - 1G 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -ytm 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - 6G 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -d 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -c 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - cn.~~.analysis.DurationAndMileage 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - analysis-assembly-2.3.jar 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - --qu 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - default 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -yqu 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - test 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -ynm 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - DurationAndMileage
https://github.com/apache/dolphinscheduler/issues/3457
https://github.com/apache/dolphinscheduler/pull/4166
68541f281d0b0908f605ad49847d3e7acdd5a302
cbc30b4900215424dcbbfb49539259d32273efc3
"2020-08-10T12:56:16Z"
java
"2020-12-10T14:37:21Z"
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/Constants.java
    private Constants() {
        throw new UnsupportedOperationException("Construct Constants");
    }

    /**
     * quartz config
     */
    public static final String ORG_QUARTZ_JOBSTORE_DRIVERDELEGATECLASS = "org.quartz.jobStore.driverDelegateClass";
    public static final String ORG_QUARTZ_SCHEDULER_INSTANCENAME = "org.quartz.scheduler.instanceName";
    public static final String ORG_QUARTZ_SCHEDULER_INSTANCEID = "org.quartz.scheduler.instanceId";
    public static final String ORG_QUARTZ_SCHEDULER_MAKESCHEDULERTHREADDAEMON = "org.quartz.scheduler.makeSchedulerThreadDaemon";
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,457
[bug] flink args build problem
**Describe the bug** There was a BUG in the Dolphin Scheduler-1.3.1 that Dolphin set Flink launch parameters **To Reproduce** 1. If the slot number and taskManager number are set, Jar package cannot be found when flink task is executed 2. Among other parameters, specifying -yqu (the queue name of Flink on Yarn) or -ynm (Flink appName) is invalid !!!! Setting other boot parameters, such as -yqu and -ynm, can cause confusion in the order in which the flink boot parameters are set 2020-08-10 21:03:31.400 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - Hadoop version: 3.0.0-cdh6.3.2 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - JVM Options: 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -Dlog.file=/opt/flink-1.10.1/log/flink-dscheduler-client-cdh-05.log 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -Dlog4j.configuration=file:/opt/flink-1.10.1/conf/log4j-cli.properties 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -Dlogback.configurationFile=file:/opt/flink-1.10.1/conf/logback.xml 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - Program Arguments: 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - run 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -m 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - yarn-cluster 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -yjm 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - 1G 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -ytm 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - 6G 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -d 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -c 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - cn.~~.analysis.DurationAndMileage 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - analysis-assembly-2.3.jar 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - --qu 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - default 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -yqu 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - test 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -ynm 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - DurationAndMileage
https://github.com/apache/dolphinscheduler/issues/3457
https://github.com/apache/dolphinscheduler/pull/4166
68541f281d0b0908f605ad49847d3e7acdd5a302
cbc30b4900215424dcbbfb49539259d32273efc3
"2020-08-10T12:56:16Z"
java
"2020-12-10T14:37:21Z"
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/Constants.java
    public static final String ORG_QUARTZ_JOBSTORE_USEPROPERTIES = "org.quartz.jobStore.useProperties";
    public static final String ORG_QUARTZ_THREADPOOL_CLASS = "org.quartz.threadPool.class";
    public static final String ORG_QUARTZ_THREADPOOL_THREADCOUNT = "org.quartz.threadPool.threadCount";
    public static final String ORG_QUARTZ_THREADPOOL_MAKETHREADSDAEMONS = "org.quartz.threadPool.makeThreadsDaemons";
    public static final String ORG_QUARTZ_THREADPOOL_THREADPRIORITY = "org.quartz.threadPool.threadPriority";
    public static final String ORG_QUARTZ_JOBSTORE_CLASS = "org.quartz.jobStore.class";
    public static final String ORG_QUARTZ_JOBSTORE_TABLEPREFIX = "org.quartz.jobStore.tablePrefix";
    public static final String ORG_QUARTZ_JOBSTORE_ISCLUSTERED = "org.quartz.jobStore.isClustered";
    public static final String ORG_QUARTZ_JOBSTORE_MISFIRETHRESHOLD = "org.quartz.jobStore.misfireThreshold";
    public static final String ORG_QUARTZ_JOBSTORE_CLUSTERCHECKININTERVAL = "org.quartz.jobStore.clusterCheckinInterval";
    public static final String ORG_QUARTZ_JOBSTORE_ACQUIRETRIGGERSWITHINLOCK = "org.quartz.jobStore.acquireTriggersWithinLock";
    public static final String ORG_QUARTZ_JOBSTORE_DATASOURCE = "org.quartz.jobStore.dataSource";
    public static final String ORG_QUARTZ_DATASOURCE_MYDS_CONNECTIONPROVIDER_CLASS = "org.quartz.dataSource.myDs.connectionProvider.class";

    /**
     * quartz config default value
     */
    public static final String QUARTZ_TABLE_PREFIX = "QRTZ_";
    public static final String QUARTZ_MISFIRETHRESHOLD = "60000";
    public static final String QUARTZ_CLUSTERCHECKININTERVAL = "5000";
    public static final String QUARTZ_DATASOURCE = "myDs";
    public static final String QUARTZ_THREADCOUNT = "25";
    public static final String QUARTZ_THREADPRIORITY = "5";
    public static final String QUARTZ_INSTANCENAME = "DolphinScheduler";
    public static final String QUARTZ_INSTANCEID = "AUTO";
    public static final String QUARTZ_ACQUIRETRIGGERSWITHINLOCK = "true";

    /**
     * common properties path
     */
    public static final String COMMON_PROPERTIES_PATH = "/common.properties";

    /**
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,457
[bug] flink args build problem
**Describe the bug** There was a BUG in the Dolphin Scheduler-1.3.1 that Dolphin set Flink launch parameters **To Reproduce** 1. If the slot number and taskManager number are set, Jar package cannot be found when flink task is executed 2. Among other parameters, specifying -yqu (the queue name of Flink on Yarn) or -ynm (Flink appName) is invalid !!!! Setting other boot parameters, such as -yqu and -ynm, can cause confusion in the order in which the flink boot parameters are set 2020-08-10 21:03:31.400 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - Hadoop version: 3.0.0-cdh6.3.2 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - JVM Options: 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -Dlog.file=/opt/flink-1.10.1/log/flink-dscheduler-client-cdh-05.log 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -Dlog4j.configuration=file:/opt/flink-1.10.1/conf/log4j-cli.properties 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -Dlogback.configurationFile=file:/opt/flink-1.10.1/conf/logback.xml 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - Program Arguments: 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - run 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -m 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - yarn-cluster 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -yjm 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - 1G 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -ytm 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - 6G 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -d 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -c 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - cn.~~.analysis.DurationAndMileage 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - analysis-assembly-2.3.jar 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - --qu 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - default 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -yqu 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - test 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -ynm 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - DurationAndMileage
https://github.com/apache/dolphinscheduler/issues/3457
https://github.com/apache/dolphinscheduler/pull/4166
68541f281d0b0908f605ad49847d3e7acdd5a302
cbc30b4900215424dcbbfb49539259d32273efc3
"2020-08-10T12:56:16Z"
java
"2020-12-10T14:37:21Z"
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/Constants.java
     * fs.defaultFS
     */
    public static final String FS_DEFAULTFS = "fs.defaultFS";

    /**
     * fs s3a endpoint
     */
    public static final String FS_S3A_ENDPOINT = "fs.s3a.endpoint";

    /**
     * fs s3a access key
     */
    public static final String FS_S3A_ACCESS_KEY = "fs.s3a.access.key";

    /**
     * fs s3a secret key
     */
    public static final String FS_S3A_SECRET_KEY = "fs.s3a.secret.key";

    /**
     * yarn.resourcemanager.ha.rm.ids
     */
    public static final String YARN_RESOURCEMANAGER_HA_RM_IDS = "yarn.resourcemanager.ha.rm.ids";
    public static final String YARN_RESOURCEMANAGER_HA_XX = "xx";

    /**
     * yarn.application.status.address
     */
    public static final String YARN_APPLICATION_STATUS_ADDRESS = "yarn.application.status.address";

    /**
     * yarn.job.history.status.address
     */
    public static final String YARN_JOB_HISTORY_STATUS_ADDRESS = "yarn.job.history.status.address";

    /**
     * hdfs configuration
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,457
[bug] flink args build problem
**Describe the bug** There was a BUG in the Dolphin Scheduler-1.3.1 that Dolphin set Flink launch parameters **To Reproduce** 1. If the slot number and taskManager number are set, Jar package cannot be found when flink task is executed 2. Among other parameters, specifying -yqu (the queue name of Flink on Yarn) or -ynm (Flink appName) is invalid !!!! Setting other boot parameters, such as -yqu and -ynm, can cause confusion in the order in which the flink boot parameters are set 2020-08-10 21:03:31.400 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - Hadoop version: 3.0.0-cdh6.3.2 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - JVM Options: 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -Dlog.file=/opt/flink-1.10.1/log/flink-dscheduler-client-cdh-05.log 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -Dlog4j.configuration=file:/opt/flink-1.10.1/conf/log4j-cli.properties 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -Dlogback.configurationFile=file:/opt/flink-1.10.1/conf/logback.xml 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - Program Arguments: 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - run 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -m 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - yarn-cluster 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -yjm 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - 1G 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -ytm 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - 6G 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -d 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -c 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - cn.~~.analysis.DurationAndMileage 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - analysis-assembly-2.3.jar 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - --qu 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - default 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -yqu 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - test 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -ynm 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - DurationAndMileage
https://github.com/apache/dolphinscheduler/issues/3457
https://github.com/apache/dolphinscheduler/pull/4166
68541f281d0b0908f605ad49847d3e7acdd5a302
cbc30b4900215424dcbbfb49539259d32273efc3
"2020-08-10T12:56:16Z"
java
"2020-12-10T14:37:21Z"
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/Constants.java
     * hdfs.root.user
     */
    public static final String HDFS_ROOT_USER = "hdfs.root.user";

    /**
     * hdfs/s3 configuration
     * resource.upload.path
     */
    public static final String RESOURCE_UPLOAD_PATH = "resource.upload.path";

    /**
     * data basedir path
     */
    public static final String DATA_BASEDIR_PATH = "data.basedir.path";

    /**
     * dolphinscheduler.env.path
     */
    public static final String DOLPHINSCHEDULER_ENV_PATH = "dolphinscheduler.env.path";

    /**
     * environment properties default path
     */
    public static final String ENV_PATH = "env/dolphinscheduler_env.sh";

    /**
     * python home
     */
    public static final String PYTHON_HOME = "PYTHON_HOME";

    /**
     * resource.view.suffixs
     */
    public static final String RESOURCE_VIEW_SUFFIXS = "resource.view.suffixs";
    public static final String RESOURCE_VIEW_SUFFIXS_DEFAULT_VALUE = "txt,log,sh,conf,cfg,py,java,sql,hql,xml,properties";

    /**
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,457
[bug] flink args build problem
**Describe the bug** There was a BUG in the Dolphin Scheduler-1.3.1 that Dolphin set Flink launch parameters **To Reproduce** 1. If the slot number and taskManager number are set, Jar package cannot be found when flink task is executed 2. Among other parameters, specifying -yqu (the queue name of Flink on Yarn) or -ynm (Flink appName) is invalid !!!! Setting other boot parameters, such as -yqu and -ynm, can cause confusion in the order in which the flink boot parameters are set 2020-08-10 21:03:31.400 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - Hadoop version: 3.0.0-cdh6.3.2 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - JVM Options: 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -Dlog.file=/opt/flink-1.10.1/log/flink-dscheduler-client-cdh-05.log 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -Dlog4j.configuration=file:/opt/flink-1.10.1/conf/log4j-cli.properties 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -Dlogback.configurationFile=file:/opt/flink-1.10.1/conf/logback.xml 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - Program Arguments: 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - run 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -m 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - yarn-cluster 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -yjm 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - 1G 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -ytm 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - 6G 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -d 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -c 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - cn.~~.analysis.DurationAndMileage 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - analysis-assembly-2.3.jar 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - --qu 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - default 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -yqu 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - test 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -ynm 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - DurationAndMileage
https://github.com/apache/dolphinscheduler/issues/3457
https://github.com/apache/dolphinscheduler/pull/4166
68541f281d0b0908f605ad49847d3e7acdd5a302
cbc30b4900215424dcbbfb49539259d32273efc3
"2020-08-10T12:56:16Z"
java
"2020-12-10T14:37:21Z"
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/Constants.java
     * development.state
     */
    public static final String DEVELOPMENT_STATE = "development.state";
    public static final String DEVELOPMENT_STATE_DEFAULT_VALUE = "true";

    /**
     * string true
     */
    public static final String STRING_TRUE = "true";

    /**
     * string false
     */
    public static final String STRING_FALSE = "false";

    /**
     * resource storage type
     */
    public static final String RESOURCE_STORAGE_TYPE = "resource.storage.type";

    /**
     * MasterServer directory registered in zookeeper
     */
    public static final String ZOOKEEPER_DOLPHINSCHEDULER_MASTERS = "/nodes/master";

    /**
     * WorkerServer directory registered in zookeeper
     */
    public static final String ZOOKEEPER_DOLPHINSCHEDULER_WORKERS = "/nodes/worker";

    /**
     * all servers directory registered in zookeeper
     */
    public static final String ZOOKEEPER_DOLPHINSCHEDULER_DEAD_SERVERS = "/dead-servers";

    /**
     * MasterServer lock directory registered in zookeeper
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,457
[bug] flink args build problem
**Describe the bug** There was a BUG in the Dolphin Scheduler-1.3.1 that Dolphin set Flink launch parameters **To Reproduce** 1. If the slot number and taskManager number are set, Jar package cannot be found when flink task is executed 2. Among other parameters, specifying -yqu (the queue name of Flink on Yarn) or -ynm (Flink appName) is invalid !!!! Setting other boot parameters, such as -yqu and -ynm, can cause confusion in the order in which the flink boot parameters are set 2020-08-10 21:03:31.400 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - Hadoop version: 3.0.0-cdh6.3.2 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - JVM Options: 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -Dlog.file=/opt/flink-1.10.1/log/flink-dscheduler-client-cdh-05.log 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -Dlog4j.configuration=file:/opt/flink-1.10.1/conf/log4j-cli.properties 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -Dlogback.configurationFile=file:/opt/flink-1.10.1/conf/logback.xml 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - Program Arguments: 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - run 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -m 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - yarn-cluster 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -yjm 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - 1G 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -ytm 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - 6G 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -d 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -c 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - cn.~~.analysis.DurationAndMileage 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - analysis-assembly-2.3.jar 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - --qu 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - default 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -yqu 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - test 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -ynm 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - DurationAndMileage
https://github.com/apache/dolphinscheduler/issues/3457
https://github.com/apache/dolphinscheduler/pull/4166
68541f281d0b0908f605ad49847d3e7acdd5a302
cbc30b4900215424dcbbfb49539259d32273efc3
"2020-08-10T12:56:16Z"
java
"2020-12-10T14:37:21Z"
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/Constants.java
     */
    public static final String ZOOKEEPER_DOLPHINSCHEDULER_LOCK_MASTERS = "/lock/masters";

    /**
     * MasterServer failover directory registered in zookeeper
     */
    public static final String ZOOKEEPER_DOLPHINSCHEDULER_LOCK_FAILOVER_MASTERS = "/lock/failover/masters";

    /**
     * WorkerServer failover directory registered in zookeeper
     */
    public static final String ZOOKEEPER_DOLPHINSCHEDULER_LOCK_FAILOVER_WORKERS = "/lock/failover/workers";

    /**
     * MasterServer startup failover runing and fault tolerance process
     */
    public static final String ZOOKEEPER_DOLPHINSCHEDULER_LOCK_FAILOVER_STARTUP_MASTERS = "/lock/failover/startup-masters";

    /**
     * comma ,
     */
    public static final String COMMA = ",";

    /**
     * slash /
     */
    public static final String SLASH = "/";

    /**
     * COLON :
     */
    public static final String COLON = ":";

    /**
     * SPACE " "
     */
    public static final String SPACE = " ";
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,457
[bug] flink args build problem
**Describe the bug** There was a BUG in the Dolphin Scheduler-1.3.1 that Dolphin set Flink launch parameters **To Reproduce** 1. If the slot number and taskManager number are set, Jar package cannot be found when flink task is executed 2. Among other parameters, specifying -yqu (the queue name of Flink on Yarn) or -ynm (Flink appName) is invalid !!!! Setting other boot parameters, such as -yqu and -ynm, can cause confusion in the order in which the flink boot parameters are set 2020-08-10 21:03:31.400 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - Hadoop version: 3.0.0-cdh6.3.2 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - JVM Options: 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -Dlog.file=/opt/flink-1.10.1/log/flink-dscheduler-client-cdh-05.log 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -Dlog4j.configuration=file:/opt/flink-1.10.1/conf/log4j-cli.properties 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -Dlogback.configurationFile=file:/opt/flink-1.10.1/conf/logback.xml 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - Program Arguments: 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - run 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -m 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - yarn-cluster 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -yjm 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - 1G 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -ytm 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - 6G 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -d 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -c 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - cn.~~.analysis.DurationAndMileage 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - analysis-assembly-2.3.jar 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - --qu 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - default 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -yqu 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - test 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -ynm 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - DurationAndMileage
https://github.com/apache/dolphinscheduler/issues/3457
https://github.com/apache/dolphinscheduler/pull/4166
68541f281d0b0908f605ad49847d3e7acdd5a302
cbc30b4900215424dcbbfb49539259d32273efc3
"2020-08-10T12:56:16Z"
java
"2020-12-10T14:37:21Z"
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/Constants.java
    /**
     * SINGLE_SLASH /
     */
    public static final String SINGLE_SLASH = "/";

    /**
     * DOUBLE_SLASH //
     */
    public static final String DOUBLE_SLASH = "//";

    /**
     * SINGLE_QUOTES "'"
     */
    public static final String SINGLE_QUOTES = "'";

    /**
     * DOUBLE_QUOTES "\""
     */
    public static final String DOUBLE_QUOTES = "\"";

    /**
     * SEMICOLON ;
     */
    public static final String SEMICOLON = ";";

    /**
     * EQUAL SIGN
     */
    public static final String EQUAL_SIGN = "=";

    /**
     * AT SIGN
     */
    public static final String AT_SIGN = "@";

    public static final String WORKER_MAX_CPULOAD_AVG = "worker.max.cpuload.avg";
    public static final String WORKER_RESERVED_MEMORY = "worker.reserved.memory";
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,457
[bug] flink args build problem
**Describe the bug** There was a BUG in the Dolphin Scheduler-1.3.1 that Dolphin set Flink launch parameters **To Reproduce** 1. If the slot number and taskManager number are set, Jar package cannot be found when flink task is executed 2. Among other parameters, specifying -yqu (the queue name of Flink on Yarn) or -ynm (Flink appName) is invalid !!!! Setting other boot parameters, such as -yqu and -ynm, can cause confusion in the order in which the flink boot parameters are set 2020-08-10 21:03:31.400 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - Hadoop version: 3.0.0-cdh6.3.2 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - JVM Options: 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -Dlog.file=/opt/flink-1.10.1/log/flink-dscheduler-client-cdh-05.log 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -Dlog4j.configuration=file:/opt/flink-1.10.1/conf/log4j-cli.properties 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -Dlogback.configurationFile=file:/opt/flink-1.10.1/conf/logback.xml 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - Program Arguments: 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - run 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -m 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - yarn-cluster 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -yjm 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - 1G 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -ytm 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - 6G 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -d 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -c 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - cn.~~.analysis.DurationAndMileage 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - analysis-assembly-2.3.jar 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - --qu 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - default 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -yqu 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - test 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -ynm 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - DurationAndMileage
https://github.com/apache/dolphinscheduler/issues/3457
https://github.com/apache/dolphinscheduler/pull/4166
68541f281d0b0908f605ad49847d3e7acdd5a302
cbc30b4900215424dcbbfb49539259d32273efc3
"2020-08-10T12:56:16Z"
java
"2020-12-10T14:37:21Z"
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/Constants.java
    public static final String MASTER_MAX_CPULOAD_AVG = "master.max.cpuload.avg";
    public static final String MASTER_RESERVED_MEMORY = "master.reserved.memory";

    /**
     * date format of yyyy-MM-dd HH:mm:ss
     */
    public static final String YYYY_MM_DD_HH_MM_SS = "yyyy-MM-dd HH:mm:ss";

    /**
     * date format of yyyyMMddHHmmss
     */
    public static final String YYYYMMDDHHMMSS = "yyyyMMddHHmmss";

    /**
     * date format of yyyyMMddHHmmssSSS
     */
    public static final String YYYYMMDDHHMMSSSSS = "yyyyMMddHHmmssSSS";

    /**
     * http connect time out
     */
    public static final int HTTP_CONNECT_TIMEOUT = 60 * 1000;

    /**
     * http connect request time out
     */
    public static final int HTTP_CONNECTION_REQUEST_TIMEOUT = 60 * 1000;

    /**
     * httpclient soceket time out
     */
    public static final int SOCKET_TIMEOUT = 60 * 1000;

    /**
     * http header
     */
    public static final String HTTP_HEADER_UNKNOWN = "unKnown";
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,457
[bug] flink args build problem
**Describe the bug** There was a BUG in the Dolphin Scheduler-1.3.1 that Dolphin set Flink launch parameters **To Reproduce** 1. If the slot number and taskManager number are set, Jar package cannot be found when flink task is executed 2. Among other parameters, specifying -yqu (the queue name of Flink on Yarn) or -ynm (Flink appName) is invalid !!!! Setting other boot parameters, such as -yqu and -ynm, can cause confusion in the order in which the flink boot parameters are set 2020-08-10 21:03:31.400 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - Hadoop version: 3.0.0-cdh6.3.2 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - JVM Options: 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -Dlog.file=/opt/flink-1.10.1/log/flink-dscheduler-client-cdh-05.log 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -Dlog4j.configuration=file:/opt/flink-1.10.1/conf/log4j-cli.properties 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -Dlogback.configurationFile=file:/opt/flink-1.10.1/conf/logback.xml 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - Program Arguments: 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - run 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -m 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - yarn-cluster 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -yjm 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - 1G 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -ytm 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - 6G 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -d 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -c 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - cn.~~.analysis.DurationAndMileage 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - analysis-assembly-2.3.jar 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - --qu 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - default 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -yqu 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - test 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -ynm 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - DurationAndMileage
https://github.com/apache/dolphinscheduler/issues/3457
https://github.com/apache/dolphinscheduler/pull/4166
68541f281d0b0908f605ad49847d3e7acdd5a302
cbc30b4900215424dcbbfb49539259d32273efc3
"2020-08-10T12:56:16Z"
java
"2020-12-10T14:37:21Z"
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/Constants.java
    /**
     * http X-Forwarded-For
     */
    public static final String HTTP_X_FORWARDED_FOR = "X-Forwarded-For";

    /**
     * http X-Real-IP
     */
    public static final String HTTP_X_REAL_IP = "X-Real-IP";

    /**
     * UTF-8
     */
    public static final String UTF_8 = "UTF-8";

    /**
     * user name regex
     */
    public static final Pattern REGEX_USER_NAME = Pattern.compile("^[a-zA-Z0-9._-]{3,39}$");

    /**
     * email regex
     */
    public static final Pattern REGEX_MAIL_NAME = Pattern.compile("^([a-z0-9A-Z]+[_|\\-|\\.]?)+[a-z0-9A-Z]@([a-z0-9A-Z]+(-[a-z0-9A-Z]+)?\\.)+[a-zA-Z]{2,}$");

    /**
     * read permission
     */
    public static final int READ_PERMISSION = 2 * 1;

    /**
     * write permission
     */
    public static final int WRITE_PERMISSION = 2 * 2;

    /**
     * execute permission
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,457
[bug] flink args build problem
**Describe the bug** There was a BUG in the Dolphin Scheduler-1.3.1 that Dolphin set Flink launch parameters **To Reproduce** 1. If the slot number and taskManager number are set, Jar package cannot be found when flink task is executed 2. Among other parameters, specifying -yqu (the queue name of Flink on Yarn) or -ynm (Flink appName) is invalid !!!! Setting other boot parameters, such as -yqu and -ynm, can cause confusion in the order in which the flink boot parameters are set 2020-08-10 21:03:31.400 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - Hadoop version: 3.0.0-cdh6.3.2 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - JVM Options: 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -Dlog.file=/opt/flink-1.10.1/log/flink-dscheduler-client-cdh-05.log 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -Dlog4j.configuration=file:/opt/flink-1.10.1/conf/log4j-cli.properties 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -Dlogback.configurationFile=file:/opt/flink-1.10.1/conf/logback.xml 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - Program Arguments: 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - run 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -m 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - yarn-cluster 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -yjm 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - 1G 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -ytm 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - 6G 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -d 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -c 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - cn.~~.analysis.DurationAndMileage 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - analysis-assembly-2.3.jar 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - --qu 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - default 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -yqu 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - test 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -ynm 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - DurationAndMileage
https://github.com/apache/dolphinscheduler/issues/3457
https://github.com/apache/dolphinscheduler/pull/4166
68541f281d0b0908f605ad49847d3e7acdd5a302
cbc30b4900215424dcbbfb49539259d32273efc3
"2020-08-10T12:56:16Z"
java
"2020-12-10T14:37:21Z"
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/Constants.java
     */
    public static final int EXECUTE_PERMISSION = 1;

    /**
     * default admin permission
     */
    public static final int DEFAULT_ADMIN_PERMISSION = 7;

    /**
     * all permissions
     */
    public static final int ALL_PERMISSIONS = READ_PERMISSION | WRITE_PERMISSION | EXECUTE_PERMISSION;

    /**
     * max task timeout
     */
    public static final int MAX_TASK_TIMEOUT = 24 * 3600;

    /**
     * master cpu load
     */
    public static final int DEFAULT_MASTER_CPU_LOAD = Runtime.getRuntime().availableProcessors() * 2;

    /**
     * master reserved memory
     */
    public static final double DEFAULT_MASTER_RESERVED_MEMORY = OSUtils.totalMemorySize() / 10;

    /**
     * worker cpu load
     */
    public static final int DEFAULT_WORKER_CPU_LOAD = Runtime.getRuntime().availableProcessors() * 2;

    /**
     * worker reserved memory
     */
    public static final double DEFAULT_WORKER_RESERVED_MEMORY = OSUtils.totalMemorySize() / 10;
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,457
[bug] flink args build problem
**Describe the bug** There was a BUG in the Dolphin Scheduler-1.3.1 that Dolphin set Flink launch parameters **To Reproduce** 1. If the slot number and taskManager number are set, Jar package cannot be found when flink task is executed 2. Among other parameters, specifying -yqu (the queue name of Flink on Yarn) or -ynm (Flink appName) is invalid !!!! Setting other boot parameters, such as -yqu and -ynm, can cause confusion in the order in which the flink boot parameters are set 2020-08-10 21:03:31.400 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - Hadoop version: 3.0.0-cdh6.3.2 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - JVM Options: 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -Dlog.file=/opt/flink-1.10.1/log/flink-dscheduler-client-cdh-05.log 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -Dlog4j.configuration=file:/opt/flink-1.10.1/conf/log4j-cli.properties 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -Dlogback.configurationFile=file:/opt/flink-1.10.1/conf/logback.xml 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - Program Arguments: 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - run 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -m 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - yarn-cluster 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -yjm 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - 1G 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -ytm 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - 6G 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -d 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -c 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - cn.~~.analysis.DurationAndMileage 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - analysis-assembly-2.3.jar 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - --qu 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - default 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -yqu 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - test 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -ynm 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - DurationAndMileage
https://github.com/apache/dolphinscheduler/issues/3457
https://github.com/apache/dolphinscheduler/pull/4166
68541f281d0b0908f605ad49847d3e7acdd5a302
cbc30b4900215424dcbbfb49539259d32273efc3
"2020-08-10T12:56:16Z"
java
"2020-12-10T14:37:21Z"
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/Constants.java
    /**
     * default log cache rows num,output when reach the number
     */
    public static final int DEFAULT_LOG_ROWS_NUM = 4 * 16;

    /**
     * log flush interval?output when reach the interval
     */
    public static final int DEFAULT_LOG_FLUSH_INTERVAL = 1000;

    /**
     * time unit secong to minutes
     */
    public static final int SEC_2_MINUTES_TIME_UNIT = 60;

    /***
     *
     * rpc port
     */
    public static final int RPC_PORT = 50051;

    /**
     * forbid running task
     */
    public static final String FLOWNODE_RUN_FLAG_FORBIDDEN = "FORBIDDEN";

    /**
     * datasource configuration path
     */
    public static final String DATASOURCE_PROPERTIES = "/datasource.properties";

    public static final String TASK_RECORD_URL = "task.record.datasource.url";
    public static final String TASK_RECORD_FLAG = "task.record.flag";
    public static final String TASK_RECORD_USER = "task.record.datasource.username";
    public static final String TASK_RECORD_PWD = "task.record.datasource.password";

    public static final String DEFAULT = "Default";
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,457
[bug] flink args build problem
**Describe the bug** There was a BUG in the Dolphin Scheduler-1.3.1 that Dolphin set Flink launch parameters **To Reproduce** 1. If the slot number and taskManager number are set, Jar package cannot be found when flink task is executed 2. Among other parameters, specifying -yqu (the queue name of Flink on Yarn) or -ynm (Flink appName) is invalid !!!! Setting other boot parameters, such as -yqu and -ynm, can cause confusion in the order in which the flink boot parameters are set 2020-08-10 21:03:31.400 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - Hadoop version: 3.0.0-cdh6.3.2 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - JVM Options: 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -Dlog.file=/opt/flink-1.10.1/log/flink-dscheduler-client-cdh-05.log 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -Dlog4j.configuration=file:/opt/flink-1.10.1/conf/log4j-cli.properties 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -Dlogback.configurationFile=file:/opt/flink-1.10.1/conf/logback.xml 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - Program Arguments: 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - run 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -m 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - yarn-cluster 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -yjm 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - 1G 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -ytm 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - 6G 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -d 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -c 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - cn.~~.analysis.DurationAndMileage 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - analysis-assembly-2.3.jar 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - --qu 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - default 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -yqu 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - test 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -ynm 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - DurationAndMileage
https://github.com/apache/dolphinscheduler/issues/3457
https://github.com/apache/dolphinscheduler/pull/4166
68541f281d0b0908f605ad49847d3e7acdd5a302
cbc30b4900215424dcbbfb49539259d32273efc3
"2020-08-10T12:56:16Z"
java
"2020-12-10T14:37:21Z"
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/Constants.java
    public static final String USER = "user";
    public static final String PASSWORD = "password";
    public static final String XXXXXX = "******";
    public static final String NULL = "NULL";
    public static final String THREAD_NAME_MASTER_SERVER = "Master-Server";
    public static final String THREAD_NAME_WORKER_SERVER = "Worker-Server";
    public static final String TASK_RECORD_TABLE_HIVE_LOG = "eamp_hive_log_hd";
    public static final String TASK_RECORD_TABLE_HISTORY_HIVE_LOG = "eamp_hive_hist_log_hd";

    /**
     * command parameter keys
     */
    public static final String CMD_PARAM_RECOVER_PROCESS_ID_STRING = "ProcessInstanceId";
    public static final String CMD_PARAM_RECOVERY_START_NODE_STRING = "StartNodeIdList";
    public static final String CMD_PARAM_RECOVERY_WAITING_THREAD = "WaitingThreadInstanceId";
    public static final String CMD_PARAM_SUB_PROCESS = "processInstanceId";
    public static final String CMD_PARAM_EMPTY_SUB_PROCESS = "0";
    public static final String CMD_PARAM_SUB_PROCESS_PARENT_INSTANCE_ID = "parentProcessInstanceId";
    public static final String CMD_PARAM_SUB_PROCESS_DEFINE_ID = "processDefinitionId";
    public static final String CMD_PARAM_START_NODE_NAMES = "StartNodeNameList";

    /**
     * complement data start date
     */
    public static final String CMDPARAM_COMPLEMENT_DATA_START_DATE = "complementStartDate";

    /**
     * complement data end date
     */
    public static final String CMDPARAM_COMPLEMENT_DATA_END_DATE = "complementEndDate";

    /**
     * hadoop configuration
     */
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,457
[bug] flink args build problem
**Describe the bug** There was a BUG in the Dolphin Scheduler-1.3.1 that Dolphin set Flink launch parameters **To Reproduce** 1. If the slot number and taskManager number are set, Jar package cannot be found when flink task is executed 2. Among other parameters, specifying -yqu (the queue name of Flink on Yarn) or -ynm (Flink appName) is invalid !!!! Setting other boot parameters, such as -yqu and -ynm, can cause confusion in the order in which the flink boot parameters are set 2020-08-10 21:03:31.400 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - Hadoop version: 3.0.0-cdh6.3.2 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - JVM Options: 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -Dlog.file=/opt/flink-1.10.1/log/flink-dscheduler-client-cdh-05.log 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -Dlog4j.configuration=file:/opt/flink-1.10.1/conf/log4j-cli.properties 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -Dlogback.configurationFile=file:/opt/flink-1.10.1/conf/logback.xml 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - Program Arguments: 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - run 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -m 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - yarn-cluster 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -yjm 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - 1G 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -ytm 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - 6G 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -d 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -c 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - cn.~~.analysis.DurationAndMileage 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - analysis-assembly-2.3.jar 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - --qu 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - default 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -yqu 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - test 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -ynm 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - DurationAndMileage
https://github.com/apache/dolphinscheduler/issues/3457
https://github.com/apache/dolphinscheduler/pull/4166
68541f281d0b0908f605ad49847d3e7acdd5a302
cbc30b4900215424dcbbfb49539259d32273efc3
"2020-08-10T12:56:16Z"
java
"2020-12-10T14:37:21Z"
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/Constants.java
    public static final String HADOOP_RM_STATE_ACTIVE = "ACTIVE";
    public static final String HADOOP_RM_STATE_STANDBY = "STANDBY";
    public static final String HADOOP_RESOURCE_MANAGER_HTTPADDRESS_PORT = "resource.manager.httpaddress.port";

    /**
     * data source config
     */
    public static final String SPRING_DATASOURCE_DRIVER_CLASS_NAME = "spring.datasource.driver-class-name";
    public static final String SPRING_DATASOURCE_URL = "spring.datasource.url";
    public static final String SPRING_DATASOURCE_USERNAME = "spring.datasource.username";
    public static final String SPRING_DATASOURCE_PASSWORD = "spring.datasource.password";
    public static final String SPRING_DATASOURCE_VALIDATION_QUERY_TIMEOUT = "spring.datasource.validationQueryTimeout";
    public static final String SPRING_DATASOURCE_INITIAL_SIZE = "spring.datasource.initialSize";
    public static final String SPRING_DATASOURCE_MIN_IDLE = "spring.datasource.minIdle";
    public static final String SPRING_DATASOURCE_MAX_ACTIVE = "spring.datasource.maxActive";
    public static final String SPRING_DATASOURCE_MAX_WAIT = "spring.datasource.maxWait";
    public static final String SPRING_DATASOURCE_TIME_BETWEEN_EVICTION_RUNS_MILLIS = "spring.datasource.timeBetweenEvictionRunsMillis";
    public static final String SPRING_DATASOURCE_TIME_BETWEEN_CONNECT_ERROR_MILLIS = "spring.datasource.timeBetweenConnectErrorMillis";
    public static final String SPRING_DATASOURCE_MIN_EVICTABLE_IDLE_TIME_MILLIS = "spring.datasource.minEvictableIdleTimeMillis";
    public static final String SPRING_DATASOURCE_VALIDATION_QUERY = "spring.datasource.validationQuery";
    public static final String SPRING_DATASOURCE_TEST_WHILE_IDLE = "spring.datasource.testWhileIdle";
    public static final String SPRING_DATASOURCE_TEST_ON_BORROW = "spring.datasource.testOnBorrow";
    public static final String SPRING_DATASOURCE_TEST_ON_RETURN = "spring.datasource.testOnReturn";
    public static final String SPRING_DATASOURCE_POOL_PREPARED_STATEMENTS = "spring.datasource.poolPreparedStatements";
    public static final String SPRING_DATASOURCE_DEFAULT_AUTO_COMMIT = "spring.datasource.defaultAutoCommit";
    public static final String SPRING_DATASOURCE_KEEP_ALIVE = "spring.datasource.keepAlive";
    public static final String SPRING_DATASOURCE_MAX_POOL_PREPARED_STATEMENT_PER_CONNECTION_SIZE = "spring.datasource.maxPoolPreparedStatementPerConnectionSize";

    public static final String DEVELOPMENT = "development";

    public static final String QUARTZ_PROPERTIES_PATH = "quartz.properties";

    /**
     * sleep time
closed
apache/dolphinscheduler
https://github.com/apache/dolphinscheduler
3,457
[bug] flink args build problem
**Describe the bug** There was a BUG in the Dolphin Scheduler-1.3.1 that Dolphin set Flink launch parameters **To Reproduce** 1. If the slot number and taskManager number are set, Jar package cannot be found when flink task is executed 2. Among other parameters, specifying -yqu (the queue name of Flink on Yarn) or -ynm (Flink appName) is invalid !!!! Setting other boot parameters, such as -yqu and -ynm, can cause confusion in the order in which the flink boot parameters are set 2020-08-10 21:03:31.400 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - Hadoop version: 3.0.0-cdh6.3.2 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - JVM Options: 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -Dlog.file=/opt/flink-1.10.1/log/flink-dscheduler-client-cdh-05.log 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -Dlog4j.configuration=file:/opt/flink-1.10.1/conf/log4j-cli.properties 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -Dlogback.configurationFile=file:/opt/flink-1.10.1/conf/logback.xml 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - Program Arguments: 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - run 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -m 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - yarn-cluster 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -yjm 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - 1G 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -ytm 2020-08-10 21:03:31.401 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - 6G 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -d 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -c 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - cn.~~.analysis.DurationAndMileage 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - analysis-assembly-2.3.jar 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - --qu 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - default 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -yqu 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - test 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - -ynm 2020-08-10 21:03:31.402 10.15.1.19 [main] INFO org.apache.flink.client.cli.CliFrontend - DurationAndMileage
https://github.com/apache/dolphinscheduler/issues/3457
https://github.com/apache/dolphinscheduler/pull/4166
68541f281d0b0908f605ad49847d3e7acdd5a302
cbc30b4900215424dcbbfb49539259d32273efc3
"2020-08-10T12:56:16Z"
java
"2020-12-10T14:37:21Z"
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/Constants.java
     */
    public static final int SLEEP_TIME_MILLIS = 1000;

    /**
     * heartbeat for zk info length
     */
    public static final int HEARTBEAT_FOR_ZOOKEEPER_INFO_LENGTH = 10;

    /**
     * hadoop params
     * jar
     */
    public static final String JAR = "jar";

    /**
     * hadoop
     */
    public static final String HADOOP = "hadoop";

    /**
     * -D parameter
     */
    public static final String D = "-D";

    /**
     * -D mapreduce.job.queuename=ququename
     */
    public static final String MR_QUEUE = "mapreduce.job.queuename";

    /**
     * spark params constant
     */
    public static final String MASTER = "--master";
    public static final String DEPLOY_MODE = "--deploy-mode";

    /**
     * --class CLASS_NAME