@@ -19,6 +19,11 @@
 
 import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.EXIT_CODE_FAILURE;
 
+import java.io.File;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.nio.file.StandardOpenOption;
 import org.apache.dolphinscheduler.common.utils.JSONUtils;
 import org.apache.dolphinscheduler.plugin.task.api.AbstractRemoteTask;
 import org.apache.dolphinscheduler.plugin.task.api.ShellCommandExecutor;
@@ -38,6 +43,7 @@
 import java.util.Collections;
 import java.util.List;
 import java.util.Map;
+import org.apache.dolphinscheduler.plugin.task.api.utils.FileUtils;
 
 public class HiveCliTask extends AbstractRemoteTask {
 
@@ -107,37 +113,46 @@ protected String buildCommand() {
 
         final List<String> args = new ArrayList<>();
 
-        final String type = hiveCliParameters.getHiveCliTaskExecutionType();
+        String fileContent = HiveSqlScriptReader.readHiveSqlContent(taskExecutionContext.getExecutePath(), hiveCliParameters);
+        fileContent = ParameterUtils.convertParameterPlaceholders(fileContent, ParamUtils.convert(taskExecutionContext.getPrepareParamsMap()));
+        String sqlFilePath = generateSqlScriptFile(fileContent);
 
-        // TODO: make sure type is not unknown
-        if (HiveCliConstants.TYPE_FILE.equals(type)) {
-            args.add(HiveCliConstants.HIVE_CLI_EXECUTE_FILE);
-            final List<ResourceInfo> resourceInfos = hiveCliParameters.getResourceList();
-            if (resourceInfos.size() > 1) {
-                logger.warn("more than 1 files detected, use the first one by default");
-            }
-
-            args.add(StringUtils.stripStart(resourceInfos.get(0).getResourceName(), "/"));
-        } else {
-            final String script = hiveCliParameters.getHiveSqlScript();
-            args.add(String.format(HiveCliConstants.HIVE_CLI_EXECUTE_SCRIPT, script));
-        }
+        args.add(HiveCliConstants.HIVE_CLI_EXECUTE_FILE);
+        args.add(sqlFilePath);
 
         final String hiveCliOptions = hiveCliParameters.getHiveCliOptions();
         if (StringUtils.isNotEmpty(hiveCliOptions)) {
             args.add(hiveCliOptions);
         }
 
-        final Map<String, Property> paramsMap = taskExecutionContext.getPrepareParamsMap();
-        final String command =
-                ParameterUtils.convertParameterPlaceholders(String.join(" ", args), ParamUtils.convert(paramsMap));
+        String command = String.join(" ", args);
 
         logger.info("hiveCli task command: {}", command);
 
         return command;
 
     }
 
+    protected String generateSqlScriptFile(String rawScript) {
+        String scriptFileName = Paths.get(taskExecutionContext.getExecutePath(), "hive_cli.sql").toString();
+
+        try {
+            File file = new File(scriptFileName);
+            Path path = file.toPath();
+            if (Files.exists(path)) {
+                logger.warn("The HiveCli sql file: {} already exists, will delete it", scriptFileName);
+                Files.deleteIfExists(path);
+            }
+            if (!Files.exists(path)) {
+                FileUtils.createFileWith755(path);
+                Files.write(path, rawScript.getBytes(), StandardOpenOption.APPEND);
+            }
+            return scriptFileName;
+        } catch (Exception ex) {
+            throw new TaskException("Generate sql script file: " + scriptFileName + " failed", ex);
+        }
+    }
+
     @Override
     public AbstractParameters getParameters() {
         return hiveCliParameters;
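Taken together, buildCommand() now always materializes the SQL as a local script: it reads the statement text via HiveSqlScriptReader (presumably covering both the FILE and SCRIPT execution types that the deleted if/else distinguished), renders ${...} placeholders into that text, writes it under the task's execute path, and points Hive at the resulting file. A minimal standalone sketch of that flow, assuming HiveCliConstants.HIVE_CLI_EXECUTE_FILE is the literal "hive -f"; the path, parameter map, and renderPlaceholders() helper are illustrative stand-ins, not the plugin's actual constants or utilities:

import java.util.HashMap;
import java.util.Map;

// Sketch of the new flow: render ${...} placeholders into the SQL text
// first, then invoke Hive against the generated script file.
public class HiveCliCommandSketch {

    // Stand-in for ParameterUtils.convertParameterPlaceholders (assumed behavior)
    static String renderPlaceholders(String sql, Map<String, String> params) {
        for (Map.Entry<String, String> e : params.entrySet()) {
            sql = sql.replace("${" + e.getKey() + "}", e.getValue());
        }
        return sql;
    }

    public static void main(String[] args) {
        Map<String, String> params = new HashMap<>();
        params.put("bizdate", "2024-01-01");

        String fileContent = renderPlaceholders(
                "SELECT * FROM orders WHERE dt = '${bizdate}';", params);
        // generateSqlScriptFile(fileContent) would persist this under the
        // task's execute path; the path below is assumed for the demo.
        String sqlFilePath = "/tmp/dolphinscheduler/exec/hive_cli.sql";

        String command = String.join(" ", "hive -f", sqlFilePath,
                "--hiveconf hive.cli.print.header=true");
        System.out.println(command);
        // prints: hive -f /tmp/dolphinscheduler/exec/hive_cli.sql --hiveconf hive.cli.print.header=true
    }
}

One consequence worth noting: placeholders are now substituted into the file content before it is written, rather than into the joined command string, so SQL containing spaces or quotes no longer has to survive shell tokenization the way the old hive -e "<script>" path did.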
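The new helper delegates file creation to FileUtils.createFileWith755. For readers without the plugin sources at hand, here is a minimal sketch of the equivalent java.nio calls, assuming a POSIX file system; this is an illustration of what a helper with that name is expected to do, not the plugin's actual implementation:

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.attribute.PosixFilePermission;
import java.nio.file.attribute.PosixFilePermissions;
import java.util.Set;

// Creates an empty file and chmods it to 755 (rwxr-xr-x). POSIX-only:
// the permission calls throw UnsupportedOperationException on Windows.
public class CreateFile755Sketch {

    static void createFileWith755(Path path) throws IOException {
        if (path.getParent() != null) {
            Files.createDirectories(path.getParent()); // ensure parent dirs exist
        }
        Files.createFile(path); // throws FileAlreadyExistsException if present
        Set<PosixFilePermission> perms = PosixFilePermissions.fromString("rwxr-xr-x");
        Files.setPosixFilePermissions(path, perms); // chmod 755
    }

    public static void main(String[] args) throws IOException {
        createFileWith755(Paths.get("/tmp/hive_cli_demo/hive_cli.sql"));
    }
}

If createFileWith755 behaves like Files.createFile, that would also explain the delete-then-recreate sequence in generateSqlScriptFile: a leftover script from a retried task run must be removed before the file can be created again.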