/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.oozie.command.wf;

import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.oozie.ErrorCode;
import org.apache.oozie.WorkflowActionBean;
import org.apache.oozie.WorkflowJobBean;
import org.apache.oozie.client.OozieClient;
import org.apache.oozie.client.WorkflowAction;
import org.apache.oozie.client.WorkflowJob;
import org.apache.oozie.command.CommandException;
import org.apache.oozie.command.PreconditionException;
import org.apache.oozie.executor.jpa.JPAExecutorException;
import org.apache.oozie.executor.jpa.WorkflowActionDeleteJPAExecutor;
import org.apache.oozie.executor.jpa.WorkflowActionsGetForJobJPAExecutor;
import org.apache.oozie.executor.jpa.WorkflowJobGetJPAExecutor;
import org.apache.oozie.executor.jpa.WorkflowJobUpdateJPAExecutor;
import org.apache.oozie.service.DagXLogInfoService;
import org.apache.oozie.service.HadoopAccessorException;
import org.apache.oozie.service.HadoopAccessorService;
import org.apache.oozie.service.JPAService;
import org.apache.oozie.service.Services;
import org.apache.oozie.service.WorkflowAppService;
import org.apache.oozie.service.WorkflowStoreService;
import org.apache.oozie.util.ConfigUtils;
import org.apache.oozie.util.InstrumentUtils;
import org.apache.oozie.util.LogUtils;
import org.apache.oozie.util.ParamChecker;
import org.apache.oozie.util.PropertiesUtils;
import org.apache.oozie.util.XConfiguration;
import org.apache.oozie.util.XLog;
import org.apache.oozie.util.XmlUtils;
import org.apache.oozie.workflow.WorkflowApp;
import org.apache.oozie.workflow.WorkflowException;
import org.apache.oozie.workflow.WorkflowInstance;
import org.apache.oozie.workflow.WorkflowLib;
import org.apache.oozie.workflow.lite.NodeHandler;

/**
 * This is a RerunXCommand which is used to rerun a workflow job. Actions listed as skip nodes keep
 * their data from the previous run; all other actions are deleted and executed again.
 */
public class ReRunXCommand extends WorkflowXCommand<Void> {
    private final String jobId;
    private Configuration conf;
    private final String authToken;
    private final Set<String> nodesToSkip = new HashSet<String>();
    public static final String TO_SKIP = "TO_SKIP";
    private WorkflowJobBean wfBean;
    private List<WorkflowActionBean> actions;
    private JPAService jpaService;

    private static final Set<String> DISALLOWED_DEFAULT_PROPERTIES = new HashSet<String>();
    private static final Set<String> DISALLOWED_USER_PROPERTIES = new HashSet<String>();

    static {
        String[] badUserProps = { PropertiesUtils.DAYS, PropertiesUtils.HOURS, PropertiesUtils.MINUTES,
                PropertiesUtils.KB, PropertiesUtils.MB, PropertiesUtils.GB, PropertiesUtils.TB, PropertiesUtils.PB,
                PropertiesUtils.RECORDS, PropertiesUtils.MAP_IN, PropertiesUtils.MAP_OUT, PropertiesUtils.REDUCE_IN,
                PropertiesUtils.REDUCE_OUT, PropertiesUtils.GROUPS };
        PropertiesUtils.createPropertySet(badUserProps, DISALLOWED_USER_PROPERTIES);

        String[] badDefaultProps = { PropertiesUtils.HADOOP_USER };
        // config-default.xml may override neither the user-level nor the default-level disallowed
        // properties, so both arrays are added to DISALLOWED_DEFAULT_PROPERTIES
        PropertiesUtils.createPropertySet(badUserProps, DISALLOWED_DEFAULT_PROPERTIES);
        PropertiesUtils.createPropertySet(badDefaultProps, DISALLOWED_DEFAULT_PROPERTIES);
    }

    public ReRunXCommand(String jobId, Configuration conf, String authToken) {
        super("rerun", "rerun", 1);
        this.jobId = ParamChecker.notEmpty(jobId, "jobId");
        this.conf = ParamChecker.notNull(conf, "conf");
        this.authToken = ParamChecker.notEmpty(authToken, "authToken");
    }
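
    // Illustrative only: a rerun is typically issued by constructing this command with the job id, a
    // configuration similar to the original submission, and an auth token, and then running it through
    // the standard XCommand lifecycle, which invokes eagerLoadState()/eagerVerifyPrecondition() before
    // execute(). The snippet below is a sketch; rerunConf and authToken are placeholders.
    //
    //   Configuration rerunConf = ...;   // original submission properties plus the rerun controls
    //   new ReRunXCommand(jobId, rerunConf, authToken).call();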

    /* (non-Javadoc)
     * @see org.apache.oozie.command.XCommand#execute()
     */
    @Override
    protected Void execute() throws CommandException {
        InstrumentUtils.incrJobCounter(getName(), 1, getInstrumentation());
        LogUtils.setLogInfo(wfBean, logInfo);
        WorkflowInstance oldWfInstance = this.wfBean.getWorkflowInstance();
        WorkflowInstance newWfInstance;

        WorkflowAppService wps = Services.get().get(WorkflowAppService.class);
        try {
            XLog.Info.get().setParameter(DagXLogInfoService.TOKEN, conf.get(OozieClient.LOG_TOKEN));
            WorkflowApp app = wps.parseDef(conf, authToken);
            XConfiguration protoActionConf = wps.createProtoActionConf(conf, authToken, true);
            WorkflowLib workflowLib = Services.get().get(WorkflowStoreService.class).getWorkflowLibWithNoDB();

            URI uri = new URI(conf.get(OozieClient.APP_PATH));
            HadoopAccessorService has = Services.get().get(HadoopAccessorService.class);
            Configuration fsConf = has.createJobConf(uri.getAuthority());
            FileSystem fs = has.createFileSystem(wfBean.getUser(), uri, fsConf);

            Path configDefault = null;
            // app path could be a directory
            Path path = new Path(uri.getPath());
            if (!fs.isFile(path)) {
                configDefault = new Path(path, SubmitXCommand.CONFIG_DEFAULT);
            }
            else {
                configDefault = new Path(path.getParent(), SubmitXCommand.CONFIG_DEFAULT);
            }

            if (fs.exists(configDefault)) {
                Configuration defaultConf = new XConfiguration(fs.open(configDefault));
                PropertiesUtils.checkDisallowedProperties(defaultConf, DISALLOWED_DEFAULT_PROPERTIES);
                XConfiguration.injectDefaults(defaultConf, conf);
            }

            PropertiesUtils.checkDisallowedProperties(conf, DISALLOWED_USER_PROPERTIES);

            // Resolving all variables in the job properties. This ensures the Hadoop Configuration semantics
            // are preserved. The Configuration.get function within XConfiguration.resolve() works recursively
            // to get the final value corresponding to a key in the map. Resetting the conf to contain all the
            // resolved values is necessary to ensure propagation of Oozie properties to Hadoop calls downstream.
            conf = ((XConfiguration) conf).resolve();
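
            // Illustrative only (hypothetical property names): if the submitted configuration contains
            //   outputDir = ${baseDir}/out   and   baseDir = hdfs://namenode:8020/user/joe
            // then after resolve() the stored value of outputDir is the literal
            //   hdfs://namenode:8020/user/joe/out
            // so the fully resolved values are what get propagated to downstream Hadoop calls.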

            try {
                newWfInstance = workflowLib.createInstance(app, conf, jobId);
            }
            catch (WorkflowException e) {
                throw new CommandException(e);
            }
            wfBean.setAppName(app.getName());
            wfBean.setProtoActionConf(protoActionConf.toXmlString());
        }
        catch (WorkflowException ex) {
            throw new CommandException(ex);
        }
        catch (IOException ex) {
            throw new CommandException(ErrorCode.E0803, ex);
        }
        catch (HadoopAccessorException ex) {
            throw new CommandException(ex);
        }
        catch (URISyntaxException ex) {
            throw new CommandException(ErrorCode.E0711, ex.getMessage(), ex);
        }

        try {
            for (int i = 0; i < actions.size(); i++) {
                if (!nodesToSkip.contains(actions.get(i).getName())) {
                    jpaService.execute(new WorkflowActionDeleteJPAExecutor(actions.get(i).getId()));
                    LOG.info("Deleting Action[{0}] for re-run", actions.get(i).getId());
                }
                else {
                    copyActionData(newWfInstance, oldWfInstance);
                }
            }

            wfBean.setAppPath(conf.get(OozieClient.APP_PATH));
            wfBean.setConf(XmlUtils.prettyPrint(conf).toString());
            wfBean.setLogToken(conf.get(OozieClient.LOG_TOKEN, ""));
            wfBean.setUser(conf.get(OozieClient.USER_NAME));
            String group = ConfigUtils.getWithDeprecatedCheck(conf, OozieClient.JOB_ACL, OozieClient.GROUP_NAME, null);
            wfBean.setGroup(group);
            wfBean.setExternalId(conf.get(OozieClient.EXTERNAL_ID));
            wfBean.setEndTime(null);
            wfBean.setRun(wfBean.getRun() + 1);
            wfBean.setStatus(WorkflowJob.Status.PREP);
            wfBean.setWorkflowInstance(newWfInstance);
            jpaService.execute(new WorkflowJobUpdateJPAExecutor(wfBean));
        }
        catch (JPAExecutorException e) {
            throw new CommandException(e);
        }

        return null;
    }

    /**
     * Loads the workflow job and its actions. Parses the configuration and adds the nodes that are to be
     * skipped to the skip-node list.
     *
     * @throws CommandException if the job state cannot be loaded
     */
    @Override
    protected void eagerLoadState() throws CommandException {
        super.eagerLoadState();
        try {
            jpaService = Services.get().get(JPAService.class);
            if (jpaService != null) {
                this.wfBean = jpaService.execute(new WorkflowJobGetJPAExecutor(this.jobId));
                this.actions = jpaService.execute(new WorkflowActionsGetForJobJPAExecutor(this.jobId));
            }
            else {
                throw new CommandException(ErrorCode.E0610);
            }

            if (conf != null) {
                if (!conf.getBoolean(OozieClient.RERUN_FAIL_NODES, false)) { // rerun with an explicit skip-node list
                    Collection<String> skipNodes = conf.getStringCollection(OozieClient.RERUN_SKIP_NODES);
                    for (String str : skipNodes) {
                        // trimming is required
                        nodesToSkip.add(str.trim());
                    }
                    LOG.debug("Number of nodes to skip: " + nodesToSkip.size());
                }
                else {
                    for (WorkflowActionBean action : actions) { // rerun from failed nodes, skip all OK actions
                        if (action.getStatus() == WorkflowAction.Status.OK) {
                            nodesToSkip.add(action.getName());
                        }
                    }
                    LOG.debug("Number of nodes to skip when rerunning from failed nodes: " + nodesToSkip.size());
                }
                StringBuilder tmp = new StringBuilder();
                for (String node : nodesToSkip) {
                    tmp.append(node).append(",");
                }
                LOG.debug("Skip node list: " + tmp);
            }
        }
        catch (Exception ex) {
            throw new CommandException(ErrorCode.E0603, ex);
        }
    }
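
    // Illustrative only: the two mutually exclusive rerun modes read in eagerLoadState() above. The
    // property names are the OozieClient constants used in this class; the node names are made-up examples.
    //
    //   conf.set(OozieClient.RERUN_SKIP_NODES, "first-node,second-node");   // skip the listed nodes
    //   conf.setBoolean(OozieClient.RERUN_FAIL_NODES, true);                // or skip every action that ended OK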

    /**
     * Checks the preconditions that are required for a workflow rerun: the last run of the workflow must
     * have completed, and the nodes that are to be skipped must have completed in the base run.
     *
     * @throws CommandException on failure of the preconditions
     * @throws PreconditionException on failure of the preconditions
     */
    @Override
    protected void eagerVerifyPrecondition() throws CommandException, PreconditionException {
        super.eagerVerifyPrecondition();
        if (!(wfBean.getStatus().equals(WorkflowJob.Status.FAILED)
                || wfBean.getStatus().equals(WorkflowJob.Status.KILLED) || wfBean.getStatus().equals(
                WorkflowJob.Status.SUCCEEDED))) {
            throw new CommandException(ErrorCode.E0805, wfBean.getStatus());
        }
        Set<String> unmatchedNodes = new HashSet<String>(nodesToSkip);
        for (WorkflowActionBean action : actions) {
            if (nodesToSkip.contains(action.getName())) {
                if (!action.getStatus().equals(WorkflowAction.Status.OK)
                        && !action.getStatus().equals(WorkflowAction.Status.ERROR)) {
                    throw new CommandException(ErrorCode.E0806, action.getName());
                }
                unmatchedNodes.remove(action.getName());
            }
        }
        if (unmatchedNodes.size() > 0) {
            StringBuilder sb = new StringBuilder();
            String separator = "";
            for (String s : unmatchedNodes) {
                sb.append(separator).append(s);
                separator = ",";
            }
            throw new CommandException(ErrorCode.E0807, sb);
        }
    }

    /**
     * Copies the variables for skipped nodes from the old workflow instance to the new one.
     *
     * @param newWfInstance target WF instance
     * @param oldWfInstance source WF instance
     */
    private void copyActionData(WorkflowInstance newWfInstance, WorkflowInstance oldWfInstance) {
        Map<String, String> newVars = new HashMap<String, String>();
        Map<String, String> oldVars = oldWfInstance.getAllVars();
        for (String var : oldVars.keySet()) {
            String actionName = var.split(WorkflowInstance.NODE_VAR_SEPARATOR)[0];
            if (nodesToSkip.contains(actionName)) {
                newVars.put(var, oldVars.get(var));
            }
        }
        for (String node : nodesToSkip) {
            // Setting the TO_SKIP variable to true. This will be used by
            // SignalCommand and LiteNodeHandler to skip the action.
            newVars.put(node + WorkflowInstance.NODE_VAR_SEPARATOR + TO_SKIP, "true");
            String visitedFlag = NodeHandler.getLoopFlag(node);
            // Removing the visited flag so that the action won't be considered a loop.
            if (newVars.containsKey(visitedFlag)) {
                newVars.remove(visitedFlag);
            }
        }
        newWfInstance.setAllVars(newVars);
    }
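
    // Illustrative only: for a hypothetical skip node named "pig-node", the new instance receives copies of
    // all of the old instance's variables whose keys start with "pig-node" + NODE_VAR_SEPARATOR, plus
    //   "pig-node" + WorkflowInstance.NODE_VAR_SEPARATOR + TO_SKIP  ->  "true"
    // while the node's loop/visited flag is removed so the skipped node is not mistaken for a loop on rerun.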

    /* (non-Javadoc)
     * @see org.apache.oozie.command.XCommand#getEntityKey()
     */
    @Override
    public String getEntityKey() {
        return this.jobId;
    }

    /* (non-Javadoc)
     * @see org.apache.oozie.command.XCommand#isLockRequired()
     */
    @Override
    protected boolean isLockRequired() {
        return true;
    }

    /* (non-Javadoc)
     * @see org.apache.oozie.command.XCommand#loadState()
     */
    @Override
    protected void loadState() throws CommandException {
    }

    /* (non-Javadoc)
     * @see org.apache.oozie.command.XCommand#verifyPrecondition()
     */
    @Override
    protected void verifyPrecondition() throws CommandException, PreconditionException {
    }
}