This project has been retired. For details, please refer to its Attic page.
Source code
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.oozie;

import org.apache.oozie.service.XLogService;
import org.apache.oozie.service.DagXLogInfoService;
import org.apache.hadoop.conf.Configuration;
import org.apache.oozie.client.CoordinatorJob;
import org.apache.oozie.client.WorkflowJob;
import org.apache.oozie.client.OozieClient;
import org.apache.oozie.command.CommandException;
import org.apache.oozie.command.wf.CompletedActionXCommand;
import org.apache.oozie.command.wf.DefinitionXCommand;
import org.apache.oozie.command.wf.ExternalIdXCommand;
import org.apache.oozie.command.wf.JobXCommand;
import org.apache.oozie.command.wf.JobsXCommand;
import org.apache.oozie.command.wf.KillXCommand;
import org.apache.oozie.command.wf.ReRunXCommand;
import org.apache.oozie.command.wf.ResumeXCommand;
import org.apache.oozie.command.wf.StartXCommand;
import org.apache.oozie.command.wf.SubmitHiveXCommand;
import org.apache.oozie.command.wf.SubmitHttpXCommand;
import org.apache.oozie.command.wf.SubmitMRXCommand;
import org.apache.oozie.command.wf.SubmitPigXCommand;
import org.apache.oozie.command.wf.SubmitSqoopXCommand;
import org.apache.oozie.command.wf.SubmitXCommand;
import org.apache.oozie.command.wf.SuspendXCommand;
import org.apache.oozie.command.wf.WorkflowActionInfoXCommand;
import org.apache.oozie.command.OperationType;
import org.apache.oozie.command.wf.BulkWorkflowXCommand;
import org.apache.oozie.executor.jpa.JPAExecutorException;
import org.apache.oozie.executor.jpa.WorkflowJobQueryExecutor;
import org.apache.oozie.executor.jpa.WorkflowJobQueryExecutor.WorkflowJobQuery;
import org.apache.oozie.service.Services;
import org.apache.oozie.service.CallableQueueService;
import org.apache.oozie.util.XLogAuditFilter;
import org.apache.oozie.util.XLogFilter;
import org.apache.oozie.util.XLogUserFilterParam;
import org.apache.oozie.util.ParamChecker;
import org.apache.oozie.util.XCallable;
import org.apache.oozie.util.XConfiguration;
import org.apache.oozie.util.XLog;
import org.apache.oozie.service.XLogStreamingService;

import java.io.StringReader;
import java.io.Writer;
import java.util.Date;
import java.util.List;
import java.util.Properties;
import java.util.Set;
import java.util.HashSet;
import java.util.StringTokenizer;
import java.util.Map;
import java.util.HashMap;
import java.util.ArrayList;
import java.io.IOException;

/**
 * The DagEngine provides all the DAG engine functionality for WS calls.
 */
public class DagEngine extends BaseEngine {

    private static final int HIGH_PRIORITY = 2;
    private static XLog LOG = XLog.getLog(DagEngine.class);

    /**
     * Create a system Dag engine, with no user and no group.
     */
    public DagEngine() {

    }

    /**
     * Create a Dag engine to perform operations on behalf of a user.
     *
     * @param user user name.
     */
    public DagEngine(String user) {
        this();

        this.user = ParamChecker.notEmpty(user, "user");
    }

    /**
     * Submit a workflow job. <p/> It validates configuration properties.
     *
     * @param conf job configuration.
     * @param startJob indicates if the job should be started or not.
     * @return the job Id.
     * @throws DagEngineException thrown if the job could not be created.
     */
    @Override
    public String submitJob(Configuration conf, boolean startJob) throws DagEngineException {
        validateSubmitConfiguration(conf);

        try {
            String jobId;
            SubmitXCommand submit = new SubmitXCommand(conf);
            jobId = submit.call();
            if (startJob) {
                start(jobId);
            }
            return jobId;
        }
        catch (CommandException ex) {
            throw new DagEngineException(ex);
        }
    }
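
    // Usage sketch (illustrative, not part of the original class): submitting
    // and starting a workflow through this engine. The HDFS path and user name
    // are hypothetical, and Services is assumed to be initialized as in a
    // running Oozie server.
    //
    //   Configuration conf = new XConfiguration();
    //   conf.set(OozieClient.APP_PATH, "hdfs://namenode:8020/user/joe/wf-app");
    //   conf.set(OozieClient.USER_NAME, "joe");
    //   DagEngine engine = new DagEngine("joe");
    //   String jobId = engine.submitJob(conf, true); // submit and start in one call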

    /**
     * Submit a workflow through a coordinator. It validates configuration properties.
     *
     * @param conf job conf.
     * @param parentId parent of the workflow.
     * @return the job Id.
     * @throws DagEngineException thrown if the job could not be submitted.
     */
    public String submitJobFromCoordinator(Configuration conf, String parentId) throws DagEngineException {
        validateSubmitConfiguration(conf);
        try {
            String jobId;
            SubmitXCommand submit = new SubmitXCommand(conf, parentId);
            jobId = submit.call();
            start(jobId);
            return jobId;
        }
        catch (CommandException ex) {
            throw new DagEngineException(ex);
        }
    }

    /**
     * Submit a pig/hive/sqoop/mapreduce job through HTTP.
     * <p/>
     * It validates configuration properties.
     *
     * @param conf job configuration.
     * @param jobType job type - can be "pig", "hive", "sqoop" or "mapreduce".
     * @return the job Id.
     * @throws DagEngineException thrown if the job could not be created.
     */
    public String submitHttpJob(Configuration conf, String jobType) throws DagEngineException {
        validateSubmitConfiguration(conf);

        try {
            String jobId;
            SubmitHttpXCommand submit;
            if (jobType.equals("pig")) {
                submit = new SubmitPigXCommand(conf);
            }
            else if (jobType.equals("mapreduce")) {
                submit = new SubmitMRXCommand(conf);
            }
            else if (jobType.equals("hive")) {
                submit = new SubmitHiveXCommand(conf);
            }
            else if (jobType.equals("sqoop")) {
                submit = new SubmitSqoopXCommand(conf);
            }
            else {
                // Guard against an unknown job type; previously this fell
                // through and caused a NullPointerException on submit.call().
                throw new DagEngineException(ErrorCode.E0302, jobType);
            }

            jobId = submit.call();
            start(jobId);
            return jobId;
        }
        catch (CommandException ex) {
            throw new DagEngineException(ex);
        }
    }
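
    // Illustrative sketch (not part of the original class): proxy submission of
    // a MapReduce job without a workflow definition. Only APP_PATH is checked by
    // validateSubmitConfiguration(); the remaining properties a real MR job
    // needs (mapper/reducer classes, input/output dirs, ...) are omitted here.
    //
    //   Configuration conf = new XConfiguration();
    //   conf.set(OozieClient.APP_PATH, "hdfs://namenode:8020/user/joe/mr-app");
    //   String jobId = new DagEngine("joe").submitHttpJob(conf, "mapreduce");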

    private void validateSubmitConfiguration(Configuration conf) throws DagEngineException {
        if (conf.get(OozieClient.APP_PATH) == null) {
            throw new DagEngineException(ErrorCode.E0401, OozieClient.APP_PATH);
        }
    }

    /**
     * Start a job.
     *
     * @param jobId job Id.
     * @throws DagEngineException thrown if the job could not be started.
     */
    @Override
    public void start(String jobId) throws DagEngineException {
        // Changing to synchronous call from asynchronous queuing to prevent the
        // loss of command if the queue is full or the queue is lost in case of
        // failure.
        try {
            new StartXCommand(jobId).call();
        }
        catch (CommandException e) {
            throw new DagEngineException(e);
        }
    }

    /**
     * Resume a job.
     *
     * @param jobId job Id.
     * @throws DagEngineException thrown if the job could not be resumed.
     */
    @Override
    public void resume(String jobId) throws DagEngineException {
        // Changing to synchronous call from asynchronous queuing to prevent the
        // loss of command if the queue is full or the queue is lost in case of
        // failure.
        try {
            new ResumeXCommand(jobId).call();
        }
        catch (CommandException e) {
            throw new DagEngineException(e);
        }
    }

    /**
     * Suspend a job.
     *
     * @param jobId job Id.
     * @throws DagEngineException thrown if the job could not be suspended.
     */
    @Override
    public void suspend(String jobId) throws DagEngineException {
        // Changing to synchronous call from asynchronous queuing to prevent the
        // loss of command if the queue is full or the queue is lost in case of
        // failure.
        try {
            new SuspendXCommand(jobId).call();
        }
        catch (CommandException e) {
            throw new DagEngineException(e);
        }
    }

    /**
     * Kill a job.
     *
     * @param jobId job Id.
     * @throws DagEngineException thrown if the job could not be killed.
     */
    @Override
    public void kill(String jobId) throws DagEngineException {
        // Changing to synchronous call from asynchronous queuing to prevent the
        // loss of command if the queue is full or the queue is lost in case of
        // failure.
        try {
            new KillXCommand(jobId).call();
            LOG.info("User " + user + " killed the WF job " + jobId);
        }
        catch (CommandException e) {
            throw new DagEngineException(e);
        }
    }

    /* (non-Javadoc)
     * @see org.apache.oozie.BaseEngine#change(java.lang.String, java.lang.String)
     */
    @Override
    public void change(String jobId, String changeValue) throws DagEngineException {
        // This code should not be reached.
        throw new DagEngineException(ErrorCode.E1017);
    }

    /**
     * Rerun a job.
     *
     * @param jobId job Id to rerun.
     * @param conf configuration information for the rerun.
     * @throws DagEngineException thrown if the job could not be rerun.
     */
    @Override
    public void reRun(String jobId, Configuration conf) throws DagEngineException {
        try {
            WorkflowJobBean wfBean = WorkflowJobQueryExecutor.getInstance().get(WorkflowJobQuery.GET_WORKFLOW, jobId);
            Configuration wfConf = new XConfiguration(new StringReader(wfBean.getConf()));
            XConfiguration.copy(conf, wfConf);
            validateReRunConfiguration(wfConf);
            new ReRunXCommand(jobId, wfConf).call();
        }
        catch (CommandException ex) {
            throw new DagEngineException(ex);
        }
        catch (JPAExecutorException ex) {
            throw new DagEngineException(ex);
        }
        catch (IOException ex) {
            throw new DagEngineException(ErrorCode.E0803, ex.getMessage());
        }
    }

    private void validateReRunConfiguration(Configuration conf) throws DagEngineException {
        if (conf.get(OozieClient.APP_PATH) == null) {
            throw new DagEngineException(ErrorCode.E0401, OozieClient.APP_PATH);
        }
        if (conf.get(OozieClient.RERUN_SKIP_NODES) == null && conf.get(OozieClient.RERUN_FAIL_NODES) == null) {
            throw new DagEngineException(ErrorCode.E0401, OozieClient.RERUN_SKIP_NODES + " OR "
                    + OozieClient.RERUN_FAIL_NODES);
        }
        if (conf.get(OozieClient.RERUN_SKIP_NODES) != null && conf.get(OozieClient.RERUN_FAIL_NODES) != null) {
            throw new DagEngineException(ErrorCode.E0404, OozieClient.RERUN_SKIP_NODES + " OR "
                    + OozieClient.RERUN_FAIL_NODES);
        }
    }
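
    // Illustrative sketch (not part of the original class): a rerun
    // configuration that passes validateReRunConfiguration() above. Exactly one
    // of RERUN_SKIP_NODES / RERUN_FAIL_NODES may be set; the path and node
    // names are hypothetical.
    //
    //   Configuration rerunConf = new XConfiguration();
    //   rerunConf.set(OozieClient.APP_PATH, "hdfs://namenode:8020/user/joe/wf-app");
    //   rerunConf.set(OozieClient.RERUN_SKIP_NODES, "first-action,second-action");
    //   engine.reRun(jobId, rerunConf); // merged over the stored job conf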

    /**
     * Process an action callback.
     *
     * @param actionId the action Id.
     * @param externalStatus the action external status.
     * @param actionData the action output data, <code>null</code> if none.
     * @throws DagEngineException thrown if the callback could not be processed.
     */
    public void processCallback(String actionId, String externalStatus, Properties actionData)
            throws DagEngineException {
        XLog.Info.get().clearParameter(XLogService.GROUP);
        XLog.Info.get().clearParameter(XLogService.USER);
        XCallable<Void> command = new CompletedActionXCommand(actionId, externalStatus, actionData, HIGH_PRIORITY);
        if (!Services.get().get(CallableQueueService.class).queue(command)) {
            LOG.warn(XLog.OPS, "queue is full or system is in SAFEMODE, ignoring callback");
        }
    }

    /**
     * Return the info about a job.
     *
     * @param jobId job Id.
     * @return the workflow job info.
     * @throws DagEngineException thrown if the job info could not be obtained.
     */
    @Override
    public WorkflowJob getJob(String jobId) throws DagEngineException {
        try {
            return new JobXCommand(jobId).call();
        }
        catch (CommandException ex) {
            throw new DagEngineException(ex);
        }
    }

    /**
     * Return the info about a job with actions subset.
     *
     * @param jobId job Id
     * @param start starting from this index in the list of actions belonging to the job
     * @param length number of actions to be returned
     * @return the workflow job info.
     * @throws DagEngineException thrown if the job info could not be obtained.
     */
    @Override
    public WorkflowJob getJob(String jobId, int start, int length) throws DagEngineException {
        try {
            return new JobXCommand(jobId, start, length).call();
        }
        catch (CommandException ex) {
            throw new DagEngineException(ex);
        }
    }

    /**
     * Return a job definition.
     *
     * @param jobId job Id.
     * @return the job definition.
     * @throws DagEngineException thrown if the job definition could not be obtained.
     */
    @Override
    public String getDefinition(String jobId) throws DagEngineException {
        try {
            return new DefinitionXCommand(jobId).call();
        }
        catch (CommandException ex) {
            throw new DagEngineException(ex);
        }
    }

    /**
     * Stream the log of a job.
     *
     * @param jobId job Id.
     * @param writer writer to stream the log to.
     * @param params additional parameters from the request
     * @throws IOException thrown if the log cannot be streamed.
     * @throws DagEngineException thrown if there is error in getting the Workflow Information for jobId.
     */
    @Override
    public void streamLog(String jobId, Writer writer, Map<String, String[]> params) throws IOException,
            DagEngineException {
        streamJobLog(jobId, writer, params, LOG_TYPE.LOG);
    }

    /**
     * Stream the error log of a job.
     *
     * @param jobId job Id.
     * @param writer writer to stream the log to.
     * @param params additional parameters from the request
     * @throws IOException thrown if the log cannot be streamed.
     * @throws DagEngineException thrown if there is error in getting the Workflow Information for jobId.
     */
    @Override
    public void streamErrorLog(String jobId, Writer writer, Map<String, String[]> params) throws IOException,
            DagEngineException {
        streamJobLog(jobId, writer, params, LOG_TYPE.ERROR_LOG);
    }

    /**
     * Stream the audit log of a job.
     *
     * @param jobId job Id.
     * @param writer writer to stream the log to.
     * @param params additional parameters from the request
     * @throws IOException thrown if the log cannot be streamed.
     * @throws DagEngineException thrown if there is error in getting the Workflow Information for jobId.
     */
    @Override
    public void streamAuditLog(String jobId, Writer writer, Map<String, String[]> params) throws IOException,
            DagEngineException {
        try {
            streamJobLog(new XLogAuditFilter(new XLogUserFilterParam(params)), jobId, writer, params, LOG_TYPE.AUDIT_LOG);
        }
        catch (CommandException e) {
            throw new IOException(e);
        }
    }

    private void streamJobLog(String jobId, Writer writer, Map<String, String[]> params, LOG_TYPE logType)
            throws IOException, DagEngineException {
        try {
            streamJobLog(new XLogFilter(new XLogUserFilterParam(params)), jobId, writer, params, logType);
        }
        catch (Exception e) {
            throw new IOException(e);
        }
    }

    private void streamJobLog(XLogFilter filter, String jobId, Writer writer, Map<String, String[]> params, LOG_TYPE logType)
            throws IOException, DagEngineException {
        try {
            filter.setParameter(DagXLogInfoService.JOB, jobId);
            WorkflowJob job = getJob(jobId);
            Date lastTime = job.getEndTime();
            if (lastTime == null) {
                lastTime = job.getLastModifiedTime();
            }
            fetchLog(filter, job.getCreatedTime(), lastTime, writer, params, logType);
        }
        catch (Exception e) {
            throw new IOException(e);
        }
    }
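
    // Illustrative sketch (not part of the original class): streaming a job's
    // log to a writer. The params map normally carries the HTTP request
    // parameters; an empty map streams the unfiltered job log.
    //
    //   Writer out = new java.io.StringWriter();
    //   engine.streamLog(jobId, out, new HashMap<String, String[]>());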

    private static final Set<String> FILTER_NAMES = new HashSet<String>();

    static {
        FILTER_NAMES.add(OozieClient.FILTER_USER);
        FILTER_NAMES.add(OozieClient.FILTER_NAME);
        FILTER_NAMES.add(OozieClient.FILTER_GROUP);
        FILTER_NAMES.add(OozieClient.FILTER_STATUS);
        FILTER_NAMES.add(OozieClient.FILTER_ID);
        FILTER_NAMES.add(OozieClient.FILTER_CREATED_TIME_START);
        FILTER_NAMES.add(OozieClient.FILTER_CREATED_TIME_END);
    }

    /**
     * Validate and parse a jobs filter.
     *
     * @param filter filter to validate.
     * @return the parsed filter.
     * @throws DagEngineException thrown if the filter is invalid.
     */
    protected Map<String, List<String>> parseFilter(String filter) throws DagEngineException {
        Map<String, List<String>> map = new HashMap<String, List<String>>();
        if (filter != null) {
            StringTokenizer st = new StringTokenizer(filter, ";");
            while (st.hasMoreTokens()) {
                String token = st.nextToken();
                if (token.contains("=")) {
                    String[] pair = token.split("=");
                    if (pair.length != 2) {
                        throw new DagEngineException(ErrorCode.E0420, filter, "elements must be name=value pairs");
                    }
                    pair[0] = pair[0].toLowerCase();
                    if (!FILTER_NAMES.contains(pair[0])) {
                        throw new DagEngineException(ErrorCode.E0420, filter, XLog
                                .format("invalid name [{0}]", pair[0]));
                    }
                    if (pair[0].equals("status")) {
                        try {
                            WorkflowJob.Status.valueOf(pair[1]);
                        }
                        catch (IllegalArgumentException ex) {
                            throw new DagEngineException(ErrorCode.E0420, filter, XLog.format("invalid status [{0}]",
                                                                                              pair[1]));
                        }
                    }
                    List<String> list = map.get(pair[0]);
                    if (list == null) {
                        list = new ArrayList<String>();
                        map.put(pair[0], list);
                    }
                    list.add(pair[1]);
                }
                else {
                    throw new DagEngineException(ErrorCode.E0420, filter, "elements must be name=value pairs");
                }
            }
        }
        return map;
    }
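
    // Example of the filter syntax parseFilter() accepts (values are
    // hypothetical). Semicolons separate name=value pairs, names are
    // case-insensitive, and a repeated name accumulates its values in one list:
    //
    //   parseFilter("status=RUNNING;status=PREP;user=joe")
    //   // => { "status" -> ["RUNNING", "PREP"], "user" -> ["joe"] }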

    /**
     * Return the info about a set of jobs.
     *
     * @param filter job filter. Refer to the {@link org.apache.oozie.client.OozieClient} for the filter syntax.
     * @param start offset, base 1.
     * @param len number of jobs to return.
     * @return job info for all matching jobs, the jobs don't contain node action information.
     * @throws DagEngineException thrown if the jobs info could not be obtained.
     */
    public WorkflowsInfo getJobs(String filter, int start, int len) throws DagEngineException {
        Map<String, List<String>> filterList = parseFilter(filter);
        try {
            return new JobsXCommand(filterList, start, len).call();
        }
        catch (CommandException dce) {
            throw new DagEngineException(dce);
        }
    }

    /**
     * Return the workflow Job ID for an external ID. <p/> This is reverse lookup for recovery purposes.
     *
     * @param externalId external ID provided at job submission time.
     * @return the associated workflow job ID if any, <code>null</code> if none.
     * @throws DagEngineException thrown if the lookup could not be done.
     */
    @Override
    public String getJobIdForExternalId(String externalId) throws DagEngineException {
        try {
            return new ExternalIdXCommand(externalId).call();
        }
        catch (CommandException dce) {
            throw new DagEngineException(dce);
        }
    }

    @Override
    public CoordinatorJob getCoordJob(String jobId) throws BaseEngineException {
        throw new BaseEngineException(new XException(ErrorCode.E0301, "cannot get a coordinator job from DagEngine"));
    }

    @Override
    public CoordinatorJob getCoordJob(String jobId, String filter, int start, int length, boolean desc)
            throws BaseEngineException {
        throw new BaseEngineException(new XException(ErrorCode.E0301, "cannot get a coordinator job from DagEngine"));
    }

    public WorkflowActionBean getWorkflowAction(String actionId) throws BaseEngineException {
        try {
            return new WorkflowActionInfoXCommand(actionId).call();
        }
        catch (CommandException ex) {
            throw new BaseEngineException(ex);
        }
    }

    /* (non-Javadoc)
     * @see org.apache.oozie.BaseEngine#dryRunSubmit(org.apache.hadoop.conf.Configuration)
     */
    @Override
    public String dryRunSubmit(Configuration conf) throws BaseEngineException {
        try {
            SubmitXCommand submit = new SubmitXCommand(true, conf);
            return submit.call();
        }
        catch (CommandException ex) {
            throw new DagEngineException(ex);
        }
    }

    /**
     * Return the status for a Job ID
     *
     * @param jobId job Id.
     * @return the job's status
     * @throws DagEngineException thrown if the job's status could not be obtained
     */
    @Override
    public String getJobStatus(String jobId) throws DagEngineException {
        try {
            WorkflowJobBean wfJob = WorkflowJobQueryExecutor.getInstance().get(WorkflowJobQuery.GET_WORKFLOW_STATUS, jobId);
            return wfJob.getStatusStr();
        }
        catch (JPAExecutorException ex) {
            throw new DagEngineException(ex);
        }
    }

    @Override
    public void enableSLAAlert(String id, String actions, String dates, String childIds) throws BaseEngineException {
        throw new BaseEngineException(new XException(ErrorCode.E0301, "Not supported for workflow"));
    }

    @Override
    public void disableSLAAlert(String id, String actions, String dates, String childIds) throws BaseEngineException {
        throw new BaseEngineException(new XException(ErrorCode.E0301, "Not supported for workflow"));
    }

    @Override
    public void changeSLA(String id, String actions, String dates, String childIds, String newParams) throws BaseEngineException {
        throw new BaseEngineException(new XException(ErrorCode.E0301, "Not supported for workflow"));
    }

    /**
     * Kill the jobs that satisfy the filter and return their info.
     *
     * @param filter jobs that satisfy the filter will be killed.
     * @param start start index in the database of jobs.
     * @param len maximum number of jobs that will be killed.
     * @return info about the killed jobs, never <code>null</code>.
     * @throws DagEngineException thrown if the jobs could not be killed.
     */
    public WorkflowsInfo killJobs(String filter, int start, int len) throws DagEngineException {
        try {
            Map<String, List<String>> filterList = parseFilter(filter);
            WorkflowsInfo workflowsInfo = new BulkWorkflowXCommand(filterList, start, len, OperationType.Kill).call();
            if (workflowsInfo == null) {
                return new WorkflowsInfo(new ArrayList<WorkflowJobBean>(), 0, 0, 0);
            }
            return workflowsInfo;
        }
        catch (CommandException ex) {
            throw new DagEngineException(ex);
        }
    }
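
    // Illustrative usage (not part of the original class): bulk-killing the
    // first 50 PREP-state workflows owned by a hypothetical user.
    //
    //   WorkflowsInfo killed = engine.killJobs("user=joe;status=PREP", 1, 50);
    //   List<WorkflowJobBean> killedJobs = killed.getWorkflows();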

    /**
     * Suspend the jobs that satisfy the filter and return their info.
     *
     * @param filter filter for the jobs to suspend; can be name, user, group, status, id or a combination of these.
     * @param start offset for the jobs that will be suspended.
     * @param len maximum number of jobs that will be suspended.
     * @return info about the suspended jobs, never <code>null</code>.
     * @throws DagEngineException thrown if the jobs could not be suspended.
     */
    public WorkflowsInfo suspendJobs(String filter, int start, int len) throws DagEngineException {
        try {
            Map<String, List<String>> filterList = parseFilter(filter);
            WorkflowsInfo workflowsInfo = new BulkWorkflowXCommand(filterList, start, len, OperationType.Suspend).call();
            if (workflowsInfo == null) {
                return new WorkflowsInfo(new ArrayList<WorkflowJobBean>(), 0, 0, 0);
            }
            return workflowsInfo;
        }
        catch (CommandException ex) {
            throw new DagEngineException(ex);
        }
    }

    /**
     * Resume the jobs that satisfy the filter and return their info.
     *
     * @param filter filter for the jobs to resume; can be name, user, group, status, id or a combination of these.
     * @param start offset for the jobs that will be resumed.
     * @param len maximum number of jobs that will be resumed.
     * @return info about the resumed jobs, never <code>null</code>.
     * @throws DagEngineException thrown if the jobs could not be resumed.
     */
    public WorkflowsInfo resumeJobs(String filter, int start, int len) throws DagEngineException {
        try {
            Map<String, List<String>> filterList = parseFilter(filter);
            WorkflowsInfo workflowsInfo = new BulkWorkflowXCommand(filterList, start, len, OperationType.Resume).call();
            if (workflowsInfo == null) {
                return new WorkflowsInfo(new ArrayList<WorkflowJobBean>(), 0, 0, 0);
            }
            return workflowsInfo;
        }
        catch (CommandException ex) {
            throw new DagEngineException(ex);
        }
    }
}