public class DagEngine extends BaseEngine
BaseEngine.LOG_TYPE
USE_XCOMMAND, user
Constructor and Description |
---|
DagEngine()
Create a system Dag engine, with no user and no group.
|
DagEngine(String user)
Create a Dag engine to perform operations on behalf of a user.
|
Modifier and Type | Method and Description |
---|---|
void |
change(String jobId,
String changeValue)
Change a coordinator job.
|
void |
changeSLA(String id,
String actions,
String dates,
String childIds,
String newParams)
Change SLA properties for job
|
void |
disableSLAAlert(String id,
String actions,
String dates,
String childIds)
Disable SLA alert for job
|
String |
dryRunSubmit(org.apache.hadoop.conf.Configuration conf)
Dry run a job; like {@link BaseEngine#submitJob(org.apache.hadoop.conf.Configuration, boolean)} but doesn't actually execute
the job.
|
void |
enableSLAAlert(String id,
String actions,
String dates,
String childIds)
Enable SLA alert for job
|
CoordinatorJob |
getCoordJob(String jobId)
Return the info about a coord job.
|
CoordinatorJob |
getCoordJob(String jobId,
String filter,
int start,
int length,
boolean desc)
Return the info about a coord job with actions subset.
|
String |
getDefinition(String jobId)
Return the job definition.
|
WorkflowJob |
getJob(String jobId)
Return the info about a job.
|
WorkflowJob |
getJob(String jobId,
int start,
int length)
Return the info about a job with actions subset.
|
String |
getJobIdForExternalId(String externalId)
Return the workflow Job ID for an external ID.
|
WorkflowsInfo |
getJobs(String filter,
int start,
int len)
Return the info about a set of jobs.
|
String |
getJobStatus(String jobId)
Return the status for a Job ID
|
WorkflowActionBean |
getWorkflowAction(String actionId) |
void |
kill(String jobId)
Kill a job.
|
WorkflowsInfo |
killJobs(String filter,
int start,
int len)
Return the jobs that have been killed.
|
protected Map<String,List<String>> |
parseFilter(String filter)
Validate a jobs filter.
|
void |
processCallback(String actionId,
String externalStatus,
Properties actionData)
Process an action callback.
|
void |
reRun(String jobId,
org.apache.hadoop.conf.Configuration conf)
Rerun a job.
|
void |
resume(String jobId)
Resume a job.
|
WorkflowsInfo |
resumeJobs(String filter,
int start,
int len)
Return the jobs that have been resumed.
|
void |
start(String jobId)
Start a job.
|
void |
streamAuditLog(String jobId,
Writer writer,
Map<String,String[]> params)
Stream the audit log of a job.
|
void |
streamErrorLog(String jobId,
Writer writer,
Map<String,String[]> params)
Stream the error log of a job.
|
void |
streamLog(String jobId,
Writer writer,
Map<String,String[]> params)
Stream the log of a job.
|
String |
submitHttpJob(org.apache.hadoop.conf.Configuration conf,
String jobType)
Submit a pig/hive/mapreduce job through HTTP.
|
String |
submitJob(org.apache.hadoop.conf.Configuration conf,
boolean startJob)
Submit a workflow job.
|
String |
submitJobFromCoordinator(org.apache.hadoop.conf.Configuration conf,
String parentId)
Submit a workflow through a coordinator.
|
void |
suspend(String jobId)
Suspend a job.
|
WorkflowsInfo |
suspendJobs(String filter,
int start,
int len)
Return the jobs that have been suspended.
|
fetchLog, getJMSTopicName, getUser
public DagEngine()
public String submitJob(org.apache.hadoop.conf.Configuration conf, boolean startJob) throws DagEngineException
submitJob
in class BaseEngine
conf
- job configuration.startJob
- indicates if the job should be started or not.DagEngineException
- thrown if the job could not be created.public String submitJobFromCoordinator(org.apache.hadoop.conf.Configuration conf, String parentId) throws DagEngineException
conf
- job confparentId
- parent of workflowDagEngineException
public String submitHttpJob(org.apache.hadoop.conf.Configuration conf, String jobType) throws DagEngineException
conf
- job configuration.jobType
- job type - can be "pig", "hive", "sqoop" or "mapreduce".DagEngineException
- thrown if the job could not be created.public void start(String jobId) throws DagEngineException
start
in class BaseEngine
jobId
- job Id.DagEngineException
- thrown if the job could not be started.public void resume(String jobId) throws DagEngineException
resume
in class BaseEngine
jobId
- job Id.DagEngineException
- thrown if the job could not be resumed.public void suspend(String jobId) throws DagEngineException
suspend
in class BaseEngine
jobId
- job Id.DagEngineException
- thrown if the job could not be suspended.public void kill(String jobId) throws DagEngineException
kill
in class BaseEngine
jobId
- job Id.DagEngineException
- thrown if the job could not be killed.public void change(String jobId, String changeValue) throws DagEngineException
BaseEngine
change
in class BaseEngine
jobId
- job Id.changeValue
- change value.DagEngineException
public void reRun(String jobId, org.apache.hadoop.conf.Configuration conf) throws DagEngineException
reRun
in class BaseEngine
jobId
- job Id to rerun.conf
- configuration information for the rerun.DagEngineException
- thrown if the job could not be rerun.public void processCallback(String actionId, String externalStatus, Properties actionData) throws DagEngineException
actionId
- the action Id.externalStatus
- the action external status.actionData
- the action output data, null
if none.DagEngineException
- thrown if the callback could not be processed.public WorkflowJob getJob(String jobId) throws DagEngineException
getJob
in class BaseEngine
jobId
- job Id.DagEngineException
- thrown if the job info could not be obtained.public WorkflowJob getJob(String jobId, int start, int length) throws DagEngineException
getJob
in class BaseEngine
jobId
- job Idstart
- starting from this index in the list of actions belonging to the joblength
- number of actions to be returnedDagEngineException
- thrown if the job info could not be obtained.public String getDefinition(String jobId) throws DagEngineException
getDefinition
in class BaseEngine
jobId
- job Id.DagEngineException
- thrown if the job definition could not be obtained.public void streamLog(String jobId, Writer writer, Map<String,String[]> params) throws IOException, DagEngineException
streamLog
in class BaseEngine
jobId
- job Id.writer
- writer to stream the log to.params
- additional parameters from the requestIOException
- thrown if the log cannot be streamed.DagEngineException
- thrown if there is error in getting the Workflow Information for jobId.public void streamErrorLog(String jobId, Writer writer, Map<String,String[]> params) throws IOException, DagEngineException
streamErrorLog
in class BaseEngine
jobId
- job Id.writer
- writer to stream the log to.params
- additional parameters from the requestIOException
- thrown if the log cannot be streamed.DagEngineException
- thrown if there is error in getting the Workflow Information for jobId.public void streamAuditLog(String jobId, Writer writer, Map<String,String[]> params) throws IOException, DagEngineException
streamAuditLog
in class BaseEngine
jobId
- job Id.writer
- writer to stream the log to.params
- additional parameters from the requestIOException
- thrown if the log cannot be streamed.DagEngineException
- thrown if there is error in getting the Workflow Information for jobId.protected Map<String,List<String>> parseFilter(String filter) throws DagEngineException
filter
- filter to validate.DagEngineException
- thrown if the filter is invalid.public WorkflowsInfo getJobs(String filter, int start, int len) throws DagEngineException
filter
- job filter. Refer to the OozieClient
for the filter syntax.start
- offset, base 1.len
- number of jobs to return.DagEngineException
- thrown if the jobs info could not be obtained.public String getJobIdForExternalId(String externalId) throws DagEngineException
getJobIdForExternalId
in class BaseEngine
externalId
- external ID provided at job submission time.null
if none.DagEngineException
- thrown if the lookup could not be done.public CoordinatorJob getCoordJob(String jobId) throws BaseEngineException
BaseEngine
getCoordJob
in class BaseEngine
jobId
- job Id.BaseEngineException
- thrown if the job info could not be obtained.public CoordinatorJob getCoordJob(String jobId, String filter, int start, int length, boolean desc) throws BaseEngineException
BaseEngine
getCoordJob
in class BaseEngine
jobId
- job Id.filter
- the status filterstart
- starting from this index in the list of actions belonging to the joblength
- number of actions to be returnedBaseEngineException
- thrown if the job info could not be obtained.public WorkflowActionBean getWorkflowAction(String actionId) throws BaseEngineException
BaseEngineException
public String dryRunSubmit(org.apache.hadoop.conf.Configuration conf) throws BaseEngineException
BaseEngine
dryRunSubmit
in class BaseEngine
conf
- job configuration.BaseEngineException
- thrown if there was a problem doing the dryrunpublic String getJobStatus(String jobId) throws DagEngineException
getJobStatus
in class BaseEngine
jobId
- job Id.DagEngineException
- thrown if the job's status could not be obtainedpublic void enableSLAAlert(String id, String actions, String dates, String childIds) throws BaseEngineException
BaseEngine
enableSLAAlert
in class BaseEngine
BaseEngineException
public void disableSLAAlert(String id, String actions, String dates, String childIds) throws BaseEngineException
BaseEngine
disableSLAAlert
in class BaseEngine
BaseEngineException
public void changeSLA(String id, String actions, String dates, String childIds, String newParams) throws BaseEngineException
BaseEngine
changeSLA
in class BaseEngine
BaseEngineException
public WorkflowsInfo killJobs(String filter, int start, int len) throws DagEngineException
filter
- Jobs that satisfy the filter will be killedstart
- start index in the database of jobslen
- maximum number of jobs that will be killedDagEngineException
public WorkflowsInfo suspendJobs(String filter, int start, int len) throws DagEngineException
filter
- Filter for jobs that will be suspended, can be name, user, group, status, id or combination of anystart
- Offset for the jobs that will be suspendedlen
- maximum number of jobs that will be suspendedDagEngineException
public WorkflowsInfo resumeJobs(String filter, int start, int len) throws DagEngineException
filter
- Filter for jobs that will be resumed, can be name, user, group, status, id or combination of anystart
- Offset for the jobs that will be resumedlen
- maximum number of jobs that will be resumedDagEngineException
Copyright © 2015 Apache Software Foundation. All Rights Reserved.