public class JobProgressListener extends SparkListener
All access to the data structures in this class must be synchronized on the class, since the UI thread and the EventBus loop may otherwise be reading and updating the internal data structures concurrently.
| Constructor and Description |
|---|
JobProgressListener(SparkConf conf) |
| Modifier and Type | Method and Description |
|---|---|
scala.collection.mutable.HashMap<java.lang.Object,UIData.JobUIData> |
activeJobs() |
scala.collection.mutable.HashMap<java.lang.Object,StageInfo> |
activeStages() |
scala.collection.Seq<BlockManagerId> |
blockManagerIds() |
scala.collection.mutable.ListBuffer<UIData.JobUIData> |
completedJobs() |
scala.collection.mutable.ListBuffer<StageInfo> |
completedStages() |
long |
endTime() |
scala.collection.mutable.HashMap<java.lang.String,BlockManagerId> |
executorIdToBlockManagerId() |
scala.collection.mutable.ListBuffer<UIData.JobUIData> |
failedJobs() |
scala.collection.mutable.ListBuffer<StageInfo> |
failedStages() |
scala.collection.mutable.HashMap<java.lang.String,scala.collection.mutable.HashSet<java.lang.Object>> |
jobGroupToJobIds() |
scala.collection.mutable.HashMap<java.lang.Object,UIData.JobUIData> |
jobIdToData() |
int |
numCompletedJobs() |
int |
numCompletedStages() |
int |
numFailedJobs() |
int |
numFailedStages() |
void |
onApplicationEnd(SparkListenerApplicationEnd appEnded)
Called when the application ends.
|
void |
onApplicationStart(SparkListenerApplicationStart appStarted)
Called when the application starts.
|
void |
onBlockManagerAdded(SparkListenerBlockManagerAdded blockManagerAdded)
Called when a new block manager has joined.
|
void |
onBlockManagerRemoved(SparkListenerBlockManagerRemoved blockManagerRemoved)
Called when an existing block manager has been removed.
|
void |
onEnvironmentUpdate(SparkListenerEnvironmentUpdate environmentUpdate)
Called when environment properties have been updated.
|
void |
onExecutorMetricsUpdate(SparkListenerExecutorMetricsUpdate executorMetricsUpdate)
Called when the driver receives task metrics from an executor in a heartbeat.
|
void |
onJobEnd(SparkListenerJobEnd jobEnd)
Called when a job ends.
|
void |
onJobStart(SparkListenerJobStart jobStart)
Called when a job starts.
|
void |
onStageCompleted(SparkListenerStageCompleted stageCompleted)
Called when a stage completes successfully or fails, with information on the completed stage.
|
void |
onStageSubmitted(SparkListenerStageSubmitted stageSubmitted)
For FIFO scheduling, all stages are contained by the "default" pool, but the "default" pool here is meaningless.
|
void |
onTaskEnd(SparkListenerTaskEnd taskEnd)
Called when a task ends.
|
void |
onTaskGettingResult(SparkListenerTaskGettingResult taskGettingResult)
Called when a task begins remotely fetching its result (will not be called for tasks that do
not need to fetch the result remotely).
|
void |
onTaskStart(SparkListenerTaskStart taskStart)
Called when a task starts.
|
scala.collection.mutable.HashMap<java.lang.Object,StageInfo> |
pendingStages() |
scala.collection.mutable.HashMap<java.lang.String,scala.collection.mutable.HashMap<java.lang.Object,StageInfo>> |
poolToActiveStages() |
int |
retainedJobs() |
int |
retainedStages() |
scala.Option<scala.Enumeration.Value> |
schedulingMode() |
scala.collection.mutable.ListBuffer<StageInfo> |
skippedStages() |
scala.collection.mutable.HashMap<java.lang.Object,scala.collection.mutable.HashSet<java.lang.Object>> |
stageIdToActiveJobIds() |
scala.collection.mutable.HashMap<scala.Tuple2<java.lang.Object,java.lang.Object>,UIData.StageUIData> |
stageIdToData() |
scala.collection.mutable.HashMap<java.lang.Object,StageInfo> |
stageIdToInfo() |
long |
startTime() |
void |
updateAggregateMetrics(UIData.StageUIData stageData,
java.lang.String execId,
org.apache.spark.executor.TaskMetrics taskMetrics,
scala.Option<org.apache.spark.executor.TaskMetrics> oldMetrics)
Upon receiving new metrics for a task, updates the per-stage and per-executor-per-stage
aggregate metrics by calculating deltas between the currently recorded metrics and the new
metrics.
|
Methods inherited from class SparkListener: onBlockUpdated, onExecutorAdded, onExecutorRemoved, onOtherEvent, onUnpersistRDD

public JobProgressListener(SparkConf conf)
public long startTime()
public long endTime()
public scala.collection.mutable.HashMap<java.lang.Object,UIData.JobUIData> activeJobs()
public scala.collection.mutable.ListBuffer<UIData.JobUIData> completedJobs()
public scala.collection.mutable.ListBuffer<UIData.JobUIData> failedJobs()
public scala.collection.mutable.HashMap<java.lang.Object,UIData.JobUIData> jobIdToData()
public scala.collection.mutable.HashMap<java.lang.String,scala.collection.mutable.HashSet<java.lang.Object>> jobGroupToJobIds()
public scala.collection.mutable.HashMap<java.lang.Object,StageInfo> pendingStages()
public scala.collection.mutable.HashMap<java.lang.Object,StageInfo> activeStages()
public scala.collection.mutable.ListBuffer<StageInfo> completedStages()
public scala.collection.mutable.ListBuffer<StageInfo> skippedStages()
public scala.collection.mutable.ListBuffer<StageInfo> failedStages()
public scala.collection.mutable.HashMap<scala.Tuple2<java.lang.Object,java.lang.Object>,UIData.StageUIData> stageIdToData()
public scala.collection.mutable.HashMap<java.lang.Object,StageInfo> stageIdToInfo()
public scala.collection.mutable.HashMap<java.lang.Object,scala.collection.mutable.HashSet<java.lang.Object>> stageIdToActiveJobIds()
public scala.collection.mutable.HashMap<java.lang.String,scala.collection.mutable.HashMap<java.lang.Object,StageInfo>> poolToActiveStages()
public int numCompletedStages()
public int numFailedStages()
public int numCompletedJobs()
public int numFailedJobs()
public scala.collection.mutable.HashMap<java.lang.String,BlockManagerId> executorIdToBlockManagerId()
public scala.collection.Seq<BlockManagerId> blockManagerIds()
public scala.Option<scala.Enumeration.Value> schedulingMode()
public int retainedStages()
public int retainedJobs()
public void onJobStart(SparkListenerJobStart jobStart)
Overrides: onJobStart in class SparkListener
Parameters: jobStart - (undocumented)

public void onJobEnd(SparkListenerJobEnd jobEnd)
Overrides: onJobEnd in class SparkListener
Parameters: jobEnd - (undocumented)

public void onStageCompleted(SparkListenerStageCompleted stageCompleted)
Overrides: onStageCompleted in class SparkListener
Parameters: stageCompleted - (undocumented)

public void onStageSubmitted(SparkListenerStageSubmitted stageSubmitted)
Overrides: onStageSubmitted in class SparkListener
Parameters: stageSubmitted - (undocumented)

public void onTaskStart(SparkListenerTaskStart taskStart)
Overrides: onTaskStart in class SparkListener
Parameters: taskStart - (undocumented)

public void onTaskGettingResult(SparkListenerTaskGettingResult taskGettingResult)
Overrides: onTaskGettingResult in class SparkListener
Parameters: taskGettingResult - (undocumented)

public void onTaskEnd(SparkListenerTaskEnd taskEnd)
Overrides: onTaskEnd in class SparkListener
Parameters: taskEnd - (undocumented)

public void updateAggregateMetrics(UIData.StageUIData stageData, java.lang.String execId, org.apache.spark.executor.TaskMetrics taskMetrics, scala.Option<org.apache.spark.executor.TaskMetrics> oldMetrics)
Parameters: stageData - (undocumented)
            execId - (undocumented)
            taskMetrics - (undocumented)
            oldMetrics - (undocumented)

public void onExecutorMetricsUpdate(SparkListenerExecutorMetricsUpdate executorMetricsUpdate)
Overrides: onExecutorMetricsUpdate in class SparkListener
Parameters: executorMetricsUpdate - (undocumented)

public void onEnvironmentUpdate(SparkListenerEnvironmentUpdate environmentUpdate)
Overrides: onEnvironmentUpdate in class SparkListener
Parameters: environmentUpdate - (undocumented)

public void onBlockManagerAdded(SparkListenerBlockManagerAdded blockManagerAdded)
Overrides: onBlockManagerAdded in class SparkListener
Parameters: blockManagerAdded - (undocumented)

public void onBlockManagerRemoved(SparkListenerBlockManagerRemoved blockManagerRemoved)
Overrides: onBlockManagerRemoved in class SparkListener
Parameters: blockManagerRemoved - (undocumented)

public void onApplicationStart(SparkListenerApplicationStart appStarted)
Overrides: onApplicationStart in class SparkListener
Parameters: appStarted - (undocumented)

public void onApplicationEnd(SparkListenerApplicationEnd appEnded)
Overrides: onApplicationEnd in class SparkListener
Parameters: appEnded - (undocumented)