
Searched refs:stageAttemptId (Results 1 – 22 of 22) sorted by relevance

/dports/devel/spark/spark-2.1.1/core/src/test/scala/org/apache/spark/scheduler/
FakeTask.scala
36 createTaskSet(numTasks, stageAttemptId = 0, prefLocs: _*)
39 def createTaskSet(numTasks: Int, stageAttemptId: Int, prefLocs: Seq[TaskLocation]*): TaskSet = {
40 createTaskSet(numTasks, stageId = 0, stageAttemptId, prefLocs: _*)
43 def createTaskSet(numTasks: Int, stageId: Int, stageAttemptId: Int, prefLocs: Seq[TaskLocation]*):
51 new TaskSet(tasks, stageId, stageAttemptId, priority = 0, null)
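
These three overloads chain into one another, so a test can pin down as much or as little of the task-set identity as it needs. A minimal sketch of the same chaining pattern, with a hypothetical SimpleTaskSet standing in for the real TaskSet (which also carries the tasks, priority, and job properties):

    // Hypothetical stand-in for TaskSet, reduced to the identity fields.
    case class SimpleTaskSet(numTasks: Int, stageId: Int, stageAttemptId: Int)

    object FakeTaskSets {
      // Narrowest overload: stage and attempt both default to 0.
      def createTaskSet(numTasks: Int): SimpleTaskSet =
        createTaskSet(numTasks, stageAttemptId = 0)

      // Pin the attempt, default the stage.
      def createTaskSet(numTasks: Int, stageAttemptId: Int): SimpleTaskSet =
        createTaskSet(numTasks, stageId = 0, stageAttemptId = stageAttemptId)

      // Fully explicit variant that the other two delegate to.
      def createTaskSet(numTasks: Int, stageId: Int, stageAttemptId: Int): SimpleTaskSet =
        SimpleTaskSet(numTasks, stageId, stageAttemptId)
    }

    // A second attempt of stage 0, e.g. after a fetch failure forced a retry.
    val retry = FakeTaskSets.createTaskSet(numTasks = 4, stageId = 0, stageAttemptId = 1)
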
TaskSchedulerImplSuite.scala
179 taskScheduler.taskSetManagerForAttempt(attempt1.stageId, attempt1.stageAttemptId)
184 taskScheduler.taskSetManagerForAttempt(attempt2.stageId, attempt2.stageAttemptId)
203 taskScheduler.taskSetManagerForAttempt(attempt1.stageId, attempt1.stageAttemptId)
218 assert(mgr.taskSet.stageAttemptId === 1)
235 val mgr1 = taskScheduler.taskSetManagerForAttempt(attempt1.stageId, attempt1.stageAttemptId).get
256 assert(mgr.taskSet.stageAttemptId === 1)
302 val tsm = taskScheduler.taskSetManagerForAttempt(taskSet.stageId, taskSet.stageAttemptId).get
353 val tsm = taskScheduler.taskSetManagerForAttempt(taskSet.stageId, taskSet.stageAttemptId).get
SchedulerIntegrationSuite.scala
570 (task.stageId, task.stageAttemptId, task.partitionId) match {
601 stageToAttempts.getOrElseUpdate(task.stageId, new HashSet()) += task.stageAttemptId
607 (task.stageId, task.stageAttemptId, task.partitionId) match {
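
The integration suite keys its scripted backend behaviour on the full (stageId, stageAttemptId, partitionId) triple and records which attempts it has seen per stage. A self-contained sketch of that bookkeeping, using only the standard library:

    import scala.collection.mutable.{HashMap, HashSet}

    // Record every attempt id observed for each stage, as the suite does
    // before deciding how a scripted task should behave.
    val stageToAttempts = new HashMap[Int, HashSet[Int]]

    def recordTask(stageId: Int, stageAttemptId: Int): Unit =
      stageToAttempts.getOrElseUpdate(stageId, new HashSet()) += stageAttemptId

    recordTask(stageId = 0, stageAttemptId = 0)
    recordTask(stageId = 0, stageAttemptId = 1) // stage 0 was retried
    recordTask(stageId = 1, stageAttemptId = 0)
    assert(stageToAttempts(0) == Set(0, 1))
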
DAGSchedulerSuite.scala
674 assert(stageAttempt.stageAttemptId == attempt)
1299 assert(stage0Resubmit.stageAttemptId === 1)
1312 assert(stage1Resubmit.stageAttemptId === 1)
1363 assert(stage2TaskSet.stageAttemptId == 0)
1415 assert(stage1TaskSet.stageAttemptId == 0)
1557 assert(taskSets(2).stageAttemptId === 1)
TaskSetManagerSuite.scala
980 val taskSet = FakeTask.createTaskSet(numTasks = 1, stageId = 0, stageAttemptId = 0)
985 val taskSet2 = FakeTask.createTaskSet(numTasks = 1, stageId = 0, stageAttemptId = 1)
990 val taskSet3 = FakeTask.createTaskSet(numTasks = 1, stageId = 1, stageAttemptId = 1)
/dports/devel/spark/spark-2.1.1/core/src/main/scala/org/apache/spark/status/api/v1/
OneStageResource.scala
48 @PathParam("stageAttemptId") stageAttemptId: Int): StageData = {
49 withStageAttempt(stageId, stageAttemptId) { stage =>
59 @PathParam("stageAttemptId") stageAttemptId: Int,
62 withStageAttempt(stageId, stageAttemptId) { stage =>
79 @PathParam("stageAttemptId") stageAttemptId: Int,
83 withStageAttempt(stageId, stageAttemptId) { stage =>
124 stageAttemptId: Int)
127 val oneAttempt = attempts.find { stage => stage.info.attemptId == stageAttemptId }
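
Here stageAttemptId arrives as a URL path segment and is matched against StageInfo.attemptId (line 127) to pick one attempt out of a stage's history. A stripped-down sketch of the routing side, using the same JAX-RS annotations; the class name and body are illustrative, not Spark's:

    import javax.ws.rs.{GET, Path, PathParam, Produces}
    import javax.ws.rs.core.MediaType

    @Path("/stages")
    @Produces(Array(MediaType.APPLICATION_JSON))
    class OneStageAttemptSketch {
      // Serves e.g. GET /stages/3/1 for attempt 1 of stage 3. The real
      // resource resolves the pair to a StageData via withStageAttempt.
      @GET
      @Path("/{stageId: \\d+}/{stageAttemptId: \\d+}")
      def oneAttemptData(
          @PathParam("stageId") stageId: Int,
          @PathParam("stageAttemptId") stageAttemptId: Int): String =
        s"stage $stageId attempt $stageAttemptId"
    }

In Spark's REST API the corresponding routes live under /api/v1/applications/{appId}/stages/{stageId}/{stageAttemptId}.
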
/dports/devel/spark/spark-2.1.1/core/src/main/scala/org/apache/spark/scheduler/
TaskSet.scala
29 val stageAttemptId: Int,
32 val id: String = stageId + "." + stageAttemptId
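
The composite id makes the attempt visible wherever a task set is named in logs or the UI. A one-line mirror, under a hypothetical class name:

    // Attempt 1 of stage 2 is rendered as "2.1" in scheduler log lines.
    class TaskSetIdSketch(val stageId: Int, val stageAttemptId: Int) {
      val id: String = stageId + "." + stageAttemptId
    }

    assert(new TaskSetIdSketch(2, 1).id == "2.1")
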
Stage.scala
105 private[scheduler] def failedOnFetchAndShouldAbort(stageAttemptId: Int): Boolean = {
106 fetchFailedAttemptIds.add(stageAttemptId)
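
failedOnFetchAndShouldAbort both records the failing attempt and answers whether the stage has now failed too often. A self-contained sketch, assuming a cutoff of four distinct failed attempts (Spark 2.1's Stage.MAX_CONSECUTIVE_FETCH_FAILURES):

    import scala.collection.mutable.HashSet

    class FetchFailureTrackerSketch(maxFailures: Int = 4) {
      // Distinct attempt ids that have hit a fetch failure; a set, so the
      // same attempt failing twice is only counted once.
      private val fetchFailedAttemptIds = new HashSet[Int]

      def failedOnFetchAndShouldAbort(stageAttemptId: Int): Boolean = {
        fetchFailedAttemptIds.add(stageAttemptId)
        fetchFailedAttemptIds.size >= maxFailures
      }
    }

    val tracker = new FetchFailureTrackerSketch()
    assert(!tracker.failedOnFetchAndShouldAbort(0))
    assert(!tracker.failedOnFetchAndShouldAbort(0)) // duplicate attempt, still one failure
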
ResultTask.scala
54 stageAttemptId: Int,
64 extends Task[U](stageId, stageAttemptId, partition.index, metrics, localProperties, jobId,
ShuffleMapTask.scala
55 stageAttemptId: Int,
64 extends Task[MapStatus](stageId, stageAttemptId, partition.index, metrics, localProperties, jobId,
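
Both concrete task types simply thread stageAttemptId through to the Task base constructor, so every running task knows which attempt of its stage it belongs to. A skeletal mirror of that hierarchy (the real constructors also take the serialized RDD, partition, metrics, and job properties):

    abstract class TaskSketch(val stageId: Int, val stageAttemptId: Int, val partitionId: Int)

    class ResultTaskSketch(stageId: Int, stageAttemptId: Int, partitionId: Int)
      extends TaskSketch(stageId, stageAttemptId, partitionId)

    class ShuffleMapTaskSketch(stageId: Int, stageAttemptId: Int, partitionId: Int)
      extends TaskSketch(stageId, stageAttemptId, partitionId)
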
TaskSchedulerImpl.scala
180 stageTaskSets(taskSet.stageAttemptId) = manager
242 taskSetsForStage -= manager.taskSet.stageAttemptId
410 (id, taskSetMgr.stageId, taskSetMgr.taskSet.stageAttemptId, accInfos)
628 stageAttemptId: Int): Option[TaskSetManager] = {
631 manager <- attempts.get(stageAttemptId)
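
The scheduler keeps a two-level registry, stageId -> (stageAttemptId -> TaskSetManager), and the for-comprehension at line 628 succeeds only when both lookups do. A sketch with the manager simplified to a string label:

    import scala.collection.mutable.HashMap

    // stageId -> (stageAttemptId -> manager); the manager is just a label here.
    val taskSetsByStageIdAndAttempt = new HashMap[Int, HashMap[Int, String]]

    def register(stageId: Int, stageAttemptId: Int, manager: String): Unit =
      taskSetsByStageIdAndAttempt
        .getOrElseUpdate(stageId, new HashMap[Int, String]) += (stageAttemptId -> manager)

    // Same shape as taskSetManagerForAttempt: None if either level misses.
    def managerForAttempt(stageId: Int, stageAttemptId: Int): Option[String] =
      for {
        attempts <- taskSetsByStageIdAndAttempt.get(stageId)
        manager <- attempts.get(stageAttemptId)
      } yield manager

    register(stageId = 5, stageAttemptId = 1, manager = "manager for 5.1")
    assert(managerForAttempt(5, 1).contains("manager for 5.1"))
    assert(managerForAttempt(5, 0).isEmpty)
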
SparkListener.scala
49 case class SparkListenerTaskStart(stageId: Int, stageAttemptId: Int, taskInfo: TaskInfo)
58 stageAttemptId: Int,
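
Both task events carry the attempt id, so listeners can attribute a task to the right run of its stage. A small listener that does exactly that, built on Spark's public listener API; the counter itself is illustrative:

    import scala.collection.mutable
    import org.apache.spark.scheduler.{SparkListener, SparkListenerTaskEnd}

    // Counts finished tasks per (stageId, stageAttemptId), so a retried stage
    // is tallied separately from its first attempt.
    class PerAttemptTaskCounter extends SparkListener {
      val taskCounts = new mutable.HashMap[(Int, Int), Int]

      override def onTaskEnd(taskEnd: SparkListenerTaskEnd): Unit = {
        val key = (taskEnd.stageId, taskEnd.stageAttemptId)
        taskCounts(key) = taskCounts.getOrElse(key, 0) + 1
      }
    }

Register it with sc.addSparkListener(new PerAttemptTaskCounter) on a live SparkContext.
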
Task.scala
58 val stageAttemptId: Int,
95 new CallerContext("TASK", appId, appAttemptId, jobId, Option(stageId), Option(stageAttemptId),
DAGScheduler.scala
794 val stageAttemptId = stageIdToStage.get(task.stageId).map(_.latestInfo.attemptId).getOrElse(-1)
795 listenerBus.post(SparkListenerTaskStart(task.stageId, stageAttemptId, taskInfo))
1137 stageId, task.stageAttemptId, taskType, event.reason, event.taskInfo, taskMetrics))
1241 if (failedStage.latestInfo.attemptId != task.stageAttemptId) {
1261 } else if (failedStage.failedOnFetchAndShouldAbort(task.stageAttemptId)) {
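
Lines 1241 and 1261 are two halves of one decision: first drop fetch failures reported by a superseded attempt, then ask the stage whether it has now failed often enough to abort. A self-contained sketch of that ordering; the names and the cutoff of 4 are illustrative, not Spark's:

    import scala.collection.mutable

    sealed trait FetchFailureAction
    case object IgnoreStale extends FetchFailureAction
    case object AbortStage extends FetchFailureAction
    case object ResubmitStage extends FetchFailureAction

    def onFetchFailed(
        latestAttemptId: Int,    // failedStage.latestInfo.attemptId
        taskStageAttemptId: Int, // task.stageAttemptId
        fetchFailedAttemptIds: mutable.Set[Int],
        maxFailures: Int = 4): FetchFailureAction = {
      if (latestAttemptId != taskStageAttemptId) {
        IgnoreStale // the stage already moved on to a newer attempt
      } else {
        fetchFailedAttemptIds += taskStageAttemptId
        if (fetchFailedAttemptIds.size >= maxFailures) AbortStage else ResubmitStage
      }
    }
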
/dports/devel/spark/spark-2.1.1/sql/core/src/main/scala/org/apache/spark/sql/execution/ui/
SQLListener.scala
167 _stageIdToStageMetrics(stageId) = new SQLStageMetrics(stageAttemptId = 0))
199 val stageAttemptId = stageSubmitted.stageInfo.attemptId
202 _stageIdToStageMetrics(stageId) = new SQLStageMetrics(stageAttemptId)
215 taskEnd.stageAttemptId,
236 if (stageAttemptID < stageMetrics.stageAttemptId) {
238 } else if (stageAttemptID > stageMetrics.stageAttemptId) {
396 taskEnd.stageAttemptId,
478 val stageAttemptId: Long,
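
The SQL listener stores one SQLStageMetrics per stage, stamped with the attempt it belongs to (as a Long, per line 478), and compares incoming updates against that stamp: older attempts are dropped, newer ones mean the stored metrics are out of date. A sketch of the comparison at lines 236-238:

    // Simplified stand-in for SQLStageMetrics: just the attempt stamp.
    class StageMetricsSketch(val stageAttemptId: Long)

    // Returns true only when the update belongs to the attempt currently
    // being tracked.
    def acceptUpdate(incomingAttemptId: Long, stored: StageMetricsSketch): Boolean =
      if (incomingAttemptId < stored.stageAttemptId) {
        false // stale update from an earlier, superseded attempt
      } else if (incomingAttemptId > stored.stageAttemptId) {
        false // tracker missed the resubmission; the real code logs a warning
      } else {
        true
      }
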
/dports/devel/spark/spark-2.1.1/core/src/main/scala/org/apache/spark/ui/jobs/
ExecutorTable.scala
28 private[ui] class ExecutorTable(stageId: Int, stageAttemptId: Int, parent: StagesTab) {
39 val stageData = listener.stageIdToData.get((stageId, stageAttemptId))
113 listener.stageIdToData.get((stageId, stageAttemptId)) match {
StagePage.scala
111 val stageAttemptId = parameterAttempt.toInt
112 val stageDataOption = progressListener.stageIdToData.get((stageId, stageAttemptId))
114 val stageHeader = s"Details for Stage $stageId (Attempt $stageAttemptId)"
118 <p>No information to display for Stage {stageId} (Attempt {stageAttemptId})</p>
143 val allAccumulables = progressListener.stageIdToData((stageId, stageAttemptId)).accumulables
564 val executorTable = new ExecutorTable(stageId, stageAttemptId, parent)
JobProgressListener.scala
332 val stageData = stageIdToData.getOrElseUpdate((taskStart.stageId, taskStart.stageAttemptId), {
358 if (info != null && taskEnd.stageAttemptId != -1) {
359 val stageData = stageIdToData.getOrElseUpdate((taskEnd.stageId, taskEnd.stageAttemptId), {
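
All three UI classes key their state by the (stageId, stageAttemptId) tuple, and getOrElseUpdate gives a retried stage a fresh record instead of overwriting the first attempt's numbers. A sketch of the pattern, with StageUIData reduced to one counter:

    import scala.collection.mutable

    class StageUIDataSketch { var numCompleteTasks = 0 }

    val stageIdToData = new mutable.HashMap[(Int, Int), StageUIDataSketch]

    def onTaskEndSketch(stageId: Int, stageAttemptId: Int): Unit = {
      val stageData = stageIdToData.getOrElseUpdate((stageId, stageAttemptId),
        new StageUIDataSketch)
      stageData.numCompleteTasks += 1
    }

    onTaskEndSketch(stageId = 3, stageAttemptId = 0)
    onTaskEndSketch(stageId = 3, stageAttemptId = 1) // the retry gets its own entry
    assert(stageIdToData.size == 2)
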
/dports/devel/spark/spark-2.1.1/core/src/main/scala/org/apache/spark/util/
JsonProtocol.scala
125 ("Stage Attempt ID" -> taskStart.stageAttemptId) ~
142 ("Stage Attempt ID" -> taskEnd.stageAttemptId) ~
239 ("Metrics Updated" -> accumUpdates.map { case (taskId, stageId, stageAttemptId, updates) =>
242 ("Stage Attempt ID" -> stageAttemptId) ~
550 val stageAttemptId = (json \ "Stage Attempt ID").extractOpt[Int].getOrElse(0)
552 SparkListenerTaskStart(stageId, stageAttemptId, taskInfo)
562 val stageAttemptId = (json \ "Stage Attempt ID").extractOpt[Int].getOrElse(0)
567 SparkListenerTaskEnd(stageId, stageAttemptId, taskType, taskEndReason, taskInfo, taskMetrics)
658 val stageAttemptId = (json \ "Stage Attempt ID").extract[Int]
661 (taskId, stageId, stageAttemptId, updates)
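
On the write side the attempt id is always emitted under "Stage Attempt ID"; on the read side, extractOpt with a default of 0 keeps event logs from Spark versions that predate stage attempts readable. A round-trip sketch with json4s, the library JsonProtocol itself uses; the function names are illustrative:

    import org.json4s._
    import org.json4s.JsonDSL._

    implicit val formats: Formats = DefaultFormats

    def taskStartToJsonSketch(stageId: Int, stageAttemptId: Int): JValue =
      ("Stage ID" -> stageId) ~ ("Stage Attempt ID" -> stageAttemptId)

    def stageAttemptIdFromJson(json: JValue): Int =
      (json \ "Stage Attempt ID").extractOpt[Int].getOrElse(0)

    assert(stageAttemptIdFromJson(taskStartToJsonSketch(7, 2)) == 2)

    // A pre-attempt-id event log line lacks the field and parses as attempt 0.
    val legacy: JValue = ("Stage ID" -> 7)
    assert(stageAttemptIdFromJson(legacy) == 0)
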
Utils.scala
2609 stageAttemptId: Option[Int] = None,
2617 val stageAttemptIdStr = if (stageAttemptId.isDefined) s"_${stageAttemptId.get}" else ""
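
CallerContext assembles its context string from optional pieces, and an absent attempt id simply contributes an empty suffix:

    // Sketch of the optional-suffix handling at line 2617.
    def attemptSuffix(stageAttemptId: Option[Int] = None): String =
      if (stageAttemptId.isDefined) s"_${stageAttemptId.get}" else ""

    assert(attemptSuffix(Some(1)) == "_1")
    assert(attemptSuffix() == "")
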
/dports/devel/spark/spark-2.1.1/sql/core/src/test/scala/org/apache/spark/sql/execution/ui/
SQLListenerSuite.scala
178 stageAttemptId = 0,
189 stageAttemptId = 1,
196 stageAttemptId = 1,
218 stageAttemptId = 0,
225 stageAttemptId = 0,
/dports/devel/spark/spark-2.1.1/core/src/test/scala/org/apache/spark/
AccumulatorSuite.scala
307 def getCompletedTaskInfos(stageId: StageId, stageAttemptId: StageAttemptId): Seq[TaskInfo] =
308 completedTaskInfos.getOrElse((stageId, stageAttemptId), Seq.empty[TaskInfo])
351 (taskEnd.stageId, taskEnd.stageAttemptId), new ArrayBuffer[TaskInfo]) += taskEnd.taskInfo
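
The suite aliases Int to StageId and StageAttemptId purely so the map key reads well. A sketch of the same index with the aliases made explicit; the completed tasks are simplified to strings in place of TaskInfo:

    import scala.collection.mutable

    object CompletedTaskIndexSketch {
      type StageId = Int
      type StageAttemptId = Int

      // (stage, attempt) -> records completed so far.
      private val completedTaskInfos =
        new mutable.HashMap[(StageId, StageAttemptId), mutable.ArrayBuffer[String]]

      def record(stageId: StageId, stageAttemptId: StageAttemptId, task: String): Unit =
        completedTaskInfos.getOrElseUpdate((stageId, stageAttemptId),
          new mutable.ArrayBuffer[String]) += task

      def getCompletedTasks(stageId: StageId, stageAttemptId: StageAttemptId): Seq[String] =
        completedTaskInfos.getOrElse((stageId, stageAttemptId), Seq.empty[String])
    }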