Cross-reference search results (sortable by: last modified time | relevance | path)

Searched refs:TaskContextImpl (Results 1 – 14 of 14) sorted by relevance

/dports/devel/spark/spark-2.1.1/core/src/test/scala/org/apache/spark/memory/
MemoryTestingUtils.scala:22 import org.apache.spark.{SparkEnv, TaskContext, TaskContextImpl}
30 new TaskContextImpl(
/dports/devel/spark/spark-2.1.1/core/src/main/scala/org/apache/spark/
TaskContext.scala:67 private[spark] def empty(): TaskContextImpl = {
68 new TaskContextImpl(0, 0, 0, 0, null, new Properties, null)
TaskContextImpl.scala:31 private[spark] class TaskContextImpl(
/dports/devel/hadoop2/hadoop-2.7.2-src/hadoop-tools/hadoop-pipes/src/main/native/pipes/impl/
HadoopPipes.cc:719 class TaskContextImpl: public MapContext, public ReduceContext, class
751 TaskContextImpl(const Factory& _factory) { in TaskContextImpl() function in HadoopPipes::TaskContextImpl
1033 virtual ~TaskContextImpl() { in ~TaskContextImpl()
1054 TaskContextImpl* context = (TaskContextImpl*) ptr; in ping()
1106 TaskContextImpl* context = new TaskContextImpl(factory); in runTask()
/dports/devel/hadoop/hadoop-1.2.1/src/c++/pipes/impl/
HadoopPipes.cc:719 class TaskContextImpl: public MapContext, public ReduceContext, class
751 TaskContextImpl(const Factory& _factory) { in TaskContextImpl() function in HadoopPipes::TaskContextImpl
1033 virtual ~TaskContextImpl() { in ~TaskContextImpl()
1054 TaskContextImpl* context = (TaskContextImpl*) ptr; in ping()
1106 TaskContextImpl* context = new TaskContextImpl(factory); in runTask()
/dports/devel/spark/spark-2.1.1/core/src/main/scala/org/apache/spark/scheduler/
Task.scala:79 context = new TaskContextImpl(
145 @transient protected var context: TaskContextImpl = _
/dports/devel/spark/spark-2.1.1/core/src/test/scala/org/apache/spark/storage/
PartiallySerializedBlockSuite.scala:30 import org.apache.spark.{SparkConf, SparkFunSuite, TaskContext, TaskContextImpl}
148 TaskContext.get().asInstanceOf[TaskContextImpl].markTaskCompleted()
BlockInfoManagerSuite.scala:29 import org.apache.spark.{SparkException, SparkFunSuite, TaskContext, TaskContextImpl}
65 new TaskContextImpl(0, 0, taskAttemptId, 0, null, new Properties, null))
/dports/devel/spark/spark-2.1.1/core/src/test/scala/org/apache/spark/scheduler/
TaskContextSuite.scala:178 context = new TaskContextImpl(0, 0, 0L, 0,
201 context = new TaskContextImpl(0, 0, 0L, 0,
/dports/devel/spark/spark-2.1.1/sql/core/src/test/scala/org/apache/spark/sql/execution/
UnsafeFixedWidthAggregationMapSuite.scala:28 import org.apache.spark.{SparkConf, SparkFunSuite, TaskContext, TaskContextImpl}
70 TaskContext.setTaskContext(new TaskContextImpl(
UnsafeRowSerializerSuite.scala:117 val taskContext = new TaskContextImpl(0, 0, 0, 0, taskMemoryManager, new Properties, null)
UnsafeKVExternalSorterSuite.scala:117 TaskContext.setTaskContext(new TaskContextImpl(
/dports/devel/spark/spark-2.1.1/core/src/test/scala/org/apache/spark/
ShuffleSuite.scala:339 new TaskContextImpl(0, 0, 0L, 0, taskMemoryManager, new Properties, metricsSystem))
346 new TaskContextImpl(0, 0, 1L, 0, taskMemoryManager, new Properties, metricsSystem))
374 new TaskContextImpl(1, 0, 2L, 0, taskMemoryManager, new Properties, metricsSystem))
/dports/devel/spark/spark-2.1.1/core/src/main/scala/org/apache/spark/util/
Utils.scala:1349 TaskContext.get().asInstanceOf[TaskContextImpl].markTaskFailed(originalThrowable)