
Searched for refs:maxRpcMessageSize (results 1 – 4 of 4), sorted by relevance

/dports/devel/spark/spark-2.1.1/core/src/test/scala/org/apache/spark/scheduler/
TaskResultGetterSuite.scala
  123  val maxRpcMessageSize = RpcUtils.maxMessageSizeBytes(conf)
  125  sc.parallelize(Seq(1), 1).map(x => 1.to(maxRpcMessageSize).toArray).reduce((x, y) => x)
  126  assert(result === 1.to(maxRpcMessageSize).toArray)
  148  val maxRpcMessageSize = RpcUtils.maxMessageSizeBytes(conf)
  150  sc.parallelize(Seq(1), 1).map(x => 1.to(maxRpcMessageSize).toArray).reduce((x, y) => x)
  152  assert(result === 1.to(maxRpcMessageSize).toArray)
SparkListenerSuite.scala
  289  val maxRpcMessageSize = RpcUtils.maxMessageSizeBytes(conf)
  290  assert(maxRpcMessageSize === 1024 * 1024)
  292  .map { x => 1.to(maxRpcMessageSize).toArray }
  294  assert(result === 1.to(maxRpcMessageSize).toArray)
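
Both suites above exercise the same pattern: resolve the RPC message limit from the configuration, then produce a task result large enough to exceed it. Below is a minimal standalone sketch of that pattern, assuming a local Spark 2.1.x build on the classpath. Because RpcUtils is Spark-internal, the limit is mirrored here directly as 1 MiB in bytes, which is what the assertion at SparkListenerSuite.scala:290 above shows maxMessageSizeBytes returns when spark.rpc.message.maxSize is set to 1. The object name and settings are illustrative, not taken from the suites verbatim.

    import org.apache.spark.{SparkConf, SparkContext}

    object OversizedResultSketch {
      def main(args: Array[String]): Unit = {
        val conf = new SparkConf()
          .setMaster("local[1]")
          .setAppName("oversized-result-sketch")
          .set("spark.rpc.message.maxSize", "1")   // 1 MiB RPC message limit
        // RpcUtils.maxMessageSizeBytes(conf) is Spark-internal; per the assertion at
        // SparkListenerSuite.scala:290 it resolves the setting above to 1 MiB in bytes.
        val maxRpcMessageSize = 1 * 1024 * 1024
        val sc = new SparkContext(conf)
        try {
          // An Array[Int] with maxRpcMessageSize elements serializes to well over
          // 1 MiB, so it cannot be returned directly inside a single RPC message.
          val result = sc.parallelize(Seq(1), 1)
            .map(_ => 1.to(maxRpcMessageSize).toArray)
            .reduce((x, y) => x)
          assert(result.sameElements(1.to(maxRpcMessageSize).toArray))
        } finally {
          sc.stop()
        }
      }
    }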
/dports/devel/spark/spark-2.1.1/core/src/main/scala/org/apache/spark/scheduler/cluster/
CoarseGrainedSchedulerBackend.scala
   53  private val maxRpcMessageSize = RpcUtils.maxMessageSizeBytes(conf)
  253  if (serializedTask.limit >= maxRpcMessageSize) {
  259  msg = msg.format(task.taskId, task.index, serializedTask.limit, maxRpcMessageSize)
/dports/devel/spark/spark-2.1.1/core/src/main/scala/org/apache/spark/
MapOutputTracker.scala
  295  private val maxRpcMessageSize = RpcUtils.maxMessageSizeBytes(conf)
  322  if (minSizeForBroadcast > maxRpcMessageSize) {
  324  s"be <= spark.rpc.message.maxSize ($maxRpcMessageSize bytes) to prevent sending an rpc " +