
Searched refs:rpcEnv (Results 1 – 25 of 48) sorted by relevance

/dports/devel/spark/spark-2.1.1/core/src/test/scala/org/apache/spark/
MapOutputTrackerSuite.scala
46 val rpcEnv = createRpcEnv("test")
51 rpcEnv.shutdown()
55 val rpcEnv = createRpcEnv("test")
74 rpcEnv.shutdown()
78 val rpcEnv = createRpcEnv("test")
97 rpcEnv.shutdown()
124 rpcEnv.shutdown()
165 rpcEnv.shutdown()
194 rpcEnv.shutdown()
238 rpcEnv.shutdown()
[all …]
/dports/devel/spark/spark-2.1.1/core/src/test/scala/org/apache/spark/deploy/worker/
WorkerWatcherSuite.scala
26 val rpcEnv = RpcEnv.create("test", "localhost", 12345, conf, new SecurityManager(conf))
28 val workerWatcher = new WorkerWatcher(rpcEnv, targetWorkerUrl, isTesting = true)
29 rpcEnv.setupEndpoint("worker-watcher", workerWatcher)
32 rpcEnv.shutdown()
37 val rpcEnv = RpcEnv.create("test", "localhost", 12345, conf, new SecurityManager(conf))
40 val workerWatcher = new WorkerWatcher(rpcEnv, targetWorkerUrl, isTesting = true)
41 rpcEnv.setupEndpoint("worker-watcher", workerWatcher)
44 rpcEnv.shutdown()
WorkerSuite.scala
68 val rpcEnv = RpcEnv.create("test", "localhost", 12345, conf, new SecurityManager(conf))
69 val worker = new Worker(rpcEnv, 50000, 20, 1234 * 5, Array.fill(1)(RpcAddress("1.2.3.4", 1234)),
94 val rpcEnv = RpcEnv.create("test", "localhost", 12345, conf, new SecurityManager(conf))
95 val worker = new Worker(rpcEnv, 50000, 20, 1234 * 5, Array.fill(1)(RpcAddress("1.2.3.4", 1234)),
129 val rpcEnv = RpcEnv.create("test", "localhost", 12345, conf, new SecurityManager(conf))
130 val worker = new Worker(rpcEnv, 50000, 20, 1234 * 5, Array.fill(1)(RpcAddress("1.2.3.4", 1234)),
155 val rpcEnv = RpcEnv.create("test", "localhost", 12345, conf, new SecurityManager(conf))
156 val worker = new Worker(rpcEnv, 50000, 20, 1234 * 5, Array.fill(1)(RpcAddress("1.2.3.4", 1234)),
/dports/devel/spark/spark-2.1.1/core/src/test/scala/org/apache/spark/rpc/
RpcEnvSuite.scala
74 override val rpcEnv = env
90 override val rpcEnv = env
113 override val rpcEnv = env
128 override val rpcEnv = env
141 override val rpcEnv = env
163 override val rpcEnv = env
199 override val rpcEnv = env
223 override val rpcEnv = env
246 override val rpcEnv = env
271 override val rpcEnv = env
[all …]
/dports/devel/spark/spark-2.1.1/core/src/main/scala/org/apache/spark/
SparkEnv.scala
58 private[spark] val rpcEnv: RpcEnv,
93 rpcEnv.shutdown()
94 rpcEnv.awaitTermination()
252 conf.set("spark.driver.port", rpcEnv.address.port.toString)
253 } else if (rpcEnv.address != null) {
254 conf.set("spark.executor.port", rpcEnv.address.port.toString)
297 rpcEnv.setupEndpoint(name, endpointCreator)
299 RpcUtils.makeDriverRef(name, conf, rpcEnv)
345 new BlockManagerMasterEndpoint(rpcEnv, isLocal, conf, listenerBus)),
372 new OutputCommitCoordinatorEndpoint(rpcEnv, outputCommitCoordinator))
[all …]
/dports/devel/spark/spark-2.1.1/core/src/main/scala/org/apache/spark/rpc/
RpcEndpoint.scala
51 val rpcEnv: RpcEnv
61 require(rpcEnv != null, "rpcEnv has not been initialized")
62 rpcEnv.endpointRef(this)
132 rpcEnv.stop(_self)
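
Taken together, lines 51-132 define the endpoint contract: a concrete endpoint supplies the abstract rpcEnv member, and self (hence stop) is only usable once the endpoint has been registered, which is why line 61 requires rpcEnv to be initialized. A minimal sketch of an implementation, assuming Spark 2.1.1's private[spark] rpc API (so the file must sit under org.apache.spark); EchoEndpoint is a hypothetical name, not a Spark class:

    package org.apache.spark.rpc

    // Hypothetical endpoint for illustration; supplies the abstract `rpcEnv`
    // member required by the RpcEndpoint trait (line 51 above).
    class EchoEndpoint(override val rpcEnv: RpcEnv) extends RpcEndpoint {
      // Handles one-way messages delivered via RpcEndpointRef.send.
      override def receive: PartialFunction[Any, Unit] = {
        case msg: String => println(s"got: $msg")
      }
      // Handles ask-style messages; the reply goes back through the context.
      override def receiveAndReply(context: RpcCallContext): PartialFunction[Any, Unit] = {
        case msg: String => context.reply(s"echo: $msg")
      }
    }
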
/dports/devel/spark/spark-2.1.1/core/src/main/scala/org/apache/spark/deploy/worker/
DriverWrapper.scala
41 val rpcEnv = RpcEnv.create("Driver",
43 rpcEnv.setupEndpoint("workerWatcher", new WorkerWatcher(rpcEnv, workerUrl))
60 rpcEnv.shutdown()
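
DriverWrapper shows the full lifecycle in one place: create an RpcEnv, register an endpoint, and tear the env down when finished. A runnable sketch of the same sequence, assuming the 2.1.1 private[spark] API (hence the org.apache.spark.rpc package); the names "lifecycle-demo" and "echo" are illustrative only. Port 0 lets the transport pick any free port, as the storage suites below do:

    package org.apache.spark.rpc

    import org.apache.spark.{SecurityManager, SparkConf}

    object RpcEnvLifecycleDemo {
      def main(args: Array[String]): Unit = {
        val conf = new SparkConf()
        val env = RpcEnv.create("lifecycle-demo", "localhost", 0, conf, new SecurityManager(conf))
        try {
          val ref = env.setupEndpoint("echo", new RpcEndpoint {
            override val rpcEnv: RpcEnv = env
            override def receive: PartialFunction[Any, Unit] = {
              case msg: String => println(msg)
            }
          })
          ref.send("hello")       // fire-and-forget, handled by receive above
        } finally {
          env.shutdown()          // stop dispatching new messages
          env.awaitTermination()  // block until the env has fully stopped
        }
      }
    }
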
Worker.scala
44 override val rpcEnv: RpcEnv,
55 private val host = rpcEnv.address.host
56 private val port = rpcEnv.address.port
105 private val workerUri = RpcEndpointAddress(rpcEnv.address, endpointName).toString
218 val masterEndpoint = rpcEnv.setupEndpointRef(masterAddress, Master.ENDPOINT_NAME)
274 val masterEndpoint = rpcEnv.setupEndpointRef(masterAddress, Master.ENDPOINT_NAME)
696 val rpcEnv = startRpcEnvAndEndpoint(args.host, args.port, args.webUiPort, args.cores,
698 rpcEnv.awaitTermination()
715 val rpcEnv = RpcEnv.create(systemName, host, port, conf, securityMgr)
717 rpcEnv.setupEndpoint(ENDPOINT_NAME, new Worker(rpcEnv, webUiPort, cores, memory,
[all …]
WorkerWatcher.scala
29 override val rpcEnv: RpcEnv, workerUrl: String, isTesting: Boolean = false)
34 rpcEnv.asyncSetupEndpointRefByURI(workerUrl)
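
Unlike most callers, WorkerWatcher resolves its peer by URI rather than by RpcAddress. A sketch of the same lookup, assuming an already-created rpcEnv and a Worker endpoint listening at the hypothetical address below (the URI renders as spark://Worker@localhost:12345):

    package org.apache.spark.rpc

    import scala.concurrent.Await
    import scala.concurrent.duration._

    object WorkerUriLookup {
      def lookup(rpcEnv: RpcEnv): RpcEndpointRef = {
        // Hypothetical worker location, for illustration only.
        val workerUri = RpcEndpointAddress("localhost", 12345, "Worker").toString
        Await.result(rpcEnv.asyncSetupEndpointRefByURI(workerUri), 10.seconds)
      }
    }
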
/dports/devel/spark/spark-2.1.1/yarn/src/main/scala/org/apache/spark/scheduler/cluster/
YarnSchedulerBackend.scala
41 extends CoarseGrainedSchedulerBackend(scheduler, sc.env.rpcEnv) {
52 private val yarnSchedulerEndpoint = new YarnSchedulerEndpoint(rpcEnv)
54 private val yarnSchedulerEndpointRef = rpcEnv.setupEndpoint(
167 new YarnDriverEndpoint(rpcEnv, properties)
185 private class YarnDriverEndpoint(rpcEnv: RpcEnv, sparkProperties: Seq[(String, String)])
186 extends DriverEndpoint(rpcEnv, sparkProperties) {
211 private class YarnSchedulerEndpoint(override val rpcEnv: RpcEnv)
/dports/devel/spark/spark-2.1.1/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/state/
StateStoreCoordinator.scala
57 val coordinator = new StateStoreCoordinator(env.rpcEnv)
58 val coordinatorRef = env.rpcEnv.setupEndpoint(endpointName, coordinator)
63 val rpcEndpointRef = RpcUtils.makeDriverRef(endpointName, env.conf, env.rpcEnv)
70 val rpcEndpointRef = RpcUtils.makeDriverRef(endpointName, env.conf, env.rpcEnv)
114 private class StateStoreCoordinator(override val rpcEnv: RpcEnv)
/dports/devel/spark/spark-2.1.1/core/src/main/scala/org/apache/spark/deploy/
Client.scala
41 override val rpcEnv: RpcEnv,
229 val rpcEnv =
233 map(rpcEnv.setupEndpointRef(_, Master.ENDPOINT_NAME))
234 rpcEnv.setupEndpoint("client", new ClientEndpoint(rpcEnv, driverArgs, masterEndpoints, conf))
236 rpcEnv.awaitTermination()
LocalSparkCluster.scala
58 val (rpcEnv, webUiPort, _) = Master.startRpcEnvAndEndpoint(localHostname, 0, 0, _conf)
60 masterRpcEnvs += rpcEnv
61 val masterUrl = "spark://" + Utils.localHostNameForURI() + ":" + rpcEnv.address.port
/dports/devel/spark/spark-2.1.1/core/src/test/scala/org/apache/spark/storage/
BlockManagerReplicationSuite.scala
47 private var rpcEnv: RpcEnv = null
73 val store = new BlockManager(name, rpcEnv, master, serializerManager, conf,
82 rpcEnv = RpcEnv.create("test", "localhost", 0, conf, securityMgr)
85 conf.set("spark.driver.port", rpcEnv.address.port.toString)
98 master = new BlockManagerMaster(rpcEnv.setupEndpoint("blockmanager",
99 new BlockManagerMasterEndpoint(rpcEnv, true, conf,
107 rpcEnv.shutdown()
108 rpcEnv.awaitTermination()
109 rpcEnv = null
284 val failableStore = new BlockManager("failable-store", rpcEnv, master, serializerManager, conf,
/dports/devel/spark/spark-2.1.1/core/src/main/scala/org/apache/spark/scheduler/local/
LocalSchedulerBackend.scala
47 override val rpcEnv: RpcEnv,
125 val rpcEnv = SparkEnv.get.rpcEnv
126 val executorEndpoint = new LocalEndpoint(rpcEnv, userClassPath, scheduler, this, totalCores)
127 localEndpoint = rpcEnv.setupEndpoint("LocalSchedulerBackendEndpoint", executorEndpoint)
/dports/devel/spark/spark-2.1.1/core/src/main/scala/org/apache/spark/executor/
CoarseGrainedExecutorBackend.scala
40 override val rpcEnv: RpcEnv,
59 rpcEnv.asyncSetupEndpointRefByURI(driverUrl).flatMap { ref =>
226 env.rpcEnv.setupEndpoint("Executor", new CoarseGrainedExecutorBackend(
227 env.rpcEnv, driverUrl, executorId, hostname, cores, userClassPath, env))
229 env.rpcEnv.setupEndpoint("WorkerWatcher", new WorkerWatcher(env.rpcEnv, url))
231 env.rpcEnv.awaitTermination()
/dports/devel/spark/spark-2.1.1/core/src/test/scala/org/apache/spark/deploy/master/
MasterSuite.scala
46 _master.rpcEnv.shutdown()
47 _master.rpcEnv.awaitTermination()
110 val (rpcEnv, _, _) =
114 rpcEnv.setupEndpointRef(rpcEnv.address, Master.ENDPOINT_NAME)
123 val (apps, drivers, workers) = persistenceEngine.readPersistedData(rpcEnv)
130 rpcEnv.shutdown()
131 rpcEnv.awaitTermination()
/dports/devel/spark/spark-2.1.1/streaming/src/test/scala/org/apache/spark/streaming/
ReceivedBlockHandlerSuite.scala
82 var rpcEnv: RpcEnv = null
89 rpcEnv = RpcEnv.create("test", "localhost", 0, conf, securityMgr)
90 conf.set("spark.driver.port", rpcEnv.address.port.toString)
93 blockManagerMaster = new BlockManagerMaster(rpcEnv.setupEndpoint("blockmanager",
94 new BlockManagerMasterEndpoint(rpcEnv, true, conf,
116 rpcEnv.shutdown()
117 rpcEnv.awaitTermination()
118 rpcEnv = null
290 val blockManager = new BlockManager(name, rpcEnv, blockManagerMaster, serializerManager, conf,
/dports/devel/spark/spark-2.1.1/streaming/src/main/scala/org/apache/spark/streaming/receiver/
ReceiverSupervisorImpl.scala
72 private val trackerEndpoint = RpcUtils.makeDriverRef("ReceiverTracker", env.conf, env.rpcEnv)
75 private val endpoint = env.rpcEnv.setupEndpoint(
77 override val rpcEnv: RpcEnv = env.rpcEnv
179 env.rpcEnv.stop(endpoint)
/dports/devel/spark/spark-2.1.1/core/src/test/scala/org/apache/spark/deploy/client/
AppClientSuite.scala
164 val rpcEnv = workerRpcEnvs(i)
165 val worker = new Worker(rpcEnv, 0, cores, memory, Array(masterRpcEnv.address),
167 rpcEnv.setupEndpoint(Worker.ENDPOINT_NAME, worker)
221 val rpcEnv = RpcEnv.create("spark", Utils.localHostName(), 0, conf, securityManager)
226 val client = new StandaloneAppClient(rpcEnv, Array(masterUrl), desc, listener, new SparkConf)
/dports/devel/spark/spark-2.1.1/yarn/src/main/scala/org/apache/spark/deploy/yarn/
ApplicationMaster.scala
111 private var rpcEnv: RpcEnv = null
384 val driverEndpoint = rpcEnv.setupEndpointRef(
388 rpcEnv.setupEndpoint("YarnAM", new AMEndpoint(rpcEnv, driverEndpoint, isClusterMode))
404 rpcEnv = sc.env.rpcEnv
409 registerAM(sc.getConf, rpcEnv, driverRef, sc.ui.map(_.appUIAddress).getOrElse(""),
432 rpcEnv = RpcEnv.create("sparkYarnAM", Utils.localHostName, port, sparkConf, securityMgr,
436 registerAM(sparkConf, rpcEnv, driverRef, sparkConf.get("spark.driver.appUIAddress", ""),
680 override val rpcEnv: RpcEnv, driver: RpcEndpointRef, isClusterMode: Boolean)
/dports/devel/spark/spark-2.1.1/repl/src/test/scala/org/apache/spark/repl/
ExecutorClassLoaderSuite.scala
146 val rpcEnv = mock[RpcEnv]
147 when(env.rpcEnv).thenReturn(rpcEnv)
148 when(rpcEnv.openChannel(anyString())).thenAnswer(new Answer[ReadableByteChannel]() {
/dports/devel/spark/spark-2.1.1/core/src/main/scala/org/apache/spark/deploy/client/
StandaloneAppClient.scala
45 rpcEnv: RpcEnv,
61 private class ClientEndpoint(override val rpcEnv: RpcEnv) extends ThreadSafeRpcEndpoint
106 val masterRef = rpcEnv.setupEndpointRef(masterAddress, Master.ENDPOINT_NAME)
272 endpoint.set(rpcEnv.setupEndpoint("AppClient", new ClientEndpoint(rpcEnv)))
/dports/devel/spark/spark-2.1.1/core/src/test/scala/org/apache/spark/deploy/master/ui/
MasterWebUISuite.scala
41 val rpcEnv = mock(classOf[RpcEnv])
46 when(master.rpcEnv).thenReturn(rpcEnv)
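
Both mock-based suites stub the rpcEnv accessor so the code under test never opens a real transport. A sketch of that pattern with plain Mockito, as MasterWebUISuite uses it (ExecutorClassLoaderSuite reaches the same effect through ScalaTest's MockitoSugar mock[RpcEnv]); the object name is hypothetical, and the package declaration is needed because rpcEnv is private[spark]:

    package org.apache.spark

    import org.mockito.Mockito.{mock, when}

    import org.apache.spark.rpc.RpcEnv

    object RpcEnvMocking {
      val env = mock(classOf[SparkEnv])
      val rpcEnv = mock(classOf[RpcEnv])
      // Any code asking env for its RpcEnv now receives the mock.
      when(env.rpcEnv).thenReturn(rpcEnv)
    }
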
/dports/devel/spark/spark-2.1.1/core/src/main/scala/org/apache/spark/util/
RpcUtils.scala
28 def makeDriverRef(name: String, conf: SparkConf, rpcEnv: RpcEnv): RpcEndpointRef = {
32 rpcEnv.setupEndpointRef(RpcAddress(driverHost, driverPort), name)
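
makeDriverRef resolves spark.driver.host and spark.driver.port from the conf, which is why BlockManagerReplicationSuite and ReceivedBlockHandlerSuite above write the freshly bound port back into the conf before calling it. A sketch of that round trip, assuming a driver-side env and a hypothetical "blockmanager" endpoint already registered on it:

    package org.apache.spark

    import org.apache.spark.rpc.RpcEnv
    import org.apache.spark.util.RpcUtils

    object DriverRefLookup {
      def driverRef(conf: SparkConf, driverEnv: RpcEnv) = {
        // Publish the driver's actual bind address so makeDriverRef can find it.
        conf.set("spark.driver.host", driverEnv.address.host)
        conf.set("spark.driver.port", driverEnv.address.port.toString)
        RpcUtils.makeDriverRef("blockmanager", conf, driverEnv)
      }
    }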
