Home
last modified time | relevance | path

Searched refs: sparkProperties (Results 1 – 18 of 18) sorted by relevance

/dports/devel/spark/spark-2.1.1/core/src/test/scala/org/apache/spark/deploy/rest/
H A DSubmitRestProtocolSuite.scala91 message.sparkProperties = conf.getAll.toMap
104 message.sparkProperties = conf.getAll.toMap
110 message.sparkProperties = badConf.getAll.toMap
113 message.sparkProperties = badConf.getAll.toMap
116 message.sparkProperties = badConf.getAll.toMap
118 message.sparkProperties = conf.getAll.toMap
126 assert(newMessage.sparkProperties("spark.app.name") === "SparkPie")
128 assert(newMessage.sparkProperties("spark.files") === "fireball.png")
130 assert(newMessage.sparkProperties("spark.driver.cores") === "180")
137 assert(newMessage.sparkProperties("spark.cores.max") === "10000")
[all …]
H A DStandaloneRestSubmitSuite.scala56 val sparkProperties = Map("spark.app.name" -> "pi") constant
59 "my-app-resource", "my-main-class", appArgs, sparkProperties, environmentVariables)
65 assert(request.sparkProperties === sparkProperties)
76 assert(request.sparkProperties("spark.master") === masterUrl)
448 val (_, _, sparkProperties, _) = SparkSubmit.prepareSubmitEnvironment(args)
450 mainJar, mainClass, appArgs, sparkProperties.toMap, Map.empty)
/dports/devel/spark/spark-2.1.1/core/src/main/scala/org/apache/spark/deploy/
H A DSparkSubmitArguments.scala121 if (!sparkProperties.contains(k)) {
122 sparkProperties(k) = v
131 sparkProperties.foreach { case (k, v) =>
133 sparkProperties -= k
144 .orElse(sparkProperties.get("spark.master"))
157 .orElse(sparkProperties.get("spark.driver.memory"))
161 .orElse(sparkProperties.get("spark.driver.cores"))
164 .orElse(sparkProperties.get("spark.executor.memory"))
168 .orElse(sparkProperties.get("spark.executor.cores"))
172 .orElse(sparkProperties.get("spark.cores.max"))
[all …]
H A DSparkSubmit.scala615 for ((k, v) <- args.sparkProperties) {
/dports/devel/spark/spark-2.1.1/mesos/src/main/scala/org/apache/spark/deploy/rest/mesos/
H A DMesosRestServer.scala87 val sparkProperties = request.sparkProperties constant
88 val driverExtraJavaOptions = sparkProperties.get("spark.driver.extraJavaOptions")
89 val driverExtraClassPath = sparkProperties.get("spark.driver.extraClassPath")
90 val driverExtraLibraryPath = sparkProperties.get("spark.driver.extraLibraryPath")
91 val superviseDriver = sparkProperties.get("spark.driver.supervise")
92 val driverMemory = sparkProperties.get("spark.driver.memory")
93 val driverCores = sparkProperties.get("spark.driver.cores")
96 val name = request.sparkProperties.getOrElse("spark.app.name", mainClass)
99 val conf = new SparkConf(false).setAll(sparkProperties)
115 command, request.sparkProperties, submissionId, submitDate)
/dports/devel/spark/spark-2.1.1/core/src/main/scala/org/apache/spark/deploy/rest/
H A DStandaloneRestServer.scala134 val sparkProperties = request.sparkProperties constant
135 val driverMemory = sparkProperties.get("spark.driver.memory")
136 val driverCores = sparkProperties.get("spark.driver.cores")
137 val driverExtraJavaOptions = sparkProperties.get("spark.driver.extraJavaOptions")
138 val driverExtraClassPath = sparkProperties.get("spark.driver.extraClassPath")
139 val driverExtraLibraryPath = sparkProperties.get("spark.driver.extraLibraryPath")
140 val superviseDriver = sparkProperties.get("spark.driver.supervise")
146 .setAll(sparkProperties)
H A DSubmitRestProtocolRequest.scala42 var sparkProperties: Map[String, String] = null variable
47 assert(sparkProperties != null, "No Spark properties set!")
58 assertFieldIsSet(sparkProperties.getOrElse(key, null), key)
71 sparkProperties.get(key).foreach { value =>
H A DRestSubmissionClient.scala176 sparkProperties: Map[String, String],
183 message.sparkProperties = sparkProperties
413 val sparkProperties = conf.getAll.toMap constant
416 appResource, mainClass, appArgs, sparkProperties, env)
/dports/biology/gatk/gatk-4.2.0.0/src/test/java/org/broadinstitute/hellbender/engine/spark/
H A DSparkCommandLineArgumentCollectionTest.java25 sparkArgumentCollection.sparkProperties.addAll(Arrays.asList(prop1 + '=' + value1, prop2 + '=' + value2)); in testGetSparkProperties()
26 final Map<String, String> sparkProperties = sparkArgumentCollection.getSparkProperties(); in testGetSparkProperties() local
27 Assert.assertEquals(sparkProperties.size(),2); in testGetSparkProperties()
28 Assert.assertEquals(sparkProperties.get(prop1), value1); in testGetSparkProperties()
29 Assert.assertEquals(sparkProperties.get(prop2), value2); in testGetSparkProperties()
46 sparkArgumentCollection.sparkProperties.add(property); in testBadProperties()
/dports/devel/spark/spark-2.1.1/core/src/main/scala/org/apache/spark/ui/env/
H A DEnvironmentTab.scala36 var sparkProperties = Seq[(String, String)]() variable
44 sparkProperties = environmentDetails("Spark Properties")
H A DEnvironmentPage.scala39 propertyHeader, propertyRow, listener.sparkProperties.map(removePass), fixedWidth = true)
/dports/biology/gatk/gatk-4.2.0.0/src/main/java/org/broadinstitute/hellbender/engine/spark/
H A DSparkCommandLineArgumentCollection.java38 final List<String> sparkProperties = new ArrayList<>(); field in SparkCommandLineArgumentCollection
48 for( String property: sparkProperties) { in getSparkProperties()
/dports/devel/spark/spark-2.1.1/yarn/src/main/scala/org/apache/spark/scheduler/cluster/
H A DYarnSchedulerBackend.scala185 private class YarnDriverEndpoint(rpcEnv: RpcEnv, sparkProperties: Seq[(String, String)])
186 extends DriverEndpoint(rpcEnv, sparkProperties) {
/dports/devel/spark/spark-2.1.1/core/src/main/scala/org/apache/spark/scheduler/cluster/
H A DCoarseGrainedClusterMessage.scala34 sparkProperties: Seq[(String, String)],
H A DCoarseGrainedSchedulerBackend.scala95 class DriverEndpoint(override val rpcEnv: RpcEnv, sparkProperties: Seq[(String, String)])
210 val reply = SparkAppConfig(sparkProperties,
/dports/devel/spark/spark-2.1.1/core/src/main/scala/org/apache/spark/
H A DSparkEnv.scala429 val sparkProperties = (conf.getAll ++ schedulerMode).sorted constant
447 "Spark Properties" -> sparkProperties,
/dports/devel/spark/spark-2.1.1/core/src/main/scala/org/apache/spark/executor/
H A DCoarseGrainedExecutorBackend.scala204 val props = cfg.sparkProperties ++ Seq[(String, String)](("spark.app.id", appId))
/dports/devel/spark/spark-2.1.1/core/src/main/scala/org/apache/spark/util/
H A DJsonProtocol.scala170 val sparkProperties = mapToJson(environmentDetails("Spark Properties").toMap) constant
175 ("Spark Properties" -> sparkProperties) ~