/dports/devel/spark/spark-2.1.1/sql/core/src/test/scala/org/apache/spark/sql/
SQLContextSuite.scala
  35: val sqlContext = SQLContext.getOrCreate(sc)    constant
  42: val sqlContext = SQLContext.getOrCreate(sc)    constant
  43: val newSession = sqlContext.newSession()
  53: val session1 = sqlContext.newSession()
  54: val session2 = sqlContext.newSession()
  87: val df = sqlContext.range(10)
  104: val df = sqlContext.range(10)
  121: val df = sqlContext.range(10)
  129: Seq(sqlContext.tables(), sqlContext.sql("SHOW TABLes")).foreach {
  135: sqlContext.sql(
  [all …]
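The hits above all follow one pattern: fetch the per-SparkContext singleton with SQLContext.getOrCreate, then fork isolated sessions from it. A minimal sketch, assuming a local SparkContext; names are illustrative:

```scala
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SQLContext

val sc = new SparkContext(new SparkConf().setMaster("local[2]").setAppName("sqlcontext-demo"))

// Reuses the existing SQLContext for this SparkContext, or creates one if none exists yet.
val sqlContext = SQLContext.getOrCreate(sc)

// A new session shares the SparkContext and cached data but has its own conf and temp views.
val newSession = sqlContext.newSession()

val df = sqlContext.range(10)        // single "id" column, values 0..9
sqlContext.sql("SHOW TABLES").show()
```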
|
/dports/devel/spark/spark-2.1.1/sql/hive/src/test/java/org/apache/spark/sql/hive/ |
JavaMetastoreDataSourcesSuite.java
  50: private transient SQLContext sqlContext;    field in JavaMetastoreDataSourcesSuite
  66: sqlContext = TestHive$.MODULE$;    in setUp()
  67: sc = new JavaSparkContext(sqlContext.sparkContext());    in setUp()
  85: df = sqlContext.read().json(rdd);    in setUp()
  92: if (sqlContext != null) {    in tearDown()
  94: sqlContext.sql("DROP TABLE IF EXISTS externalTable");    in tearDown()
  109: sqlContext.sql("SELECT * FROM javaSavedTable"),    in saveExternalTableAndQueryIt()
  117: sqlContext.sql("SELECT * FROM externalTable"),    in saveExternalTableAndQueryIt()
  132: sqlContext.sql("SELECT * FROM javaSavedTable"),    in saveExternalTableWithSchemaAndQueryIt()
  145: sqlContext.sql("SELECT * FROM externalTable"),    in saveExternalTableWithSchemaAndQueryIt()
  [all …]
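In Scala, the save-then-query round trip this Java suite runs looks roughly like the sketch below; it assumes a Hive-enabled sqlContext is already in scope, and the table name is illustrative:

```scala
// Write a small DataFrame into the metastore, read it back with SQL, then clean up.
val df = sqlContext.range(3).toDF("id")
df.write.mode("overwrite").saveAsTable("javaSavedTable")
sqlContext.sql("SELECT * FROM javaSavedTable").show()
sqlContext.sql("DROP TABLE IF EXISTS javaSavedTable")
```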
|
/dports/devel/spark/spark-2.1.1/sql/core/src/test/scala/org/apache/spark/sql/execution/streaming/state/ |
StateStoreRDDSuite.scala
  63: spark.sqlContext, path, opId, storeVersion = 0, keySchema, valueSchema)(
  69: spark.sqlContext, path, opId, storeVersion = 1, keySchema, valueSchema)(increment)
  85: implicit val sqlContext = spark.sqlContext    constant
  87: sqlContext, path, opId, storeVersion, keySchema, valueSchema)(increment)
  105: implicit val sqlContext = spark.sqlContext    constant
  153: implicit val sqlContext = spark.sqlContext    constant
  154: val coordinatorRef = sqlContext.streams.stateStoreCoordinator
  163: sqlContext, path, opId, storeVersion = 0, keySchema, valueSchema)(increment)
  186: implicit val sqlContext = spark.sqlContext    constant
  190: sqlContext, path, opId, storeVersion = 0, keySchema, valueSchema)(increment)
  [all …]
|
/dports/devel/spark/spark-2.1.1/sql/core/src/main/scala/org/apache/spark/sql/sources/ |
interfaces.scala
  76: def createRelation(sqlContext: SQLContext, parameters: Map[String, String]): BaseRelation
  108: sqlContext: SQLContext,
  128: sqlContext: SQLContext,
  137: sqlContext: SQLContext,
  154: sqlContext: SQLContext,
  180: sqlContext: SQLContext,
  200: def sqlContext: SQLContext    method
  215: def sizeInBytes: Long = sqlContext.conf.defaultSizeInBytes
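These are the extension points of the data source API: a RelationProvider builds a BaseRelation from the SQLContext plus the user's options map, and every relation must expose the SQLContext it was created with (sizeInBytes falls back to sqlContext.conf.defaultSizeInBytes unless overridden). A minimal sketch with hypothetical names (RangeProvider, RangeRelation); not a class that exists in Spark:

```scala
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{Row, SQLContext}
import org.apache.spark.sql.sources.{BaseRelation, RelationProvider, TableScan}
import org.apache.spark.sql.types.{IntegerType, StructField, StructType}

// Hypothetical provider: createRelation receives the SQLContext and the options map.
class RangeProvider extends RelationProvider {
  override def createRelation(
      sqlContext: SQLContext,
      parameters: Map[String, String]): BaseRelation = new RangeRelation(sqlContext)
}

// The relation exposes the SQLContext it was built with and produces rows via TableScan.
class RangeRelation(val sqlContext: SQLContext) extends BaseRelation with TableScan {
  override def schema: StructType = StructType(StructField("i", IntegerType) :: Nil)
  override def buildScan(): RDD[Row] =
    sqlContext.sparkContext.parallelize(0 until 10).map(Row(_))
}
```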
|
/dports/devel/spark/spark-2.1.1/sql/core/src/test/scala/org/apache/spark/sql/execution/streaming/ |
TextSocketStreamSuite.scala
  37: sqlContext.streams.active.foreach(_.stop())
  58: val schema = provider.sourceSchema(sqlContext, None, "", parameters)._2
  61: source = provider.createSource(sqlContext, "", None, "", parameters)
  96: val schema = provider.sourceSchema(sqlContext, None, "", parameters)._2
  100: source = provider.createSource(sqlContext, "", None, "", parameters)
  133: provider.sourceSchema(sqlContext, None, "", Map())
  136: provider.sourceSchema(sqlContext, None, "", Map("host" -> "localhost"))
  139: provider.sourceSchema(sqlContext, None, "", Map("port" -> "1234"))
  146: provider.sourceSchema(sqlContext, None, "", Map("host" -> "localhost",
  155: source = provider.createSource(sqlContext, "", None, "", parameters)
  [all …]
|
/dports/devel/spark/spark-2.1.1/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/ |
HiveThriftServer2.scala
  57: def startWithContext(sqlContext: SQLContext): Unit = {
  58: val server = new HiveThriftServer2(sqlContext)
  61: sqlContext.sparkContext.conf,
  62: sqlContext.sessionState.newHadoopConf())
  66: listener = new HiveThriftServer2Listener(server, sqlContext.conf)
  67: sqlContext.sparkContext.addSparkListener(listener)
  69: Some(new ThriftServerTab(sqlContext.sparkContext))
  89: SparkSQLEnv.sqlContext.sparkContext.conf,
  90: SparkSQLEnv.sqlContext.sessionState.newHadoopConf())
  93: val server = new HiveThriftServer2(SparkSQLEnv.sqlContext)
  [all …]
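startWithContext is the public entry point for embedding the Thrift/JDBC server in an existing application rather than launching it standalone. A hedged sketch, assuming a Hive-enabled SparkSession; the view name is illustrative:

```scala
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.hive.thriftserver.HiveThriftServer2

val spark = SparkSession.builder()
  .appName("embedded-thriftserver")
  .enableHiveSupport()
  .getOrCreate()

// Anything registered on this context becomes visible to JDBC/ODBC clients of the server.
spark.range(10).createOrReplaceTempView("ten_rows")
HiveThriftServer2.startWithContext(spark.sqlContext)
```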
|
SparkSQLEnv.scala
  32: var sqlContext: SQLContext = _    variable
  36: if (sqlContext == null) {
  49: sqlContext = sparkSession.sqlContext
  66: sqlContext = null
|
SparkExecuteStatementOperation.scala
  48: (sqlContext: SQLContext, sessionToActivePool: JMap[SessionHandle, String])
  74: sqlContext.sparkContext.clearJobGroup()
  119: iter = if (sqlContext.getConf(SQLConf.THRIFTSERVER_INCREMENTAL_COLLECT.key).toBoolean) {
  216: val executionHiveClassLoader = sqlContext.sharedState.jarClassLoader
  225: sqlContext.sparkContext.setJobGroup(statementId, statement)
  228: sqlContext.sparkContext.setLocalProperty("spark.scheduler.pool", pool)
  231: result = sqlContext.sql(statement)
  241: if (sqlContext.getConf(SQLConf.THRIFTSERVER_INCREMENTAL_COLLECT.key).toBoolean) {
  275: sqlContext.sparkContext.cancelJobGroup(statementId)
|
SparkSQLSessionManager.scala
  36: private[hive] class SparkSQLSessionManager(hiveServer: HiveServer2, sqlContext: SQLContext)
  75: val sessionState = sqlContext.sessionState.asInstanceOf[HiveSessionState]
  77: sqlContext
  79: sqlContext.newSession()
|
/dports/devel/spark/spark-2.1.1/sql/core/src/test/scala/org/apache/spark/sql/sources/ |
PathOptionSuite.scala
  30: sqlContext: SQLContext,
  33: new TestOptionsRelation(parameters)(sqlContext.sparkSession)
  38: sqlContext: SQLContext,
  42: new TestOptionsRelation(parameters)(sqlContext.sparkSession)
  49: override def sqlContext: SQLContext = session.sqlContext
|
TableScanSuite.scala
  32: sqlContext: SQLContext,
  34: SimpleScan(parameters("from").toInt, parameters("TO").toInt)(sqlContext.sparkSession)
  41: override def sqlContext: SQLContext = sparkSession.sqlContext
  53: sqlContext: SQLContext,
  62: parameters("TO").toInt, schema)(sqlContext.sparkSession)
  73: override def sqlContext: SQLContext = sparkSession.sqlContext
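Providers like these are resolved by class name from DDL, and the OPTIONS clause becomes the parameters map handed to createRelation. A hedged sketch of how the suite's SimpleScanSource would be wired up; the view name and option values are illustrative:

```scala
// The provider class is looked up by its fully qualified name; the option keys mirror
// the ones the test relation reads (case-insensitively, hence "From"/"To").
sqlContext.sql(
  """CREATE TEMPORARY VIEW oneToTen
    |USING org.apache.spark.sql.sources.SimpleScanSource
    |OPTIONS (From '1', To '10')""".stripMargin)

sqlContext.sql("SELECT * FROM oneToTen").show()
```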
|
DDLTestSuite.scala
  29: sqlContext: SQLContext,
  34: parameters("Table"))(sqlContext.sparkSession)
  44: override def sqlContext: SQLContext = sparkSession.sqlContext
|
/dports/devel/spark/spark-2.1.1/sql/core/src/test/scala/org/apache/spark/sql/execution/joins/ |
ExistenceJoinSuite.scala
  108: EnsureRequirements(left.sqlContext.sessionState.conf).apply(
  114: EnsureRequirements(left.sqlContext.sessionState.conf).apply(
  127: EnsureRequirements(left.sqlContext.sessionState.conf).apply(
  133: EnsureRequirements(left.sqlContext.sessionState.conf).apply(
  146: EnsureRequirements(left.sqlContext.sessionState.conf).apply(
  151: EnsureRequirements(left.sqlContext.sessionState.conf).apply(
  163: EnsureRequirements(left.sqlContext.sessionState.conf).apply(
  168: EnsureRequirements(left.sqlContext.sessionState.conf).apply(
  179: EnsureRequirements(left.sqlContext.sessionState.conf).apply(
  184: EnsureRequirements(left.sqlContext.sessionState.conf).apply(
|
/dports/devel/spark/spark-2.1.1/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/ |
DB2IntegrationSuite.scala
  66: val df = sqlContext.read.jdbc(jdbcUrl, "tbl", new Properties)
  76: val df = sqlContext.read.jdbc(jdbcUrl, "numbers", new Properties)
  97: val df = sqlContext.read.jdbc(jdbcUrl, "dates", new Properties)
  111: val df = sqlContext.read.jdbc(jdbcUrl, "strings", new Properties)
  128: val df2 = sqlContext.read.jdbc(jdbcUrl, "dates", new Properties)
  129: val df3 = sqlContext.read.jdbc(jdbcUrl, "strings", new Properties)
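All of these JDBC integration suites lean on the same reader call: read.jdbc(url, table, properties). A hedged sketch with placeholder URL, table, and credentials (the suites derive theirs from a Docker container):

```scala
import java.util.Properties

// Placeholder connection details; adjust to a reachable database.
val props = new Properties()
props.setProperty("user", "db2inst1")
props.setProperty("password", "rootpass")

val df = sqlContext.read.jdbc("jdbc:db2://localhost:50000/foo", "tbl", props)
df.printSchema()
df.show()
```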
|
MySQLIntegrationSuite.scala
  67: val df = sqlContext.read.jdbc(jdbcUrl, "tbl", new Properties)
  77: val df = sqlContext.read.jdbc(jdbcUrl, "numbers", new Properties)
  104: val df = sqlContext.read.jdbc(jdbcUrl, "dates", new Properties)
  122: val df = sqlContext.read.jdbc(jdbcUrl, "strings", new Properties)
  148: val df1 = sqlContext.read.jdbc(jdbcUrl, "numbers", new Properties)
  149: val df2 = sqlContext.read.jdbc(jdbcUrl, "dates", new Properties)
  150: val df3 = sqlContext.read.jdbc(jdbcUrl, "strings", new Properties)
|
PostgresIntegrationSuite.scala
  61: val df = sqlContext.read.jdbc(jdbcUrl, "bar", new Properties)
  110: val df = sqlContext.read.jdbc(jdbcUrl, "bar", new Properties)
  114: assert(sqlContext.read.jdbc(jdbcUrl, "public.barcopy", new Properties).schema(13).dataType ==
  123: sqlContext.createDataFrame(Seq((1.0f, 1.toShort)))
  125: val schema = sqlContext.read.jdbc(jdbcUrl, "shortfloat", new Properties).schema
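The Postgres suite also goes the other way: build a small DataFrame, push it out over JDBC, and read the resulting schema back. A hedged sketch; the URL is a placeholder for the one the suite builds from its container:

```scala
import java.util.Properties

val jdbcUrl = "jdbc:postgresql://localhost:5432/postgres?user=postgres"

// Two-column frame (float, short), written out and then inspected via the JDBC reader.
val toWrite = sqlContext.createDataFrame(Seq((1.0f, 1.toShort))).toDF("f", "s")
toWrite.write.jdbc(jdbcUrl, "shortfloat", new Properties)

val schema = sqlContext.read.jdbc(jdbcUrl, "shortfloat", new Properties).schema
println(schema)
```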
|
/dports/devel/spark/spark-2.1.1/python/pyspark/ml/ |
util.py
  79: def context(self, sqlContext):    argument
  113: def context(self, sqlContext):    argument
  119: self._jwrite.context(sqlContext._ssql_ctx)
  168: def context(self, sqlContext):    argument
  200: def context(self, sqlContext):    argument
  206: self._jread.context(sqlContext._ssql_ctx)
|
/dports/devel/spark/spark-2.1.1/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/server/ |
SparkSQLOperationManager.scala
  49: val sqlContext = sessionToContexts.get(parentSession.getSessionHandle)    constant
  50: require(sqlContext != null, s"Session handle: ${parentSession.getSessionHandle} has not been" +
  52: val sessionState = sqlContext.sessionState.asInstanceOf[HiveSessionState]
  55: runInBackground)(sqlContext, sessionToActivePool)
|
/dports/devel/spark/spark-2.1.1/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/ |
socket.scala
  48: class TextSocketSource(host: String, port: Int, includeTimestamp: Boolean, sqlContext: SQLContext)
  129: import sqlContext.implicits._
  130: val rawBatch = sqlContext.createDataset(rawList)
  184: sqlContext: SQLContext,
  206: sqlContext: SQLContext,
  213: new TextSocketSource(host, port, parseIncludeTimestamp(parameters), sqlContext)
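TextSocketSource is what backs the "socket" streaming format; host and port are required options and includeTimestamp is optional. A hedged sketch of the user-facing path (assumes something like `nc -lk 9999` is serving lines locally):

```scala
import org.apache.spark.sql.SparkSession

val spark = SparkSession.builder().master("local[2]").appName("socket-demo").getOrCreate()

// "host" and "port" are mandatory; "includeTimestamp" adds an arrival-time column.
val lines = spark.readStream
  .format("socket")
  .option("host", "localhost")
  .option("port", "9999")
  .option("includeTimestamp", "true")
  .load()

// Echo the stream to the console; stop with query.stop() when done.
val query = lines.writeStream.format("console").start()
```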
|
memory.scala
  40: def apply[A : Encoder](implicit sqlContext: SQLContext): MemoryStream[A] =
  41: new MemoryStream[A](memoryStreamId.getAndIncrement(), sqlContext)
  49: case class MemoryStream[A : Encoder](id: Int, sqlContext: SQLContext)
  75: Dataset(sqlContext.sparkSession, logicalPlan)
  79: Dataset.ofRows(sqlContext.sparkSession, logicalPlan)
  87: import sqlContext.implicits._
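MemoryStream.apply pulls the SQLContext from an implicit, and addData feeds batches that toDS() exposes as a streaming Dataset. A hedged sketch pairing it with the in-memory sink; the query and view names are illustrative:

```scala
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.execution.streaming.MemoryStream

val spark = SparkSession.builder().master("local[2]").appName("memstream-demo").getOrCreate()
import spark.implicits._                       // provides Encoder[Int]
implicit val sqlContext = spark.sqlContext     // picked up by MemoryStream.apply

val input = MemoryStream[Int]
input.addData(1, 2, 3)

// Drain the stream into the in-memory sink, then read it back as a table.
val query = input.toDS().writeStream
  .format("memory")
  .queryName("ints")
  .outputMode("append")
  .start()
query.processAllAvailable()
spark.table("ints").show()
```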
|
/dports/devel/spark/spark-2.1.1/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/ |
JdbcRelationProvider.scala
  30: sqlContext: SQLContext,
  45: JDBCRelation(parts, jdbcOptions)(sqlContext.sparkSession)
  49: sqlContext: SQLContext,
  96: createRelation(sqlContext, parameters)
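The parameters map handed to JdbcRelationProvider.createRelation is exactly what the reader collects from .option(...) calls, with url and dbtable as the required keys. A hedged sketch with placeholder connection details:

```scala
// Everything passed via option(...) lands in the provider's parameters map.
val df = sqlContext.read
  .format("jdbc")
  .option("url", "jdbc:postgresql://localhost:5432/postgres")
  .option("dbtable", "public.tbl")
  .option("user", "postgres")
  .option("password", "secret")
  .load()
```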
|
/dports/devel/spark/spark-2.1.1/mllib/src/main/scala/org/apache/spark/ml/util/ |
ReadWrite.scala
  50: def context(sqlContext: SQLContext): this.type = {
  51: optionSparkSession = Option(sqlContext.sparkSession)
  77: protected final def sqlContext: SQLContext = sparkSession.sqlContext    method
  134: override def context(sqlContext: SQLContext): this.type = super.session(sqlContext.sparkSession)
  192: override def context(sqlContext: SQLContext): this.type = super.session(sqlContext.sparkSession)
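The context(sqlContext) hook on MLWriter/MLReader simply forwards to session(sqlContext.sparkSession), as the overrides above show. A hedged sketch, assuming `model` is an already-fitted LogisticRegressionModel and `spark` is an active SparkSession; the path is illustrative:

```scala
import org.apache.spark.ml.classification.LogisticRegressionModel

// Save through the writer bound to this SQLContext, then read the model back the same way.
model.write.context(spark.sqlContext).overwrite().save("/tmp/lr-model")
val restored = LogisticRegressionModel.read.context(spark.sqlContext).load("/tmp/lr-model")
```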
|
/dports/devel/spark/spark-2.1.1/sql/core/src/main/scala/org/apache/spark/sql/execution/ |
SparkPlan.scala
  52: final val sqlContext = SparkSession.getActiveSession.map(_.sqlContext).orNull    constant
  54: protected def sparkContext = sqlContext.sparkContext
  59: val subexpressionEliminationEnabled: Boolean = if (sqlContext != null) {
  60: sqlContext.conf.subexpressionEliminationEnabled
  67: SparkSession.setActiveSession(sqlContext.sparkSession)
  321: val limitScaleUpFactor = Math.max(sqlContext.conf.limitScaleUpFactor, 2)
  332: val sc = sqlContext.sparkContext
|
/dports/devel/spark/spark-2.1.1/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/state/ |
package.scala
  33: sqlContext: SQLContext,
  47: sqlContext.sessionState,
  48: Some(sqlContext.streams.stateStoreCoordinator))(
|
/dports/devel/spark/spark-2.1.1/external/kafka-0-10-sql/src/main/scala/org/apache/spark/sql/kafka010/ |
KafkaSourceProvider.scala
  56: sqlContext: SQLContext,
  66: sqlContext: SQLContext,
  95: sqlContext,
  111: sqlContext: SQLContext,
  141: sqlContext,
  151: sqlContext: SQLContext,
  157: new KafkaSink(sqlContext,
  183: override def sqlContext: SQLContext = unsupportedException
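KafkaSourceProvider backs the "kafka" streaming format shipped in the spark-sql-kafka-0-10 module. A hedged sketch with placeholder broker and topic, assuming an active SparkSession `spark` and the Kafka connector jar on the classpath:

```scala
// Subscribe to one topic; key and value arrive as binary and are cast before use.
val kafka = spark.readStream
  .format("kafka")
  .option("kafka.bootstrap.servers", "broker1:9092")
  .option("subscribe", "events")
  .load()

val parsed = kafka.selectExpr("CAST(key AS STRING)", "CAST(value AS STRING)")
parsed.writeStream.format("console").start()
```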
|