Home
last modified time | relevance | path

Searched refs:classOf (Results 1 – 25 of 749) sorted by relevance

Pages: 1 2 3 4 5 6 7 8 9 10 >> ... 30

/dports/devel/spark/spark-2.1.1/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/
H A DParquetFilterSuite.scala133 checkFilterPredicate('_1 === 1, classOf[Eq[_]], 1)
138 checkFilterPredicate('_1 > 3, classOf[Gt[_]], 4)
140 checkFilterPredicate('_1 >= 4, classOf[GtEq[_]], 4)
159 checkFilterPredicate('_1 === 1, classOf[Eq[_]], 1)
164 checkFilterPredicate('_1 > 3, classOf[Gt[_]], 4)
166 checkFilterPredicate('_1 >= 4, classOf[GtEq[_]], 4)
185 checkFilterPredicate('_1 === 1, classOf[Eq[_]], 1)
190 checkFilterPredicate('_1 > 3, classOf[Gt[_]], 4)
192 checkFilterPredicate('_1 >= 4, classOf[GtEq[_]], 4)
211 checkFilterPredicate('_1 === 1, classOf[Eq[_]], 1)
[all …]
/dports/devel/spark/spark-2.1.1/sql/core/src/test/scala/org/apache/spark/sql/
H A DJoinSuite.scala71 classOf[SortMergeJoinExec]),
81 classOf[CartesianProductExec]),
89 classOf[SortMergeJoinExec]),
91 classOf[SortMergeJoinExec]),
94 classOf[SortMergeJoinExec]),
96 classOf[SortMergeJoinExec]),
98 classOf[SortMergeJoinExec]),
126 classOf[BroadcastHashJoinExec])
141 classOf[BroadcastHashJoinExec])
469 classOf[SortMergeJoinExec]),
[all …]
H A DUDTRegistrationSuite.scala42 override def userClass: Class[TestUserClass] = classOf[TestUserClass]
46 override def hashCode(): Int = classOf[TestUserClassUDT].getName.hashCode()
57 UDTRegistration.register(classOf[TestUserClass].getName,
60 UDTRegistration.getUDTFor(classOf[TestUserClass].getName)
78 UDTRegistration.register(classOf[TestUserClass2].getName, classOf[TestUserClassUDT].getName)
79 assert(UDTRegistration.exists(classOf[TestUserClass2].getName))
81 classOf[UserDefinedType[_]].isAssignableFrom((
82 UDTRegistration.getUDTFor(classOf[TestUserClass2].getName).get)))
86 assert(!UDTRegistration.exists(classOf[TestUserClass3].getName))
87 assert(!UDTRegistration.getUDTFor(classOf[TestUserClass3].getName).isDefined)
/dports/devel/spark/spark-2.1.1/graphx/src/test/scala/org/apache/spark/graphx/util/
H A DBytecodeUtilsSuite.scala30 assert(BytecodeUtils.invokedMethod(c1, classOf[TestClass], "foo"))
31 assert(BytecodeUtils.invokedMethod(c1, classOf[TestClass], "bar"))
32 assert(BytecodeUtils.invokedMethod(c1, classOf[TestClass], "baz"))
35 assert(BytecodeUtils.invokedMethod(c2, classOf[TestClass], "foo"))
36 assert(BytecodeUtils.invokedMethod(c2, classOf[TestClass], "bar"))
37 assert(!BytecodeUtils.invokedMethod(c2, classOf[TestClass], "baz"))
40 assert(BytecodeUtils.invokedMethod(c3, classOf[TestClass], "foo"))
41 assert(!BytecodeUtils.invokedMethod(c3, classOf[TestClass], "bar"))
48 assert(BytecodeUtils.invokedMethod(c2, classOf[TestClass], "foo"))
49 assert(BytecodeUtils.invokedMethod(c2, classOf[TestClass], "bar"))
[all …]
/dports/devel/spark/spark-2.1.1/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/
H A DJavaTypeInference.scala196 case c if c == classOf[java.lang.Short] =>
200 case c if c == classOf[java.lang.Long] =>
204 case c if c == classOf[java.lang.Byte] =>
211 case c if c == classOf[java.sql.Date] =>
267 ObjectType(classOf[Array[Any]]))
283 ObjectType(classOf[Array[Any]]))
292 ObjectType(classOf[Array[Any]]))
296 ObjectType(classOf[JMap[_, _]]),
350 classOf[GenericArrayData],
362 case c if c == classOf[String] =>
[all …]
/dports/misc/py-xgboost/xgboost-1.5.1/jvm-packages/xgboost4j/src/main/scala/ml/dmlc/xgboost4j/scala/
H A DBooster.scala40 @throws(classOf[XGBoostError])
51 @throws(classOf[XGBoostError])
62 @throws(classOf[XGBoostError])
72 @throws(classOf[XGBoostError])
83 @throws(classOf[XGBoostError])
93 @throws(classOf[XGBoostError])
104 @throws(classOf[XGBoostError])
115 @throws(classOf[XGBoostError])
127 @throws(classOf[XGBoostError])
140 @throws(classOf[XGBoostError])
[all …]
H A DDMatrix.scala53 @throws(classOf[XGBoostError])
69 @throws(classOf[XGBoostError])
83 @throws(classOf[XGBoostError])
96 @throws(classOf[XGBoostError])
106 @throws(classOf[XGBoostError])
116 @throws(classOf[XGBoostError])
127 @throws(classOf[XGBoostError])
138 @throws(classOf[XGBoostError])
148 @throws(classOf[XGBoostError])
156 @throws(classOf[XGBoostError])
[all …]
/dports/misc/xgboost/xgboost-1.5.1/jvm-packages/xgboost4j/src/main/scala/ml/dmlc/xgboost4j/scala/
H A DBooster.scala40 @throws(classOf[XGBoostError])
51 @throws(classOf[XGBoostError])
62 @throws(classOf[XGBoostError])
72 @throws(classOf[XGBoostError])
83 @throws(classOf[XGBoostError])
93 @throws(classOf[XGBoostError])
104 @throws(classOf[XGBoostError])
115 @throws(classOf[XGBoostError])
127 @throws(classOf[XGBoostError])
140 @throws(classOf[XGBoostError])
[all …]
H A DDMatrix.scala53 @throws(classOf[XGBoostError])
69 @throws(classOf[XGBoostError])
83 @throws(classOf[XGBoostError])
96 @throws(classOf[XGBoostError])
106 @throws(classOf[XGBoostError])
116 @throws(classOf[XGBoostError])
127 @throws(classOf[XGBoostError])
138 @throws(classOf[XGBoostError])
148 @throws(classOf[XGBoostError])
156 @throws(classOf[XGBoostError])
[all …]
/dports/devel/spark/spark-2.1.1/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/
H A DHiveShim.scala174 classOf[URI])
177 classOf[Hive],
193 classOf[Hive],
197 classOf[Path],
209 classOf[Hive],
211 classOf[Path],
220 classOf[Hive],
222 classOf[Path],
228 classOf[Hive],
230 classOf[Path],
[all …]
/dports/devel/spark/spark-2.1.1/core/src/test/scala/org/apache/spark/io/
H A DCompressionCodecSuite.scala49 assert(codec.getClass === classOf[LZ4CompressionCodec])
54 val codec = CompressionCodec.createCodec(conf, classOf[LZ4CompressionCodec].getName)
55 assert(codec.getClass === classOf[LZ4CompressionCodec])
61 assert(codec.getClass === classOf[LZ4CompressionCodec])
67 assert(codec.getClass === classOf[LZ4CompressionCodec])
73 assert(codec.getClass === classOf[LZFCompressionCodec])
79 assert(codec.getClass === classOf[LZFCompressionCodec])
85 assert(codec.getClass === classOf[LZFCompressionCodec])
91 assert(codec.getClass === classOf[SnappyCompressionCodec])
97 assert(codec.getClass === classOf[SnappyCompressionCodec])
[all …]
/dports/devel/spark/spark-2.1.1/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/encoders/
H A DRowEncoder.scala59 val cls = classOf[Row]
117 classOf[UTF8String],
126 classOf[ArrayData],
133 ObjectType(classOf[Object]))
152 classOf[ArrayBasedMapData],
209 case _: StructType => ObjectType(classOf[Row])
253 ObjectType(classOf[java.sql.Timestamp]),
260 ObjectType(classOf[java.sql.Date]),
275 ObjectType(classOf[Array[_]]))
278 ObjectType(classOf[Seq[_]]),
[all …]
/dports/devel/spark/spark-2.1.1/graphx/src/main/scala/org/apache/spark/graphx/
H A DGraphXUtils.scala35 classOf[Edge[Object]],
36 classOf[(VertexId, Object)],
37 classOf[EdgePartition[Object, Object]],
38 classOf[BitSet],
39 classOf[VertexIdToIndexMap],
40 classOf[VertexAttributeBlock[Object]],
41 classOf[PartitionStrategy],
42 classOf[BoundedPriorityQueue[Object]],
43 classOf[EdgeDirection],
45 classOf[OpenHashSet[Int]],
[all …]
/dports/devel/spark/spark-2.1.1/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/
H A DPostgresIntegrationSuite.scala67 assert(classOf[String].isAssignableFrom(types(0)))
69 assert(classOf[java.lang.Double].isAssignableFrom(types(2)))
70 assert(classOf[java.lang.Long].isAssignableFrom(types(3)))
72 assert(classOf[Array[Byte]].isAssignableFrom(types(5)))
73 assert(classOf[Array[Byte]].isAssignableFrom(types(6)))
75 assert(classOf[String].isAssignableFrom(types(8)))
76 assert(classOf[String].isAssignableFrom(types(9)))
77 assert(classOf[Seq[Int]].isAssignableFrom(types(10)))
78 assert(classOf[Seq[String]].isAssignableFrom(types(11)))
79 assert(classOf[Seq[Double]].isAssignableFrom(types(12)))
[all …]
/dports/devel/spark/spark-2.1.1/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/
H A DCallMethodViaReflection.scala120 BooleanType -> Seq(classOf[java.lang.Boolean], classOf[Boolean]),
121 ByteType -> Seq(classOf[java.lang.Byte], classOf[Byte]),
122 ShortType -> Seq(classOf[java.lang.Short], classOf[Short]),
123 IntegerType -> Seq(classOf[java.lang.Integer], classOf[Int]),
124 LongType -> Seq(classOf[java.lang.Long], classOf[Long]),
125 FloatType -> Seq(classOf[java.lang.Float], classOf[Float]),
126 DoubleType -> Seq(classOf[java.lang.Double], classOf[Double]),
127 StringType -> Seq(classOf[String])
/dports/devel/spark/spark-2.1.1/core/src/test/scala/org/apache/spark/serializer/
H A DJavaSerializerSuite.scala42 val intClass = classOf[Int]
43 val longClass = classOf[Long]
44 val shortClass = classOf[Short]
45 val charClass = classOf[Char]
46 val doubleClass = classOf[Double]
47 val floatClass = classOf[Float]
48 val booleanClass = classOf[Boolean]
49 val byteClass = classOf[Byte]
50 val voidClass = classOf[Void]
/dports/devel/spark/spark-2.1.1/sql/core/src/test/scala/org/apache/spark/sql/sources/
H A DResolvedDataSourceSuite.scala31 classOf[org.apache.spark.sql.execution.datasources.jdbc.JdbcRelationProvider])
34 classOf[org.apache.spark.sql.execution.datasources.jdbc.JdbcRelationProvider])
37 classOf[org.apache.spark.sql.execution.datasources.jdbc.JdbcRelationProvider])
43 classOf[org.apache.spark.sql.execution.datasources.json.JsonFileFormat])
46 classOf[org.apache.spark.sql.execution.datasources.json.JsonFileFormat])
49 classOf[org.apache.spark.sql.execution.datasources.json.JsonFileFormat])
55 classOf[org.apache.spark.sql.execution.datasources.parquet.ParquetFileFormat])
58 classOf[org.apache.spark.sql.execution.datasources.parquet.ParquetFileFormat])
61 classOf[org.apache.spark.sql.execution.datasources.parquet.ParquetFileFormat])
67 classOf[org.apache.spark.sql.execution.datasources.csv.CSVFileFormat])
[all …]
/dports/devel/spark/spark-2.1.1/core/src/main/scala/org/apache/spark/serializer/
H A DKryoSerializer.scala177 kryo.register(classOf[ArrayBuffer[Any]])
382 classOf[StorageLevel],
383 classOf[CompressedMapStatus],
384 classOf[HighlyCompressedMapStatus],
385 classOf[CompactBuffer[_]],
386 classOf[BlockManagerId],
387 classOf[Array[Byte]],
388 classOf[Array[Short]],
389 classOf[Array[Long]],
390 classOf[BoundedPriorityQueue[_]],
[all …]
H A DJavaSerializer.scala81 "boolean" -> classOf[Boolean],
82 "byte" -> classOf[Byte],
83 "char" -> classOf[Char],
84 "short" -> classOf[Short],
85 "int" -> classOf[Int],
86 "long" -> classOf[Long],
87 "float" -> classOf[Float],
88 "double" -> classOf[Double],
89 "void" -> classOf[Void]
/dports/devel/spark/spark-2.1.1/core/src/test/scala/org/apache/spark/rpc/netty/
H A DNettyRpcHandlerSuite.scala34 val env = mock(classOf[NettyRpcEnv])
35 val sm = mock(classOf[StreamManager])
36 when(env.deserialize(any(classOf[TransportClient]), any(classOf[ByteBuffer]))(any()))
40 val dispatcher = mock(classOf[Dispatcher])
43 val channel = mock(classOf[Channel])
44 val client = new TransportClient(channel, mock(classOf[TransportResponseHandler]))
52 val dispatcher = mock(classOf[Dispatcher])
55 val channel = mock(classOf[Channel])
56 val client = new TransportClient(channel, mock(classOf[TransportResponseHandler]))
H A DInboxSuite.scala32 val endpointRef = mock(classOf[NettyRpcEndpointRef])
35 val dispatcher = mock(classOf[Dispatcher])
54 val endpointRef = mock(classOf[NettyRpcEndpointRef])
55 val dispatcher = mock(classOf[Dispatcher])
68 val endpointRef = mock(classOf[NettyRpcEndpointRef])
71 val dispatcher = mock(classOf[Dispatcher])
110 val endpointRef = mock(classOf[NettyRpcEndpointRef])
111 val dispatcher = mock(classOf[Dispatcher])
124 val endpointRef = mock(classOf[NettyRpcEndpointRef])
125 val dispatcher = mock(classOf[Dispatcher])
[all …]
/dports/devel/scalatest/scalatest-1.6.1/src/test/scala/org/scalatest/tools/
H A DSuiteDiscoveryHelperSuite.scala26 Array(classOf[String], classOf[Char]): _*)
33 Array(classOf[Iterator[String]], classOf[Char]): _*)
40 Array(classOf[Class[_]]): _*) // This one works in 2.7
49 Array(classOf[Iterator[String]], classOf[Char], classOf[ClassLoader]): _*)
56 Array(classOf[File], classOf[Char]): _*)
83 assert(sdtf.isAccessibleSuite(classOf[SuiteDiscoveryHelperSuite]))
84 assert(!sdtf.isAccessibleSuite(classOf[PackageAccessSuite]))
85 assert(!sdtf.isAccessibleSuite(classOf[PackageAccessConstructorSuite]))
86 assert(!sdtf.isAccessibleSuite(classOf[Suite]))
87 assert(!sdtf.isAccessibleSuite(classOf[Object]))
/dports/devel/spark/spark-2.1.1/mllib/src/main/scala/org/apache/spark/ml/param/shared/
H A DSharedParamsCodeGen.scala109 case _ if c == classOf[Int] => "IntParam"
110 case _ if c == classOf[Long] => "LongParam"
111 case _ if c == classOf[Float] => "FloatParam"
112 case _ if c == classOf[Double] => "DoubleParam"
113 case _ if c == classOf[Boolean] => "BooleanParam"
127 case _ if c == classOf[Int] => "Int"
128 case _ if c == classOf[Long] => "Long"
129 case _ if c == classOf[Float] => "Float"
130 case _ if c == classOf[Double] => "Double"
131 case _ if c == classOf[Boolean] => "Boolean"
[all …]
/dports/devel/spark/spark-2.1.1/sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/
H A DGenerateColumnAccessor.scala76 case NullType => classOf[NullColumnAccessor].getName
77 case BooleanType => classOf[BooleanColumnAccessor].getName
78 case ByteType => classOf[ByteColumnAccessor].getName
79 case ShortType => classOf[ShortColumnAccessor].getName
82 case FloatType => classOf[FloatColumnAccessor].getName
83 case DoubleType => classOf[DoubleColumnAccessor].getName
84 case StringType => classOf[StringColumnAccessor].getName
85 case BinaryType => classOf[BinaryColumnAccessor].getName
87 classOf[CompactDecimalColumnAccessor].getName
90 case array: ArrayType => classOf[ArrayColumnAccessor].getName
[all …]
/dports/devel/spark/spark-2.1.1/core/src/test/scala/org/apache/spark/
H A DSparkConfSuite.scala179 conf.registerKryoClasses(Array(classOf[Class1], classOf[Class2]))
181 classOf[Class1].getName + "," + classOf[Class2].getName)
183 conf.registerKryoClasses(Array(classOf[Class3]))
185 classOf[Class1].getName + "," + classOf[Class2].getName + "," + classOf[Class3].getName)
187 conf.registerKryoClasses(Array(classOf[Class2]))
189 classOf[Class1].getName + "," + classOf[Class2].getName + "," + classOf[Class3].getName)
202 conf.registerKryoClasses(Array(classOf[Class1]))
203 assert(conf.get("spark.kryo.classesToRegister") === classOf[Class1].getName)
205 conf.set("spark.kryo.registrator", classOf[CustomRegistrator].getName)
217 conf.set("spark.serializer", classOf[KryoSerializer].getName)
[all …]

Pages: 1 2 3 4 5 6 7 8 9 10 >> ... 30