/dports/devel/spark/spark-2.1.1/sql/catalyst/src/test/java/org/apache/spark/sql/catalyst/expressions/

RowBasedKeyValueBatchSuite.java
     58  UnsafeRow row = new UnsafeRow(2);  in makeKeyRow()
     69  UnsafeRow row = new UnsafeRow(2);  in makeKeyRow()
     80  UnsafeRow row = new UnsafeRow(2);  in makeValueRow()
     90  private UnsafeRow appendRow(RowBasedKeyValueBatch batch, UnsafeRow key, UnsafeRow value) {  in appendRow()
    248  org.apache.spark.unsafe.KVIterator<UnsafeRow, UnsafeRow> iterator  in iteratorTest()
    251  UnsafeRow key1 = iterator.getKey();  in iteratorTest()
    256  UnsafeRow key2 = iterator.getKey();  in iteratorTest()
    261  UnsafeRow key3 = iterator.getKey();  in iteratorTest()
    288  org.apache.spark.unsafe.KVIterator<UnsafeRow, UnsafeRow> iterator  in fixedLengthTest()
    324  org.apache.spark.unsafe.KVIterator<UnsafeRow, UnsafeRow> iterator  in appendRowUntilExceedingCapacity()
    [all …]
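
The appendRow() helper at line 90 hands a key/value pair to the batch as raw (base object, offset, length) triples taken from the two UnsafeRows. A minimal Scala sketch of that call shape against the public RowBasedKeyValueBatch API; the helper name `append` is ours:

```scala
import org.apache.spark.sql.catalyst.expressions.{RowBasedKeyValueBatch, UnsafeRow}

// Sketch of the suite's appendRow() helper: the batch copies the key and value bytes
// and returns an UnsafeRow over the value it just wrote (null if the batch is full).
def append(batch: RowBasedKeyValueBatch, key: UnsafeRow, value: UnsafeRow): UnsafeRow =
  batch.appendRow(key.getBaseObject, key.getBaseOffset, key.getSizeInBytes,
                  value.getBaseObject, value.getBaseOffset, value.getSizeInBytes)
```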

/dports/devel/spark/spark-2.1.1/sql/core/src/main/java/org/apache/spark/sql/execution/

UnsafeFixedWidthAggregationMap.java
     26  import org.apache.spark.sql.catalyst.expressions.UnsafeRow;
     64  private final UnsafeRow currentAggregationBuffer;
     74  if (!UnsafeRow.isMutable(field.dataType())) {  in supportsAggregationBufferSchema()
    124  public UnsafeRow getAggregationBufferFromUnsafeRow(UnsafeRow key) {  in getAggregationBufferFromUnsafeRow()
    128  public UnsafeRow getAggregationBufferFromUnsafeRow(UnsafeRow key, int hash) {  in getAggregationBufferFromUnsafeRow()
    167  public KVIterator<UnsafeRow, UnsafeRow> iterator() {  in iterator()
    168  return new KVIterator<UnsafeRow, UnsafeRow>() {  in iterator()
    172  private final UnsafeRow key = new UnsafeRow(groupingKeySchema.length());  in iterator()
    173  private final UnsafeRow value = new UnsafeRow(aggregationBufferSchema.length());  in iterator()
    196  public UnsafeRow getKey() {  in iterator()
    [all …]
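
getAggregationBufferFromUnsafeRow() and iterator() shape how the map is consumed: probe with a grouping key, mutate the returned buffer in place, then drain everything through a KVIterator. A sketch of the drain side, assuming the map and its iterator already exist:

```scala
import org.apache.spark.sql.catalyst.expressions.UnsafeRow
import org.apache.spark.unsafe.KVIterator

// Sketch: draining a KVIterator[UnsafeRow, UnsafeRow] such as the one returned by
// UnsafeFixedWidthAggregationMap.iterator(). getKey/getValue return rows that point
// into the map's memory pages, so copy() anything that must outlive the iteration.
def drain(iter: KVIterator[UnsafeRow, UnsafeRow])(f: (UnsafeRow, UnsafeRow) => Unit): Unit = {
  try {
    while (iter.next()) {
      f(iter.getKey, iter.getValue)
    }
  } finally {
    iter.close()
  }
}
```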

UnsafeKVExternalSorter.java
    113  UnsafeRow row = new UnsafeRow(numKeyFields);  in UnsafeKVExternalSorter()
    157  public void insertKV(UnsafeRow key, UnsafeRow value) throws IOException {  in insertKV()
    226  private final UnsafeRow row1;
    227  private final UnsafeRow row2;
    232  this.row1 = new UnsafeRow(numKeyFields);  in KVComparator()
    233  this.row2 = new UnsafeRow(numKeyFields);  in KVComparator()
    247  public class KVSorterIterator extends KVIterator<UnsafeRow, UnsafeRow> {
    248  private UnsafeRow key = new UnsafeRow(keySchema.size());
    249  private UnsafeRow value = new UnsafeRow(valueSchema.size());
    286  public UnsafeRow getKey() {  in getKey()
    [all …]
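
insertKV() plus the KVSorterIterator give the usual insert-then-read-sorted pattern. A sketch under the assumption that a sorter has already been constructed (its constructor needs schemas, a block manager and a page size, all omitted here):

```scala
import org.apache.spark.sql.catalyst.expressions.UnsafeRow
import org.apache.spark.sql.execution.UnsafeKVExternalSorter

// Sketch: feed key/value pairs into the sorter (it may spill to disk), then read them
// back in key order through the KVSorterIterator shown above.
def sortPairs(sorter: UnsafeKVExternalSorter,
              pairs: Iterator[(UnsafeRow, UnsafeRow)])
             (consume: (UnsafeRow, UnsafeRow) => Unit): Unit = {
  pairs.foreach { case (k, v) => sorter.insertKV(k, v) }
  val iter = sorter.sortedIterator()          // KVSorterIterator over key-sorted entries
  while (iter.next()) {
    consume(iter.getKey, iter.getValue)       // reused buffers: copy() to retain
  }
}
```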

/dports/devel/spark/spark-2.1.1/sql/core/src/main/scala/org/apache/spark/sql/execution/joins/

CartesianProductExec.scala
     24  import org.apache.spark.sql.catalyst.expressions.{Attribute, Expression, JoinedRow, UnsafeRow}
     37  class UnsafeCartesianRDD(left : RDD[UnsafeRow], right : RDD[UnsafeRow], numFieldsOfRight: Int)
     38  extends CartesianRDD[UnsafeRow, UnsafeRow](left.sparkContext, left, right) {
     40  override def compute(split: Partition, context: TaskContext): Iterator[(UnsafeRow, UnsafeRow)] = {
     61  def createIter(): Iterator[UnsafeRow] = {
     63  val unsafeRow = new UnsafeRow(numFieldsOfRight)
     64  new Iterator[UnsafeRow] {
     68  override def next(): UnsafeRow = {
     79  CompletionIterator[(UnsafeRow, UnsafeRow), Iterator[(UnsafeRow, UnsafeRow)]](
     97  val leftResults = left.execute().asInstanceOf[RDD[UnsafeRow]]
    [all …]
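
createIter() at lines 61-68 shows the pattern used for the right side of the cartesian product: a single UnsafeRow is allocated once and re-pointed at each serialized record instead of allocating a row per record. A simplified, self-contained sketch of the same idea (the byte-chunk iterator here is an assumption, not the real sorter-backed storage):

```scala
import org.apache.spark.sql.catalyst.expressions.UnsafeRow

// Sketch: decode a stream of serialized rows by re-pointing one reused UnsafeRow.
def decode(chunks: Iterator[Array[Byte]], numFields: Int): Iterator[UnsafeRow] = {
  val row = new UnsafeRow(numFields)
  chunks.map { bytes =>
    row.pointTo(bytes, bytes.length)  // no per-record allocation
    row                               // callers must copy() if they retain the row
  }
}
```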

HashedRelation.scala
    135  var resultRow = new UnsafeRow(numFields)
    144  new Iterator[UnsafeRow] {
    147  override def next(): UnsafeRow = {
    222  resultRow = new UnsafeRow(numFields)
    462  private def getRow(address: Long, resultRow: UnsafeRow): UnsafeRow = {
    470  def getValue(key: Long, resultRow: UnsafeRow): UnsafeRow = {
    493  private def valueIter(address: Long, resultRow: UnsafeRow): Iterator[UnsafeRow] = {
    494  new Iterator[UnsafeRow] {
    510  def get(key: Long, resultRow: UnsafeRow): Iterator[UnsafeRow] = {
    751  private var resultRow: UnsafeRow = new UnsafeRow(nFields)
    [all …]
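
getValue(key, resultRow) and get(key, resultRow) both fill in a caller-owned UnsafeRow rather than allocating, which is why resultRow appears so often above. A sketch of that probing style, with the lookup passed in as a function so nothing package-private is referenced (the long-keyed relation's method has this shape):

```scala
import org.apache.spark.sql.catalyst.expressions.UnsafeRow

// Sketch: probe a long-keyed relation with one reused result row.
// `lookup` stands in for getValue(key, resultRow); null means no match.
def probeAll(keys: Seq[Long], numFields: Int,
             lookup: (Long, UnsafeRow) => UnsafeRow): Seq[UnsafeRow] = {
  val resultRow = new UnsafeRow(numFields)
  keys.flatMap { k =>
    Option(lookup(k, resultRow)).map(_.copy())  // copy before keeping: the buffer is reused
  }
}
```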

/dports/devel/spark/spark-2.1.1/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/

FixedLengthRowBasedKeyValueBatch.java
     46  public UnsafeRow appendRow(Object kbase, long koff, int klen,  in appendRow()
     74  public UnsafeRow getKeyRow(int rowId) {  in getKeyRow()
     93  protected UnsafeRow getValueFromKey(int rowId) {
    106  public org.apache.spark.unsafe.KVIterator<UnsafeRow, UnsafeRow> rowIterator() {  in rowIterator()
    107  return new org.apache.spark.unsafe.KVIterator<UnsafeRow, UnsafeRow>() {  in rowIterator()
    108  private final UnsafeRow key = new UnsafeRow(keySchema.length());  in rowIterator()
    109  private final UnsafeRow value = new UnsafeRow(valueSchema.length());  in rowIterator()
    142  public UnsafeRow getKey() {  in rowIterator()
    147  public UnsafeRow getValue() {  in rowIterator()
    170  klen = keySize + UnsafeRow.calculateBitSetWidthInBytes(keySchema.length());  in FixedLengthRowBasedKeyValueBatch()
    [all …]
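
Line 170 is the layout story for the fixed-length batch: every record's key takes keySize bytes of 8-byte field slots plus a null bitset in front. A small sketch of that arithmetic; the 8-bytes-per-field figure is UnsafeRow's fixed-width layout, while treating keySize as exactly 8 * numFields is our assumption about the constructor:

```scala
import org.apache.spark.sql.catalyst.expressions.UnsafeRow
import org.apache.spark.sql.types.{IntegerType, LongType, StructType}

object FixedKeyLength {
  // Mirrors the klen computation at line 170: one 8-byte slot per fixed-width field,
  // preceded by the null-tracking bitset.
  def klen(keySchema: StructType): Int =
    keySchema.length * 8 + UnsafeRow.calculateBitSetWidthInBytes(keySchema.length)

  // Example: a (LongType, IntegerType) key -> 2 * 8 + 8 = 24 bytes per key record.
  val example: Int = klen(new StructType().add("a", LongType).add("b", IntegerType))
}
```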

RowBasedKeyValueBatch.java
     62  protected final UnsafeRow keyRow;
     63  protected final UnsafeRow valueRow;
     82  && UnsafeRow.isFixedLength(keySchema.apply(name).dataType());  in allocate()
     86  && UnsafeRow.isFixedLength(valueSchema.apply(name).dataType());  in allocate()
    104  this.keyRow = new UnsafeRow(keySchema.length());  in RowBasedKeyValueBatch()
    105  this.valueRow = new UnsafeRow(valueSchema.length());  in RowBasedKeyValueBatch()
    142  public abstract UnsafeRow appendRow(Object kbase, long koff, int klen,  in appendRow()
    148  public abstract UnsafeRow getKeyRow(int rowId);  in getKeyRow()
    155  public final UnsafeRow getValueRow(int rowId) {  in getValueRow()
    165  protected abstract UnsafeRow getValueFromKey(int rowId);  in getValueFromKey()
    [all …]
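
allocate() (lines 82-86) picks between the fixed- and variable-length subclasses by checking every key and value field with UnsafeRow.isFixedLength. The check itself is easy to restate as a sketch:

```scala
import org.apache.spark.sql.catalyst.expressions.UnsafeRow
import org.apache.spark.sql.types.StructType

// Sketch of the decision in allocate(): only an all-fixed-length schema pair can use
// FixedLengthRowBasedKeyValueBatch; anything else falls back to the variable-length batch.
def canUseFixedLengthBatch(keySchema: StructType, valueSchema: StructType): Boolean =
  keySchema.fields.forall(f => UnsafeRow.isFixedLength(f.dataType)) &&
    valueSchema.fields.forall(f => UnsafeRow.isFixedLength(f.dataType))
```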

VariableLengthRowBasedKeyValueBatch.java
     42  public UnsafeRow appendRow(Object kbase, long koff, int klen,  in appendRow()
     77  public UnsafeRow getKeyRow(int rowId) {  in getKeyRow()
     97  public UnsafeRow getValueFromKey(int rowId) {
    113  public org.apache.spark.unsafe.KVIterator<UnsafeRow, UnsafeRow> rowIterator() {  in rowIterator()
    114  return new org.apache.spark.unsafe.KVIterator<UnsafeRow, UnsafeRow>() {  in rowIterator()
    115  private final UnsafeRow key = new UnsafeRow(keySchema.length());  in rowIterator()
    116  private final UnsafeRow value = new UnsafeRow(valueSchema.length());  in rowIterator()
    157  public UnsafeRow getKey() {  in rowIterator()
    162  public UnsafeRow getValue() {  in rowIterator()

UnsafeRow.java
    146  public UnsafeRow(int numFields) {
    152  public UnsafeRow() {}
    456  public UnsafeRow getStruct(int ordinal, int numFields) {  in getStruct()
    463  final UnsafeRow row = new UnsafeRow(numFields);  in getStruct()
    502  public UnsafeRow copy() {  in copy()
    503  UnsafeRow rowCopy = new UnsafeRow(numFields);  in copy()
    520  public static UnsafeRow createFromByteArray(int numBytes, int numFields) {  in createFromByteArray()
    521  final UnsafeRow row = new UnsafeRow(numFields);  in createFromByteArray()
    530  public void copyFrom(UnsafeRow row) {  in copyFrom()
    576  if (other instanceof UnsafeRow) {  in equals()
    [all …]
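
The hits above cover the core lifecycle: construct with a field count, back it with bytes, copy. A short sketch exercising createFromByteArray, copy and copyFrom; the field count and buffer size are chosen for illustration:

```scala
import org.apache.spark.sql.catalyst.expressions.UnsafeRow

object UnsafeRowBasics {
  val numFields = 3
  // 8-byte null bitset + 3 * 8-byte field slots = 32 bytes of backing storage.
  val a: UnsafeRow = UnsafeRow.createFromByteArray(8 + numFields * 8, numFields)
  a.setLong(0, 42L)

  val b: UnsafeRow = a.copy()   // line 502: deep copy into a fresh buffer
  b.copyFrom(a)                 // line 530: overwrite b's buffer with a's bytes
}
```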

/dports/devel/spark/spark-2.1.1/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/state/

StateStore.scala
     30  import org.apache.spark.sql.catalyst.expressions.UnsafeRow
     51  def get(key: UnsafeRow): Option[UnsafeRow]
     54  def put(key: UnsafeRow, value: UnsafeRow): Unit
     59  def remove(condition: UnsafeRow => Boolean): Unit
     73  def iterator(): Iterator[(UnsafeRow, UnsafeRow)]
    104  def key: UnsafeRow
    105  def value: UnsafeRow
    108  case class ValueAdded(key: UnsafeRow, value: UnsafeRow) extends StoreUpdate
    110  case class ValueUpdated(key: UnsafeRow, value: UnsafeRow) extends StoreUpdate
    112  case class ValueRemoved(key: UnsafeRow, value: UnsafeRow) extends StoreUpdate
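
The trait above is a plain versioned key/value contract: get returns Option[UnsafeRow], put upserts, remove takes a predicate, iterator walks all pairs. A sketch of a caller, assuming the trait is visible from the call site and that the key and value rows come from the usual key/value projections:

```scala
import org.apache.spark.sql.catalyst.expressions.UnsafeRow
import org.apache.spark.sql.execution.streaming.state.StateStore

// Sketch: upsert one pair and report whether the key already had state.
// copy() is defensive: rows handed around by operators are often reused buffers.
def putAndCheck(store: StateStore, key: UnsafeRow, value: UnsafeRow): Boolean = {
  val existed = store.get(key).isDefined
  store.put(key.copy(), value.copy())
  existed
}
```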

HDFSBackedStateStoreProvider.scala
     34  import org.apache.spark.sql.catalyst.expressions.UnsafeRow
     76  type MapType = java.util.HashMap[UnsafeRow, UnsafeRow]
     98  override def get(key: UnsafeRow): Option[UnsafeRow] = {
    102  override def put(key: UnsafeRow, value: UnsafeRow): Unit = {
    185  override def iterator(): Iterator[(UnsafeRow, UnsafeRow)] = {
    281  private[state] def latestIterator(): Iterator[(UnsafeRow, UnsafeRow)] = synchronized {
    293  private[state] def iterator(version: Long): Iterator[(UnsafeRow, UnsafeRow)] = synchronized {
    327  def writeUpdate(key: UnsafeRow, value: UnsafeRow): Unit = {
    336  def writeRemove(key: UnsafeRow): Unit = {
    383  val keyRow = new UnsafeRow(keySchema.fields.length)
    [all …]

/dports/devel/spark/spark-2.1.1/sql/catalyst/src/main/java/org/apache/spark/sql/execution/

UnsafeExternalRowSorter.java
     30  import org.apache.spark.sql.catalyst.expressions.UnsafeRow;
    107  public void insertRow(UnsafeRow row) throws IOException {  in insertRow()
    140  public Iterator<UnsafeRow> sort() throws IOException {  in sort()
    148  return new AbstractScalaRowIterator<UnsafeRow>() {  in sort()
    151  private UnsafeRow row = new UnsafeRow(numFields);  in sort()
    159  public UnsafeRow next() {  in sort()
    189  public Iterator<UnsafeRow> sort(Iterator<UnsafeRow> inputIterator) throws IOException {  in sort()
    199  private final UnsafeRow row1;
    200  private final UnsafeRow row2;
    204  this.row1 = new UnsafeRow(numFields);  in RowComparator()
    [all …]
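
The RowComparator at lines 199-204 pre-allocates two UnsafeRows and re-points them at the raw records being compared, so comparisons allocate nothing. A simplified sketch of that idea; the class name and compare signature here are illustrative, not the exact inner class:

```scala
import org.apache.spark.sql.catalyst.expressions.UnsafeRow

// Sketch: compare two serialized records by pointing reusable rows at them.
class PointToComparator(numFields: Int, ordering: Ordering[UnsafeRow]) {
  private val row1 = new UnsafeRow(numFields)
  private val row2 = new UnsafeRow(numFields)

  def compare(base1: AnyRef, offset1: Long, length1: Int,
              base2: AnyRef, offset2: Long, length2: Int): Int = {
    row1.pointTo(base1, offset1, length1)
    row2.pointTo(base2, offset2, length2)
    ordering.compare(row1, row2)
  }
}
```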

UnsafeKeyValueSorter.java
     22  import org.apache.spark.sql.catalyst.expressions.UnsafeRow;
     27  public abstract void insert(UnsafeRow key, UnsafeRow value);  in insert()
     29  public abstract KVIterator<UnsafeRow, UnsafeRow> sort() throws IOException;  in sort()

/dports/devel/spark/spark-2.1.1/sql/core/src/test/scala/org/apache/spark/sql/

UnsafeRowSuite.scala
     37  val row = new UnsafeRow(1)
     42  val row1 = ser.deserialize[UnsafeRow](ser.serialize(row))
     50  val row = new UnsafeRow(1)
     61  assert(UnsafeRow.calculateBitSetWidthInBytes(0) === 0)
     62  assert(UnsafeRow.calculateBitSetWidthInBytes(1) === 8)
     63  assert(UnsafeRow.calculateBitSetWidthInBytes(32) === 8)
     64  assert(UnsafeRow.calculateBitSetWidthInBytes(64) === 8)
     65  assert(UnsafeRow.calculateBitSetWidthInBytes(65) === 16)
     71  val arrayBackedUnsafeRow: UnsafeRow =
     89  val offheapUnsafeRow: UnsafeRow = new UnsafeRow(3)
    [all …]
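
The asserts at lines 61-65 pin down how the null bitset is sized: one 64-bit word per 64 fields, rounded up. The same formula, stated directly:

```scala
object BitSetWidth {
  // Equivalent to UnsafeRow.calculateBitSetWidthInBytes: ceil(numFields / 64) words of 8 bytes.
  def bitSetWidthInBytes(numFields: Int): Int = ((numFields + 63) / 64) * 8

  // Matches the asserts at lines 61-65.
  assert(bitSetWidthInBytes(0)  == 0)
  assert(bitSetWidthInBytes(1)  == 8)
  assert(bitSetWidthInBytes(32) == 8)
  assert(bitSetWidthInBytes(64) == 8)
  assert(bitSetWidthInBytes(65) == 16)
}
```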

/dports/devel/spark/spark-2.1.1/sql/core/src/main/scala/org/apache/spark/sql/execution/aggregate/

TungstenAggregationIterator.scala
    118  private def createNewAggregationBuffer(): UnsafeRow = {
    120  val buffer: UnsafeRow = UnsafeProjection.create(bufferSchema.map(_.dataType))
    130  override protected def generateResultProjection(): (UnsafeRow, InternalRow) => UnsafeRow = {
    140  (currentGroupingKey: UnsafeRow, currentBuffer: InternalRow) => {
    189  var buffer: UnsafeRow = null
    223  private[this] var aggregationBufferMapIterator: KVIterator[UnsafeRow, UnsafeRow] = null
    288  private[this] var currentGroupingKey: UnsafeRow = null
    291  private[this] var nextGroupingKey: UnsafeRow = null
    294  private[this] var firstRowInNextGroup: UnsafeRow = null
    378  override final def next(): UnsafeRow = {
    [all …]
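
createNewAggregationBuffer() (lines 118-120) builds the initial per-group buffer by projecting an empty row of the buffer's data types into an UnsafeRow. A hedged sketch of that step; initialization of the aggregate functions' start values is omitted:

```scala
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions.{UnsafeProjection, UnsafeRow}
import org.apache.spark.sql.types.DataType

// Sketch: an all-null UnsafeRow with one slot per aggregation-buffer field.
// copy() detaches the result from the projection's reused output buffer.
def emptyAggregationBuffer(bufferTypes: Seq[DataType]): UnsafeRow =
  UnsafeProjection.create(bufferTypes.toArray)
    .apply(InternalRow.fromSeq(Seq.fill(bufferTypes.length)(null)))
    .copy()
```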

SortBasedAggregationIterator.scala
     57  val useUnsafeBuffer = bufferSchema.map(_.dataType).forall(UnsafeRow.isMutable)
     75  private[this] var currentGroupingKey: UnsafeRow = _
     78  private[this] var nextGroupingKey: UnsafeRow = _
    154  override final def next(): UnsafeRow = {
    170  def outputForEmptyGroupingKeyWithoutInput(): UnsafeRow = {
    172  generateOutput(UnsafeRow.createFromByteArray(0, 0), sortBasedAggregationBuffer)

AggregationIterator.scala
     43  extends Iterator[UnsafeRow] with Logging {
    205  protected def generateResultProjection(): (UnsafeRow, InternalRow) => UnsafeRow = {
    221  (currentGroupingKey: UnsafeRow, currentBuffer: InternalRow) => {
    247  (currentGroupingKey: UnsafeRow, currentBuffer: InternalRow) => {
    259  (currentGroupingKey: UnsafeRow, currentBuffer: InternalRow) => {
    265  protected val generateOutput: (UnsafeRow, InternalRow) => UnsafeRow =

/dports/devel/spark/spark-2.1.1/sql/core/src/main/scala/org/apache/spark/sql/execution/python/

RowQueue.scala
     43  def add(row: UnsafeRow): Boolean
     50  def remove(): UnsafeRow
     75  private val resultRow = new UnsafeRow(numFields)
     77  def add(row: UnsafeRow): Boolean = synchronized {
     94  def remove(): UnsafeRow = synchronized {
    117  private val resultRow = new UnsafeRow(fields)
    119  def add(row: UnsafeRow): Boolean = synchronized {
    130  def remove(): UnsafeRow = synchronized {
    241  def add(row: UnsafeRow): Boolean = {
    251  def remove(): UnsafeRow = {
    [all …]
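
The queue contract at lines 43-50 is small: add() returns false when the current page is full, remove() returns null once the queue is exhausted, and the returned rows may be reused buffers. A sketch against a locally re-declared version of that contract, since the real RowQueue trait lives in execution.python and may not be visible outside Spark:

```scala
import org.apache.spark.sql.catalyst.expressions.UnsafeRow

// Local stand-in for the RowQueue contract shown above.
trait SimpleRowQueue {
  def add(row: UnsafeRow): Boolean   // false when the backing page is full
  def remove(): UnsafeRow            // null when the queue is exhausted
}

def pump(queue: SimpleRowQueue, rows: Iterator[UnsafeRow])(consume: UnsafeRow => Unit): Unit = {
  var accepted = true
  while (accepted && rows.hasNext) accepted = queue.add(rows.next()) // stop when "full"

  var row = queue.remove()
  while (row != null) {
    consume(row)                     // row may be a reused buffer; copy() to retain it
    row = queue.remove()
  }
}
```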

/dports/devel/spark/spark-2.1.1/sql/core/src/test/scala/org/apache/spark/sql/execution/benchmark/

AggregateBenchmark.scala
    325  val key = new UnsafeRow(1)
    340  val key = new UnsafeRow(1)
    355  val key = new UnsafeRow(1)
    374  val key = new UnsafeRow(1)
    376  val value = new UnsafeRow(1)
    393  val value = new UnsafeRow(1)
    415  val value = new UnsafeRow(1)
    440  val key = new UnsafeRow(1)
    442  val value = new UnsafeRow(1)
    445  val map = new HashMap[UnsafeRow, UnsafeRow]()
    [all …]

/dports/devel/spark/spark-2.1.1/sql/core/src/main/scala/org/apache/spark/sql/execution/

UnsafeRowSerializer.scala
     28  import org.apache.spark.sql.catalyst.expressions.UnsafeRow
     64  val row = value.asInstanceOf[UnsafeRow]
    105  private[this] var row: UnsafeRow = new UnsafeRow(numFields)
    106  private[this] var rowTuple: (Int, UnsafeRow) = (0, row)
    109  override def asKeyValueIterator: Iterator[(Int, UnsafeRow)] = {
    110  new Iterator[(Int, UnsafeRow)] {
    123  override def next(): (Int, UnsafeRow) = {
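
The serializer streams each row as its length followed by its raw bytes, and the deserializer at lines 105-123 re-points one reused UnsafeRow per record. A simplified sketch of that framing using plain Java streams; the real class writes through writeToStream and a reusable buffer rather than this temporary array:

```scala
import java.io.{DataInputStream, DataOutputStream}
import org.apache.spark.sql.catalyst.expressions.UnsafeRow

// Sketch: 4-byte length prefix + raw row bytes.
def writeRow(out: DataOutputStream, row: UnsafeRow): Unit = {
  out.writeInt(row.getSizeInBytes)
  out.write(row.getBytes)
}

// Sketch: read the next record into a caller-owned, reused row.
def readRow(in: DataInputStream, reused: UnsafeRow): UnsafeRow = {
  val length = in.readInt()
  val buffer = new Array[Byte](length)
  in.readFully(buffer)
  reused.pointTo(buffer, length)
  reused
}
```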

/dports/devel/spark/spark-2.1.1/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/codegen/

BufferHolder.java
     20  import org.apache.spark.sql.catalyst.expressions.UnsafeRow;
     40  private final UnsafeRow row;
     43  public BufferHolder(UnsafeRow row) {  in BufferHolder()
     47  public BufferHolder(UnsafeRow row, int initialSize) {  in BufferHolder()
     48  int bitsetWidthInBytes = UnsafeRow.calculateBitSetWidthInBytes(row.numFields());  in BufferHolder()
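
BufferHolder owns the growable byte[] behind an UnsafeRow while generated code writes fields through an UnsafeRowWriter; line 48 shows it sizing the buffer from the row's field count. A sketch of that trio as used by hand-written code in this tree (the field values are arbitrary, and the two-argument UnsafeRowWriter constructor is the one this 2.1.1 source uses):

```scala
import org.apache.spark.sql.catalyst.expressions.UnsafeRow
import org.apache.spark.sql.catalyst.expressions.codegen.{BufferHolder, UnsafeRowWriter}
import org.apache.spark.unsafe.types.UTF8String

// Sketch: build a 2-field row (long, string) through BufferHolder + UnsafeRowWriter.
def makeRow(a: Long, s: String): UnsafeRow = {
  val row    = new UnsafeRow(2)
  val holder = new BufferHolder(row)          // buffer sized from row.numFields()
  val writer = new UnsafeRowWriter(holder, 2)

  holder.reset()
  writer.zeroOutNullBytes()
  writer.write(0, a)
  writer.write(1, UTF8String.fromString(s))   // variable-length data may grow the buffer
  row.setTotalSize(holder.totalSize())
  row
}
```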

/dports/devel/spark/spark-2.1.1/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/

GenerateUnsafeRowJoiner.scala
     20  import org.apache.spark.sql.catalyst.expressions.{Attribute, UnsafeRow}
     25  def join(row1: UnsafeRow, row2: UnsafeRow): UnsafeRow
    140  if (UnsafeRow.isFixedLength(field.dataType)) {
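
The generated joiner (line 25) concatenates two UnsafeRows, for example a grouping key and an aggregation buffer, by splicing their bitsets and byte regions without decoding any field. A sketch of how it is obtained and used; in real code the joiner is created once and cached, not per call:

```scala
import org.apache.spark.sql.catalyst.expressions.UnsafeRow
import org.apache.spark.sql.catalyst.expressions.codegen.GenerateUnsafeRowJoiner
import org.apache.spark.sql.types.StructType

// Sketch: code-generate a joiner for the two schemas, then join row pairs with it.
def concat(schema1: StructType, schema2: StructType,
           row1: UnsafeRow, row2: UnsafeRow): UnsafeRow = {
  val joiner = GenerateUnsafeRowJoiner.create(schema1, schema2)
  joiner.join(row1, row2)
}
```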

/dports/devel/spark/spark-2.1.1/sql/core/src/test/scala/org/apache/spark/sql/execution/

UnsafeRowSerializerSuite.scala
     29  import org.apache.spark.sql.catalyst.expressions.{UnsafeProjection, UnsafeRow}
     48  private def toUnsafeRow(row: Row, schema: Array[DataType]): UnsafeRow = {
     53  private def unsafeRowConverter(schema: Array[DataType]): Row => UnsafeRow = {
     82  val actualRow = deserializerIter.next().asInstanceOf[(Integer, UnsafeRow)]._2
    119  val sorter = new ExternalSorter[Int, UnsafeRow, UnsafeRow](
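
toUnsafeRow/unsafeRowConverter (lines 48-53) turn an external Row into Tungsten's binary format by first converting it to the Catalyst representation and then applying an UnsafeProjection. A sketch of a helper with the same shape; the trailing copy() is ours, to detach from the projection's reused output buffer:

```scala
import org.apache.spark.sql.Row
import org.apache.spark.sql.catalyst.{CatalystTypeConverters, InternalRow}
import org.apache.spark.sql.catalyst.expressions.{UnsafeProjection, UnsafeRow}
import org.apache.spark.sql.types.DataType

// Sketch: external Row -> Catalyst InternalRow -> UnsafeRow.
def toUnsafe(row: Row, schema: Array[DataType]): UnsafeRow = {
  val projection = UnsafeProjection.create(schema)
  val internal   = CatalystTypeConverters.convertToCatalyst(row).asInstanceOf[InternalRow]
  projection(internal).copy()
}
```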

/dports/devel/spark/spark-2.1.1/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/codegen/

BufferHolderSuite.scala
     21  import org.apache.spark.sql.catalyst.expressions.UnsafeRow
     27  new BufferHolder(new UnsafeRow(Int.MaxValue / 8))
     31  val holder = new BufferHolder(new UnsafeRow(1000))

/dports/devel/spark/spark-2.1.1/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/

ParquetRecordMaterializer.scala
     23  import org.apache.spark.sql.catalyst.expressions.UnsafeRow
     35  extends RecordMaterializer[UnsafeRow] {
     40  override def getCurrentRecord: UnsafeRow = rootConverter.currentRecord