
Searched defs:createCombiner (Results 1 – 20 of 20) sorted by relevance

/dports/devel/spark/spark-2.1.1/core/src/test/scala/org/apache/spark/util/collection/
ExternalSorterSuite.scala:129  def createCombiner(i: String): ArrayBuffer[String] = ArrayBuffer[String](i)  [method]
ExternalSorterSuite.scala:213  def createCombiner(i: Int): ArrayBuffer[Int] = ArrayBuffer[Int](i)  [method]
ExternalSorterSuite.scala:239  def createCombiner(i: String): ArrayBuffer[String] = ArrayBuffer[String](i)  [method]
ExternalSorterSuite.scala:593  def createCombiner(i: String): ArrayBuffer[String] = ArrayBuffer(i)  [method]
ExternalAppendOnlyMapSuite.scala:31  private def createCombiner[T](i: T) = ArrayBuffer[T](i)  [method]
/dports/devel/spark/spark-2.1.1/core/src/main/scala/org/apache/spark/rdd/
CoGroupedRDD.scala:166  val createCombiner: (CoGroupValue => CoGroupCombiner) = value => {  [constant]
PairRDDFunctions.scala:507  val createCombiner = (v: V) => CompactBuffer(v)  [constant]
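
The two hits above are the combiners that cogroup and groupByKey install internally; application code supplies its own createCombiner, together with mergeValue and mergeCombiners, through RDD.combineByKey. A minimal sketch of that API under the assumption of a local SparkContext; the object name and the per-key averaging logic are illustrative only, not taken from the indexed sources:

    import org.apache.spark.{SparkConf, SparkContext}

    object CreateCombinerSketch {
      def main(args: Array[String]): Unit = {
        val sc = new SparkContext(
          new SparkConf().setAppName("createCombiner-sketch").setMaster("local[2]"))

        val pairs = sc.parallelize(Seq(("a", 1), ("b", 2), ("a", 3)))

        // combineByKey takes the same three functions the hits above define:
        // createCombiner turns the first value seen for a key into a combiner,
        // mergeValue folds further values into it, and mergeCombiners merges
        // partial combiners produced on different partitions.
        val sums = pairs.combineByKey(
          (v: Int) => (v, 1),                                            // createCombiner
          (acc: (Int, Int), v: Int) => (acc._1 + v, acc._2 + 1),         // mergeValue
          (a: (Int, Int), b: (Int, Int)) => (a._1 + b._1, a._2 + b._2))  // mergeCombiners

        sums.collect().foreach { case (k, (sum, count)) =>
          println(s"$k -> avg ${sum.toDouble / count}")
        }
        sc.stop()
      }
    }
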
/dports/devel/hadoop/hadoop-1.2.1/c++/Linux-i386-32/include/hadoop/
TemplateFactory.hh:51  Reducer* createCombiner(MapContext& context) const {  [function in HadoopPipes::TemplateFactory4]
Pipes.hh:216  virtual Reducer* createCombiner(MapContext& context) const {  [function in HadoopPipes::Factory]
/dports/devel/hadoop/hadoop-1.2.1/c++/Linux-amd64-64/include/hadoop/
TemplateFactory.hh:51  Reducer* createCombiner(MapContext& context) const {  [function in HadoopPipes::TemplateFactory4]
Pipes.hh:216  virtual Reducer* createCombiner(MapContext& context) const {  [function in HadoopPipes::Factory]
/dports/devel/hadoop2/hadoop-2.7.2-src/hadoop-tools/hadoop-pipes/src/main/native/pipes/api/hadoop/
TemplateFactory.hh:51  Reducer* createCombiner(MapContext& context) const {  [function in HadoopPipes::TemplateFactory4]
Pipes.hh:216  virtual Reducer* createCombiner(MapContext& context) const {  [function in HadoopPipes::Factory]
/dports/devel/hadoop/hadoop-1.2.1/src/c++/pipes/api/hadoop/
TemplateFactory.hh:51  Reducer* createCombiner(MapContext& context) const {  [function in HadoopPipes::TemplateFactory4]
Pipes.hh:216  virtual Reducer* createCombiner(MapContext& context) const {  [function in HadoopPipes::Factory]
/dports/devel/spark/spark-2.1.1/streaming/src/main/scala/org/apache/spark/streaming/dstream/
PairDStreamFunctions.scala:69  val createCombiner = (v: V) => ArrayBuffer[V](v)  [constant]
PairDStreamFunctions.scala:193  val createCombiner = (v: Iterable[V]) => new ArrayBuffer[V] ++= v  [constant]
/dports/devel/spark/spark-2.1.1/R/pkg/R/
pairRDD.R:441  signature(x = "RDD", createCombiner = "ANY", mergeValue = "ANY",  [argument]
pairRDD.R:443  function(x, createCombiner, mergeValue, mergeCombiners, numPartitions) {  [argument]
pairRDD.R:508  createCombiner <- function(v) {  [function]
generics.R:304  function(x, createCombiner, mergeValue, mergeCombiners, numPartitions) {  [argument]
/dports/devel/spark/spark-2.1.1/core/src/main/scala/org/apache/spark/util/collection/
ExternalSorter.scala:186  val createCombiner = aggregator.get.createCombiner  [constant]
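
ExternalSorter.scala:186 unwraps an optional org.apache.spark.Aggregator, which simply bundles the three shuffle functions. A small sketch, runnable in spark-shell, of building such an Aggregator by hand; the String/Int/ArrayBuffer types are illustrative and mirror the test-suite combiners listed earlier, not code from ExternalSorter itself:

    import org.apache.spark.Aggregator
    import scala.collection.mutable.ArrayBuffer

    // A hypothetical aggregator that groups Int values per key into ArrayBuffers.
    val agg = new Aggregator[String, Int, ArrayBuffer[Int]](
      createCombiner = (v: Int) => ArrayBuffer(v),                          // first value for a key
      mergeValue     = (buf: ArrayBuffer[Int], v: Int) => buf += v,         // fold in further values
      mergeCombiners = (b1: ArrayBuffer[Int], b2: ArrayBuffer[Int]) => b1 ++= b2)

    // ExternalSorter line 186 does essentially this: it unwraps the optional
    // aggregator and keeps a local reference to its createCombiner function.
    val createCombiner: Int => ArrayBuffer[Int] = agg.createCombiner
    println(createCombiner(42))   // ArrayBuffer(42)
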
/dports/devel/spark/spark-2.1.1/python/pyspark/
shuffle.py:95  def __init__(self, createCombiner, mergeValue, mergeCombiners):  [argument]
rdd.py:1791  def combineByKey(self, createCombiner, mergeValue, mergeCombiners,  [argument]
rdd.py:1894  def createCombiner(x):  [function]
/dports/devel/spark/spark-2.1.1/python/pyspark/streaming/
dstream.py:134  def combineByKey(self, createCombiner, mergeValue, mergeCombiners,  [argument]
/dports/graphics/ossim/ossim-OrchidIsland-2.11.1/src/util/
ossimChipperUtil.cpp:182  ossimRefPtr<ossimImageSource> ossimChipperUtil::createCombiner() const  [function in ossimChipperUtil]