
Searched refs:outputRecords (Results 1 – 19 of 19) sorted by relevance

/dports/devel/hadoop2/hadoop-2.7.2-src/hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/
GridmixSplit.java
34 private long outputRecords; field in GridmixSplit
49 long outputRecords, double[] reduceBytes, double[] reduceRecords, in GridmixSplit() argument
58 this.outputRecords = outputRecords; in GridmixSplit()
86 return new long[] { outputRecords }; in getOutputRecords()
90 ret[i] = Math.round(outputRecords * reduceRecords[i]); in getOutputRecords()
107 WritableUtils.writeVLong(out, outputRecords); in write()
127 outputRecords = WritableUtils.readVLong(in); in readFields()
LoadSplit.java
35 private long outputRecords; field in LoadSplit
52 long inputRecords, long outputBytes, long outputRecords, in LoadSplit() argument
64 this.outputRecords = outputRecords; in LoadSplit()
95 return new long[] { outputRecords }; in getOutputRecords()
99 ret[i] = Math.round(outputRecords * reduceRecords[i]); in getOutputRecords()
125 WritableUtils.writeVLong(out, outputRecords); in write()
151 outputRecords = WritableUtils.readVLong(in); in readFields()
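
The gridmix matches above all exercise the same Hadoop Writable pattern: outputRecords is held as a long, VLong-encoded in write() via WritableUtils.writeVLong, restored in readFields() via WritableUtils.readVLong, and apportioned per reducer in getOutputRecords() by the reduceRecords fractions. A minimal self-contained sketch of that pattern (the class name SplitSketch and its reduced field set are illustrative, not the actual GridmixSplit/LoadSplit code):

    import java.io.DataInput;
    import java.io.DataOutput;
    import java.io.IOException;

    import org.apache.hadoop.io.Writable;
    import org.apache.hadoop.io.WritableUtils;

    // Illustrative sketch only: a VLong-encoded record count that is
    // split across reducers by per-reducer fractions, as in the lines above.
    public class SplitSketch implements Writable {
      private long outputRecords;
      private double[] reduceRecords = new double[0];

      public long[] getOutputRecords() {
        if (reduceRecords.length == 0) {
          return new long[] { outputRecords };                    // no fractions: single total
        }
        long[] ret = new long[reduceRecords.length];
        for (int i = 0; i < ret.length; ++i) {
          ret[i] = Math.round(outputRecords * reduceRecords[i]);  // per-reducer share
        }
        return ret;
      }

      @Override
      public void write(DataOutput out) throws IOException {
        WritableUtils.writeVLong(out, outputRecords);             // variable-length long
        WritableUtils.writeVInt(out, reduceRecords.length);
        for (double frac : reduceRecords) {
          out.writeDouble(frac);
        }
      }

      @Override
      public void readFields(DataInput in) throws IOException {
        outputRecords = WritableUtils.readVLong(in);
        reduceRecords = new double[WritableUtils.readVInt(in)];
        for (int i = 0; i < reduceRecords.length; ++i) {
          reduceRecords[i] = in.readDouble();
        }
      }
    }

The real splits carry more fields (input bytes/records, output bytes, reduce byte fractions), but the VLong round trip and the Math.round apportioning are the parts the matches above hit.
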
/dports/devel/hadoop/hadoop-1.2.1/src/contrib/gridmix/src/java/org/apache/hadoop/mapred/gridmix/
LoadSplit.java
34 private long outputRecords; field in LoadSplit
52 long outputRecords, double[] reduceBytes, double[] reduceRecords, in LoadSplit() argument
63 this.outputRecords = outputRecords; in LoadSplit()
94 return new long[] { outputRecords }; in getOutputRecords()
98 ret[i] = Math.round(outputRecords * reduceRecords[i]); in getOutputRecords()
124 WritableUtils.writeVLong(out, outputRecords); in write()
150 outputRecords = WritableUtils.readVLong(in); in readFields()
/dports/devel/hadoop/hadoop-1.2.1/src/test/tools/data/rumen/small-trace-test/
truncated-trace-output
48 "outputRecords" : -1,
93 "outputRecords" : -1,
138 "outputRecords" : -1,
183 "outputRecords" : -1,
228 "outputRecords" : -1,
273 "outputRecords" : -1,
341 "outputRecords" : -1,
386 "outputRecords" : -1,
431 "outputRecords" : -1,
476 "outputRecords" : -1,
[all …]
/dports/devel/hadoop2/hadoop-2.7.2-src/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/
LoggedTask.java
47 long outputRecords = -1L; field in LoggedTask
107 return outputRecords; in getOutputRecords()
110 void setOutputRecords(long outputRecords) { in setOutputRecords() argument
111 this.outputRecords = outputRecords; in setOutputRecords()
200 task.outputRecords = val; in incorporateMapCounters()
227 task.outputRecords = val; in incorporateReduceCounters()
358 compare1(outputRecords, other.outputRecords, loc, "outputRecords"); in deepCompare()
ZombieJob.java
649 long outputRecords = -1; in getTaskInfo() local
673 outputRecords = attempt.getMapOutputRecords(); in getTaskInfo()
681 outputRecords = attempt.getReduceOutputRecords(); in getTaskInfo()
693 (int) outputRecords, (int) heapMegabytes, in getTaskInfo()
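
In the Rumen matches, outputRecords starts at -1L as a sentinel meaning the counter was absent from the job-history trace (the truncated-trace-output fixture above carries the same -1 in its JSON); LoggedTask only overwrites it in incorporateMapCounters()/incorporateReduceCounters(), and ZombieJob.getTaskInfo() reads it back before narrowing to int. A small illustrative sketch of that convention (class and helper names here are placeholders, not the real Rumen code):

    // Placeholder sketch of the -1L "unknown" sentinel used by LoggedTask.
    public class LoggedTaskSketch {
      private long outputRecords = -1L;   // -1 means the counter was not in the trace

      long getOutputRecords() {
        return outputRecords;
      }

      void setOutputRecords(long outputRecords) {
        this.outputRecords = outputRecords;
      }

      // Hypothetical consumer-side guard: substitute a default when unknown,
      // rather than treating -1 as a real record count.
      static long orDefault(long recorded, long fallback) {
        return recorded < 0 ? fallback : recorded;
      }
    }
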
/dports/devel/hadoop/hadoop-1.2.1/src/tools/org/apache/hadoop/tools/rumen/
LoggedTask.java
41 long outputRecords = -1L; field in LoggedTask
102 return outputRecords; in getOutputRecords()
105 void setOutputRecords(long outputRecords) { in setOutputRecords() argument
106 this.outputRecords = outputRecords; in setOutputRecords()
195 task.outputRecords = val; in incorporateMapCounters()
222 task.outputRecords = val; in incorporateReduceCounters()
353 compare1(outputRecords, other.outputRecords, loc, "outputRecords"); in deepCompare()
ZombieJob.java
635 long outputRecords = -1; in getTaskInfo() local
659 outputRecords = attempt.getMapOutputRecords(); in getTaskInfo()
667 outputRecords = attempt.getReduceOutputRecords(); in getTaskInfo()
679 (int) outputRecords, (int) heapMegabytes, in getTaskInfo()
/dports/biology/gatk/gatk-4.2.0.0/src/test/java/org/broadinstitute/hellbender/testutils/testers/
MarkDuplicatesSparkTester.java
107 int outputRecords = 0; in test() local
110 outputRecords++; in test()
127 …Assert.assertEquals(outputRecords, this.getNumberOfRecords(), ("saw " + outputRecords + " output r… in test()
/dports/devel/hadoop2/hadoop-2.7.2-src/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics/
TestMetricsServlet.java
67 Collection<OutputRecord> outputRecords = m.values().iterator().next(); in setUp() local
68 assertEquals(1, outputRecords.size()); in setUp()
69 outputRecord = outputRecords.iterator().next(); in setUp()
/dports/devel/py-opengrok-tools/opengrok-1.3.16/opengrok-indexer/src/test/resources/analysis/tcl/
sample.tcl
222 set outputRecords {}
229 lappend outputRecords $outputRecord
235 set output [$self Serialize [$self cget -outputformat] $outputRecords \
/dports/devel/spark/spark-2.1.1/core/src/main/scala/org/apache/spark/ui/exec/
ExecutorsTab.scala
52 var outputRecords: Long = 0L, variable
151 taskSummary.outputRecords += metrics.outputMetrics.recordsWritten
/dports/devel/hadoop2/hadoop-2.7.2-src/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/terasort/
GenSort.java
174 public static void outputRecords(OutputStream out, in outputRecords() method in GenSort
244 outputRecords(out, useAscii, startingRecord, numberOfRecords, checksum); in main()
/dports/devel/spark/spark-2.1.1/core/src/main/scala/org/apache/spark/ui/jobs/
UIData.scala
39 var outputRecords : Long = 0 variable
88 var outputRecords: Long = _ variable
StagePage.scala
451 val outputRecords = validTasks.map { taskUIData: TaskUIData => constant
456 getFormattedSizeQuantilesWithRecords(outputSizes, outputRecords)
922 val outputRecords = maybeOutput.map(_.recordsWritten.toString).getOrElse("") constant
969 Some(TaskTableRowOutputData(outputSortable, s"$outputReadable / $outputRecords"))
JobProgressListener.scala
490 stageData.outputRecords += outputRecordsDelta
491 execSummary.outputRecords += outputRecordsDelta
/dports/databases/hbase/hbase-1.2.1/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/
Import.java
553 long outputRecords = job.getCounters().findCounter(TaskCounter.MAP_OUTPUT_RECORDS).getValue(); in main() local
554 if (outputRecords < inputRecords) { in main()
556 if (outputRecords == 0) { in main()
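
The Import.java match reads the MAP_OUTPUT_RECORDS counter from the finished job and compares it against the input record count to detect dropped rows. A hedged sketch of that kind of post-job check, assuming the standard org.apache.hadoop.mapreduce Job/TaskCounter API; the method name, the warning text, and reading inputRecords from MAP_INPUT_RECORDS are assumptions, not the actual HBase code:

    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hadoop.mapreduce.TaskCounter;

    // Illustrative post-job sanity check in the spirit of the Import.java lines above.
    public class CounterCheckSketch {
      static void warnIfRecordsDropped(Job job) throws Exception {
        long inputRecords =
            job.getCounters().findCounter(TaskCounter.MAP_INPUT_RECORDS).getValue();
        long outputRecords =
            job.getCounters().findCounter(TaskCounter.MAP_OUTPUT_RECORDS).getValue();
        if (outputRecords < inputRecords) {
          System.err.println("Warning: " + (inputRecords - outputRecords)
              + " map input records produced no output");
          if (outputRecords == 0) {
            System.err.println("No records were written at all; check the job configuration");
          }
        }
      }
    }
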
/dports/devel/spark/spark-2.1.1/core/src/main/scala/org/apache/spark/status/api/v1/
AllStagesResource.scala
111 outputRecords = stageUiData.outputRecords,
api.scala
139 val outputRecords: Long, constant