
Searched refs: getJobConf (Results 1 – 25 of 98) sorted by relevance


/dports/devel/hadoop2/hadoop-2.7.2-src/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/
CustomOutputCommitter.java
42 writeFile(jobContext.getJobConf(), JOB_SETUP_FILE_NAME); in setupJob()
48 writeFile(jobContext.getJobConf(), JOB_COMMIT_FILE_NAME); in commitJob()
55 writeFile(jobContext.getJobConf(), JOB_ABORT_FILE_NAME); in abortJob()
60 writeFile(taskContext.getJobConf(), TASK_SETUP_FILE_NAME); in setupTask()
71 writeFile(taskContext.getJobConf(), TASK_COMMIT_FILE_NAME); in commitTask()
76 writeFile(taskContext.getJobConf(), TASK_ABORT_FILE_NAME); in abortTask()
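The CustomOutputCommitter hits above all share one pattern: each job- and task-lifecycle callback pulls the JobConf out of its context and drops a marker file. A minimal sketch of that pattern against the old org.apache.hadoop.mapred API follows; the writeFile helper and the marker-file names here are assumptions for illustration, not copied from the real class.

    import java.io.IOException;

    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.mapred.FileOutputFormat;
    import org.apache.hadoop.mapred.JobConf;
    import org.apache.hadoop.mapred.JobContext;
    import org.apache.hadoop.mapred.OutputCommitter;
    import org.apache.hadoop.mapred.TaskAttemptContext;

    // Sketch only: a marker-file committer in the style of the hits above.
    public class MarkerFileCommitter extends OutputCommitter {
      // Assumed marker names, stand-ins for the real constants.
      static final String JOB_SETUP_FILE_NAME = "_job_setup";
      static final String JOB_COMMIT_FILE_NAME = "_job_commit";
      static final String TASK_COMMIT_FILE_NAME = "_task_commit";

      // Create an empty marker file in the job's output directory.
      private void writeFile(JobConf conf, String name) throws IOException {
        Path out = FileOutputFormat.getOutputPath(conf);
        FileSystem fs = out.getFileSystem(conf);
        fs.create(new Path(out, name)).close();
      }

      @Override
      public void setupJob(JobContext jobContext) throws IOException {
        // JobContext.getJobConf() hands back the job's JobConf (old mapred API).
        writeFile(jobContext.getJobConf(), JOB_SETUP_FILE_NAME);
      }

      @Override
      public void commitJob(JobContext jobContext) throws IOException {
        writeFile(jobContext.getJobConf(), JOB_COMMIT_FILE_NAME);
      }

      @Override
      public void setupTask(TaskAttemptContext taskContext) throws IOException { }

      @Override
      public boolean needsTaskCommit(TaskAttemptContext taskContext) { return true; }

      @Override
      public void commitTask(TaskAttemptContext taskContext) throws IOException {
        // TaskAttemptContext exposes getJobConf() as well, as in the hits above.
        writeFile(taskContext.getJobConf(), TASK_COMMIT_FILE_NAME);
      }

      @Override
      public void abortTask(TaskAttemptContext taskContext) throws IOException { }
    }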
/dports/devel/hadoop/hadoop-1.2.1/src/mapred/org/apache/hadoop/mapred/
FileOutputCommitter.java
47 JobConf conf = context.getJobConf(); in setupJob()
66 JobConf conf = context.getJobConf(); in markSuccessfulOutputDir()
81 if (getOutputDirMarking(context.getJobConf())) { in commitJob()
89 JobConf conf = context.getJobConf(); in cleanupJob()
125 JobConf job = context.getJobConf(); in commitTask()
182 FileSystem fs = taskOutputPath.getFileSystem(context.getJobConf()); in abortTask()
213 FileSystem fs = taskOutputPath.getFileSystem(context.getJobConf()); in needsTaskCommit()
227 JobConf conf = taskContext.getJobConf(); in getTempTaskOutputPath()
248 FileSystem fs = jobTmpDir.getFileSystem(taskContext.getJobConf()); in getWorkPath()
TaskAttemptContext.java
49 public JobConf getJobConf() { in getJobConf() method in TaskAttemptContext
JobContext.java
42 public JobConf getJobConf() { in getJobConf() method in JobContext
/dports/devel/hadoop/hadoop-1.2.1/src/contrib/vaidya/src/java/org/apache/hadoop/vaidya/postexdiagnosis/tests/
MapSideDiskSpill.java
101 …"* Increase map side sort buffer size (io.sort.mb:"+this._job.getJobConf().getInt("io.sort.mb", 0)… in getPrescription()
102 …"* Increase index buffer size (io.sort.record.percent:"+ this._job.getJobConf().getInt("io.sort.re… in getPrescription()
103 …"* Increase (io.sort.spill.percent:"+ this._job.getJobConf().getInt("io.sort.spill.percent", 0) + … in getPrescription()
/dports/devel/hadoop/hadoop-1.2.1/src/contrib/capacity-scheduler/src/test/org/apache/hadoop/mapred/
TestCapacitySchedulerWithJobTracker.java
53 JobConf conf = getJobConf(); in testFailingJobInitalization()
101 JobConf conf = getJobConf(); in testJobTrackerIntegration()
111 JobConf conf2 = getJobConf(); in testJobTrackerIntegration()
/dports/devel/hadoop2/hadoop-2.7.2-src/hadoop-tools/hadoop-pipes/src/main/native/examples/impl/
sort.cc
36 const HadoopPipes::JobConf* conf = context.getJobConf(); in SortMap()
68 const HadoopPipes::JobConf* conf = context.getJobConf(); in SortReduce()
/dports/devel/hadoop/hadoop-1.2.1/src/examples/pipes/impl/
sort.cc
36 const HadoopPipes::JobConf* conf = context.getJobConf(); in SortMap()
68 const HadoopPipes::JobConf* conf = context.getJobConf(); in SortReduce()
/dports/devel/hadoop2/hadoop-2.7.2-src/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/
TestMiniMRProxyUser.java
90 protected JobConf getJobConf() { in getJobConf() method in TestMiniMRProxyUser
106 FileSystem fs = FileSystem.get(getJobConf()); in mrRun()
115 JobConf jobConf = new JobConf(getJobConf()); in mrRun()
TestNonExistentJob.java
77 protected JobConf getJobConf() { in getJobConf() method in TestNonExistentJob
93 RunningJob runJob = new JobClient(getJobConf()).getJob(JobID.forName("job_0_0")); in testGetInvalidJob()
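TestMiniMRProxyUser, TestNonExistentJob, and TestEncryptedShuffle (further down) all hang their setup on a protected getJobConf() hook and then reuse it to reach the cluster's FileSystem, clone a per-job JobConf, or open a JobClient. A rough, self-contained illustration of that pattern follows; the mini-cluster wiring and the helper names are assumed for the sketch, not taken from these tests.

    import java.io.IOException;

    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.mapred.JobClient;
    import org.apache.hadoop.mapred.JobConf;
    import org.apache.hadoop.mapred.JobID;
    import org.apache.hadoop.mapred.RunningJob;

    // Sketch only: the protected getJobConf() hook pattern used by the test classes above.
    public abstract class MiniClusterTestSketch {

      // In the real tests this returns the mini MR cluster's configuration;
      // it is left abstract here so the sketch stays self-contained.
      protected abstract JobConf getJobConf() throws IOException;

      protected void mrRun() throws IOException {
        // Reach the cluster's file system through the job configuration.
        FileSystem fs = FileSystem.get(getJobConf());
        fs.mkdirs(new Path("/tmp/sketch-input"));

        // Copy the base configuration so per-job settings stay local to this run.
        JobConf jobConf = new JobConf(getJobConf());
        jobConf.setJobName("getJobConf-sketch");
      }

      protected void lookupMissingJob() throws IOException {
        // TestNonExistentJob's check: asking for an unknown job id yields null.
        RunningJob runJob = new JobClient(getJobConf()).getJob(JobID.forName("job_0_0"));
        if (runJob != null) {
          throw new IllegalStateException("expected no such job");
        }
      }
    }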
/dports/devel/hadoop/hadoop-1.2.1/src/test/org/apache/hadoop/mapred/
TestTaskTrackerLocalization.java
412 localizedJobConf = rjob.getJobConf(); in testJobLocalization()
529 localizedJobConf = rjob.getJobConf(); in testTaskLocalization()
757 localizedJobConf = rjob.getJobConf(); in testTaskFilesRemoval()
807 localizedJobConf = rjob.getJobConf(); in testJobFilesRemoval()
895 localizedJobConf = rjob.getJobConf(); in testTrackerRestart()
909 localizedJobConf = rjob.getJobConf(); in testTrackerRestart()
937 localizedJobConf = rjob.getJobConf(); in testTrackerReinit()
951 localizedJobConf = rjob.getJobConf(); in testTrackerReinit()
978 localizedJobConf = rjob.getJobConf(); in testCleanupTaskLocalization()
FakeObjectUtilities.java
81 JobSplit.EMPTY_TASK_SPLIT, jobtracker, getJobConf(), this, i, 1); in initTasks()
88 jobtracker, getJobConf(), this, 1); in initTasks()
110 if (getJobConf().getSpeculativeExecution()) { in findTask()
/dports/devel/hadoop2/hadoop-2.7.2-src/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/security/ssl/
TestEncryptedShuffle.java
124 protected JobConf getJobConf() throws IOException { in getJobConf() method in TestEncryptedShuffle
139 FileSystem fs = FileSystem.get(getJobConf()); in encryptedShuffleWithCerts()
149 JobConf jobConf = new JobConf(getJobConf()); in encryptedShuffleWithCerts()
/dports/devel/hadoop2/hadoop-2.7.2-src/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/
FileOutputCommitter.java
51 JobConf conf = context.getJobConf(); in getOutputPath()
56 JobConf conf = context.getJobConf(); in getOutputPath()
101 Path workPath = FileOutputFormat.getWorkOutputPath(context.getJobConf()); in getTaskAttemptPath()
JobContext.java
33 public JobConf getJobConf(); in getJobConf() method
TaskAttemptContext.java
34 public JobConf getJobConf(); in getJobConf() method
JobContextImpl.java
48 public JobConf getJobConf() { in getJobConf() method in JobContextImpl
MapOutputCollector.java
57 public JobConf getJobConf() { in getJobConf() method in MapOutputCollector.Context
/dports/devel/spark/spark-2.1.1/core/src/main/scala/org/apache/spark/rdd/
HadoopRDD.scala
146 protected def getJobConf(): JobConf = { method
198 val jobConf = getJobConf()
215 private val jobConf = getJobConf()
356 def getConf: Configuration = getJobConf()
/dports/devel/hadoop2/hadoop-2.7.2-src/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/
JobStory.java
36 public JobConf getJobConf(); in getJobConf() method
/dports/devel/hadoop/hadoop-1.2.1/src/tools/org/apache/hadoop/tools/rumen/
JobStory.java
36 public JobConf getJobConf(); in getJobConf() method
/dports/devel/hadoop/hadoop-1.2.1/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/
GridmixSystemTestCase.java
131 gridmixJS.getJobConf(), jtClient); in runGridmixAndVerify()
191 JobConf origJobConf = zombieJob.getJobConf(); in validateTaskMemoryParamters()
/dports/devel/hadoop/hadoop-1.2.1/src/contrib/gridmix/src/java/org/apache/hadoop/mapred/gridmix/
JobFactory.java
142 public JobConf getJobConf() { return job.getJobConf(); } in getJobConf() method in JobFactory.FilterJobStory
/dports/devel/hadoop2/hadoop-2.7.2-src/hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/
JobFactory.java
144 public JobConf getJobConf() { return job.getJobConf(); } in getJobConf() method in JobFactory.FilterJobStory
/dports/devel/hadoop/hadoop-1.2.1/src/contrib/fairscheduler/src/test/org/apache/hadoop/mapred/
TestFairScheduler.java
92 JobConf conf = getJobConf(); in initTasks()
133 getJobConf(), this, jobtracker); in initTasks()
1495 job1.getJobConf().set("user.name", "user1"); in testUserMaxJobs()
1499 job2.getJobConf().set("user.name", "user1"); in testUserMaxJobs()
1503 job3.getJobConf().set("user.name", "user2"); in testUserMaxJobs()
1507 job4.getJobConf().set("user.name", "user2"); in testUserMaxJobs()
1561 job1.getJobConf().set("user.name", "user1"); in testComplexJobLimits()
1565 job2.getJobConf().set("user.name", "user1"); in testComplexJobLimits()
1571 job3.getJobConf().set("user.name", "user2"); in testComplexJobLimits()
1575 job4.getJobConf().set("user.name", "user2"); in testComplexJobLimits()
[all …]
