/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.mapred;

import java.io.IOException;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.Writer;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.TestMRJobClient;
import org.apache.hadoop.mapreduce.tools.CLI;
import org.apache.hadoop.util.Tool;
import org.junit.Ignore;

/**
 * Tests the old-API ({@code org.apache.hadoop.mapred}) job client by running
 * an identity MapReduce job on the test cluster and exercising the job-client
 * operations inherited from {@link TestMRJobClient} (counters, job listing,
 * priority changes).
 */
@Ignore
public class TestMRCJCJobClient extends TestMRJobClient {

  /**
   * Writes a small text input file, then runs a HIGH-priority identity
   * map/reduce job over it using the old mapred API.
   *
   * @return the string form of the completed job's ID
   * @throws Exception if the file system or job submission fails
   */
  private String runJob() throws Exception {
    OutputStream os = getFileSystem().create(new Path(getInputDir(),
        "text.txt"));
    // try-with-resources guarantees the writer (and underlying stream) is
    // closed even if a write fails; the original leaked on exception.
    try (Writer wr = new OutputStreamWriter(os)) {
      wr.write("hello1\n");
      wr.write("hello2\n");
      wr.write("hello3\n");
    }

    JobConf conf = createJobConf();
    conf.setJobName("mr");
    conf.setJobPriority(JobPriority.HIGH);

    conf.setInputFormat(TextInputFormat.class);

    conf.setMapOutputKeyClass(LongWritable.class);
    conf.setMapOutputValueClass(Text.class);

    conf.setOutputFormat(TextOutputFormat.class);
    conf.setOutputKeyClass(LongWritable.class);
    conf.setOutputValueClass(Text.class);

    // Identity mapper/reducer: the job's only purpose is to complete so the
    // client-side operations can be tested against a real job ID.
    conf.setMapperClass(org.apache.hadoop.mapred.lib.IdentityMapper.class);
    conf.setReducerClass(org.apache.hadoop.mapred.lib.IdentityReducer.class);

    FileInputFormat.setInputPaths(conf, getInputDir());
    FileOutputFormat.setOutputPath(conf, getOutputDir());

    return JobClient.runJob(conf).getID().toString();
  }

  /**
   * Delegates to {@link TestMRJobClient#runTool}; kept so old-API callers
   * have a local entry point with the same signature.
   *
   * @param conf configuration passed to the tool
   * @param tool the CLI tool to run
   * @param args command-line arguments for the tool
   * @param out  stream receiving the tool's output
   * @return the tool's exit code
   * @throws Exception if the tool fails
   */
  public static int runTool(Configuration conf, Tool tool, String[] args,
      OutputStream out) throws Exception {
    return TestMRJobClient.runTool(conf, tool, args, out);
  }

  /**
   * Verifies that the given job has the expected priority, using an
   * old-API {@link JobClient} as the CLI implementation.
   *
   * @param jobId    ID of the job to check
   * @param priority expected priority name
   * @param conf     job configuration used to create the client
   * @throws Exception if the verification fails
   */
  static void verifyJobPriority(String jobId, String priority,
      JobConf conf) throws Exception {
    TestMRCJCJobClient test = new TestMRCJCJobClient();
    test.verifyJobPriority(jobId, priority, conf, test.createJobClient());
  }

  /**
   * End-to-end test: runs the identity job, then exercises the inherited
   * counter, job-list, and priority-change checks against its job ID.
   *
   * @throws Exception if the job or any check fails
   */
  public void testJobClient() throws Exception {
    Configuration conf = createJobConf();
    String jobId = runJob();
    testGetCounter(jobId, conf);
    testAllJobList(jobId, conf);
    testChangingJobPriority(jobId, conf);
  }

  /**
   * Supplies the old-API {@link JobClient} so the inherited tests run
   * against the {@code mapred} client rather than the new-API CLI.
   *
   * @return a new {@link JobClient} instance
   * @throws IOException if client construction fails
   */
  protected CLI createJobClient()
      throws IOException {
    return new JobClient();
  }
}