/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.mapreduce.v2.hs;

import java.io.IOException;
import java.util.List;
import java.util.Map;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.TaskCompletionEvent;
import org.apache.hadoop.mapreduce.JobACL;
import org.apache.hadoop.mapreduce.v2.api.records.AMInfo;
import org.apache.hadoop.mapreduce.v2.api.records.JobId;
import org.apache.hadoop.mapreduce.v2.api.records.JobReport;
import org.apache.hadoop.mapreduce.v2.api.records.JobState;
import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptCompletionEvent;
import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
import org.apache.hadoop.mapreduce.v2.app.MockJobs;
import org.apache.hadoop.mapreduce.v2.app.job.Job;
import org.apache.hadoop.mapreduce.v2.app.job.Task;
import org.apache.hadoop.mapreduce.v2.jobhistory.JobIndexInfo;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authorize.AccessControlList;
import org.apache.hadoop.yarn.api.records.ApplicationId;

import com.google.common.collect.Maps;

/**
 * Test helper that produces mock jobs in the two representations the
 * history server exposes: a "full" view backed by {@link CompletedJob}
 * and a "partial" view backed by {@link PartialJob} (derived from the
 * job index, as the history server would build it when listing jobs).
 */
public class MockHistoryJobs extends MockJobs {

  /** Holds the same set of mock jobs in both history-server representations. */
  public static class JobsPair {
    /** Jobs as listed from the job index (partial view). */
    public Map<JobId, Job> partial;
    /** Jobs as served when fully loaded (full view). */
    public Map<JobId, Job> full;
  }

  /**
   * Creates {@code numJobs} mock jobs, each with {@code numTasksPerJob}
   * tasks and {@code numAttemptsPerTask} attempts per task, and returns
   * them in both partial and full form.
   *
   * @throws IOException if constructing a {@link CompletedJob} wrapper fails
   */
  public static JobsPair newHistoryJobs(int numJobs, int numTasksPerJob,
      int numAttemptsPerTask) throws IOException {
    Map<JobId, Job> mocked = newJobs(numJobs, numTasksPerJob, numAttemptsPerTask);
    return split(mocked);
  }

  /**
   * Same as {@link #newHistoryJobs(int, int, int)} but ties the generated
   * jobs to the given application id.
   *
   * @throws IOException if constructing a {@link CompletedJob} wrapper fails
   */
  public static JobsPair newHistoryJobs(ApplicationId appID, int numJobsPerApp,
      int numTasksPerJob, int numAttemptsPerTask) throws IOException {
    Map<JobId, Job> mocked = newJobs(appID, numJobsPerApp, numTasksPerJob,
        numAttemptsPerTask);
    return split(mocked);
  }

  /**
   * Same as {@link #newHistoryJobs(ApplicationId, int, int, int)} with
   * optional injection of failed tasks.
   *
   * @param hasFailedTasks whether the generated jobs should include failed tasks
   * @throws IOException if constructing a {@link CompletedJob} wrapper fails
   */
  public static JobsPair newHistoryJobs(ApplicationId appID, int numJobsPerApp,
      int numTasksPerJob, int numAttemptsPerTask, boolean hasFailedTasks)
      throws IOException {
    Map<JobId, Job> mocked = newJobs(appID, numJobsPerApp, numTasksPerJob,
        numAttemptsPerTask, hasFailedTasks);
    return split(mocked);
  }

  /**
   * Wraps each mocked job as a {@link MockCompletedJob} (full view) and
   * derives a {@link PartialJob} from a {@link JobIndexInfo} built off the
   * wrapper's report (partial view), mirroring what the history server does.
   */
  private static JobsPair split(Map<JobId, Job> mocked) throws IOException {
    JobsPair ret = new JobsPair();
    ret.full = Maps.newHashMap();
    ret.partial = Maps.newHashMap();
    for (Map.Entry<JobId, Job> entry : mocked.entrySet()) {
      JobId id = entry.getKey();
      Job j = entry.getValue();
      MockCompletedJob mockJob = new MockCompletedJob(j);
      // use MockCompletedJob to set everything below to make sure
      // consistent with what history server would do
      ret.full.put(id, mockJob);
      JobReport report = mockJob.getReport();
      JobIndexInfo info = new JobIndexInfo(report.getStartTime(),
          report.getFinishTime(), mockJob.getUserName(), mockJob.getName(), id,
          mockJob.getCompletedMaps(), mockJob.getCompletedReduces(),
          String.valueOf(mockJob.getState()));
      info.setJobStartTime(report.getStartTime());
      info.setQueueName(mockJob.getQueueName());
      ret.partial.put(id, new PartialJob(info, id));
    }
    return ret;
  }

  /**
   * A {@link CompletedJob} that delegates everything to a wrapped mock
   * {@link Job} instead of loading history files (history loading is
   * stubbed out in {@link #loadFullHistoryData}).
   */
  private static class MockCompletedJob extends CompletedJob {
    private Job job;

    public MockCompletedJob(Job job) throws IOException {
      // loadTasks=true, but loadFullHistoryData below is a no-op so no
      // history file is actually read.
      super(new Configuration(), job.getID(), null, true, job.getUserName(),
          null, null);
      this.job = job;
    }

    @Override
    public int getCompletedMaps() {
      // we always return total since this is history server
      // and PartialJob also assumes completed - total
      return job.getTotalMaps();
    }

    @Override
    public int getCompletedReduces() {
      // we always return total since this is history server
      // and PartialJob also assumes completed - total
      return job.getTotalReduces();
    }

    @Override
    public org.apache.hadoop.mapreduce.Counters getAllCounters() {
      return job.getAllCounters();
    }

    @Override
    public JobId getID() {
      return job.getID();
    }

    @Override
    public JobReport getReport() {
      return job.getReport();
    }

    @Override
    public float getProgress() {
      return job.getProgress();
    }

    @Override
    public JobState getState() {
      return job.getState();
    }

    @Override
    public Task getTask(TaskId taskId) {
      return job.getTask(taskId);
    }

    @Override
    public TaskAttemptCompletionEvent[] getTaskAttemptCompletionEvents(
        int fromEventId, int maxEvents) {
      return job.getTaskAttemptCompletionEvents(fromEventId, maxEvents);
    }

    @Override
    public TaskCompletionEvent[] getMapAttemptCompletionEvents(
        int startIndex, int maxEvents) {
      return job.getMapAttemptCompletionEvents(startIndex, maxEvents);
    }

    @Override
    public Map<TaskId, Task> getTasks() {
      return job.getTasks();
    }

    @Override
    protected void loadFullHistoryData(boolean loadTasks,
        Path historyFileAbsolute) throws IOException {
      // Intentionally empty: the wrapped mock Job supplies all state, so
      // there is no history file to parse.
    }

    @Override
    public List<String> getDiagnostics() {
      return job.getDiagnostics();
    }

    @Override
    public String getName() {
      return job.getName();
    }

    @Override
    public String getQueueName() {
      return job.getQueueName();
    }

    @Override
    public int getTotalMaps() {
      return job.getTotalMaps();
    }

    @Override
    public int getTotalReduces() {
      return job.getTotalReduces();
    }

    @Override
    public boolean isUber() {
      return job.isUber();
    }

    @Override
    public Map<TaskId, Task> getTasks(TaskType taskType) {
      // Fix: delegate with the requested type so only tasks of that type
      // are returned. The original ignored taskType and returned all
      // tasks, which could mask type-filtering bugs in code under test.
      return job.getTasks(taskType);
    }

    @Override
    public boolean checkAccess(UserGroupInformation callerUGI,
        JobACL jobOperation) {
      return job.checkAccess(callerUGI, jobOperation);
    }

    @Override
    public Map<JobACL, AccessControlList> getJobACLs() {
      return job.getJobACLs();
    }

    @Override
    public String getUserName() {
      return job.getUserName();
    }

    @Override
    public Path getConfFile() {
      return job.getConfFile();
    }

    @Override
    public List<AMInfo> getAMInfos() {
      return job.getAMInfos();
    }
  }
}