/*
 * Copyright (c) 2007-2014 Concurrent, Inc. All Rights Reserved.
 *
 * Project and contact information: http://www.cascading.org/
 *
 * This file is part of the Cascading project.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package cascading.stats.hadoop;

import java.util.HashMap;
import java.util.Map;

import cascading.stats.CascadingStats;
import org.apache.hadoop.mapreduce.Counter;
import org.apache.hadoop.mapreduce.CounterGroup;
import org.apache.hadoop.mapreduce.Counters;
import org.apache.hadoop.mapreduce.TaskCompletionEvent;
import org.apache.hadoop.mapreduce.TaskReport;

import static cascading.stats.CascadingStats.Status.*;

/** Class HadoopSliceStats tracks individual task (slice) stats.
*/ 036 public class HadoopSliceStats 037 { 038 private final CascadingStats.Status parentStatus; 039 040 public static class HadoopAttempt 041 { 042 private final TaskCompletionEvent event; 043 044 public HadoopAttempt( TaskCompletionEvent event ) 045 { 046 this.event = event; 047 } 048 049 public int getEventId() 050 { 051 return event.getEventId(); 052 } 053 054 public int getTaskRunTime() 055 { 056 return event.getTaskRunTime(); 057 } 058 059 public String getTaskStatus() 060 { 061 return event.getStatus().toString(); 062 } 063 064 public String getTaskTrackerHttp() 065 { 066 return event.getTaskTrackerHttp(); 067 } 068 069 public CascadingStats.Status getStatusFor() 070 { 071 CascadingStats.Status status = null; 072 073 switch( event.getStatus() ) 074 { 075 case FAILED: 076 status = FAILED; 077 break; 078 case KILLED: 079 status = STOPPED; 080 break; 081 case SUCCEEDED: 082 status = SUCCESSFUL; 083 break; 084 case OBSOLETE: 085 status = SKIPPED; 086 break; 087 case TIPFAILED: 088 status = FAILED; 089 break; 090 } 091 return status; 092 } 093 } 094 095 public enum Kind 096 { 097 SETUP, MAPPER, REDUCER, CLEANUP 098 } 099 100 private String id; 101 private Kind kind; 102 private final boolean parentStepHasReducers; 103 private TaskReport taskReport; 104 private Map<String, Map<String, Long>> counters; 105 106 private Map<Integer, HadoopAttempt> attempts = new HashMap<Integer, HadoopAttempt>(); 107 108 HadoopSliceStats( String id, CascadingStats.Status parentStatus, Kind kind, boolean parentStepHasReducers, TaskReport taskReport ) 109 { 110 this.parentStatus = parentStatus; 111 this.id = id; 112 this.kind = kind; 113 this.parentStepHasReducers = parentStepHasReducers; 114 this.taskReport = taskReport; 115 } 116 117 public String getID() 118 { 119 return id; 120 } 121 122 public Kind getKind() 123 { 124 return kind; 125 } 126 127 /** 128 * Method getId returns the Hadoop task id. 129 * 130 * @return the id (type String) of this HadoopTaskStats object. 
131 */ 132 public String getTaskID() 133 { 134 return taskReport.getTaskID().toString(); 135 } 136 137 public int getTaskIDNum() 138 { 139 return taskReport.getTaskID().getId(); 140 } 141 142 public String getJobID() 143 { 144 return taskReport.getTaskID().getJobID().toString(); 145 } 146 147 public boolean parentStepHasReducers() 148 { 149 return parentStepHasReducers; 150 } 151 152 public float getProgress() 153 { 154 return taskReport.getProgress(); 155 } 156 157 public String getState() 158 { 159 return taskReport.getState(); 160 } 161 162 public long getStartTime() 163 { 164 return taskReport.getStartTime(); 165 } 166 167 public long getFinishTime() 168 { 169 return taskReport.getFinishTime(); 170 } 171 172 public CascadingStats.Status getParentStatus() 173 { 174 return parentStatus; 175 } 176 177 public CascadingStats.Status getStatus() 178 { 179 CascadingStats.Status status = null; 180 181 switch( taskReport.getCurrentStatus() ) 182 { 183 case PENDING: 184 status = PENDING; 185 break; 186 case RUNNING: 187 status = RUNNING; 188 break; 189 case COMPLETE: 190 status = SUCCESSFUL; 191 break; 192 case KILLED: 193 status = STOPPED; 194 break; 195 case FAILED: 196 status = FAILED; 197 break; 198 } 199 200 return status; 201 } 202 203 public String[] getDiagnostics() 204 { 205 return taskReport.getDiagnostics(); 206 } 207 208 public Map<String, Map<String, Long>> getCounters() 209 { 210 if( counters == null ) 211 setCounters( taskReport ); 212 213 return counters; 214 } 215 216 public Map<Integer, HadoopAttempt> getAttempts() 217 { 218 return attempts; 219 } 220 221 private void setCounters( TaskReport taskReport ) 222 { 223 this.counters = new HashMap<String, Map<String, Long>>(); 224 225 Counters hadoopCounters = taskReport.getTaskCounters(); 226 227 for( CounterGroup group : hadoopCounters ) 228 { 229 Map<String, Long> values = new HashMap<String, Long>(); 230 this.counters.put( group.getName(), values ); 231 232 for( Counter counter : group ) 233 values.put( 
counter.getName(), counter.getValue() ); 234 } 235 } 236 237 /** 238 * Method getCounterValue returns the raw Hadoop counter value. 239 * 240 * @param counter of Enum 241 * @return long 242 */ 243 public long getCounterValue( Enum counter ) 244 { 245 return getCounterValue( counter.getDeclaringClass().getName(), counter.name() ); 246 } 247 248 /** 249 * Method getCounterValue returns the raw Hadoop counter value. 250 * 251 * @param group of String 252 * @param name of String 253 * @return long 254 */ 255 public long getCounterValue( String group, String name ) 256 { 257 if( getCounters() == null || getCounters().get( group ) == null ) 258 return 0; 259 260 Long value = getCounters().get( group ).get( name ); 261 262 if( value == null ) 263 return 0; 264 265 return value; 266 } 267 268 public void addAttempt( TaskCompletionEvent event ) 269 { 270 attempts.put( event.getEventId(), new HadoopAttempt( event ) ); 271 } 272 273 @Override 274 public String toString() 275 { 276 final StringBuilder sb = new StringBuilder(); 277 sb.append( "HadoopTaskStats" ); 278 sb.append( "{id='" ).append( id ).append( '\'' ); 279 sb.append( ", kind=" ).append( kind ); 280 sb.append( '}' ); 281 return sb.toString(); 282 } 283 }