public class SparkJobStats extends JobStats
Nested classes/interfaces inherited from class JobStats: JobStats.JobState

| Modifier and Type | Field and Description |
|---|---|
protected org.apache.hadoop.mapred.Counters |
counters |
static java.lang.String |
FS_COUNTER_GROUP |
Fields inherited from class JobStats: ALIAS, ALIAS_LOCATION, conf, FAILURE_HEADER, FEATURE, hdfsBytesRead, hdfsBytesWritten, inputs, outputs, state, SUCCESS_HEADER

| Modifier | Constructor and Description |
|---|---|
protected |
SparkJobStats(int jobId,
PigStats.JobGraph plan,
org.apache.hadoop.conf.Configuration conf) |
protected |
SparkJobStats(java.lang.String jobId,
PigStats.JobGraph plan,
org.apache.hadoop.conf.Configuration conf) |
| Modifier and Type | Method and Description |
|---|---|
void |
accept(PlanVisitor v)
Accept a visitor at this node in the graph.
|
void |
addInputStats(POLoad po,
boolean success,
boolean singleInput) |
void |
addOutputInfo(POStore poStore,
boolean success,
JobStatisticCollector jobStatisticCollector) |
void |
collectStats(JobStatisticCollector jobStatisticCollector) |
protected java.util.Map<java.lang.String,java.lang.Long> |
combineTaskMetrics(java.util.Map<java.lang.String,java.util.List<TaskMetrics>> jobMetric) |
long |
getAvgMapTime() |
long |
getAvgREduceTime() |
java.lang.String |
getDisplayString() |
org.apache.hadoop.mapred.Counters |
getHadoopCounters() |
java.lang.String |
getJobId() |
long |
getMapInputRecords() |
long |
getMapOutputRecords() |
long |
getMaxMapTime() |
long |
getMaxReduceTime() |
long |
getMinMapTime() |
long |
getMinReduceTime() |
java.util.Map<java.lang.String,java.lang.Long> |
getMultiInputCounters() |
java.util.Map<java.lang.String,java.lang.Long> |
getMultiStoreCounters() |
int |
getNumberMaps() |
int |
getNumberReduces() |
long |
getProactiveSpillCountObjects() |
long |
getProactiveSpillCountRecs() |
long |
getReduceInputRecords() |
long |
getReduceOutputRecords() |
long |
getSMMSpillCount() |
java.util.Map<java.lang.String,java.lang.Long> |
getStats() |
java.util.Map<java.lang.String,SparkCounter<java.util.Map<java.lang.String,java.lang.Long>>> |
getWarningCounters() |
void |
initWarningCounters() |
void |
setAlias(SparkOperator sparkOperator) |
void |
setConf(org.apache.hadoop.conf.Configuration conf) |
Methods inherited from class JobStats: calculateMedianValue, getAlias, getAliasLocation, getBytesWritten, getErrorMessage, getException, getFeature, getHdfsBytesRead, getHdfsBytesWritten, getInputs, getOutputs, getOutputSize, getRecordWrittern, getState, isEqual, isIndexer, isSampler, isSuccessful, setBackendException, setErrorMsg, setSuccessful

Methods inherited from class Operator: annotate, getAnnotation, getLocation, getName, getPlan, removeAnnotation, setLocation, setPlan

Field Detail

protected org.apache.hadoop.mapred.Counters counters

public static java.lang.String FS_COUNTER_GROUP
protected SparkJobStats(int jobId,
PigStats.JobGraph plan,
org.apache.hadoop.conf.Configuration conf)
protected SparkJobStats(java.lang.String jobId,
PigStats.JobGraph plan,
org.apache.hadoop.conf.Configuration conf)
public void setConf(org.apache.hadoop.conf.Configuration conf)
public void addOutputInfo(POStore poStore, boolean success, JobStatisticCollector jobStatisticCollector)
public void addInputStats(POLoad po, boolean success, boolean singleInput)
public void collectStats(JobStatisticCollector jobStatisticCollector)
protected java.util.Map<java.lang.String,java.lang.Long> combineTaskMetrics(java.util.Map<java.lang.String,java.util.List<TaskMetrics>> jobMetric)
public java.util.Map<java.lang.String,java.lang.Long> getStats()
public void accept(PlanVisitor v) throws FrontendException
Specified by: accept in class Operator. Overrides: accept in class JobStats. Parameters: v - Visitor to accept. Throws: FrontendException.

public java.lang.String getDisplayString()
getDisplayString in class JobStatspublic int getNumberMaps()
getNumberMaps in class JobStatspublic int getNumberReduces()
getNumberReduces in class JobStatspublic long getMaxMapTime()
getMaxMapTime in class JobStatspublic long getMinMapTime()
getMinMapTime in class JobStatspublic long getAvgMapTime()
getAvgMapTime in class JobStatspublic long getMaxReduceTime()
getMaxReduceTime in class JobStatspublic long getMinReduceTime()
getMinReduceTime in class JobStatspublic long getAvgREduceTime()
getAvgREduceTime in class JobStatspublic long getMapInputRecords()
getMapInputRecords in class JobStatspublic long getMapOutputRecords()
getMapOutputRecords in class JobStatspublic long getReduceInputRecords()
getReduceInputRecords in class JobStatspublic long getReduceOutputRecords()
getReduceOutputRecords in class JobStatspublic long getSMMSpillCount()
getSMMSpillCount in class JobStatspublic long getProactiveSpillCountObjects()
getProactiveSpillCountObjects in class JobStatspublic long getProactiveSpillCountRecs()
getProactiveSpillCountRecs in class JobStatspublic org.apache.hadoop.mapred.Counters getHadoopCounters()
getHadoopCounters in class JobStatspublic java.util.Map<java.lang.String,java.lang.Long> getMultiStoreCounters()
getMultiStoreCounters in class JobStatspublic java.util.Map<java.lang.String,java.lang.Long> getMultiInputCounters()
getMultiInputCounters in class JobStatspublic void setAlias(SparkOperator sparkOperator)
public java.util.Map<java.lang.String,SparkCounter<java.util.Map<java.lang.String,java.lang.Long>>> getWarningCounters()
public void initWarningCounters()
Copyright © 2007-2025 The Apache Software Foundation