|
||||||||||
| PREV CLASS NEXT CLASS | FRAMES NO FRAMES | |||||||||
| SUMMARY: NESTED | FIELD | CONSTR | METHOD | DETAIL: FIELD | CONSTR | METHOD | |||||||||
java.lang.Object
org.apache.hadoop.mapreduce.Mapper.Context
org.apache.hadoop.mapreduce.lib.map.WrappedMapper.Context
@InterfaceStability.Evolving public class WrappedMapper.Context
| Field Summary | |
|---|---|
protected MapContext<KEYIN,VALUEIN,KEYOUT,VALUEOUT> |
mapContext
|
| Constructor Summary | |
|---|---|
WrappedMapper.Context(MapContext<KEYIN,VALUEIN,KEYOUT,VALUEOUT> mapContext)
|
|
| Method Summary | |
|---|---|
org.apache.hadoop.fs.Path[] |
getArchiveClassPaths()
Get the archive entries in classpath as an array of Path |
String[] |
getArchiveTimestamps()
Get the timestamps of the archives. |
URI[] |
getCacheArchives()
Get cache archives set in the Configuration |
URI[] |
getCacheFiles()
Get cache files set in the Configuration |
Class<? extends Reducer<?,?,?,?>> |
getCombinerClass()
Get the combiner class for the job. |
org.apache.hadoop.conf.Configuration |
getConfiguration()
Return the configuration for the job. |
Counter |
getCounter(Enum<?> counterName)
Get the Counter for the given counterName. |
Counter |
getCounter(String groupName,
String counterName)
Get the Counter for the given groupName and
counterName. |
org.apache.hadoop.security.Credentials |
getCredentials()
Get credentials for the job. |
KEYIN |
getCurrentKey()
Get the current key. |
VALUEIN |
getCurrentValue()
Get the current value. |
org.apache.hadoop.fs.Path[] |
getFileClassPaths()
Get the file entries in classpath as an array of Path |
String[] |
getFileTimestamps()
Get the timestamps of the files. |
org.apache.hadoop.io.RawComparator<?> |
getGroupingComparator()
Get the user defined RawComparator comparator for
grouping keys of inputs to the reduce. |
Class<? extends InputFormat<?,?>> |
getInputFormatClass()
Get the InputFormat class for the job. |
InputSplit |
getInputSplit()
Get the input split for this map. |
String |
getJar()
Get the pathname of the job's jar. |
JobID |
getJobID()
Get the unique ID for the job. |
String |
getJobName()
Get the user-specified job name. |
boolean |
getJobSetupCleanupNeeded()
Get whether job-setup and job-cleanup are needed for the job. |
org.apache.hadoop.fs.Path[] |
getLocalCacheArchives()
Return the path array of the localized caches |
org.apache.hadoop.fs.Path[] |
getLocalCacheFiles()
Return the path array of the localized files |
Class<?> |
getMapOutputKeyClass()
Get the key class for the map output data. |
Class<?> |
getMapOutputValueClass()
Get the value class for the map output data. |
Class<? extends Mapper<?,?,?,?>> |
getMapperClass()
Get the Mapper class for the job. |
int |
getMaxMapAttempts()
Get the configured number of maximum attempts that will be made to run a map task, as specified by the mapred.map.max.attempts
property. |
int |
getMaxReduceAttempts()
Get the configured number of maximum attempts that will be made to run a reduce task, as specified by the mapred.reduce.max.attempts
property. |
int |
getNumReduceTasks()
Get the configured number of reduce tasks for this job. |
OutputCommitter |
getOutputCommitter()
Get the OutputCommitter for the task-attempt. |
Class<? extends OutputFormat<?,?>> |
getOutputFormatClass()
Get the OutputFormat class for the job. |
Class<?> |
getOutputKeyClass()
Get the key class for the job output data. |
Class<?> |
getOutputValueClass()
Get the value class for job outputs. |
Class<? extends Partitioner<?,?>> |
getPartitionerClass()
Get the Partitioner class for the job. |
boolean |
getProfileEnabled()
Get whether the task profiling is enabled. |
String |
getProfileParams()
Get the profiler configuration arguments. |
org.apache.hadoop.conf.Configuration.IntegerRanges |
getProfileTaskRange(boolean isMap)
Get the range of maps or reduces to profile. |
float |
getProgress()
The current progress of the task attempt. |
Class<? extends Reducer<?,?,?,?>> |
getReducerClass()
Get the Reducer class for the job. |
org.apache.hadoop.io.RawComparator<?> |
getSortComparator()
Get the RawComparator comparator used to compare keys. |
String |
getStatus()
Get the last set status message. |
boolean |
getSymlink()
This method checks to see if symlinks are to be created for the localized cache files in the current working directory. |
TaskAttemptID |
getTaskAttemptID()
Get the unique name for this task attempt. |
boolean |
getTaskCleanupNeeded()
Get whether task-cleanup is needed for the job |
String |
getUser()
Get the reported username for this job. |
org.apache.hadoop.fs.Path |
getWorkingDirectory()
Get the current working directory for the default file system. |
boolean |
nextKeyValue()
Advance to the next key, value pair, returning false if at end. |
void |
progress()
|
void |
setStatus(String msg)
Set the current status of the task to the given string. |
void |
write(KEYOUT key,
VALUEOUT value)
Generate an output key/value pair. |
| Methods inherited from class java.lang.Object |
|---|
clone, equals, finalize, getClass, hashCode, notify, notifyAll, toString, wait, wait, wait |
| Field Detail |
|---|
protected MapContext<KEYIN,VALUEIN,KEYOUT,VALUEOUT> mapContext
| Constructor Detail |
|---|
public WrappedMapper.Context(MapContext<KEYIN,VALUEIN,KEYOUT,VALUEOUT> mapContext)
| Method Detail |
|---|
public InputSplit getInputSplit()
public KEYIN getCurrentKey()
throws IOException,
InterruptedException
TaskInputOutputContext
IOException
InterruptedException
public VALUEIN getCurrentValue()
throws IOException,
InterruptedException
TaskInputOutputContext
IOException
InterruptedException
public boolean nextKeyValue()
throws IOException,
InterruptedException
TaskInputOutputContext
IOException
InterruptedException
public Counter getCounter(Enum<?> counterName)
TaskAttemptContext
Get the Counter for the given counterName.
counterName - counter name
Counter for the given counterName
public Counter getCounter(String groupName,
String counterName)
TaskAttemptContext
Get the Counter for the given groupName and counterName.
counterName - counter name
Counter for the given groupName and counterName
public OutputCommitter getOutputCommitter()
TaskInputOutputContext
Get the OutputCommitter for the task-attempt.
OutputCommitter for the task-attempt
public void write(KEYOUT key,
VALUEOUT value)
throws IOException,
InterruptedException
TaskInputOutputContext
IOException
InterruptedException
public String getStatus()
TaskAttemptContext
public TaskAttemptID getTaskAttemptID()
TaskAttemptContext
public void setStatus(String msg)
TaskAttemptContext
public org.apache.hadoop.fs.Path[] getArchiveClassPaths()
JobContext
public String[] getArchiveTimestamps()
JobContext
public URI[] getCacheArchives()
throws IOException
JobContext
IOException
public URI[] getCacheFiles()
throws IOException
JobContext
IOException
public Class<? extends Reducer<?,?,?,?>> getCombinerClass()
throws ClassNotFoundException
JobContext
ClassNotFoundException
public org.apache.hadoop.conf.Configuration getConfiguration()
JobContext
public org.apache.hadoop.fs.Path[] getFileClassPaths()
JobContext
public String[] getFileTimestamps()
JobContext
public org.apache.hadoop.io.RawComparator<?> getGroupingComparator()
JobContext
Get the user defined RawComparator comparator for grouping keys of inputs to the reduce.
See Job.setGroupingComparatorClass(Class) for details.
public Class<? extends InputFormat<?,?>> getInputFormatClass()
throws ClassNotFoundException
JobContext
Get the InputFormat class for the job.
InputFormat class for the job.
ClassNotFoundException
public String getJar()
JobContext
public JobID getJobID()
JobContext
public String getJobName()
JobContext
public boolean getJobSetupCleanupNeeded()
JobContext
public boolean getTaskCleanupNeeded()
JobContext
public org.apache.hadoop.fs.Path[] getLocalCacheArchives()
throws IOException
JobContext
IOException
public org.apache.hadoop.fs.Path[] getLocalCacheFiles()
throws IOException
JobContext
IOException
public Class<?> getMapOutputKeyClass()
JobContext
public Class<?> getMapOutputValueClass()
JobContext
public Class<? extends Mapper<?,?,?,?>> getMapperClass()
throws ClassNotFoundException
JobContext
Get the Mapper class for the job.
Mapper class for the job.
ClassNotFoundException
public int getMaxMapAttempts()
JobContext
Get the configured number of maximum attempts that will be made to run a map task, as specified by the mapred.map.max.attempts
property. If this property is not already set, the default is 4 attempts.
public int getMaxReduceAttempts()
JobContext
Get the configured number of maximum attempts that will be made to run a reduce task, as specified by the mapred.reduce.max.attempts
property. If this property is not already set, the default is 4 attempts.
public int getNumReduceTasks()
JobContext
Get the configured number of reduce tasks for this job. Defaults to 1.
public Class<? extends OutputFormat<?,?>> getOutputFormatClass()
throws ClassNotFoundException
JobContext
Get the OutputFormat class for the job.
OutputFormat class for the job.
ClassNotFoundException
public Class<?> getOutputKeyClass()
JobContext
public Class<?> getOutputValueClass()
JobContext
public Class<? extends Partitioner<?,?>> getPartitionerClass()
throws ClassNotFoundException
JobContext
Get the Partitioner class for the job.
Partitioner class for the job.
ClassNotFoundException
public Class<? extends Reducer<?,?,?,?>> getReducerClass()
throws ClassNotFoundException
JobContext
Get the Reducer class for the job.
Reducer class for the job.
ClassNotFoundException
public org.apache.hadoop.io.RawComparator<?> getSortComparator()
JobContext
Get the RawComparator comparator used to compare keys.
RawComparator comparator used to compare keys.
public boolean getSymlink()
JobContext
public org.apache.hadoop.fs.Path getWorkingDirectory()
throws IOException
JobContext
IOException
public void progress()
public boolean getProfileEnabled()
JobContext
public String getProfileParams()
JobContext
public org.apache.hadoop.conf.Configuration.IntegerRanges getProfileTaskRange(boolean isMap)
JobContext
isMap - is the task a map?
public String getUser()
JobContext
public org.apache.hadoop.security.Credentials getCredentials()
JobContext
public float getProgress()
TaskAttemptContext
|
||||||||||
| PREV CLASS NEXT CLASS | FRAMES NO FRAMES | |||||||||
| SUMMARY: NESTED | FIELD | CONSTR | METHOD | DETAIL: FIELD | CONSTR | METHOD | |||||||||