public class PigAvroRecordReader
extends org.apache.hadoop.mapreduce.RecordReader<org.apache.hadoop.io.NullWritable,org.apache.hadoop.io.Writable>
| Constructor and Description |
|---|
PigAvroRecordReader(org.apache.hadoop.mapreduce.TaskAttemptContext context,
org.apache.hadoop.mapreduce.lib.input.FileSplit split,
org.apache.avro.Schema readerSchema,
boolean ignoreBadFiles,
java.util.Map<org.apache.hadoop.fs.Path,java.util.Map<java.lang.Integer,java.lang.Integer>> schemaToMergedSchemaMap,
boolean useMultipleSchemas)
constructor to initialize input and avro data reader
|
| Modifier and Type | Method and Description |
|---|---|
void |
close() |
org.apache.hadoop.io.NullWritable |
getCurrentKey() |
org.apache.hadoop.io.Writable |
getCurrentValue() |
long |
getPos() |
float |
getProgress() |
void |
initialize(org.apache.hadoop.mapreduce.InputSplit arg0,
org.apache.hadoop.mapreduce.TaskAttemptContext arg1) |
boolean |
nextKeyValue() |
protected Tuple |
wrapAsTuple(java.lang.Object in)
Wrap non-tuple value as a tuple
|
public PigAvroRecordReader(org.apache.hadoop.mapreduce.TaskAttemptContext context,
org.apache.hadoop.mapreduce.lib.input.FileSplit split,
org.apache.avro.Schema readerSchema,
boolean ignoreBadFiles,
java.util.Map<org.apache.hadoop.fs.Path,java.util.Map<java.lang.Integer,java.lang.Integer>> schemaToMergedSchemaMap,
boolean useMultipleSchemas)
throws java.io.IOException
Throws:
java.io.IOException

public float getProgress()
                  throws java.io.IOException
Specified by:
getProgress in class org.apache.hadoop.mapreduce.RecordReader<org.apache.hadoop.io.NullWritable,org.apache.hadoop.io.Writable>
Throws:
java.io.IOException

public long getPos()
            throws java.io.IOException
Throws:
java.io.IOException

public void close()
           throws java.io.IOException
Specified by:
close in interface java.io.Closeable
Specified by:
close in interface java.lang.AutoCloseable
Specified by:
close in class org.apache.hadoop.mapreduce.RecordReader<org.apache.hadoop.io.NullWritable,org.apache.hadoop.io.Writable>
Throws:
java.io.IOException

public org.apache.hadoop.io.NullWritable getCurrentKey()
                                                 throws java.io.IOException,
                                                        java.lang.InterruptedException
Specified by:
getCurrentKey in class org.apache.hadoop.mapreduce.RecordReader<org.apache.hadoop.io.NullWritable,org.apache.hadoop.io.Writable>
Throws:
java.io.IOException
java.lang.InterruptedException

public org.apache.hadoop.io.Writable getCurrentValue()
                                              throws java.io.IOException,
                                                     java.lang.InterruptedException
Specified by:
getCurrentValue in class org.apache.hadoop.mapreduce.RecordReader<org.apache.hadoop.io.NullWritable,org.apache.hadoop.io.Writable>
Throws:
java.io.IOException
java.lang.InterruptedException

protected Tuple wrapAsTuple(java.lang.Object in)
public void initialize(org.apache.hadoop.mapreduce.InputSplit arg0,
org.apache.hadoop.mapreduce.TaskAttemptContext arg1)
throws java.io.IOException,
java.lang.InterruptedException
Specified by:
initialize in class org.apache.hadoop.mapreduce.RecordReader<org.apache.hadoop.io.NullWritable,org.apache.hadoop.io.Writable>
Throws:
java.io.IOException
java.lang.InterruptedException

public boolean nextKeyValue()
                     throws java.io.IOException,
                            java.lang.InterruptedException
Specified by:
nextKeyValue in class org.apache.hadoop.mapreduce.RecordReader<org.apache.hadoop.io.NullWritable,org.apache.hadoop.io.Writable>
Throws:
java.io.IOException
java.lang.InterruptedException

Copyright © 2007-2012 The Apache Software Foundation