public class PigAvroRecordReader
extends org.apache.hadoop.mapreduce.RecordReader<org.apache.hadoop.io.NullWritable,org.apache.hadoop.io.Writable>
| Constructor and Description |
|---|
PigAvroRecordReader(org.apache.hadoop.mapreduce.TaskAttemptContext context,
org.apache.hadoop.mapreduce.lib.input.FileSplit split,
org.apache.avro.Schema readerSchema,
boolean ignoreBadFiles,
java.util.Map<org.apache.hadoop.fs.Path,java.util.Map<java.lang.Integer,java.lang.Integer>> schemaToMergedSchemaMap,
boolean useMultipleSchemas)
Constructor that initializes the input and the Avro data reader.
|
| Modifier and Type | Method and Description |
|---|---|
void |
close() |
org.apache.hadoop.io.NullWritable |
getCurrentKey() |
org.apache.hadoop.io.Writable |
getCurrentValue() |
long |
getPos() |
float |
getProgress() |
void |
initialize(org.apache.hadoop.mapreduce.InputSplit arg0,
org.apache.hadoop.mapreduce.TaskAttemptContext arg1) |
boolean |
nextKeyValue() |
protected Tuple |
wrapAsTuple(java.lang.Object in)
Wraps a non-tuple value as a tuple.
|
public PigAvroRecordReader(org.apache.hadoop.mapreduce.TaskAttemptContext context, org.apache.hadoop.mapreduce.lib.input.FileSplit split, org.apache.avro.Schema readerSchema, boolean ignoreBadFiles, java.util.Map<org.apache.hadoop.fs.Path,java.util.Map<java.lang.Integer,java.lang.Integer>> schemaToMergedSchemaMap, boolean useMultipleSchemas) throws java.io.IOException
java.io.IOException
public float getProgress() throws java.io.IOException
getProgress
in class org.apache.hadoop.mapreduce.RecordReader<org.apache.hadoop.io.NullWritable,org.apache.hadoop.io.Writable>
java.io.IOException
public long getPos() throws java.io.IOException
java.io.IOException
public void close() throws java.io.IOException
close
in interface java.io.Closeable
close
in interface java.lang.AutoCloseable
close
in class org.apache.hadoop.mapreduce.RecordReader<org.apache.hadoop.io.NullWritable,org.apache.hadoop.io.Writable>
java.io.IOException
public org.apache.hadoop.io.NullWritable getCurrentKey() throws java.io.IOException, java.lang.InterruptedException
getCurrentKey
in class org.apache.hadoop.mapreduce.RecordReader<org.apache.hadoop.io.NullWritable,org.apache.hadoop.io.Writable>
java.io.IOException
java.lang.InterruptedException
public org.apache.hadoop.io.Writable getCurrentValue() throws java.io.IOException, java.lang.InterruptedException
getCurrentValue
in class org.apache.hadoop.mapreduce.RecordReader<org.apache.hadoop.io.NullWritable,org.apache.hadoop.io.Writable>
java.io.IOException
java.lang.InterruptedException
protected Tuple wrapAsTuple(java.lang.Object in)
public void initialize(org.apache.hadoop.mapreduce.InputSplit arg0, org.apache.hadoop.mapreduce.TaskAttemptContext arg1) throws java.io.IOException, java.lang.InterruptedException
initialize
in class org.apache.hadoop.mapreduce.RecordReader<org.apache.hadoop.io.NullWritable,org.apache.hadoop.io.Writable>
java.io.IOException
java.lang.InterruptedException
public boolean nextKeyValue() throws java.io.IOException, java.lang.InterruptedException
nextKeyValue
in class org.apache.hadoop.mapreduce.RecordReader<org.apache.hadoop.io.NullWritable,org.apache.hadoop.io.Writable>
java.io.IOException
java.lang.InterruptedException
Copyright © 2007-2012 The Apache Software Foundation