@@ -30,6 +30,7 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.DataInputBuffer;
 import org.apache.hadoop.io.DataOutputBuffer;
+import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.WritableUtils;
 import org.apache.hadoop.io.compress.CodecPool;
 import org.apache.hadoop.io.compress.CompressionCodec;
@@ -325,7 +326,8 @@ class IFile {
   private int readData(byte[] buf, int off, int len) throws IOException {
     int bytesRead = 0;
     while (bytesRead < len) {
-      int n = in.read(buf, off+bytesRead, len-bytesRead);
+      int n = IOUtils.wrappedReadForCompressedData(in, buf, off + bytesRead,
+          len - bytesRead);
       if (n < 0) {
         return bytesRead;
       }
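
Note (not part of the patch): a minimal, self-contained sketch of how the patched read loop reads as a whole, reconstructed from the hunk above. The class name ReadDataSketch, the `in` field, and the trailing `bytesRead += n;` / `return len;` lines are assumptions added for illustration; only the lines visible in the hunk come from the actual IFile change.

import java.io.IOException;
import java.io.InputStream;
import org.apache.hadoop.io.IOUtils;

class ReadDataSketch {
  private final InputStream in;

  ReadDataSketch(InputStream in) {
    this.in = in;
  }

  // Mirrors the patched IFile readData: keep reading until `len` bytes are
  // buffered, or return early if the stream ends first.
  private int readData(byte[] buf, int off, int len) throws IOException {
    int bytesRead = 0;
    while (bytesRead < len) {
      // Wrapped read; presumably so that failures from a compressed input
      // stream are surfaced to the caller as IOExceptions.
      int n = IOUtils.wrappedReadForCompressedData(in, buf, off + bytesRead,
          len - bytesRead);
      if (n < 0) {
        return bytesRead;  // end of stream before len bytes were available
      }
      bytesRead += n;      // assumed continuation of the unchanged method body
    }
    return len;            // assumed continuation of the unchanged method body
  }
}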