@@ -787,20 +787,19 @@ public class AggregatedLogFormat {
       long toSkip = 0;
       long totalBytesToRead = fileLength;
+      long skipAfterRead = 0;
       if (bytes < 0) {
         long absBytes = Math.abs(bytes);
         if (absBytes < fileLength) {
           toSkip = fileLength - absBytes;
           totalBytesToRead = absBytes;
         }
-        long skippedBytes = valueStream.skip(toSkip);
-        if (skippedBytes != toSkip) {
-          throw new IOException("The bytes were skipped are "
-              + "different from the caller requested");
-        }
+        org.apache.hadoop.io.IOUtils.skipFully(
+            valueStream, toSkip);
       } else {
         if (bytes < fileLength) {
           totalBytesToRead = bytes;
+          skipAfterRead = fileLength - bytes;
         }
       }
@@ -818,7 +817,9 @@ public class AggregatedLogFormat {
             pendingRead > buf.length ? buf.length : (int) pendingRead;
         len = valueStream.read(buf, 0, toRead);
       }
-      out.println("End of LogType:" + fileType);
+      org.apache.hadoop.io.IOUtils.skipFully(
+          valueStream, skipAfterRead);
+      out.println("\nEnd of LogType:" + fileType);
       out.println("");
     }
@@ -913,20 +914,19 @@ public class AggregatedLogFormat {
       long toSkip = 0;
       long totalBytesToRead = fileLength;
+      long skipAfterRead = 0;
       if (bytes < 0) {
         long absBytes = Math.abs(bytes);
         if (absBytes < fileLength) {
           toSkip = fileLength - absBytes;
           totalBytesToRead = absBytes;
         }
-        long skippedBytes = valueStream.skip(toSkip);
-        if (skippedBytes != toSkip) {
-          throw new IOException("The bytes were skipped are "
-              + "different from the caller requested");
-        }
+        org.apache.hadoop.io.IOUtils.skipFully(
+            valueStream, toSkip);
       } else {
         if (bytes < fileLength) {
           totalBytesToRead = bytes;
+          skipAfterRead = fileLength - bytes;
         }
       }
@@ -942,7 +942,9 @@ public class AggregatedLogFormat {
         toRead = pendingRead > buf.length ? buf.length : (int) pendingRead;
         len = valueStream.read(buf, 0, toRead);
       }
-      out.println("End of LogType:" + fileType);
+      org.apache.hadoop.io.IOUtils.skipFully(
+          valueStream, skipAfterRead);
+      out.println("\nEnd of LogType:" + fileType);
       out.println("");
       return 0;
     } else {
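Note (editorial, not part of the patch): in both methods the single valueStream.skip(toSkip) call is replaced with org.apache.hadoop.io.IOUtils.skipFully(valueStream, toSkip). InputStream.skip(n) may skip fewer than n bytes even when the stream has not ended, so the removed "skippedBytes != toSkip" check could throw on perfectly readable input; skipFully keeps skipping until the requested count is reached or the stream genuinely ends. The new skipAfterRead does the analogous work at the other end: when only the first `bytes` bytes of a log are printed, the remaining fileLength - bytes bytes are still consumed so the stream is positioned at the start of the next log entry, and the "\n" added before "End of LogType:" keeps the trailer on its own line when the log content does not end with a newline. The sketch below illustrates the skip-until-done pattern; the SkipFullySketch class is a hypothetical stand-in for illustration, not the Hadoop IOUtils implementation.

import java.io.EOFException;
import java.io.IOException;
import java.io.InputStream;

// Minimal sketch of a "skip fully" helper (illustrative only, not Hadoop's IOUtils).
final class SkipFullySketch {
  static void skipFully(InputStream in, long len) throws IOException {
    while (len > 0) {
      long skipped = in.skip(len);
      if (skipped > 0) {
        len -= skipped;
      } else {
        // skip() returned 0: read one byte to tell "cannot skip right now"
        // apart from end-of-stream.
        if (in.read() == -1) {
          throw new EOFException(
              "Stream ended with " + len + " bytes still to skip");
        }
        len--;
      }
    }
  }
}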