@@ -17,6 +17,9 @@
  */
 package org.apache.hadoop.hdfs.protocolPB;
 
+import java.io.EOFException;
+import java.io.IOException;
+import java.io.InputStream;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.EnumSet;
@@ -40,10 +43,10 @@ import org.apache.hadoop.hdfs.protocol.HdfsConstants.DatanodeReportType;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants.SafeModeAction;
 import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
 import org.apache.hadoop.hdfs.protocol.HdfsLocatedFileStatus;
-import org.apache.hadoop.hdfs.protocol.HdfsProtoUtil;
 import org.apache.hadoop.hdfs.protocol.LocatedBlock;
 import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos;
+import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.CreateFlagProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.DatanodeReportTypeProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetFsStatsResponseProto;
@@ -127,15 +130,20 @@ import org.apache.hadoop.hdfs.server.protocol.ReceivedDeletedBlockInfo.BlockStat
 import org.apache.hadoop.hdfs.server.protocol.RegisterCommand;
 import org.apache.hadoop.hdfs.server.protocol.RemoteEditLog;
 import org.apache.hadoop.hdfs.server.protocol.RemoteEditLogManifest;
+import org.apache.hadoop.hdfs.util.ExactSizeInputStream;
 import org.apache.hadoop.io.EnumSetWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.security.proto.SecurityProtos.TokenProto;
 import org.apache.hadoop.security.token.Token;
+import org.apache.hadoop.util.DataChecksum;
 
+import com.google.common.collect.Lists;
 import com.google.protobuf.ByteString;
+import com.google.protobuf.CodedInputStream;
 
 /**
- * Utilities for converting protobuf classes to and from implementation classes.
+ * Utilities for converting protobuf classes to and from implementation classes
+ * and other helper utilities to help in dealing with protobuf.
  *
  * Note that when converting from an internal type to protobuf type, the
  * converter never return null for protobuf type. The check for internal type
@@ -219,7 +227,8 @@ public class PBHelper {
 
   // Arrays of DatanodeId
   public static DatanodeIDProto[] convert(DatanodeID[] did) {
-    if (did == null) return null;
+    if (did == null)
+      return null;
     final int len = did.length;
     DatanodeIDProto[] result = new DatanodeIDProto[len];
     for (int i = 0; i < len; ++i) {
@@ -482,14 +491,26 @@ public class PBHelper {
     }
     return result;
   }
+
+  public static List<? extends HdfsProtos.DatanodeInfoProto> convert(
+      DatanodeInfo[] dnInfos) {
+    return convert(dnInfos, 0);
+  }
 
-  static public DatanodeInfoProto[] convert(DatanodeInfo[] di) {
-    if (di == null) return null;
-    DatanodeInfoProto[] result = new DatanodeInfoProto[di.length];
-    for (int i = 0; i < di.length; i++) {
-      result[i] = PBHelper.convertDatanodeInfo(di[i]);
+  /**
+   * Copy from {@code dnInfos} to a target list of the same size, starting at
+   * {@code startIdx}.
+   */
+  public static List<? extends HdfsProtos.DatanodeInfoProto> convert(
+      DatanodeInfo[] dnInfos, int startIdx) {
+    if (dnInfos == null)
+      return null;
+    ArrayList<HdfsProtos.DatanodeInfoProto> protos = Lists
+        .newArrayListWithCapacity(dnInfos.length);
+    for (int i = startIdx; i < dnInfos.length; i++) {
+      protos.add(convert(dnInfos[i]));
     }
-    return result;
+    return protos;
   }
 
   public static DatanodeInfo[] convert(List<DatanodeInfoProto> list) {
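
The hunk above replaces the old array-returning convert(DatanodeInfo[]) with
overloads that return a List directly, so a caller can hand the result straight
to a protobuf builder such as addAllDatanodes (as the next hunk does), and the
startIdx overload lets it skip leading entries without copying the array first.
A minimal caller sketch; the method name toProtos and the "pipeline"/"source
node" framing are illustrative assumptions, not part of this patch:

    /** Sketch: convert a replication pipeline, optionally skipping the source node. */
    static List<? extends HdfsProtos.DatanodeInfoProto> toProtos(
        DatanodeInfo[] pipeline, boolean skipSource) {
      return skipSource ? PBHelper.convert(pipeline, 1)
                        : PBHelper.convert(pipeline);
    }
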
@@ -694,7 +715,7 @@ public class PBHelper {
     DatanodeInfosProto[] ret = new DatanodeInfosProto[targets.length];
     for (int i = 0; i < targets.length; i++) {
       ret[i] = DatanodeInfosProto.newBuilder()
-        .addAllDatanodes(Arrays.asList(PBHelper.convert(targets[i]))).build();
+        .addAllDatanodes(PBHelper.convert(targets[i])).build();
     }
     return Arrays.asList(ret);
   }
@@ -963,7 +984,7 @@
         fs.getFileBufferSize(),
         fs.getEncryptDataTransfer(),
         fs.getTrashInterval(),
-        HdfsProtoUtil.fromProto(fs.getChecksumType()));
+        PBHelper.convert(fs.getChecksumType()));
   }
 
   public static FsServerDefaultsProto convert(FsServerDefaults fs) {
@@ -976,7 +997,7 @@
         .setFileBufferSize(fs.getFileBufferSize())
         .setEncryptDataTransfer(fs.getEncryptDataTransfer())
         .setTrashInterval(fs.getTrashInterval())
-        .setChecksumType(HdfsProtoUtil.toProto(fs.getChecksumType()))
+        .setChecksumType(PBHelper.convert(fs.getChecksumType()))
         .build();
   }
 
@@ -1314,4 +1335,24 @@
         .setLayoutVersion(j.getLayoutVersion())
         .setNamespaceID(j.getNamespaceId()).build();
   }
+
+  public static DataChecksum.Type convert(HdfsProtos.ChecksumTypeProto type) {
+    return DataChecksum.Type.valueOf(type.getNumber());
+  }
+
+  public static HdfsProtos.ChecksumTypeProto convert(DataChecksum.Type type) {
+    return HdfsProtos.ChecksumTypeProto.valueOf(type.id);
+  }
+
+  public static InputStream vintPrefixed(final InputStream input)
+      throws IOException {
+    final int firstByte = input.read();
+    if (firstByte == -1) {
+      throw new EOFException("Premature EOF: no length prefix available");
+    }
+
+    int size = CodedInputStream.readRawVarint32(firstByte, input);
+    assert size >= 0;
+    return new ExactSizeInputStream(input, size);
+  }
 }
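
Two notes on the helpers added in the final hunk. First, the checksum
converters depend on HdfsProtos.ChecksumTypeProto declaring the same numeric
values as the DataChecksum.Type ids (CHECKSUM_NULL = 0, CHECKSUM_CRC32 = 1,
CHECKSUM_CRC32C = 2), so each conversion is a plain number-for-number enum
lookup. A round-trip sketch under that assumption:

    DataChecksum.Type t = DataChecksum.Type.CRC32C;
    HdfsProtos.ChecksumTypeProto p = PBHelper.convert(t);  // CHECKSUM_CRC32C
    assert PBHelper.convert(p) == t;  // ids and proto numbers agree both ways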
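
Second, vintPrefixed reads a protobuf varint length prefix off the stream and
wraps the remainder in an ExactSizeInputStream bounded to that many bytes, so
a subsequent parseFrom(...) cannot read past the end of one framed message. A
hedged sketch of a typical caller, assuming a varint-prefixed
BlockOpResponseProto (from DataTransferProtos) on a socket stream; both the
proto type and the "sock" variable are assumptions, not part of this patch:

    InputStream in = sock.getInputStream();  // "sock" is an assumed Socket in scope
    BlockOpResponseProto resp =
        BlockOpResponseProto.parseFrom(PBHelper.vintPrefixed(in));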