|
@@ -0,0 +1,851 @@
|
|
|
+/**
|
|
|
+ * Licensed to the Apache Software Foundation (ASF) under one
|
|
|
+ * or more contributor license agreements. See the NOTICE file
|
|
|
+ * distributed with this work for additional information
|
|
|
+ * regarding copyright ownership. The ASF licenses this file
|
|
|
+ * to you under the Apache License, Version 2.0 (the
|
|
|
+ * "License"); you may not use this file except in compliance
|
|
|
+ * with the License. You may obtain a copy of the License at
|
|
|
+ *
|
|
|
+ * http://www.apache.org/licenses/LICENSE-2.0
|
|
|
+ *
|
|
|
+ * Unless required by applicable law or agreed to in writing, software
|
|
|
+ * distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
+ * See the License for the specific language governing permissions and
|
|
|
+ * limitations under the License.
|
|
|
+ */
|
|
|
+package org.apache.hadoop.hdfs.protocolPB;
|
|
|
+
|
|
|
+import java.io.Closeable;
|
|
|
+import java.io.FileNotFoundException;
|
|
|
+import java.io.IOException;
|
|
|
+import java.net.InetSocketAddress;
|
|
|
+import java.util.Arrays;
|
|
|
+import java.util.HashMap;
|
|
|
+import java.util.Map;
|
|
|
+import java.util.concurrent.TimeUnit;
|
|
|
+
|
|
|
+import org.apache.hadoop.classification.InterfaceAudience;
|
|
|
+import org.apache.hadoop.classification.InterfaceStability;
|
|
|
+import org.apache.hadoop.conf.Configuration;
|
|
|
+import org.apache.hadoop.fs.ContentSummary;
|
|
|
+import org.apache.hadoop.fs.CreateFlag;
|
|
|
+import org.apache.hadoop.fs.FileAlreadyExistsException;
|
|
|
+import org.apache.hadoop.fs.FsServerDefaults;
|
|
|
+import org.apache.hadoop.fs.ParentNotDirectoryException;
|
|
|
+import org.apache.hadoop.fs.UnresolvedLinkException;
|
|
|
+import org.apache.hadoop.fs.Options.Rename;
|
|
|
+import org.apache.hadoop.fs.permission.FsPermission;
|
|
|
+import org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;
|
|
|
+import org.apache.hadoop.hdfs.protocol.ClientProtocol;
|
|
|
+import org.apache.hadoop.hdfs.protocol.LocatedBlock;
|
|
|
+import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
|
|
|
+import org.apache.hadoop.hdfs.protocol.CorruptFileBlocks;
|
|
|
+import org.apache.hadoop.hdfs.protocol.DSQuotaExceededException;
|
|
|
+import org.apache.hadoop.hdfs.protocol.DatanodeID;
|
|
|
+import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
|
|
|
+import org.apache.hadoop.hdfs.protocol.DirectoryListing;
|
|
|
+import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
|
|
|
+import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
|
|
|
+import org.apache.hadoop.hdfs.protocol.NSQuotaExceededException;
|
|
|
+import org.apache.hadoop.hdfs.protocol.HdfsConstants.DatanodeReportType;
|
|
|
+import org.apache.hadoop.hdfs.protocol.HdfsConstants.SafeModeAction;
|
|
|
+import org.apache.hadoop.hdfs.protocol.HdfsConstants.UpgradeAction;
|
|
|
+import org.apache.hadoop.hdfs.protocolR23Compatible.ProtocolSignatureWritable;
|
|
|
+import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
|
|
|
+import org.apache.hadoop.hdfs.server.common.UpgradeStatusReport;
|
|
|
+import org.apache.hadoop.hdfs.server.namenode.NameNode;
|
|
|
+import org.apache.hadoop.hdfs.server.namenode.NotReplicatedYetException;
|
|
|
+import org.apache.hadoop.hdfs.server.namenode.SafeModeException;
|
|
|
+import org.apache.hadoop.io.EnumSetWritable;
|
|
|
+import org.apache.hadoop.io.Text;
|
|
|
+import org.apache.hadoop.io.retry.RetryPolicies;
|
|
|
+import org.apache.hadoop.io.retry.RetryPolicy;
|
|
|
+import org.apache.hadoop.io.retry.RetryProxy;
|
|
|
+import org.apache.hadoop.ipc.ProtobufHelper;
|
|
|
+import org.apache.hadoop.ipc.ProtobufRpcEngine;
|
|
|
+import org.apache.hadoop.ipc.ProtocolSignature;
|
|
|
+import org.apache.hadoop.ipc.RPC;
|
|
|
+import org.apache.hadoop.ipc.RemoteException;
|
|
|
+import org.apache.hadoop.net.NetUtils;
|
|
|
+import org.apache.hadoop.security.AccessControlException;
|
|
|
+import org.apache.hadoop.security.UserGroupInformation;
|
|
|
+import org.apache.hadoop.security.token.Token;
|
|
|
+import org.apache.hadoop.hdfs.protocol.HdfsConstants;
|
|
|
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AbandonBlockRequestProto;
|
|
|
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AddBlockRequestProto;
|
|
|
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AppendRequestProto;
|
|
|
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.CancelDelegationTokenRequestProto;
|
|
|
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.CompleteRequestProto;
|
|
|
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ConcatRequestProto;
|
|
|
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.CreateRequestProto;
|
|
|
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.CreateSymlinkRequestProto;
|
|
|
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.DeleteRequestProto;
|
|
|
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.DistributedUpgradeProgressRequestProto;
|
|
|
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.FinalizeUpgradeRequestProto;
|
|
|
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.FsyncRequestProto;
|
|
|
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetAdditionalDatanodeRequestProto;
|
|
|
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetBlockLocationsRequestProto;
|
|
|
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetContentSummaryRequestProto;
|
|
|
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetDatanodeReportRequestProto;
|
|
|
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetDelegationTokenRequestProto;
|
|
|
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetFileInfoRequestProto;
|
|
|
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetFileLinkInfoRequestProto;
|
|
|
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetFsStatusRequestProto;
|
|
|
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetLinkTargetRequestProto;
|
|
|
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetListingRequestProto;
|
|
|
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetPreferredBlockSizeRequestProto;
|
|
|
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetServerDefaultsRequestProto;
|
|
|
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListCorruptFileBlocksRequestProto;
|
|
|
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.MetaSaveRequestProto;
|
|
|
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.MkdirsRequestProto;
|
|
|
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RecoverLeaseRequestProto;
|
|
|
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RefreshNodesRequestProto;
|
|
|
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.Rename2RequestProto;
|
|
|
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RenameRequestProto;
|
|
|
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RenewDelegationTokenRequestProto;
|
|
|
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RenewLeaseRequestProto;
|
|
|
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ReportBadBlocksRequestProto;
|
|
|
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RestoreFailedStorageRequestProto;
|
|
|
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.SaveNamespaceRequestProto;
|
|
|
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.SetBalancerBandwidthRequestProto;
|
|
|
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.SetOwnerRequestProto;
|
|
|
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.SetPermissionRequestProto;
|
|
|
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.SetQuotaRequestProto;
|
|
|
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.SetReplicationRequestProto;
|
|
|
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.SetSafeModeRequestProto;
|
|
|
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.SetTimesRequestProto;
|
|
|
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.UpdateBlockForPipelineRequestProto;
|
|
|
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.UpdatePipelineRequestProto;
|
|
|
+
|
|
|
+import com.google.protobuf.ByteString;
|
|
|
+import com.google.protobuf.ServiceException;
|
|
|
+
|
|
|
+/**
|
|
|
+ * This class forwards NN's ClientProtocol calls as RPC calls to the NN server
|
|
|
+ * while translating from the parameter types used in ClientProtocol to those
|
|
|
+ * used in ClientNamenodeProtocolProtos.
|
|
|
+ */
|
|
|
+@InterfaceAudience.Private
|
|
|
+@InterfaceStability.Stable
|
|
|
+public class ClientNamenodeProtocolTranslatorPB implements
|
|
|
+ ClientProtocol, Closeable {
|
|
|
+ final private ClientNamenodeProtocolPB rpcProxy;
|
|
|
+
|
|
|
+ private static ClientNamenodeProtocolPB createNamenode(
|
|
|
+ InetSocketAddress nameNodeAddr, Configuration conf,
|
|
|
+ UserGroupInformation ugi) throws IOException {
|
|
|
+ RPC.setProtocolEngine(conf, ClientNamenodeProtocolPB.class,
|
|
|
+ ProtobufRpcEngine.class);
|
|
|
+ return RPC.getProxy(ClientNamenodeProtocolPB.class,
|
|
|
+ RPC.getProtocolVersion(ClientNamenodeProtocolPB.class), nameNodeAddr, ugi, conf,
|
|
|
+ NetUtils.getSocketFactory(conf, ClientNamenodeProtocolPB.class));
|
|
|
+ }
|
|
|
+
|
|
|
+ /** Create a {@link NameNode} proxy */
|
|
|
+ static ClientNamenodeProtocolPB createNamenodeWithRetry(
|
|
|
+ ClientNamenodeProtocolPB rpcNamenode) {
|
|
|
+ RetryPolicy createPolicy = RetryPolicies
|
|
|
+ .retryUpToMaximumCountWithFixedSleep(5,
|
|
|
+ HdfsConstants.LEASE_SOFTLIMIT_PERIOD, TimeUnit.MILLISECONDS);
|
|
|
+
|
|
|
+ Map<Class<? extends Exception>, RetryPolicy> remoteExceptionToPolicyMap
|
|
|
+ = new HashMap<Class<? extends Exception>, RetryPolicy>();
|
|
|
+ remoteExceptionToPolicyMap.put(AlreadyBeingCreatedException.class,
|
|
|
+ createPolicy);
|
|
|
+
|
|
|
+ Map<Class<? extends Exception>, RetryPolicy> exceptionToPolicyMap =
|
|
|
+ new HashMap<Class<? extends Exception>, RetryPolicy>();
|
|
|
+ exceptionToPolicyMap.put(RemoteException.class, RetryPolicies
|
|
|
+ .retryByRemoteException(RetryPolicies.TRY_ONCE_THEN_FAIL,
|
|
|
+ remoteExceptionToPolicyMap));
|
|
|
+ RetryPolicy methodPolicy = RetryPolicies.retryByException(
|
|
|
+ RetryPolicies.TRY_ONCE_THEN_FAIL, exceptionToPolicyMap);
|
|
|
+ Map<String, RetryPolicy> methodNameToPolicyMap = new HashMap<String, RetryPolicy>();
|
|
|
+
|
|
|
+ methodNameToPolicyMap.put("create", methodPolicy);
|
|
|
+
|
|
|
+ return (ClientNamenodeProtocolPB) RetryProxy.create(
|
|
|
+ ClientNamenodeProtocolPB.class, rpcNamenode, methodNameToPolicyMap);
|
|
|
+ }
|
|
|
+
|
|
|
+ public ClientNamenodeProtocolTranslatorPB(InetSocketAddress nameNodeAddr,
|
|
|
+ Configuration conf, UserGroupInformation ugi) throws IOException {
|
|
|
+
|
|
|
+ rpcProxy = createNamenodeWithRetry(createNamenode(nameNodeAddr, conf, ugi));
|
|
|
+ }
|
|
|
+
|
|
|
+ public void close() {
|
|
|
+ RPC.stopProxy(rpcProxy);
|
|
|
+ }
|
|
|
+
|
|
|
+ @Override
|
|
|
+ public ProtocolSignature getProtocolSignature(String protocolName,
|
|
|
+ long clientVersion, int clientMethodHash)
|
|
|
+ throws IOException {
|
|
|
+ return ProtocolSignatureWritable.convert(rpcProxy.getProtocolSignature2(
|
|
|
+ protocolName, clientVersion, clientMethodHash));
|
|
|
+ }
|
|
|
+
|
|
|
+ @Override
|
|
|
+ public long getProtocolVersion(String protocolName, long clientVersion)
|
|
|
+ throws IOException {
|
|
|
+ return rpcProxy.getProtocolVersion(protocolName, clientVersion);
|
|
|
+ }
|
|
|
+
|
|
|
+ @Override
|
|
|
+ public LocatedBlocks getBlockLocations(String src, long offset, long length)
|
|
|
+ throws AccessControlException, FileNotFoundException,
|
|
|
+ UnresolvedLinkException, IOException {
|
|
|
+ GetBlockLocationsRequestProto req = GetBlockLocationsRequestProto
|
|
|
+ .newBuilder()
|
|
|
+ .setSrc(src)
|
|
|
+ .setOffset(offset)
|
|
|
+ .setLength(length)
|
|
|
+ .build();
|
|
|
+ try {
|
|
|
+ return PBHelper.convert(rpcProxy.getBlockLocations(null, req).getLocations());
|
|
|
+ } catch (ServiceException e) {
|
|
|
+ throw ProtobufHelper.getRemoteException(e);
|
|
|
+ }
|
|
|
+ }
|
|
|
+
|
|
|
+ @Override
|
|
|
+ public FsServerDefaults getServerDefaults() throws IOException {
|
|
|
+ GetServerDefaultsRequestProto req = GetServerDefaultsRequestProto.newBuilder().build();
|
|
|
+ try {
|
|
|
+ return PBHelper
|
|
|
+ .convert(rpcProxy.getServerDefaults(null, req).getServerDefaults());
|
|
|
+ } catch (ServiceException e) {
|
|
|
+ throw ProtobufHelper.getRemoteException(e);
|
|
|
+ }
|
|
|
+ }
|
|
|
+
|
|
|
+ @Override
|
|
|
+ public void create(String src, FsPermission masked, String clientName,
|
|
|
+ EnumSetWritable<CreateFlag> flag, boolean createParent,
|
|
|
+ short replication, long blockSize) throws AccessControlException,
|
|
|
+ AlreadyBeingCreatedException, DSQuotaExceededException,
|
|
|
+ FileAlreadyExistsException, FileNotFoundException,
|
|
|
+ NSQuotaExceededException, ParentNotDirectoryException, SafeModeException,
|
|
|
+ UnresolvedLinkException, IOException {
|
|
|
+ CreateRequestProto req = CreateRequestProto.newBuilder()
|
|
|
+ .setSrc(src)
|
|
|
+ .setMasked(PBHelper.convert(masked))
|
|
|
+ .setClientName(clientName)
|
|
|
+ .setCreateFlag(PBHelper.convertCreateFlag(flag))
|
|
|
+ .setCreateParent(createParent)
|
|
|
+ .setReplication(replication)
|
|
|
+ .setBlockSize(blockSize)
|
|
|
+ .build();
|
|
|
+ try {
|
|
|
+ rpcProxy.create(null, req);
|
|
|
+ } catch (ServiceException e) {
|
|
|
+ throw ProtobufHelper.getRemoteException(e);
|
|
|
+ }
|
|
|
+
|
|
|
+ }
|
|
|
+
|
|
|
+ @Override
|
|
|
+ public LocatedBlock append(String src, String clientName)
|
|
|
+ throws AccessControlException, DSQuotaExceededException,
|
|
|
+ FileNotFoundException, SafeModeException, UnresolvedLinkException,
|
|
|
+ IOException {
|
|
|
+ AppendRequestProto req = AppendRequestProto.newBuilder()
|
|
|
+ .setSrc(src)
|
|
|
+ .setClientName(clientName)
|
|
|
+ .build();
|
|
|
+ try {
|
|
|
+ return PBHelper.convert(rpcProxy.append(null, req).getBlock());
|
|
|
+ } catch (ServiceException e) {
|
|
|
+ throw ProtobufHelper.getRemoteException(e);
|
|
|
+ }
|
|
|
+ }
|
|
|
+
|
|
|
+ @Override
|
|
|
+ public boolean setReplication(String src, short replication)
|
|
|
+ throws AccessControlException, DSQuotaExceededException,
|
|
|
+ FileNotFoundException, SafeModeException, UnresolvedLinkException,
|
|
|
+ IOException {
|
|
|
+ SetReplicationRequestProto req = SetReplicationRequestProto.newBuilder()
|
|
|
+ .setSrc(src)
|
|
|
+ .setReplication(replication)
|
|
|
+ .build();
|
|
|
+ try {
|
|
|
+ return rpcProxy.setReplication(null, req).getResult();
|
|
|
+ } catch (ServiceException e) {
|
|
|
+ throw ProtobufHelper.getRemoteException(e);
|
|
|
+ }
|
|
|
+ }
|
|
|
+
|
|
|
+ @Override
|
|
|
+ public void setPermission(String src, FsPermission permission)
|
|
|
+ throws AccessControlException, FileNotFoundException, SafeModeException,
|
|
|
+ UnresolvedLinkException, IOException {
|
|
|
+ SetPermissionRequestProto req = SetPermissionRequestProto.newBuilder()
|
|
|
+ .setSrc(src)
|
|
|
+ .setPermission(PBHelper.convert(permission))
|
|
|
+ .build();
|
|
|
+ try {
|
|
|
+ rpcProxy.setPermission(null, req);
|
|
|
+ } catch (ServiceException e) {
|
|
|
+ throw ProtobufHelper.getRemoteException(e);
|
|
|
+ }
|
|
|
+ }
|
|
|
+
|
|
|
+ @Override
|
|
|
+ public void setOwner(String src, String username, String groupname)
|
|
|
+ throws AccessControlException, FileNotFoundException, SafeModeException,
|
|
|
+ UnresolvedLinkException, IOException {
|
|
|
+ SetOwnerRequestProto req = SetOwnerRequestProto.newBuilder()
|
|
|
+ .setSrc(src)
|
|
|
+ .setUsername(username)
|
|
|
+ .setGroupname(groupname)
|
|
|
+ .build();
|
|
|
+ try {
|
|
|
+ rpcProxy.setOwner(null, req);
|
|
|
+ } catch (ServiceException e) {
|
|
|
+ throw ProtobufHelper.getRemoteException(e);
|
|
|
+ }
|
|
|
+ }
|
|
|
+
|
|
|
+ @Override
|
|
|
+ public void abandonBlock(ExtendedBlock b, String src, String holder)
|
|
|
+ throws AccessControlException, FileNotFoundException,
|
|
|
+ UnresolvedLinkException, IOException {
|
|
|
+ AbandonBlockRequestProto req = AbandonBlockRequestProto.newBuilder()
|
|
|
+ .setB(PBHelper.convert(b)).setSrc(src).setHolder(holder).build();
|
|
|
+ try {
|
|
|
+ rpcProxy.abandonBlock(null, req);
|
|
|
+ } catch (ServiceException e) {
|
|
|
+ throw ProtobufHelper.getRemoteException(e);
|
|
|
+ }
|
|
|
+ }
|
|
|
+
|
|
|
+ @Override
|
|
|
+ public LocatedBlock addBlock(String src, String clientName,
|
|
|
+ ExtendedBlock previous, DatanodeInfo[] excludeNodes)
|
|
|
+ throws AccessControlException, FileNotFoundException,
|
|
|
+ NotReplicatedYetException, SafeModeException, UnresolvedLinkException,
|
|
|
+ IOException {
|
|
|
+ AddBlockRequestProto req = AddBlockRequestProto.newBuilder().setSrc(src)
|
|
|
+ .setClientName(clientName).setPrevious(PBHelper.convert(previous))
|
|
|
+ .addAllExcludeNodes(Arrays.asList(PBHelper.convert(excludeNodes)))
|
|
|
+ .build();
|
|
|
+ try {
|
|
|
+ return PBHelper.convert(rpcProxy.addBlock(null, req).getBlock());
|
|
|
+ } catch (ServiceException e) {
|
|
|
+ throw ProtobufHelper.getRemoteException(e);
|
|
|
+ }
|
|
|
+ }
|
|
|
+
|
|
|
+ @Override
|
|
|
+ public LocatedBlock getAdditionalDatanode(String src, ExtendedBlock blk,
|
|
|
+ DatanodeInfo[] existings, DatanodeInfo[] excludes,
|
|
|
+ int numAdditionalNodes, String clientName) throws AccessControlException,
|
|
|
+ FileNotFoundException, SafeModeException, UnresolvedLinkException,
|
|
|
+ IOException {
|
|
|
+ GetAdditionalDatanodeRequestProto req = GetAdditionalDatanodeRequestProto
|
|
|
+ .newBuilder()
|
|
|
+ .setSrc(src)
|
|
|
+ .setBlk(PBHelper.convert(blk))
|
|
|
+ .addAllExistings(Arrays.asList(PBHelper.convert(existings)))
|
|
|
+ .addAllExcludes(Arrays.asList(PBHelper.convert(excludes)))
|
|
|
+ .setNumAdditionalNodes(numAdditionalNodes)
|
|
|
+ .setClientName(clientName)
|
|
|
+ .build();
|
|
|
+ try {
|
|
|
+ return PBHelper.convert(rpcProxy.getAdditionalDatanode(null, req)
|
|
|
+ .getBlock());
|
|
|
+ } catch (ServiceException e) {
|
|
|
+ throw ProtobufHelper.getRemoteException(e);
|
|
|
+ }
|
|
|
+ }
|
|
|
+
|
|
|
+ @Override
|
|
|
+ public boolean complete(String src, String clientName, ExtendedBlock last)
|
|
|
+ throws AccessControlException, FileNotFoundException, SafeModeException,
|
|
|
+ UnresolvedLinkException, IOException {
|
|
|
+ CompleteRequestProto req = CompleteRequestProto.newBuilder()
|
|
|
+ .setSrc(src)
|
|
|
+ .setClientName(clientName)
|
|
|
+ .setLast(PBHelper.convert(last))
|
|
|
+ .build();
|
|
|
+ try {
|
|
|
+ return rpcProxy.complete(null, req).getResult();
|
|
|
+ } catch (ServiceException e) {
|
|
|
+ throw ProtobufHelper.getRemoteException(e);
|
|
|
+ }
|
|
|
+ }
|
|
|
+
|
|
|
+ @Override
|
|
|
+ public void reportBadBlocks(LocatedBlock[] blocks) throws IOException {
|
|
|
+ ReportBadBlocksRequestProto req = ReportBadBlocksRequestProto.newBuilder()
|
|
|
+ .addAllBlocks(Arrays.asList(PBHelper.convertLocatedBlock(blocks)))
|
|
|
+ .build();
|
|
|
+ try {
|
|
|
+ rpcProxy.reportBadBlocks(null, req);
|
|
|
+ } catch (ServiceException e) {
|
|
|
+ throw ProtobufHelper.getRemoteException(e);
|
|
|
+ }
|
|
|
+ }
|
|
|
+
|
|
|
+ @Override
|
|
|
+ public boolean rename(String src, String dst) throws UnresolvedLinkException,
|
|
|
+ IOException {
|
|
|
+ RenameRequestProto req = RenameRequestProto.newBuilder()
|
|
|
+ .setSrc(src)
|
|
|
+ .setDst(dst).build();
|
|
|
+ try {
|
|
|
+ return rpcProxy.rename(null, req).getResult();
|
|
|
+ } catch (ServiceException e) {
|
|
|
+ throw ProtobufHelper.getRemoteException(e);
|
|
|
+ }
|
|
|
+ }
|
|
|
+
|
|
|
+
|
|
|
+ @Override
|
|
|
+ public void rename2(String src, String dst, Rename... options)
|
|
|
+ throws AccessControlException, DSQuotaExceededException,
|
|
|
+ FileAlreadyExistsException, FileNotFoundException,
|
|
|
+ NSQuotaExceededException, ParentNotDirectoryException, SafeModeException,
|
|
|
+ UnresolvedLinkException, IOException {
|
|
|
+ boolean overwrite = false;
|
|
|
+ if (options != null) {
|
|
|
+ for (Rename option : options) {
|
|
|
+ if (option == Rename.OVERWRITE) {
|
|
|
+ overwrite = true;
|
|
|
+ }
|
|
|
+ }
|
|
|
+ }
|
|
|
+ Rename2RequestProto req = Rename2RequestProto.newBuilder().
|
|
|
+ setSrc(src).
|
|
|
+ setDst(dst).setOverwriteDest(overwrite).
|
|
|
+ build();
|
|
|
+ try {
|
|
|
+ rpcProxy.rename2(null, req);
|
|
|
+ } catch (ServiceException e) {
|
|
|
+ throw ProtobufHelper.getRemoteException(e);
|
|
|
+ }
|
|
|
+
|
|
|
+ }
|
|
|
+
|
|
|
+ @Override
|
|
|
+ public void concat(String trg, String[] srcs) throws IOException,
|
|
|
+ UnresolvedLinkException {
|
|
|
+ ConcatRequestProto req = ConcatRequestProto.newBuilder().
|
|
|
+ setTrg(trg).
|
|
|
+ addAllSrcs(Arrays.asList(srcs)).build();
|
|
|
+ try {
|
|
|
+ rpcProxy.concat(null, req);
|
|
|
+ } catch (ServiceException e) {
|
|
|
+ throw ProtobufHelper.getRemoteException(e);
|
|
|
+ }
|
|
|
+ }
|
|
|
+
|
|
|
+
|
|
|
+ @Override
|
|
|
+ public boolean delete(String src, boolean recursive)
|
|
|
+ throws AccessControlException, FileNotFoundException, SafeModeException,
|
|
|
+ UnresolvedLinkException, IOException {
|
|
|
+ DeleteRequestProto req = DeleteRequestProto.newBuilder().setSrc(src).setRecursive(recursive).build();
|
|
|
+ try {
|
|
|
+ return rpcProxy.delete(null, req).getResult();
|
|
|
+ } catch (ServiceException e) {
|
|
|
+ throw ProtobufHelper.getRemoteException(e);
|
|
|
+ }
|
|
|
+ }
|
|
|
+
|
|
|
+ @Override
|
|
|
+ public boolean mkdirs(String src, FsPermission masked, boolean createParent)
|
|
|
+ throws AccessControlException, FileAlreadyExistsException,
|
|
|
+ FileNotFoundException, NSQuotaExceededException,
|
|
|
+ ParentNotDirectoryException, SafeModeException, UnresolvedLinkException,
|
|
|
+ IOException {
|
|
|
+ MkdirsRequestProto req = MkdirsRequestProto.newBuilder()
|
|
|
+ .setSrc(src)
|
|
|
+ .setMasked(PBHelper.convert(masked))
|
|
|
+ .setCreateParent(createParent).build();
|
|
|
+
|
|
|
+ try {
|
|
|
+ return rpcProxy.mkdirs(null, req).getResult();
|
|
|
+ } catch (ServiceException e) {
|
|
|
+ throw ProtobufHelper.getRemoteException(e);
|
|
|
+ }
|
|
|
+ }
|
|
|
+
|
|
|
+ @Override
|
|
|
+ public DirectoryListing getListing(String src, byte[] startAfter,
|
|
|
+ boolean needLocation) throws AccessControlException,
|
|
|
+ FileNotFoundException, UnresolvedLinkException, IOException {
|
|
|
+ GetListingRequestProto req = GetListingRequestProto.newBuilder()
|
|
|
+ .setSrc(src)
|
|
|
+ .setStartAfter(ByteString.copyFrom(startAfter))
|
|
|
+ .setNeedLocation(needLocation).build();
|
|
|
+ try {
|
|
|
+ return PBHelper.convert(rpcProxy.getListing(null, req).getDirList());
|
|
|
+ } catch (ServiceException e) {
|
|
|
+ throw ProtobufHelper.getRemoteException(e);
|
|
|
+ }
|
|
|
+ }
|
|
|
+
|
|
|
+ @Override
|
|
|
+ public void renewLease(String clientName) throws AccessControlException,
|
|
|
+ IOException {
|
|
|
+ RenewLeaseRequestProto req = RenewLeaseRequestProto.newBuilder()
|
|
|
+ .setClientName(clientName).build();
|
|
|
+ try {
|
|
|
+ rpcProxy.renewLease(null, req);
|
|
|
+ } catch (ServiceException e) {
|
|
|
+ throw ProtobufHelper.getRemoteException(e);
|
|
|
+ }
|
|
|
+ }
|
|
|
+
|
|
|
+ @Override
|
|
|
+ public boolean recoverLease(String src, String clientName)
|
|
|
+ throws IOException {
|
|
|
+ RecoverLeaseRequestProto req = RecoverLeaseRequestProto.newBuilder()
|
|
|
+ .setSrc(src)
|
|
|
+ .setClientName(clientName).build();
|
|
|
+ try {
|
|
|
+ return rpcProxy.recoverLease(null, req).getResult();
|
|
|
+ } catch (ServiceException e) {
|
|
|
+ throw ProtobufHelper.getRemoteException(e);
|
|
|
+ }
|
|
|
+ }
|
|
|
+
|
|
|
+ @Override
|
|
|
+ public long[] getStats() throws IOException {
|
|
|
+ GetFsStatusRequestProto req = GetFsStatusRequestProto.newBuilder().build();
|
|
|
+ try {
|
|
|
+ return PBHelper.convert(rpcProxy.getFsStats(null, req));
|
|
|
+ } catch (ServiceException e) {
|
|
|
+ throw ProtobufHelper.getRemoteException(e);
|
|
|
+ }
|
|
|
+ }
|
|
|
+
|
|
|
+ @Override
|
|
|
+ public DatanodeInfo[] getDatanodeReport(DatanodeReportType type)
|
|
|
+ throws IOException {
|
|
|
+ GetDatanodeReportRequestProto req = GetDatanodeReportRequestProto
|
|
|
+ .newBuilder()
|
|
|
+ .setType(PBHelper.convert(type)).build();
|
|
|
+ try {
|
|
|
+ return PBHelper.convert(
|
|
|
+ rpcProxy.getDatanodeReport(null, req).getDiList());
|
|
|
+ } catch (ServiceException e) {
|
|
|
+ throw ProtobufHelper.getRemoteException(e);
|
|
|
+ }
|
|
|
+ }
|
|
|
+
|
|
|
+ @Override
|
|
|
+ public long getPreferredBlockSize(String filename) throws IOException,
|
|
|
+ UnresolvedLinkException {
|
|
|
+ GetPreferredBlockSizeRequestProto req = GetPreferredBlockSizeRequestProto
|
|
|
+ .newBuilder()
|
|
|
+ .setFilename(filename)
|
|
|
+ .build();
|
|
|
+ try {
|
|
|
+ return rpcProxy.getPreferredBlockSize(null, req).getBsize();
|
|
|
+ } catch (ServiceException e) {
|
|
|
+ throw ProtobufHelper.getRemoteException(e);
|
|
|
+ }
|
|
|
+ }
|
|
|
+
|
|
|
+ @Override
|
|
|
+ public boolean setSafeMode(SafeModeAction action) throws IOException {
|
|
|
+ SetSafeModeRequestProto req = SetSafeModeRequestProto.newBuilder().
|
|
|
+ setAction(PBHelper.convert(action)).build();
|
|
|
+ try {
|
|
|
+ return rpcProxy.setSafeMode(null, req).getResult();
|
|
|
+ } catch (ServiceException e) {
|
|
|
+ throw ProtobufHelper.getRemoteException(e);
|
|
|
+ }
|
|
|
+ }
|
|
|
+
|
|
|
+ @Override
|
|
|
+ public void saveNamespace() throws AccessControlException, IOException {
|
|
|
+ SaveNamespaceRequestProto req = SaveNamespaceRequestProto.newBuilder()
|
|
|
+ .build();
|
|
|
+ try {
|
|
|
+ rpcProxy.saveNamespace(null, req);
|
|
|
+ } catch (ServiceException e) {
|
|
|
+ throw ProtobufHelper.getRemoteException(e);
|
|
|
+ }
|
|
|
+ }
|
|
|
+
|
|
|
+ @Override
|
|
|
+ public boolean restoreFailedStorage(String arg)
|
|
|
+ throws AccessControlException, IOException{
|
|
|
+ RestoreFailedStorageRequestProto req = RestoreFailedStorageRequestProto
|
|
|
+ .newBuilder()
|
|
|
+ .setArg(arg).build();
|
|
|
+ try {
|
|
|
+ return rpcProxy.restoreFailedStorage(null, req).getResult();
|
|
|
+ } catch (ServiceException e) {
|
|
|
+ throw ProtobufHelper.getRemoteException(e);
|
|
|
+ }
|
|
|
+ }
|
|
|
+
|
|
|
+ @Override
|
|
|
+ public void refreshNodes() throws IOException {
|
|
|
+ RefreshNodesRequestProto req = RefreshNodesRequestProto.newBuilder().build();
|
|
|
+ try {
|
|
|
+ rpcProxy.refreshNodes(null, req);
|
|
|
+ } catch (ServiceException e) {
|
|
|
+ throw ProtobufHelper.getRemoteException(e);
|
|
|
+ }
|
|
|
+ }
|
|
|
+
|
|
|
+ @Override
|
|
|
+ public void finalizeUpgrade() throws IOException {
|
|
|
+ FinalizeUpgradeRequestProto req = FinalizeUpgradeRequestProto.newBuilder().build();
|
|
|
+ try {
|
|
|
+ rpcProxy.finalizeUpgrade(null, req);
|
|
|
+ } catch (ServiceException e) {
|
|
|
+ throw ProtobufHelper.getRemoteException(e);
|
|
|
+ }
|
|
|
+ }
|
|
|
+
|
|
|
+ @Override
|
|
|
+ public UpgradeStatusReport distributedUpgradeProgress(UpgradeAction action)
|
|
|
+ throws IOException {
|
|
|
+ DistributedUpgradeProgressRequestProto req =
|
|
|
+ DistributedUpgradeProgressRequestProto.newBuilder().
|
|
|
+ setAction(PBHelper.convert(action)).build();
|
|
|
+ try {
|
|
|
+ return PBHelper.convert(
|
|
|
+ rpcProxy.distributedUpgradeProgress(null, req).getReport());
|
|
|
+ } catch (ServiceException e) {
|
|
|
+ throw ProtobufHelper.getRemoteException(e);
|
|
|
+ }
|
|
|
+ }
|
|
|
+
|
|
|
+ @Override
|
|
|
+ public CorruptFileBlocks listCorruptFileBlocks(String path, String cookie)
|
|
|
+ throws IOException {
|
|
|
+ ListCorruptFileBlocksRequestProto req = ListCorruptFileBlocksRequestProto
|
|
|
+ .newBuilder().setPath(path).setCookie(cookie).build();
|
|
|
+ try {
|
|
|
+ return PBHelper.convert(
|
|
|
+ rpcProxy.listCorruptFileBlocks(null, req).getCorrupt());
|
|
|
+ } catch (ServiceException e) {
|
|
|
+ throw ProtobufHelper.getRemoteException(e);
|
|
|
+ }
|
|
|
+ }
|
|
|
+
|
|
|
+ @Override
|
|
|
+ public void metaSave(String filename) throws IOException {
|
|
|
+ MetaSaveRequestProto req = MetaSaveRequestProto.newBuilder()
|
|
|
+ .setFilename(filename).build();
|
|
|
+ try {
|
|
|
+ rpcProxy.metaSave(null, req);
|
|
|
+ } catch (ServiceException e) {
|
|
|
+ throw ProtobufHelper.getRemoteException(e);
|
|
|
+ }
|
|
|
+
|
|
|
+ }
|
|
|
+
|
|
|
+ @Override
|
|
|
+ public HdfsFileStatus getFileInfo(String src) throws AccessControlException,
|
|
|
+ FileNotFoundException, UnresolvedLinkException, IOException {
|
|
|
+ GetFileInfoRequestProto req = GetFileInfoRequestProto.newBuilder()
|
|
|
+ .setSrc(src).build();
|
|
|
+ try {
|
|
|
+ return PBHelper.convert(rpcProxy.getFileInfo(null, req).getFs());
|
|
|
+ } catch (ServiceException e) {
|
|
|
+ throw ProtobufHelper.getRemoteException(e);
|
|
|
+ }
|
|
|
+ }
|
|
|
+
|
|
|
+ @Override
|
|
|
+ public HdfsFileStatus getFileLinkInfo(String src)
|
|
|
+ throws AccessControlException, UnresolvedLinkException, IOException {
|
|
|
+ GetFileLinkInfoRequestProto req = GetFileLinkInfoRequestProto.newBuilder()
|
|
|
+ .setSrc(src).build();
|
|
|
+ try {
|
|
|
+ return PBHelper.convert(rpcProxy.getFileLinkInfo(null, req).getFs());
|
|
|
+ } catch (ServiceException e) {
|
|
|
+ throw ProtobufHelper.getRemoteException(e);
|
|
|
+ }
|
|
|
+ }
|
|
|
+
|
|
|
+ @Override
|
|
|
+ public ContentSummary getContentSummary(String path)
|
|
|
+ throws AccessControlException, FileNotFoundException,
|
|
|
+ UnresolvedLinkException, IOException {
|
|
|
+ GetContentSummaryRequestProto req = GetContentSummaryRequestProto
|
|
|
+ .newBuilder()
|
|
|
+ .setPath(path)
|
|
|
+ .build();
|
|
|
+ try {
|
|
|
+ return PBHelper.convert(rpcProxy.getContentSummary(null, req)
|
|
|
+ .getSummary());
|
|
|
+ } catch (ServiceException e) {
|
|
|
+ throw ProtobufHelper.getRemoteException(e);
|
|
|
+ }
|
|
|
+ }
|
|
|
+
|
|
|
+ @Override
|
|
|
+ public void setQuota(String path, long namespaceQuota, long diskspaceQuota)
|
|
|
+ throws AccessControlException, FileNotFoundException,
|
|
|
+ UnresolvedLinkException, IOException {
|
|
|
+ SetQuotaRequestProto req = SetQuotaRequestProto.newBuilder()
|
|
|
+ .setPath(path)
|
|
|
+ .setNamespaceQuota(namespaceQuota)
|
|
|
+ .setDiskspaceQuota(diskspaceQuota)
|
|
|
+ .build();
|
|
|
+ try {
|
|
|
+ rpcProxy.setQuota(null, req);
|
|
|
+ } catch (ServiceException e) {
|
|
|
+ throw ProtobufHelper.getRemoteException(e);
|
|
|
+ }
|
|
|
+ }
|
|
|
+
|
|
|
+ @Override
|
|
|
+ public void fsync(String src, String client) throws AccessControlException,
|
|
|
+ FileNotFoundException, UnresolvedLinkException, IOException {
|
|
|
+ FsyncRequestProto req = FsyncRequestProto.newBuilder()
|
|
|
+ .setSrc(src)
|
|
|
+ .setClient(client)
|
|
|
+ .build();
|
|
|
+ try {
|
|
|
+ rpcProxy.fsync(null, req);
|
|
|
+ } catch (ServiceException e) {
|
|
|
+ throw ProtobufHelper.getRemoteException(e);
|
|
|
+ }
|
|
|
+ }
|
|
|
+
|
|
|
+ @Override
|
|
|
+ public void setTimes(String src, long mtime, long atime)
|
|
|
+ throws AccessControlException, FileNotFoundException,
|
|
|
+ UnresolvedLinkException, IOException {
|
|
|
+ SetTimesRequestProto req = SetTimesRequestProto.newBuilder()
|
|
|
+ .setSrc(src)
|
|
|
+ .setMtime(mtime)
|
|
|
+ .setAtime(atime)
|
|
|
+ .build();
|
|
|
+ try {
|
|
|
+ rpcProxy.setTimes(null, req);
|
|
|
+ } catch (ServiceException e) {
|
|
|
+ throw ProtobufHelper.getRemoteException(e);
|
|
|
+ }
|
|
|
+ }
|
|
|
+
|
|
|
+ @Override
|
|
|
+ public void createSymlink(String target, String link, FsPermission dirPerm,
|
|
|
+ boolean createParent) throws AccessControlException,
|
|
|
+ FileAlreadyExistsException, FileNotFoundException,
|
|
|
+ ParentNotDirectoryException, SafeModeException, UnresolvedLinkException,
|
|
|
+ IOException {
|
|
|
+ CreateSymlinkRequestProto req = CreateSymlinkRequestProto.newBuilder()
|
|
|
+ .setTarget(target)
|
|
|
+ .setLink(link)
|
|
|
+ .setDirPerm(PBHelper.convert(dirPerm))
|
|
|
+ .setCreateParent(createParent)
|
|
|
+ .build();
|
|
|
+ try {
|
|
|
+ rpcProxy.createSymlink(null, req);
|
|
|
+ } catch (ServiceException e) {
|
|
|
+ throw ProtobufHelper.getRemoteException(e);
|
|
|
+ }
|
|
|
+ }
|
|
|
+
|
|
|
+ @Override
|
|
|
+ public String getLinkTarget(String path) throws AccessControlException,
|
|
|
+ FileNotFoundException, IOException {
|
|
|
+ GetLinkTargetRequestProto req = GetLinkTargetRequestProto.newBuilder()
|
|
|
+ .setPath(path).build();
|
|
|
+ try {
|
|
|
+ return rpcProxy.getLinkTarget(null, req).getTargetPath();
|
|
|
+ } catch (ServiceException e) {
|
|
|
+ throw ProtobufHelper.getRemoteException(e);
|
|
|
+ }
|
|
|
+ }
|
|
|
+
|
|
|
+ @Override
|
|
|
+ public LocatedBlock updateBlockForPipeline(ExtendedBlock block,
|
|
|
+ String clientName) throws IOException {
|
|
|
+ UpdateBlockForPipelineRequestProto req = UpdateBlockForPipelineRequestProto
|
|
|
+ .newBuilder()
|
|
|
+ .setBlock(PBHelper.convert(block))
|
|
|
+ .setClientName(clientName)
|
|
|
+ .build();
|
|
|
+ try {
|
|
|
+ return PBHelper.convert(
|
|
|
+ rpcProxy.updateBlockForPipeline(null, req).getBlock());
|
|
|
+ } catch (ServiceException e) {
|
|
|
+ throw ProtobufHelper.getRemoteException(e);
|
|
|
+ }
|
|
|
+ }
|
|
|
+
|
|
|
+ @Override
|
|
|
+ public void updatePipeline(String clientName, ExtendedBlock oldBlock,
|
|
|
+ ExtendedBlock newBlock, DatanodeID[] newNodes) throws IOException {
|
|
|
+ UpdatePipelineRequestProto req = UpdatePipelineRequestProto.newBuilder()
|
|
|
+ .setClientName(clientName)
|
|
|
+ .setOldBlock(PBHelper.convert(oldBlock))
|
|
|
+ .setNewBlock(PBHelper.convert(newBlock))
|
|
|
+ .addAllNewNodes(Arrays.asList(PBHelper.convert(newNodes)))
|
|
|
+ .build();
|
|
|
+ try {
|
|
|
+ rpcProxy.updatePipeline(null, req);
|
|
|
+ } catch (ServiceException e) {
|
|
|
+ throw ProtobufHelper.getRemoteException(e);
|
|
|
+ }
|
|
|
+ }
|
|
|
+
|
|
|
+ @Override
|
|
|
+ public Token<DelegationTokenIdentifier> getDelegationToken(Text renewer)
|
|
|
+ throws IOException {
|
|
|
+ GetDelegationTokenRequestProto req = GetDelegationTokenRequestProto
|
|
|
+ .newBuilder()
|
|
|
+ .setRenewer(renewer.toString())
|
|
|
+ .build();
|
|
|
+ try {
|
|
|
+ return PBHelper.convertDelegationToken(rpcProxy.getDelegationToken(null, req).getToken());
|
|
|
+ } catch (ServiceException e) {
|
|
|
+ throw ProtobufHelper.getRemoteException(e);
|
|
|
+ }
|
|
|
+ }
|
|
|
+
|
|
|
+ @Override
|
|
|
+ public long renewDelegationToken(Token<DelegationTokenIdentifier> token)
|
|
|
+ throws IOException {
|
|
|
+ RenewDelegationTokenRequestProto req = RenewDelegationTokenRequestProto.newBuilder().
|
|
|
+ setToken(PBHelper.convert(token)).
|
|
|
+ build();
|
|
|
+ try {
|
|
|
+ return rpcProxy.renewDelegationToken(null, req).getNewExireTime();
|
|
|
+ } catch (ServiceException e) {
|
|
|
+ throw ProtobufHelper.getRemoteException(e);
|
|
|
+ }
|
|
|
+ }
|
|
|
+
|
|
|
+ @Override
|
|
|
+ public void cancelDelegationToken(Token<DelegationTokenIdentifier> token)
|
|
|
+ throws IOException {
|
|
|
+ CancelDelegationTokenRequestProto req = CancelDelegationTokenRequestProto
|
|
|
+ .newBuilder()
|
|
|
+ .setToken(PBHelper.convert(token))
|
|
|
+ .build();
|
|
|
+ try {
|
|
|
+ rpcProxy.cancelDelegationToken(null, req);
|
|
|
+ } catch (ServiceException e) {
|
|
|
+ throw ProtobufHelper.getRemoteException(e);
|
|
|
+ }
|
|
|
+ }
|
|
|
+
|
|
|
+ @Override
|
|
|
+ public void setBalancerBandwidth(long bandwidth) throws IOException {
|
|
|
+ SetBalancerBandwidthRequestProto req = SetBalancerBandwidthRequestProto.newBuilder()
|
|
|
+ .setBandwidth(bandwidth)
|
|
|
+ .build();
|
|
|
+ try {
|
|
|
+ rpcProxy.setBalancerBandwidth(null, req);
|
|
|
+ } catch (ServiceException e) {
|
|
|
+ throw ProtobufHelper.getRemoteException(e);
|
|
|
+ }
|
|
|
+ }
|
|
|
+}
|