@@ -1,42 +1,44 @@
 /**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
  * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
  */
 package org.apache.hadoop.ozone.protocolPB;
 
-import java.io.Closeable;
-import java.io.IOException;
-import java.util.Set;
-
+import com.google.common.base.Preconditions;
 import com.google.common.collect.Sets;
 import com.google.protobuf.RpcController;
 import com.google.protobuf.ServiceException;
-
 import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
+import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto;
 import org.apache.hadoop.hdfs.protocolPB.PBHelperClient;
 import org.apache.hadoop.ipc.ProtobufHelper;
 import org.apache.hadoop.ipc.ProtocolTranslator;
 import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.ozone.protocol.LocatedContainer;
 import org.apache.hadoop.ozone.protocol.StorageContainerLocationProtocol;
+import org.apache.hadoop.ozone.protocol.proto.StorageContainerLocationProtocolProtos.ContainerRequestProto;
+import org.apache.hadoop.ozone.protocol.proto.StorageContainerLocationProtocolProtos.ContainerResponseProto;
 import org.apache.hadoop.ozone.protocol.proto.StorageContainerLocationProtocolProtos.GetStorageContainerLocationsRequestProto;
 import org.apache.hadoop.ozone.protocol.proto.StorageContainerLocationProtocolProtos.GetStorageContainerLocationsResponseProto;
 import org.apache.hadoop.ozone.protocol.proto.StorageContainerLocationProtocolProtos.LocatedContainerProto;
+import org.apache.hadoop.scm.container.common.helpers.Pipeline;
+
+import java.io.Closeable;
+import java.io.IOException;
+import java.util.Set;
 
 /**
  * This class is the client-side translator to translate the requests made on
@@ -47,7 +49,9 @@ import org.apache.hadoop.ozone.protocol.proto.StorageContainerLocationProtocolPr
 public final class StorageContainerLocationProtocolClientSideTranslatorPB
     implements StorageContainerLocationProtocol, ProtocolTranslator, Closeable {
 
-  /** RpcController is not used and hence is set to null. */
+  /**
+   * RpcController is not used and hence is set to null.
+   */
   private static final RpcController NULL_RPC_CONTROLLER = null;
 
   private final StorageContainerLocationProtocolPB rpcProxy;
@@ -67,7 +71,7 @@ public final class StorageContainerLocationProtocolClientSideTranslatorPB
       throws IOException {
     GetStorageContainerLocationsRequestProto.Builder req =
         GetStorageContainerLocationsRequestProto.newBuilder();
-    for (String key: keys) {
+    for (String key : keys) {
       req.addKeys(key);
     }
     final GetStorageContainerLocationsResponseProto resp;
@@ -79,11 +83,11 @@ public final class StorageContainerLocationProtocolClientSideTranslatorPB
     }
     Set<LocatedContainer> locatedContainers =
        Sets.newLinkedHashSetWithExpectedSize(resp.getLocatedContainersCount());
-    for (LocatedContainerProto locatedContainer:
+    for (LocatedContainerProto locatedContainer :
        resp.getLocatedContainersList()) {
      Set<DatanodeInfo> locations = Sets.newLinkedHashSetWithExpectedSize(
          locatedContainer.getLocationsCount());
-      for (DatanodeInfoProto location: locatedContainer.getLocationsList()) {
+      for (DatanodeInfoProto location : locatedContainer.getLocationsList()) {
        locations.add(PBHelperClient.convert(location));
      }
      locatedContainers.add(new LocatedContainer(locatedContainer.getKey(),
@@ -94,6 +98,37 @@ public final class StorageContainerLocationProtocolClientSideTranslatorPB
     return locatedContainers;
   }
 
+  /**
+   * Asks SCM where a container should be allocated. SCM responds with the set
+   * of datanodes that should be used creating this container.
+   *
+   * @param containerName - Name of the container.
+   * @return Pipeline.
+   * @throws IOException
+   */
+  @Override
+  public Pipeline allocateContainer(String containerName) throws IOException {
+
+    Preconditions.checkNotNull(containerName, "Container Name cannot be Null");
+    Preconditions.checkState(!containerName.isEmpty(), "Container name cannot" +
+        " be empty");
+
+    ContainerRequestProto request = ContainerRequestProto.newBuilder()
+        .setContainerName(containerName).build();
+
+    final ContainerResponseProto response;
+    try {
+      response = rpcProxy.allocateContainer(NULL_RPC_CONTROLLER, request);
+    } catch (ServiceException e) {
+      throw ProtobufHelper.getRemoteException(e);
+    }
+    if (response.getErrorCode() != ContainerResponseProto.Error.success) {
+      throw new IOException(response.hasErrorMessage() ?
+          response.getErrorMessage() : "Allocate container failed.");
+    }
+    return Pipeline.getFromProtoBuf(response.getPipeline());
+  }
+
   @Override
   public Object getUnderlyingProxyObject() {
     return rpcProxy;