StorageContainerLocationProtocol.proto

/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
/**
 * These .proto interfaces are private and unstable.
 * Please see http://wiki.apache.org/hadoop/Compatibility
 * for what changes are allowed for an *unstable* .proto interface.
 */
option java_package = "org.apache.hadoop.ozone.protocol.proto";
option java_outer_classname = "StorageContainerLocationProtocolProtos";
option java_generic_services = true;
option java_generate_equals_and_hash = true;

package hadoop.hdfs;

import "hdfs.proto";
import "DatanodeContainerProtocol.proto";
/**
 * keys - batch of object keys to find
 */
message GetStorageContainerLocationsRequestProto {
  repeated string keys = 1;
}
/**
 * locatedContainers - for each requested hash, nodes that currently host the
 * container for that object key hash
 */
message GetStorageContainerLocationsResponseProto {
  repeated LocatedContainerProto locatedContainers = 1;
}
/**
 * Holds the nodes that currently host the container for an object key.
 */
message LocatedContainerProto {
  required string key = 1;
  required string matchedKeyPrefix = 2;
  required string containerName = 3;
  repeated DatanodeInfoProto locations = 4;
  required DatanodeInfoProto leader = 5;
}
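
// Illustrative sketch of a single LocatedContainerProto in protobuf text
// format; the key, prefix, and container names are hypothetical, and the
// DatanodeInfoProto sub-fields (defined in hdfs.proto) are elided:
//
//   key: "/volume1/bucket1/key1"
//   matchedKeyPrefix: "/volume1/bucket1"
//   containerName: "container-0001"
//   locations { ... }   # one entry per datanode hosting the container
//   locations { ... }
//   leader { ... }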
/**
 * Request sent to SCM asking where the container should be created.
 */
message ContainerRequestProto {
  required string containerName = 1;
}
/**
 * Reply from SCM indicating the result of the container request.
 */
message ContainerResponseProto {
  enum Error {
    success = 1;
    errorContainerAlreadyExists = 2;
    errorContainerMissing = 3;
  }
  required Error errorCode = 1;
  required hadoop.hdfs.ozone.Pipeline pipeline = 2;
  optional string errorMessage = 3;
}
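
// Illustrative sketch of a successful ContainerResponseProto in protobuf text
// format; the Pipeline sub-message (defined in DatanodeContainerProtocol.proto)
// is elided, and the optional errorMessage is omitted on success:
//
//   errorCode: success
//   pipeline { ... }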
/**
 * Protocol used from an HDFS node to StorageContainerManager. See the request
 * and response messages for details of the RPC calls.
 */
service StorageContainerLocationProtocolService {
  /**
   * Find the set of nodes that currently host the container of an object, as
   * identified by the object key hash. This method supports batch lookup by
   * passing multiple key hashes.
   */
  rpc getStorageContainerLocations(GetStorageContainerLocationsRequestProto)
      returns (GetStorageContainerLocationsResponseProto);

  /**
   * Creates a container entry in SCM.
   */
  rpc allocateContainer(ContainerRequestProto) returns (ContainerResponseProto);
}
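
A minimal client-side sketch in Java of how the two RPCs above might be invoked
through the classes protoc generates for this file (java_generic_services is
enabled, so a StorageContainerLocationProtocolService.BlockingInterface stub is
produced). Obtaining that stub through Hadoop's RPC machinery is not shown, the
object keys and container name are hypothetical, and passing a null
RpcController is an assumption that may not hold for every RPC engine.

import com.google.protobuf.ServiceException;

import org.apache.hadoop.ozone.protocol.proto.StorageContainerLocationProtocolProtos.ContainerRequestProto;
import org.apache.hadoop.ozone.protocol.proto.StorageContainerLocationProtocolProtos.ContainerResponseProto;
import org.apache.hadoop.ozone.protocol.proto.StorageContainerLocationProtocolProtos.GetStorageContainerLocationsRequestProto;
import org.apache.hadoop.ozone.protocol.proto.StorageContainerLocationProtocolProtos.GetStorageContainerLocationsResponseProto;
import org.apache.hadoop.ozone.protocol.proto.StorageContainerLocationProtocolProtos.LocatedContainerProto;
import org.apache.hadoop.ozone.protocol.proto.StorageContainerLocationProtocolProtos.StorageContainerLocationProtocolService;

public class ScmLocationClientSketch {

  private final StorageContainerLocationProtocolService.BlockingInterface scm;

  public ScmLocationClientSketch(
      StorageContainerLocationProtocolService.BlockingInterface scm) {
    this.scm = scm;  // proxy wiring (e.g. Hadoop RPC) assumed done elsewhere
  }

  /** Batch lookup of the containers currently hosting the given object keys. */
  public void printLocations() throws ServiceException {
    GetStorageContainerLocationsRequestProto request =
        GetStorageContainerLocationsRequestProto.newBuilder()
            .addKeys("/volume1/bucket1/key1")   // hypothetical keys
            .addKeys("/volume1/bucket1/key2")
            .build();
    GetStorageContainerLocationsResponseProto response =
        scm.getStorageContainerLocations(null, request);
    for (LocatedContainerProto located : response.getLocatedContainersList()) {
      System.out.println(located.getKey() + " -> " + located.getContainerName()
          + " (" + located.getLocationsCount() + " replicas)");
    }
  }

  /** Asks SCM to allocate a container and fails loudly on any error code. */
  public void allocate(String containerName) throws ServiceException {
    ContainerRequestProto request = ContainerRequestProto.newBuilder()
        .setContainerName(containerName)
        .build();
    ContainerResponseProto response = scm.allocateContainer(null, request);
    if (response.getErrorCode() != ContainerResponseProto.Error.success) {
      throw new IllegalStateException("allocateContainer failed: "
          + (response.hasErrorMessage()
              ? response.getErrorMessage()
              : response.getErrorCode().name()));
    }
  }
}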