瀏覽代碼

HADOOP-16558. [COMMON+HDFS] use protobuf-maven-plugin to generate protobuf classes (#1494). Contributed by Vinayakumar B.

Vinayakumar B 5 年之前
父節點
當前提交
07c81e9bfc
共有 45 個文件被更改,包括 141 次插入和 169 次刪除
  1. 14 52
      hadoop-common-project/hadoop-common/pom.xml
  2. 1 1
      hadoop-common-project/hadoop-common/src/main/proto/FSProtos.proto
  3. 1 1
      hadoop-common-project/hadoop-common/src/main/proto/GenericRefreshProtocol.proto
  4. 1 1
      hadoop-common-project/hadoop-common/src/main/proto/GetUserMappingsProtocol.proto
  5. 1 1
      hadoop-common-project/hadoop-common/src/main/proto/HAServiceProtocol.proto
  6. 1 1
      hadoop-common-project/hadoop-common/src/main/proto/IpcConnectionContext.proto
  7. 1 1
      hadoop-common-project/hadoop-common/src/main/proto/ProtobufRpcEngine.proto
  8. 1 1
      hadoop-common-project/hadoop-common/src/main/proto/ProtocolInfo.proto
  9. 1 1
      hadoop-common-project/hadoop-common/src/main/proto/RefreshAuthorizationPolicyProtocol.proto
  10. 1 1
      hadoop-common-project/hadoop-common/src/main/proto/RefreshCallQueueProtocol.proto
  11. 1 1
      hadoop-common-project/hadoop-common/src/main/proto/RefreshUserMappingsProtocol.proto
  12. 1 1
      hadoop-common-project/hadoop-common/src/main/proto/RpcHeader.proto
  13. 1 1
      hadoop-common-project/hadoop-common/src/main/proto/Security.proto
  14. 1 1
      hadoop-common-project/hadoop-common/src/main/proto/TraceAdmin.proto
  15. 1 1
      hadoop-common-project/hadoop-common/src/main/proto/ZKFCProtocol.proto
  16. 1 1
      hadoop-common-project/hadoop-common/src/test/proto/test.proto
  17. 1 0
      hadoop-common-project/hadoop-common/src/test/proto/test_rpc_service.proto
  18. 9 27
      hadoop-hdfs-project/hadoop-hdfs-client/pom.xml
  19. 1 1
      hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/ClientDatanodeProtocol.proto
  20. 1 1
      hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/ClientNamenodeProtocol.proto
  21. 1 1
      hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/ReconfigurationProtocol.proto
  22. 1 1
      hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/acl.proto
  23. 1 1
      hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/datatransfer.proto
  24. 1 1
      hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/encryption.proto
  25. 1 1
      hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/erasurecoding.proto
  26. 1 1
      hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/hdfs.proto
  27. 1 1
      hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/inotify.proto
  28. 1 1
      hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/xattr.proto
  29. 12 20
      hadoop-hdfs-project/hadoop-hdfs-rbf/pom.xml
  30. 1 1
      hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/proto/FederationProtocol.proto
  31. 1 1
      hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/proto/RouterProtocol.proto
  32. 18 30
      hadoop-hdfs-project/hadoop-hdfs/pom.xml
  33. 1 1
      hadoop-hdfs-project/hadoop-hdfs/src/main/proto/AliasMapProtocol.proto
  34. 1 1
      hadoop-hdfs-project/hadoop-hdfs/src/main/proto/DatanodeLifelineProtocol.proto
  35. 1 1
      hadoop-hdfs-project/hadoop-hdfs/src/main/proto/DatanodeProtocol.proto
  36. 1 1
      hadoop-hdfs-project/hadoop-hdfs/src/main/proto/HAZKInfo.proto
  37. 1 1
      hadoop-hdfs-project/hadoop-hdfs/src/main/proto/HdfsServer.proto
  38. 1 1
      hadoop-hdfs-project/hadoop-hdfs/src/main/proto/InterDatanodeProtocol.proto
  39. 1 1
      hadoop-hdfs-project/hadoop-hdfs/src/main/proto/InterQJournalProtocol.proto
  40. 1 1
      hadoop-hdfs-project/hadoop-hdfs/src/main/proto/JournalProtocol.proto
  41. 1 1
      hadoop-hdfs-project/hadoop-hdfs/src/main/proto/NamenodeProtocol.proto
  42. 1 1
      hadoop-hdfs-project/hadoop-hdfs/src/main/proto/QJournalProtocol.proto
  43. 1 1
      hadoop-hdfs-project/hadoop-hdfs/src/main/proto/editlog.proto
  44. 1 1
      hadoop-hdfs-project/hadoop-hdfs/src/main/proto/fsimage.proto
  45. 48 1
      hadoop-project/pom.xml

+ 14 - 52
hadoop-common-project/hadoop-common/pom.xml

@@ -380,6 +380,20 @@
       </resource>
     </resources>
     <plugins>
+      <plugin>
+        <groupId>org.xolstice.maven.plugins</groupId>
+        <artifactId>protobuf-maven-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>src-compile-protoc</id>
+            <configuration><skip>false</skip></configuration>
+          </execution>
+          <execution>
+            <id>src-test-compile-protoc</id>
+            <configuration><skip>false</skip></configuration>
+          </execution>
+        </executions>
+      </plugin>
       <plugin>
         <groupId>org.apache.hadoop</groupId>
         <artifactId>hadoop-maven-plugins</artifactId>
@@ -400,58 +414,6 @@
               </source>
             </configuration>
           </execution>
-          <execution>
-            <id>compile-protoc</id>
-            <goals>
-              <goal>protoc</goal>
-            </goals>
-            <configuration>
-              <protocVersion>${protobuf.version}</protocVersion>
-              <protocCommand>${protoc.path}</protocCommand>
-              <imports>
-                <param>${basedir}/src/main/proto</param>
-              </imports>
-              <source>
-                <directory>${basedir}/src/main/proto</directory>
-                <includes>
-                  <include>HAServiceProtocol.proto</include>
-                  <include>IpcConnectionContext.proto</include>
-                  <include>ProtocolInfo.proto</include>
-                  <include>RpcHeader.proto</include>
-                  <include>ZKFCProtocol.proto</include>
-                  <include>ProtobufRpcEngine.proto</include>
-                  <include>Security.proto</include>
-                  <include>GetUserMappingsProtocol.proto</include>
-                  <include>TraceAdmin.proto</include>
-                  <include>RefreshAuthorizationPolicyProtocol.proto</include>
-                  <include>RefreshUserMappingsProtocol.proto</include>
-                  <include>RefreshCallQueueProtocol.proto</include>
-                  <include>GenericRefreshProtocol.proto</include>
-                  <include>FSProtos.proto</include>
-                </includes>
-              </source>
-            </configuration>
-          </execution>
-          <execution>
-            <id>compile-test-protoc</id>
-            <goals>
-              <goal>test-protoc</goal>
-            </goals>
-            <configuration>
-              <protocVersion>${protobuf.version}</protocVersion>
-              <protocCommand>${protoc.path}</protocCommand>
-              <imports>
-                <param>${basedir}/src/test/proto</param>
-              </imports>
-              <source>
-                <directory>${basedir}/src/test/proto</directory>
-                <includes>
-                  <include>test.proto</include>
-                  <include>test_rpc_service.proto</include>
-                </includes>
-              </source>
-            </configuration>
-          </execution>
           <execution>
             <id>resource-gz</id>
             <phase>generate-resources</phase>

+ 1 - 1
hadoop-common-project/hadoop-common/src/main/proto/FSProtos.proto

@@ -21,7 +21,7 @@
  * Please see http://wiki.apache.org/hadoop/Compatibility
  * for what changes are allowed for a *stable* .proto interface.
  */
-
+syntax = "proto2";
 option java_package = "org.apache.hadoop.fs";
 option java_outer_classname = "FSProtos";
 option java_generic_services = true;

+ 1 - 1
hadoop-common-project/hadoop-common/src/main/proto/GenericRefreshProtocol.proto

@@ -21,7 +21,7 @@
  * Please see http://wiki.apache.org/hadoop/Compatibility
  * for what changes are allowed for a *stable* .proto interface.
  */
-
+syntax = "proto2";
 option java_package = "org.apache.hadoop.ipc.proto";
 option java_outer_classname = "GenericRefreshProtocolProtos";
 option java_generic_services = true;

+ 1 - 1
hadoop-common-project/hadoop-common/src/main/proto/GetUserMappingsProtocol.proto

@@ -21,7 +21,7 @@
  * Please see http://wiki.apache.org/hadoop/Compatibility
  * for what changes are allowed for a *stable* .proto interface.
  */
-
+syntax = "proto2";
 option java_package = "org.apache.hadoop.tools.proto";
 option java_outer_classname = "GetUserMappingsProtocolProtos";
 option java_generic_services = true;

+ 1 - 1
hadoop-common-project/hadoop-common/src/main/proto/HAServiceProtocol.proto

@@ -21,7 +21,7 @@
  * Please see http://wiki.apache.org/hadoop/Compatibility
  * for what changes are allowed for a *stable* .proto interface.
  */
-
+syntax = "proto2";
 option java_package = "org.apache.hadoop.ha.proto";
 option java_outer_classname = "HAServiceProtocolProtos";
 option java_generic_services = true;

+ 1 - 1
hadoop-common-project/hadoop-common/src/main/proto/IpcConnectionContext.proto

@@ -21,7 +21,7 @@
  * Please see http://wiki.apache.org/hadoop/Compatibility
  * for what changes are allowed for a *stable* .proto interface.
  */
-
+syntax = "proto2";
 option java_package = "org.apache.hadoop.ipc.protobuf";
 option java_outer_classname = "IpcConnectionContextProtos";
 option java_generate_equals_and_hash = true;

+ 1 - 1
hadoop-common-project/hadoop-common/src/main/proto/ProtobufRpcEngine.proto

@@ -21,7 +21,7 @@
  * Please see http://wiki.apache.org/hadoop/Compatibility
  * for what changes are allowed for a *stable* .proto interface.
  */
-
+syntax = "proto2";
 /**
  * These are the messages used by Hadoop RPC for the Rpc Engine Protocol Buffer
  * to marshal the request and response in the RPC layer.

+ 1 - 1
hadoop-common-project/hadoop-common/src/main/proto/ProtocolInfo.proto

@@ -21,7 +21,7 @@
  * Please see http://wiki.apache.org/hadoop/Compatibility
  * for what changes are allowed for a *stable* .proto interface.
  */
-
+syntax = "proto2";
 option java_package = "org.apache.hadoop.ipc.protobuf";
 option java_outer_classname = "ProtocolInfoProtos";
 option java_generic_services = true;

+ 1 - 1
hadoop-common-project/hadoop-common/src/main/proto/RefreshAuthorizationPolicyProtocol.proto

@@ -21,7 +21,7 @@
  * Please see http://wiki.apache.org/hadoop/Compatibility
  * for what changes are allowed for a *stable* .proto interface.
  */
-
+syntax = "proto2";
 option java_package = "org.apache.hadoop.security.proto";
 option java_outer_classname = "RefreshAuthorizationPolicyProtocolProtos";
 option java_generic_services = true;

+ 1 - 1
hadoop-common-project/hadoop-common/src/main/proto/RefreshCallQueueProtocol.proto

@@ -21,7 +21,7 @@
  * Please see http://wiki.apache.org/hadoop/Compatibility
  * for what changes are allowed for a *stable* .proto interface.
  */
-
+syntax = "proto2";
 option java_package = "org.apache.hadoop.ipc.proto";
 option java_outer_classname = "RefreshCallQueueProtocolProtos";
 option java_generic_services = true;

+ 1 - 1
hadoop-common-project/hadoop-common/src/main/proto/RefreshUserMappingsProtocol.proto

@@ -21,7 +21,7 @@
  * Please see http://wiki.apache.org/hadoop/Compatibility
  * for what changes are allowed for a *stable* .proto interface.
  */
-
+syntax = "proto2";
 option java_package = "org.apache.hadoop.security.proto";
 option java_outer_classname = "RefreshUserMappingsProtocolProtos";
 option java_generic_services = true;

+ 1 - 1
hadoop-common-project/hadoop-common/src/main/proto/RpcHeader.proto

@@ -21,7 +21,7 @@
  * Please see http://wiki.apache.org/hadoop/Compatibility
  * for what changes are allowed for a *stable* .proto interface.
  */
-
+syntax = "proto2";
 option java_package = "org.apache.hadoop.ipc.protobuf";
 option java_outer_classname = "RpcHeaderProtos";
 option java_generate_equals_and_hash = true;

+ 1 - 1
hadoop-common-project/hadoop-common/src/main/proto/Security.proto

@@ -21,7 +21,7 @@
  * Please see http://wiki.apache.org/hadoop/Compatibility
  * for what changes are allowed for a *stable* .proto interface.
  */
-
+syntax = "proto2";
 option java_package = "org.apache.hadoop.security.proto";
 option java_outer_classname = "SecurityProtos";
 option java_generic_services = true;

+ 1 - 1
hadoop-common-project/hadoop-common/src/main/proto/TraceAdmin.proto

@@ -21,7 +21,7 @@
  * Please see http://wiki.apache.org/hadoop/Compatibility
  * for what changes are allowed for a *stable* .proto interface.
  */
-
+syntax = "proto2";
 option java_package = "org.apache.hadoop.tracing";
 option java_outer_classname = "TraceAdminPB";
 option java_generic_services = true;

+ 1 - 1
hadoop-common-project/hadoop-common/src/main/proto/ZKFCProtocol.proto

@@ -21,7 +21,7 @@
  * Please see http://wiki.apache.org/hadoop/Compatibility
  * for what changes are allowed for a *stable* .proto interface.
  */
-
+syntax = "proto2";
 option java_package = "org.apache.hadoop.ha.proto";
 option java_outer_classname = "ZKFCProtocolProtos";
 option java_generic_services = true;

+ 1 - 1
hadoop-common-project/hadoop-common/src/test/proto/test.proto

@@ -15,7 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
+syntax = "proto2";
 option java_package = "org.apache.hadoop.ipc.protobuf";
 option java_outer_classname = "TestProtos";
 option java_generate_equals_and_hash = true;

+ 1 - 0
hadoop-common-project/hadoop-common/src/test/proto/test_rpc_service.proto

@@ -15,6 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+syntax = "proto2";
 option java_package = "org.apache.hadoop.ipc.protobuf";
 option java_outer_classname = "TestRpcServiceProtos";
 option java_generic_services = true;

+ 9 - 27
hadoop-hdfs-project/hadoop-hdfs-client/pom.xml

@@ -131,36 +131,18 @@ https://maven.apache.org/xsd/maven-4.0.0.xsd">
         </configuration>
       </plugin>
       <plugin>
-        <groupId>org.apache.hadoop</groupId>
-        <artifactId>hadoop-maven-plugins</artifactId>
+        <groupId>org.xolstice.maven.plugins</groupId>
+        <artifactId>protobuf-maven-plugin</artifactId>
         <executions>
           <execution>
-            <id>compile-protoc</id>
-            <goals>
-              <goal>protoc</goal>
-            </goals>
+            <id>src-compile-protoc</id>
             <configuration>
-              <protocVersion>${protobuf.version}</protocVersion>
-              <protocCommand>${protoc.path}</protocCommand>
-              <imports>
-                <param>${basedir}/../../hadoop-common-project/hadoop-common/src/main/proto</param>
-                <param>${basedir}/src/main/proto</param>
-              </imports>
-              <source>
-                <directory>${basedir}/src/main/proto</directory>
-                <includes>
-                  <include>ClientDatanodeProtocol.proto</include>
-                  <include>ClientNamenodeProtocol.proto</include>
-                  <include>acl.proto</include>
-                  <include>xattr.proto</include>
-                  <include>datatransfer.proto</include>
-                  <include>hdfs.proto</include>
-                  <include>encryption.proto</include>
-                  <include>inotify.proto</include>
-                  <include>erasurecoding.proto</include>
-                  <include>ReconfigurationProtocol.proto</include>
-                </includes>
-              </source>
+              <skip>false</skip>
+              <additionalProtoPathElements>
+                <additionalProtoPathElement>
+                  ${basedir}/../../hadoop-common-project/hadoop-common/src/main/proto
+                </additionalProtoPathElement>
+              </additionalProtoPathElements>
             </configuration>
           </execution>
         </executions>

+ 1 - 1
hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/ClientDatanodeProtocol.proto

@@ -21,7 +21,7 @@
  * Please see http://wiki.apache.org/hadoop/Compatibility
  * for what changes are allowed for a *stable* .proto interface.
  */
-
+syntax="proto2";
 // This file contains protocol buffers that are used throughout HDFS -- i.e.
 // by the client, server, and data transfer protocols.
 

+ 1 - 1
hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/ClientNamenodeProtocol.proto

@@ -21,7 +21,7 @@
  * Please see http://wiki.apache.org/hadoop/Compatibility
  * for what changes are allowed for a *stable* .proto interface.
  */
-
+syntax="proto2";
 option java_package = "org.apache.hadoop.hdfs.protocol.proto";
 option java_outer_classname = "ClientNamenodeProtocolProtos";
 option java_generic_services = true;

+ 1 - 1
hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/ReconfigurationProtocol.proto

@@ -15,7 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
+syntax="proto2";
  // This file contains protocol buffers that are used to reconfigure NameNode
  // and DataNode by HDFS admin.
 

+ 1 - 1
hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/acl.proto

@@ -15,7 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
+syntax="proto2";
 option java_package = "org.apache.hadoop.hdfs.protocol.proto";
 option java_outer_classname = "AclProtos";
 option java_generate_equals_and_hash = true;

+ 1 - 1
hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/datatransfer.proto

@@ -21,7 +21,7 @@
  * Please see http://wiki.apache.org/hadoop/Compatibility
  * for what changes are allowed for a *stable* .proto interface.
  */
-
+syntax="proto2";
 // This file contains protocol buffers that are used to transfer data
 // to and from the datanode, as well as between datanodes.
 

+ 1 - 1
hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/encryption.proto

@@ -21,7 +21,7 @@
  * Please see http://wiki.apache.org/hadoop/Compatibility
  * for what changes are allowed for a *stable* .proto interface.
  */
-
+syntax="proto2";
 // This file contains protocol buffers that are used throughout HDFS -- i.e.
 // by the client, server, and data transfer protocols.
 

+ 1 - 1
hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/erasurecoding.proto

@@ -15,7 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
- 
+syntax="proto2";
 option java_package = "org.apache.hadoop.hdfs.protocol.proto";
 option java_outer_classname = "ErasureCodingProtos";
 option java_generate_equals_and_hash = true;

+ 1 - 1
hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/hdfs.proto

@@ -21,7 +21,7 @@
  * Please see http://wiki.apache.org/hadoop/Compatibility
  * for what changes are allowed for a *stable* .proto interface.
  */
-
+syntax="proto2";
 // This file contains protocol buffers that are used throughout HDFS -- i.e.
 // by the client, server, and data transfer protocols.
 

+ 1 - 1
hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/inotify.proto

@@ -21,7 +21,7 @@
  * Please see http://wiki.apache.org/hadoop/Compatibility
  * for what changes are allowed for a *stable* .proto interface.
  */
-
+syntax="proto2";
 // This file contains protocol buffers used to communicate edits to clients
 // as part of the inotify system.
 

+ 1 - 1
hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/xattr.proto

@@ -15,7 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
+syntax="proto2";
 option java_package = "org.apache.hadoop.hdfs.protocol.proto";
 option java_outer_classname = "XAttrProtos";
 option java_generate_equals_and_hash = true;

+ 12 - 20
hadoop-hdfs-project/hadoop-hdfs-rbf/pom.xml

@@ -184,29 +184,21 @@ https://maven.apache.org/xsd/maven-4.0.0.xsd">
         </executions>
       </plugin>
       <plugin>
-        <groupId>org.apache.hadoop</groupId>
-        <artifactId>hadoop-maven-plugins</artifactId>
+        <groupId>org.xolstice.maven.plugins</groupId>
+        <artifactId>protobuf-maven-plugin</artifactId>
         <executions>
           <execution>
-            <id>compile-protoc</id>
-            <goals>
-              <goal>protoc</goal>
-            </goals>
+            <id>src-compile-protoc</id>
             <configuration>
-              <protocVersion>${protobuf.version}</protocVersion>
-              <protocCommand>${protoc.path}</protocCommand>
-              <imports>
-                <param>${basedir}/../hadoop-hdfs-client/src/main/proto</param>
-                <param>${basedir}/../../hadoop-common-project/hadoop-common/src/main/proto</param>
-                <param>${basedir}/src/main/proto</param>
-              </imports>
-              <source>
-                <directory>${basedir}/src/main/proto</directory>
-                <includes>
-                  <include>FederationProtocol.proto</include>
-                  <include>RouterProtocol.proto</include>
-                </includes>
-              </source>
+              <skip>false</skip>
+              <additionalProtoPathElements>
+                <additionalProtoPathElement>
+                  ${basedir}/../../hadoop-common-project/hadoop-common/src/main/proto
+                </additionalProtoPathElement>
+                <additionalProtoPathElement>
+                  ${basedir}/../hadoop-hdfs-client/src/main/proto
+                </additionalProtoPathElement>
+              </additionalProtoPathElements>
             </configuration>
           </execution>
         </executions>

+ 1 - 1
hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/proto/FederationProtocol.proto

@@ -15,7 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
+syntax = "proto2";
 option java_package = "org.apache.hadoop.hdfs.federation.protocol.proto";
 option java_outer_classname = "HdfsServerFederationProtos";
 option java_generic_services = true;

+ 1 - 1
hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/proto/RouterProtocol.proto

@@ -15,7 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
+syntax = "proto2";
 option java_package = "org.apache.hadoop.hdfs.protocol.proto";
 option java_outer_classname = "RouterProtocolProtos";
 option java_generic_services = true;

+ 18 - 30
hadoop-hdfs-project/hadoop-hdfs/pom.xml

@@ -310,41 +310,29 @@ https://maven.apache.org/xsd/maven-4.0.0.xsd">
         </executions>
       </plugin>
       <plugin>
-        <groupId>org.apache.hadoop</groupId>
-        <artifactId>hadoop-maven-plugins</artifactId>
+        <groupId>org.xolstice.maven.plugins</groupId>
+        <artifactId>protobuf-maven-plugin</artifactId>
         <executions>
           <execution>
-            <id>compile-protoc</id>
-            <goals>
-              <goal>protoc</goal>
-            </goals>
+            <id>src-compile-protoc</id>
             <configuration>
-              <protocVersion>${protobuf.version}</protocVersion>
-              <protocCommand>${protoc.path}</protocCommand>
-              <imports>
-                <param>${basedir}/../../hadoop-common-project/hadoop-common/src/main/proto</param>
-                <param>${basedir}/../hadoop-hdfs-client/src/main/proto</param>
-                <param>${basedir}/src/main/proto</param>
-              </imports>
-              <source>
-                <directory>${basedir}/src/main/proto</directory>
-                <includes>
-                  <include>HdfsServer.proto</include>
-                  <include>DatanodeProtocol.proto</include>
-                  <include>DatanodeLifelineProtocol.proto</include>
-                  <include>HAZKInfo.proto</include>
-                  <include>InterDatanodeProtocol.proto</include>
-                  <include>JournalProtocol.proto</include>
-                  <include>NamenodeProtocol.proto</include>
-                  <include>QJournalProtocol.proto</include>
-                  <include>editlog.proto</include>
-                  <include>fsimage.proto</include>
-                  <include>AliasMapProtocol.proto</include>
-                  <include>InterQJournalProtocol.proto</include>
-                </includes>
-              </source>
+              <skip>false</skip>
+              <additionalProtoPathElements>
+                <additionalProtoPathElement>
+                  ${basedir}/../../hadoop-common-project/hadoop-common/src/main/proto
+                </additionalProtoPathElement>
+                <additionalProtoPathElement>
+                  ${basedir}/../hadoop-hdfs-client/src/main/proto
+                </additionalProtoPathElement>
+              </additionalProtoPathElements>
             </configuration>
           </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-maven-plugins</artifactId>
+        <executions>
           <execution>
             <id>resource-gz</id>
             <phase>generate-resources</phase>

+ 1 - 1
hadoop-hdfs-project/hadoop-hdfs/src/main/proto/AliasMapProtocol.proto

@@ -15,7 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
+syntax = "proto2";
 option java_package = "org.apache.hadoop.hdfs.protocol.proto";
 option java_outer_classname = "AliasMapProtocolProtos";
 option java_generic_services = true;

+ 1 - 1
hadoop-hdfs-project/hadoop-hdfs/src/main/proto/DatanodeLifelineProtocol.proto

@@ -21,7 +21,7 @@
  * Please see http://wiki.apache.org/hadoop/Compatibility
  * for what changes are allowed for a *stable* .proto interface.
  */
-
+syntax = "proto2";
 option java_package = "org.apache.hadoop.hdfs.protocol.proto";
 option java_outer_classname = "DatanodeLifelineProtocolProtos";
 option java_generic_services = true;

+ 1 - 1
hadoop-hdfs-project/hadoop-hdfs/src/main/proto/DatanodeProtocol.proto

@@ -24,7 +24,7 @@
 
 // This file contains protocol buffers that are used throughout HDFS -- i.e.
 // by the client, server, and data transfer protocols.
-
+syntax = "proto2";
 option java_package = "org.apache.hadoop.hdfs.protocol.proto";
 option java_outer_classname = "DatanodeProtocolProtos";
 option java_generic_services = true;

+ 1 - 1
hadoop-hdfs-project/hadoop-hdfs/src/main/proto/HAZKInfo.proto

@@ -21,7 +21,7 @@
  * Please see http://wiki.apache.org/hadoop/Compatibility
  * for what changes are allowed for a *stable* .proto interface.
  */
-
+syntax = "proto2";
 option java_package = "org.apache.hadoop.hdfs.server.namenode.ha.proto";
 option java_outer_classname = "HAZKInfoProtos";
 package hadoop.hdfs;

+ 1 - 1
hadoop-hdfs-project/hadoop-hdfs/src/main/proto/HdfsServer.proto

@@ -24,7 +24,7 @@
 
 // This file contains protocol buffers that are used throughout HDFS -- i.e.
 // by the client, server, and data transfer protocols.
-
+syntax = "proto2";
 
 option java_package = "org.apache.hadoop.hdfs.protocol.proto";
 option java_outer_classname = "HdfsServerProtos";

+ 1 - 1
hadoop-hdfs-project/hadoop-hdfs/src/main/proto/InterDatanodeProtocol.proto

@@ -24,7 +24,7 @@
 
 // This file contains protocol buffers that are used throughout HDFS -- i.e.
 // by the client, server, and data transfer protocols.
-
+syntax = "proto2";
 option java_package = "org.apache.hadoop.hdfs.protocol.proto";
 option java_outer_classname = "InterDatanodeProtocolProtos";
 option java_generic_services = true;

+ 1 - 1
hadoop-hdfs-project/hadoop-hdfs/src/main/proto/InterQJournalProtocol.proto

@@ -21,7 +21,7 @@
  * Please see http://wiki.apache.org/hadoop/Compatibility
  * for what changes are allowed for a *stable* .proto interface.
  */
-
+syntax = "proto2";
 option java_package = "org.apache.hadoop.hdfs.qjournal.protocol";
 option java_outer_classname = "InterQJournalProtocolProtos";
 option java_generic_services = true;

+ 1 - 1
hadoop-hdfs-project/hadoop-hdfs/src/main/proto/JournalProtocol.proto

@@ -24,7 +24,7 @@
 
 // This file contains protocol buffers that are used throughout HDFS -- i.e.
 // by the client, server, and data transfer protocols.
-
+syntax = "proto2";
 option java_package = "org.apache.hadoop.hdfs.protocol.proto";
 option java_outer_classname = "JournalProtocolProtos";
 option java_generic_services = true;

+ 1 - 1
hadoop-hdfs-project/hadoop-hdfs/src/main/proto/NamenodeProtocol.proto

@@ -24,7 +24,7 @@
 
 // This file contains protocol buffers that are used throughout HDFS -- i.e.
 // by the client, server, and data transfer protocols.
-
+syntax = "proto2";
 option java_package = "org.apache.hadoop.hdfs.protocol.proto";
 option java_outer_classname = "NamenodeProtocolProtos";
 option java_generic_services = true;

+ 1 - 1
hadoop-hdfs-project/hadoop-hdfs/src/main/proto/QJournalProtocol.proto

@@ -21,7 +21,7 @@
  * Please see http://wiki.apache.org/hadoop/Compatibility
  * for what changes are allowed for a *stable* .proto interface.
  */
-
+syntax = "proto2";
 option java_package = "org.apache.hadoop.hdfs.qjournal.protocol";
 option java_outer_classname = "QJournalProtocolProtos";
 option java_generic_services = true;

+ 1 - 1
hadoop-hdfs-project/hadoop-hdfs/src/main/proto/editlog.proto

@@ -15,7 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
+syntax = "proto2";
 option java_package = "org.apache.hadoop.hdfs.protocol.proto";
 option java_outer_classname = "EditLogProtos";
 option java_generate_equals_and_hash = true;

+ 1 - 1
hadoop-hdfs-project/hadoop-hdfs/src/main/proto/fsimage.proto

@@ -15,7 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
+syntax = "proto2";
 option java_package = "org.apache.hadoop.hdfs.server.namenode";
 option java_outer_classname = "FsImageProto";
 

+ 48 - 1
hadoop-project/pom.xml

@@ -1715,9 +1715,56 @@
           <artifactId>frontend-maven-plugin</artifactId>
           <version>${frontend-maven-plugin.version}</version>
         </plugin>
+        <plugin>
+          <groupId>org.xolstice.maven.plugins</groupId>
+          <artifactId>protobuf-maven-plugin</artifactId>
+          <version>${protobuf-maven-plugin.version}</version>
+          <extensions>true</extensions>
+          <configuration>
+            <protocArtifact>
+              com.google.protobuf:protoc:${protobuf.version}:exe:${os.detected.classifier}
+            </protocArtifact>
+            <attachProtoSources>false</attachProtoSources>
+          </configuration>
+          <executions>
+            <execution>
+              <id>src-compile-protoc</id>
+              <phase>generate-sources</phase>
+              <goals>
+                <goal>compile</goal>
+              </goals>
+              <configuration>
+                <includeDependenciesInDescriptorSet>false</includeDependenciesInDescriptorSet>
+                <protoSourceRoot>${basedir}/src/main/proto</protoSourceRoot>
+                <outputDirectory>${project.build.directory}/generated-sources/java</outputDirectory>
+                <clearOutputDirectory>false</clearOutputDirectory>
+                <skip>true</skip>
+              </configuration>
+            </execution>
+            <execution>
+              <id>src-test-compile-protoc</id>
+              <phase>generate-test-sources</phase>
+              <goals>
+                <goal>test-compile</goal>
+              </goals>
+              <configuration>
+                <protoTestSourceRoot>${basedir}/src/test/proto</protoTestSourceRoot>
+                <outputDirectory>${project.build.directory}/generated-test-sources/java</outputDirectory>
+                <clearOutputDirectory>false</clearOutputDirectory>
+                <skip>true</skip>
+              </configuration>
+            </execution>
+          </executions>
+        </plugin>
       </plugins>
     </pluginManagement>
-
+    <extensions>
+      <extension>
+        <groupId>kr.motd.maven</groupId>
+        <artifactId>os-maven-plugin</artifactId>
+        <version>${os-maven-plugin.version}</version>
+      </extension>
+    </extensions>
     <plugins>
       <plugin>
         <artifactId>maven-clean-plugin</artifactId>