
HADOOP-7496. Break Maven TAR & bintar profiles into just LAYOUT & TAR proper. Contributed by Alejandro Abdelnur.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1160344 13f79535-47bb-0310-9956-ffa450edef68
Thomas White, 13 years ago
parent
commit 14b97a91d9

+ 11 - 9
BUILDING.txt

@@ -15,12 +15,13 @@ Requirements:
 ----------------------------------------------------------------------------------
 Maven modules:
 
-  hadoop                      (Main Hadoop project)
-         - hadoop-project     (Parent POM for all Hadoop Maven modules.             )
-                              (All plugins & dependencies versions are defined here.)
-         - hadoop-annotations (Generates the Hadoop doclet used to generated the Javadocs)
-         - hadoop-common      (Hadoop Common)
-         - hadoop-hdfs        (Hadoop HDFS)
+  hadoop                       (Main Hadoop project)
+         - hadoop-project      (Parent POM for all Hadoop Maven modules.             )
+                               (All plugins & dependencies versions are defined here.)
+         - hadoop-project-dist (Parent POM for modules that generate distributions.)
+         - hadoop-annotations  (Generates the Hadoop doclet used to generated the Javadocs)
+         - hadoop-common       (Hadoop Common)
+         - hadoop-hdfs         (Hadoop HDFS)
 
 ----------------------------------------------------------------------------------
 Where to run Maven from?
@@ -43,15 +44,16 @@ Maven build goals:
  * Run clover                : mvn test -Pclover [-DcloverLicenseLocation=${user.name}/.clover.license]
  * Run Rat                   : mvn apache-rat:check
  * Build javadocs            : mvn javadoc:javadoc
- * Build TAR                 : mvn package [-Ptar][-Pdocs][-Psrc][-Pnative]
+ * Build distribution        : mvn package [-Pdist][-Pdocs][-Psrc][-Pnative][-Dtar]
 
  Build options:
 
   * Use -Pnative to compile/bundle native code
   * Use -Dsnappy.prefix=(/usr/local) & -Dbundle.snappy=(false) to compile
     Snappy JNI bindings and to bundle Snappy SO files
-  * Use -Pdocs to generate & bundle the documentation in the TAR (using -Ptar)
-  * Use -Psrc to bundle the source in the TAR (using -Ptar)
+  * Use -Pdocs to generate & bundle the documentation in the distribution (using -Pdist)
+  * Use -Psrc to bundle the source in the distribution (using -Pdist)
+  * Use -Dtar to create a TAR with the distribution (using -Pdist)
 
    Tests options:
 

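With this change, the distribution layout is produced by the new 'dist' profile and
the tarball is created only when -Dtar is also passed. A minimal usage sketch of the
updated BUILDING.txt goals (any flag not listed there, such as -DskipTests, is a
standard Maven option and not part of this change):

  # Build only the exploded distribution layout
  mvn package -Pdist -DskipTests

  # Build the distribution with native code, docs and sources, and also package it as a TAR
  mvn package -Pdist -Pnative -Pdocs -Psrc -Dtar -DskipTests
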
+ 1 - 1
hadoop-assemblies/src/main/resources/assemblies/hadoop-tar.xml → hadoop-assemblies/src/main/resources/assemblies/hadoop-dist.xml

@@ -15,7 +15,7 @@
    limitations under the License.
 -->
 <assembly>
-  <id>hadoop-bintar</id>
+  <id>hadoop-distro</id>
   <formats>
     <format>dir</format>
   </formats>

+ 3 - 0
hadoop-common/CHANGES.txt

@@ -337,6 +337,9 @@ Trunk (unreleased changes)
     HADOOP-7498. Remove legacy TAR layout creation. (Alejandro Abdelnur via
     tomwhite)
 
+    HADOOP-7496. Break Maven TAR & bintar profiles into just LAYOUT & TAR proper.
+    (Alejandro Abdelnur via tomwhite)
+
   OPTIMIZATIONS
   
     HADOOP-7333. Performance improvement in PureJavaCrc32. (Eric Caspole

+ 2 - 2
hadoop-common/pom.xml

@@ -16,9 +16,9 @@
   <modelVersion>4.0.0</modelVersion>
   <parent>
     <groupId>org.apache.hadoop</groupId>
-    <artifactId>hadoop-project-distro</artifactId>
+    <artifactId>hadoop-project-dist</artifactId>
     <version>0.23.0-SNAPSHOT</version>
-    <relativePath>../hadoop-project-distro</relativePath>
+    <relativePath>../hadoop-project-dist</relativePath>
   </parent>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-common</artifactId>

+ 2 - 2
hadoop-hdfs/pom.xml

@@ -16,9 +16,9 @@
   <modelVersion>4.0.0</modelVersion>
   <parent>
     <groupId>org.apache.hadoop</groupId>
-    <artifactId>hadoop-project-distro</artifactId>
+    <artifactId>hadoop-project-dist</artifactId>
     <version>0.23.0-SNAPSHOT</version>
-    <relativePath>../hadoop-project-distro</relativePath>
+    <relativePath>../hadoop-project-dist</relativePath>
   </parent>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-hdfs</artifactId>

+ 1 - 1
hadoop-project-distro/README.txt → hadoop-project-dist/README.txt

@@ -1,4 +1,4 @@
 DUMMY.
 
 Required for the assembly:single goal not to fail because there
-are not files in the hadoop-project-distro module.
+are not files in the hadoop-project-dist module.

+ 12 - 12
hadoop-project-distro/pom.xml → hadoop-project-dist/pom.xml

@@ -21,10 +21,10 @@
     <relativePath>../hadoop-project</relativePath>
   </parent>
   <groupId>org.apache.hadoop</groupId>
-  <artifactId>hadoop-project-distro</artifactId>
+  <artifactId>hadoop-project-dist</artifactId>
   <version>0.23.0-SNAPSHOT</version>
-  <description>Apache Hadoop Project Distro POM</description>
-  <name>Apache Hadoop Project Distro POM</name>
+  <description>Apache Hadoop Project Dist POM</description>
+  <name>Apache Hadoop Project Dist POM</name>
   <packaging>pom</packaging>
 
   <properties>
@@ -356,7 +356,7 @@
     </profile>
 
     <profile>
-      <id>tar</id>
+      <id>dist</id>
       <activation>
         <activeByDefault>false</activeByDefault>
       </activation>
@@ -367,7 +367,7 @@
             <artifactId>maven-antrun-plugin</artifactId>
             <executions>
               <execution>
-                <id>pre-tar</id>
+                <id>pre-dist</id>
                 <phase>prepare-package</phase>
                 <goals>
                   <goal>run</goal>
@@ -375,7 +375,7 @@
                 <configuration>
                   <target>
                     <!-- Using Unix script to preserve symlinks -->
-                    <echo file="${project.build.directory}/tar-copynativelibs.sh">
+                    <echo file="${project.build.directory}/dist-copynativelibs.sh">
 
                       which cygpath 2> /dev/null
                       if [ $? = 1 ]; then
@@ -398,7 +398,7 @@
                       fi
                     </echo>
                     <exec executable="sh" dir="${project.build.directory}" failonerror="true">
-                      <arg line="./tar-copynativelibs.sh"/>
+                      <arg line="./dist-copynativelibs.sh"/>
                     </exec>
                   </target>
                 </configuration>
@@ -410,9 +410,9 @@
                   <goal>run</goal>
                 </goals>
                 <configuration>
-                  <target>
+                  <target if="tar">
                     <!-- Using Unix script to preserve symlinks -->
-                    <echo file="${project.build.directory}/tar-maketar.sh">
+                    <echo file="${project.build.directory}/dist-maketar.sh">
 
                       which cygpath 2> /dev/null
                       if [ $? = 1 ]; then
@@ -424,7 +424,7 @@
                       tar czf ${project.artifactId}-${project.version}.tar.gz ${project.artifactId}-${project.version}
                     </echo>
                     <exec executable="sh" dir="${project.build.directory}" failonerror="true">
-                      <arg line="./tar-maketar.sh"/>
+                      <arg line="./dist-maketar.sh"/>
                     </exec>
                   </target>
                 </configuration>
@@ -443,7 +443,7 @@
             </dependencies>
             <executions>
               <execution>
-                <id>pre-tar</id>
+                <id>dist</id>
                 <phase>prepare-package</phase>
                 <goals>
                   <goal>single</goal>
@@ -453,7 +453,7 @@
                   <attach>false</attach>
                   <finalName>${project.artifactId}-${project.version}</finalName>
                   <descriptorRefs>
-                    <descriptorRef>hadoop-tar</descriptorRef>
+                    <descriptorRef>hadoop-dist</descriptorRef>
                   </descriptorRefs>
                 </configuration>
               </execution>

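In the antrun configuration above, the maketar step is now guarded by if="tar" on the
Ant target, so dist-maketar.sh only runs when the tar property is set (i.e. when the
build is invoked with -Dtar). A rough sketch of the effect for hadoop-common (the exact
output paths are an assumption based on ${project.build.directory} and the finalName
configured above):

  # -Pdist alone assembles the exploded layout:
  #   hadoop-common/target/hadoop-common-0.23.0-SNAPSHOT/
  mvn package -Pdist -DskipTests

  # Adding -Dtar also runs dist-maketar.sh, which roughly does:
  #   cd hadoop-common/target
  #   tar czf hadoop-common-0.23.0-SNAPSHOT.tar.gz hadoop-common-0.23.0-SNAPSHOT
  mvn package -Pdist -Dtar -DskipTests
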
+ 1 - 1
pom.xml

@@ -35,7 +35,7 @@
 
   <modules>
     <module>hadoop-project</module>
-    <module>hadoop-project-distro</module>
+    <module>hadoop-project-dist</module>
     <module>hadoop-assemblies</module>
     <module>hadoop-annotations</module>
     <module>hadoop-alfredo</module>