123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166 |
- <?xml version="1.0" encoding="UTF-8"?>
- <!--
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
- http://www.apache.org/licenses/LICENSE-2.0
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License. See accompanying LICENSE file.
- -->
- <project xmlns="http://maven.apache.org/POM/4.0.0"
- xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
- http://maven.apache.org/xsd/maven-4.0.0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <parent>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-project</artifactId>
- <version>3.0.0-SNAPSHOT</version>
- <relativePath>../hadoop-project</relativePath>
- </parent>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-dist</artifactId>
- <version>3.0.0-SNAPSHOT</version>
- <description>Apache Hadoop Distribution</description>
- <name>Apache Hadoop Distribution</name>
- <packaging>jar</packaging>
- <!-- Using dependencies to ensure this module is the last one -->
- <dependencies>
- <dependency>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-common</artifactId>
- <scope>provided</scope>
- </dependency>
- <dependency>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-hdfs</artifactId>
- <scope>provided</scope>
- </dependency>
- <dependency>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-mapreduce-client-app</artifactId>
- <scope>provided</scope>
- </dependency>
- <dependency>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-yarn-api</artifactId>
- <scope>provided</scope>
- </dependency>
- </dependencies>
- <build>
- <plugins>
- <plugin>
- <artifactId>maven-deploy-plugin</artifactId>
- <configuration>
- <skip>true</skip>
- </configuration>
- </plugin>
- <plugin>
- <groupId>org.apache.rat</groupId>
- <artifactId>apache-rat-plugin</artifactId>
- <configuration>
- <includes>
- <include>pom.xml</include>
- </includes>
- </configuration>
- </plugin>
- </plugins>
- </build>
- <profiles>
- <profile>
- <id>dist</id>
- <activation>
- <activeByDefault>false</activeByDefault>
- <property>
- <name>tar|rpm|deb</name>
- </property>
- </activation>
- <build>
- <plugins>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-antrun-plugin</artifactId>
- <executions>
- <execution>
- <id>tar</id>
- <phase>package</phase>
- <goals>
- <goal>run</goal>
- </goals>
- <configuration>
- <target if="tar">
- <!-- This script preserves permissions and symlinks. -->
- <!-- Python requires resetting indentation to far left. -->
- <echo file="${project.build.directory}/dist-maketar.py">
- from os.path import abspath, basename, isdir, join
- import tarfile
- def make_file_filter(root, file_name_filter):
- def filter_func(tar_info):
- if tar_info.name == root:
- # Always include root directory. Otherwise, tarfile.add assumes you are
- # filtering out the whole directory and produces an empty tar.
- return tar_info
- if tar_info.isfile() or tar_info.issym():
- # Include files and symlinks only if they match the specified name filter.
- if file_name_filter(basename(tar_info.name)):
- return tar_info
- # Otherwise, exclude.
- return None
- return filter_func
- target_dirs = [
- abspath(r"${basedir}/../hadoop-common-project/hadoop-common/target/hadoop-common-${project.version}"),
- abspath(r"${basedir}/../hadoop-hdfs-project/hadoop-hdfs/target/hadoop-hdfs-${project.version}"),
- abspath(r"${basedir}/../hadoop-hdfs-project/hadoop-hdfs-httpfs/target/hadoop-hdfs-httpfs-${project.version}"),
- abspath(r"${basedir}/../hadoop-yarn-project/target/hadoop-yarn-project-${project.version}"),
- abspath(r"${basedir}/../hadoop-mapreduce-project/target/hadoop-mapreduce-${project.version}"),
- abspath(r"${basedir}/../hadoop-tools/hadoop-tools-dist/target/hadoop-tools-dist-${project.version}")
- ]
- base_name = "hadoop" + "-" + "${project.version}"
- dir_name = abspath(join(r"${project.build.directory}", base_name))
- tar_name = dir_name + ".tar.gz"
- with tarfile.open(tar_name, "w:gz") as tar:
- for target_dir in target_dirs:
- tar.add(target_dir, arcname=base_name)
- native_dir = abspath(join(target_dir, "../native/target/usr/local/lib"))
- if isdir(native_dir):
- arc_name = base_name + "/lib/native"
- tar.add(native_dir, arcname=arc_name,
- filter=make_file_filter(arc_name, lambda file: file.startswith("lib")))
- bin_dir = abspath(join(target_dir, "../bin"))
- if isdir(bin_dir):
- arc_name = base_name + "/bin"
- tar.add(bin_dir, arcname=arc_name)
- if "${bundle.snappy}" == "true":
- arc_name = base_name + "/lib/native"
- tar.add(r"${snappy.lib}", arcname=arc_name,
- filter=make_file_filter(arc_name, lambda file: "snappy" in file))
- print
- print "Hadoop dist tar available at: " + tar_name
- print
- </echo>
- <exec executable="python" dir="${project.build.directory}" failonerror="true">
- <arg value="dist-maketar.py" />
- </exec>
- </target>
- </configuration>
- </execution>
- </executions>
- </plugin>
- </plugins>
- </build>
- </profile>
- </profiles>
- </project>
|