<?xml version="1.0" encoding="UTF-8"?>
<!--
  Licensed under the Apache License, Version 2.0 (the "License");
  you may not use this file except in compliance with the License.
  You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

  Unless required by applicable law or agreed to in writing, software
  distributed under the License is distributed on an "AS IS" BASIS,
  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  See the License for the specific language governing permissions and
  limitations under the License. See accompanying LICENSE file.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
                             http://maven.apache.org/xsd/maven-4.0.0.xsd">
  <modelVersion>4.0.0</modelVersion>
  <parent>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-project-dist</artifactId>
    <version>3.0.0-SNAPSHOT</version>
    <relativePath>../../hadoop-project-dist</relativePath>
  </parent>
  <groupId>org.apache.hadoop</groupId>
  <artifactId>hadoop-hdfs-native-client</artifactId>
  <version>3.0.0-SNAPSHOT</version>
  <description>Apache Hadoop HDFS Native Client</description>
  <name>Apache Hadoop HDFS Native Client</name>
  <packaging>jar</packaging>

  <properties>
    <!-- Optional native components; enabled per-build via -Drequire.* -->
    <require.fuse>false</require.fuse>
    <require.libwebhdfs>false</require.libwebhdfs>
  </properties>

  <dependencies>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-common</artifactId>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-common</artifactId>
      <type>test-jar</type>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-hdfs</artifactId>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-hdfs</artifactId>
      <type>test-jar</type>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.mockito</groupId>
      <artifactId>mockito-all</artifactId>
      <scope>test</scope>
    </dependency>
  </dependencies>

  <profiles>
    <!-- Windows native build: cmake + msbuild, activated on the windows OS family. -->
    <profile>
      <id>native-win</id>
      <activation>
        <activeByDefault>false</activeByDefault>
        <os>
          <family>windows</family>
        </os>
      </activation>
      <properties>
        <runningWithNative>true</runningWithNative>
      </properties>
      <build>
        <plugins>
          <plugin>
            <groupId>org.apache.maven.plugins</groupId>
            <artifactId>maven-enforcer-plugin</artifactId>
            <executions>
              <execution>
                <id>enforce-os</id>
                <goals>
                  <goal>enforce</goal>
                </goals>
                <configuration>
                  <rules>
                    <requireOS>
                      <family>windows</family>
                      <message>native-win build only supported on Windows</message>
                    </requireOS>
                  </rules>
                  <fail>true</fail>
                </configuration>
              </execution>
            </executions>
          </plugin>
          <plugin>
            <groupId>org.apache.maven.plugins</groupId>
            <artifactId>maven-antrun-plugin</artifactId>
            <executions>
              <execution>
                <id>make</id>
                <phase>compile</phase>
                <goals>
                  <goal>run</goal>
                </goals>
                <configuration>
                  <target>
                    <!-- Pick the VS generator from the target platform (Win32 vs x64). -->
                    <condition property="generator" value="Visual Studio 10" else="Visual Studio 10 Win64">
                      <equals arg1="Win32" arg2="${env.PLATFORM}" />
                    </condition>
                    <mkdir dir="${project.build.directory}/native"/>
                    <exec executable="cmake" dir="${project.build.directory}/native"
                          failonerror="true">
                      <arg line="${basedir}/src/ -DGENERATED_JAVAH=${project.build.directory}/native/javah -DJVM_ARCH_DATA_MODEL=${sun.arch.data.model} -DREQUIRE_LIBWEBHDFS=${require.libwebhdfs} -DREQUIRE_FUSE=${require.fuse} -G '${generator}'"/>
                    </exec>
                    <exec executable="msbuild" dir="${project.build.directory}/native"
                          failonerror="true">
                      <arg line="ALL_BUILD.vcxproj /nologo /p:Configuration=RelWithDebInfo /p:LinkIncremental=false"/>
                    </exec>
                    <!-- Copy for inclusion in distribution. -->
                    <copy todir="${project.build.directory}/bin">
                      <fileset dir="${project.build.directory}/native/target/bin/RelWithDebInfo"/>
                    </copy>
                  </target>
                </configuration>
              </execution>
              <execution>
                <id>native_tests</id>
                <phase>test</phase>
                <goals><goal>run</goal></goals>
                <configuration>
                  <skip>${skipTests}</skip>
                  <target>
                    <property name="compile_classpath" refid="maven.compile.classpath"/>
                    <property name="test_classpath" refid="maven.test.classpath"/>
                    <macrodef name="run-test">
                      <attribute name="test"/>
                      <sequential>
                        <echo message="Running @{test}"/>
                        <exec executable="${project.build.directory}/native/RelWithDebInfo/@{test}" failonerror="true" dir="${project.build.directory}/native/">
                          <env key="CLASSPATH" value="${test_classpath}:${compile_classpath}"/>
                          <!-- HADOOP_HOME required to find winutils. -->
                          <env key="HADOOP_HOME" value="${hadoop.common.build.dir}"/>
                          <!-- Make sure hadoop.dll and jvm.dll are on PATH. -->
                          <env key="PATH" value="${env.PATH};${hadoop.common.build.dir}/bin;${java.home}/jre/bin/server;${java.home}/bin/server"/>
                        </exec>
                        <echo message="Finished @{test}"/>
                      </sequential>
                    </macrodef>
                    <run-test test="test_libhdfs_threaded"/>
                    <echo message="Skipping test_libhdfs_zerocopy"/>
                    <run-test test="test_native_mini_dfs"/>
                  </target>
                </configuration>
              </execution>
            </executions>
          </plugin>
        </plugins>
      </build>
    </profile>
    <!-- Unix native build: cmake + make, opt-in (not active by default). -->
    <profile>
      <id>native</id>
      <activation>
        <activeByDefault>false</activeByDefault>
      </activation>
      <properties>
        <runningWithNative>true</runningWithNative>
      </properties>
      <build>
        <plugins>
          <plugin>
            <groupId>org.apache.maven.plugins</groupId>
            <artifactId>maven-antrun-plugin</artifactId>
            <executions>
              <execution>
                <id>make</id>
                <phase>compile</phase>
                <goals><goal>run</goal></goals>
                <configuration>
                  <target>
                    <mkdir dir="${project.build.directory}"/>
                    <exec executable="cmake" dir="${project.build.directory}" failonerror="true">
                      <arg line="${basedir}/src/ -DGENERATED_JAVAH=${project.build.directory}/native/javah -DJVM_ARCH_DATA_MODEL=${sun.arch.data.model} -DREQUIRE_LIBWEBHDFS=${require.libwebhdfs} -DREQUIRE_FUSE=${require.fuse}"/>
                    </exec>
                    <exec executable="make" dir="${project.build.directory}" failonerror="true">
                    </exec>
                  </target>
                </configuration>
              </execution>
              <execution>
                <id>native_tests</id>
                <phase>test</phase>
                <goals><goal>run</goal></goals>
                <configuration>
                  <skip>${skipTests}</skip>
                  <target>
                    <property name="compile_classpath" refid="maven.compile.classpath"/>
                    <property name="test_classpath" refid="maven.test.classpath"/>
                    <macrodef name="run-test">
                      <attribute name="test"/>
                      <sequential>
                        <echo message="Running @{test} ${compile_classpath} ${test_classpath}"/>
                        <exec executable="${project.build.directory}/main/native/libhdfs/@{test}" failonerror="true" dir="${project.build.directory}/">
                          <env key="CLASSPATH" value="${test_classpath}:${compile_classpath}"/>
                          <!-- Make sure libhadoop.so is on LD_LIBRARY_PATH. -->
                          <env key="LD_LIBRARY_PATH" value="${env.LD_LIBRARY_PATH}:${project.build.directory}/target/usr/local/lib:${hadoop.common.build.dir}/native/target/usr/local/lib"/>
                        </exec>
                        <echo message="Finished @{test}"/>
                      </sequential>
                    </macrodef>
                    <run-test test="test_libhdfs_threaded"/>
                    <run-test test="test_libhdfs_zerocopy"/>
                    <run-test test="test_native_mini_dfs"/>
                  </target>
                </configuration>
              </execution>
            </executions>
          </plugin>
        </plugins>
      </build>
    </profile>
  </profiles>
</project>