- <?xml version="1.0"?>
- <!--
- Licensed to the Apache Software Foundation (ASF) under one or more
- contributor license agreements. See the NOTICE file distributed with
- this work for additional information regarding copyright ownership.
- The ASF licenses this file to You under the Apache License, Version 2.0
- (the "License"); you may not use this file except in compliance with
- the License. You may obtain a copy of the License at
- http://www.apache.org/licenses/LICENSE-2.0
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
- -->
- <project name="Hadoop" default="compile">
- <!-- Load all the default properties, and any that the user wants -->
- <!-- to contribute (without having to type -D or edit this file). -->
- <property file="${user.home}/build.properties" />
- <property file="${basedir}/build.properties" />
-
- <property name="Name" value="Hadoop"/>
- <property name="name" value="hadoop"/>
- <property name="version" value="0.19.0-dev"/>
- <property name="final.name" value="${name}-${version}"/>
- <property name="year" value="2006"/>
- <property name="libhdfs.version" value="1"/>
- <property name="src.dir" value="${basedir}/src"/>
- <property name="core.src.dir" value="${src.dir}/core"/>
- <property name="mapred.src.dir" value="${src.dir}/mapred"/>
- <property name="hdfs.src.dir" value="${src.dir}/hdfs"/>
- <property name="native.src.dir" value="${basedir}/src/native"/>
- <property name="examples.dir" value="${basedir}/src/examples"/>
- <property name="anttasks.dir" value="${basedir}/src/ant"/>
- <property name="lib.dir" value="${basedir}/lib"/>
- <property name="conf.dir" value="${basedir}/conf"/>
- <property name="docs.dir" value="${basedir}/docs"/>
- <property name="contrib.dir" value="${basedir}/src/contrib"/>
- <property name="docs.src" value="${basedir}/src/docs"/>
- <property name="changes.src" value="${docs.src}/changes"/>
- <property name="c++.src" value="${basedir}/src/c++"/>
- <property name="c++.utils.src" value="${c++.src}/utils"/>
- <property name="c++.pipes.src" value="${c++.src}/pipes"/>
- <property name="c++.examples.pipes.src" value="${examples.dir}/pipes"/>
- <property name="libhdfs.src" value="${c++.src}/libhdfs"/>
- <property name="tools.src" value="${basedir}/src/tools"/>
- <property name="build.dir" value="${basedir}/build"/>
- <property name="build.classes" value="${build.dir}/classes"/>
- <property name="build.src" value="${build.dir}/src"/>
- <property name="build.tools" value="${build.dir}/tools"/>
- <property name="build.webapps" value="${build.dir}/webapps"/>
- <property name="build.examples" value="${build.dir}/examples"/>
- <property name="build.anttasks" value="${build.dir}/ant"/>
- <property name="build.libhdfs" value="${build.dir}/libhdfs"/>
- <!-- convert spaces to _ so that Mac OS doesn't break things -->
- <exec executable="sed" inputstring="${os.name}"
- outputproperty="nonspace.os">
- <arg value="s/ /_/g"/>
- </exec>
- <property name="build.platform"
- value="${nonspace.os}-${os.arch}-${sun.arch.data.model}"/>
- <property name="build.native" value="${build.dir}/native/${build.platform}"/>
- <property name="build.c++" value="${build.dir}/c++-build/${build.platform}"/>
- <property name="build.c++.utils" value="${build.c++}/utils"/>
- <property name="build.c++.pipes" value="${build.c++}/pipes"/>
- <property name="build.c++.examples.pipes"
- value="${build.c++}/examples/pipes"/>
- <property name="build.docs" value="${build.dir}/docs"/>
- <property name="build.javadoc" value="${build.docs}/api"/>
- <property name="build.encoding" value="ISO-8859-1"/>
- <property name="install.c++" value="${build.dir}/c++/${build.platform}"/>
- <property name="install.c++.examples"
- value="${build.dir}/c++-examples/${build.platform}"/>
- <property name="test.src.dir" value="${basedir}/src/test"/>
- <property name="test.lib.dir" value="${basedir}/src/test/lib"/>
- <property name="test.build.dir" value="${build.dir}/test"/>
- <property name="test.generated.dir" value="${test.build.dir}/src"/>
- <property name="test.build.data" value="${test.build.dir}/data"/>
- <property name="test.cache.data" value="${test.build.dir}/cache"/>
- <property name="test.debug.data" value="${test.build.dir}/debug"/>
- <property name="test.log.dir" value="${test.build.dir}/logs"/>
- <property name="test.build.classes" value="${test.build.dir}/classes"/>
- <property name="test.build.testjar" value="${test.build.dir}/testjar"/>
- <property name="test.build.testshell" value="${test.build.dir}/testshell"/>
- <property name="test.build.javadoc" value="${test.build.dir}/docs/api"/>
- <property name="test.include" value="Test*"/>
- <property name="test.classpath.id" value="test.classpath"/>
- <property name="test.output" value="no"/>
- <property name="test.timeout" value="900000"/>
- <property name="test.junit.output.format" value="plain"/>
- <property name="libhdfs.test.conf.dir" value="${libhdfs.src}/tests/conf"/>
- <property name="libhdfs.test.dir" value="${test.build.dir}/libhdfs"/>
- <property name="web.src.dir" value="${basedir}/src/web"/>
- <property name="src.webapps" value="${basedir}/src/webapps"/>
- <property name="javadoc.link.java"
- value="http://java.sun.com/j2se/1.5/docs/api/"/>
- <property name="javadoc.packages" value="org.apache.hadoop.*"/>
- <property name="dist.dir" value="${build.dir}/${final.name}"/>
- <property name="javac.debug" value="on"/>
- <property name="javac.optimize" value="on"/>
- <property name="javac.deprecation" value="off"/>
- <property name="javac.version" value="1.5"/>
- <property name="javac.args" value=""/>
- <property name="javac.args.warnings" value="-Xlint:unchecked"/>
- <property name="clover.db.dir" location="${build.dir}/test/clover/db"/>
- <property name="clover.report.dir" location="${build.dir}/test/clover/reports"/>
- <property name="rat.reporting.classname" value="rat.Report"/>
- <property name="jdiff.home" value="${user.home}/jdiff-1.1.0"/>
- <property name="jdiff.reports.dir" value="${docs.dir}/jdiff"/>
- <property name="jdiff.stable" value="0.17.0"/>
- <property name="jdiff.stable.javadoc"
- value="http://hadoop.apache.org/core/docs/r${jdiff.stable}/api/"/>
- <property name="scratch.dir" value="${user.home}/tmp"/>
- <property name="svn.cmd" value="svn"/>
- <property name="grep.cmd" value="grep"/>
- <property name="patch.cmd" value="patch"/>
- <property name="make.cmd" value="make"/>
- <available property="clover.present" classname="com.cenqua.clover.tasks.CloverReportTask" />
- <!-- check if clover reports should be generated -->
- <condition property="clover.enabled">
- <and>
- <isset property="run.clover"/>
- <isset property="clover.present"/>
- </and>
- </condition>
- <!-- the normal classpath -->
- <path id="classpath">
- <pathelement location="${build.classes}"/>
- <fileset dir="${lib.dir}">
- <include name="**/*.jar" />
- <exclude name="**/excluded/" />
- </fileset>
- <pathelement location="${conf.dir}"/>
- </path>
- <!-- the unit test classpath: uses test.src.dir for configuration -->
- <path id="test.classpath">
- <pathelement location="${test.build.classes}" />
- <pathelement location="${test.src.dir}"/>
- <pathelement location="${build.dir}"/>
- <pathelement location="${build.examples}"/>
- <pathelement location="${build.tools}"/>
- <fileset dir="${test.lib.dir}">
- <include name="**/*.jar" />
- <exclude name="**/excluded/" />
- </fileset>
- <path refid="classpath"/>
- </path>
- <!-- the cluster test classpath: uses conf.dir for configuration -->
- <path id="test.cluster.classpath">
- <path refid="classpath"/>
- <pathelement location="${test.build.classes}" />
- <pathelement location="${test.src.dir}"/>
- <pathelement location="${build.dir}"/>
- </path>
- <!-- properties dependent on the items defined above. -->
- <available classname="${rat.reporting.classname}" classpathref="classpath" property="rat.present" value="true"/>
- <!-- ====================================================== -->
- <!-- Macro definitions -->
- <!-- ====================================================== -->
- <macrodef name="macro_tar" description="Worker Macro for tar">
- <attribute name="param.destfile"/>
- <element name="param.listofitems"/>
- <sequential>
- <tar compression="gzip" longfile="gnu"
- destfile="@{param.destfile}">
- <param.listofitems/>
- </tar>
- </sequential>
- </macrodef>
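- <!-- Usage sketch (illustrative only, not invoked at parse time): macro_tar takes a
- param.destfile attribute and a nested param.listofitems element, e.g.
- <macro_tar param.destfile="${build.dir}/example.tar.gz">
- <param.listofitems>
- <tarfileset dir="${build.dir}" mode="664" includes="${final.name}/**"/>
- </param.listofitems>
- </macro_tar>
- The "tar" and "binary" targets below use it exactly this way;
- example.tar.gz is a placeholder name. -->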
- <!-- ====================================================== -->
- <!-- Stuff needed by all targets -->
- <!-- ====================================================== -->
- <target name="init">
- <mkdir dir="${build.dir}"/>
- <mkdir dir="${build.classes}"/>
- <mkdir dir="${build.tools}"/>
- <mkdir dir="${build.src}"/>
- <mkdir dir="${build.webapps}/task/WEB-INF"/>
- <mkdir dir="${build.webapps}/job/WEB-INF"/>
- <mkdir dir="${build.webapps}/dfs/WEB-INF"/>
- <mkdir dir="${build.webapps}/datanode/WEB-INF"/>
- <mkdir dir="${build.webapps}/secondary/WEB-INF"/>
- <mkdir dir="${build.examples}"/>
- <mkdir dir="${build.anttasks}"/>
- <mkdir dir="${build.dir}/c++"/>
-
- <mkdir dir="${test.build.dir}"/>
- <mkdir dir="${test.build.classes}"/>
- <mkdir dir="${test.build.testjar}"/>
- <mkdir dir="${test.build.testshell}"/>
- <tempfile property="touch.temp.file" destDir="${java.io.tmpdir}"/>
- <touch millis="0" file="${touch.temp.file}">
- <fileset dir="${conf.dir}" includes="**/*.template"/>
- <fileset dir="${contrib.dir}" includes="**/*.template"/>
- </touch>
- <delete file="${touch.temp.file}"/>
- <!-- copy all of the jsp and static files -->
- <copy todir="${build.webapps}">
- <fileset dir="${src.webapps}">
- <exclude name="**/*.jsp" />
- </fileset>
- </copy>
- <copy todir="${conf.dir}" verbose="true">
- <fileset dir="${conf.dir}" includes="**/*.template"/>
- <mapper type="glob" from="*.template" to="*"/>
- </copy>
- <copy todir="${contrib.dir}" verbose="true">
- <fileset dir="${contrib.dir}" includes="**/*.template"/>
- <mapper type="glob" from="*.template" to="*"/>
- </copy>
- <exec executable="sh">
- <arg line="src/saveVersion.sh ${version}"/>
- </exec>
- </target>
- <!-- ====================================================== -->
- <!-- Compile the Java files -->
- <!-- ====================================================== -->
- <taskdef classname="org.apache.jasper.JspC" name="jsp-compile" >
- <classpath refid="test.classpath"/>
- </taskdef>
- <target name="record-parser" depends="init" if="javacc.home">
- <javacc
- target="${core.src.dir}/org/apache/hadoop/record/compiler/generated/rcc.jj"
- outputdirectory="${core.src.dir}/org/apache/hadoop/record/compiler/generated"
- javacchome="${javacc.home}"
- />
- </target>
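- <!-- Usage note (an assumption, not from the original file): this target is a
- no-op unless javacc.home is set, e.g.
- ant record-parser -Djavacc.home=/path/to/javacc
- where /path/to/javacc is a placeholder for a local JavaCC installation
- (the directory passed to the javacchome attribute above). -->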
-
- <target name="compile-rcc-compiler" depends="init, record-parser">
- <javac
- encoding="${build.encoding}"
- srcdir="${core.src.dir}"
- includes="org/apache/hadoop/record/compiler/**/*.java"
- destdir="${build.classes}"
- debug="${javac.debug}"
- optimize="${javac.optimize}"
- target="${javac.version}"
- source="${javac.version}"
- deprecation="${javac.deprecation}">
- <compilerarg line="${javac.args}"/>
- <classpath refid="classpath"/>
- </javac>
-
- <taskdef name="recordcc" classname="org.apache.hadoop.record.compiler.ant.RccTask">
- <classpath refid="classpath" />
- </taskdef>
- </target>
-
- <target name="compile-core-classes" depends="init, compile-rcc-compiler">
- <jsp-compile
- uriroot="${src.webapps}/task"
- outputdir="${build.src}"
- package="org.apache.hadoop.mapred"
- webxml="${build.webapps}/task/WEB-INF/web.xml">
- </jsp-compile>
- <jsp-compile
- uriroot="${src.webapps}/job"
- outputdir="${build.src}"
- package="org.apache.hadoop.mapred"
- webxml="${build.webapps}/job/WEB-INF/web.xml">
- </jsp-compile>
- <jsp-compile
- uriroot="${src.webapps}/dfs"
- outputdir="${build.src}"
- package="org.apache.hadoop.dfs"
- webxml="${build.webapps}/dfs/WEB-INF/web.xml">
- </jsp-compile>
- <jsp-compile
- uriroot="${src.webapps}/datanode"
- outputdir="${build.src}"
- package="org.apache.hadoop.dfs"
- webxml="${build.webapps}/datanode/WEB-INF/web.xml">
- </jsp-compile>
- <!-- Compile Java files (excluding JSPs) checking warnings -->
- <javac
- encoding="${build.encoding}"
- srcdir="${core.src.dir};${mapred.src.dir};${hdfs.src.dir};${build.src}"
- includes="org/apache/hadoop/**/*.java"
- destdir="${build.classes}"
- debug="${javac.debug}"
- optimize="${javac.optimize}"
- target="${javac.version}"
- source="${javac.version}"
- deprecation="${javac.deprecation}">
- <compilerarg line="${javac.args} ${javac.args.warnings}" />
- <classpath refid="classpath"/>
- </javac>
-
- <copy todir="${build.classes}">
- <fileset dir="${core.src.dir}" includes="**/*.properties"/>
- <fileset dir="${mapred.src.dir}" includes="**/*.properties"/>
- <fileset dir="${hdfs.src.dir}" includes="**/*.properties"/>
- </copy>
-
- </target>
- <target name="compile-tools" depends="init">
- <javac
- encoding="${build.encoding}"
- srcdir="${tools.src}"
- includes="org/apache/hadoop/**/*.java"
- destdir="${build.tools}"
- debug="${javac.debug}"
- optimize="${javac.optimize}"
- target="${javac.version}"
- source="${javac.version}"
- deprecation="${javac.deprecation}">
- <compilerarg line="${javac.args} ${javac.args.warnings}" />
- <classpath refid="classpath"/>
- </javac>
-
- <copy todir="${build.tools}">
- <fileset
- dir="${tools.src}"
- includes="**/*.properties"
- />
- </copy>
- </target>
- <target name="compile-core-native" depends="compile-core-classes"
- if="compile.native">
-
- <mkdir dir="${build.native}/lib"/>
- <mkdir dir="${build.native}/src/org/apache/hadoop/io/compress/zlib"/>
- <mkdir dir="${build.native}/src/org/apache/hadoop/io/compress/lzo"/>
- <javah
- classpath="${build.classes}"
- destdir="${build.native}/src/org/apache/hadoop/io/compress/zlib"
- force="yes"
- verbose="yes"
- >
- <class name="org.apache.hadoop.io.compress.zlib.ZlibCompressor" />
- <class name="org.apache.hadoop.io.compress.zlib.ZlibDecompressor" />
- </javah>
- <javah
- classpath="${build.classes}"
- destdir="${build.native}/src/org/apache/hadoop/io/compress/lzo"
- force="yes"
- verbose="yes"
- >
- <class name="org.apache.hadoop.io.compress.lzo.LzoCompressor" />
- <class name="org.apache.hadoop.io.compress.lzo.LzoDecompressor" />
- </javah>
- <exec dir="${build.native}" executable="sh" failonerror="true">
- <env key="OS_NAME" value="${os.name}"/>
- <env key="OS_ARCH" value="${os.arch}"/>
- <env key="JVM_DATA_MODEL" value="${sun.arch.data.model}"/>
- <env key="HADOOP_NATIVE_SRCDIR" value="${native.src.dir}"/>
- <arg line="${native.src.dir}/configure"/>
- </exec>
- <exec dir="${build.native}" executable="${make.cmd}" failonerror="true">
- <env key="OS_NAME" value="${os.name}"/>
- <env key="OS_ARCH" value="${os.arch}"/>
- <env key="JVM_DATA_MODEL" value="${sun.arch.data.model}"/>
- <env key="HADOOP_NATIVE_SRCDIR" value="${native.src.dir}"/>
- </exec>
- <exec dir="${build.native}" executable="sh" failonerror="true">
- <arg line="${build.native}/libtool --mode=install cp ${build.native}/lib/libhadoop.la ${build.native}/lib"/>
- </exec>
- </target>
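- <!-- Usage note (assumption): the native build is skipped unless compile.native
- is set, e.g.
- ant compile-core-native -Dcompile.native=true
- Any value satisfies the if="compile.native" condition; Ant only checks that
- the property is defined. -->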
- <target name="compile-core"
- depends="clover,compile-core-classes,compile-core-native,compile-c++" description="Compile core only">
- </target>
- <target name="compile-contrib" depends="compile-core,compile-libhdfs">
- <subant target="compile">
- <property name="version" value="${version}"/>
- <fileset file="${contrib.dir}/build.xml"/>
- </subant>
- </target>
-
- <target name="compile" depends="compile-core, compile-contrib, compile-ant-tasks, compile-tools" description="Compile core, contrib">
- </target>
- <target name="compile-examples"
- depends="compile-core,compile-c++-examples">
- <javac
- encoding="${build.encoding}"
- srcdir="${examples.dir}"
- includes="org/apache/hadoop/**/*.java"
- destdir="${build.examples}"
- debug="${javac.debug}"
- optimize="${javac.optimize}"
- target="${javac.version}"
- source="${javac.version}"
- deprecation="${javac.deprecation}">
- <compilerarg line="${javac.args} ${javac.args.warnings}" />
- <classpath refid="classpath"/>
- </javac>
- </target>
- <!-- ================================================================== -->
- <!-- Make hadoop.jar -->
- <!-- ================================================================== -->
- <!-- -->
- <!-- ================================================================== -->
- <target name="jar" depends="compile-core" description="Make hadoop.jar">
- <tar compression="gzip" destfile="${build.classes}/bin.tgz">
- <tarfileset dir="bin" mode="755"/>
- </tar>
- <jar jarfile="${build.dir}/${final.name}-core.jar"
- basedir="${build.classes}">
- <manifest>
- <section name="org/apache/hadoop">
- <attribute name="Implementation-Title" value="Hadoop"/>
- <attribute name="Implementation-Version" value="${version}"/>
- <attribute name="Implementation-Vendor" value="Apache"/>
- </section>
- </manifest>
- <fileset file="${conf.dir}/hadoop-default.xml"/>
- <fileset file="${conf.dir}/commons-logging.properties"/>
- <fileset file="${conf.dir}/log4j.properties"/>
- <fileset file="${conf.dir}/hadoop-metrics.properties"/>
- <zipfileset dir="${build.webapps}" prefix="webapps"/>
- </jar>
- </target>
- <!-- ================================================================== -->
- <!-- Make the Hadoop examples jar. -->
- <!-- ================================================================== -->
- <!-- -->
- <!-- ================================================================== -->
- <target name="examples" depends="jar, compile-examples" description="Make the Hadoop examples jar.">
- <jar jarfile="${build.dir}/${final.name}-examples.jar"
- basedir="${build.examples}">
- <manifest>
- <attribute name="Main-Class"
- value="org/apache/hadoop/examples/ExampleDriver"/>
- </manifest>
- </jar>
- </target>
- <target name="tools-jar" depends="jar, compile-tools"
- description="Make the Hadoop tools jar.">
- <jar jarfile="${build.dir}/${final.name}-tools.jar"
- basedir="${build.tools}">
- <manifest>
- <attribute name="Main-Class"
- value="org/apache/hadoop/examples/ExampleDriver"/>
- </manifest>
- </jar>
- </target>
- <!-- ================================================================== -->
- <!-- Make the Hadoop metrics jar. (for use outside Hadoop) -->
- <!-- ================================================================== -->
- <!-- -->
- <!-- ================================================================== -->
- <target name="metrics.jar" depends="compile-core" description="Make the Hadoop metrics jar. (for use outside Hadoop)">
- <jar jarfile="${build.dir}/hadoop-metrics-${version}.jar"
- basedir="${build.classes}">
- <include name="**/metrics/**" />
- <exclude name="**/package.html" />
- </jar>
- </target>
- <target name="generate-test-records" depends="compile-rcc-compiler">
- <recordcc destdir="${test.generated.dir}">
- <fileset dir="${test.src.dir}"
- includes="**/*.jr" />
- </recordcc>
- </target>
-
- <!-- ================================================================== -->
- <!-- Compile test code -->
- <!-- ================================================================== -->
- <target name="compile-core-test" depends="compile-examples, compile-tools, generate-test-records">
- <javac
- encoding="${build.encoding}"
- srcdir="${test.generated.dir}"
- includes="org/apache/hadoop/**/*.java"
- destdir="${test.build.classes}"
- debug="${javac.debug}"
- optimize="${javac.optimize}"
- target="${javac.version}"
- source="${javac.version}"
- deprecation="${javac.deprecation}">
- <compilerarg line="${javac.args}" />
- <classpath refid="test.classpath"/>
- </javac>
- <javac
- encoding="${build.encoding}"
- srcdir="${test.src.dir}"
- includes="org/apache/hadoop/**/*.java"
- destdir="${test.build.classes}"
- debug="${javac.debug}"
- optimize="${javac.optimize}"
- target="${javac.version}"
- source="${javac.version}"
- deprecation="${javac.deprecation}">
- <compilerarg line="${javac.args} ${javac.args.warnings}" />
- <classpath refid="test.classpath"/>
- </javac>
- <javac
- encoding="${build.encoding}"
- srcdir="${test.src.dir}/testjar"
- includes="*.java"
- destdir="${test.build.testjar}"
- debug="${javac.debug}"
- optimize="${javac.optimize}"
- target="${javac.version}"
- source="${javac.version}"
- deprecation="${javac.deprecation}">
- <compilerarg line="${javac.args} ${javac.args.warnings}" />
- <classpath refid="test.classpath"/>
- </javac>
- <delete file="${test.build.testjar}/testjob.jar"/>
- <jar jarfile="${test.build.testjar}/testjob.jar"
- basedir="${test.build.testjar}">
- </jar>
- <javac
- encoding="${build.encoding}"
- srcdir="${test.src.dir}/testshell"
- includes="*.java"
- destdir="${test.build.testshell}"
- debug="${javac.debug}"
- optimize="${javac.optimize}"
- target="${javac.version}"
- source="${javac.version}"
- deprecation="${javac.deprecation}">
- <compilerarg line="${javac.args} ${javac.args.warnings}"/>
- <classpath refid="test.classpath"/>
- </javac>
- <delete file="${test.build.testshell}/testshell.jar"/>
- <jar jarfile="${test.build.testshell}/testshell.jar"
- basedir="${test.build.testshell}">
- </jar>
-
- <delete dir="${test.cache.data}"/>
- <mkdir dir="${test.cache.data}"/>
- <delete dir="${test.debug.data}"/>
- <mkdir dir="${test.debug.data}"/>
- <copy file="${test.src.dir}/org/apache/hadoop/mapred/testscript.txt" todir="${test.debug.data}"/>
- <copy file="${test.src.dir}/org/apache/hadoop/mapred/test.txt" todir="${test.cache.data}"/>
- <copy file="${test.src.dir}/org/apache/hadoop/mapred/test.jar" todir="${test.cache.data}"/>
- <copy file="${test.src.dir}/org/apache/hadoop/mapred/test.zip" todir="${test.cache.data}"/>
- <copy file="${test.src.dir}/org/apache/hadoop/mapred/test.tar" todir="${test.cache.data}"/>
- <copy file="${test.src.dir}/org/apache/hadoop/mapred/test.tgz" todir="${test.cache.data}"/>
- <copy file="${test.src.dir}/org/apache/hadoop/mapred/test.tar.gz" todir="${test.cache.data}"/>
- <copy file="${test.src.dir}/org/apache/hadoop/dfs/hadoop-14-dfs-dir.tgz" todir="${test.cache.data}"/>
- <copy file="${test.src.dir}/org/apache/hadoop/dfs/hadoop-dfs-dir.txt" todir="${test.cache.data}"/>
- </target>
- <!-- ================================================================== -->
- <!-- Make hadoop-test.jar -->
- <!-- ================================================================== -->
- <!-- -->
- <!-- ================================================================== -->
- <target name="jar-test" depends="compile-core-test" description="Make hadoop-test.jar">
- <jar jarfile="${build.dir}/${final.name}-test.jar"
- basedir="${test.build.classes}">
- <manifest>
- <attribute name="Main-Class"
- value="org/apache/hadoop/test/AllTestDriver"/>
- <section name="org/apache/hadoop">
- <attribute name="Implementation-Title" value="Hadoop"/>
- <attribute name="Implementation-Version" value="${version}"/>
- <attribute name="Implementation-Vendor" value="Apache"/>
- </section>
- </manifest>
- </jar>
- </target>
- <!-- ================================================================== -->
- <!-- Run unit tests -->
- <!-- ================================================================== -->
- <target name="test-core" depends="jar-test" description="Run core unit tests">
- <delete dir="${test.build.data}"/>
- <mkdir dir="${test.build.data}"/>
- <delete dir="${test.log.dir}"/>
- <mkdir dir="${test.log.dir}"/>
- <junit showoutput="${test.output}" printsummary="yes" haltonfailure="no"
- fork="yes" maxmemory="256m" dir="${basedir}" timeout="${test.timeout}"
- errorProperty="tests.failed" failureProperty="tests.failed">
- <sysproperty key="test.build.data" value="${test.build.data}"/>
- <sysproperty key="test.cache.data" value="${test.cache.data}"/>
- <sysproperty key="test.debug.data" value="${test.debug.data}"/>
- <sysproperty key="hadoop.log.dir" value="${test.log.dir}"/>
- <sysproperty key="test.src.dir" value="${test.src.dir}"/>
- <sysproperty key="java.library.path"
- value="${build.native}/lib:${lib.dir}/native/${build.platform}"/>
- <sysproperty key="install.c++.examples" value="${install.c++.examples}"/>
- <!-- set compile.c++ in the child jvm only if it is set -->
- <syspropertyset dynamic="no">
- <propertyref name="compile.c++"/>
- </syspropertyset>
- <classpath refid="${test.classpath.id}"/>
- <formatter type="${test.junit.output.format}" />
- <batchtest todir="${test.build.dir}" unless="testcase">
- <fileset dir="${test.src.dir}"
- includes="**/${test.include}.java"
- excludes="**/${test.exclude}.java" />
- </batchtest>
- <batchtest todir="${test.build.dir}" if="testcase">
- <fileset dir="${test.src.dir}" includes="**/${testcase}.java"/>
- </batchtest>
- </junit>
- <fail if="tests.failed">Tests failed!</fail>
- </target>
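- <!-- Usage sketch (assumption about typical invocations): a single test can be
- selected with -Dtestcase and its output echoed with -Dtest.output, e.g.
- ant test-core -Dtestcase=TestFileSystem -Dtest.output=yes
- TestFileSystem is a placeholder class name; the batchtest above resolves it to
- **/${testcase}.java under ${test.src.dir}. -->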
- <target name="test-contrib" depends="compile, compile-core-test" description="Run contrib unit tests">
- <subant target="test">
- <property name="version" value="${version}"/>
- <fileset file="${contrib.dir}/build.xml"/>
- </subant>
- </target>
-
- <target name="test" depends="test-core, test-contrib" description="Run core, contrib unit tests">
- </target>
- <!-- Run all unit tests, not just Test*, and use non-test configuration. -->
- <target name="test-cluster" description="Run all unit tests, not just Test*, and use non-test configuration.">
- <antcall target="test">
- <param name="test.include" value="*"/>
- <param name="test.classpath.id" value="test.cluster.classpath"/>
- </antcall>
- </target>
- <target name="nightly" depends="test, tar">
- </target>
-
- <!-- ================================================================== -->
- <!-- Run optional third-party tool targets -->
- <!-- ================================================================== -->
- <target name="checkstyle" depends="check-for-checkstyle" if="checkstyle.present" description="Run optional third-party tool targets">
- <taskdef resource="checkstyletask.properties">
- <classpath>
- <fileset dir="${lib.dir}">
- <include name="excluded/checkstyle-all-*.jar"/>
- </fileset>
- </classpath>
- </taskdef>
-
- <mkdir dir="${test.build.dir}"/>
-
- <checkstyle config="${test.src.dir}/checkstyle.xml"
- failOnViolation="false">
- <fileset dir="${core.src.dir}" includes="**/*.java" excludes="**/generated/**"/>
- <fileset dir="${mapred.src.dir}" includes="**/*.java" excludes="**/generated/**"/>
- <fileset dir="${hdfs.src.dir}" includes="**/*.java" excludes="**/generated/**"/>
- <formatter type="xml" toFile="${test.build.dir}/checkstyle-errors.xml"/>
- </checkstyle>
-
- <xslt style="${test.src.dir}/checkstyle-noframes-sorted.xsl"
- in="${test.build.dir}/checkstyle-errors.xml"
- out="${test.build.dir}/checkstyle-errors.html"/>
- </target>
-
- <target name="check-for-checkstyle">
- <available property="checkstyle.present" resource="checkstyletask.properties">
- <classpath>
- <fileset dir="${lib.dir}">
- <include name="excluded/checkstyle-all-*.jar"/>
- </fileset>
- </classpath>
- </available>
- </target>
- <property name="findbugs.home" value=""/>
- <target name="findbugs" depends="check-for-findbugs, tar" if="findbugs.present" description="Run findbugs if present">
- <property name="findbugs.out.dir" value="${test.build.dir}/findbugs"/>
- <property name="findbugs.exclude.file" value="${test.src.dir}/findbugsExcludeFile.xml"/>
- <property name="findbugs.report.htmlfile" value="${findbugs.out.dir}/hadoop-findbugs-report.html"/>
- <property name="findbugs.report.xmlfile" value="${findbugs.out.dir}/hadoop-findbugs-report.xml"/>
- <taskdef name="findbugs" classname="edu.umd.cs.findbugs.anttask.FindBugsTask"
- classpath="${findbugs.home}/lib/findbugs-ant.jar" />
-
- <mkdir dir="${findbugs.out.dir}"/>
-
- <findbugs home="${findbugs.home}" output="xml:withMessages"
- outputFile="${findbugs.report.xmlfile}" effort="max"
- excludeFilter="${findbugs.exclude.file}" jvmargs="-Xmx512M">
- <auxClasspath>
- <fileset dir="${lib.dir}">
- <include name="**/*.jar"/>
- </fileset>
- </auxClasspath>
- <sourcePath path="${core.src.dir}"/>
- <sourcePath path="${mapred.src.dir}"/>
- <sourcePath path="${hdfs.src.dir}"/>
- <sourcePath path="${examples.dir}" />
- <sourcePath path="${basedir}/src/contrib/streaming/src/java" />
- <class location="${basedir}/build/hadoop-${version}-core.jar" />
- <class location="${basedir}/build/hadoop-${version}-examples.jar" />
- <class location="${basedir}/build/contrib/streaming/hadoop-${version}-streaming.jar" />
- </findbugs>
-
- <xslt style="${findbugs.home}/src/xsl/default.xsl"
- in="${findbugs.report.xmlfile}"
- out="${findbugs.report.htmlfile}"/>
- </target>
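- <!-- Usage note (assumption): findbugs only runs when findbugs.home points at a
- FindBugs installation containing lib/findbugs.jar, e.g.
- ant findbugs -Dfindbugs.home=/path/to/findbugs
- The path is a placeholder; check-for-findbugs below derives findbugs.present
- from it. -->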
- <target name="check-for-findbugs">
- <available property="findbugs.present"
- file="${findbugs.home}/lib/findbugs.jar" />
- </target>
-
- <!-- ================================================================== -->
- <!-- Documentation -->
- <!-- ================================================================== -->
-
- <target name="docs" depends="forrest.check" description="Generate forrest-based documentation. To use, specify -Dforrest.home=<base of Apache Forrest installation> on the command line." if="forrest.home">
- <exec dir="${docs.src}" executable="${forrest.home}/bin/forrest" failonerror="true" />
- <copy todir="${docs.dir}">
- <fileset dir="${docs.src}/build/site/" />
- </copy>
- <style basedir="${conf.dir}" destdir="${docs.dir}"
- includes="hadoop-default.xml" style="conf/configuration.xsl"/>
- <antcall target="changes-to-html"/>
- </target>
- <target name="forrest.check" unless="forrest.home">
- <fail message="'forrest.home' is not defined. Please pass -Dforrest.home=<base of Apache Forrest installation> to Ant on the command-line." />
- </target>
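- <!-- Usage note (assumption): both "docs" and "forrest.check" key off
- forrest.home, e.g.
- ant docs -Dforrest.home=/path/to/apache-forrest
- The path is a placeholder for a local Apache Forrest installation. -->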
-
- <target name="javadoc" description="Generate javadoc">
- <mkdir dir="${build.javadoc}"/>
- <javadoc
- overview="${core.src.dir}/overview.html"
- packagenames="org.apache.hadoop.*"
- destdir="${build.javadoc}"
- author="true"
- version="true"
- use="true"
- windowtitle="${Name} ${version} API"
- doctitle="${Name} ${version} API"
- bottom="Copyright &copy; ${year} The Apache Software Foundation"
- >
- <packageset dir="${core.src.dir}"/>
- <packageset dir="${mapred.src.dir}"/>
- <packageset dir="${hdfs.src.dir}"/>
- <packageset dir="${examples.dir}"/>
- <packageset dir="src/contrib/streaming/src/java"/>
- <packageset dir="src/contrib/data_join/src/java"/>
- <packageset dir="src/contrib/index/src/java"/>
- <link href="${javadoc.link.java}"/>
- <classpath >
- <path refid="classpath" />
- <fileset dir="src/contrib/">
- <include name="*/lib/*.jar" />
- </fileset>
- <pathelement path="${java.class.path}"/>
- </classpath>
- <group title="Core" packages="org.apache.*"/>
- <group title="Examples" packages="org.apache.hadoop.examples*"/>
- <group title="contrib: Streaming" packages="org.apache.hadoop.streaming*"/>
- <group title="contrib: DataJoin" packages="org.apache.hadoop.contrib.utils.join*"/>
- <group title="contrib: Index" packages="org.apache.hadoop.contrib.index*"/>
- </javadoc>
- </target>
- <target name="api-xml" depends="javadoc">
- <javadoc>
- <doclet name="jdiff.JDiff"
- path="${jdiff.home}/jdiff.jar:${jdiff.home}/xerces.jar">
- <param name="-apidir" value="${build.dir}"/>
- <param name="-apiname" value="hadoop ${version}"/>
- </doclet>
- <packageset dir="src/core"/>
- <packageset dir="src/hdfs"/>
- <packageset dir="src/mapred"/>
- <packageset dir="src/tools"/>
- <classpath >
- <path refid="classpath" />
- <pathelement path="${java.class.path}"/>
- </classpath>
- </javadoc>
- </target>
-
- <target name="api-report" depends="api-xml">
- <javadoc sourcepath="src/core,src/hdfs,src/mapred,src/tools"
- destdir="${build.dir}"
- sourceFiles="${jdiff.home}/Null.java">
- <doclet name="jdiff.JDiff"
- path="${jdiff.home}/jdiff.jar:${jdiff.home}/xerces.jar">
- <param name="-oldapi" value="hadoop ${jdiff.stable}"/>
- <param name="-newapi" value="hadoop ${version}"/>
- <param name="-oldapidir" value="${jdiff.reports.dir}"/>
- <param name="-newapidir" value="${build.dir}"/>
- <param name="-javadocold" value="${jdiff.stable.javadoc}"/>
- <param name="-javadocnew" value="${build.javadoc}/"/>
- <param name="-stats"/>
- </doclet>
- <classpath >
- <path refid="classpath" />
- <pathelement path="${java.class.path}"/>
- </classpath>
- </javadoc>
- </target>
-
- <target name="changes-to-html" description="Convert CHANGES.txt into an html file">
- <mkdir dir="${docs.dir}"/>
- <exec executable="perl" input="CHANGES.txt" output="${docs.dir}/changes.html" failonerror="true">
- <arg value="${changes.src}/changes2html.pl"/>
- </exec>
- <copy todir="${docs.dir}">
- <fileset dir="${changes.src}" includes="*.css"/>
- </copy>
- </target>
- <!-- ================================================================== -->
- <!-- D I S T R I B U T I O N -->
- <!-- ================================================================== -->
- <!-- -->
- <!-- ================================================================== -->
- <target name="package" depends="compile, jar, javadoc, examples, tools-jar, jar-test, ant-tasks, package-libhdfs"
- description="Build distribution">
- <mkdir dir="${dist.dir}"/>
- <mkdir dir="${dist.dir}/lib"/>
- <mkdir dir="${dist.dir}/contrib"/>
- <mkdir dir="${dist.dir}/bin"/>
- <mkdir dir="${dist.dir}/docs"/>
- <mkdir dir="${dist.dir}/docs/api"/>
- <copy todir="${dist.dir}/lib" includeEmptyDirs="false">
- <fileset dir="lib">
- <exclude name="**/native/**"/>
- </fileset>
- </copy>
- <exec dir="${dist.dir}" executable="sh" failonerror="true">
- <env key="BASE_NATIVE_LIB_DIR" value="${lib.dir}/native"/>
- <env key="BUILD_NATIVE_DIR" value="${build.dir}/native"/>
- <env key="DIST_LIB_DIR" value="${dist.dir}/lib/native"/>
- <arg line="${native.src.dir}/packageNativeHadoop.sh"/>
- </exec>
- <subant target="package">
- <!--Pass down the version in case it's needed again, and the target
- distribution directory so contribs know where to install to.-->
- <property name="version" value="${version}"/>
- <property name="dist.dir" value="${dist.dir}"/>
- <fileset file="${contrib.dir}/build.xml"/>
- </subant>
- <copy todir="${dist.dir}/webapps">
- <fileset dir="${build.webapps}"/>
- </copy>
- <copy todir="${dist.dir}">
- <fileset file="${build.dir}/${final.name}-*.jar"/>
- </copy>
-
- <copy todir="${dist.dir}/bin">
- <fileset dir="bin"/>
- </copy>
- <copy todir="${dist.dir}/conf">
- <fileset dir="${conf.dir}" excludes="**/*.template"/>
- </copy>
- <copy todir="${dist.dir}/docs">
- <fileset dir="${docs.dir}" />
- <fileset dir="${build.docs}"/>
- </copy>
- <copy todir="${dist.dir}">
- <fileset dir=".">
- <include name="*.txt" />
- </fileset>
- </copy>
- <copy todir="${dist.dir}/src" includeEmptyDirs="true">
- <fileset dir="src" excludes="**/*.template **/docs/build/**/*"/>
- </copy>
-
- <copy todir="${dist.dir}/c++" includeEmptyDirs="false">
- <fileset dir="${build.dir}/c++"/>
- </copy>
- <copy todir="${dist.dir}/" file="build.xml"/>
- <chmod perm="ugo+x" type="file" parallel="false">
- <fileset dir="${dist.dir}/bin"/>
- <fileset dir="${dist.dir}/src/contrib/">
- <include name="*/bin/*" />
- </fileset>
- <fileset dir="${dist.dir}/src/contrib/ec2/bin/image"/>
- </chmod>
- <chmod perm="ugo+x" type="file">
- <fileset dir="${dist.dir}/src/c++/pipes/debug"/>
- </chmod>
- </target>
- <!-- ================================================================== -->
- <!-- Make release tarball -->
- <!-- ================================================================== -->
- <target name="tar" depends="package" description="Make release tarball">
- <macro_tar param.destfile="${build.dir}/${final.name}.tar.gz">
- <param.listofitems>
- <tarfileset dir="${build.dir}" mode="664">
- <exclude name="${final.name}/bin/*" />
- <exclude name="${final.name}/contrib/*/bin/*" />
- <exclude name="${final.name}/src/contrib/ec2/bin/*" />
- <exclude name="${final.name}/src/contrib/ec2/bin/image/*" />
- <include name="${final.name}/**" />
- </tarfileset>
- <tarfileset dir="${build.dir}" mode="755">
- <include name="${final.name}/bin/*" />
- <include name="${final.name}/contrib/*/bin/*" />
- <include name="${final.name}/src/contrib/ec2/bin/*" />
- <include name="${final.name}/src/contrib/ec2/bin/image/*" />
- </tarfileset>
- </param.listofitems>
- </macro_tar>
- </target>
- <target name="binary" depends="package" description="Make tarball without source and documentation">
- <macro_tar param.destfile="${build.dir}/${final.name}-bin.tar.gz">
- <param.listofitems>
- <tarfileset dir="${build.dir}" mode="664">
- <exclude name="${final.name}/bin/*" />
- <exclude name="${final.name}/src/**" />
- <exclude name="${final.name}/docs/**" />
- <include name="${final.name}/**" />
- </tarfileset>
- <tarfileset dir="${build.dir}" mode="755">
- <include name="${final.name}/bin/*" />
- </tarfileset>
- </param.listofitems>
- </macro_tar>
- </target>
- <!-- ================================================================== -->
- <!-- Perform audit activities for the release -->
- <!-- ================================================================== -->
- <target name="releaseaudit" depends="package" description="Release Audit activities">
- <fail unless="rat.present" message="Failed to load class [${rat.reporting.classname}]. Download the latest rat jar from [http://code.google.com/p/arat] and copy it to [${lib.dir}]. Typically the file name will be of format rat-x.y.z.jar"/>
- <java classname="${rat.reporting.classname}" fork="true">
- <classpath refid="classpath"/>
- <arg value="${build.dir}/${final.name}"/>
- </java>
- </target>
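- <!-- Usage note (assumption): after copying a rat-x.y.z.jar into ${lib.dir},
- running
- ant releaseaudit
- executes ${rat.reporting.classname} against ${build.dir}/${final.name}. -->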
- <!-- ================================================================== -->
- <!-- Clean. Delete the build files, and their directories -->
- <!-- ================================================================== -->
- <target name="clean" depends="clean-contrib" description="Clean. Delete the build files, and their directories">
- <delete dir="${build.dir}"/>
- <delete dir="${docs.src}/build"/>
- </target>
- <!-- ================================================================== -->
- <!-- Clean contrib target. For now, must be called explicitly -->
- <!-- Using subant instead of ant as a workaround for Ant bug 30569 -->
- <!-- ================================================================== -->
- <target name="clean-contrib">
- <subant target="clean">
- <fileset file="src/contrib/build.xml"/>
- </subant>
- </target>
- <!-- ================================================================== -->
- <!-- libhdfs targets. -->
- <!-- ================================================================== -->
- <target name="compile-libhdfs" depends="init" if="libhdfs">
- <mkdir dir="${build.libhdfs}"/>
- <exec dir="${libhdfs.src}" executable="${make.cmd}" failonerror="true">
- <env key="OS_NAME" value="${os.name}"/>
- <env key="OS_ARCH" value="${os.arch}"/>
- <env key="SHLIB_VERSION" value="${libhdfs.version}"/>
- <env key="LIBHDFS_BUILD_DIR" value="${build.libhdfs}"/>
- </exec>
- </target>
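- <!-- Usage note (assumption): libhdfs is only built when the libhdfs property
- is set, e.g.
- ant compile-libhdfs -Dlibhdfs=true
- Any value satisfies the if="libhdfs" condition. -->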
-
- <target name="test-libhdfs" depends="compile-libhdfs, compile-core">
- <delete dir="${libhdfs.test.dir}"/>
- <mkdir dir="${libhdfs.test.dir}"/>
- <mkdir dir="${libhdfs.test.dir}/logs"/>
- <mkdir dir="${libhdfs.test.dir}/dfs/name"/>
- <exec dir="${libhdfs.src}" executable="${make.cmd}" failonerror="true">
- <env key="OS_NAME" value="${os.name}"/>
- <env key="OS_ARCH" value="${os.arch}"/>
- <env key="SHLIB_VERSION" value="${libhdfs.version}"/>
- <env key="LIBHDFS_BUILD_DIR" value="${build.libhdfs}"/>
- <env key="HADOOP_HOME" value="${basedir}"/>
- <env key="HADOOP_CONF_DIR" value="${libhdfs.test.conf.dir}"/>
- <env key="HADOOP_LOG_DIR" value="${libhdfs.test.dir}/logs"/>
- <arg value="test"/>
- </exec>
- </target>
- <target name="doc-libhdfs" depends="compile-libhdfs">
- <exec dir="${libhdfs.src}" executable="${make.cmd}">
- <arg value="doc"/>
- </exec>
- </target>
-
- <target name="package-libhdfs"
- depends="compile-libhdfs, doc-libhdfs"
- if="libhdfs">
- <mkdir dir="${dist.dir}/libhdfs"/>
- <copy todir="${dist.dir}/libhdfs">
- <fileset dir="${build.libhdfs}"
- casesensitive="yes"
- followsymlinks="false">
- <exclude name="**/tests/**"/>
- <exclude name="*.so"/>
- <exclude name="*.o"/>
- </fileset>
- </copy>
- <chmod perm="ugo+x" type="file">
- <fileset dir="${dist.dir}/libhdfs"/>
- </chmod>
- <exec dir="${dist.dir}/libhdfs" executable="ln">
- <arg line="-sf libhdfs.so.${libhdfs.version} libhdfs.so"/>
- </exec>
- </target>
-
- <target name="create-c++-configure" depends="init" if="compile.c++">
- <exec executable="autoreconf" dir="${c++.utils.src}" searchpath="yes"
- failonerror="yes">
- <arg value="-if"/>
- </exec>
- <exec executable="autoreconf" dir="${c++.pipes.src}" searchpath="yes"
- failonerror="yes">
- <arg value="-if"/>
- </exec>
- <exec executable="autoreconf" dir="${c++.examples.pipes.src}"
- searchpath="yes" failonerror="yes">
- <arg value="-if"/>
- </exec>
- </target>
- <target name="check-c++-makefiles" depends="init" if="compile.c++">
- <condition property="need.c++.utils.makefile">
- <not> <available file="${build.c++.utils}/Makefile"/> </not>
- </condition>
- <condition property="need.c++.pipes.makefile">
- <not> <available file="${build.c++.pipes}/Makefile"/> </not>
- </condition>
- <condition property="need.c++.examples.pipes.makefile">
- <not> <available file="${build.c++.examples.pipes}/Makefile"/> </not>
- </condition>
- </target>
- <target name="create-c++-utils-makefile" depends="check-c++-makefiles"
- if="need.c++.utils.makefile">
- <mkdir dir="${build.c++.utils}"/>
- <exec executable="${c++.utils.src}/configure" dir="${build.c++.utils}"
- failonerror="yes">
- <arg value="--prefix=${install.c++}"/>
- </exec>
- </target>
- <target name="compile-c++-utils" depends="create-c++-utils-makefile"
- if="compile.c++">
- <exec executable="${make.cmd}" dir="${build.c++.utils}" searchpath="yes"
- failonerror="yes">
- <arg value="install"/>
- </exec>
- </target>
- <target name="create-c++-pipes-makefile" depends="check-c++-makefiles"
- if="need.c++.pipes.makefile">
- <mkdir dir="${build.c++.pipes}"/>
- <exec executable="${c++.pipes.src}/configure" dir="${build.c++.pipes}"
- failonerror="yes">
- <arg value="--prefix=${install.c++}"/>
- </exec>
- </target>
- <target name="compile-c++-pipes"
- depends="create-c++-pipes-makefile,compile-c++-utils"
- if="compile.c++">
- <exec executable="${make.cmd}" dir="${build.c++.pipes}" searchpath="yes"
- failonerror="yes">
- <arg value="install"/>
- </exec>
- </target>
- <target name="compile-c++"
- depends="compile-c++-pipes"/>
- <target name="create-c++-examples-pipes-makefile"
- depends="check-c++-makefiles"
- if="need.c++.examples.pipes.makefile">
- <mkdir dir="${build.c++.examples.pipes}"/>
- <exec executable="${c++.examples.pipes.src}/configure"
- dir="${build.c++.examples.pipes}"
- failonerror="yes">
- <arg value="--prefix=${install.c++.examples}"/>
- <arg value="--with-hadoop-utils=${install.c++}"/>
- <arg value="--with-hadoop-pipes=${install.c++}"/>
- </exec>
- </target>
- <target name="compile-c++-examples-pipes"
- depends="create-c++-examples-pipes-makefile,compile-c++-pipes"
- if="compile.c++">
- <exec executable="${make.cmd}" dir="${build.c++.examples.pipes}" searchpath="yes"
- failonerror="yes">
- <arg value="install"/>
- </exec>
- </target>
- <target name="compile-c++-examples"
- depends="compile-c++-examples-pipes"/>
- <target name="compile-ant-tasks" depends="compile-core">
- <javac
- encoding="${build.encoding}"
- srcdir="${anttasks.dir}"
- includes="org/apache/hadoop/ant/**/*.java"
- destdir="${build.anttasks}"
- debug="${javac.debug}"
- optimize="${javac.optimize}"
- target="${javac.version}"
- source="${javac.version}"
- deprecation="${javac.deprecation}">
- <compilerarg line="${javac.args}"/>
- <classpath refid="classpath"/>
- </javac>
- </target>
- <target name="ant-tasks" depends="jar, compile-ant-tasks">
- <copy file="${anttasks.dir}/org/apache/hadoop/ant/antlib.xml"
- todir="${build.anttasks}/org/apache/hadoop/ant"/>
- <jar destfile="${build.dir}/ant-${final.name}.jar">
- <fileset dir="${build.anttasks}"/>
- </jar>
- </target>
- <target name="clover" depends="clover.setup, clover.info" description="Instrument the Unit tests using Clover. Requires a Clover license and clover.jar in the ANT classpath. To use, specify -Drun.clover=true on the command line."/>
- <target name="clover.setup" if="clover.enabled">
- <taskdef resource="clovertasks"/>
- <mkdir dir="${clover.db.dir}"/>
- <clover-setup initString="${clover.db.dir}/hadoop_coverage.db">
- <fileset dir="src" includes="core/**/* tools/**/* hdfs/**/* mapred/**/*"/>
- </clover-setup>
- </target>
- <target name="clover.info" unless="clover.present">
- <echo>
- Clover not found. Code coverage reports disabled.
- </echo>
- </target>
- <target name="clover.check">
- <fail unless="clover.present">
- ##################################################################
- Clover not found.
- Please make sure clover.jar is in ANT_HOME/lib, or made available
- to Ant using other mechanisms like -lib or CLASSPATH.
- ##################################################################
- </fail>
- </target>
- <target name="generate-clover-reports" depends="clover.check, clover">
- <mkdir dir="${clover.report.dir}"/>
- <clover-report>
- <current outfile="${clover.report.dir}" title="${final.name}">
- <format type="html"/>
- </current>
- </clover-report>
- <clover-report>
- <current outfile="${clover.report.dir}/clover.xml" title="${final.name}">
- <format type="xml"/>
- </current>
- </clover-report>
- </target>
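- <!-- Usage note (assumption): with clover.jar on Ant's classpath and a valid
- Clover license, instrumented coverage reports are produced via
- ant generate-clover-reports -Drun.clover=true
- run.clover and the detected clover.present together set clover.enabled above. -->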
- <target name="findbugs.check" depends="check-for-findbugs" unless="findbugs.present">
- <fail message="'findbugs.home' is not defined. Please pass -Dfindbugs.home=<base of Findbugs installation> to Ant on the command-line." />
- </target>
- <target name="patch.check" unless="patch.file">
- <fail message="'patch.file' is not defined. Please pass -Dpatch.file=<location of patch file> to Ant on the command-line." />
- </target>
- <target name="test-patch" depends="patch.check,findbugs.check,forrest.check">
- <exec executable="bash" failonerror="true">
- <arg value="${basedir}/src/test/bin/test-patch.sh"/>
- <arg value="DEVELOPER"/>
- <arg value="${patch.file}"/>
- <arg value="${scratch.dir}"/>
- <arg value="${svn.cmd}"/>
- <arg value="${grep.cmd}"/>
- <arg value="${patch.cmd}"/>
- <arg value="${findbugs.home}"/>
- <arg value="${forrest.home}"/>
- <arg value="${basedir}"/>
- </exec>
- </target>
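- <!-- Usage sketch (assumption): a developer invocation passes the patch and tool
- locations on the command line, e.g.
- ant test-patch -Dpatch.file=/tmp/HADOOP-1234.patch -Dfindbugs.home=/path/to/findbugs -Dforrest.home=/path/to/forrest
- The patch path and issue number are placeholders; svn.cmd, grep.cmd and
- patch.cmd default to the values defined near the top of this file. -->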
- <target name="hudson-test-patch" depends="findbugs.check,forrest.check">
- <exec executable="bash" failonerror="true">
- <arg value="${basedir}/src/test/bin/test-patch.sh"/>
- <arg value="HUDSON"/>
- <arg value="${scratch.dir}"/>
- <arg value="${support.dir}"/>
- <arg value="${ps.cmd}"/>
- <arg value="${wget.cmd}"/>
- <arg value="${jiracli.cmd}"/>
- <arg value="${svn.cmd}"/>
- <arg value="${grep.cmd}"/>
- <arg value="${patch.cmd}"/>
- <arg value="${findbugs.home}"/>
- <arg value="${forrest.home}"/>
- <arg value="${eclipse.home}"/>
- <arg value="${python.home}"/>
- <arg value="${basedir}"/>
- <arg value="${trigger.url}"/>
- <arg value="${jira.passwd}"/>
- </exec>
- </target>
-
- <target name="eclipse-files" depends="init"
- description="Generate files for Eclipse">
- <pathconvert property="eclipse.project">
- <path path="${basedir}"/>
- <regexpmapper from="^.*/([^/]+)$$" to="\1" handledirsep="yes"/>
- </pathconvert>
- <copy todir="." overwrite="true">
- <fileset dir=".eclipse.templates">
- <exclude name="**/README.txt"/>
- </fileset>
- <filterset>
- <filter token="PROJECT" value="${eclipse.project}"/>
- </filterset>
- </copy>
- </target>
- </project>