- <?xml version="1.0"?>
- <!--
- Licensed to the Apache Software Foundation (ASF) under one or more
- contributor license agreements. See the NOTICE file distributed with
- this work for additional information regarding copyright ownership.
- The ASF licenses this file to You under the Apache License, Version 2.0
- (the "License"); you may not use this file except in compliance with
- the License. You may obtain a copy of the License at
- http://www.apache.org/licenses/LICENSE-2.0
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
- -->
- <project name="Hadoop" default="compile"
- xmlns:artifact="urn:maven-artifact-ant"
- xmlns:ivy="antlib:org.apache.ivy.ant">
- <!-- Load all the default properties, and any the user wants -->
- <!-- to contribute (without having to type -D or edit this file -->
- <!-- These load first on purpose: Ant properties are immutable, so a
- value set in a user/checkout build.properties wins over every
- default declared below. -->
- <property file="${user.home}/build.properties" />
- <property file="${basedir}/build.properties" />
- 
- <!-- Project identity. -->
- <property name="Name" value="Hadoop"/>
- <property name="name" value="hadoop"/>
- <property name="version" value="0.20.3-SNAPSHOT"/>
- <property name="final.name" value="${name}-${version}"/>
- <property name="year" value="2009"/>
- 
- <!-- Final (versioned) jar base names, one per artifact. -->
- <property name="core.final.name" value="${name}-core-${version}"/>
- <property name="test.final.name" value="${name}-test-${version}"/>
- <property name="examples.final.name" value="${name}-examples-${version}"/>
- <property name="tools.final.name" value="${name}-tools-${version}"/>
- <property name="ant.final.name" value="${name}-ant-${version}"/>
- <property name="streaming.final.name" value="${name}-streaming-${version}"/>
- <!-- Source tree layout. -->
- <property name="src.dir" value="${basedir}/src"/>
- <property name="core.src.dir" value="${src.dir}/core"/>
- <property name="mapred.src.dir" value="${src.dir}/mapred"/>
- <property name="hdfs.src.dir" value="${src.dir}/hdfs"/>
- <property name="native.src.dir" value="${basedir}/src/native"/>
- <property name="examples.dir" value="${basedir}/src/examples"/>
- <property name="anttasks.dir" value="${basedir}/src/ant"/>
- <property name="lib.dir" value="${basedir}/lib"/>
- <property name="conf.dir" value="${basedir}/conf"/>
- <property name="contrib.dir" value="${basedir}/src/contrib"/>
- <property name="docs.src" value="${basedir}/src/docs"/>
- <property name="src.docs.cn" value="${basedir}/src/docs/cn"/>
- <property name="changes.src" value="${docs.src}/changes"/>
- <property name="c++.src" value="${basedir}/src/c++"/>
- <property name="c++.utils.src" value="${c++.src}/utils"/>
- <property name="c++.pipes.src" value="${c++.src}/pipes"/>
- <property name="c++.examples.pipes.src" value="${examples.dir}/pipes"/>
- <property name="c++.libhdfs.src" value="${c++.src}/libhdfs"/>
- <property name="librecordio.src" value="${c++.src}/librecordio"/>
- <property name="tools.src" value="${basedir}/src/tools"/>
- <!-- Root of a Xerces-C install for librecordio; empty disables it. -->
- <property name="xercescroot" value=""/>
- <!-- Build output layout: everything lands under ${basedir}/build. -->
- <property name="build.dir" value="${basedir}/build"/>
- <property name="build.classes" value="${build.dir}/classes"/>
- <property name="build.src" value="${build.dir}/src"/>
- <property name="build.tools" value="${build.dir}/tools"/>
- <property name="build.webapps" value="${build.dir}/webapps"/>
- <property name="build.examples" value="${build.dir}/examples"/>
- <property name="build.anttasks" value="${build.dir}/ant"/>
- <property name="build.librecordio" value="${build.dir}/librecordio"/>
- <!-- convert spaces to _ so that mac os doesn't break things -->
- <exec executable="sed" inputstring="${os.name}"
- outputproperty="nonspace.os">
- <arg value="s/ /_/g"/>
- </exec>
- <!-- Per-platform directory key, e.g. Linux-amd64-64
- (os name, cpu arch, JVM data model). -->
- <property name="build.platform"
- value="${nonspace.os}-${os.arch}-${sun.arch.data.model}"/>
- <property name="jvm.arch"
- value="${sun.arch.data.model}"/>
- <property name="build.native" value="${build.dir}/native/${build.platform}"/>
- <property name="build.c++" value="${build.dir}/c++-build/${build.platform}"/>
- <property name="build.c++.utils" value="${build.c++}/utils"/>
- <property name="build.c++.pipes" value="${build.c++}/pipes"/>
- <property name="build.c++.libhdfs" value="${build.c++}/libhdfs"/>
- <property name="build.c++.examples.pipes"
- value="${build.c++}/examples/pipes"/>
- <property name="build.docs" value="${build.dir}/docs"/>
- <property name="build.docs.cn" value="${build.dir}/docs/cn"/>
- <property name="build.javadoc" value="${build.docs}/api"/>
- <property name="build.javadoc.dev" value="${build.docs}/dev-api"/>
- <!-- Encoding passed to every javac invocation below. -->
- <property name="build.encoding" value="ISO-8859-1"/>
- <property name="install.c++" value="${build.dir}/c++/${build.platform}"/>
- <property name="install.c++.examples"
- value="${build.dir}/c++-examples/${build.platform}"/>
- <!-- Test-tree layout and JUnit runner knobs. -->
- <property name="test.src.dir" value="${basedir}/src/test"/>
- <property name="test.lib.dir" value="${basedir}/src/test/lib"/>
- <property name="test.build.dir" value="${build.dir}/test"/>
- <property name="test.generated.dir" value="${test.build.dir}/src"/>
- <property name="test.build.data" value="${test.build.dir}/data"/>
- <property name="test.cache.data" value="${test.build.dir}/cache"/>
- <property name="test.debug.data" value="${test.build.dir}/debug"/>
- <property name="test.log.dir" value="${test.build.dir}/logs"/>
- <property name="test.build.classes" value="${test.build.dir}/classes"/>
- <property name="test.build.testjar" value="${test.build.dir}/testjar"/>
- <property name="test.build.testshell" value="${test.build.dir}/testshell"/>
- <property name="test.build.extraconf" value="${test.build.dir}/extraconf"/>
- <property name="test.build.javadoc" value="${test.build.dir}/docs/api"/>
- <property name="test.build.javadoc.dev" value="${test.build.dir}/docs/dev-api"/>
- <property name="test.include" value="Test*"/>
- <property name="test.classpath.id" value="test.classpath"/>
- <property name="test.output" value="no"/>
- <!-- 900000 ms = 15 minutes. -->
- <property name="test.timeout" value="900000"/>
- <property name="test.junit.output.format" value="plain"/>
- <property name="test.junit.fork.mode" value="perTest" />
- <property name="test.junit.printsummary" value="yes" />
- <property name="test.junit.haltonfailure" value="no" />
- <property name="test.junit.maxmemory" value="512m" />
- <property name="test.libhdfs.conf.dir" value="${c++.libhdfs.src}/tests/conf"/>
- <property name="test.libhdfs.dir" value="${test.build.dir}/libhdfs"/>
- <property name="librecordio.test.dir" value="${test.build.dir}/librecordio"/>
- <property name="web.src.dir" value="${basedir}/src/web"/>
- <property name="src.webapps" value="${basedir}/src/webapps"/>
- <!-- Javadoc and javac settings. -->
- <property name="javadoc.link.java"
- value="http://java.sun.com/javase/6/docs/api/"/>
- <property name="javadoc.packages" value="org.apache.hadoop.*"/>
- <property name="dist.dir" value="${build.dir}/${final.name}"/>
- <property name="javac.debug" value="on"/>
- <property name="javac.optimize" value="on"/>
- <property name="javac.deprecation" value="off"/>
- <property name="javac.version" value="1.6"/>
- <property name="javac.args" value=""/>
- <property name="javac.args.warnings" value="-Xlint:unchecked"/>
- <!-- Clover coverage, RAT license audit, and JDiff API-diff settings. -->
- <property name="clover.db.dir" location="${build.dir}/test/clover/db"/>
- <property name="clover.report.dir" location="${build.dir}/test/clover/reports"/>
- <property name="rat.reporting.classname" value="rat.Report"/>
- <property name="jdiff.build.dir" value="${build.docs}/jdiff"/>
- <property name="jdiff.xml.dir" value="${lib.dir}/jdiff"/>
- <property name="jdiff.stable" value="0.19.2"/>
- <property name="jdiff.stable.javadoc"
- value="http://hadoop.apache.org/core/docs/r${jdiff.stable}/api/"/>
- <property name="scratch.dir" value="${user.home}/tmp"/>
- <!-- External command names, overridable for non-standard environments. -->
- <property name="svn.cmd" value="svn"/>
- <property name="grep.cmd" value="grep"/>
- <property name="patch.cmd" value="patch"/>
- <property name="make.cmd" value="make"/>
- <!-- IVY properties set here -->
- <property name="ivy.dir" location="ivy" />
- <!-- Pulls in ivy.version, ant-task.version, jdiff.version, etc. -->
- <loadproperties srcfile="${ivy.dir}/libraries.properties"/>
- <property name="mvnrepo" value="http://repo2.maven.org/maven2"/>
- <property name="asfrepo" value="https://repository.apache.org"/>
- <property name="ivy.jar" location="${ivy.dir}/ivy-${ivy.version}.jar"/>
- <property name="ivy_repo_url"
- value="${mvnrepo}/org/apache/ivy/ivy/${ivy.version}/ivy-${ivy.version}.jar"/>
- <property name="ant_task.jar"
- location="${ivy.dir}/maven-ant-tasks-${ant-task.version}.jar"/>
- <property name="tsk.org" value="/org/apache/maven/maven-ant-tasks/"/>
- <property name="ant_task_repo_url"
- value="${mvnrepo}${tsk.org}${ant-task.version}/maven-ant-tasks-${ant-task.version}.jar"/>
- <!-- Which ASF repository artifacts are deployed to; "staging" flips the
- condition further down. -->
- <property name="repo" value="snapshots"/>
- <property name="asfsnapshotrepo"
- value="${asfrepo}/content/repositories/snapshots"/>
- <property name="asfstagingrepo"
- value="${asfrepo}/service/local/staging/deploy/maven2"/>
- <property name="ivysettings.xml" location="${ivy.dir}/ivysettings.xml"/>
- <property name="ivy.org" value="org.apache.hadoop"/>
- <!-- NOTE(review): build.dir and dist.dir are already defined above; Ant
- properties are immutable, so these two redefinitions are no-ops and
- could be removed. -->
- <property name="build.dir" location="build" />
- <property name="dist.dir" value="${build.dir}/${final.name}"/>
- <property name="build.ivy.dir" location="${build.dir}/ivy" />
- <property name="build.ivy.lib.dir" location="${build.ivy.dir}/lib"/>
- <property name="common.ivy.lib.dir"
- location="${build.ivy.lib.dir}/${ant.project.name}/common"/>
- <property name="build.ivy.report.dir" location="${build.ivy.dir}/report"/>
- <!-- POM template/output and jar locations for each published artifact. -->
- <property name="hadoop-core.pom" location="${ivy.dir}/hadoop-core-pom.xml"/>
- <property name="hadoop-core-pom-template.xml"
- location="${ivy.dir}/hadoop-core-pom-template.xml"/>
- <property name="hadoop-core.jar" location="${build.dir}/${core.final.name}.jar"/>
- <property name="hadoop-test.pom" location="${ivy.dir}/hadoop-test-pom.xml"/>
- <property name="hadoop-test-pom-template.xml"
- location="${ivy.dir}/hadoop-test-pom-template.xml" />
- <property name="hadoop-test.jar" location="${build.dir}/${test.final.name}.jar"/>
- <property name="hadoop-tools.pom" location="${ivy.dir}/hadoop-tools-pom.xml"/>
- <property name="hadoop-tools-pom-template.xml"
- location="${ivy.dir}/hadoop-tools-pom-template.xml" />
- <property name="hadoop-tools.jar" location="${build.dir}/${tools.final.name}.jar"/>
- <property name="hadoop-examples.pom" location="${ivy.dir}/hadoop-examples-pom.xml"/>
- <property name="hadoop-examples-pom-template.xml"
- location="${ivy.dir}/hadoop-examples-pom-template.xml"/>
- <property name="hadoop-examples.jar"
- location="${build.dir}/${examples.final.name}.jar"/>
- <property name="hadoop-streaming.pom"
- location="${ivy.dir}/hadoop-streaming-pom.xml"/>
- <property name="hadoop-streaming-pom-template.xml"
- location="${ivy.dir}/hadoop-streaming-pom-template.xml"/>
- <property name="hadoop-streaming.jar"
- location="${build.dir}/contrib/streaming/${streaming.final.name}.jar"/>
- 
- <!--this is the naming policy for artifacts we want pulled down-->
- <property name="ivy.artifact.retrieve.pattern"
- value="${ant.project.name}/[conf]/[artifact]-[revision].[ext]"/>
- <!--this is how artifacts that get built are named-->
- <property name="ivy.publish.pattern" value="hadoop-[revision]-core.[ext]"/>
- <!-- jdiff.home property set -->
- <property name="jdiff.home"
- value="${build.ivy.lib.dir}/${ant.project.name}/jdiff"/>
- <property name="jdiff.jar" value="${jdiff.home}/jdiff-${jdiff.version}.jar"/>
- <property name="xerces.jar" value="${jdiff.home}/xerces-${xerces.version}.jar"/>
- <!-- clover.home is expected from the environment/command line. -->
- <property name="clover.jar" location="${clover.home}/lib/clover.jar"/>
- <available property="clover.present" file="${clover.jar}" />
- <!-- check if clover reports should be generated -->
- <condition property="clover.enabled">
- <and>
- <isset property="run.clover"/>
- <isset property="clover.present"/>
- </and>
- </condition>
- <!-- True when deploying to the ASF staging repo instead of snapshots. -->
- <condition property="staging">
- <equals arg1="${repo}" arg2="staging"/>
- </condition>
- <!-- the normal classpath -->
- <path id="classpath">
- <pathelement location="${build.classes}"/>
- <fileset dir="${lib.dir}">
- <include name="**/*.jar" />
- <exclude name="**/excluded/" />
- </fileset>
- <pathelement location="${conf.dir}"/>
- <!-- ivy-common.classpath is defined elsewhere (presumably by the
- ivy-retrieve-common target's Ivy cachepath) - not visible here. -->
- <path refid="ivy-common.classpath"/>
- </path>
- <!-- the unit test classpath: uses test.src.dir for configuration -->
- <path id="test.classpath">
- <pathelement location="${test.build.extraconf}"/>
- <pathelement location="${test.build.classes}" />
- <pathelement location="${test.src.dir}"/>
- <pathelement location="${build.dir}"/>
- <pathelement location="${build.examples}"/>
- <pathelement location="${build.tools}"/>
- <pathelement path="${clover.jar}"/>
- <fileset dir="${test.lib.dir}">
- <include name="**/*.jar"/>
- <exclude name="**/excluded/"/>
- </fileset>
- <path refid="classpath"/>
- </path>
- <!-- the cluster test classpath: uses conf.dir for configuration -->
- <path id="test.cluster.classpath">
- <path refid="classpath"/>
- <pathelement location="${test.build.classes}" />
- <pathelement location="${test.src.dir}"/>
- <pathelement location="${build.dir}"/>
- </path>
- <!-- ====================================================== -->
- <!-- Macro definitions -->
- <!-- ====================================================== -->
- <!-- Reusable worker macro: produce a gzip-compressed tar with GNU
- long-filename support. Callers pass the destination file and supply
- the entries via the param.listofitems element. -->
- <macrodef name="macro_tar" description="Worker Macro for tar">
- <attribute name="param.destfile"/>
- <element name="param.listofitems"/>
- <sequential>
- <tar destfile="@{param.destfile}" compression="gzip" longfile="gnu">
- <param.listofitems/>
- </tar>
- </sequential>
- </macrodef>
- <!-- ====================================================== -->
- <!-- Stuff needed by all targets -->
- <!-- ====================================================== -->
- <!-- Creates the whole build/test directory skeleton, materializes
- *.template config files, copies webapp static content, and stamps
- the tree with version info. -->
- <target name="init" depends="ivy-retrieve-common">
- <mkdir dir="${build.dir}"/>
- <mkdir dir="${build.classes}"/>
- <mkdir dir="${build.tools}"/>
- <mkdir dir="${build.src}"/>
- <mkdir dir="${build.webapps}/task/WEB-INF"/>
- <mkdir dir="${build.webapps}/job/WEB-INF"/>
- <mkdir dir="${build.webapps}/hdfs/WEB-INF"/>
- <mkdir dir="${build.webapps}/datanode/WEB-INF"/>
- <mkdir dir="${build.webapps}/secondary/WEB-INF"/>
- <mkdir dir="${build.examples}"/>
- <mkdir dir="${build.anttasks}"/>
- <mkdir dir="${build.dir}/c++"/>
- 
- <mkdir dir="${test.build.dir}"/>
- <mkdir dir="${test.build.classes}"/>
- <mkdir dir="${test.build.testjar}"/>
- <mkdir dir="${test.build.testshell}"/>
- <mkdir dir="${test.build.extraconf}"/>
- <!-- NOTE(review): this touch resets every *.template file's mtime to
- the epoch (millis="0"), apparently so the template-to-file copies
- below never clobber an already-generated, newer file - confirm. -->
- <tempfile property="touch.temp.file" destDir="${java.io.tmpdir}"/>
- <touch millis="0" file="${touch.temp.file}">
- <fileset dir="${conf.dir}" includes="**/*.template"/>
- <fileset dir="${contrib.dir}" includes="**/*.template"/>
- </touch>
- <delete file="${touch.temp.file}"/>
- <!-- copy all of the jsp and static files -->
- <copy todir="${build.webapps}">
- <fileset dir="${src.webapps}">
- <exclude name="**/*.jsp" />
- </fileset>
- </copy>
- <!-- Materialize each foo.template in place as plain foo. -->
- <copy todir="${conf.dir}" verbose="true">
- <fileset dir="${conf.dir}" includes="**/*.template"/>
- <mapper type="glob" from="*.template" to="*"/>
- </copy>
- <copy todir="${contrib.dir}" verbose="true">
- <fileset dir="${contrib.dir}" includes="**/*.template"/>
- <mapper type="glob" from="*.template" to="*"/>
- </copy>
- <!-- Record ${version} in the source tree; script not visible here. -->
- <exec executable="sh">
- <arg line="src/saveVersion.sh ${version}"/>
- </exec>
- 
- <exec executable="sh">
- <arg line="src/fixFontsPath.sh ${src.docs.cn}"/>
- </exec>
- </target>
- <!-- ====================================================== -->
- <!-- Compile the Java files -->
- <!-- ====================================================== -->
- <!-- Regenerate the record-compiler parser from rcc.jj; only runs when
- the user supplies -Djavacc.home=... (the if= guard). Output goes
- back into the source tree. -->
- <target name="record-parser" depends="init" if="javacc.home">
- <javacc
- target="${core.src.dir}/org/apache/hadoop/record/compiler/generated/rcc.jj"
- outputdirectory="${core.src.dir}/org/apache/hadoop/record/compiler/generated"
- javacchome="${javacc.home}" />
- </target>
-
- <!-- Compile only the record compiler, then register the <recordcc>
- task from those freshly built classes so later targets can
- translate .jr record definitions into Java. -->
- <target name="compile-rcc-compiler" depends="init, record-parser">
- <javac
- encoding="${build.encoding}"
- srcdir="${core.src.dir}"
- includes="org/apache/hadoop/record/compiler/**/*.java"
- destdir="${build.classes}"
- debug="${javac.debug}"
- optimize="${javac.optimize}"
- target="${javac.version}"
- source="${javac.version}"
- deprecation="${javac.deprecation}">
- <compilerarg line="${javac.args}"/>
- <classpath refid="classpath"/>
- </javac>
- 
- <taskdef name="recordcc" classname="org.apache.hadoop.record.compiler.ant.RccTask">
- <classpath refid="classpath" />
- </taskdef>
- </target>
-
- <!-- Compile src/core and register the <jsp-compile> task (Jasper)
- used by the mapred/hdfs targets below. -->
- <target name="compile-core-classes" depends="init, compile-rcc-compiler">
- <taskdef classname="org.apache.jasper.JspC" name="jsp-compile" >
- <classpath refid="test.classpath"/>
- </taskdef>
- <!-- Compile Java files (excluding JSPs) checking warnings -->
- <javac
- encoding="${build.encoding}"
- srcdir="${core.src.dir}"
- includes="org/apache/hadoop/**/*.java"
- destdir="${build.classes}"
- debug="${javac.debug}"
- optimize="${javac.optimize}"
- target="${javac.version}"
- source="${javac.version}"
- deprecation="${javac.deprecation}">
- <compilerarg line="${javac.args} ${javac.args.warnings}" />
- <classpath refid="classpath"/>
- </javac>
- <!-- Ship resource files and core-default.xml alongside the classes. -->
- <copy todir="${build.classes}">
- <fileset dir="${core.src.dir}" includes="**/*.properties"/>
- <fileset dir="${core.src.dir}" includes="core-default.xml"/>
- </copy>
- 
- </target>
- <!-- Translate the task/job tracker webapp JSPs to Java servlet source
- in ${build.src}, then compile src/mapred plus that generated code. -->
- <target name="compile-mapred-classes" depends="compile-core-classes">
- <jsp-compile
- uriroot="${src.webapps}/task"
- outputdir="${build.src}"
- package="org.apache.hadoop.mapred"
- webxml="${build.webapps}/task/WEB-INF/web.xml">
- </jsp-compile>
- <jsp-compile
- uriroot="${src.webapps}/job"
- outputdir="${build.src}"
- package="org.apache.hadoop.mapred"
- webxml="${build.webapps}/job/WEB-INF/web.xml">
- </jsp-compile>
- <!-- Compile Java files (excluding JSPs) checking warnings -->
- <javac
- encoding="${build.encoding}"
- srcdir="${mapred.src.dir};${build.src}"
- includes="org/apache/hadoop/**/*.java"
- destdir="${build.classes}"
- debug="${javac.debug}"
- optimize="${javac.optimize}"
- target="${javac.version}"
- source="${javac.version}"
- deprecation="${javac.deprecation}">
- <compilerarg line="${javac.args} ${javac.args.warnings}" />
- <classpath refid="classpath"/>
- </javac>
- 
- <copy todir="${build.classes}">
- <fileset dir="${mapred.src.dir}" includes="**/*.properties"/>
- <fileset dir="${mapred.src.dir}" includes="mapred-default.xml"/>
- </copy>
- </target>
- <!-- Same pattern as compile-mapred-classes, for the namenode/datanode
- webapp JSPs and src/hdfs. -->
- <target name="compile-hdfs-classes" depends="compile-core-classes">
- <jsp-compile
- uriroot="${src.webapps}/hdfs"
- outputdir="${build.src}"
- package="org.apache.hadoop.hdfs.server.namenode"
- webxml="${build.webapps}/hdfs/WEB-INF/web.xml">
- </jsp-compile>
- <jsp-compile
- uriroot="${src.webapps}/datanode"
- outputdir="${build.src}"
- package="org.apache.hadoop.hdfs.server.datanode"
- webxml="${build.webapps}/datanode/WEB-INF/web.xml">
- </jsp-compile>
- <!-- Compile Java files (excluding JSPs) checking warnings -->
- <javac
- encoding="${build.encoding}"
- srcdir="${hdfs.src.dir};${build.src}"
- includes="org/apache/hadoop/**/*.java"
- destdir="${build.classes}"
- debug="${javac.debug}"
- optimize="${javac.optimize}"
- target="${javac.version}"
- source="${javac.version}"
- deprecation="${javac.deprecation}">
- <compilerarg line="${javac.args} ${javac.args.warnings}" />
- <classpath refid="classpath"/>
- </javac>
- <copy todir="${build.classes}">
- <fileset dir="${hdfs.src.dir}" includes="**/*.properties"/>
- <fileset dir="${hdfs.src.dir}" includes="hdfs-default.xml"/>
- </copy>
- </target>
- <!-- Compile src/tools into its own output dir (${build.tools}), kept
- separate from the core classes. -->
- <target name="compile-tools" depends="init">
- <javac
- encoding="${build.encoding}"
- srcdir="${tools.src}"
- includes="org/apache/hadoop/**/*.java"
- destdir="${build.tools}"
- debug="${javac.debug}"
- optimize="${javac.optimize}"
- target="${javac.version}"
- source="${javac.version}"
- deprecation="${javac.deprecation}">
- <compilerarg line="${javac.args} ${javac.args.warnings}" />
- <classpath refid="classpath"/>
- </javac>
- 
- <copy todir="${build.tools}">
- <fileset
- dir="${tools.src}"
- includes="**/*.properties"
- />
- </copy>
- </target>
- <!-- Convenience wrapper: re-enters compile-core-native with the
- compile.native flag forced on (that target is a no-op otherwise). -->
- <target name="compile-native">
- <antcall target="compile-core-native">
- <param name="compile.native" value="true"/>
- </antcall>
- </target>
- <!-- Build the JNI native library: generate JNI headers for the zlib
- compressor classes, then run the autoconf configure script and make
- in ${build.native}, and finally libtool-install the result. Only
- runs when compile.native is set (see compile-native above). -->
- <target name="compile-core-native" depends="compile-core-classes"
- if="compile.native">
- 
- <mkdir dir="${build.native}/lib"/>
- <mkdir dir="${build.native}/src/org/apache/hadoop/io/compress/zlib"/>
- <javah
- classpath="${build.classes}"
- destdir="${build.native}/src/org/apache/hadoop/io/compress/zlib"
- force="yes"
- verbose="yes"
- >
- <class name="org.apache.hadoop.io.compress.zlib.ZlibCompressor" />
- <class name="org.apache.hadoop.io.compress.zlib.ZlibDecompressor" />
- </javah>
- <exec dir="${build.native}" executable="sh" failonerror="true">
- <env key="OS_NAME" value="${os.name}"/>
- <env key="OS_ARCH" value="${os.arch}"/>
- <env key="JVM_DATA_MODEL" value="${sun.arch.data.model}"/>
- <env key="HADOOP_NATIVE_SRCDIR" value="${native.src.dir}"/>
- <arg line="${native.src.dir}/configure"/>
- </exec>
- <exec dir="${build.native}" executable="${make.cmd}" failonerror="true">
- <env key="OS_NAME" value="${os.name}"/>
- <env key="OS_ARCH" value="${os.arch}"/>
- <env key="JVM_DATA_MODEL" value="${sun.arch.data.model}"/>
- <env key="HADOOP_NATIVE_SRCDIR" value="${native.src.dir}"/>
- </exec>
- <exec dir="${build.native}" executable="sh" failonerror="true">
- <arg line="${build.native}/libtool --mode=install cp ${build.native}/lib/libhadoop.la ${build.native}/lib"/>
- </exec>
- </target>
- <!-- Aggregate target: compiles all core Java, the optional native lib,
- and the C++ pieces. (clover is defined elsewhere in this file.) -->
- <target name="compile-core"
- depends="clover,compile-core-classes,compile-mapred-classes,
- compile-hdfs-classes,compile-core-native,compile-c++"
- description="Compile core only">
- </target>
- <!-- Delegate to src/contrib/build.xml, forwarding ${version}. -->
- <target name="compile-contrib" depends="compile-core,compile-c++-libhdfs">
- <subant target="compile">
- <property name="version" value="${version}"/>
- <fileset file="${contrib.dir}/build.xml"/>
- </subant>
- </target>
- 
- <!-- The project's default target (see <project default="compile">). -->
- <target name="compile" depends="compile-core, compile-contrib, compile-ant-tasks, compile-tools" description="Compile core, contrib">
- </target>
- <!-- Compile src/examples into ${build.examples}; needs ${build.tools}
- on the classpath in addition to the normal one. -->
- <target name="compile-examples"
- depends="compile-core,compile-tools,compile-c++-examples">
- <javac
- encoding="${build.encoding}"
- srcdir="${examples.dir}"
- includes="org/apache/hadoop/**/*.java"
- destdir="${build.examples}"
- debug="${javac.debug}"
- optimize="${javac.optimize}"
- target="${javac.version}"
- source="${javac.version}"
- deprecation="${javac.deprecation}">
- <compilerarg line="${javac.args} ${javac.args.warnings}" />
- <classpath>
- <path refid="classpath"/>
- <pathelement location="${build.tools}"/>
- </classpath>
- </javac>
- </target>
- <!-- ================================================================== -->
- <!-- Make hadoop.jar -->
- <!-- ================================================================== -->
- <!-- -->
- <!-- ================================================================== -->
- <target name="jar" depends="compile-core" description="Make hadoop.jar">
- <!-- Bundle the bin/ scripts inside the jar as bin.tgz (mode 755). -->
- <tar compression="gzip" destfile="${build.classes}/bin.tgz">
- <tarfileset dir="bin" mode="755"/>
- </tar>
- <!-- Core jar: classes plus default logging/metrics config and the
- webapps tree under webapps/. -->
- <jar jarfile="${build.dir}/${core.final.name}.jar"
- basedir="${build.classes}">
- <manifest>
- <section name="org/apache/hadoop">
- <attribute name="Implementation-Title" value="Hadoop"/>
- <attribute name="Implementation-Version" value="${version}"/>
- <attribute name="Implementation-Vendor" value="Apache"/>
- </section>
- </manifest>
- <fileset file="${conf.dir}/commons-logging.properties"/>
- <fileset file="${conf.dir}/log4j.properties"/>
- <fileset file="${conf.dir}/hadoop-metrics.properties"/>
- <zipfileset dir="${build.webapps}" prefix="webapps"/>
- </jar>
- </target>
- <!-- ================================================================== -->
- <!-- Make the Hadoop examples jar. -->
- <!-- ================================================================== -->
- <!-- -->
- <!-- ================================================================== -->
- <!-- Examples jar is runnable: Main-Class dispatches via ExampleDriver. -->
- <target name="examples" depends="jar, compile-examples" description="Make the Hadoop examples jar.">
- <jar jarfile="${build.dir}/${examples.final.name}.jar"
- basedir="${build.examples}">
- <manifest>
- <attribute name="Main-Class"
- value="org/apache/hadoop/examples/ExampleDriver"/>
- </manifest>
- </jar>
- </target>
- <target name="tools-jar" depends="jar, compile-tools"
- description="Make the Hadoop tools jar.">
- <!-- Package the compiled tools classes (${build.tools}).
- FIX: the original manifest declared Main-Class
- org/apache/hadoop/examples/ExampleDriver, copy-pasted from the
- examples jar above. That class is compiled into ${build.examples},
- not ${build.tools}, so it is absent from this jar and
- "java -jar hadoop-tools.jar" could never resolve it. The bogus
- Main-Class is dropped; the jar's contents are unchanged. -->
- <jar jarfile="${build.dir}/${tools.final.name}.jar"
- basedir="${build.tools}"/>
- </target>
- <!-- ================================================================== -->
- <!-- Make the Hadoop metrics jar. (for use outside Hadoop) -->
- <!-- ================================================================== -->
- <!-- -->
- <!-- ================================================================== -->
- <!-- Standalone jar containing only the metrics packages. -->
- <target name="metrics.jar" depends="compile-core" description="Make the Hadoop metrics jar. (for use outside Hadoop)">
- <jar jarfile="${build.dir}/hadoop-metrics-${version}.jar"
- basedir="${build.classes}">
- <include name="**/metrics/**" />
- <exclude name="**/package.html" />
- </jar>
- </target>
- <target name="generate-test-records" depends="compile-rcc-compiler">
- <recordcc destdir="${test.generated.dir}">
- <fileset dir="${test.src.dir}"
- includes="**/*.jr" />
- </recordcc>
- </target>
-
- <!-- ================================================================== -->
- <!-- Compile test code -->
- <!-- ================================================================== -->
- <target name="compile-core-test" depends="compile-examples, compile-tools, generate-test-records">
- <!-- 1) Compile the sources generated from the .jr record definitions. -->
- <javac
- encoding="${build.encoding}"
- srcdir="${test.generated.dir}"
- includes="org/apache/hadoop/**/*.java"
- destdir="${test.build.classes}"
- debug="${javac.debug}"
- optimize="${javac.optimize}"
- target="${javac.version}"
- source="${javac.version}"
- deprecation="${javac.deprecation}">
- <compilerarg line="${javac.args}" />
- <classpath refid="test.classpath"/>
- </javac>
- <!-- 2) Compile the hand-written unit-test sources (with warnings). -->
- <javac
- encoding="${build.encoding}"
- srcdir="${test.src.dir}"
- includes="org/apache/hadoop/**/*.java"
- destdir="${test.build.classes}"
- debug="${javac.debug}"
- optimize="${javac.optimize}"
- target="${javac.version}"
- source="${javac.version}"
- deprecation="${javac.deprecation}">
- <compilerarg line="${javac.args} ${javac.args.warnings}" />
- <classpath refid="test.classpath"/>
- </javac>
- <!-- 3) Build the auxiliary testjar classes and package them as
- testjob.jar (used by tests that submit a job jar). -->
- <javac
- encoding="${build.encoding}"
- srcdir="${test.src.dir}/testjar"
- includes="*.java"
- destdir="${test.build.testjar}"
- debug="${javac.debug}"
- optimize="${javac.optimize}"
- target="${javac.version}"
- source="${javac.version}"
- deprecation="${javac.deprecation}">
- <compilerarg line="${javac.args} ${javac.args.warnings}" />
- <classpath refid="test.classpath"/>
- </javac>
- <delete file="${test.build.testjar}/testjob.jar"/>
- <jar jarfile="${test.build.testjar}/testjob.jar"
- basedir="${test.build.testjar}">
- </jar>
- <!-- 4) Same for the testshell classes -> testshell.jar. -->
- <javac
- encoding="${build.encoding}"
- srcdir="${test.src.dir}/testshell"
- includes="*.java"
- destdir="${test.build.testshell}"
- debug="${javac.debug}"
- optimize="${javac.optimize}"
- target="${javac.version}"
- source="${javac.version}"
- deprecation="${javac.deprecation}">
- <compilerarg line="${javac.args} ${javac.args.warnings}"/>
- <classpath refid="test.classpath"/>
- </javac>
- <delete file="${test.build.testshell}/testshell.jar"/>
- <jar jarfile="${test.build.testshell}/testshell.jar"
- basedir="${test.build.testshell}">
- </jar>
-
- <!-- 5) Recreate the cache/debug data dirs and stage the fixture files
- individual tests read at runtime. -->
- <delete dir="${test.cache.data}"/>
- <mkdir dir="${test.cache.data}"/>
- <delete dir="${test.debug.data}"/>
- <mkdir dir="${test.debug.data}"/>
- <copy file="${test.src.dir}/org/apache/hadoop/mapred/testscript.txt" todir="${test.debug.data}"/>
- <copy file="${test.src.dir}/org/apache/hadoop/mapred/test.txt" todir="${test.cache.data}"/>
- <copy file="${test.src.dir}/org/apache/hadoop/mapred/test.jar" todir="${test.cache.data}"/>
- <copy file="${test.src.dir}/org/apache/hadoop/mapred/test.zip" todir="${test.cache.data}"/>
- <copy file="${test.src.dir}/org/apache/hadoop/mapred/test.tar" todir="${test.cache.data}"/>
- <copy file="${test.src.dir}/org/apache/hadoop/mapred/test.tgz" todir="${test.cache.data}"/>
- <copy file="${test.src.dir}/org/apache/hadoop/mapred/test.tar.gz" todir="${test.cache.data}"/>
- <copy file="${test.src.dir}/org/apache/hadoop/hdfs/hadoop-14-dfs-dir.tgz" todir="${test.cache.data}"/>
- <copy file="${test.src.dir}/org/apache/hadoop/hdfs/hadoop-dfs-dir.txt" todir="${test.cache.data}"/>
- <copy file="${test.src.dir}/org/apache/hadoop/cli/testConf.xml" todir="${test.cache.data}"/>
- <copy file="${test.src.dir}/org/apache/hadoop/cli/clitest_data/data15bytes" todir="${test.cache.data}"/>
- <copy file="${test.src.dir}/org/apache/hadoop/cli/clitest_data/data30bytes" todir="${test.cache.data}"/>
- <copy file="${test.src.dir}/org/apache/hadoop/cli/clitest_data/data60bytes" todir="${test.cache.data}"/>
- <copy file="${test.src.dir}/org/apache/hadoop/cli/clitest_data/data120bytes" todir="${test.cache.data}"/>
- </target>
- <!-- ================================================================== -->
- <!-- Make hadoop-test.jar -->
- <!-- ================================================================== -->
- <!-- -->
- <!-- ================================================================== -->
- <target name="jar-test" depends="compile-core-test" description="Make hadoop-test.jar">
- <!-- Package the compiled test classes; AllTestDriver is the jar's
- entry point (slash-form Main-Class, as elsewhere in this file). -->
- <jar jarfile="${build.dir}/${test.final.name}.jar"
- basedir="${test.build.classes}">
- <manifest>
- <attribute name="Main-Class"
- value="org/apache/hadoop/test/AllTestDriver"/>
- <section name="org/apache/hadoop">
- <attribute name="Implementation-Title" value="Hadoop"/>
- <attribute name="Implementation-Version" value="${version}"/>
- <attribute name="Implementation-Vendor" value="Apache"/>
- </section>
- </manifest>
- </jar>
- </target>
- <!-- ================================================================== -->
- <!-- Run unit tests -->
- <!-- ================================================================== -->
- <target name="test-core" depends="jar-test" description="Run core unit tests">
- <!-- Reset failure marker and the data/log scratch dirs, then stage the
- security policy file the tests load from the extra conf dir. -->
- <delete file="${test.build.dir}/testsfailed"/>
- <delete dir="${test.build.data}"/>
- <mkdir dir="${test.build.data}"/>
- <delete dir="${test.log.dir}"/>
- <mkdir dir="${test.log.dir}"/>
- <copy file="${test.src.dir}/hadoop-policy.xml"
- todir="${test.build.extraconf}" />
- <!-- Forked JUnit run; failures set tests.failed rather than halting
- so checkfailure below can decide what to do. -->
- <junit showoutput="${test.output}"
- printsummary="${test.junit.printsummary}"
- haltonfailure="${test.junit.haltonfailure}"
- fork="yes"
- forkmode="${test.junit.fork.mode}"
- maxmemory="${test.junit.maxmemory}"
- dir="${basedir}" timeout="${test.timeout}"
- errorProperty="tests.failed" failureProperty="tests.failed">
- <sysproperty key="test.build.data" value="${test.build.data}"/>
- <sysproperty key="test.cache.data" value="${test.cache.data}"/>
- <sysproperty key="test.debug.data" value="${test.debug.data}"/>
- <sysproperty key="hadoop.log.dir" value="${test.log.dir}"/>
- <sysproperty key="test.src.dir" value="${test.src.dir}"/>
- <sysproperty key="test.build.extraconf" value="${test.build.extraconf}" />
- <sysproperty key="hadoop.policy.file" value="hadoop-policy.xml"/>
- <sysproperty key="java.library.path"
- value="${build.native}/lib:${lib.dir}/native/${build.platform}"/>
- <sysproperty key="install.c++.examples" value="${install.c++.examples}"/>
- <!-- set io.compression.codec.lzo.class in the child jvm only if it is set -->
- <syspropertyset dynamic="no">
- <propertyref name="io.compression.codec.lzo.class"/>
- </syspropertyset>
- <!-- set compile.c++ in the child jvm only if it is set -->
- <syspropertyset dynamic="no">
- <propertyref name="compile.c++"/>
- </syspropertyset>
- <classpath refid="${test.classpath.id}"/>
- <formatter type="${test.junit.output.format}" />
- <batchtest todir="${test.build.dir}" unless="testcase">
- <fileset dir="${test.src.dir}"
- includes="**/${test.include}.java"
- excludes="**/${test.exclude}.java" />
- </batchtest>
- <batchtest todir="${test.build.dir}" if="testcase">
- <fileset dir="${test.src.dir}" includes="**/${testcase}.java"/>
- </batchtest>
- </junit>
- <antcall target="checkfailure"/>
- </target>
- <!-- Record a failure marker file; fail the build immediately unless the
- caller set continueOnFailure (as the aggregate "test" target does). -->
- <target name="checkfailure" if="tests.failed">
- <touch file="${test.build.dir}/testsfailed"/>
- <fail unless="continueOnFailure">Tests failed!</fail>
- </target>
- <!-- Delegate to each contrib module's own "test" target. -->
- <target name="test-contrib" depends="compile, compile-core-test" description="Run contrib unit tests">
- <subant target="test">
- <property name="version" value="${version}"/>
- <property name="clover.jar" value="${clover.jar}"/>
- <fileset file="${contrib.dir}/build.xml"/>
- </subant>
- </target>
-
- <!-- Aggregate target: run core then contrib tests, deferring the overall
- failure (via the testsfailed marker file) until both have run. -->
- <target name="test" description="Run core, contrib tests">
- <delete file="${test.build.dir}/testsfailed"/>
- <property name="continueOnFailure" value="true"/>
- <antcall target="test-core"/>
- <antcall target="test-contrib"/>
- <available file="${test.build.dir}/testsfailed" property="testsfailed"/>
- <fail if="testsfailed">Tests failed!</fail>
- </target>
- <!-- Run all unit tests, not just Test*, and use non-test configuration. -->
- <target name="test-cluster" description="Run all unit tests, not just Test*, and use non-test configuration.">
- <antcall target="test">
- <param name="test.include" value="*"/>
- <param name="test.classpath.id" value="test.cluster.classpath"/>
- </antcall>
- </target>
- <!-- Convenience target for nightly builds: full test run plus tarball. -->
- <target name="nightly" depends="test, tar">
- </target>
-
- <!-- ================================================================== -->
- <!-- Run optional third-party tool targets -->
- <!-- ================================================================== -->
- <target name="checkstyle" depends="ivy-retrieve-checkstyle,check-for-checkstyle" if="checkstyle.present" description="Run optional third-party tool targets">
- <taskdef resource="checkstyletask.properties">
- <classpath refid="checkstyle-classpath"/>
- </taskdef>
-
- <mkdir dir="${test.build.dir}"/>
-
- <!-- Lint core/mapred/hdfs sources (generated code excluded); violations
- are reported, not fatal. -->
- <checkstyle config="${test.src.dir}/checkstyle.xml"
- failOnViolation="false">
- <fileset dir="${core.src.dir}" includes="**/*.java" excludes="**/generated/**"/>
- <fileset dir="${mapred.src.dir}" includes="**/*.java" excludes="**/generated/**"/>
- <fileset dir="${hdfs.src.dir}" includes="**/*.java" excludes="**/generated/**"/>
- <formatter type="xml" toFile="${test.build.dir}/checkstyle-errors.xml"/>
- </checkstyle>
-
- <!-- Render the XML report to HTML. -->
- <xslt style="${test.src.dir}/checkstyle-noframes-sorted.xsl"
- in="${test.build.dir}/checkstyle-errors.xml"
- out="${test.build.dir}/checkstyle-errors.html"/>
- </target>
-
- <!-- Sets checkstyle.present when the checkstyle task is on the classpath. -->
- <target name="check-for-checkstyle">
- <available property="checkstyle.present" resource="checkstyletask.properties">
- <classpath refid="checkstyle-classpath"/>
- </available>
- </target>
- <!-- Default findbugs.home to empty so the availability check below can
- run even when the user did not pass -Dfindbugs.home=... -->
- <property name="findbugs.home" value=""/>
- <target name="findbugs" depends="check-for-findbugs, tar" if="findbugs.present" description="Run findbugs if present">
- <property name="findbugs.out.dir" value="${test.build.dir}/findbugs"/>
- <property name="findbugs.exclude.file" value="${test.src.dir}/findbugsExcludeFile.xml"/>
- <property name="findbugs.report.htmlfile" value="${findbugs.out.dir}/hadoop-findbugs-report.html"/>
- <property name="findbugs.report.xmlfile" value="${findbugs.out.dir}/hadoop-findbugs-report.xml"/>
- <taskdef name="findbugs" classname="edu.umd.cs.findbugs.anttask.FindBugsTask"
- classpath="${findbugs.home}/lib/findbugs-ant.jar" />
- <mkdir dir="${findbugs.out.dir}"/>
- <!-- Analyze the built jars (hence the "tar" dependency) with max effort,
- writing an XML report that is then styled to HTML. -->
- <findbugs home="${findbugs.home}" output="xml:withMessages"
- outputFile="${findbugs.report.xmlfile}" effort="max"
- excludeFilter="${findbugs.exclude.file}" jvmargs="-Xmx512M">
- <auxClasspath>
- <fileset dir="${lib.dir}">
- <include name="**/*.jar"/>
- </fileset>
- <fileset dir="${build.ivy.lib.dir}/${ant.project.name}/common">
- <include name="**/*.jar"/>
- </fileset>
- </auxClasspath>
- <sourcePath path="${core.src.dir}"/>
- <sourcePath path="${mapred.src.dir}"/>
- <sourcePath path="${hdfs.src.dir}"/>
- <sourcePath path="${examples.dir}" />
- <sourcePath path="${tools.src}" />
- <sourcePath path="${basedir}/src/contrib/streaming/src/java" />
- <class location="${build.dir}/${core.final.name}.jar" />
- <class location="${build.dir}/${examples.final.name}.jar" />
- <class location="${build.dir}/${tools.final.name}.jar" />
- <class location="${build.dir}/contrib/streaming/${streaming.final.name}.jar" />
- </findbugs>
- <xslt style="${findbugs.home}/src/xsl/default.xsl"
- in="${findbugs.report.xmlfile}"
- out="${findbugs.report.htmlfile}"/>
- </target>
-
- <!-- Sets findbugs.present when the findbugs jar exists under findbugs.home. -->
- <target name="check-for-findbugs">
- <available property="findbugs.present"
- file="${findbugs.home}/lib/findbugs.jar" />
- </target>
- <!-- ================================================================== -->
- <!-- Documentation -->
- <!-- ================================================================== -->
-
- <target name="docs" depends="forrest.check" description="Generate forrest-based documentation. To use, specify -Dforrest.home=<base of Apache Forrest installation> on the command line." if="forrest.home">
- <!-- Forrest is run with JAVA_HOME pointed at java5.home (see
- java5.check); its output is copied into build.docs along with the
- release notes and the styled *-default.xml configuration docs. -->
- <exec dir="${docs.src}" executable="${forrest.home}/bin/forrest"
- failonerror="true">
- <env key="JAVA_HOME" value="${java5.home}"/>
- </exec>
- <copy todir="${build.docs}">
- <fileset dir="${docs.src}/build/site/" />
- </copy>
- <copy file="${docs.src}/releasenotes.html" todir="${build.docs}"/>
- <style basedir="${core.src.dir}" destdir="${build.docs}"
- includes="core-default.xml" style="conf/configuration.xsl"/>
- <style basedir="${hdfs.src.dir}" destdir="${build.docs}"
- includes="hdfs-default.xml" style="conf/configuration.xsl"/>
- <style basedir="${mapred.src.dir}" destdir="${build.docs}"
- includes="mapred-default.xml" style="conf/configuration.xsl"/>
- <antcall target="changes-to-html"/>
- <antcall target="cn-docs"/>
- </target>
- <!-- Chinese-language counterpart of "docs", built from src.docs.cn. -->
- <target name="cn-docs" depends="forrest.check, init"
- description="Generate forrest-based Chinese documentation. To use, specify -Dforrest.home=<base of Apache Forrest installation> on the command line."
- if="forrest.home">
- <exec dir="${src.docs.cn}" executable="${forrest.home}/bin/forrest" failonerror="true">
- <env key="LANG" value="en_US.utf8"/>
- <env key="JAVA_HOME" value="${java5.home}"/>
- </exec>
- <copy todir="${build.docs.cn}">
- <fileset dir="${src.docs.cn}/build/site/" />
- </copy>
- <style basedir="${core.src.dir}" destdir="${build.docs.cn}"
- includes="core-default.xml" style="conf/configuration.xsl"/>
- <style basedir="${hdfs.src.dir}" destdir="${build.docs.cn}"
- includes="hdfs-default.xml" style="conf/configuration.xsl"/>
- <style basedir="${mapred.src.dir}" destdir="${build.docs.cn}"
- includes="mapred-default.xml" style="conf/configuration.xsl"/>
- <antcall target="changes-to-html"/>
- </target>
- <!-- Guard targets: fail fast with a clear message when forrest.home or
- java5.home is missing. -->
- <target name="forrest.check" unless="forrest.home" depends="java5.check">
- <fail message="'forrest.home' is not defined. Please pass -Dforrest.home=<base of Apache Forrest installation> to Ant on the command-line." />
- </target>
- <target name="java5.check" unless="java5.home">
- <fail message="'java5.home' is not defined. Forrest requires Java 5. Please pass -Djava5.home=<base of Java 5 distribution> to Ant on the command-line." />
- </target>
-
- <!-- Developer javadoc: includes core, mapred, hdfs, examples and several
- contrib source trees (streaming, data_join, index). -->
- <target name="javadoc-dev" description="Generate javadoc for hadoop developers">
- <mkdir dir="${build.javadoc.dev}"/>
- <javadoc
- overview="${core.src.dir}/overview.html"
- packagenames="org.apache.hadoop.*"
- destdir="${build.javadoc.dev}"
- author="true"
- version="true"
- use="true"
- windowtitle="${Name} ${version} API"
- doctitle="${Name} ${version} Developer API"
- bottom="Copyright &copy; ${year} The Apache Software Foundation"
- >
- <packageset dir="${core.src.dir}"/>
- <packageset dir="${mapred.src.dir}"/>
- <packageset dir="${hdfs.src.dir}"/>
- <packageset dir="${examples.dir}"/>
- <packageset dir="src/contrib/streaming/src/java"/>
- <packageset dir="src/contrib/data_join/src/java"/>
- <packageset dir="src/contrib/index/src/java"/>
- <link href="${javadoc.link.java}"/>
- <classpath >
- <path refid="classpath" />
- <fileset dir="src/contrib/">
- <include name="*/lib/*.jar" />
- </fileset>
- <pathelement path="${java.class.path}"/>
- </classpath>
- <group title="Core" packages="org.apache.*"/>
- <group title="Examples" packages="org.apache.hadoop.examples*"/>
- <group title="contrib: Streaming" packages="org.apache.hadoop.streaming*"/>
- <group title="contrib: DataJoin" packages="org.apache.hadoop.contrib.utils.join*"/>
- <group title="contrib: Index" packages="org.apache.hadoop.contrib.index*"/>
- </javadoc>
- </target>
- <!-- Public javadoc: same source trees as javadoc-dev plus failmon. -->
- <target name="javadoc" depends="compile, ivy-retrieve-javadoc" description="Generate javadoc">
- <mkdir dir="${build.javadoc}"/>
- <javadoc
- overview="${core.src.dir}/overview.html"
- packagenames="org.apache.hadoop.*"
- destdir="${build.javadoc}"
- author="true"
- version="true"
- use="true"
- windowtitle="${Name} ${version} API"
- doctitle="${Name} ${version} API"
- bottom="Copyright &copy; ${year} The Apache Software Foundation"
- >
- <packageset dir="${core.src.dir}"/>
- <packageset dir="${mapred.src.dir}"/>
- <!-- Fix: include the HDFS sources, matching the javadoc-dev target;
- they were previously omitted from the public javadoc. -->
- <packageset dir="${hdfs.src.dir}"/>
- <packageset dir="${examples.dir}"/>
- <packageset dir="src/contrib/streaming/src/java"/>
- <packageset dir="src/contrib/data_join/src/java"/>
- <packageset dir="src/contrib/index/src/java"/>
- <packageset dir="src/contrib/failmon/src/java/"/>
-
- <link href="${javadoc.link.java}"/>
- <classpath >
- <path refid="classpath" />
- <fileset dir="src/contrib/">
- <include name="*/lib/*.jar" />
- </fileset>
- <path refid="javadoc-classpath"/>
- <pathelement path="${java.class.path}"/>
- <pathelement location="${build.tools}"/>
- </classpath>
- <group title="Core" packages="org.apache.*"/>
- <group title="Examples" packages="org.apache.hadoop.examples*"/>
- <group title="contrib: Streaming" packages="org.apache.hadoop.streaming*"/>
- <group title="contrib: DataJoin" packages="org.apache.hadoop.contrib.utils.join*"/>
- <group title="contrib: Index" packages="org.apache.hadoop.contrib.index*"/>
- <group title="contrib: FailMon" packages="org.apache.hadoop.contrib.failmon*"/>
- </javadoc>
- </target>
- <!-- Emit the jdiff API description XML for the current version. -->
- <target name="api-xml" depends="ivy-retrieve-jdiff,javadoc,write-null">
- <javadoc>
- <doclet name="jdiff.JDiff"
- path="${jdiff.jar}:${xerces.jar}">
- <param name="-apidir" value="${jdiff.xml.dir}"/>
- <param name="-apiname" value="hadoop ${version}"/>
- </doclet>
- <!-- NOTE(review): no packageset for src/hdfs here, although api-report
- includes src/hdfs in its sourcepath - confirm whether HDFS should
- be part of the API description. -->
- <packageset dir="src/core"/>
- <packageset dir="src/mapred"/>
- <packageset dir="src/tools"/>
- <classpath >
- <path refid="classpath" />
- <path refid="jdiff-classpath" />
- <pathelement path="${java.class.path}"/>
- </classpath>
- </javadoc>
- </target>
-
- <!-- Create/refresh the empty Null.java placeholder that the jdiff
- javadoc run in api-report uses as its sourceFiles argument. Use
- Ant's built-in <touch> task rather than exec'ing the Unix "touch"
- binary so the target is portable and errors are reported by Ant. -->
- <target name="write-null">
- <touch file="${jdiff.home}/Null.java"/>
- </target>
- <!-- Generate the jdiff API-change report comparing the stable release
- (${jdiff.stable}) against the current ${version}. -->
- <target name="api-report" depends="ivy-retrieve-jdiff,api-xml">
- <mkdir dir="${jdiff.build.dir}"/>
- <!-- Fix: the sourcepath previously read "src/core,src/hdfs,src,mapred,
- src/tools" - "src,mapred" was a typo for "src/mapred". -->
- <javadoc sourcepath="src/core,src/hdfs,src/mapred,src/tools"
- destdir="${jdiff.build.dir}"
- sourceFiles="${jdiff.home}/Null.java">
- <doclet name="jdiff.JDiff"
- path="${jdiff.jar}:${xerces.jar}">
- <param name="-oldapi" value="hadoop ${jdiff.stable}"/>
- <param name="-newapi" value="hadoop ${version}"/>
- <param name="-oldapidir" value="${jdiff.xml.dir}"/>
- <param name="-newapidir" value="${jdiff.xml.dir}"/>
- <param name="-javadocold" value="${jdiff.stable.javadoc}"/>
- <param name="-javadocnew" value="../../api/"/>
- <param name="-stats"/>
- </doclet>
- <classpath >
- <path refid="classpath" />
- <path refid="jdiff-classpath"/>
- <pathelement path="${java.class.path}"/>
- </classpath>
- </javadoc>
- </target>
-
- <target name="changes-to-html" description="Convert CHANGES.txt into an html file">
- <mkdir dir="${build.docs}"/>
- <!-- Pipe CHANGES.txt through the perl converter; requires perl on PATH. -->
- <exec executable="perl" input="CHANGES.txt" output="${build.docs}/changes.html" failonerror="true">
- <arg value="${changes.src}/changes2html.pl"/>
- </exec>
- <copy todir="${build.docs}">
- <fileset dir="${changes.src}" includes="*.css"/>
- </copy>
- </target>
- <!-- ================================================================== -->
- <!-- D I S T R I B U T I O N -->
- <!-- ================================================================== -->
- <!-- -->
- <!-- ================================================================== -->
- <!-- Assemble the full source+docs distribution tree under dist.dir.
- NOTE(review): bin-package below duplicates most of these steps for
- the binary-only layout; keep the two in sync when editing. -->
- <target name="package" depends="compile, jar, javadoc, docs, cn-docs, api-report, examples, tools-jar, jar-test, ant-tasks, package-librecordio"
- description="Build distribution">
- <mkdir dir="${dist.dir}"/>
- <mkdir dir="${dist.dir}/lib"/>
- <mkdir dir="${dist.dir}/contrib"/>
- <mkdir dir="${dist.dir}/bin"/>
- <mkdir dir="${dist.dir}/docs"/>
- <mkdir dir="${dist.dir}/docs/api"/>
- <mkdir dir="${dist.dir}/docs/jdiff"/>
- <!-- Third-party jars: ivy-resolved (flattened) plus checked-in lib/,
- excluding native libs which are packaged by the script below. -->
- <copy todir="${dist.dir}/lib" includeEmptyDirs="false" flatten="true">
- <fileset dir="${common.ivy.lib.dir}"/>
- </copy>
- <copy todir="${dist.dir}/lib" includeEmptyDirs="false">
- <fileset dir="lib">
- <exclude name="**/native/**"/>
- </fileset>
- </copy>
- <exec dir="${dist.dir}" executable="sh" failonerror="true">
- <env key="BASE_NATIVE_LIB_DIR" value="${lib.dir}/native"/>
- <env key="BUILD_NATIVE_DIR" value="${build.dir}/native"/>
- <env key="DIST_LIB_DIR" value="${dist.dir}/lib/native"/>
- <arg line="${native.src.dir}/packageNativeHadoop.sh"/>
- </exec>
- <subant target="package">
- <!--Pass down the version in case its needed again and the target
- distribution directory so contribs know where to install to.-->
- <property name="version" value="${version}"/>
- <property name="dist.dir" value="${dist.dir}"/>
- <fileset file="${contrib.dir}/build.xml"/>
- </subant>
- <copy todir="${dist.dir}/webapps">
- <fileset dir="${build.webapps}"/>
- </copy>
- <copy todir="${dist.dir}">
- <fileset file="${build.dir}/${name}-*-${version}.jar"/>
- </copy>
-
- <copy todir="${dist.dir}/bin">
- <fileset dir="bin"/>
- </copy>
- <copy todir="${dist.dir}/conf">
- <fileset dir="${conf.dir}" excludes="**/*.template"/>
- </copy>
- <copy todir="${dist.dir}/docs">
- <fileset dir="${build.docs}"/>
- </copy>
- <copy file="ivy.xml" tofile="${dist.dir}/ivy.xml"/>
- <copy todir="${dist.dir}/ivy">
- <fileset dir="ivy"/>
- </copy>
- <copy todir="${dist.dir}">
- <fileset dir=".">
- <include name="*.txt" />
- </fileset>
- </copy>
- <copy todir="${dist.dir}/src" includeEmptyDirs="true">
- <fileset dir="src" excludes="**/*.template **/docs/build/**/*"/>
- </copy>
-
- <copy todir="${dist.dir}/c++" includeEmptyDirs="false">
- <fileset dir="${build.dir}/c++"/>
- </copy>
- <copy todir="${dist.dir}/" file="build.xml"/>
- <!-- Restore the executable bit lost by Ant's copy. -->
- <chmod perm="ugo+x" type="file" parallel="false">
- <fileset dir="${dist.dir}/bin"/>
- <fileset dir="${dist.dir}/src/contrib/">
- <include name="*/bin/*" />
- </fileset>
- <fileset dir="${dist.dir}/src/contrib/ec2/bin/image"/>
- </chmod>
- <chmod perm="ugo+x" type="file">
- <fileset dir="${dist.dir}/src/c++/pipes/debug"/>
- </chmod>
- </target>
- <!-- ================================================================== -->
- <!-- Make release tarball -->
- <!-- ================================================================== -->
- <target name="tar" depends="package" description="Make release tarball">
- <!-- Two tarfilesets so scripts keep mode 755 while everything else is
- packed with mode 664. -->
- <macro_tar param.destfile="${build.dir}/${final.name}.tar.gz">
- <param.listofitems>
- <tarfileset dir="${build.dir}" mode="664">
- <exclude name="${final.name}/bin/*" />
- <exclude name="${final.name}/contrib/*/bin/*" />
- <exclude name="${final.name}/src/contrib/ec2/bin/*" />
- <exclude name="${final.name}/src/contrib/ec2/bin/image/*" />
- <include name="${final.name}/**" />
- </tarfileset>
- <tarfileset dir="${build.dir}" mode="755">
- <include name="${final.name}/bin/*" />
- <include name="${final.name}/contrib/*/bin/*" />
- <include name="${final.name}/src/contrib/ec2/bin/*" />
- <include name="${final.name}/src/contrib/ec2/bin/image/*" />
- </tarfileset>
- </param.listofitems>
- </macro_tar>
- </target>
- <!-- Assemble the binary-only distribution tree (no docs, no javadoc,
- no src copy). NOTE(review): largely duplicates the "package" target
- above; keep the shared steps in sync when editing. -->
- <target name="bin-package" depends="compile, jar, examples, tools-jar, jar-test, ant-tasks, package-librecordio"
- description="assembles artifacts for binary target">
- <mkdir dir="${dist.dir}"/>
- <mkdir dir="${dist.dir}/lib"/>
- <mkdir dir="${dist.dir}/contrib"/>
- <mkdir dir="${dist.dir}/bin"/>
- <copy todir="${dist.dir}/lib" includeEmptyDirs="false" flatten="true">
- <fileset dir="${common.ivy.lib.dir}"/>
- </copy>
- <copy todir="${dist.dir}/lib" includeEmptyDirs="false">
- <fileset dir="lib">
- <exclude name="**/native/**"/>
- </fileset>
- </copy>
- <exec dir="${dist.dir}" executable="sh" failonerror="true">
- <env key="BASE_NATIVE_LIB_DIR" value="${lib.dir}/native"/>
- <env key="BUILD_NATIVE_DIR" value="${build.dir}/native"/>
- <env key="DIST_LIB_DIR" value="${dist.dir}/lib/native"/>
- <arg line="${native.src.dir}/packageNativeHadoop.sh"/>
- </exec>
- <subant target="package">
- <!--Pass down the version in case its needed again and the target
- distribution directory so contribs know where to install to.-->
- <property name="version" value="${version}"/>
- <property name="dist.dir" value="${dist.dir}"/>
- <fileset file="${contrib.dir}/build.xml"/>
- </subant>
- <copy todir="${dist.dir}/webapps">
- <fileset dir="${build.webapps}"/>
- </copy>
- <copy todir="${dist.dir}">
- <fileset file="${build.dir}/${name}-*-${version}.jar"/>
- </copy>
-
- <copy todir="${dist.dir}/bin">
- <fileset dir="bin"/>
- </copy>
- <copy todir="${dist.dir}/conf">
- <fileset dir="${conf.dir}" excludes="**/*.template"/>
- </copy>
- <copy file="ivy.xml" tofile="${dist.dir}/ivy.xml"/>
- <copy todir="${dist.dir}/ivy">
- <fileset dir="ivy"/>
- </copy>
- <copy todir="${dist.dir}">
- <fileset dir=".">
- <include name="*.txt" />
- </fileset>
- </copy>
-
- <copy todir="${dist.dir}/c++" includeEmptyDirs="false">
- <fileset dir="${build.dir}/c++"/>
- </copy>
- <copy todir="${dist.dir}/" file="build.xml"/>
- <chmod perm="ugo+x" type="file" parallel="false">
- <fileset dir="${dist.dir}/bin"/>
- </chmod>
- </target>
- <!-- Pack the bin-package tree into the -bin tarball, stripping src/ and
- docs/; scripts keep mode 755. -->
- <target name="binary" depends="bin-package" description="Make tarball without source and documentation">
- <macro_tar param.destfile="${build.dir}/${final.name}-bin.tar.gz">
- <param.listofitems>
- <tarfileset dir="${build.dir}" mode="664">
- <exclude name="${final.name}/bin/*" />
- <exclude name="${final.name}/src/**" />
- <exclude name="${final.name}/docs/**" />
- <include name="${final.name}/**" />
- </tarfileset>
- <tarfileset dir="${build.dir}" mode="755">
- <include name="${final.name}/bin/*" />
- </tarfileset>
- </param.listofitems>
- </macro_tar>
- </target>
- <!-- ================================================================== -->
- <!-- Perform audit activities for the release -->
- <!-- ================================================================== -->
- <!-- Run the RAT release-audit tool over the packaged distribution. -->
- <target name="releaseaudit" depends="package,ivy-retrieve-releaseaudit" description="Release Audit activities">
- <fail unless="rat.present" message="Failed to load class [${rat.reporting.classname}]."/>
- <java classname="${rat.reporting.classname}" fork="true">
- <classpath refid="releaseaudit-classpath"/>
- <arg value="${build.dir}/${final.name}"/>
- </java>
- </target>
- <!-- ================================================================== -->
- <!-- Clean. Delete the build files, and their directories -->
- <!-- ================================================================== -->
- <target name="clean" depends="clean-contrib, clean-sign" description="Clean. Delete the build files, and their directories">
- <delete dir="${build.dir}"/>
- <delete dir="${docs.src}/build"/>
- <delete dir="${src.docs.cn}/build"/>
- <!-- Generated POMs are removed individually; the ivy/ dir itself stays. -->
- <delete file="${basedir}/ivy/hadoop-core-pom.xml"/>
- <delete file="${basedir}/ivy/hadoop-test-pom.xml"/>
- <delete file="${basedir}/ivy/hadoop-examples-pom.xml"/>
- <delete file="${basedir}/ivy/hadoop-tools-pom.xml"/>
- <delete file="${basedir}/ivy/hadoop-streaming-pom.xml"/>
- </target>
- <!-- Remove generated GPG signature (.asc) files anywhere under the
- project root. The pattern "**/*.asc" already matches any directory
- depth; the previous "**/**/*.asc" was a redundant equivalent. -->
- <target name="clean-sign" description="Clean. Delete .asc files">
- <delete>
- <fileset dir="." includes="**/*.asc"/>
- </delete>
- </target>
-
- <!-- On top of "clean", also delete the downloaded maven-ant-tasks and
- ivy jars so they are re-fetched on the next build. (Fixed the
- "taks" typo in the user-visible description.) -->
- <target name="veryclean" depends="clean" description="Delete mvn ant task jar and ivy ant tasks jar">
- <delete file="${ant_task.jar}"/>
- <delete file="${ivy.jar}"/>
- </target>
- <!-- ================================================================== -->
- <!-- Clean contrib target. For now, must be called explicitly -->
- <!-- Using subant instead of ant as a workaround for 30569 -->
- <!-- ================================================================== -->
- <target name="clean-contrib">
- <subant target="clean">
- <fileset file="src/contrib/build.xml"/>
- </subant>
- </target>
-
- <!-- Run the libhdfs native test suite via its Makefile; only enabled
- when check-c++-libhdfs has set islibhdfs. -->
- <target name="test-c++-libhdfs" depends="compile-c++-libhdfs, compile-core" if="islibhdfs">
- <delete dir="${test.libhdfs.dir}"/>
- <mkdir dir="${test.libhdfs.dir}"/>
- <mkdir dir="${test.libhdfs.dir}/logs"/>
- <mkdir dir="${test.libhdfs.dir}/hdfs/name"/>
- <exec dir="${build.c++.libhdfs}" executable="${make.cmd}" failonerror="true">
- <env key="OS_NAME" value="${os.name}"/>
- <env key="OS_ARCH" value="${os.arch}"/>
- <env key="JVM_ARCH" value="${jvm.arch}"/>
- <env key="LIBHDFS_BUILD_DIR" value="${build.c++.libhdfs}"/>
- <env key="HADOOP_HOME" value="${basedir}"/>
- <env key="HADOOP_CONF_DIR" value="${test.libhdfs.conf.dir}"/>
- <env key="HADOOP_LOG_DIR" value="${test.libhdfs.dir}/logs"/>
- <env key="LIBHDFS_SRC_DIR" value="${c++.libhdfs.src}"/>
- <env key="LIBHDFS_INSTALL_DIR" value="${install.c++}/lib"/>
- <env key="LIB_DIR" value="${common.ivy.lib.dir}"/>
- <arg value="test"/>
- </exec>
- </target>
- <!-- ================================================================== -->
- <!-- librecordio targets. -->
- <!-- ================================================================== -->
- <!-- Build the native librecordio library; all three targets below are
- no-ops unless the "librecordio" property is set. -->
- <target name="compile-librecordio" depends="init" if="librecordio" >
- <mkdir dir="${build.librecordio}"/>
- <exec dir="${librecordio.src}" executable="${make.cmd}" failonerror="true">
- <env key="XERCESCROOT" value="${xercescroot}"/>
- <env key="LIBRECORDIO_BUILD_DIR" value="${build.librecordio}"/>
- </exec>
- </target>
-
- <target name="test-librecordio" depends="compile-librecordio, compile-core" if="librecordio">
- <delete dir="${librecordio.test.dir}"/>
- <mkdir dir="${librecordio.test.dir}"/>
- <exec dir="${librecordio.src}/test" executable="${make.cmd}" failonerror="true">
- <env key="HADOOP_HOME" value="${basedir}"/>
- <env key="XERCESCROOT" value="${xercescroot}"/>
- <env key="LIBRECORDIO_BUILD_DIR" value="${build.librecordio}"/>
- <env key="LIBRECORDIO_TEST_DIR" value="${librecordio.test.dir}"/>
- <arg value="all"/>
- </exec>
- </target>
- <!-- Stage librecordio artifacts into the distribution, skipping tests
- and intermediate .so/.o files. -->
- <target name="package-librecordio" depends="compile-librecordio" if="librecordio">
- <mkdir dir="${dist.dir}/librecordio"/>
- <copy todir="${dist.dir}/librecordio">
- <fileset dir="${build.librecordio}" casesensitive="yes" followsymlinks="false">
- <exclude name="**/tests/**"/>
- <exclude name="*.so"/>
- <exclude name="*.o"/>
- </fileset>
- </copy>
- <chmod perm="ugo+x" type="file">
- <fileset dir="${dist.dir}/librecordio"/>
- </chmod>
- </target>
-
- <!-- Regenerate the autotools configure scripts for the C++ components
- (utils, pipes, pipes examples); gated on the compile.c++ property. -->
- <target name="create-c++-configure" depends="init" if="compile.c++">
- <exec executable="autoreconf" dir="${c++.utils.src}" searchpath="yes"
- failonerror="yes">
- <arg value="-if"/>
- </exec>
- <exec executable="autoreconf" dir="${c++.pipes.src}" searchpath="yes"
- failonerror="yes">
- <arg value="-if"/>
- </exec>
- <exec executable="autoreconf" dir="${c++.examples.pipes.src}"
- searchpath="yes" failonerror="yes">
- <arg value="-if"/>
- </exec>
- <antcall target="create-c++-configure-libhdfs"/>
- </target>
-
- <target name="create-c++-configure-libhdfs" depends="check-c++-libhdfs" if="islibhdfs">
- <exec executable="autoreconf" dir="${c++.libhdfs.src}"
- searchpath="yes" failonerror="yes">
- <arg value="-if"/>
- </exec>
- </target>
- <!-- Set need.*.makefile flags for each C++ build dir that has no
- Makefile yet, so the create-*-makefile targets only configure once. -->
- <target name="check-c++-makefiles" depends="init" if="compile.c++">
- <condition property="need.c++.utils.makefile">
- <not> <available file="${build.c++.utils}/Makefile"/> </not>
- </condition>
- <condition property="need.c++.pipes.makefile">
- <not> <available file="${build.c++.pipes}/Makefile"/> </not>
- </condition>
- <condition property="need.c++.examples.pipes.makefile">
- <not> <available file="${build.c++.examples.pipes}/Makefile"/> </not>
- </condition>
- </target>
- <!-- islibhdfs is true only when both compile.c++ and libhdfs are set. -->
- <target name="check-c++-libhdfs">
- <condition property="islibhdfs">
- <and>
- <isset property="compile.c++"/>
- <isset property="libhdfs"/>
- </and>
- </condition>
- </target>
- <target name="check-c++-makefile-libhdfs" depends="init,check-c++-libhdfs" if="islibhdfs">
- <condition property="need.c++.libhdfs.makefile">
- <not> <available file="${build.c++.libhdfs}/Makefile"/> </not>
- </condition>
- </target>
- <target name="create-c++-libhdfs-makefile" depends="check-c++-makefile-libhdfs"
- if="need.c++.libhdfs.makefile">
- <mkdir dir="${build.c++.libhdfs}"/>
- <chmod file="${c++.libhdfs.src}/configure" perm="ugo+x"/>
- <exec executable="${c++.libhdfs.src}/configure" dir="${build.c++.libhdfs}"
- failonerror="yes">
- <!-- Work around an autoconf malloc cross-check that misfires here. -->
- <env key="ac_cv_func_malloc_0_nonnull" value="yes"/>
- <env key="JVM_ARCH" value="${jvm.arch}"/>
- <arg value="--prefix=${install.c++}"/>
- </exec>
- </target>
- <!-- configure + make install chain for the C++ utils, pipes and pipes
- examples; each create-*-makefile runs configure only when its
- need.*.makefile flag was set by check-c++-makefiles. -->
- <target name="create-c++-utils-makefile" depends="check-c++-makefiles"
- if="need.c++.utils.makefile">
- <mkdir dir="${build.c++.utils}"/>
- <chmod file="${c++.utils.src}/configure" perm="ugo+x"/>
- <exec executable="${c++.utils.src}/configure" dir="${build.c++.utils}"
- failonerror="yes">
- <arg value="--prefix=${install.c++}"/>
- </exec>
- </target>
- <target name="compile-c++-utils" depends="create-c++-utils-makefile"
- if="compile.c++">
- <exec executable="${make.cmd}" dir="${build.c++.utils}" searchpath="yes"
- failonerror="yes">
- <arg value="install"/>
- </exec>
- </target>
- <target name="create-c++-pipes-makefile" depends="check-c++-makefiles"
- if="need.c++.pipes.makefile">
- <mkdir dir="${build.c++.pipes}"/>
- <chmod file="${c++.pipes.src}/configure" perm="ugo+x"/>
- <exec executable="${c++.pipes.src}/configure" dir="${build.c++.pipes}"
- failonerror="yes">
- <arg value="--prefix=${install.c++}"/>
- </exec>
- </target>
- <target name="compile-c++-pipes"
- depends="create-c++-pipes-makefile,compile-c++-utils"
- if="compile.c++">
- <exec executable="${make.cmd}" dir="${build.c++.pipes}" searchpath="yes"
- failonerror="yes">
- <arg value="install"/>
- </exec>
- </target>
- <!-- Umbrella target: building pipes pulls in utils via its depends. -->
- <target name="compile-c++"
- depends="compile-c++-pipes"/>
- <target name="create-c++-examples-pipes-makefile"
- depends="check-c++-makefiles"
- if="need.c++.examples.pipes.makefile">
- <mkdir dir="${build.c++.examples.pipes}"/>
- <chmod file="${c++.examples.pipes.src}/configure" perm="ugo+x"/>
- <exec executable="${c++.examples.pipes.src}/configure"
- dir="${build.c++.examples.pipes}"
- failonerror="yes">
- <arg value="--prefix=${install.c++.examples}"/>
- <arg value="--with-hadoop-utils=${install.c++}"/>
- <arg value="--with-hadoop-pipes=${install.c++}"/>
- </exec>
- </target>
- <target name="compile-c++-examples-pipes"
- depends="create-c++-examples-pipes-makefile,compile-c++-pipes"
- if="compile.c++">
- <exec executable="${make.cmd}" dir="${build.c++.examples.pipes}" searchpath="yes"
- failonerror="yes">
- <arg value="install"/>
- </exec>
- </target>
- <!-- Umbrella target for the pipes example binaries. -->
- <target name="compile-c++-examples"
- depends="compile-c++-examples-pipes"/>
- <target name="compile-c++-libhdfs" depends="create-c++-libhdfs-makefile" if="islibhdfs">
- <exec executable="${make.cmd}" dir="${build.c++.libhdfs}" searchpath="yes"
- failonerror="yes">
- <env key="ac_cv_func_malloc_0_nonnull" value="yes"/>
- <env key="JVM_ARCH" value="${jvm.arch}"/>
- <arg value="install"/>
- </exec>
- </target>
- <target name="compile-ant-tasks" depends="compile-core">
- <javac
- encoding="${build.encoding}"
- srcdir="${anttasks.dir}"
- includes="org/apache/hadoop/ant/**/*.java"
- destdir="${build.anttasks}"
- debug="${javac.debug}"
- optimize="${javac.optimize}"
- target="${javac.version}"
- source="${javac.version}"
- deprecation="${javac.deprecation}">
- <compilerarg line="${javac.args}"/>
- <classpath refid="classpath"/>
- </javac>
- </target>
- <target name="ant-tasks" depends="jar, compile-ant-tasks">
- <copy file="${anttasks.dir}/org/apache/hadoop/ant/antlib.xml"
- todir="${build.anttasks}/org/apache/hadoop/ant"/>
- <jar destfile="${build.dir}/${ant.final.name}.jar">
- <fileset dir="${build.anttasks}"/>
- </jar>
- </target>
- <target name="clover" depends="clover.setup, clover.info" description="Instrument the Unit tests using Clover. To use, specify -Dclover.home=<base of clover installation> -Drun.clover=true on the command line."/>
- <target name="clover.setup" if="clover.enabled">
- <taskdef resource="cloverlib.xml" classpath="${clover.jar}"/>
- <mkdir dir="${clover.db.dir}"/>
- <clover-setup initString="${clover.db.dir}/hadoop_coverage.db">
- <fileset dir="src" includes="core/**/* tools/**/* hdfs/**/* mapred/**/*"/>
- </clover-setup>
- </target>
- <target name="clover.info" unless="clover.present">
- <echo>
- Clover not found. Code coverage reports disabled.
- </echo>
- </target>
- <target name="clover.check">
- <fail unless="clover.present">
- ##################################################################
- Clover not found.
- Please specify -Dclover.home=<base of clover installation>
- on the command line.
- ##################################################################
- </fail>
- </target>
- <target name="generate-clover-reports" depends="clover.check, clover">
- <mkdir dir="${clover.report.dir}"/>
- <clover-report>
- <current outfile="${clover.report.dir}" title="${final.name}">
- <format type="html"/>
- </current>
- </clover-report>
- <clover-report>
- <current outfile="${clover.report.dir}/clover.xml" title="${final.name}">
- <format type="xml"/>
- </current>
- </clover-report>
- </target>
- <target name="findbugs.check" depends="check-for-findbugs" unless="findbugs.present">
- <fail message="'findbugs.home' is not defined. Please pass -Dfindbugs.home=<base of Findbugs installation> to Ant on the command-line." />
- </target>
- <target name="patch.check" unless="patch.file">
- <fail message="'patch.file' is not defined. Please pass -Dpatch.file=<location of patch file> to Ant on the command-line." />
- </target>
- <target name="test-patch" depends="patch.check,findbugs.check,forrest.check">
- <exec executable="bash" failonerror="true">
- <arg value="${basedir}/src/test/bin/test-patch.sh"/>
- <arg value="DEVELOPER"/>
- <arg value="${patch.file}"/>
- <arg value="${scratch.dir}"/>
- <arg value="${svn.cmd}"/>
- <arg value="${grep.cmd}"/>
- <arg value="${patch.cmd}"/>
- <arg value="${findbugs.home}"/>
- <arg value="${forrest.home}"/>
- <arg value="${basedir}"/>
- <arg value="${java5.home}"/>
- </exec>
- </target>
- <target name="hudson-test-patch" depends="findbugs.check,forrest.check">
- <exec executable="bash" failonerror="true">
- <arg value="${basedir}/src/test/bin/test-patch.sh"/>
- <arg value="HUDSON"/>
- <arg value="${scratch.dir}"/>
- <arg value="${support.dir}"/>
- <arg value="${ps.cmd}"/>
- <arg value="${wget.cmd}"/>
- <arg value="${jiracli.cmd}"/>
- <arg value="${svn.cmd}"/>
- <arg value="${grep.cmd}"/>
- <arg value="${patch.cmd}"/>
- <arg value="${findbugs.home}"/>
- <arg value="${forrest.home}"/>
- <arg value="${eclipse.home}"/>
- <arg value="${python.home}"/>
- <arg value="${basedir}"/>
- <arg value="${trigger.url}"/>
- <arg value="${jira.passwd}"/>
- <arg value="${java5.home}"/>
- </exec>
- </target>
-
- <target name="eclipse-files" depends="init"
- description="Generate files for Eclipse">
- <pathconvert property="eclipse.project">
- <path path="${basedir}"/>
- <regexpmapper from="^.*/([^/]+)$$" to="\1" handledirsep="yes"/>
- </pathconvert>
- <copy todir="." overwrite="true">
- <fileset dir=".eclipse.templates">
- <exclude name="**/README.txt"/>
- </fileset>
- <filterset>
- <filter token="PROJECT" value="${eclipse.project}"/>
- </filterset>
- </copy>
- </target>
- <target name="ivy-init-dirs">
- <mkdir dir="${build.ivy.dir}" />
- <mkdir dir="${build.ivy.lib.dir}" />
- <mkdir dir="${build.ivy.report.dir}" />
- </target>
- <target name="ivy-probe-antlib" >
- <condition property="ivy.found">
- <typefound uri="antlib:org.apache.ivy.ant" name="cleancache"/>
- </condition>
- </target>
- <target name="ivy-download" description="To download ivy" unless="offline">
- <get src="${ivy_repo_url}" dest="${ivy.jar}" usetimestamp="true"/>
- </target>
- <!--
- To avoid Ivy leaking things across big projects, always load Ivy in the same classloader.
- Also note how we skip loading Ivy if it is already there, just to make sure all is well.
- -->
- <target name="ivy-init-antlib" depends="ivy-download,ivy-init-dirs,ivy-probe-antlib" unless="ivy.found">
- <typedef uri="antlib:org.apache.ivy.ant" onerror="fail"
- loaderRef="ivyLoader">
- <classpath>
- <pathelement location="${ivy.jar}"/>
- </classpath>
- </typedef>
- <fail >
- <condition >
- <not>
- <typefound uri="antlib:org.apache.ivy.ant" name="cleancache"/>
- </not>
- </condition>
- You need Apache Ivy 2.0 or later from http://ant.apache.org/
- It could not be loaded from ${ivy_repo_url}
- </fail>
- </target>
- <target name="ivy-init" depends="ivy-init-antlib" >
- <!--Configure Ivy by reading in the settings file
- If anyone has already read in a settings file into this settings ID, it gets priority
- -->
- <ivy:configure settingsid="${ant.project.name}.ivy.settings" file="${ivysettings.xml}" override='false'/>
- </target>
- <target name="ivy-resolve" depends="ivy-init">
- <ivy:resolve settingsRef="${ant.project.name}.ivy.settings"/>
- </target>
- <target name="ivy-resolve-javadoc" depends="ivy-init">
- <ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="javadoc"/>
- </target>
- <target name="ivy-resolve-releaseaudit" depends="ivy-init">
- <ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="releaseaudit"/>
- </target>
- <target name="ivy-resolve-test" depends="ivy-init">
- <ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="test" />
- </target>
- <target name="ivy-resolve-common" depends="ivy-init">
- <ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="common" />
- </target>
- <target name="ivy-resolve-jdiff" depends="ivy-init">
- <ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="jdiff" />
- </target>
- <target name="ivy-resolve-checkstyle" depends="ivy-init">
- <ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="checkstyle"/>
- </target>
- <target name="ivy-retrieve" depends="ivy-resolve"
- description="Retrieve Ivy-managed artifacts">
- <ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"
- pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"/>
- </target>
- <target name="ivy-retrieve-checkstyle" depends="ivy-resolve-checkstyle"
- description="Retrieve Ivy-managed artifacts for the checkstyle configurations">
- <ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"
- pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"/>
- <ivy:cachepath pathid="checkstyle-classpath" conf="checkstyle"/>
- </target>
- <target name="ivy-retrieve-jdiff" depends="ivy-resolve-jdiff"
- description="Retrieve Ivy-managed artifacts for the javadoc configurations">
- <ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"
- pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"/>
- <ivy:cachepath pathid="jdiff-classpath" conf="jdiff"/>
- </target>
- <target name="ivy-retrieve-javadoc" depends="ivy-resolve-javadoc"
- description="Retrieve Ivy-managed artifacts for the javadoc configurations">
- <ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"
- pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"/>
- <ivy:cachepath pathid="javadoc-classpath" conf="javadoc"/>
- </target>
- <target name="ivy-retrieve-test" depends="ivy-resolve-test"
- description="Retrieve Ivy-managed artifacts for the test configurations">
- <ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"
- pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"/>
- <ivy:cachepath pathid="test.classpath" conf="test"/>
- </target>
- <target name="ivy-retrieve-common" depends="ivy-resolve-common"
- description="Retrieve Ivy-managed artifacts for the compile configurations">
- <ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"
- pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"/>
- <ivy:cachepath pathid="ivy-common.classpath" conf="common"/>
- </target>
- <target name="ivy-retrieve-releaseaudit" depends="ivy-resolve-releaseaudit"
- description="Retrieve Ivy-managed artifacts for the compile configurations">
- <ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"
- pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}" />
- <ivy:cachepath pathid="releaseaudit-classpath" conf="releaseaudit"/>
- <available classname="${rat.reporting.classname}"
- classpathref="releaseaudit-classpath" property="rat.present" value="true"/>
- </target>
- <target name="ivy-report" depends="ivy-resolve-releaseaudit"
- description="Generate">
- <ivy:report todir="${build.ivy.report.dir}" settingsRef="${ant.project.name}.ivy.settings"/>
- <echo>
- Reports generated:${build.ivy.report.dir}
- </echo>
- </target>
- <target name="ant-task-download" description="To download mvn-ant-task">
- <get src="${ant_task_repo_url}" dest="${ant_task.jar}" usetimestamp="true"/>
- </target>
- <target name="mvn-taskdef" depends="ant-task-download">
- <path id="mvn-ant-task.classpath" path="${ant_task.jar}"/>
- <typedef resource="org/apache/maven/artifact/ant/antlib.xml"
- uri="urn:maven-artifact-ant"
- classpathref="mvn-ant-task.classpath"/>
- </target>
- <target name="mvn-install" depends="mvn-taskdef,bin-package,set-version"
- description="To install hadoop core and test jars to local filesystem's m2 cache">
- <artifact:pom file="${hadoop-core.pom}" id="hadoop.core"/>
- <artifact:pom file="${hadoop-test.pom}" id="hadoop.test"/>
- <artifact:pom file="${hadoop-examples.pom}" id="hadoop.examples"/>
- <artifact:pom file="${hadoop-tools.pom}" id="hadoop.tools"/>
- <artifact:pom file="${hadoop-streaming.pom}" id="hadoop.streaming"/>
- <artifact:install file="${hadoop-core.jar}">
- <pom refid="hadoop.core"/>
- </artifact:install>
- <artifact:install file="${hadoop-test.jar}">
- <pom refid="hadoop.test"/>
- </artifact:install>
- <artifact:install file="${hadoop-tools.jar}">
- <pom refid="hadoop.tools"/>
- </artifact:install>
- <artifact:install file="${hadoop-examples.jar}">
- <pom refid="hadoop.examples"/>
- </artifact:install>
- <artifact:install file="${hadoop-streaming.jar}">
- <pom refid="hadoop.streaming"/>
- </artifact:install>
- </target>
- <target name="mvn-deploy" depends="mvn-taskdef, bin-package, set-version, signanddeploy, simpledeploy"
- description="To deploy hadoop core and test jar's to apache maven repository"/>
- <target name="signanddeploy" if="staging" depends="sign">
- <artifact:pom file="${hadoop-core.pom}" id="hadoop.core"/>
- <artifact:pom file="${hadoop-test.pom}" id="hadoop.core.test"/>
- <artifact:pom file="${hadoop-examples.pom}" id="hadoop.examples"/>
- <artifact:pom file="${hadoop-tools.pom}" id="hadoop.tools"/>
- <artifact:pom file="${hadoop-streaming.pom}" id="hadoop.streaming"/>
- <artifact:install-provider artifactId="wagon-http"
- version="${wagon-http.version}"/>
- <artifact:deploy file="${hadoop-core.jar}">
- <remoteRepository id="apache.staging.https" url="${asfstagingrepo}"/>
- <pom refid="hadoop.core"/>
- <attach file="${hadoop-core.jar}.asc" type="jar.asc"/>
- <attach file="${hadoop-core.pom}.asc" type="pom.asc"/>
- </artifact:deploy>
- <artifact:deploy file="${hadoop-test.jar}">
- <remoteRepository id="apache.staging.https" url="${asfstagingrepo}"/>
- <pom refid="hadoop.core.test"/>
- <attach file="${hadoop-test.jar}.asc" type="jar.asc"/>
- <attach file="${hadoop-test.pom}.asc" type="pom.asc"/>
- </artifact:deploy>
- <artifact:deploy file="${hadoop-tools.jar}">
- <remoteRepository id="apache.staging.https" url="${asfstagingrepo}"/>
- <pom refid="hadoop.tools"/>
- <attach file="${hadoop-tools.jar}.asc" type="jar.asc"/>
- <attach file="${hadoop-tools.pom}.asc" type="pom.asc"/>
- </artifact:deploy>
- <artifact:deploy file="${hadoop-examples.jar}">
- <remoteRepository id="apache.staging.https" url="${asfstagingrepo}"/>
- <pom refid="hadoop.examples"/>
- <attach file="${hadoop-examples.jar}.asc" type="jar.asc"/>
- <attach file="${hadoop-examples.pom}.asc" type="pom.asc"/>
- </artifact:deploy>
- <artifact:deploy file="${hadoop-streaming.jar}">
- <remoteRepository id="apache.staging.https" url="${asfstagingrepo}"/>
- <pom refid="hadoop.streaming"/>
- <attach file="${hadoop-streaming.jar}.asc" type="jar.asc"/>
- <attach file="${hadoop-streaming.pom}.asc" type="pom.asc"/>
- </artifact:deploy>
- </target>
- <target name="sign" depends="clean-sign" if="staging">
- <input message="password:>" addproperty="gpg.passphrase">
- <handler classname="org.apache.tools.ant.input.SecureInputHandler" />
- </input>
- <macrodef name="sign-artifact" description="Signs the artifact">
- <attribute name="input.file"/>
- <attribute name="output.file" default="@{input.file}.asc"/>
- <attribute name="gpg.passphrase"/>
- <sequential>
- <echo>Signing @{input.file} Sig File: @{output.file}</echo>
- <exec executable="gpg" >
- <arg value="--armor"/>
- <arg value="--output"/>
- <arg value="@{output.file}"/>
- <arg value="--passphrase"/>
- <arg value="@{gpg.passphrase}"/>
- <arg value="--detach-sig"/>
- <arg value="@{input.file}"/>
- </exec>
- </sequential>
- </macrodef>
- <sign-artifact input.file="${hadoop-core.jar}"
- output.file="${hadoop-core.jar}.asc" gpg.passphrase="${gpg.passphrase}"/>
- <sign-artifact input.file="${hadoop-test.jar}"
- output.file="${hadoop-test.jar}.asc" gpg.passphrase="${gpg.passphrase}"/>
- <sign-artifact input.file="${hadoop-tools.jar}"
- output.file="${hadoop-tools.jar}.asc" gpg.passphrase="${gpg.passphrase}"/>
- <sign-artifact input.file="${hadoop-examples.jar}"
- output.file="${hadoop-examples.jar}.asc" gpg.passphrase="${gpg.passphrase}"/>
- <sign-artifact input.file="${hadoop-streaming.jar}"
- output.file="${hadoop-streaming.jar}.asc" gpg.passphrase="${gpg.passphrase}"/>
- <sign-artifact input.file="${hadoop-core.pom}"
- output.file="${hadoop-core.pom}.asc" gpg.passphrase="${gpg.passphrase}"/>
- <sign-artifact input.file="${hadoop-test.pom}"
- output.file="${hadoop-test.pom}.asc" gpg.passphrase="${gpg.passphrase}"/>
- <sign-artifact input.file="${hadoop-tools.pom}"
- output.file="${hadoop-tools.pom}.asc" gpg.passphrase="${gpg.passphrase}"/>
- <sign-artifact input.file="${hadoop-examples.pom}"
- output.file="${hadoop-examples.pom}.asc" gpg.passphrase="${gpg.passphrase}"/>
- <sign-artifact input.file="${hadoop-streaming.pom}"
- output.file="${hadoop-streaming.pom}.asc" gpg.passphrase="${gpg.passphrase}"/>
- </target>
- <target name="simpledeploy" unless="staging">
- <artifact:pom file="${hadoop-core.pom}" id="hadoop.core"/>
- <artifact:pom file="${hadoop-test.pom}" id="hadoop.test"/>
- <artifact:pom file="${hadoop-examples.pom}" id="hadoop.examples"/>
- <artifact:pom file="${hadoop-tools.pom}" id="hadoop.tools"/>
- <artifact:pom file="${hadoop-streaming.pom}" id="hadoop.streaming"/>
- <artifact:install-provider artifactId="wagon-http" version="${wagon-http.version}"/>
- <artifact:deploy file="${hadoop-core.jar}">
- <remoteRepository id="apache.snapshots.https" url="${asfsnapshotrepo}"/>
- <pom refid="hadoop.core"/>
- </artifact:deploy>
- <artifact:deploy file="${hadoop-test.jar}">
- <remoteRepository id="apache.snapshots.https" url="${asfsnapshotrepo}"/>
- <pom refid="hadoop.test"/>
- </artifact:deploy>
- <artifact:deploy file="${hadoop-examples.jar}">
- <remoteRepository id="apache.snapshots.https" url="${asfsnapshotrepo}"/>
- <pom refid="hadoop.examples"/>
- </artifact:deploy>
- <artifact:deploy file="${hadoop-tools.jar}">
- <remoteRepository id="apache.snapshots.https" url="${asfsnapshotrepo}"/>
- <pom refid="hadoop.tools"/>
- </artifact:deploy>
- <artifact:deploy file="${hadoop-streaming.jar}">
- <remoteRepository id="apache.snapshots.https" url="${asfsnapshotrepo}"/>
- <pom refid="hadoop.streaming"/>
- </artifact:deploy>
- </target>
- <target name="set-version">
- <delete file="${hadoop-core.pom}"/>
- <delete file="${hadoop-test.pom}"/>
- <delete file="${hadoop-examples.pom}"/>
- <delete file="${hadoop-tools.pom}"/>
- <delete file="${hadoop-streaming.pom}"/>
- <copy file="${hadoop-core-pom-template.xml}" tofile="${hadoop-core.pom}"/>
- <copy file="${hadoop-test-pom-template.xml}" tofile="${hadoop-test.pom}"/>
- <copy file="${hadoop-examples-pom-template.xml}" tofile="${hadoop-examples.pom}"/>
- <copy file="${hadoop-tools-pom-template.xml}" tofile="${hadoop-tools.pom}"/>
- <copy file="${hadoop-streaming-pom-template.xml}" tofile="${hadoop-streaming.pom}"/>
- <replaceregexp byline="true">
- <regexp pattern="@version"/>
- <substitution expression="${version}"/>
- <fileset dir="${basedir}/ivy">
- <include name="hadoop-core-pom.xml"/>
- <include name="hadoop-test-pom.xml"/>
- <include name="hadoop-tools-pom.xml"/>
- <include name="hadoop-examples-pom.xml"/>
- <include name="hadoop-streaming-pom.xml"/>
- </fileset>
- </replaceregexp>
- </target>
- </project>
|