- <?xml version="1.0"?>
- <!--
- Licensed to the Apache Software Foundation (ASF) under one or more
- contributor license agreements. See the NOTICE file distributed with
- this work for additional information regarding copyright ownership.
- The ASF licenses this file to You under the Apache License, Version 2.0
- (the "License"); you may not use this file except in compliance with
- the License. You may obtain a copy of the License at
- http://www.apache.org/licenses/LICENSE-2.0
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
- -->
- <project name="Hadoop" default="compile"
- xmlns:artifact="urn:maven-artifact-ant"
- xmlns:ivy="antlib:org.apache.ivy.ant">
- <!-- Load all the default properties, and any the user wants -->
- <!-- to contribute (without having to type -D or edit this file) -->
- <property file="${user.home}/build.properties" />
- <property file="${basedir}/build.properties" />
-
- <property name="Name" value="Hadoop"/>
- <property name="name" value="hadoop"/>
- <property name="version" value="0.20.206.0-SNAPSHOT"/>
- <property name="final.name" value="${name}-${version}"/>
- <property name="test.final.name" value="${name}-test-${version}"/>
- <property name="year" value="2009"/>
- <property name="package.release" value="1"/>
-
- <property name="core.final.name" value="${name}-core-${version}"/>
- <property name="test.final.name" value="${name}-test-${version}"/>
- <property name="examples.final.name" value="${name}-examples-${version}"/>
- <property name="tools.final.name" value="${name}-tools-${version}"/>
- <property name="ant.final.name" value="${name}-ant-${version}"/>
- <property name="streaming.final.name" value="${name}-streaming-${version}"/>
- <property name="src.dir" value="${basedir}/src"/>
- <property name="core.src.dir" value="${src.dir}/core"/>
- <property name="mapred.src.dir" value="${src.dir}/mapred"/>
- <property name="hdfs.src.dir" value="${src.dir}/hdfs"/>
- <property name="native.src.dir" value="${basedir}/src/native"/>
- <property name="examples.dir" value="${basedir}/src/examples"/>
- <property name="anttasks.dir" value="${basedir}/src/ant"/>
- <property name="lib.dir" value="${basedir}/lib"/>
- <property name="conf.dir" value="${basedir}/conf"/>
- <property name="contrib.dir" value="${basedir}/src/contrib"/>
- <property name="docs.src" value="${basedir}/src/docs"/>
- <property name="src.docs.cn" value="${basedir}/src/docs/cn"/>
- <property name="changes.src" value="${docs.src}/changes"/>
- <property name="c++.src" value="${basedir}/src/c++"/>
- <property name="c++.utils.src" value="${c++.src}/utils"/>
- <property name="c++.pipes.src" value="${c++.src}/pipes"/>
- <property name="c++.examples.pipes.src" value="${examples.dir}/pipes"/>
- <property name="c++.libhdfs.src" value="${c++.src}/libhdfs"/>
- <property name="librecordio.src" value="${c++.src}/librecordio"/>
- <property name="tools.src" value="${basedir}/src/tools"/>
- <property name="package.prefix" value="/usr"/>
- <property name="package.conf.dir" value="/etc/hadoop"/>
- <property name="package.log.dir" value="/var/log/hadoop"/>
- <property name="package.pid.dir" value="/var/run/hadoop"/>
- <property name="xercescroot" value=""/>
- <property name="build.dir" value="${basedir}/build"/>
- <property name="build.classes" value="${build.dir}/classes"/>
- <property name="build.src" value="${build.dir}/src"/>
- <property name="build.tools" value="${build.dir}/tools"/>
- <property name="build.webapps" value="${build.dir}/webapps"/>
- <property name="build.examples" value="${build.dir}/examples"/>
- <property name="build.anttasks" value="${build.dir}/ant"/>
- <property name="build.librecordio" value="${build.dir}/librecordio"/>
- <!-- convert spaces to _ so that Mac OS doesn't break things -->
- <exec executable="sed" inputstring="${os.name}"
- outputproperty="nonspace.os">
- <arg value="s/ /_/g"/>
- </exec>
- <property name="build.platform"
- value="${nonspace.os}-${os.arch}-${sun.arch.data.model}"/>
- <property name="jvm.arch"
- value="${sun.arch.data.model}"/>
- <property name="build.native" value="${build.dir}/native/${build.platform}"/>
- <property name="build.c++" value="${build.dir}/c++-build/${build.platform}"/>
- <property name="build.c++.utils" value="${build.c++}/utils"/>
- <property name="build.c++.pipes" value="${build.c++}/pipes"/>
- <property name="build.c++.libhdfs" value="${build.c++}/libhdfs"/>
- <property name="build.c++.examples.pipes"
- value="${build.c++}/examples/pipes"/>
- <property name="build.docs" value="${build.dir}/docs"/>
- <property name="build.docs.cn" value="${build.dir}/docs/cn"/>
- <property name="build.javadoc" value="${build.docs}/api"/>
- <property name="build.javadoc.dev" value="${build.docs}/dev-api"/>
- <property name="build.encoding" value="ISO-8859-1"/>
- <property name="install.c++" value="${build.dir}/c++/${build.platform}"/>
- <property name="install.c++.examples"
- value="${build.dir}/c++-examples/${build.platform}"/>
- <property name="test.src.dir" value="${basedir}/src/test"/>
- <property name="test.lib.dir" value="${basedir}/src/test/lib"/>
- <property name="test.build.dir" value="${build.dir}/test"/>
- <property name="test.generated.dir" value="${test.build.dir}/src"/>
- <property name="test.build.data" value="${test.build.dir}/data"/>
- <property name="test.cache.data" value="${test.build.dir}/cache"/>
- <property name="test.debug.data" value="${test.build.dir}/debug"/>
- <property name="test.log.dir" value="${test.build.dir}/logs"/>
- <property name="test.build.classes" value="${test.build.dir}/classes"/>
- <property name="test.build.testjar" value="${test.build.dir}/testjar"/>
- <property name="test.build.testshell" value="${test.build.dir}/testshell"/>
- <property name="test.build.extraconf" value="${test.build.dir}/extraconf"/>
- <property name="test.build.javadoc" value="${test.build.dir}/docs/api"/>
- <property name="test.build.javadoc.dev" value="${test.build.dir}/docs/dev-api"/>
- <property name="test.include" value="Test*"/>
- <property name="test.classpath.id" value="test.classpath"/>
- <property name="test.output" value="no"/>
- <property name="test.timeout" value="900000"/>
- <property name="test.junit.output.format" value="plain"/>
- <property name="test.junit.fork.mode" value="perTest" />
- <property name="test.junit.printsummary" value="yes" />
- <property name="test.junit.haltonfailure" value="no" />
- <property name="test.junit.maxmemory" value="512m" />
- <property name="test.tools.input.dir" value="${basedir}/src/test/tools/data"/>
-
- <property name="test.commit.tests.file" value="${test.src.dir}/commit-tests" />
- <property name="test.smoke.tests.file" value="${test.src.dir}/smoke-tests" />
- <property name="test.all.tests.file" value="${test.src.dir}/all-tests" />
- <property name="test.libhdfs.conf.dir" value="${c++.libhdfs.src}/tests/conf"/>
- <property name="test.libhdfs.dir" value="${test.build.dir}/libhdfs"/>
- <property name="librecordio.test.dir" value="${test.build.dir}/librecordio"/>
- <property name="web.src.dir" value="${basedir}/src/web"/>
- <property name="src.webapps" value="${basedir}/src/webapps"/>
- <property name="javadoc.link.java"
- value="http://java.sun.com/javase/6/docs/api/"/>
- <property name="javadoc.packages" value="org.apache.hadoop.*"/>
- <property name="javadoc.maxmemory" value="512m" />
- <property name="dist.dir" value="${build.dir}/${final.name}"/>
- <property name="javac.debug" value="on"/>
- <property name="javac.optimize" value="on"/>
- <property name="javac.deprecation" value="off"/>
- <property name="javac.version" value="1.6"/>
- <property name="javac.args" value=""/>
- <property name="javac.args.warnings" value="-Xlint:unchecked"/>
- <property name="clover.db.dir" location="${build.dir}/test/clover/db"/>
- <property name="clover.report.dir" location="${build.dir}/test/clover/reports"/>
- <property name="rat.reporting.classname" value="rat.Report"/>
- <property name="jdiff.build.dir" value="${build.docs}/jdiff"/>
- <property name="jdiff.xml.dir" value="${lib.dir}/jdiff"/>
- <property name="jdiff.stable" value="0.20.205.0"/>
- <property name="jdiff.stable.javadoc"
- value="http://hadoop.apache.org/core/docs/r${jdiff.stable}/api/"/>
- <property name="scratch.dir" value="${user.home}/tmp"/>
- <property name="svn.cmd" value="svn"/>
- <property name="grep.cmd" value="grep"/>
- <property name="patch.cmd" value="patch"/>
- <property name="make.cmd" value="make"/>
- <property name="jsvc.build.dir" value="${build.dir}/jsvc.${os.arch}" />
- <property name="jsvc.install.dir" value="${dist.dir}/libexec" />
- <condition property="os-arch" value="x86_64">
- <and>
- <os arch="amd64" />
- </and>
- </condition>
- <condition property="os-arch" value="i386">
- <or>
- <os arch="i386" />
- <os arch="i486" />
- <os arch="i586" />
- <os arch="i686" />
- </or>
- </condition>
- <property name="jsvc.location" value="http://archive.apache.org/dist/commons/daemon/binaries/1.0.2/linux/commons-daemon-1.0.2-bin-linux-${os-arch}.tar.gz" />
- <property name="jsvc.dest.name" value="jsvc.${os.arch}.tar.gz" />
- <!-- task-controller properties set here -->
- <!-- Source directory from which configure is run and files are copied
- -->
-
- <property name="c++.task-controller.src"
- value="${basedir}/src/c++/task-controller" />
- <!-- directory where autoconf files, temporary files and source are
- stored for compilation -->
- <property name="build.c++.task-controller"
- value="${build.c++}/task-controller" />
- <property name="task-controller.prefix.dir" value="${dist.dir}" />
- <!-- the configuration directory for the linux task controller -->
- <property name="hadoop.conf.dir" value="/etc/hadoop"/>
- <!-- end of task-controller properties -->
- <property name="package.buildroot" value="/tmp/hadoop_package_build_${user.name}"/>
- <property name="package.build.dir" value="/tmp/hadoop_package_build_${user.name}/BUILD"/>
- <!-- IVY properties set here -->
- <property name="ivy.dir" location="ivy" />
- <loadproperties srcfile="${ivy.dir}/libraries.properties"/>
- <property name="mvnrepo" value="http://repo2.maven.org/maven2"/>
- <property name="asfrepo" value="https://repository.apache.org"/>
- <property name="ivy.jar" location="${ivy.dir}/ivy-${ivy.version}.jar"/>
- <property name="ivy_repo_url"
- value="${mvnrepo}/org/apache/ivy/ivy/${ivy.version}/ivy-${ivy.version}.jar"/>
- <property name="ant_task.jar"
- location="${ivy.dir}/maven-ant-tasks-${ant-task.version}.jar"/>
- <property name="tsk.org" value="/org/apache/maven/maven-ant-tasks/"/>
- <property name="ant_task_repo_url"
- value="${mvnrepo}${tsk.org}${ant-task.version}/maven-ant-tasks-${ant-task.version}.jar"/>
- <property name="repo" value="snapshots"/>
- <property name="asfsnapshotrepo"
- value="${asfrepo}/content/repositories/snapshots"/>
- <property name="asfstagingrepo"
- value="${asfrepo}/service/local/staging/deploy/maven2"/>
- <property name="ivysettings.xml" location="${ivy.dir}/ivysettings.xml"/>
- <property name="ivy.org" value="org.apache.hadoop"/>
- <property name="build.dir" location="build" />
- <property name="dist.dir" value="${build.dir}/${final.name}"/>
- <property name="build.ivy.dir" location="${build.dir}/ivy" />
- <property name="build.ivy.lib.dir" location="${build.ivy.dir}/lib"/>
- <property name="common.ivy.lib.dir"
- location="${build.ivy.lib.dir}/${ant.project.name}/common"/>
- <property name="build.ivy.report.dir" location="${build.ivy.dir}/report"/>
- <property name="hadoop-core.pom" location="${ivy.dir}/hadoop-core-pom.xml"/>
- <property name="hadoop-core-pom-template.xml"
- location="${ivy.dir}/hadoop-core-pom-template.xml"/>
- <property name="hadoop-core.jar" location="${build.dir}/${core.final.name}.jar"/>
- <property name="hadoop-test.pom" location="${ivy.dir}/hadoop-test-pom.xml"/>
- <property name="hadoop-test-pom-template.xml"
- location="${ivy.dir}/hadoop-test-pom-template.xml" />
- <property name="hadoop-test.jar" location="${build.dir}/${test.final.name}.jar"/>
- <property name="hadoop-tools.pom" location="${ivy.dir}/hadoop-tools-pom.xml"/>
- <property name="hadoop-tools-pom-template.xml"
- location="${ivy.dir}/hadoop-tools-pom-template.xml" />
- <property name="hadoop-tools.jar" location="${build.dir}/${tools.final.name}.jar"/>
- <property name="hadoop-examples.pom" location="${ivy.dir}/hadoop-examples-pom.xml"/>
- <property name="hadoop-examples-pom-template.xml"
- location="${ivy.dir}/hadoop-examples-pom-template.xml"/>
- <property name="hadoop-examples.jar"
- location="${build.dir}/${examples.final.name}.jar"/>
- <property name="hadoop-streaming.pom"
- location="${ivy.dir}/hadoop-streaming-pom.xml"/>
- <property name="hadoop-streaming-pom-template.xml"
- location="${ivy.dir}/hadoop-streaming-pom-template.xml"/>
- <property name="hadoop-streaming.jar"
- location="${build.dir}/contrib/streaming/${streaming.final.name}.jar"/>
-
- <!--this is the naming policy for artifacts we want pulled down-->
- <property name="ivy.artifact.retrieve.pattern"
- value="${ant.project.name}/[conf]/[artifact]-[revision].[ext]"/>
- <!--this is how artifacts that get built are named-->
- <property name="ivy.publish.pattern" value="hadoop-[revision]-core.[ext]"/>
- <!-- jdiff.home property set -->
- <property name="jdiff.home"
- value="${build.ivy.lib.dir}/${ant.project.name}/jdiff"/>
- <property name="jdiff.jar" value="${jdiff.home}/jdiff-${jdiff.version}.jar"/>
- <property name="xerces.jar" value="${jdiff.home}/xerces-${xerces.version}.jar"/>
- <property name="clover.jar" location="${clover.home}/lib/clover.jar"/>
- <available property="clover.present" file="${clover.jar}" />
-
- <!-- Eclipse properties -->
- <property name="build.dir.eclipse" value="build/eclipse"/>
- <property name="build.dir.eclipse-main-classes" value="${build.dir.eclipse}/classes-main"/>
- <property name="build.dir.eclipse-test-classes" value="${build.dir.eclipse}/classes-test"/>
- <property name="build.dir.eclipse-test-generated-classes" value="${build.dir.eclipse}/classes-test-generated"/>
- <property name="build.dir.eclipse-example-classes" value="${build.dir.eclipse}/classes-example"/>
- <property name="build.dir.eclipse-tools-classes" value="${build.dir.eclipse}/classes-tools"/>
- <property name="build.dir.eclipse-contrib-classes" value="${build.dir.eclipse}/classes-contrib"/>
- <property name="build.dir.eclipse-test-resources" value="${build.dir.eclipse}/test-resources/"/>
- <property name="build.dir.eclipse-test-resources-webapps" value="${build.dir.eclipse}/test-resources/webapps"/>
- <!-- check if clover reports should be generated -->
- <condition property="clover.enabled">
- <and>
- <isset property="run.clover"/>
- <isset property="clover.present"/>
- </and>
- </condition>
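- <!-- Clover instrumentation is thus opt-in; a typical invocation would be
-      (clover.home path illustrative):
-   ant test-core -Drun.clover=true -Dclover.home=/opt/clover
- -->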
- <condition property="staging">
- <equals arg1="${repo}" arg2="staging"/>
- </condition>
- <!-- the normal classpath -->
- <path id="classpath">
- <pathelement location="${build.classes}"/>
- <path refid="src.lib.classpath"/>
- <pathelement location="${conf.dir}"/>
- </path>
- <path id="src.lib.classpath">
- <fileset dir="${lib.dir}">
- <include name="**/*.jar" />
- <exclude name="**/excluded/" />
- </fileset>
- <path refid="ivy-common.classpath"/>
- </path>
- <path id="test.lib.classpath">
- <fileset dir="${test.lib.dir}">
- <include name="**/*.jar"/>
- <exclude name="**/excluded/"/>
- </fileset>
- </path>
- <!-- the unit test classpath: uses test.src.dir for configuration -->
- <path id="test.classpath">
- <pathelement location="${test.build.extraconf}"/>
- <pathelement location="${test.build.classes}" />
- <pathelement location="${test.src.dir}"/>
- <pathelement location="${build.dir}"/>
- <pathelement location="${build.examples}"/>
- <pathelement location="${build.tools}"/>
- <pathelement path="${clover.jar}"/>
- <path refid="test.lib.classpath"/>
- <pathelement location="${hadoop-core.jar}"/>
- <path refid="classpath"/>
- </path>
- <!-- the cluster test classpath: uses conf.dir for configuration -->
- <path id="test.cluster.classpath">
- <path refid="classpath"/>
- <pathelement location="${test.build.classes}" />
- <pathelement location="${test.src.dir}"/>
- <pathelement location="${build.dir}"/>
- </path>
- <!-- ====================================================== -->
- <!-- Macro definitions -->
- <!-- ====================================================== -->
- <macrodef name="macro_tar" description="Worker Macro for tar">
- <attribute name="param.destfile"/>
- <element name="param.listofitems"/>
- <sequential>
- <tar compression="gzip" longfile="gnu"
- destfile="@{param.destfile}">
- <param.listofitems/>
- </tar>
- </sequential>
- </macrodef>
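- <!-- Illustrative macro_tar usage (a sketch only; the destination file and
-      fileset are hypothetical, not taken from a target in this file):
-   <macro_tar param.destfile="${build.dir}/${final.name}.tar.gz">
-     <param.listofitems>
-       <tarfileset dir="${build.dir}" mode="664">
-         <include name="${final.name}/**"/>
-       </tarfileset>
-     </param.listofitems>
-   </macro_tar>
- -->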
- <!-- ====================================================== -->
- <!-- Stuff needed by all targets -->
- <!-- ====================================================== -->
- <target name="init" depends="ivy-retrieve-common">
- <mkdir dir="${build.dir}"/>
- <mkdir dir="${build.classes}"/>
- <mkdir dir="${build.tools}"/>
- <mkdir dir="${build.src}"/>
- <mkdir dir="${build.webapps}/task/WEB-INF"/>
- <mkdir dir="${build.webapps}/job/WEB-INF"/>
- <mkdir dir="${build.webapps}/history/WEB-INF"/>
- <mkdir dir="${build.webapps}/hdfs/WEB-INF"/>
- <mkdir dir="${build.webapps}/datanode/WEB-INF"/>
- <mkdir dir="${build.webapps}/secondary/WEB-INF"/>
- <mkdir dir="${build.examples}"/>
- <mkdir dir="${build.anttasks}"/>
- <mkdir dir="${build.dir}/c++"/>
-
- <mkdir dir="${test.build.dir}"/>
- <mkdir dir="${test.build.classes}"/>
- <mkdir dir="${test.build.testjar}"/>
- <mkdir dir="${test.build.testshell}"/>
- <mkdir dir="${test.build.extraconf}"/>
- <tempfile property="touch.temp.file" destDir="${java.io.tmpdir}"/>
- <touch millis="0" file="${touch.temp.file}">
- <fileset dir="${conf.dir}" includes="**/*.template"/>
- <fileset dir="${contrib.dir}" includes="**/*.template"/>
- </touch>
- <delete file="${touch.temp.file}"/>
- <!-- copy the static webapp files (JSPs are compiled separately) -->
- <copy todir="${build.webapps}">
- <fileset dir="${src.webapps}">
- <exclude name="**/*.jsp" />
- </fileset>
- </copy>
- <copy todir="${conf.dir}" verbose="true">
- <fileset dir="${conf.dir}" includes="**/*.template"/>
- <mapper type="glob" from="*.template" to="*"/>
- </copy>
- <copy todir="${contrib.dir}" verbose="true">
- <fileset dir="${contrib.dir}" includes="**/*.template"/>
- <mapper type="glob" from="*.template" to="*"/>
- </copy>
- <exec executable="sh">
- <arg line="src/saveVersion.sh ${version} ${build.dir}"/>
- </exec>
-
- <exec executable="sh">
- <arg line="src/fixFontsPath.sh ${src.docs.cn}"/>
- </exec>
- </target>
- <import file="${test.src.dir}/aop/build/aop.xml"/>
- <!-- ====================================================== -->
- <!-- Compile the Java files -->
- <!-- ====================================================== -->
- <target name="record-parser" depends="init" if="javacc.home">
- <javacc
- target="${core.src.dir}/org/apache/hadoop/record/compiler/generated/rcc.jj"
- outputdirectory="${core.src.dir}/org/apache/hadoop/record/compiler/generated"
- javacchome="${javacc.home}" />
- </target>
-
- <target name="compile-rcc-compiler" depends="init, record-parser">
- <javac
- encoding="${build.encoding}"
- srcdir="${core.src.dir}"
- includes="org/apache/hadoop/record/compiler/**/*.java"
- destdir="${build.classes}"
- debug="${javac.debug}"
- optimize="${javac.optimize}"
- target="${javac.version}"
- source="${javac.version}"
- deprecation="${javac.deprecation}">
- <compilerarg line="${javac.args}"/>
- <classpath refid="classpath"/>
- </javac>
-
- <taskdef name="recordcc" classname="org.apache.hadoop.record.compiler.ant.RccTask">
- <classpath refid="classpath" />
- </taskdef>
- </target>
-
- <target name="compile-core-classes" depends="init, compile-rcc-compiler">
- <taskdef classname="org.apache.jasper.JspC" name="jsp-compile" >
- <classpath refid="test.classpath"/>
- </taskdef>
- <!-- Compile Java files (excluding JSPs) checking warnings -->
- <javac
- encoding="${build.encoding}"
- srcdir="${core.src.dir}"
- includes="org/apache/hadoop/**/*.java"
- destdir="${build.classes}"
- debug="${javac.debug}"
- optimize="${javac.optimize}"
- target="${javac.version}"
- source="${javac.version}"
- deprecation="${javac.deprecation}">
- <compilerarg line="${javac.args} ${javac.args.warnings}" />
- <classpath refid="classpath"/>
- </javac>
- <copy todir="${build.classes}">
- <fileset dir="${core.src.dir}" includes="**/*.properties"/>
- <fileset dir="${core.src.dir}" includes="core-default.xml"/>
- </copy>
-
- </target>
- <target name="compile-mapred-classes" depends="compile-core-classes,compile-hdfs-classes">
- <taskdef classname="org.apache.jasper.JspC" name="jsp-compile" >
- <classpath refid="classpath"/>
- </taskdef>
- <jsp-compile
- uriroot="${src.webapps}/task"
- outputdir="${build.src}"
- package="org.apache.hadoop.mapred"
- webxml="${build.webapps}/task/WEB-INF/web.xml">
- </jsp-compile>
- <jsp-compile
- uriroot="${src.webapps}/history"
- outputdir="${build.src}"
- package="org.apache.hadoop.mapred"
- webxml="${build.webapps}/history/WEB-INF/web.xml">
- </jsp-compile>
-
- <copy todir="${build.webapps}/job">
- <fileset dir="${src.webapps}/job" includes="**/*.jsp"/>
- <fileset dir="${src.webapps}/history" includes="**/*.jsp"/>
- </copy>
-
- <jsp-compile
- uriroot="${build.webapps}/job"
- outputdir="${build.src}"
- package="org.apache.hadoop.mapred"
- webxml="${build.webapps}/job/WEB-INF/web.xml">
- </jsp-compile>
- <!-- Compile Java files (excluding JSPs) checking warnings -->
- <javac
- encoding="${build.encoding}"
- srcdir="${mapred.src.dir};${build.src}"
- includes="org/apache/hadoop/**/*.java"
- destdir="${build.classes}"
- debug="${javac.debug}"
- optimize="${javac.optimize}"
- target="${javac.version}"
- source="${javac.version}"
- deprecation="${javac.deprecation}">
- <compilerarg line="${javac.args} ${javac.args.warnings}" />
- <classpath refid="classpath"/>
- </javac>
-
- <copy todir="${build.classes}">
- <fileset dir="${mapred.src.dir}" includes="**/*.properties"/>
- <fileset dir="${mapred.src.dir}" includes="mapred-default.xml"/>
- </copy>
- </target>
- <target name="compile-hdfs-classes" depends="compile-core-classes">
- <jsp-compile
- uriroot="${src.webapps}/hdfs"
- outputdir="${build.src}"
- package="org.apache.hadoop.hdfs.server.namenode"
- webxml="${build.webapps}/hdfs/WEB-INF/web.xml">
- </jsp-compile>
- <jsp-compile
- uriroot="${src.webapps}/datanode"
- outputdir="${build.src}"
- package="org.apache.hadoop.hdfs.server.datanode"
- webxml="${build.webapps}/datanode/WEB-INF/web.xml">
- </jsp-compile>
- <!-- Compile Java files (excluding JSPs) checking warnings -->
- <javac
- encoding="${build.encoding}"
- srcdir="${hdfs.src.dir};${build.src}"
- includes="org/apache/hadoop/**/*.java"
- destdir="${build.classes}"
- debug="${javac.debug}"
- optimize="${javac.optimize}"
- target="${javac.version}"
- source="${javac.version}"
- deprecation="${javac.deprecation}">
- <compilerarg line="${javac.args} ${javac.args.warnings}" />
- <classpath refid="classpath"/>
- </javac>
- <copy todir="${build.classes}">
- <fileset dir="${hdfs.src.dir}" includes="**/*.properties"/>
- <fileset dir="${hdfs.src.dir}" includes="hdfs-default.xml"/>
- </copy>
- </target>
- <target name="compile-tools" depends="init">
- <javac
- encoding="${build.encoding}"
- srcdir="${tools.src}"
- includes="org/apache/hadoop/**/*.java"
- destdir="${build.tools}"
- debug="${javac.debug}"
- optimize="${javac.optimize}"
- target="${javac.version}"
- source="${javac.version}"
- deprecation="${javac.deprecation}">
- <compilerarg line="${javac.args} ${javac.args.warnings}" />
- <classpath refid="classpath"/>
- </javac>
-
- <copy todir="${build.tools}">
- <fileset
- dir="${tools.src}"
- includes="**/*.properties"
- />
- </copy>
- </target>
- <target name="compile-native">
- <antcall target="compile-core-native">
- <param name="compile.native" value="true"/>
- </antcall>
- </target>
- <target name="compile-core-native" depends="compile-core-classes"
- if="compile.native">
-
- <mkdir dir="${build.native}/lib"/>
- <mkdir dir="${build.native}/src/org/apache/hadoop/io/compress/zlib"/>
- <mkdir dir="${build.native}/src/org/apache/hadoop/io/nativeio"/>
- <mkdir dir="${build.native}/src/org/apache/hadoop/security"/>
- <javah
- classpath="${build.classes}"
- destdir="${build.native}/src/org/apache/hadoop/io/compress/zlib"
- force="yes"
- verbose="yes"
- >
- <class name="org.apache.hadoop.io.compress.zlib.ZlibCompressor" />
- <class name="org.apache.hadoop.io.compress.zlib.ZlibDecompressor" />
- </javah>
- <javah
- classpath="${build.classes}"
- destdir="${build.native}/src/org/apache/hadoop/io/nativeio"
- force="yes"
- verbose="yes"
- >
- <class name="org.apache.hadoop.io.nativeio.NativeIO" />
- </javah>
- <javah
- classpath="${build.classes}"
- destdir="${build.native}/src/org/apache/hadoop/security"
- force="yes"
- verbose="yes"
- >
- <class name="org.apache.hadoop.security.JniBasedUnixGroupsMapping" />
- </javah>
- <javah
- classpath="${build.classes}"
- destdir="${build.native}/src/org/apache/hadoop/security"
- force="yes"
- verbose="yes"
- >
- <class name="org.apache.hadoop.security.JniBasedUnixGroupsNetgroupMapping" />
- </javah>
- <exec dir="${build.native}" executable="sh" failonerror="true">
- <env key="OS_NAME" value="${os.name}"/>
- <env key="OS_ARCH" value="${os.arch}"/>
- <env key="JVM_DATA_MODEL" value="${sun.arch.data.model}"/>
- <env key="HADOOP_NATIVE_SRCDIR" value="${native.src.dir}"/>
- <arg line="${native.src.dir}/configure"/>
- </exec>
- <exec dir="${build.native}" executable="${make.cmd}" failonerror="true">
- <env key="OS_NAME" value="${os.name}"/>
- <env key="OS_ARCH" value="${os.arch}"/>
- <env key="JVM_DATA_MODEL" value="${sun.arch.data.model}"/>
- <env key="HADOOP_NATIVE_SRCDIR" value="${native.src.dir}"/>
- </exec>
- <exec dir="${build.native}" executable="sh" failonerror="true">
- <arg line="${build.native}/libtool --mode=install cp ${build.native}/libhadoop.la ${build.native}/lib"/>
- </exec>
- </target>
- <target name="compile-core"
- depends="clover,compile-core-classes,compile-mapred-classes,
- compile-hdfs-classes,compile-core-native,compile-c++"
- description="Compile core only">
- </target>
- <target name="compile-contrib" depends="compile-core,tools-jar,compile-c++-libhdfs">
- <subant target="compile">
- <property name="version" value="${version}"/>
- <fileset file="${contrib.dir}/build.xml"/>
- </subant>
- </target>
-
- <target name="compile" depends="compile-core, compile-contrib, compile-ant-tasks, compile-tools" description="Compile core, contrib">
- </target>
- <target name="compile-examples"
- depends="compile-core,compile-tools,compile-c++-examples">
- <javac
- encoding="${build.encoding}"
- srcdir="${examples.dir}"
- includes="org/apache/hadoop/**/*.java"
- destdir="${build.examples}"
- debug="${javac.debug}"
- optimize="${javac.optimize}"
- target="${javac.version}"
- source="${javac.version}"
- deprecation="${javac.deprecation}">
- <compilerarg line="${javac.args} ${javac.args.warnings}" />
- <classpath>
- <path refid="classpath"/>
- <pathelement location="${build.tools}"/>
- </classpath>
- </javac>
- </target>
- <!-- ================================================================== -->
- <!-- Make hadoop.jar -->
- <!-- ================================================================== -->
- <!-- -->
- <!-- ================================================================== -->
- <target name="jar" depends="compile-core" description="Make hadoop.jar">
- <tar compression="gzip" destfile="${build.classes}/bin.tgz">
- <tarfileset dir="bin" mode="755"/>
- </tar>
- <property name="jar.properties.list"
- value="commons-logging.properties, log4j.properties, hadoop-metrics.properties"/>
- <jar jarfile="${build.dir}/${core.final.name}.jar"
- basedir="${build.classes}">
- <manifest>
- <section name="org/apache/hadoop">
- <attribute name="Implementation-Title" value="Hadoop"/>
- <attribute name="Implementation-Version" value="${version}"/>
- <attribute name="Implementation-Vendor" value="Apache"/>
- </section>
- </manifest>
- <service type="org.apache.hadoop.security.token.TokenRenewer">
- <provider classname="org.apache.hadoop.hdfs.DFSClient$Renewer"/>
- <provider classname="org.apache.hadoop.mapred.JobClient$Renewer"/>
- <provider classname="org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier$Renewer"/>
- <provider classname="org.apache.hadoop.mapreduce.security.token.JobTokenIdentifier$Renewer"/>
- <provider classname="org.apache.hadoop.hdfs.HftpFileSystem$TokenManager"/>
- <provider classname="org.apache.hadoop.hdfs.web.WebHdfsFileSystem$DtRenewer"/>
- </service>
- <fileset dir="${conf.dir}" includes="${jar.properties.list}" />
- <fileset file="${jar.extra.properties.list}" />
- <zipfileset dir="${build.webapps}" prefix="webapps"/>
- </jar>
- </target>
- <!-- ================================================================== -->
- <!-- Make the Hadoop examples jar. -->
- <!-- ================================================================== -->
- <!-- -->
- <!-- ================================================================== -->
- <target name="examples" depends="jar, compile-examples" description="Make the Hadoop examples jar.">
- <macro-jar-examples
- build.dir="${build.dir}"
- basedir="${build.examples}">
- </macro-jar-examples>
- </target>
- <macrodef name="macro-jar-examples">
- <attribute name="build.dir" />
- <attribute name="basedir" />
- <sequential>
- <jar jarfile="@{build.dir}/${examples.final.name}.jar"
- basedir="@{basedir}">
- <manifest>
- <attribute name="Main-Class"
- value="org/apache/hadoop/examples/ExampleDriver"/>
- </manifest>
- </jar>
- </sequential>
- </macrodef>
- <target name="tools-jar" depends="jar, compile-tools"
- description="Make the Hadoop tools jar.">
- <jar jarfile="${build.dir}/${tools.final.name}.jar"
- basedir="${build.tools}">
- <manifest>
- <attribute name="Main-Class"
- value="org/apache/hadoop/examples/ExampleDriver"/>
- </manifest>
- </jar>
- </target>
- <!-- ================================================================== -->
- <!-- Make the Hadoop metrics plugin dev/sdk jar. (for use outside Hadoop) -->
- <!-- ================================================================== -->
- <!-- -->
- <!-- ================================================================== -->
- <target name="metrics.jar" depends="compile-core" description="Make the Hadoop metrics plugin dev/sdk jar. (for use outside Hadoop)">
- <jar jarfile="${build.dir}/hadoop-metrics-dev-${version}.jar"
- basedir="${build.classes}">
- <include name="**/metrics2/*.class" />
- <include name="**/metrics2/util/*.class" />
- </jar>
- </target>
- <target name="generate-test-records" depends="compile-rcc-compiler">
- <recordcc destdir="${test.generated.dir}">
- <fileset dir="${test.src.dir}"
- includes="**/*.jr" />
- </recordcc>
- </target>
-
- <!-- ================================================================== -->
- <!-- Compile test code -->
- <!-- ================================================================== -->
- <target name="compile-core-test" depends="compile-examples, compile-tools, generate-test-records">
- <javac
- encoding="${build.encoding}"
- srcdir="${test.generated.dir}"
- includes="org/apache/hadoop/**/*.java"
- destdir="${test.build.classes}"
- debug="${javac.debug}"
- optimize="${javac.optimize}"
- target="${javac.version}"
- source="${javac.version}"
- deprecation="${javac.deprecation}">
- <compilerarg line="${javac.args}" />
- <classpath refid="test.classpath"/>
- </javac>
- <javac
- encoding="${build.encoding}"
- srcdir="${test.src.dir}"
- includes="org/apache/hadoop/**/*.java"
- destdir="${test.build.classes}"
- debug="${javac.debug}"
- optimize="${javac.optimize}"
- target="${javac.version}"
- source="${javac.version}"
- deprecation="${javac.deprecation}">
- <compilerarg line="${javac.args} ${javac.args.warnings}" />
- <classpath refid="test.classpath"/>
- </javac>
- <javac
- encoding="${build.encoding}"
- srcdir="${test.src.dir}/testjar"
- includes="*.java"
- destdir="${test.build.testjar}"
- debug="${javac.debug}"
- optimize="${javac.optimize}"
- target="${javac.version}"
- source="${javac.version}"
- deprecation="${javac.deprecation}">
- <compilerarg line="${javac.args} ${javac.args.warnings}" />
- <classpath refid="test.classpath"/>
- </javac>
- <delete file="${test.build.testjar}/testjob.jar"/>
- <jar jarfile="${test.build.testjar}/testjob.jar"
- basedir="${test.build.testjar}">
- </jar>
- <javac
- encoding="${build.encoding}"
- srcdir="${test.src.dir}/testshell"
- includes="*.java"
- destdir="${test.build.testshell}"
- debug="${javac.debug}"
- optimize="${javac.optimize}"
- target="${javac.version}"
- source="${javac.version}"
- deprecation="${javac.deprecation}">
- <compilerarg line="${javac.args} ${javac.args.warnings}"/>
- <classpath refid="test.classpath"/>
- </javac>
- <delete file="${test.build.testshell}/testshell.jar"/>
- <jar jarfile="${test.build.testshell}/testshell.jar"
- basedir="${test.build.testshell}">
- </jar>
-
- <delete dir="${test.cache.data}"/>
- <mkdir dir="${test.cache.data}"/>
- <delete dir="${test.debug.data}"/>
- <mkdir dir="${test.debug.data}"/>
- <copy file="${test.src.dir}/org/apache/hadoop/mapred/testscript.txt" todir="${test.debug.data}"/>
- <copy file="${test.src.dir}/org/apache/hadoop/mapred/test.txt" todir="${test.cache.data}"/>
- <copy file="${test.src.dir}/org/apache/hadoop/mapred/test.jar" todir="${test.cache.data}"/>
- <copy file="${test.src.dir}/org/apache/hadoop/mapred/test.zip" todir="${test.cache.data}"/>
- <copy file="${test.src.dir}/org/apache/hadoop/mapred/test.tar" todir="${test.cache.data}"/>
- <copy file="${test.src.dir}/org/apache/hadoop/mapred/test.tgz" todir="${test.cache.data}"/>
- <copy file="${test.src.dir}/org/apache/hadoop/mapred/test.tar.gz" todir="${test.cache.data}"/>
- <copy file="${test.src.dir}/org/apache/hadoop/hdfs/hadoop-14-dfs-dir.tgz" todir="${test.cache.data}"/>
- <copy file="${test.src.dir}/org/apache/hadoop/hdfs/hadoop-dfs-dir.txt" todir="${test.cache.data}"/>
- <copy file="${test.src.dir}/org/apache/hadoop/cli/testConf.xml" todir="${test.cache.data}"/>
- <copy file="${test.src.dir}/org/apache/hadoop/cli/clitest_data/data15bytes" todir="${test.cache.data}"/>
- <copy file="${test.src.dir}/org/apache/hadoop/cli/clitest_data/data30bytes" todir="${test.cache.data}"/>
- <copy file="${test.src.dir}/org/apache/hadoop/cli/clitest_data/data60bytes" todir="${test.cache.data}"/>
- <copy file="${test.src.dir}/org/apache/hadoop/cli/clitest_data/data120bytes" todir="${test.cache.data}"/>
- </target>
- <!-- ================================================================== -->
- <!-- Make hadoop-test.jar -->
- <!-- ================================================================== -->
- <!-- -->
- <!-- ================================================================== -->
- <target name="jar-test" depends="jar,compile-core-test" description="Make hadoop-test.jar">
- <jar jarfile="${build.dir}/${test.final.name}.jar"
- basedir="${test.build.classes}">
- <manifest>
- <attribute name="Main-Class"
- value="org/apache/hadoop/test/AllTestDriver"/>
- <section name="org/apache/hadoop">
- <attribute name="Implementation-Title" value="Hadoop"/>
- <attribute name="Implementation-Version" value="${version}"/>
- <attribute name="Implementation-Vendor" value="Apache"/>
- </section>
- </manifest>
- </jar>
- </target>
- <!-- ================================================================== -->
- <!-- Fault injection customization section.
- These targets ought to be copied over to other projects and modified
- as needed -->
- <!-- ================================================================== -->
- <target name="-classes-compilation" depends="compile-core-classes,
- compile-hdfs-classes, compile-mapred-classes, compile-core-test"/>
- <target name="run-test-core-fault-inject" depends="injectfaults"
- description="Run full set of the unit tests with fault injection">
- <macro-run-tests-fault-inject target.name="test-core"
- testcasesonly="false"/>
- </target>
- <target name="jar-test-fault-inject" depends="injectfaults"
- description="Make hadoop-test-fi.jar">
- <macro-jar-test-fault-inject
- target.name="jar-test"
- jar.final.name="test.final.name"
- jar.final.value="${test.final.name}-fi" />
- </target>
- <target name="jar-fault-inject" depends="injectfaults"
- description="Make hadoop-fi.jar">
- <macro-jar-fault-inject
- target.name="jar"
- build.dir="${build-fi.dir}"
- jar.final.name="final.name"
- jar.final.value="${final.name}-fi" />
- </target>
- <!--This target is not included in the top-level list of targets
- because it serves the special "regression" purpose of running non-FI
- tests in an FI environment -->
- <target name="run-fault-inject-with-testcaseonly" depends="injectfaults">
- <fail unless="testcase">Can't run this target without -Dtestcase setting!
- </fail>
- <macro-run-tests-fault-inject target.name="test-core"
- testcasesonly="true"/>
- </target>
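- <!-- Illustrative invocation (the test class name is hypothetical):
-   ant run-fault-inject-with-testcaseonly -Dtestcase=TestFiRename
- -->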
- <!-- ================================================================== -->
- <!-- End of Fault injection customization section -->
- <!-- ================================================================== -->
- <condition property="tests.notestcase">
- <and>
- <isfalse value="${test.fault.inject}"/>
- <not>
- <isset property="testcase"/>
- </not>
- </and>
- </condition>
- <condition property="tests.notestcase.fi">
- <and>
- <not>
- <isset property="testcase" />
- </not>
- <istrue value="${test.fault.inject}" />
- </and>
- </condition>
- <condition property="tests.testcase">
- <and>
- <isfalse value="${test.fault.inject}" />
- <isset property="testcase" />
- </and>
- </condition>
- <condition property="tests.testcase.fi">
- <and>
- <istrue value="${test.fault.inject}" />
- <isset property="testcase" />
- </and>
- </condition>
- <!-- ================================================================== -->
- <!-- Define exclude lists for different kinds of testing -->
- <!-- ================================================================== -->
- <patternset id="empty.exclude.list.id" />
- <patternset id="commit.smoke.exclude.list.id">
- <excludesfile name="${test.commit.tests.file}"/>
- <excludesfile name="${test.smoke.tests.file}"/>
- </patternset>
- <!-- ================================================================== -->
- <!-- Run unit tests -->
- <!-- ================================================================== -->
- <macrodef name="macro-test-runner">
- <attribute name="test.file" />
- <attribute name="classpath" />
- <attribute name="test.dir" />
- <attribute name="fileset.dir" />
- <attribute name="hadoop.conf.dir.deployed" default="" />
- <attribute name="test.krb5.conf" default="" />
- <attribute name="test.krb5.conf.filename" default="" />
- <attribute name="exclude.list.id" default="empty.exclude.list.id" />
- <sequential>
- <delete file="${test.build.dir}/testsfailed"/>
- <delete dir="@{test.dir}/data" />
- <mkdir dir="@{test.dir}/data" />
- <delete dir="@{test.dir}/logs" />
- <mkdir dir="@{test.dir}/logs" />
- <copy file="${test.src.dir}/hadoop-policy.xml"
- todir="@{test.dir}/extraconf" />
- <copy file="${test.src.dir}/fi-site.xml"
- todir="@{test.dir}/extraconf" />
- <junit showoutput="${test.output}"
- printsummary="${test.junit.printsummary}"
- haltonfailure="${test.junit.haltonfailure}"
- fork="yes"
- forkmode="${test.junit.fork.mode}"
- maxmemory="${test.junit.maxmemory}"
- dir="${basedir}"
- timeout="${test.timeout}"
- errorProperty="tests.failed"
- failureProperty="tests.failed">
- <sysproperty key="test.build.data" value="${test.build.data}" />
- <sysproperty key="test.tools.input.dir"
- value="${test.tools.input.dir}" />
- <sysproperty key="test.cache.data" value="${test.cache.data}" />
- <sysproperty key="test.debug.data" value="${test.debug.data}" />
- <sysproperty key="hadoop.log.dir" value="${test.log.dir}" />
- <sysproperty key="test.src.dir" value="${test.src.dir}" />
- <sysproperty key="taskcontroller-path" value="${taskcontroller-path}" />
- <sysproperty key="taskcontroller-ugi" value="${taskcontroller-ugi}" />
- <sysproperty key="test.build.extraconf"
- value="@{test.dir}/extraconf" />
- <sysproperty key="@{test.krb5.conf}"
- value="@{test.krb5.conf.filename}"/>
- <sysproperty key="hadoop.policy.file" value="hadoop-policy.xml" />
- <sysproperty key="java.library.path"
- value="${build.native}/lib:${lib.dir}/native/${build.platform}:${lib.file.path}" />
- <sysproperty key="install.c++.examples"
- value="${install.c++.examples}" />
- <sysproperty key="testjar"
- value="@{test.dir}/testjar" />
- <!-- System properties that are specifically set for system tests -->
- <sysproperty key="test.system.hdrc.deployed.hadoopconfdir"
- value="@{hadoop.conf.dir.deployed}" />
- <!-- set io.compression.codec.lzo.class in the child jvm only if it is set -->
- <syspropertyset dynamic="no">
- <propertyref name="io.compression.codec.lzo.class" />
- </syspropertyset>
- <!-- set compile.c++ in the child jvm only if it is set -->
- <syspropertyset dynamic="no">
- <propertyref name="compile.c++" />
- </syspropertyset>
- <classpath refid="@{classpath}" />
- <syspropertyset id="FaultProbabilityProperties">
- <propertyref regex="fi.*" />
- </syspropertyset>
- <formatter type="${test.junit.output.format}" />
- <batchtest todir="@{test.dir}" if="tests.notestcase">
- <fileset dir="@{fileset.dir}"
- excludes="**/${test.exclude}.java aop/** system/**">
- <patternset>
- <includesfile name="@{test.file}"/>
- </patternset>
- <patternset refid="@{exclude.list.id}"/>
- </fileset>
- </batchtest>
- <batchtest todir="${test.build.dir}" if="tests.notestcase.fi">
- <fileset dir="${test.src.dir}/aop"
- includes="**/${test.include}.java"
- excludes="**/${test.exclude}.java" />
- </batchtest>
- <batchtest todir="@{test.dir}" if="tests.testcase">
- <fileset dir="@{fileset.dir}"
- includes="**/${testcase}.java" excludes="aop/** system/**"/>
- </batchtest>
- <batchtest todir="${test.build.dir}" if="tests.testcase.fi">
- <fileset dir="${test.src.dir}/aop" includes="**/${testcase}.java" />
- </batchtest>
- <!--The following batch is for very special occasions only, when
- non-FI tests need to be executed against an FI environment -->
- <batchtest todir="${test.build.dir}" if="tests.testcaseonly">
- <fileset dir="${test.src.dir}" includes="**/${testcase}.java" />
- </batchtest>
- </junit>
- <antcall target="checkfailure"/>
- </sequential>
- </macrodef>
- <target name="test-core" depends="test-commit, test-smoke,
- test-core-excluding-commit-and-smoke,
- test-core-all-withtestcaseonly, jar-test"
- description="Run core unit tests">
- </target>
- <target name="test-core-all-withtestcaseonly" depends="jar-test" if="testcase">
- <macro-test-runner test.file="${test.all.tests.file}"
- classpath="${test.classpath.id}"
- test.dir="${test.build.dir}"
- fileset.dir="${test.src.dir}"
- test.krb5.conf="java.security.krb5.conf"
- test.krb5.conf.filename="${test.src.dir}/krb5.conf"
- >
- </macro-test-runner>
- </target>
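- <!-- Running a single test case through the full suite, e.g. (test class
-      name illustrative):
-   ant test-core -Dtestcase=TestDFSShell
- -->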
- <target name="test-core-excluding-commit-and-smoke" depends="jar-test"
- unless="testcase">
- <macro-test-runner test.file="${test.all.tests.file}"
- classpath="${test.classpath.id}"
- test.dir="${test.build.dir}"
- fileset.dir="${test.src.dir}"
- test.krb5.conf="java.security.krb5.conf"
- test.krb5.conf.filename="${test.src.dir}/krb5.conf"
- exclude.list.id="commit.smoke.exclude.list.id"
- >
- </macro-test-runner>
- </target>
- <target name="test-commit" depends="jar-test"
- description="Run approx 10-minute set of unit tests prior to commiting"
- unless="testcase">
- <macro-test-runner test.file="${test.commit.tests.file}"
- classpath="${test.classpath.id}"
- test.dir="${test.build.dir}"
- fileset.dir="${test.src.dir}"
- test.krb5.conf="java.security.krb5.conf"
- test.krb5.conf.filename="${test.src.dir}/krb5.conf"
- >
- </macro-test-runner>
- </target>
- <target name="test-smoke" depends="jar-test"
- description="Run approx 30-minute set of functional tests prior to
- guarantee that the build is not DOA" unless="testcase">
- <macro-test-runner test.file="${test.smoke.tests.file}"
- classpath="${test.classpath.id}"
- test.dir="${test.build.dir}"
- fileset.dir="${test.src.dir}"
- test.krb5.conf="java.security.krb5.conf"
- test.krb5.conf.filename="${test.src.dir}/krb5.conf"
- >
- </macro-test-runner>
- </target>
- <target name="checkfailure" if="tests.failed">
- <touch file="${test.build.dir}/testsfailed"/>
- <fail unless="continueOnFailure">Tests failed!</fail>
- </target>
- <target name="test-contrib" depends="compile, compile-core-test" description="Run contrib unit tests">
- <subant target="test">
- <property name="version" value="${version}"/>
- <property name="clover.jar" value="${clover.jar}"/>
- <fileset file="${contrib.dir}/build.xml"/>
- </subant>
- </target>
-
- <target name="test" description="Run core, contrib, fault injection tests">
- <delete file="${test.build.dir}/testsfailed"/>
- <property name="continueOnFailure" value="true"/>
- <antcall target="test-core"/>
- <antcall target="test-contrib"/>
- <available file="${test.build.dir}/testsfailed" property="testsfailed"/>
- <fail if="testsfailed">Tests failed!</fail>
- </target>
- <!-- Run all unit tests, not just Test*, and use non-test configuration. -->
- <target name="test-cluster" description="Run all unit tests, not just Test*, and use non-test configuration.">
- <antcall target="test">
- <param name="test.include" value="*"/>
- <param name="test.classpath.id" value="test.cluster.classpath"/>
- </antcall>
- </target>
- <target name="nightly" depends="test, tar">
- </target>
-
- <!-- ================================================================== -->
- <!-- Run optional third-party tool targets -->
- <!-- ================================================================== -->
- <target name="checkstyle" depends="ivy-retrieve-checkstyle,check-for-checkstyle" if="checkstyle.present" description="Run optional third-party tool targets">
- <taskdef resource="checkstyletask.properties">
- <classpath refid="checkstyle-classpath"/>
- </taskdef>
-
- <mkdir dir="${test.build.dir}"/>
-
- <checkstyle config="${test.src.dir}/checkstyle.xml"
- failOnViolation="false">
- <fileset dir="${core.src.dir}" includes="**/*.java" excludes="**/generated/**"/>
- <fileset dir="${mapred.src.dir}" includes="**/*.java" excludes="**/generated/**"/>
- <fileset dir="${hdfs.src.dir}" includes="**/*.java" excludes="**/generated/**"/>
- <formatter type="xml" toFile="${test.build.dir}/checkstyle-errors.xml"/>
- </checkstyle>
-
- <xslt style="${test.src.dir}/checkstyle-noframes-sorted.xsl"
- in="${test.build.dir}/checkstyle-errors.xml"
- out="${test.build.dir}/checkstyle-errors.html"/>
- </target>
-
- <target name="check-for-checkstyle">
- <available property="checkstyle.present" resource="checkstyletask.properties">
- <classpath refid="checkstyle-classpath"/>
- </available>
- </target>
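- <!-- Typical invocation; the sorted HTML report is written to
-      ${test.build.dir}/checkstyle-errors.html:
-   ant checkstyle
- -->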
- <property name="findbugs.home" value=""/>
- <target name="findbugs" depends="check-for-findbugs, tar" if="findbugs.present" description="Run findbugs if present">
- <property name="findbugs.out.dir" value="${test.build.dir}/findbugs"/>
- <property name="findbugs.exclude.file" value="${test.src.dir}/findbugsExcludeFile.xml"/>
- <property name="findbugs.report.htmlfile" value="${findbugs.out.dir}/hadoop-findbugs-report.html"/>
- <property name="findbugs.report.xmlfile" value="${findbugs.out.dir}/hadoop-findbugs-report.xml"/>
- <taskdef name="findbugs" classname="edu.umd.cs.findbugs.anttask.FindBugsTask"
- classpath="${findbugs.home}/lib/findbugs-ant.jar" />
- <mkdir dir="${findbugs.out.dir}"/>
- <findbugs home="${findbugs.home}" output="xml:withMessages"
- outputFile="${findbugs.report.xmlfile}" effort="max"
- excludeFilter="${findbugs.exclude.file}" jvmargs="-Xmx512M">
- <auxClasspath>
- <fileset dir="${lib.dir}">
- <include name="**/*.jar"/>
- </fileset>
- <fileset dir="${build.ivy.lib.dir}/${ant.project.name}/common">
- <include name="**/*.jar"/>
- </fileset>
- </auxClasspath>
- <sourcePath path="${core.src.dir}"/>
- <sourcePath path="${mapred.src.dir}"/>
- <sourcePath path="${hdfs.src.dir}"/>
- <sourcePath path="${examples.dir}" />
- <sourcePath path="${tools.src}" />
- <sourcePath path="${basedir}/src/contrib/streaming/src/java" />
- <class location="${build.dir}/${core.final.name}.jar" />
- <class location="${build.dir}/${examples.final.name}.jar" />
- <class location="${build.dir}/${tools.final.name}.jar" />
- <class location="${build.dir}/contrib/streaming/${streaming.final.name}.jar" />
- </findbugs>
- <xslt style="${findbugs.home}/src/xsl/default.xsl"
- in="${findbugs.report.xmlfile}"
- out="${findbugs.report.htmlfile}"/>
- </target>
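- <!--
- Illustrative usage; the path below is an example only:
- ant findbugs -Dfindbugs.home=/opt/findbugs
- The target depends on "tar" so that FindBugs can analyze the built jars;
- the HTML report lands in ${test.build.dir}/findbugs/hadoop-findbugs-report.html.
- -->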
-
- <target name="check-for-findbugs">
- <available property="findbugs.present"
- file="${findbugs.home}/lib/findbugs.jar" />
- </target>
- <!-- ================================================================== -->
- <!-- Documentation -->
- <!-- ================================================================== -->
-
- <target name="docs" depends="forrest.check" description="Generate forrest-based documentation. To use, specify -Dforrest.home=<base of Apache Forrest installation> on the command line." if="forrest.home">
- <exec dir="${docs.src}" executable="${forrest.home}/bin/forrest"
- failonerror="true">
- <env key="JAVA_HOME" value="${java5.home}"/>
- </exec>
- <copy todir="${build.docs}">
- <fileset dir="${docs.src}/build/site/" />
- </copy>
- <copy file="${docs.src}/releasenotes.html" todir="${build.docs}"/>
- <style basedir="${core.src.dir}" destdir="${build.docs}"
- includes="core-default.xml" style="conf/configuration.xsl"/>
- <style basedir="${hdfs.src.dir}" destdir="${build.docs}"
- includes="hdfs-default.xml" style="conf/configuration.xsl"/>
- <style basedir="${mapred.src.dir}" destdir="${build.docs}"
- includes="mapred-default.xml" style="conf/configuration.xsl"/>
- <antcall target="changes-to-html"/>
- <antcall target="cn-docs"/>
- </target>
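- <!--
- Illustrative usage; both paths are examples only:
- ant docs -Dforrest.home=/opt/apache-forrest -Djava5.home=/opt/jdk1.5
- The forrest.check and java5.check targets below fail fast when either
- property is unset.
- -->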
- <target name="cn-docs" depends="forrest.check, init"
- description="Generate forrest-based Chinese documentation. To use, specify -Dforrest.home=<base of Apache Forrest installation> on the command line."
- if="forrest.home">
- <exec dir="${src.docs.cn}" executable="${forrest.home}/bin/forrest" failonerror="true">
- <env key="LANG" value="en_US.utf8"/>
- <env key="JAVA_HOME" value="${java5.home}"/>
- </exec>
- <copy todir="${build.docs.cn}">
- <fileset dir="${src.docs.cn}/build/site/" />
- </copy>
- <style basedir="${core.src.dir}" destdir="${build.docs.cn}"
- includes="core-default.xml" style="conf/configuration.xsl"/>
- <style basedir="${hdfs.src.dir}" destdir="${build.docs.cn}"
- includes="hdfs-default.xml" style="conf/configuration.xsl"/>
- <style basedir="${mapred.src.dir}" destdir="${build.docs.cn}"
- includes="mapred-default.xml" style="conf/configuration.xsl"/>
- <antcall target="changes-to-html"/>
- </target>
- <target name="forrest.check" unless="forrest.home" depends="java5.check">
- <fail message="'forrest.home' is not defined. Please pass -Dforrest.home=<base of Apache Forrest installation> to Ant on the command-line." />
- </target>
- <target name="java5.check" unless="java5.home">
- <fail message="'java5.home' is not defined. Forrest requires Java 5. Please pass -Djava5.home=<base of Java 5 distribution> to Ant on the command-line." />
- </target>
-
- <target name="javadoc-dev" description="Generate javadoc for hadoop developers">
- <mkdir dir="${build.javadoc.dev}"/>
- <javadoc
- overview="${core.src.dir}/overview.html"
- packagenames="org.apache.hadoop.*"
- destdir="${build.javadoc.dev}"
- author="true"
- version="true"
- use="true"
- windowtitle="${Name} ${version} API"
- doctitle="${Name} ${version} Developer API"
- bottom="Copyright &copy; ${year} The Apache Software Foundation"
- maxmemory="${javadoc.maxmemory}"
- >
- <packageset dir="${core.src.dir}"/>
- <packageset dir="${mapred.src.dir}"/>
- <packageset dir="${hdfs.src.dir}"/>
- <packageset dir="${examples.dir}"/>
- <packageset dir="${tools.src}"/>
- <packageset dir="src/contrib/streaming/src/java"/>
- <packageset dir="src/contrib/data_join/src/java"/>
- <packageset dir="src/contrib/index/src/java"/>
- <link href="${javadoc.link.java}"/>
- <classpath >
- <path refid="classpath" />
- <fileset dir="src/contrib/">
- <include name="*/lib/*.jar" />
- </fileset>
- <pathelement path="${java.class.path}"/>
- </classpath>
- <group title="Core" packages="org.apache.*"/>
- <group title="Examples" packages="org.apache.hadoop.examples*"/>
- <group title="contrib: Streaming" packages="org.apache.hadoop.streaming*"/>
- <group title="contrib: DataJoin" packages="org.apache.hadoop.contrib.utils.join*"/>
- <group title="contrib: Index" packages="org.apache.hadoop.contrib.index*"/>
- </javadoc>
- </target>
- <target name="javadoc" depends="compile, ivy-retrieve-javadoc" description="Generate javadoc">
- <mkdir dir="${build.javadoc}"/>
- <javadoc
- overview="${core.src.dir}/overview.html"
- packagenames="org.apache.hadoop.*"
- destdir="${build.javadoc}"
- author="true"
- version="true"
- use="true"
- windowtitle="${Name} ${version} API"
- doctitle="${Name} ${version} API"
- bottom="Copyright &copy; ${year} The Apache Software Foundation"
- maxmemory="${javadoc.maxmemory}"
- >
- <packageset dir="${core.src.dir}"/>
- <packageset dir="${mapred.src.dir}"/>
- <packageset dir="${examples.dir}"/>
- <packageset dir="src/contrib/streaming/src/java"/>
- <packageset dir="src/contrib/data_join/src/java"/>
- <packageset dir="src/contrib/index/src/java"/>
- <packageset dir="src/contrib/failmon/src/java/"/>
-
- <link href="${javadoc.link.java}"/>
- <classpath >
- <path refid="classpath" />
- <fileset dir="src/contrib/">
- <include name="*/lib/*.jar" />
- </fileset>
- <path refid="javadoc-classpath"/>
- <pathelement path="${java.class.path}"/>
- <pathelement location="${build.tools}"/>
- </classpath>
- <group title="Core" packages="org.apache.*"/>
- <group title="Examples" packages="org.apache.hadoop.examples*"/>
- <group title="contrib: Streaming" packages="org.apache.hadoop.streaming*"/>
- <group title="contrib: DataJoin" packages="org.apache.hadoop.contrib.utils.join*"/>
- <group title="contrib: Index" packages="org.apache.hadoop.contrib.index*"/>
- <group title="contrib: FailMon" packages="org.apache.hadoop.contrib.failmon*"/>
- </javadoc>
- </target>
- <target name="api-xml" depends="ivy-retrieve-jdiff,javadoc,write-null">
- <javadoc maxmemory="${javadoc.maxmemory}">
- <doclet name="jdiff.JDiff"
- path="${jdiff.jar}:${xerces.jar}">
- <param name="-apidir" value="${jdiff.xml.dir}"/>
- <param name="-apiname" value="hadoop ${version}"/>
- </doclet>
- <packageset dir="src/core"/>
- <packageset dir="src/mapred"/>
- <packageset dir="src/tools"/>
- <packageset dir="${tools.src}"/>
- <packageset dir="${tools.src}"/>
- <classpath >
- <path refid="classpath" />
- <path refid="jdiff-classpath" />
- <pathelement path="${java.class.path}"/>
- </classpath>
- </javadoc>
- </target>
-
- <target name="write-null">
- <exec executable="touch">
- <arg value="${jdiff.home}/Null.java"/>
- </exec>
- </target>
- <target name="api-report" depends="ivy-retrieve-jdiff,api-xml">
- <mkdir dir="${jdiff.build.dir}"/>
- <javadoc sourcepath="src/core,src/hdfs,src,mapred,src/tools"
- destdir="${jdiff.build.dir}"
- sourceFiles="${jdiff.home}/Null.java"
- maxmemory="${javadoc.maxmemory}">
- <doclet name="jdiff.JDiff"
- path="${jdiff.jar}:${xerces.jar}">
- <param name="-oldapi" value="hadoop ${jdiff.stable}"/>
- <param name="-newapi" value="hadoop ${version}"/>
- <param name="-oldapidir" value="${jdiff.xml.dir}"/>
- <param name="-newapidir" value="${jdiff.xml.dir}"/>
- <param name="-javadocold" value="${jdiff.stable.javadoc}"/>
- <param name="-javadocnew" value="../../api/"/>
- <param name="-stats"/>
- </doclet>
- <classpath >
- <path refid="classpath" />
- <path refid="jdiff-classpath"/>
- <pathelement path="${java.class.path}"/>
- </classpath>
- </javadoc>
- </target>
-
- <target name="changes-to-html" description="Convert CHANGES.txt into an html file">
- <mkdir dir="${build.docs}"/>
- <exec executable="perl" input="CHANGES.txt" output="${build.docs}/changes.html" failonerror="true">
- <arg value="${changes.src}/changes2html.pl"/>
- </exec>
- <copy todir="${build.docs}">
- <fileset dir="${changes.src}" includes="*.css"/>
- </copy>
- </target>
- <!-- ================================================================== -->
- <!-- D I S T R I B U T I O N -->
- <!-- ================================================================== -->
- <!-- -->
- <!-- ================================================================== -->
- <target name="package" depends="compile, jar, javadoc, docs, cn-docs, api-report, examples, tools-jar, jar-test, ant-tasks, package-librecordio"
- description="Build distribution">
- <delete dir="${dist.dir}"/>
- <mkdir dir="${dist.dir}"/>
- <mkdir dir="${dist.dir}/lib"/>
- <mkdir dir="${dist.dir}/libexec"/>
- <mkdir dir="${dist.dir}/contrib"/>
- <mkdir dir="${dist.dir}/bin"/>
- <mkdir dir="${dist.dir}/docs"/>
- <mkdir dir="${dist.dir}/docs/api"/>
- <mkdir dir="${dist.dir}/docs/jdiff"/>
- <mkdir dir="${dist.dir}/sbin"/>
- <mkdir dir="${dist.dir}/share/${name}/templates/conf"/>
- <copy todir="${dist.dir}/share/${name}/templates/conf" includeEmptyDirs="false">
- <fileset dir="${basedir}/src/packages/templates/conf">
- <include name="*"/>
- </fileset>
- </copy>
- <copy todir="${dist.dir}/lib" includeEmptyDirs="false" flatten="true">
- <fileset dir="${common.ivy.lib.dir}"/>
- </copy>
- <copy todir="${dist.dir}/lib" includeEmptyDirs="false">
- <fileset dir="lib">
- <exclude name="**/native/**"/>
- </fileset>
- </copy>
- <exec dir="${dist.dir}" executable="sh" failonerror="true">
- <env key="BASE_NATIVE_LIB_DIR" value="${lib.dir}/native"/>
- <env key="BUILD_NATIVE_DIR" value="${build.dir}/native"/>
- <env key="DIST_LIB_DIR" value="${dist.dir}/lib/native"/>
- <arg line="${native.src.dir}/packageNativeHadoop.sh"/>
- </exec>
- <subant target="package">
- <!--Pass down the version in case it's needed again, and the target
- distribution directory so contribs know where to install to.-->
- <property name="version" value="${version}"/>
- <property name="dist.dir" value="${dist.dir}"/>
- <fileset file="${contrib.dir}/build.xml"/>
- </subant>
- <copy todir="${dist.dir}/webapps">
- <fileset dir="${build.webapps}"/>
- </copy>
- <copy todir="${dist.dir}">
- <fileset file="${build.dir}/${name}-*-${version}.jar"/>
- </copy>
-
- <copy todir="${dist.dir}/bin">
- <fileset dir="bin"/>
- </copy>
- <copy todir="${dist.dir}/libexec">
- <fileset dir="bin">
- <include name="hadoop-config.sh"/>
- </fileset>
- </copy>
- <copy todir="${dist.dir}/conf">
- <fileset dir="${conf.dir}" excludes="**/*.template"/>
- </copy>
- <copy todir="${dist.dir}/docs">
- <fileset dir="${build.docs}"/>
- </copy>
- <copy file="ivy.xml" tofile="${dist.dir}/ivy.xml"/>
- <copy todir="${dist.dir}/ivy">
- <fileset dir="ivy"/>
- </copy>
- <copy todir="${dist.dir}">
- <fileset dir=".">
- <include name="*.txt" />
- </fileset>
- </copy>
- <copy todir="${dist.dir}/sbin">
- <fileset dir="${basedir}/src/packages">
- <include name="*.sh" />
- </fileset>
- </copy>
- <copy todir="${dist.dir}/src" includeEmptyDirs="true">
- <fileset dir="src" excludes="**/*.template **/docs/build/**/*"/>
- </copy>
-
- <copy todir="${dist.dir}/c++" includeEmptyDirs="false">
- <fileset dir="${build.dir}/c++"/>
- </copy>
- <copy todir="${dist.dir}/" file="build.xml"/>
- <copy todir="${dist.dir}/.eclipse.templates">
- <fileset dir="${basedir}/.eclipse.templates"/>
- </copy>
- <subant target="task-controller">
- <fileset dir="." includes="build.xml"/>
- </subant>
- <subant target="jsvc">
- <fileset dir="." includes="build.xml"/>
- </subant>
- <chmod perm="ugo+x" type="file" parallel="false">
- <fileset dir="${dist.dir}/bin"/>
- <fileset dir="${dist.dir}/sbin"/>
- <fileset dir="${dist.dir}/src/contrib/">
- <include name="*/bin/*" />
- </fileset>
- <fileset dir="${dist.dir}/src/contrib/ec2/bin/image"/>
- </chmod>
- <chmod perm="ugo+x" type="file">
- <fileset dir="${dist.dir}/src/c++/pipes/debug"/>
- </chmod>
- </target>
- <!-- ================================================================== -->
- <!-- Make release tarball -->
- <!-- ================================================================== -->
- <target name="tar" depends="package" description="Make release tarball">
- <macro_tar param.destfile="${build.dir}/${final.name}.tar.gz">
- <param.listofitems>
- <tarfileset dir="${build.dir}" mode="664">
- <exclude name="${final.name}/bin/*" />
- <exclude name="${final.name}/sbin/*" />
- <exclude name="${final.name}/libexec/*" />
- <exclude name="${final.name}/contrib/*/bin/*" />
- <exclude name="${final.name}/src/contrib/ec2/bin/*" />
- <exclude name="${final.name}/src/contrib/ec2/bin/image/*" />
- <include name="${final.name}/**" />
- <include name="${final.name}/eclipse.templates/**" />
- </tarfileset>
- <tarfileset dir="${build.dir}" mode="755">
- <include name="${final.name}/bin/*" />
- <include name="${final.name}/sbin/*" />
- <include name="${final.name}/libexec/*" />
- <include name="${final.name}/contrib/*/bin/*" />
- <include name="${final.name}/src/contrib/ec2/bin/*" />
- <include name="${final.name}/src/contrib/ec2/bin/image/*" />
- </tarfileset>
- </param.listofitems>
- </macro_tar>
- </target>
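- <!--
- Illustrative usage: "ant tar" builds ${build.dir}/${final.name}.tar.gz.
- The two tarfileset blocks exist so that scripts under bin/, sbin/,
- libexec/, contrib/*/bin and the ec2 bin directories are packed mode 755
- while everything else stays 664.
- -->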
- <target name="bin-package" depends="compile, jar, examples, tools-jar, jar-test, ant-tasks, package-librecordio"
- description="assembles artifacts for binary target">
- <delete dir="${dist.dir}"/>
- <mkdir dir="${dist.dir}"/>
- <mkdir dir="${dist.dir}/bin"/>
- <mkdir dir="${dist.dir}/etc/hadoop"/>
- <mkdir dir="${dist.dir}/lib"/>
- <mkdir dir="${dist.dir}/libexec"/>
- <mkdir dir="${dist.dir}/sbin"/>
- <mkdir dir="${dist.dir}/share/${name}/contrib"/>
- <mkdir dir="${dist.dir}/share/${name}/webapps"/>
- <mkdir dir="${dist.dir}/share/${name}/templates/conf"/>
- <copy todir="${dist.dir}/share/${name}/templates/conf" includeEmptyDirs="false">
- <fileset dir="${basedir}/src/packages/templates/conf">
- <include name="*"/>
- </fileset>
- </copy>
- <copy todir="${dist.dir}/share/${name}/lib" includeEmptyDirs="false" flatten="true">
- <fileset dir="${common.ivy.lib.dir}"/>
- </copy>
- <copy todir="${dist.dir}/share/${name}/lib" includeEmptyDirs="false">
- <fileset dir="lib">
- <exclude name="**/native/**"/>
- </fileset>
- </copy>
- <exec dir="${dist.dir}" executable="sh" failonerror="true">
- <env key="BASE_NATIVE_LIB_DIR" value="${lib.dir}/native"/>
- <env key="BUILD_NATIVE_DIR" value="${build.dir}/native"/>
- <env key="DIST_LIB_DIR" value="${dist.dir}/native"/>
- <arg line="${native.src.dir}/packageNativeHadoop.sh"/>
- </exec>
- <move todir="${dist.dir}/lib" flatten="true">
- <fileset dir="${dist.dir}/native">
- <include name="*/*"/>
- </fileset>
- </move>
- <delete dir="${dist.dir}/native"/>
- <subant target="package">
- <!--Pass down the version in case it's needed again, and the target
- distribution directory so contribs know where to install to.-->
- <property name="version" value="${version}"/>
- <property name="dist.dir" value="${dist.dir}/share/${name}"/>
- <fileset file="${contrib.dir}/build.xml"/>
- </subant>
- <copy todir="${dist.dir}/share/${name}/webapps">
- <fileset dir="${build.webapps}"/>
- </copy>
- <copy todir="${dist.dir}/share/${name}">
- <fileset file="${build.dir}/${name}-*-${version}.jar"/>
- </copy>
-
- <copy file="bin/hadoop" todir="${dist.dir}/bin"/>
- <copy todir="${dist.dir}/sbin">
- <fileset dir="bin">
- <include name="*"/>
- <exclude name="hadoop"/>
- <exclude name="hadoop-config.sh"/>
- </fileset>
- </copy>
- <copy todir="${dist.dir}/libexec">
- <fileset dir="bin">
- <include name="hadoop-config.sh"/>
- </fileset>
- </copy>
- <copy todir="${dist.dir}/etc/hadoop">
- <fileset dir="${conf.dir}" excludes="**/*.template"/>
- </copy>
- <copy todir="${dist.dir}/share/doc/${name}">
- <fileset dir=".">
- <include name="*.txt" />
- </fileset>
- </copy>
-
- <copy todir="${dist.dir}/sbin">
- <fileset dir="${basedir}/src/packages">
- <include name="*.sh" />
- </fileset>
- </copy>
- <copy todir="${dist.dir}/include/${name}" includeEmptyDirs="false" flatten="true">
- <fileset dir="${build.dir}/c++">
- <include name="**/include/${name}/*"/>
- </fileset>
- </copy>
- <copy todir="${dist.dir}/lib" includeEmptyDirs="false" flatten="true">
- <fileset dir="${build.dir}/c++">
- <include name="**/lib/*"/>
- </fileset>
- </copy>
- <subant target="task-controller">
- <fileset dir="." includes="build.xml"/>
- </subant>
- <subant target="jsvc">
- <fileset dir="." includes="build.xml"/>
- </subant>
- <chmod perm="ugo+x" type="file" parallel="false">
- <fileset dir="${dist.dir}/bin"/>
- <fileset dir="${dist.dir}/libexec"/>
- <fileset dir="${dist.dir}/sbin"/>
- </chmod>
- </target>
- <target name="binary-system" depends="bin-package, jar-system, jar-test-system"
- description="make system test package for deployment">
- <copy todir="${system-test-build-dir}/${final.name}">
- <fileset dir="${dist.dir}">
- </fileset>
- </copy>
- <copy todir="${system-test-build-dir}/${final.name}"
- file="${system-test-build-dir}/${core.final.name}.jar" overwrite="true"/>
- <copy todir="${system-test-build-dir}/${final.name}"
- file="${system-test-build-dir}/${test.final.name}.jar" overwrite="true"/>
- <macro_tar
- param.destfile="${system-test-build-dir}/${final.name}-bin.tar.gz">
- <param.listofitems>
- <tarfileset dir="${system-test-build-dir}" mode="664">
- <exclude name="${final.name}/bin/*" />
- <exclude name="${final.name}/src/**" />
- <exclude name="${final.name}/docs/**" />
- <include name="${final.name}/**" />
- </tarfileset>
- <tarfileset dir="${build.dir}" mode="755">
- <include name="${final.name}/bin/*" />
- </tarfileset>
- </param.listofitems>
- </macro_tar>
- </target>
-
- <target name="binary" depends="bin-package" description="Make tarball without source and documentation">
- <macro_tar param.destfile="${build.dir}/${final.name}-bin.tar.gz">
- <param.listofitems>
- <tarfileset dir="${build.dir}" mode="664">
- <exclude name="${final.name}/bin/*" />
- <exclude name="${final.name}/src/**" />
- <exclude name="${final.name}/docs/**" />
- <include name="${final.name}/**" />
- </tarfileset>
- <tarfileset dir="${build.dir}" mode="755">
- <include name="${final.name}/bin/*" />
- </tarfileset>
- </param.listofitems>
- </macro_tar>
- </target>
- <target name="rpm" depends="binary" description="Make rpm package">
- <mkdir dir="${package.buildroot}/BUILD" />
- <mkdir dir="${package.buildroot}/RPMS" />
- <mkdir dir="${package.buildroot}/SRPMS" />
- <mkdir dir="${package.buildroot}/SOURCES" />
- <mkdir dir="${package.buildroot}/SPECS" />
- <macro_tar param.destfile="${package.buildroot}/SOURCES/${final.name}-script.tar.gz">
- <param.listofitems>
- <tarfileset dir="${basedir}/src/packages/rpm/init.d" mode="755">
- <include name="*" />
- </tarfileset>
- </param.listofitems>
- </macro_tar>
- <copy todir="${package.buildroot}/SOURCES">
- <fileset dir="${build.dir}">
- <include name="${final.name}-bin.tar.gz" />
- </fileset>
- </copy>
- <copy file="${src.dir}/packages/rpm/spec/hadoop.spec" todir="${package.buildroot}/SPECS">
- <filterchain>
- <replacetokens>
- <token key="final.name" value="${final.name}" />
- <token key="version" value="${hadoop.version}" />
- <token key="package.release" value="${package.release}" />
- <token key="package.build.dir" value="${package.build.dir}" />
- <token key="package.prefix" value="${package.prefix}" />
- <token key="package.conf.dir" value="${package.conf.dir}" />
- <token key="package.log.dir" value="${package.log.dir}" />
- <token key="package.pid.dir" value="${package.pid.dir}" />
- </replacetokens>
- </filterchain>
- </copy>
- <rpm specFile="hadoop.spec" command="-bb --target ${os.arch}" topDir="${package.buildroot}" cleanBuildDir="true" failOnError="true"/>
- <copy todir="${build.dir}/" flatten="true">
- <fileset dir="${package.buildroot}/RPMS">
- <include name="**/${name}*.rpm" />
- </fileset>
- <fileset dir="${package.buildroot}/SRPMS">
- <include name="**/${name}*.rpm" />
- </fileset>
- </copy>
- <delete dir="${package.buildroot}" quiet="true" verbose="false"/>
- </target>
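- <!--
- Illustrative usage: "ant rpm" wraps the binary tarball plus the init.d
- scripts into SOURCES, token-filters hadoop.spec into SPECS, and runs
- rpmbuild (via the Ant rpm task) with "-bb - -target ${os.arch}"; the
- resulting .rpm files are copied back into ${build.dir}.
- -->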
- <target name="deb" depends="ant-tasks, binary" description="Make deb package">
- <taskdef name="deb"
- classname="org.vafer.jdeb.ant.DebAntTask">
- <classpath refid="classpath" />
- </taskdef>
- <mkdir dir="${package.build.dir}/hadoop.control" />
- <mkdir dir="${package.buildroot}/${package.prefix}/share/hadoop" />
- <copy todir="${package.buildroot}/${package.prefix}">
- <fileset dir="${build.dir}/${final.name}">
- <include name="**" />
- </fileset>
- </copy>
- <copy todir="${package.build.dir}/hadoop.control">
- <fileset dir="${src.dir}/packages/deb/hadoop.control">
- <exclude name="control" />
- </fileset>
- </copy>
- <copy file="${src.dir}/packages/deb/hadoop.control/control" todir="${package.build.dir}/hadoop.control">
- <filterchain>
- <replacetokens>
- <token key="final.name" value="${final.name}" />
- <token key="version" value="${hadoop.version}" />
- <token key="package.release" value="${package.release}" />
- <token key="package.build.dir" value="${package.build.dir}" />
- <token key="package.prefix" value="${package.prefix}" />
- <token key="package.conf.dir" value="${package.conf.dir}" />
- <token key="package.log.dir" value="${package.log.dir}" />
- <token key="package.pid.dir" value="${package.pid.dir}" />
- </replacetokens>
- </filterchain>
- </copy>
- <deb destfile="${package.buildroot}/${name}_${hadoop.version}-${package.release}_${os.arch}.deb" control="${package.build.dir}/hadoop.control">
- <tarfileset dir="${build.dir}/${final.name}" filemode="644" prefix="${package.prefix}">
- <exclude name="bin/*" />
- <exclude name="sbin/*" />
- <exclude name="libexec/*" />
- <exclude name="contrib/*/bin/*" />
- <exclude name="src/contrib/ec2/bin/*" />
- <exclude name="src/contrib/ec2/bin/image/*" />
- <exclude name="etc" />
- <exclude name="etc/**" />
- <include name="**" />
- </tarfileset>
- <tarfileset dir="${build.dir}/${final.name}" filemode="755" prefix="${package.prefix}">
- <include name="bin/*" />
- <include name="sbin/*" />
- <include name="libexec/*" />
- <include name="contrib/*/bin/*" />
- <include name="src/contrib/ec2/bin/*" />
- <include name="src/contrib/ec2/bin/image/*" />
- </tarfileset>
- <tarfileset dir="${build.dir}/${final.name}/etc/hadoop" filemode="644" prefix="${package.conf.dir}">
- <exclude name="core-site.xml" />
- <exclude name="hdfs-site.xml" />
- <exclude name="mapred-site.xml" />
- <include name="**" />
- </tarfileset>
- <tarfileset dir="${basedir}/src/packages/deb/init.d" filemode="755" prefix="/etc/init.d">
- <include name="**" />
- </tarfileset>
- </deb>
- <copy todir="${build.dir}/" flatten="true">
- <fileset dir="${package.buildroot}">
- <include name="**/${name}*.deb" />
- </fileset>
- </copy>
- <delete dir="${package.buildroot}" quiet="true" verbose="false"/>
- </target>
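- <!--
- Illustrative usage: "ant deb" drives the jdeb DebAntTask: the control
- directory is token-filtered the same way as the RPM spec, the
- site-specific *-site.xml files are excluded from ${package.conf.dir},
- and the finished .deb is copied back into ${build.dir}.
- -->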
- <!-- ================================================================== -->
- <!-- Perform audit activities for the release -->
- <!-- ================================================================== -->
- <target name="releaseaudit" depends="package,ivy-retrieve-releaseaudit" description="Release Audit activities">
- <fail unless="rat.present" message="Failed to load class [${rat.reporting.classname}]."/>
- <java classname="${rat.reporting.classname}" fork="true">
- <classpath refid="releaseaudit-classpath"/>
- <arg value="${build.dir}/${final.name}"/>
- </java>
- </target>
- <!-- ================================================================== -->
- <!-- Clean. Delete the build files and their directories -->
- <!-- ================================================================== -->
- <target name="clean" depends="clean-contrib, clean-sign, clean-fi" description="Clean. Delete the build files, and their directories">
- <delete dir="${build.dir}"/>
- <delete dir="${docs.src}/build"/>
- <delete dir="${src.docs.cn}/build"/>
- <delete dir="${package.buildroot}"/>
- <delete file="${basedir}/ivy/hadoop-core-pom.xml"/>
- <delete file="${basedir}/ivy/hadoop-test-pom.xml"/>
- <delete file="${basedir}/ivy/hadoop-examples-pom.xml"/>
- <delete file="${basedir}/ivy/hadoop-tools-pom.xml"/>
- <delete file="${basedir}/ivy/hadoop-streaming-pom.xml"/>
- </target>
- <target name="clean-sign" description="Clean. Delete .asc files">
- <delete>
- <fileset dir="." includes="**/**/*.asc"/>
- </delete>
- </target>
-
- <target name="veryclean" depends="clean" description="Delete mvn ant task jar and ivy ant taks jar">
- <delete file="${ant_task.jar}"/>
- <delete file="${ivy.jar}"/>
- </target>
- <!-- ================================================================== -->
- <!-- Clean contrib target. For now, must be called explicitly -->
- <!-- Using subant instead of ant as a workaround for Ant bug 30569 -->
- <!-- ================================================================== -->
- <target name="clean-contrib">
- <subant target="clean">
- <fileset file="src/contrib/build.xml"/>
- </subant>
- </target>
-
- <target name="test-c++-libhdfs" depends="compile-c++-libhdfs, compile-core" if="islibhdfs">
- <delete dir="${test.libhdfs.dir}"/>
- <mkdir dir="${test.libhdfs.dir}"/>
- <mkdir dir="${test.libhdfs.dir}/logs"/>
- <mkdir dir="${test.libhdfs.dir}/hdfs/name"/>
- <exec dir="${build.c++.libhdfs}" executable="${make.cmd}" failonerror="true">
- <env key="OS_NAME" value="${os.name}"/>
- <env key="OS_ARCH" value="${os.arch}"/>
- <env key="JVM_ARCH" value="${jvm.arch}"/>
- <env key="LIBHDFS_BUILD_DIR" value="${build.c++.libhdfs}"/>
- <env key="HADOOP_HOME" value="${basedir}"/>
- <env key="HADOOP_CONF_DIR" value="${test.libhdfs.conf.dir}"/>
- <env key="HADOOP_LOG_DIR" value="${test.libhdfs.dir}/logs"/>
- <env key="LIBHDFS_SRC_DIR" value="${c++.libhdfs.src}"/>
- <env key="LIBHDFS_INSTALL_DIR" value="${install.c++}/lib"/>
- <env key="LIB_DIR" value="${common.ivy.lib.dir}"/>
- <arg value="test"/>
- </exec>
- </target>
- <!-- ================================================================== -->
- <!-- librecordio targets. -->
- <!-- ================================================================== -->
- <target name="compile-librecordio" depends="init" if="librecordio" >
- <mkdir dir="${build.librecordio}"/>
- <exec dir="${librecordio.src}" executable="${make.cmd}" failonerror="true">
- <env key="XERCESCROOT" value="${xercescroot}"/>
- <env key="LIBRECORDIO_BUILD_DIR" value="${build.librecordio}"/>
- </exec>
- </target>
-
- <target name="test-librecordio" depends="compile-librecordio, compile-core" if="librecordio">
- <delete dir="${librecordio.test.dir}"/>
- <mkdir dir="${librecordio.test.dir}"/>
- <exec dir="${librecordio.src}/test" executable="${make.cmd}" failonerror="true">
- <env key="HADOOP_HOME" value="${basedir}"/>
- <env key="XERCESCROOT" value="${xercescroot}"/>
- <env key="LIBRECORDIO_BUILD_DIR" value="${build.librecordio}"/>
- <env key="LIBRECORDIO_TEST_DIR" value="${librecordio.test.dir}"/>
- <arg value="all"/>
- </exec>
- </target>
- <target name="package-librecordio" depends="compile-librecordio" if="librecordio">
- <mkdir dir="${dist.dir}/lib/librecordio"/>
- <copy todir="${dist.dir}/lib/librecordio">
- <fileset dir="${build.librecordio}" casesensitive="yes" followsymlinks="false">
- <exclude name="**/tests/**"/>
- <exclude name="*.so"/>
- <exclude name="*.o"/>
- </fileset>
- </copy>
- <chmod perm="ugo+x" type="file">
- <fileset dir="${dist.dir}/lib/librecordio"/>
- </chmod>
- </target>
-
- <target name="create-c++-configure" depends="init" if="compile.c++">
- <exec executable="autoreconf" dir="${c++.utils.src}" searchpath="yes"
- failonerror="yes">
- <arg value="-if"/>
- </exec>
- <exec executable="autoreconf" dir="${c++.pipes.src}" searchpath="yes"
- failonerror="yes">
- <arg value="-if"/>
- </exec>
- <exec executable="autoreconf" dir="${c++.examples.pipes.src}"
- searchpath="yes" failonerror="yes">
- <arg value="-if"/>
- </exec>
- <antcall target="create-c++-configure-libhdfs"/>
- </target>
-
- <target name="create-c++-configure-libhdfs" depends="check-c++-libhdfs" if="islibhdfs">
- <exec executable="autoreconf" dir="${c++.libhdfs.src}"
- searchpath="yes" failonerror="yes">
- <arg value="-if"/>
- </exec>
- </target>
- <target name="check-c++-makefiles" depends="init" if="compile.c++">
- <condition property="need.c++.utils.makefile">
- <not> <available file="${build.c++.utils}/Makefile"/> </not>
- </condition>
- <condition property="need.c++.pipes.makefile">
- <not> <available file="${build.c++.pipes}/Makefile"/> </not>
- </condition>
- <condition property="need.c++.examples.pipes.makefile">
- <not> <available file="${build.c++.examples.pipes}/Makefile"/> </not>
- </condition>
- </target>
- <target name="check-c++-libhdfs">
- <condition property="islibhdfs">
- <and>
- <isset property="compile.c++"/>
- <isset property="libhdfs"/>
- </and>
- </condition>
- </target>
- <target name="check-c++-makefile-libhdfs" depends="init,check-c++-libhdfs" if="islibhdfs">
- <condition property="need.c++.libhdfs.makefile">
- <not> <available file="${build.c++.libhdfs}/Makefile"/> </not>
- </condition>
- </target>
- <target name="create-c++-libhdfs-makefile" depends="check-c++-makefile-libhdfs"
- if="need.c++.libhdfs.makefile">
- <mkdir dir="${build.c++.libhdfs}"/>
- <chmod file="${c++.libhdfs.src}/configure" perm="ugo+x"/>
- <exec executable="${c++.libhdfs.src}/configure" dir="${build.c++.libhdfs}"
- failonerror="yes">
- <env key="ac_cv_func_malloc_0_nonnull" value="yes"/>
- <env key="JVM_ARCH" value="${jvm.arch}"/>
- <arg value="--prefix=${install.c++}"/>
- </exec>
- </target>
- <target name="create-c++-utils-makefile" depends="check-c++-makefiles"
- if="need.c++.utils.makefile">
- <mkdir dir="${build.c++.utils}"/>
- <chmod file="${c++.utils.src}/configure" perm="ugo+x"/>
- <exec executable="${c++.utils.src}/configure" dir="${build.c++.utils}"
- failonerror="yes">
- <arg value="--prefix=${install.c++}"/>
- </exec>
- </target>
- <target name="compile-c++-utils" depends="create-c++-utils-makefile"
- if="compile.c++">
- <exec executable="${make.cmd}" dir="${build.c++.utils}" searchpath="yes"
- failonerror="yes">
- <arg value="install"/>
- </exec>
- </target>
- <target name="create-c++-pipes-makefile" depends="check-c++-makefiles"
- if="need.c++.pipes.makefile">
- <mkdir dir="${build.c++.pipes}"/>
- <chmod file="${c++.pipes.src}/configure" perm="ugo+x"/>
- <exec executable="${c++.pipes.src}/configure" dir="${build.c++.pipes}"
- failonerror="yes">
- <arg value="--prefix=${install.c++}"/>
- </exec>
- </target>
- <target name="compile-c++-pipes"
- depends="create-c++-pipes-makefile,compile-c++-utils"
- if="compile.c++">
- <exec executable="${make.cmd}" dir="${build.c++.pipes}" searchpath="yes"
- failonerror="yes">
- <arg value="install"/>
- </exec>
- </target>
- <target name="compile-c++"
- depends="compile-c++-pipes"/>
- <target name="create-c++-examples-pipes-makefile"
- depends="check-c++-makefiles"
- if="need.c++.examples.pipes.makefile">
- <mkdir dir="${build.c++.examples.pipes}"/>
- <chmod file="${c++.examples.pipes.src}/configure" perm="ugo+x"/>
- <exec executable="${c++.examples.pipes.src}/configure"
- dir="${build.c++.examples.pipes}"
- failonerror="yes">
- <arg value="--prefix=${install.c++.examples}"/>
- <arg value="--with-hadoop-utils=${install.c++}"/>
- <arg value="--with-hadoop-pipes=${install.c++}"/>
- </exec>
- </target>
- <target name="compile-c++-examples-pipes"
- depends="create-c++-examples-pipes-makefile,compile-c++-pipes"
- if="compile.c++">
- <exec executable="${make.cmd}" dir="${build.c++.examples.pipes}" searchpath="yes"
- failonerror="yes">
- <arg value="install"/>
- </exec>
- </target>
- <target name="compile-c++-examples"
- depends="compile-c++-examples-pipes"/>
- <target name="compile-c++-libhdfs" depends="create-c++-libhdfs-makefile" if="islibhdfs">
- <exec executable="${make.cmd}" dir="${build.c++.libhdfs}" searchpath="yes"
- failonerror="yes">
- <env key="ac_cv_func_malloc_0_nonnull" value="yes"/>
- <env key="JVM_ARCH" value="${jvm.arch}"/>
- <arg value="install"/>
- </exec>
- </target>
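- <!--
- Illustrative usage (property names from this file): the native pieces are
- opt-in. The c++ targets are guarded by the compile.c++ property, e.g.
- "ant compile-c++ -Dcompile.c++=true" for utils/pipes, and libhdfs
- additionally needs -Dlibhdfs=true (see check-c++-libhdfs above).
- -->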
- <target name="compile-ant-tasks" depends="compile-core">
- <javac
- encoding="${build.encoding}"
- srcdir="${anttasks.dir}"
- includes="org/apache/hadoop/ant/**/*.java"
- destdir="${build.anttasks}"
- debug="${javac.debug}"
- optimize="${javac.optimize}"
- target="${javac.version}"
- source="${javac.version}"
- deprecation="${javac.deprecation}">
- <compilerarg line="${javac.args}"/>
- <classpath refid="classpath"/>
- </javac>
- </target>
- <target name="ant-tasks" depends="jar, compile-ant-tasks">
- <copy file="${anttasks.dir}/org/apache/hadoop/ant/antlib.xml"
- todir="${build.anttasks}/org/apache/hadoop/ant"/>
- <jar destfile="${build.dir}/${ant.final.name}.jar">
- <fileset dir="${build.anttasks}"/>
- </jar>
- </target>
- <target name="clover" depends="clover.setup, clover.info" description="Instrument the Unit tests using Clover. To use, specify -Dclover.home=<base of clover installation> -Drun.clover=true on the command line."/>
- <target name="clover.setup" if="clover.enabled">
- <taskdef resource="cloverlib.xml" classpath="${clover.jar}"/>
- <mkdir dir="${clover.db.dir}"/>
- <clover-setup initString="${clover.db.dir}/hadoop_coverage.db">
- <fileset dir="${src.dir}" includes="core/**/* tools/**/* hdfs/**/* mapred/**/*"/>
- <testsources dir="${test.src.dir}" />
- </clover-setup>
- </target>
- <target name="clover.info" unless="clover.present">
- <echo>
- Clover not found. Code coverage reports disabled.
- </echo>
- </target>
- <target name="clover.check">
- <fail unless="clover.present">
- ##################################################################
- Clover not found.
- Please specify -Dclover.home=&lt;base of clover installation&gt;
- on the command line.
- ##################################################################
- </fail>
- </target>
- <target name="generate-clover-reports" depends="clover.check, clover">
- <mkdir dir="${clover.report.dir}"/>
- <clover-report>
- <current outfile="${clover.report.dir}" title="${final.name}">
- <format type="html"/>
- </current>
- </clover-report>
- <clover-report>
- <current outfile="${clover.report.dir}/clover.xml" title="${final.name}">
- <format type="xml"/>
- </current>
- </clover-report>
- </target>
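- <!--
- Illustrative usage; the path below is an example only:
- ant generate-clover-reports -Drun.clover=true -Dclover.home=/opt/clover
- This instruments the sources via clover.setup and then emits both HTML
- and XML coverage reports into ${clover.report.dir}.
- -->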
- <target name="findbugs.check" depends="check-for-findbugs" unless="findbugs.present">
- <fail message="'findbugs.home' is not defined. Please pass -Dfindbugs.home=<base of Findbugs installation> to Ant on the command-line." />
- </target>
- <target name="patch.check" unless="patch.file">
- <fail message="'patch.file' is not defined. Please pass -Dpatch.file=<location of patch file> to Ant on the command-line." />
- </target>
- <target name="test-patch" depends="patch.check,findbugs.check,forrest.check">
- <exec executable="bash" failonerror="true">
- <arg value="${basedir}/src/test/bin/test-patch.sh"/>
- <arg value="DEVELOPER"/>
- <arg value="${patch.file}"/>
- <arg value="${scratch.dir}"/>
- <arg value="${svn.cmd}"/>
- <arg value="${grep.cmd}"/>
- <arg value="${patch.cmd}"/>
- <arg value="${findbugs.home}"/>
- <arg value="${forrest.home}"/>
- <arg value="${basedir}"/>
- <arg value="${java5.home}"/>
- </exec>
- </target>
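- <!--
- Illustrative usage; every path below is an example only (svn.cmd,
- grep.cmd and patch.cmd are passed through as well):
- ant test-patch -Dpatch.file=/tmp/example.patch -Dscratch.dir=/tmp/scratch \
-   -Dfindbugs.home=/opt/findbugs -Dforrest.home=/opt/apache-forrest \
-   -Djava5.home=/opt/jdk1.5
- The patch.check, findbugs.check and forrest.check targets above fail
- fast when a required property is missing.
- -->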
- <target name="hudson-test-patch" depends="findbugs.check,forrest.check">
- <exec executable="bash" failonerror="true">
- <arg value="${basedir}/src/test/bin/test-patch.sh"/>
- <arg value="HUDSON"/>
- <arg value="${scratch.dir}"/>
- <arg value="${support.dir}"/>
- <arg value="${ps.cmd}"/>
- <arg value="${wget.cmd}"/>
- <arg value="${jiracli.cmd}"/>
- <arg value="${svn.cmd}"/>
- <arg value="${grep.cmd}"/>
- <arg value="${patch.cmd}"/>
- <arg value="${findbugs.home}"/>
- <arg value="${forrest.home}"/>
- <arg value="${eclipse.home}"/>
- <arg value="${python.home}"/>
- <arg value="${basedir}"/>
- <arg value="${trigger.url}"/>
- <arg value="${jira.passwd}"/>
- <arg value="${java5.home}"/>
- </exec>
- </target>
-
- <condition property="ant-eclipse.jar.exists">
- <available file="${build.dir}/lib/ant-eclipse-1.0-jvm1.2.jar"/>
- </condition>
- <target name="ant-eclipse-download" unless="ant-eclipse.jar.exists"
- description="Downloads the ant-eclipse binary.">
- <get src="http://downloads.sourceforge.net/project/ant-eclipse/ant-eclipse/1.0/ant-eclipse-1.0.bin.tar.bz2"
- dest="${build.dir}/ant-eclipse-1.0.bin.tar.bz2" usetimestamp="false" />
- <untar src="${build.dir}/ant-eclipse-1.0.bin.tar.bz2"
- dest="${build.dir}" compression="bzip2">
- <patternset>
- <include name="lib/ant-eclipse-1.0-jvm1.2.jar"/>
- </patternset>
- </untar>
- <delete file="${build.dir}/ant-eclipse-1.0.bin.tar.bz2" />
- </target>
- <target name="eclipse"
- depends="init,ant-eclipse-download,ivy-retrieve-common,ivy-retrieve-test,compile-core-test"
- description="Create eclipse project files">
- <pathconvert property="eclipse.project">
- <path path="${basedir}"/>
- <regexpmapper from="^.*/([^/]+)$$" to="\1" handledirsep="yes"/>
- </pathconvert>
- <taskdef name="eclipse"
- classname="prantl.ant.eclipse.EclipseTask"
- classpath="${build.dir}/lib/ant-eclipse-1.0-jvm1.2.jar" />
- <eclipse updatealways="true">
- <project name="${eclipse.project}" />
- <classpath>
- <source path="${src.dir}/core"
- output="${build.dir.eclipse-main-classes}" />
- <source path="${src.dir}/hdfs"
- output="${build.dir.eclipse-main-classes}" />
- <source path="${src.dir}/mapred"
- output="${build.dir.eclipse-main-classes}" />
- <source path="${test.src.dir}/"
- output="${build.dir.eclipse-test-classes}"
- excluding="aop/|mapred/|system/|bin/|ddl/|lib/|tools/"/>
- <source path="${test.src.dir}/aop"
- output="${build.dir.eclipse-test-classes}" />
- <source path="${test.generated.dir}"
- output="${build.dir.eclipse-test-generated-classes}" />
- <source path="${tools.src}"
- output="${build.dir.eclipse-tools-classes}" />
- <source path="${examples.dir}"
- output="${build.dir.eclipse-example-classes}" />
- <source path="${contrib.dir}/data_join/src/examples"
- output="${build.dir.eclipse-contrib-classes}/data_join/examples" />
- <source path="${contrib.dir}/data_join/src/test"
- output="${build.dir.eclipse-contrib-classes}/data_join/test" />
- <source path="${contrib.dir}/data_join/src/java"
- output="${build.dir.eclipse-contrib-classes}/data_join/java" />
- <source path="${contrib.dir}/streaming/src/java"
- output="${build.dir.eclipse-contrib-classes}/streaming/main" />
- <source path="${contrib.dir}/streaming/src/test"
- output="${build.dir.eclipse-contrib-classes}/streaming/test"
- excluding="system/"/>
- <source path="${contrib.dir}/vaidya/src/java"
- output="${build.dir.eclipse-contrib-classes}/vaidya/main" />
- <source path="${contrib.dir}/fairscheduler/src/java"
- output="${build.dir.eclipse-contrib-classes}/fairscheduler/main" />
- <source path="${contrib.dir}/fairscheduler/src/test"
- output="${build.dir.eclipse-contrib-classes}/fairscheduler/test" />
- <source path="${contrib.dir}/gridmix/src/java"
- output="${build.dir.eclipse-contrib-classes}/gridmix/main" />
- <source path="${contrib.dir}/gridmix/src/test"
- output="${build.dir.eclipse-contrib-classes}/gridmix/test"
- excluding="system/"/>
- <source path="${contrib.dir}/capacity-scheduler/src/java"
- output="${build.dir.eclipse-contrib-classes}/capacity-scheduler/main" />
- <source path="${contrib.dir}/capacity-scheduler/src/test"
- output="${build.dir.eclipse-contrib-classes}/capacity-scheduler/test" />
- <output path="${build.dir.eclipse-main-classes}" />
- <library pathref="src.lib.classpath" exported="false" />
- <library pathref="test.lib.classpath" exported="false" />
- <variable path="ANT_HOME/lib/ant.jar" exported="false" />
- <library path="${conf.dir}" exported="false" />
- <library path="${build.dir.eclipse-test-resources}" exported="false" />
- </classpath>
- </eclipse>
- <copy todir="." overwrite="true">
- <fileset dir=".eclipse.templates">
- <exclude name="**/README.txt"/>
- </fileset>
- <filterset>
- <filter token="PROJECT" value="${eclipse.project}"/>
- </filterset>
- </copy>
- <!-- copy all of the jsp and static files -->
- <copy todir="${build.dir.eclipse-test-resources-webapps}">
- <fileset dir="${build.webapps}">
- </fileset>
- </copy>
- </target>
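- <!--
- Illustrative usage: "ant eclipse" downloads the ant-eclipse task if
- needed, generates the .project and .classpath files from the source
- trees listed above, and fills in the .eclipse.templates files; the
- directory can then be imported into Eclipse as an existing project.
- -->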
- <target name="ivy-init-dirs">
- <mkdir dir="${build.ivy.dir}" />
- <mkdir dir="${build.ivy.lib.dir}" />
- <mkdir dir="${build.ivy.report.dir}" />
- </target>
- <target name="ivy-probe-antlib" >
- <condition property="ivy.found">
- <typefound uri="antlib:org.apache.ivy.ant" name="cleancache"/>
- </condition>
- </target>
- <target name="ivy-download" description="To download ivy" unless="offline">
- <get src="${ivy_repo_url}" dest="${ivy.jar}" usetimestamp="true"/>
- </target>
- <!--
- To avoid Ivy leaking things across big projects, always load Ivy in the same classloader.
- We also skip loading Ivy if it is already present, to make sure all is well.
- -->
- <target name="ivy-init-antlib" depends="ivy-download,ivy-init-dirs,ivy-probe-antlib" unless="ivy.found">
- <typedef uri="antlib:org.apache.ivy.ant" onerror="fail"
- loaderRef="ivyLoader">
- <classpath>
- <pathelement location="${ivy.jar}"/>
- </classpath>
- </typedef>
- <fail >
- <condition >
- <not>
- <typefound uri="antlib:org.apache.ivy.ant" name="cleancache"/>
- </not>
- </condition>
- You need Apache Ivy 2.0 or later from http://ant.apache.org/
- It could not be loaded from ${ivy_repo_url}
- </fail>
- </target>
- <target name="ivy-init" depends="ivy-init-antlib" >
- <!--Configure Ivy by reading in the settings file
- If anyone has already read in a settings file into this settings ID, it gets priority
- -->
- <ivy:configure settingsid="${ant.project.name}.ivy.settings" file="${ivysettings.xml}" override='false'/>
- </target>
- <target name="ivy-resolve" depends="ivy-init">
- <ivy:resolve settingsRef="${ant.project.name}.ivy.settings"/>
- </target>
- <target name="ivy-resolve-javadoc" depends="ivy-init">
- <ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="javadoc"/>
- </target>
- <target name="ivy-resolve-releaseaudit" depends="ivy-init">
- <ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="releaseaudit"/>
- </target>
- <target name="ivy-resolve-test" depends="ivy-init">
- <ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="test" />
- </target>
- <target name="ivy-resolve-common" depends="ivy-init">
- <ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="common" />
- </target>
- <target name="ivy-resolve-jdiff" depends="ivy-init">
- <ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="jdiff" />
- </target>
- <target name="ivy-resolve-checkstyle" depends="ivy-init">
- <ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="checkstyle"/>
- </target>
- <target name="ivy-retrieve" depends="ivy-resolve"
- description="Retrieve Ivy-managed artifacts">
- <ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"
- pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"/>
- </target>
- <target name="ivy-retrieve-checkstyle" depends="ivy-resolve-checkstyle"
- description="Retrieve Ivy-managed artifacts for the checkstyle configurations">
- <ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"
- pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"/>
- <ivy:cachepath pathid="checkstyle-classpath" conf="checkstyle"/>
- </target>
- <target name="ivy-retrieve-jdiff" depends="ivy-resolve-jdiff"
- description="Retrieve Ivy-managed artifacts for the javadoc configurations">
- <ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"
- pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"/>
- <ivy:cachepath pathid="jdiff-classpath" conf="jdiff"/>
- </target>
- <target name="ivy-retrieve-javadoc" depends="ivy-resolve-javadoc"
- description="Retrieve Ivy-managed artifacts for the javadoc configurations">
- <ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"
- pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"/>
- <ivy:cachepath pathid="javadoc-classpath" conf="javadoc"/>
- </target>
- <target name="ivy-retrieve-test" depends="ivy-resolve-test"
- description="Retrieve Ivy-managed artifacts for the test configurations">
- <ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"
- pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"/>
- <ivy:cachepath pathid="ivy-test.classpath" conf="test"/>
- </target>
- <target name="ivy-retrieve-common" depends="ivy-resolve-common"
- description="Retrieve Ivy-managed artifacts for the compile configurations">
- <ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"
- pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"/>
- <ivy:cachepath pathid="ivy-common.classpath" conf="common"/>
- </target>
- <target name="ivy-retrieve-releaseaudit" depends="ivy-resolve-releaseaudit"
- description="Retrieve Ivy-managed artifacts for the compile configurations">
- <ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"
- pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}" />
- <ivy:cachepath pathid="releaseaudit-classpath" conf="releaseaudit"/>
- <available classname="${rat.reporting.classname}"
- classpathref="releaseaudit-classpath" property="rat.present" value="true"/>
- </target>
- <target name="ivy-report" depends="ivy-resolve-releaseaudit"
- description="Generate">
- <ivy:report todir="${build.ivy.report.dir}" settingsRef="${ant.project.name}.ivy.settings"/>
- <echo>
- Reports generated: ${build.ivy.report.dir}
- </echo>
- </target>
- <target name="ant-task-download" description="To download mvn-ant-task">
- <get src="${ant_task_repo_url}" dest="${ant_task.jar}" usetimestamp="true"/>
- </target>
- <target name="mvn-taskdef" depends="ant-task-download">
- <path id="mvn-ant-task.classpath" path="${ant_task.jar}"/>
- <typedef resource="org/apache/maven/artifact/ant/antlib.xml"
- uri="urn:maven-artifact-ant"
- classpathref="mvn-ant-task.classpath"/>
- </target>
- <target name="mvn-install" depends="mvn-taskdef,bin-package,set-version"
- description="To install hadoop core and test jars to local filesystem's m2 cache">
- <artifact:pom file="${hadoop-core.pom}" id="hadoop.core"/>
- <artifact:pom file="${hadoop-test.pom}" id="hadoop.test"/>
- <artifact:pom file="${hadoop-examples.pom}" id="hadoop.examples"/>
- <artifact:pom file="${hadoop-tools.pom}" id="hadoop.tools"/>
- <artifact:pom file="${hadoop-streaming.pom}" id="hadoop.streaming"/>
- <artifact:install file="${hadoop-core.jar}">
- <pom refid="hadoop.core"/>
- </artifact:install>
- <artifact:install file="${hadoop-test.jar}">
- <pom refid="hadoop.test"/>
- </artifact:install>
- <artifact:install file="${hadoop-tools.jar}">
- <pom refid="hadoop.tools"/>
- </artifact:install>
- <artifact:install file="${hadoop-examples.jar}">
- <pom refid="hadoop.examples"/>
- </artifact:install>
- <artifact:install file="${hadoop-streaming.jar}">
- <pom refid="hadoop.streaming"/>
- </artifact:install>
- </target>
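- <!--
- Illustrative usage: "ant mvn-install" regenerates the POMs via
- set-version and installs the core, test, tools, examples and streaming
- jars into the local ~/.m2 repository.
- -->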
- <target name="mvn-deploy" depends="mvn-taskdef, bin-package, set-version, signanddeploy, simpledeploy"
- description="To deploy hadoop core and test jar's to apache maven repository"/>
- <target name="signanddeploy" if="staging" depends="sign">
- <artifact:pom file="${hadoop-core.pom}" id="hadoop.core"/>
- <artifact:pom file="${hadoop-test.pom}" id="hadoop.core.test"/>
- <artifact:pom file="${hadoop-examples.pom}" id="hadoop.examples"/>
- <artifact:pom file="${hadoop-tools.pom}" id="hadoop.tools"/>
- <artifact:pom file="${hadoop-streaming.pom}" id="hadoop.streaming"/>
- <artifact:install-provider artifactId="wagon-http"
- version="${wagon-http.version}"/>
- <artifact:deploy file="${hadoop-core.jar}">
- <remoteRepository id="apache.staging.https" url="${asfstagingrepo}"/>
- <pom refid="hadoop.core"/>
- <attach file="${hadoop-core.jar}.asc" type="jar.asc"/>
- <attach file="${hadoop-core.pom}.asc" type="pom.asc"/>
- </artifact:deploy>
- <artifact:deploy file="${hadoop-test.jar}">
- <remoteRepository id="apache.staging.https" url="${asfstagingrepo}"/>
- <pom refid="hadoop.core.test"/>
- <attach file="${hadoop-test.jar}.asc" type="jar.asc"/>
- <attach file="${hadoop-test.pom}.asc" type="pom.asc"/>
- </artifact:deploy>
- <artifact:deploy file="${hadoop-tools.jar}">
- <remoteRepository id="apache.staging.https" url="${asfstagingrepo}"/>
- <pom refid="hadoop.tools"/>
- <attach file="${hadoop-tools.jar}.asc" type="jar.asc"/>
- <attach file="${hadoop-tools.pom}.asc" type="pom.asc"/>
- </artifact:deploy>
- <artifact:deploy file="${hadoop-examples.jar}">
- <remoteRepository id="apache.staging.https" url="${asfstagingrepo}"/>
- <pom refid="hadoop.examples"/>
- <attach file="${hadoop-examples.jar}.asc" type="jar.asc"/>
- <attach file="${hadoop-examples.pom}.asc" type="pom.asc"/>
- </artifact:deploy>
- <artifact:deploy file="${hadoop-streaming.jar}">
- <remoteRepository id="apache.staging.https" url="${asfstagingrepo}"/>
- <pom refid="hadoop.streaming"/>
- <attach file="${hadoop-streaming.jar}.asc" type="jar.asc"/>
- <attach file="${hadoop-streaming.pom}.asc" type="pom.asc"/>
- </artifact:deploy>
- </target>
- <target name="sign" depends="clean-sign" if="staging">
- <input message="password:>" addproperty="gpg.passphrase">
- <handler classname="org.apache.tools.ant.input.SecureInputHandler" />
- </input>
- <macrodef name="sign-artifact" description="Signs the artifact">
- <attribute name="input.file"/>
- <attribute name="output.file" default="@{input.file}.asc"/>
- <attribute name="gpg.passphrase"/>
- <sequential>
- <echo>Signing @{input.file}; signature file: @{output.file}</echo>
- <exec executable="gpg" >
- <arg value="--armor"/>
- <arg value="--output"/>
- <arg value="@{output.file}"/>
- <arg value="--passphrase"/>
- <arg value="@{gpg.passphrase}"/>
- <arg value="--detach-sig"/>
- <arg value="@{input.file}"/>
- </exec>
- </sequential>
- </macrodef>
- <sign-artifact input.file="${hadoop-core.jar}"
- output.file="${hadoop-core.jar}.asc" gpg.passphrase="${gpg.passphrase}"/>
- <sign-artifact input.file="${hadoop-test.jar}"
- output.file="${hadoop-test.jar}.asc" gpg.passphrase="${gpg.passphrase}"/>
- <sign-artifact input.file="${hadoop-tools.jar}"
- output.file="${hadoop-tools.jar}.asc" gpg.passphrase="${gpg.passphrase}"/>
- <sign-artifact input.file="${hadoop-examples.jar}"
- output.file="${hadoop-examples.jar}.asc" gpg.passphrase="${gpg.passphrase}"/>
- <sign-artifact input.file="${hadoop-streaming.jar}"
- output.file="${hadoop-streaming.jar}.asc" gpg.passphrase="${gpg.passphrase}"/>
- <sign-artifact input.file="${hadoop-core.pom}"
- output.file="${hadoop-core.pom}.asc" gpg.passphrase="${gpg.passphrase}"/>
- <sign-artifact input.file="${hadoop-test.pom}"
- output.file="${hadoop-test.pom}.asc" gpg.passphrase="${gpg.passphrase}"/>
- <sign-artifact input.file="${hadoop-tools.pom}"
- output.file="${hadoop-tools.pom}.asc" gpg.passphrase="${gpg.passphrase}"/>
- <sign-artifact input.file="${hadoop-examples.pom}"
- output.file="${hadoop-examples.pom}.asc" gpg.passphrase="${gpg.passphrase}"/>
- <sign-artifact input.file="${hadoop-streaming.pom}"
- output.file="${hadoop-streaming.pom}.asc" gpg.passphrase="${gpg.passphrase}"/>
- </target>
- <target name="simpledeploy" unless="staging">
- <artifact:pom file="${hadoop-core.pom}" id="hadoop.core"/>
- <artifact:pom file="${hadoop-test.pom}" id="hadoop.test"/>
- <artifact:pom file="${hadoop-examples.pom}" id="hadoop.examples"/>
- <artifact:pom file="${hadoop-tools.pom}" id="hadoop.tools"/>
- <artifact:pom file="${hadoop-streaming.pom}" id="hadoop.streaming"/>
- <artifact:install-provider artifactId="wagon-http" version="${wagon-http.version}"/>
- <artifact:deploy file="${hadoop-core.jar}">
- <remoteRepository id="apache.snapshots.https" url="${asfsnapshotrepo}"/>
- <pom refid="hadoop.core"/>
- </artifact:deploy>
- <artifact:deploy file="${hadoop-test.jar}">
- <remoteRepository id="apache.snapshots.https" url="${asfsnapshotrepo}"/>
- <pom refid="hadoop.test"/>
- </artifact:deploy>
- <artifact:deploy file="${hadoop-examples.jar}">
- <remoteRepository id="apache.snapshots.https" url="${asfsnapshotrepo}"/>
- <pom refid="hadoop.examples"/>
- </artifact:deploy>
- <artifact:deploy file="${hadoop-tools.jar}">
- <remoteRepository id="apache.snapshots.https" url="${asfsnapshotrepo}"/>
- <pom refid="hadoop.tools"/>
- </artifact:deploy>
- <artifact:deploy file="${hadoop-streaming.jar}">
- <remoteRepository id="apache.snapshots.https" url="${asfsnapshotrepo}"/>
- <pom refid="hadoop.streaming"/>
- </artifact:deploy>
- </target>
- <target name="set-version">
- <delete file="${hadoop-core.pom}"/>
- <delete file="${hadoop-test.pom}"/>
- <delete file="${hadoop-examples.pom}"/>
- <delete file="${hadoop-tools.pom}"/>
- <delete file="${hadoop-streaming.pom}"/>
- <copy file="${hadoop-core-pom-template.xml}" tofile="${hadoop-core.pom}"/>
- <copy file="${hadoop-test-pom-template.xml}" tofile="${hadoop-test.pom}"/>
- <copy file="${hadoop-examples-pom-template.xml}" tofile="${hadoop-examples.pom}"/>
- <copy file="${hadoop-tools-pom-template.xml}" tofile="${hadoop-tools.pom}"/>
- <copy file="${hadoop-streaming-pom-template.xml}" tofile="${hadoop-streaming.pom}"/>
- <replaceregexp byline="true">
- <regexp pattern="@version"/>
- <substitution expression="${version}"/>
- <fileset dir="${basedir}/ivy">
- <include name="hadoop-core-pom.xml"/>
- <include name="hadoop-test-pom.xml"/>
- <include name="hadoop-tools-pom.xml"/>
- <include name="hadoop-examples-pom.xml"/>
- <include name="hadoop-streaming-pom.xml"/>
- </fileset>
- </replaceregexp>
- </target>
- <!-- taskcontroller targets -->
- <target name="task-controller" depends="init">
- <exec executable="autoreconf"
- dir="${c++.task-controller.src}"
- searchpath="yes" failonerror="yes">
- <arg value="-i"/>
- </exec>
- <mkdir dir="${build.c++.task-controller}" />
- <exec executable="/bin/sh"
- dir="${build.c++.task-controller}">
- <arg value="${c++.task-controller.src}/configure"/>
- <arg value="--prefix=${task-controller.prefix.dir}"/>
- <env key="CFLAGS"
- value="-DHADOOP_CONF_DIR=${hadoop.conf.dir}"/>
- </exec>
- <!-- delete main in case HADOOP_CONF_DIR is different -->
- <delete file="${build.c++.task-controller}/impl/main.o"
- quiet="true" failonerror="false"/>
- <exec executable="make"
- dir="${build.c++.task-controller}"
- searchpath="yes" failonerror="yes">
- <arg value="install"/>
- </exec>
- </target>
- <target name="test-task-controller" depends="init,task-controller">
- <exec executable="make"
- dir="${build.c++.task-controller}"
- searchpath="yes" failonerror="yes">
- <arg value="check"/>
- </exec>
- </target>
- <!-- end of task-controller targets -->
- <target name="jsvc" >
- <mkdir dir="${jsvc.build.dir}" />
- <get src="${jsvc.location}" dest="${jsvc.build.dir}/${jsvc.dest.name}" />
- <untar compression="gzip" src="${jsvc.build.dir}/${jsvc.dest.name}" dest="${jsvc.build.dir}" />
- <copy file="${jsvc.build.dir}/jsvc" toFile="${jsvc.install.dir}/jsvc.${os.arch}" verbose="true" />
- <chmod perm="ugo+x" type="file">
- <fileset file="${jsvc.install.dir}/jsvc.${os.arch}"/>
- </chmod>
- </target>
- </project>