|
@@ -1,18 +1,28 @@
|
|
To compile Hadoop Mapreduce next, do the following:
|
|
To compile Hadoop Mapreduce next, do the following:
|
|
|
|
|
|
-Step 1) Download Hadoop Common
|
|
|
|
|
|
+Step 1) Install dependencies for yarn
|
|
|
|
|
|
-svn checkout http://svn.apache.org/repos/asf/hadoop/common/branches/yahoo-merge/
|
|
|
|
|
|
+See http://svn.apache.org/repos/asf/hadoop/common/branches/MR-279/mapreduce/yarn/README
|
|
|
|
+Make sure the protobuf library is in your library path or set: export LD_LIBRARY_PATH=/usr/local/lib
|
|
|
|
+
|
|
|
|
+Step 2) Checkout
|
|
|
|
+
|
|
|
|
+svn checkout http://svn.apache.org/repos/asf/hadoop/common/branches/MR-279/
|
|
|
|
+
|
|
|
|
+Step 3) Build common
|
|
|
|
+
|
|
|
|
+Go to common directory
|
|
ant veryclean mvn-install
|
|
ant veryclean mvn-install
|
|
|
|
|
|
-Step 2) Download Hadoop HDFS
|
|
|
|
|
|
+Step 4) Build HDFS
|
|
|
|
|
|
-svn checkout http://svn.apache.org/repos/asf/hadoop/hdfs/branches/HDFS-1052/
|
|
|
|
|
|
+Go to hdfs directory
|
|
ant veryclean mvn-install -Dresolvers=internal
|
|
ant veryclean mvn-install -Dresolvers=internal
|
|
|
|
|
|
-Step 3) Go to the root directory of hadoop mapreduce
|
|
|
|
|
|
+Step 5) Build yarn and mapreduce
|
|
|
|
|
|
-Step 4) Run
|
|
|
|
|
|
+Go to mapreduce directory
|
|
|
|
+export MAVEN_OPTS=-Xmx512m
|
|
|
|
|
|
mvn clean install assembly:assembly
|
|
mvn clean install assembly:assembly
|
|
ant veryclean jar jar-test -Dresolvers=internal
|
|
ant veryclean jar jar-test -Dresolvers=internal
|
|
@@ -25,14 +35,14 @@ ant veryclean jar jar-test -Dresolvers=internal
|
|
You will see a tarball in
|
|
You will see a tarball in
|
|
ls target/hadoop-mapreduce-1.0-SNAPSHOT-bin.tar.gz
|
|
ls target/hadoop-mapreduce-1.0-SNAPSHOT-bin.tar.gz
|
|
|
|
|
|
-Step 5) Untar the tarball in a clean and different directory.
|
|
|
|
|
|
+Step 6) Untar the tarball in a clean and different directory.
|
|
say HADOOP_YARN_INSTALL
|
|
say HADOOP_YARN_INSTALL
|
|
|
|
|
|
To run Hadoop Mapreduce next applications :
|
|
To run Hadoop Mapreduce next applications :
|
|
|
|
|
|
-Step 6) cd $HADOOP_YARN_INSTALL
|
|
|
|
|
|
+Step 7) cd $HADOOP_YARN_INSTALL
|
|
|
|
|
|
-Step 7) export the following variables:
|
|
|
|
|
|
+Step 8) export the following variables:
|
|
|
|
|
|
HADOOP_MAPRED_HOME=
|
|
HADOOP_MAPRED_HOME=
|
|
HADOOP_COMMON_HOME=
|
|
HADOOP_COMMON_HOME=
|
|
@@ -41,11 +51,11 @@ YARN_HOME=directory where you untarred yarn
|
|
HADOOP_CONF_DIR=
|
|
HADOOP_CONF_DIR=
|
|
YARN_CONF_DIR=$HADOOP_CONF_DIR
|
|
YARN_CONF_DIR=$HADOOP_CONF_DIR
|
|
|
|
|
|
-Step 8) bin/yarn-daemon.sh start resourcemanager
|
|
|
|
|
|
+Step 9) bin/yarn-daemon.sh start resourcemanager
|
|
|
|
|
|
-Step 9) bin/yarn-daemon.sh start nodemanager
|
|
|
|
|
|
+Step 10) bin/yarn-daemon.sh start nodemanager
|
|
|
|
|
|
-Step 10) Create the following symlinks in hadoop-common/lib
|
|
|
|
|
|
+Step 11) Create the following symlinks in hadoop-common/lib
|
|
|
|
|
|
ln -s $HADOOP_YARN_INSTALL/modules/hadoop-mapreduce-client-app-1.0-SNAPSHOT.jar .
|
|
ln -s $HADOOP_YARN_INSTALL/modules/hadoop-mapreduce-client-app-1.0-SNAPSHOT.jar .
|
|
ln -s $HADOOP_YARN_INSTALL/modules/yarn-api-1.0-SNAPSHOT.jar .
|
|
ln -s $HADOOP_YARN_INSTALL/modules/yarn-api-1.0-SNAPSHOT.jar .
|
|
@@ -53,9 +63,10 @@ ln -s $HADOOP_YARN_INSTALL/modules/hadoop-mapreduce-client-common-1.0-SNAPSHOT.j
|
|
ln -s $HADOOP_YARN_INSTALL/modules/yarn-common-1.0-SNAPSHOT.jar .
|
|
ln -s $HADOOP_YARN_INSTALL/modules/yarn-common-1.0-SNAPSHOT.jar .
|
|
ln -s $HADOOP_YARN_INSTALL/modules/hadoop-mapreduce-client-core-1.0-SNAPSHOT.jar .
|
|
ln -s $HADOOP_YARN_INSTALL/modules/hadoop-mapreduce-client-core-1.0-SNAPSHOT.jar .
|
|
ln -s $HADOOP_YARN_INSTALL/modules/yarn-server-common-1.0-SNAPSHOT.jar .
|
|
ln -s $HADOOP_YARN_INSTALL/modules/yarn-server-common-1.0-SNAPSHOT.jar .
|
|
|
|
+ln -s $HADOOP_YARN_INSTALL/modules/hadoop-mapreduce-client-jobclient-1.0-SNAPSHOT.jar .
|
|
ln -s $HADOOP_YARN_INSTALL/lib/protobuf-java-2.4.0a.jar .
|
|
ln -s $HADOOP_YARN_INSTALL/lib/protobuf-java-2.4.0a.jar .
|
|
|
|
|
|
-Step 11) You are all set, an example on how to run a job is:
|
|
|
|
|
|
+Step 12) You are all set, an example on how to run a job is:
|
|
|
|
|
|
$HADOOP_COMMON_HOME/bin/hadoop jar $HADOOP_MAPRED_HOME/build/hadoop-mapred-examples-0.22.0-SNAPSHOT.jar randomwriter -Dmapreduce.job.user.name=$USER -Dmapreduce.randomwriter.bytespermap=10000 -Ddfs.blocksize=536870912 -Ddfs.block.size=536870912 -libjars $HADOOP_YARN_INSTALL/hadoop-mapreduce-1.0-SNAPSHOT/modules/hadoop-mapreduce-client-jobclient-1.0-SNAPSHOT.jar output
|
|
$HADOOP_COMMON_HOME/bin/hadoop jar $HADOOP_MAPRED_HOME/build/hadoop-mapred-examples-0.22.0-SNAPSHOT.jar randomwriter -Dmapreduce.job.user.name=$USER -Dmapreduce.randomwriter.bytespermap=10000 -Ddfs.blocksize=536870912 -Ddfs.block.size=536870912 -libjars $HADOOP_YARN_INSTALL/hadoop-mapreduce-1.0-SNAPSHOT/modules/hadoop-mapreduce-client-jobclient-1.0-SNAPSHOT.jar output
|
|
|
|
|