Browse Source

HADOOP-7596. Makes packaging of 64-bit jsvc possible. Has other related fixes. Contributed by Eric Yang.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-0.20-security@1165905 13f79535-47bb-0310-9956-ffa450edef68
Devaraj Das 13 years ago
parent
commit
16c987118c

+ 3 - 0
CHANGES.txt

@@ -94,6 +94,9 @@ Release 0.20.205.0 - unreleased
 
     HDFS-1554. New semantics for recoverLease. (hairong)
 
+    HADOOP-7596. Makes packaging of 64-bit jsvc possible. Has other
+    bug fixes to do with packaging. (Eric Yang via ddas) 
+
   IMPROVEMENTS
 
     MAPREDUCE-2187. Reporter sends progress during sort/merge. (Anupam Seth via

+ 70 - 40
bin/hadoop

@@ -161,34 +161,58 @@ fi
 IFS=
 
 # for releases, add core hadoop jar & webapps to CLASSPATH
-if [ -d "$HADOOP_HOME/webapps" ]; then
-  CLASSPATH=${CLASSPATH}:$HADOOP_HOME
-fi
-for f in $HADOOP_HOME/hadoop-core-*.jar; do
-  CLASSPATH=${CLASSPATH}:$f;
-done
-
-# add libs to CLASSPATH
-for f in $HADOOP_HOME/lib/*.jar; do
-  CLASSPATH=${CLASSPATH}:$f;
-done
-
-if [ -d "$HADOOP_HOME/build/ivy/lib/Hadoop/common" ]; then
-for f in $HADOOP_HOME/build/ivy/lib/Hadoop/common/*.jar; do
-  CLASSPATH=${CLASSPATH}:$f;
-done
-fi
+if [ -e $HADOOP_PREFIX/share/hadoop/hadoop-core-* ]; then
+  # binary layout
+  if [ -d "$HADOOP_PREFIX/share/hadoop/webapps" ]; then
+    CLASSPATH=${CLASSPATH}:$HADOOP_PREFIX/share/hadoop
+  fi
+  for f in $HADOOP_PREFIX/share/hadoop/hadoop-core-*.jar; do
+    CLASSPATH=${CLASSPATH}:$f;
+  done
+
+  # add libs to CLASSPATH
+  for f in $HADOOP_PREFIX/share/hadoop/lib/*.jar; do
+    CLASSPATH=${CLASSPATH}:$f;
+  done
+
+  for f in $HADOOP_PREFIX/share/hadoop/lib/jsp-2.1/*.jar; do
+    CLASSPATH=${CLASSPATH}:$f;
+  done
+
+  for f in $HADOOP_PREFIX/share/hadoop/hadoop-tools-*.jar; do
+    TOOL_PATH=${TOOL_PATH}:$f;
+  done
+else
+  # tarball layout
+  if [ -d "$HADOOP_HOME/webapps" ]; then
+    CLASSPATH=${CLASSPATH}:$HADOOP_HOME
+  fi
+  for f in $HADOOP_HOME/hadoop-core-*.jar; do
+    CLASSPATH=${CLASSPATH}:$f;
+  done
+
+  # add libs to CLASSPATH
+  for f in $HADOOP_HOME/lib/*.jar; do
+    CLASSPATH=${CLASSPATH}:$f;
+  done
+
+  if [ -d "$HADOOP_HOME/build/ivy/lib/Hadoop/common" ]; then
+    for f in $HADOOP_HOME/build/ivy/lib/Hadoop/common/*.jar; do
+      CLASSPATH=${CLASSPATH}:$f;
+    done
+  fi
 
-for f in $HADOOP_HOME/lib/jsp-2.1/*.jar; do
-  CLASSPATH=${CLASSPATH}:$f;
-done
+  for f in $HADOOP_HOME/lib/jsp-2.1/*.jar; do
+    CLASSPATH=${CLASSPATH}:$f;
+  done
 
-for f in $HADOOP_HOME/hadoop-tools-*.jar; do
-  TOOL_PATH=${TOOL_PATH}:$f;
-done
-for f in $HADOOP_HOME/build/hadoop-tools-*.jar; do
-  TOOL_PATH=${TOOL_PATH}:$f;
-done
+  for f in $HADOOP_HOME/hadoop-tools-*.jar; do
+    TOOL_PATH=${TOOL_PATH}:$f;
+  done
+  for f in $HADOOP_HOME/build/hadoop-tools-*.jar; do
+    TOOL_PATH=${TOOL_PATH}:$f;
+  done
+fi
 
 # add user-specified CLASSPATH last
 if [ "$HADOOP_USER_CLASSPATH_FIRST" = "" ] && [ "$HADOOP_CLASSPATH" != "" ]; then
@@ -302,9 +326,15 @@ if $cygwin; then
 fi
 # setup 'java.library.path' for native-hadoop code if necessary
 JAVA_LIBRARY_PATH=''
-if [ -d "${HADOOP_HOME}/build/native" -o -d "${HADOOP_HOME}/lib/native" ]; then
+if [ -d "${HADOOP_HOME}/build/native" -o -d "${HADOOP_HOME}/lib/native" -o -e "${HADOOP_PREFIX}/lib/libhadoop.a" ]; then
   JAVA_PLATFORM=`CLASSPATH=${CLASSPATH} ${JAVA} -Xmx32m ${HADOOP_JAVA_PLATFORM_OPTS} org.apache.hadoop.util.PlatformName | sed -e "s/ /_/g"`
   
+  if [ "$JAVA_PLATFORM" = "Linux-amd64-64" ]; then
+    JSVC_ARCH="amd64"
+  else
+    JSVC_ARCH="i386"
+  fi
+
   if [ -d "$HADOOP_HOME/build/native" ]; then
     JAVA_LIBRARY_PATH=${HADOOP_HOME}/build/native/${JAVA_PLATFORM}/lib
   fi
@@ -316,11 +346,11 @@ if [ -d "${HADOOP_HOME}/build/native" -o -d "${HADOOP_HOME}/lib/native" ]; then
       JAVA_LIBRARY_PATH=${HADOOP_HOME}/lib/native/${JAVA_PLATFORM}
     fi
   fi
-fi
-if [ -e "${HADOOP_PREFIX}/lib/libhadoop.a" ]; then
-  JAVA_LIBRARY_PATH=${HADOOP_PREFIX}/lib
-fi
 
+  if [ -e "${HADOOP_PREFIX}/lib/libhadoop.a" ]; then
+    JAVA_LIBRARY_PATH=${HADOOP_PREFIX}/lib
+  fi
+fi
 
 # cygwin path translation
 if $cygwin; then
@@ -349,17 +379,17 @@ if [ "$starting_secure_dn" = "true" ]; then
   if [ "$HADOOP_PID_DIR" = "" ]; then
     HADOOP_SECURE_DN_PID="/tmp/hadoop_secure_dn.pid"
   else
-   HADOOP_SECURE_DN_PID="$HADOOP_PID_DIR/hadoop_secure_dn.pid"
+    HADOOP_SECURE_DN_PID="$HADOOP_PID_DIR/hadoop_secure_dn.pid"
   fi
 
-  exec "$HADOOP_HOME/libexec/jsvc" -Dproc_$COMMAND -outfile "$HADOOP_LOG_DIR/jsvc.out" \
-                                                   -errfile "$HADOOP_LOG_DIR/jsvc.err" \
-                                                   -pidfile "$HADOOP_SECURE_DN_PID" \
-                                                   -nodetach \
-                                                   -user "$HADOOP_SECURE_DN_USER" \
-                                                   -cp "$CLASSPATH" \
-                                                   $JAVA_HEAP_MAX $HADOOP_OPTS \
-                                                   org.apache.hadoop.hdfs.server.datanode.SecureDataNodeStarter "$@"
+  exec "$HADOOP_HOME/libexec/jsvc.${JSVC_ARCH}" -Dproc_$COMMAND -outfile "$HADOOP_LOG_DIR/jsvc.out" \
+                                                -errfile "$HADOOP_LOG_DIR/jsvc.err" \
+                                                -pidfile "$HADOOP_SECURE_DN_PID" \
+                                                -nodetach \
+                                                -user "$HADOOP_SECURE_DN_USER" \
+                                                -cp "$CLASSPATH" \
+                                                $JAVA_HEAP_MAX $HADOOP_OPTS \
+                                                org.apache.hadoop.hdfs.server.datanode.SecureDataNodeStarter "$@"
 else
   # run it
   exec "$JAVA" -Dproc_$COMMAND $JAVA_HEAP_MAX $HADOOP_OPTS -classpath "$CLASSPATH" $CLASS "$@"

+ 1 - 5
bin/hadoop-config.sh

@@ -32,11 +32,7 @@ this="$config_bin/$script"
 
 # the root of the Hadoop installation
 export HADOOP_PREFIX=`dirname "$this"`/..
-if [ -d ${HADOOP_PREFIX}/share/hadoop/bin ]; then
-  export HADOOP_HOME=${HADOOP_PREFIX}/share/hadoop
-else
-  export HADOOP_HOME=${HADOOP_PREFIX}
-fi
+export HADOOP_HOME=${HADOOP_PREFIX}
 
 #check to see if the conf dir is given as an optional argument
 if [ $# -gt 1 ]

+ 7 - 1
bin/hadoop-daemon.sh

@@ -88,7 +88,13 @@ if [ "$HADOOP_LOG_DIR" = "" ]; then
   export HADOOP_LOG_DIR="$HADOOP_HOME/logs"
 fi
 mkdir -p "$HADOOP_LOG_DIR"
-chown $HADOOP_IDENT_STRING $HADOOP_LOG_DIR 
+touch $HADOOP_LOG_DIR/.hadoop_test > /dev/null 2>&1
+TEST_LOG_DIR=$?
+if [ "${TEST_LOG_DIR}" = "0" ]; then
+  rm -f $HADOOP_LOG_DIR/.hadoop_test
+else
+  chown $HADOOP_IDENT_STRING $HADOOP_LOG_DIR 
+fi
 
 if [ "$HADOOP_PID_DIR" = "" ]; then
   HADOOP_PID_DIR=/tmp

+ 18 - 5
build.xml

@@ -168,10 +168,23 @@
   <property name="patch.cmd" value="patch"/>
   <property name="make.cmd" value="make"/>
 
-  <property name="jsvc.build.dir" value="${build.dir}/jsvc" />
+  <property name="jsvc.build.dir" value="${build.dir}/jsvc.${os.arch}" />
   <property name="jsvc.install.dir" value="${dist.dir}/libexec" /> 
-  <property name="jsvc.location" value="http://archive.apache.org/dist/commons/daemon/binaries/1.0.2/linux/commons-daemon-1.0.2-bin-linux-i386.tar.gz" />
-  <property name="jsvc.dest.name" value="jsvc.tar.gz" />
+  <condition property="os-arch" value="x86_64">
+    <and>
+      <os arch="amd64" />
+    </and>
+  </condition>
+  <condition property="os-arch" value="i386">
+    <or>
+      <os arch="i386" />
+      <os arch="i486" />
+      <os arch="i586" />
+      <os arch="i686" />
+    </or>
+  </condition>
+  <property name="jsvc.location" value="http://archive.apache.org/dist/commons/daemon/binaries/1.0.2/linux/commons-daemon-1.0.2-bin-linux-${os-arch}.tar.gz" />
+  <property name="jsvc.dest.name" value="jsvc.${os.arch}.tar.gz" />
 
   <!-- task-controller properties set here -->
   <!-- Source directory from where configure is run and files are copied
@@ -2606,9 +2619,9 @@
 
     <untar compression="gzip" src="${jsvc.build.dir}/${jsvc.dest.name}" dest="${jsvc.build.dir}" />
 
-    <copy file="${jsvc.build.dir}/jsvc" todir="${jsvc.install.dir}" verbose="true" />
+    <copy file="${jsvc.build.dir}/jsvc" toFile="${jsvc.install.dir}/jsvc.${os.arch}" verbose="true" />
     <chmod perm="ugo+x" type="file">
-      <fileset file="${jsvc.install.dir}/jsvc"/>
+      <fileset file="${jsvc.install.dir}/jsvc.${os.arch}"/>
     </chmod>
  </target>
 

+ 15 - 7
src/packages/deb/init.d/hadoop-datanode

@@ -39,6 +39,14 @@ fi
 
 . /lib/lsb/init-functions
 
+if [ -n "$HADOOP_SECURE_DN_USER" ]; then
+  DN_USER="root"
+  IDENT_USER=${HADOOP_SECURE_DN_USER}
+else
+  DN_USER="hdfs"
+  IDENT_USER=${DN_USER}
+fi
+
 # Are we running from init?
 run_by_init() {
     ([ "$previous" ] && [ "$runlevel" ]) || [ "$runlevel" = S ]
@@ -73,7 +81,7 @@ case "$1" in
 	check_privsep_dir
 	check_for_no_start
 	log_daemon_msg "Starting Apache Hadoop Data Node server" "hadoop-datanode"
-	if start-stop-daemon --start --quiet --oknodo --pidfile ${HADOOP_PID_DIR}/hadoop-hdfs-datanode.pid -c hdfs -x ${HADOOP_PREFIX}/sbin/hadoop-daemon.sh -- --config ${HADOOP_CONF_DIR} start datanode; then
+	if start-stop-daemon --start --quiet --oknodo --pidfile ${HADOOP_PID_DIR}/hadoop-${IDENT_USER}-datanode.pid -c ${DN_USER} -x ${HADOOP_PREFIX}/sbin/hadoop-daemon.sh -- --config ${HADOOP_CONF_DIR} start datanode; then
 	    log_end_msg 0
 	else
 	    log_end_msg 1
@@ -81,7 +89,7 @@ case "$1" in
 	;;
   stop)
 	log_daemon_msg "Stopping Apache Hadoop Data Node server" "hadoop-datanode"
-	if start-stop-daemon --stop --quiet --oknodo --pidfile ${HADOOP_PID_DIR}/hadoop-hdfs-datanode.pid; then
+	if start-stop-daemon --stop --quiet --oknodo --pidfile ${HADOOP_PID_DIR}/hadoop-${IDENT_USER}-datanode.pid; then
 	    log_end_msg 0
 	else
 	    log_end_msg 1
@@ -91,9 +99,9 @@ case "$1" in
   restart)
 	check_privsep_dir
 	log_daemon_msg "Restarting Apache Hadoop Data Node server" "hadoop-datanode"
-	start-stop-daemon --stop --quiet --oknodo --retry 30 --pidfile ${HADOOP_PID_DIR}/hadoop-hdfs-datanode.pid
+	start-stop-daemon --stop --quiet --oknodo --retry 30 --pidfile ${HADOOP_PID_DIR}/hadoop-${IDENT_USER}-datanode.pid
 	check_for_no_start log_end_msg
-	if start-stop-daemon --start --quiet --oknodo --pidfile ${HADOOP_PID_DIR}/hadoop-hdfs-datanode.pid -c hdfs -x ${HADOOP_PREFIX}/sbin/hadoop-daemon.sh -- --config ${HADOOP_CONF_DIR} start datanode; then
+	if start-stop-daemon --start --quiet --oknodo --pidfile ${HADOOP_PID_DIR}/hadoop-${IDENT_USER}-datanode.pid -c ${DN_USER} -x ${HADOOP_PREFIX}/sbin/hadoop-daemon.sh -- --config ${HADOOP_CONF_DIR} start datanode; then
 	    log_end_msg 0
 	else
 	    log_end_msg 1
@@ -104,14 +112,14 @@ case "$1" in
 	check_privsep_dir
 	log_daemon_msg "Restarting Apache Hadoop Data Node server" "hadoop-datanode"
 	set +e
-	start-stop-daemon --stop --quiet --retry 30 --pidfile ${HADOOP_PID_DIR}/hadoop-hdfs-datanode.pid
+	start-stop-daemon --stop --quiet --retry 30 --pidfile ${HADOOP_PID_DIR}/hadoop-${IDENT_USER}-datanode.pid
 	RET="$?"
 	set -e
 	case $RET in
 	    0)
 		# old daemon stopped
 		check_for_no_start log_end_msg
-		if start-stop-daemon --start --quiet --oknodo --pidfile ${HADOOP_PID_DIR}/hadoop-hdfs-datanode.pid -c hdfs -x ${HADOOP_PREFIX}/sbin/hadoop-daemon.sh -- --config ${HADOOP_CONF_DIR} start datanode; then
+		if start-stop-daemon --start --quiet --oknodo --pidfile ${HADOOP_PID_DIR}/hadoop-${IDENT_USER}-datanode.pid -c ${DN_USER} -x ${HADOOP_PREFIX}/sbin/hadoop-daemon.sh -- --config ${HADOOP_CONF_DIR} start datanode; then
 		    log_end_msg 0
 		else
 		    log_end_msg 1
@@ -131,7 +139,7 @@ case "$1" in
 	;;
 
   status)
-	status_of_proc -p ${HADOOP_PID_DIR}/hadoop-hdfs-datanode.pid ${JAVA_HOME}/bin/java hadoop-datanode && exit 0 || exit $?
+	status_of_proc -p ${HADOOP_PID_DIR}/hadoop-${IDENT_USER}-datanode.pid ${JAVA_HOME}/bin/java hadoop-datanode && exit 0 || exit $?
 	;;
 
   *)

+ 10 - 2
src/packages/rpm/init.d/hadoop-datanode

@@ -30,7 +30,11 @@ desc="Hadoop datanode daemon"
 
 start() {
   echo -n $"Starting $desc (hadoop-datanode): "
-  daemon --user hdfs ${HADOOP_PREFIX}/sbin/hadoop-daemon.sh --config "${HADOOP_CONF_DIR}" start datanode
+  if [ -n "$HADOOP_SECURE_DN_USER" ]; then
+    daemon ${HADOOP_PREFIX}/sbin/hadoop-daemon.sh --config "${HADOOP_CONF_DIR}" start datanode
+  else
+    daemon --user hdfs ${HADOOP_PREFIX}/sbin/hadoop-daemon.sh --config "${HADOOP_CONF_DIR}" start datanode
+  fi
   RETVAL=$?
   echo
   [ $RETVAL -eq 0 ] && touch /var/lock/subsys/hadoop-datanode
@@ -39,7 +43,11 @@ start() {
 
 stop() {
   echo -n $"Stopping $desc (hadoop-datanode): "
-  daemon --user hdfs ${HADOOP_PREFIX}/sbin/hadoop-daemon.sh --config "${HADOOP_CONF_DIR}" stop datanode
+  if [ -n "$HADOOP_SECURE_DN_USER" ]; then
+    daemon ${HADOOP_PREFIX}/sbin/hadoop-daemon.sh --config "${HADOOP_CONF_DIR}" stop datanode
+  else
+    daemon --user hdfs ${HADOOP_PREFIX}/sbin/hadoop-daemon.sh --config "${HADOOP_CONF_DIR}" stop datanode
+  fi
   RETVAL=$?
   sleep 5
   echo

+ 1 - 0
src/packages/rpm/spec/hadoop.spec

@@ -191,4 +191,5 @@ bash ${RPM_INSTALL_PREFIX0}/sbin/update-hadoop-env.sh \
 %config(noreplace) %{_conf_dir}/ssl-server.xml.example
 %config(noreplace) %{_conf_dir}/taskcontroller.cfg
 %{_prefix}
+%attr(0755,root,root) %{_prefix}/libexec
 %attr(0755,root,root) /etc/rc.d/init.d

+ 3 - 4
src/packages/templates/conf/hadoop-env.sh

@@ -10,7 +10,6 @@ export JAVA_HOME=${JAVA_HOME}
 
 # Location where Hadoop is installed
 export HADOOP_PREFIX=${HADOOP_PREFIX}
-export HADOOP_HOME=${HADOOP_PREFIX}/share/hadoop
 
 # Extra Java CLASSPATH elements.  Optional.
 # export HADOOP_CLASSPATH=
@@ -56,9 +55,9 @@ export HADOOP_IDENT_STRING=`whoami`
 # export HADOOP_NICENESS=10
 
 # Where log files are stored.  $HADOOP_HOME/logs by default.
-HADOOP_LOG_DIR=${HADOOP_LOG_DIR}/$HADOOP_IDENT_STRING
-export HADOOP_LOG_DIR=${HADOOP_LOG_DIR:-$HADOOP_HOME/var/log}
+HADOOP_LOG_DIR=${HADOOP_LOG_DIR}
+export HADOOP_LOG_DIR=${HADOOP_LOG_DIR:-$HADOOP_PREFIX/var/log}
 
 # Hadoop configuration directory
 HADOOP_CONF_DIR=${HADOOP_CONF_DIR}
-export HADOOP_CONF_DIR=${HADOOP_CONF_DIR:-$HADOOP_PREFIX/conf}
+export HADOOP_CONF_DIR=${HADOOP_CONF_DIR:-$HADOOP_PREFIX/etc/hadoop}

+ 4 - 10
src/packages/update-hadoop-env.sh

@@ -115,12 +115,12 @@ for var in PREFIX; do
 done
 
 ARCH=${ARCH:-i386}
-BIN_DIR=${BIN_DIR:-$PREFIX/share/hadoop/bin}
+BIN_DIR=${BIN_DIR:-$PREFIX/bin}
 CONF_DIR=${CONF_DIR:-$PREFIX/etc/hadoop}
 LIB_DIR=${LIB_DIR:-$PREFIX/lib}
 LOG_DIR=${LOG_DIR:-$PREFIX/var/log}
 PID_DIR=${PID_DIR:-$PREFIX/var/run}
-SBIN_DIR=${SBIN_DIR:-$PREFIX/share/hadoop/sbin}
+SBIN_DIR=${SBIN_DIR:-$PREFIX/sbin}
 UNINSTALL=${UNINSTALL:-0}
 
 if [ "${ARCH}" != "i386" ]; then
@@ -144,14 +144,8 @@ else
   ln -sf ${CONF_DIR}/hadoop-env.sh /etc/profile.d/hadoop-env.sh
 
   mkdir -p ${LOG_DIR}
-  mkdir -p ${LOG_DIR}/hdfs
-  mkdir -p ${LOG_DIR}/mapred
   chown root:hadoop ${LOG_DIR}
-  chown hdfs ${LOG_DIR}/hdfs
-  chown mapred ${LOG_DIR}/mapred
-  chmod 755 ${LOG_DIR}
-  chmod 755 ${LOG_DIR}/hdfs
-  chmod 755 ${LOG_DIR}/mapred
+  chmod 775 ${LOG_DIR}
 
   if [ ! -d ${PID_DIR} ]; then
     mkdir -p ${PID_DIR}
@@ -162,7 +156,7 @@ else
   TFILE="/tmp/$(basename $0).$$.tmp"
   if [ -z "${JAVA_HOME}" ]; then
     if [ -e /etc/debian_version ]; then
-      JAVA_HOME=`update-alternatives --config java | grep java | cut -f2 -d':' | cut -f2 -d' ' | sed -e 's/\/bin\/java//'`
+      JAVA_HOME=/usr/lib/jvm/java-6-sun
     else
       JAVA_HOME=/usr/java/default
     fi