
HADOOP-9356. Remove remaining references to cygwin/cygpath from scripts. Contributed by Chris Nauroth.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-trunk-win@1452607 13f79535-47bb-0310-9956-ffa450edef68
Suresh Srinivas 12 years ago
parent commit 2664a809b6
21 changed files with 16 additions and 178 deletions
  1. hadoop-common-project/hadoop-common/CHANGES.branch-trunk-win.txt (+3 −0)
  2. hadoop-common-project/hadoop-common/src/main/bin/hadoop (+0 −6)
  3. hadoop-common-project/hadoop-common/src/main/bin/hadoop-config.sh (+0 −30)
  4. hadoop-common-project/hadoop-common/src/main/bin/rcc (+0 −5)
  5. hadoop-common-project/hadoop-common/src/main/docs/src/documentation/content/xdocs/site.xml (+0 −1)
  6. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DF.java (+1 −1)
  7. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java (+1 −1)
  8. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HardLink.java (+0 −5)
  9. hadoop-common-project/hadoop-common/src/main/java/overview.html (+1 −19)
  10. hadoop-common-project/hadoop-common/src/site/apt/SingleNodeSetup.apt.vm (+1 −13)
  11. hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml (+2 −16)
  12. hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs (+0 −3)
  13. hadoop-hdfs-project/hadoop-hdfs/src/main/docs/src/documentation/content/xdocs/site.xml (+0 −1)
  14. hadoop-hdfs-project/hadoop-hdfs/src/main/java/overview.html (+1 −19)
  15. hadoop-hdfs-project/hadoop-hdfs/src/main/native/tests/test-libhdfs.sh (+1 −1)
  16. hadoop-mapreduce-project/bin/mapred (+0 −4)
  17. hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/testshell/ExternalMapReduce.java (+1 −1)
  18. hadoop-mapreduce-project/pom.xml (+1 −8)
  19. hadoop-project-dist/pom.xml (+2 −15)
  20. hadoop-yarn-project/hadoop-yarn/bin/yarn (+0 −21)
  21. hadoop-yarn-project/pom.xml (+1 −8)

+ 3 - 0
hadoop-common-project/hadoop-common/CHANGES.branch-trunk-win.txt

@@ -95,6 +95,9 @@ branch-trunk-win changes - unreleased
   HADOOP-9354. Windows native project files missing license headers.
   (Chris Nauroth via suresh)
 
+  HADOOP-9356. Remove remaining references to cygwin/cygpath from scripts.
+  (Chris Nauroth via suresh)
+
 Patch equivalent to trunk committed to branch-trunk-win
 
   HADOOP-8924. Add maven plugin alternative to shell script to save

+ 0 - 6
hadoop-common-project/hadoop-common/src/main/bin/hadoop

@@ -91,9 +91,6 @@ case $COMMAND in
     ;;
 
   classpath)
-    if $cygwin; then
-      CLASSPATH=`cygpath -p -w "$CLASSPATH"`
-    fi
     echo $CLASSPATH
     exit
     ;;
@@ -132,9 +129,6 @@ case $COMMAND in
     #make sure security appender is turned off
     HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.security.logger=${HADOOP_SECURITY_LOGGER:-INFO,NullAppender}"
 
-    if $cygwin; then
-      CLASSPATH=`cygpath -p -w "$CLASSPATH"`
-    fi
     export CLASSPATH=$CLASSPATH
     exec "$JAVA" $JAVA_HEAP_MAX $HADOOP_OPTS $CLASS "$@"
     ;;
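
For context: the deleted cygpath -p -w call is the classic Cygwin idiom for handing a classpath to a Windows JVM, where -p treats the argument as a PATH-style list and -w emits Windows-form paths. A minimal sketch of the removed behavior, with illustrative paths that are not from the commit:

      # Cygwin-only: rewrite the colon-separated POSIX classpath as a
      # semicolon-separated Windows path list before the JVM sees it.
      CLASSPATH=`cygpath -p -w "$CLASSPATH"`
      # /usr/a.jar:/usr/b.jar  ->  C:\cygwin\usr\a.jar;C:\cygwin\usr\b.jar

branch-trunk-win targets Windows natively rather than through Cygwin, so these translation steps are dead code and the scripts below drop them.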

+ 0 - 30
hadoop-common-project/hadoop-common/src/main/bin/hadoop-config.sh

@@ -112,12 +112,6 @@ if [[ ( "$HADOOP_SLAVES" != '' ) && ( "$HADOOP_SLAVE_NAMES" != '' ) ]] ; then
   exit 1
 fi
 
-cygwin=false
-case "`uname`" in
-CYGWIN*) cygwin=true;;
-esac
-
-
 # check if net.ipv6.bindv6only is set to 1
 bindv6only=$(/sbin/sysctl -n net.ipv6.bindv6only 2> /dev/null)
 if [ -n "$bindv6only" ] && [ "$bindv6only" -eq "1" ] && [ "$HADOOP_ALLOW_IPV6" != "yes" ]
@@ -209,13 +203,6 @@ fi
 # restore ordinary behaviour
 unset IFS
 
-# cygwin path translation
-if $cygwin; then
-  HADOOP_PREFIX=`cygpath -w "$HADOOP_PREFIX"`
-  HADOOP_LOG_DIR=`cygpath -w "$HADOOP_LOG_DIR"`
-  JAVA_LIBRARY_PATH=`cygpath -w "$JAVA_LIBRARY_PATH"`
-fi
-
 # setup 'java.library.path' for native-hadoop code if necessary
 
 if [ -d "${HADOOP_PREFIX}/build/native" -o -d "${HADOOP_PREFIX}/$HADOOP_COMMON_LIB_NATIVE_DIR" ]; then
@@ -232,11 +219,6 @@ fi
 # setup a default TOOL_PATH
 TOOL_PATH="${TOOL_PATH:-$HADOOP_PREFIX/share/hadoop/tools/lib/*}"
 
-# cygwin path translation
-if $cygwin; then
-  JAVA_LIBRARY_PATH=`cygpath -p "$JAVA_LIBRARY_PATH"`
-fi
-
 HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.log.dir=$HADOOP_LOG_DIR"
 HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.log.file=$HADOOP_LOGFILE"
 HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.home.dir=$HADOOP_PREFIX"
@@ -303,15 +285,3 @@ if [ "$HADOOP_MAPRED_HOME/$MAPRED_DIR" != "$HADOOP_YARN_HOME/$YARN_DIR" ] ; then
 
   CLASSPATH=${CLASSPATH}:$HADOOP_MAPRED_HOME/$MAPRED_DIR'/*'
 fi
-
-# cygwin path translation
-if $cygwin; then
-  HADOOP_HDFS_HOME=`cygpath -w "$HADOOP_HDFS_HOME"`
-fi
-
-# cygwin path translation
-if $cygwin; then
-  TOOL_PATH=`cygpath -p -w "$TOOL_PATH"`
-fi
-
-

+ 0 - 5
hadoop-common-project/hadoop-common/src/main/bin/rcc

@@ -57,10 +57,5 @@ unset IFS
 
 CLASS='org.apache.hadoop.record.compiler.generated.Rcc'
 
-# cygwin path translation
-if expr `uname` : 'CYGWIN*' > /dev/null; then
-  CLASSPATH=`cygpath -p -w "$CLASSPATH"`
-fi
-
 # run it
 exec "$JAVA" $HADOOP_OPTS -classpath "$CLASSPATH" $CLASS "$@"

+ 0 - 1
hadoop-common-project/hadoop-common/src/main/docs/src/documentation/content/xdocs/site.xml

@@ -87,7 +87,6 @@ See http://forrest.apache.org/docs/linking.html for more info.
     <zlib      href="http://www.zlib.net/" />
     <gzip      href="http://www.gzip.org/" />
     <bzip      href="http://www.bzip.org/" />
-    <cygwin    href="http://www.cygwin.com/" />
     <osx       href="http://www.apple.com/macosx" />
     
     <relnotes href="releasenotes.html" />

+ 1 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DF.java

@@ -35,7 +35,7 @@ import com.google.common.annotations.VisibleForTesting;
 
 /** Filesystem disk space usage statistics.
  * Uses the unix 'df' program to get mount points, and java.io.File for
- * space utilization. Tested on Linux, FreeBSD, Cygwin. */
+ * space utilization. Tested on Linux, FreeBSD, Windows. */
 @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
 @InterfaceStability.Evolving
 public class DF extends Shell {

+ 1 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java

@@ -717,7 +717,7 @@ public class FileUtil {
   
   /**
    * Class for creating hardlinks.
-   * Supports Unix, Cygwin, WindXP.
+   * Supports Unix, WindXP.
    * @deprecated Use {@link org.apache.hadoop.fs.HardLink}
    */
   @Deprecated

+ 0 - 5
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HardLink.java

@@ -254,11 +254,6 @@ public class HardLink {
   
   /**
    * Implementation of HardLinkCommandGetter class for Windows
-   * 
-   * Note that the linkCount shell command for Windows is actually
-   * a Cygwin shell command, and depends on ${cygwin}/bin
-   * being in the Windows PATH environment variable, so
-   * stat.exe can be found.
    */
   static class HardLinkCGWin extends HardLinkCommandGetter {
     //The Windows command getter impl class and its member fields are

+ 1 - 19
hadoop-common-project/hadoop-common/src/main/java/overview.html

@@ -60,9 +60,7 @@ that process vast amounts of data. Here's what makes Hadoop especially useful:</
     Hadoop was been demonstrated on GNU/Linux clusters with 2000 nodes.
   </li>
   <li>
-    Win32 is supported as a <i>development</i> platform. Distributed operation 
-    has not been well tested on Win32, so this is not a <i>production</i> 
-    platform.
+    Windows is also a supported platform.
   </li>  
 </ul>
   
@@ -84,15 +82,6 @@ that process vast amounts of data. Here's what makes Hadoop especially useful:</
   </li>
 </ol>
 
-<h4>Additional requirements for Windows</h4>
-
-<ol>
-  <li>
-    <a href="http://www.cygwin.com/">Cygwin</a> - Required for shell support in 
-    addition to the required software above.
-  </li>
-</ol>
-  
 <h3>Installing Required Software</h3>
 
 <p>If your platform does not have the required software listed above, you
@@ -104,13 +93,6 @@ $ sudo apt-get install ssh<br>
 $ sudo apt-get install rsync<br>
 </pre></blockquote></p>
 
-<p>On Windows, if you did not install the required software when you
-installed cygwin, start the cygwin installer and select the packages:</p>
-<ul>
-  <li>openssh - the "Net" category</li>
-  <li>rsync - the "Net" category</li>
-</ul>
-
 <h2>Getting Started</h2>
 
 <p>First, you need to get a copy of the Hadoop code.</p>

+ 1 - 13
hadoop-common-project/hadoop-common/src/site/apt/SingleNodeSetup.apt.vm

@@ -33,9 +33,7 @@ Single Node Setup
      * GNU/Linux is supported as a development and production platform.
        Hadoop has been demonstrated on GNU/Linux clusters with 2000 nodes.
 
-     * Win32 is supported as a development platform. Distributed operation
-       has not been well tested on Win32, so it is not supported as a
-       production platform.
+     * Windows is also a supported platform.
 
 ** Required Software
 
@@ -46,11 +44,6 @@ Single Node Setup
     [[2]] ssh must be installed and sshd must be running to use the Hadoop
        scripts that manage remote Hadoop daemons.
 
-   Additional requirements for Windows include:
-
-    [[1]] Cygwin - Required for shell support in addition to the required
-       software above.
-
 ** Installing Software
 
    If your cluster doesn't have the requisite software you will need to
@@ -63,11 +56,6 @@ Single Node Setup
    $ sudo apt-get install rsync
 ----
 
-   On Windows, if you did not install the required software when you
-   installed cygwin, start the cygwin installer and select the packages:
-
-     * openssh - the Net category
-
 * Download
 
    To get a Hadoop distribution, download a recent stable release from one

+ 2 - 16
hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml

@@ -539,14 +539,7 @@
 
                     <!-- Using Unix script to preserve file permissions -->
                     <echo file="${project.build.directory}/tomcat-untar.sh">
-
-                      which cygpath 2&gt; /dev/null
-                      if [ $? != 0 ]; then
-                      BUILD_DIR="${project.build.directory}"
-                      else
-                      BUILD_DIR=`cygpath --unix '${project.build.directory}'`
-                      fi
-                      cd $BUILD_DIR/tomcat.exp
+                      cd "${project.build.directory}/tomcat.exp"
                       gzip -cd ../../downloads/apache-tomcat-${tomcat.version}.tar.gz | tar xf -
                     </echo>
                     <exec executable="sh" dir="${project.build.directory}" failonerror="true">
@@ -582,14 +575,7 @@
                   <target if="tar">
                     <!-- Using Unix script to preserve symlinks -->
                     <echo file="${project.build.directory}/dist-maketar.sh">
-
-                      which cygpath 2&gt; /dev/null
-                      if [ $? != 0 ]; then
-                      BUILD_DIR="${project.build.directory}"
-                      else
-                      BUILD_DIR=`cygpath --unix '${project.build.directory}'`
-                      fi
-                      cd $BUILD_DIR
+                      cd "${project.build.directory}"
                       tar cf - ${project.artifactId}-${project.version} | gzip > ${project.artifactId}-${project.version}.tar.gz
                     </echo>
                     <exec executable="sh" dir="${project.build.directory}" failonerror="true">
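
The same probe-for-cygpath guard recurs in the antrun scripts of the pom.xml diffs below. A hedged reconstruction of the pattern each of them deletes:

      # Old build-script idiom: if cygpath exists (i.e. under Cygwin),
      # convert the Maven build directory to a POSIX path before cd'ing.
      if which cygpath > /dev/null 2>&1; then
        BUILD_DIR=`cygpath --unix '${project.build.directory}'`
      else
        BUILD_DIR="${project.build.directory}"
      fi
      cd "$BUILD_DIR"

Quoting the directory in the replacement, cd "${project.build.directory}", keeps paths containing spaces working without needing the Cygwin branch.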

+ 0 - 3
hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs

@@ -134,9 +134,6 @@ else
   CLASS="$COMMAND"
 fi
 
-if $cygwin; then
-  CLASSPATH=`cygpath -p -w "$CLASSPATH"`
-fi
 export CLASSPATH=$CLASSPATH
 
 HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.security.logger=${HADOOP_SECURITY_LOGGER:-INFO,NullAppender}"

+ 0 - 1
hadoop-hdfs-project/hadoop-hdfs/src/main/docs/src/documentation/content/xdocs/site.xml

@@ -76,7 +76,6 @@ See http://forrest.apache.org/docs/linking.html for more info.
     <zlib      href="http://www.zlib.net/" />
     <gzip      href="http://www.gzip.org/" />
     <bzip      href="http://www.bzip.org/" />
-    <cygwin    href="http://www.cygwin.com/" />
     <osx       href="http://www.apple.com/macosx" />
     <hod href="">
       <cluster-resources href="http://www.clusterresources.com" />

+ 1 - 19
hadoop-hdfs-project/hadoop-hdfs/src/main/java/overview.html

@@ -60,9 +60,7 @@ that process vast amounts of data. Here's what makes Hadoop especially useful:</
     Hadoop was been demonstrated on GNU/Linux clusters with 2000 nodes.
   </li>
   <li>
-    Win32 is supported as a <i>development</i> platform. Distributed operation 
-    has not been well tested on Win32, so this is not a <i>production</i> 
-    platform.
+    Windows is also a supported platform.
   </li>  
 </ul>
   
@@ -84,15 +82,6 @@ that process vast amounts of data. Here's what makes Hadoop especially useful:</
   </li>
 </ol>
 
-<h4>Additional requirements for Windows</h4>
-
-<ol>
-  <li>
-    <a href="http://www.cygwin.com/">Cygwin</a> - Required for shell support in 
-    addition to the required software above.
-  </li>
-</ol>
-  
 <h3>Installing Required Software</h3>
 
 <p>If your platform does not have the required software listed above, you
@@ -104,13 +93,6 @@ $ sudo apt-get install ssh<br>
 $ sudo apt-get install rsync<br>
 </pre></blockquote></p>
 
-<p>On Windows, if you did not install the required software when you
-installed cygwin, start the cygwin installer and select the packages:</p>
-<ul>
-  <li>openssh - the "Net" category</li>
-  <li>rsync - the "Net" category</li>
-</ul>
-
 <h2>Getting Started</h2>
 
 <p>First, you need to get a copy of the Hadoop code.</p>

+ 1 - 1
hadoop-hdfs-project/hadoop-hdfs/src/main/native/tests/test-libhdfs.sh

@@ -82,7 +82,7 @@ unset IFS
 findlibjvm () {
 javabasedir=$JAVA_HOME
 case $OS_NAME in
-    cygwin* | mingw* | pw23* )
+    mingw* | pw23* )
     lib_jvm_dir=`find $javabasedir -follow \( \
         \( -name client -type d -prune \) -o \
         \( -name "jvm.dll" -exec dirname {} \; \) \) 2> /dev/null | tr "\n" " "`

+ 0 - 4
hadoop-mapreduce-project/bin/mapred

@@ -135,10 +135,6 @@ for f in $HADOOP_MAPRED_HOME/modules/*.jar; do
   CLASSPATH=${CLASSPATH}:$f;
 done
 
-if $cygwin; then
-  CLASSPATH=`cygpath -p -w "$CLASSPATH"`
-fi
-
 if [ "$COMMAND" = "classpath" ] ; then
   echo $CLASSPATH
   exit

+ 1 - 1
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/testshell/ExternalMapReduce.java

@@ -72,7 +72,7 @@ public class ExternalMapReduce extends Configured implements Tool {
       }
       //fork off ls to see if the file exists.
       // java file.exists() will not work on 
-      // cygwin since it is a symlink
+      // Windows since it is a symlink
       String[] argv = new String[7];
       argv[0] = "ls";
       argv[1] = "files_tmp";

+ 1 - 8
hadoop-mapreduce-project/pom.xml

@@ -182,14 +182,7 @@
               <target if="tar">
                 <!-- Using Unix script to preserve symlinks -->
                 <echo file="${project.build.directory}/dist-maketar.sh">
-
-                  which cygpath 2&gt; /dev/null
-                  if [ $? != 0 ]; then
-                    BUILD_DIR="${project.build.directory}"
-                  else
-                    BUILD_DIR=`cygpath --unix '${project.build.directory}'`
-                  fi
-                  cd $BUILD_DIR
+                  cd "${project.build.directory}"
                   tar cf - ${project.artifactId}-${project.version} | gzip > ${project.artifactId}-${project.version}.tar.gz
                 </echo>
                 <exec executable="sh" dir="${project.build.directory}" failonerror="true">

+ 2 - 15
hadoop-project-dist/pom.xml

@@ -335,13 +335,7 @@
                   <target>
                     <!-- Using Unix script to preserve symlinks -->
                     <echo file="${project.build.directory}/dist-copynativelibs.sh">
-
-                      which cygpath 2&gt; /dev/null
-                      if [ $? != 0 ]; then
-                        BUILD_DIR="${project.build.directory}"
-                      else
-                        BUILD_DIR=`cygpath --unix '${project.build.directory}'`
-                      fi
+                      BUILD_DIR="${project.build.directory}"
                       TAR='tar cf -'
                       UNTAR='tar xfBp -'
                       LIB_DIR="${BUILD_DIR}/native/target/usr/local/lib"
@@ -379,14 +373,7 @@
                   <target if="tar">
                     <!-- Using Unix script to preserve symlinks -->
                     <echo file="${project.build.directory}/dist-maketar.sh">
-
-                      which cygpath 2&gt; /dev/null
-                      if [ $? != 0 ]; then
-                        BUILD_DIR="${project.build.directory}"
-                      else
-                        BUILD_DIR=`cygpath --unix '${project.build.directory}'`
-                      fi
-                      cd ${BUILD_DIR}
+                      cd "${project.build.directory}"
                       tar cf - ${project.artifactId}-${project.version} | gzip > ${project.artifactId}-${project.version}.tar.gz
                     </echo>
                     <exec executable="sh" dir="${project.build.directory}" failonerror="true">

+ 0 - 21
hadoop-yarn-project/hadoop-yarn/bin/yarn

@@ -72,11 +72,6 @@ function print_usage(){
   echo "Most commands print help when invoked w/o parameters."
 }
 
-cygwin=false
-case "`uname`" in
-CYGWIN*) cygwin=true;;
-esac
-
 # if no args specified, show usage
 if [ $# = 0 ]; then
   print_usage
@@ -177,9 +172,6 @@ unset IFS
 
 # figure out which class to run
 if [ "$COMMAND" = "classpath" ] ; then
-  if $cygwin; then
-    CLASSPATH=`cygpath -p -w "$CLASSPATH"`
-  fi
   echo $CLASSPATH
   exit
 elif [ "$COMMAND" = "rmadmin" ] ; then
@@ -227,19 +219,6 @@ else
   CLASS=$COMMAND
 fi
 
-# cygwin path translation
-if $cygwin; then
-  CLASSPATH=`cygpath -p -w "$CLASSPATH"`
-  HADOOP_YARN_HOME=`cygpath -w "$HADOOP_YARN_HOME"`
-  YARN_LOG_DIR=`cygpath -w "$YARN_LOG_DIR"`
-  TOOL_PATH=`cygpath -p -w "$TOOL_PATH"`
-fi
-
-# cygwin path translation
-if $cygwin; then
-  JAVA_LIBRARY_PATH=`cygpath -p "$JAVA_LIBRARY_PATH"`
-fi
-
 YARN_OPTS="$YARN_OPTS -Dhadoop.log.dir=$YARN_LOG_DIR"
 YARN_OPTS="$YARN_OPTS -Dyarn.log.dir=$YARN_LOG_DIR"
 YARN_OPTS="$YARN_OPTS -Dhadoop.log.file=$YARN_LOGFILE"

+ 1 - 8
hadoop-yarn-project/pom.xml

@@ -180,14 +180,7 @@
               <target if="tar">
                 <!-- Using Unix script to preserve symlinks -->
                 <echo file="${project.build.directory}/dist-maketar.sh">
-
-                  which cygpath 2&gt; /dev/null
-                  if [ $? != 0 ]; then
-                    BUILD_DIR="${project.build.directory}"
-                  else
-                    BUILD_DIR=`cygpath --unix '${project.build.directory}'`
-                  fi
-                  cd $BUILD_DIR
+                  cd "${project.build.directory}"
                   tar cf - ${project.artifactId}-${project.version} | gzip > ${project.artifactId}-${project.version}.tar.gz
                 </echo>
                 <exec executable="sh" dir="${project.build.directory}" failonerror="true">