
HADOOP-3542. Disables the creation of the _logs directory for the archives directory. Contributed by Mahadev Konar.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/core/trunk@667020 13f79535-47bb-0310-9956-ffa450edef68
Devaraj Das, 17 years ago
commit 40bcc0b800

+ 3 - 0
CHANGES.txt

@@ -582,6 +582,9 @@ Release 0.18.0 - Unreleased
     HADOOP-3418. When a directory is deleted, any leases that point to files
     in the subdirectory are removed. ((Tsz Wo (Nicholas), SZE via dhruba)
 
+    HADOOP-3542. Disables the creation of _logs directory for the archives
+    directory. (Mahadev Konar via ddas)
+
 Release 0.17.0 - 2008-05-18
 
   INCOMPATIBLE CHANGES

+ 1 - 0
bin/hadoop

@@ -70,6 +70,7 @@ if [ $# = 0 ]; then
   echo "  version              print the version"
   echo "  jar <jar>            run a jar file"
   echo "  distcp <srcurl> <desturl> copy file or directories recursively"
+  echo "  archive -archiveName NAME <src>* <dest> create a hadoop archive"
   echo "  daemonlog            get/set the log level for each daemon"
   echo " or"
   echo "  CLASSNAME            run the class named CLASSNAME"
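
For reference, the new help line corresponds to an invocation like bin/hadoop archive -archiveName foo.har /user/hadoop/dir1 /user/hadoop/dir2 /user/outputdir (the archive name and paths here are only illustrative), which writes foo.har under the destination directory.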

+ 1 - 0
src/test/org/apache/hadoop/fs/TestHarFileSystem.java

@@ -139,6 +139,7 @@ public class TestHarFileSystem extends TestCase {
     Path harPath = new Path("har://" + filePath.toUri().getPath());
     assertTrue(fs.exists(new Path(finalPath, "_index")));
     assertTrue(fs.exists(new Path(finalPath, "_masterindex")));
+    assertTrue(!fs.exists(new Path(finalPath, "_logs")));
     //creation tested
     //check if the archive is same
     // do ls and cat on all the files
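
For readers replaying this check outside the JUnit harness, the following is a minimal standalone sketch in the same spirit; the class name and archive path are hypothetical, and fs/har stand in for the test's fs and finalPath:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class HarLayoutCheck {
  public static void main(String[] args) throws Exception {
    // Hypothetical archive location; in TestHarFileSystem this is finalPath.
    Path har = new Path("/user/hadoop/foo.har");
    FileSystem fs = FileSystem.get(new Configuration());
    // After HADOOP-3542 the archive directory should contain only the index
    // files and the part files -- no _logs job-history directory.
    for (FileStatus stat : fs.listStatus(har)) {
      if ("_logs".equals(stat.getPath().getName())) {
        throw new AssertionError("unexpected _logs directory in archive");
      }
    }
    System.out.println("archive layout looks good: no _logs directory");
  }
}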

+ 1 - 0
src/tools/org/apache/hadoop/tools/HadoopArchives.java

@@ -377,6 +377,7 @@ public class HadoopArchives implements Tool {
     conf.setReducerClass(HArchivesReducer.class);
     conf.setMapOutputKeyClass(IntWritable.class);
     conf.setMapOutputValueClass(Text.class);
+    conf.set("hadoop.job.history.user.location", "none");
     FileInputFormat.addInputPath(conf, jobDirectory);
     //make sure no speculative execution is done
     conf.setSpeculativeExecution(false);
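
The added conf.set call is what actually prevents the _logs directory: setting hadoop.job.history.user.location to "none" tells the framework not to write per-job history files under the job's output directory, which for an archiving job is the archive destination itself. A minimal sketch of applying the same setting to any JobConf follows; the class and method names are hypothetical:

import org.apache.hadoop.mapred.JobConf;

public class NoUserJobHistory {
  // Apply the same setting HadoopArchives now sets, so the job writes no
  // per-job history (and hence no _logs directory) under its output directory.
  public static JobConf disableUserJobHistory(JobConf conf) {
    conf.set("hadoop.job.history.user.location", "none");
    return conf;
  }
}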