
HADOOP-3103. [HOD] Hadoop.tmp.dir should not be set to cluster directory. Contributed by Vinod Kumar Vavilapalli.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/core/branches/branch-0.16@642160 13f79535-47bb-0310-9956-ffa450edef68
Devaraj Das 17 years ago
commit f8bcb7efcf

+ 3 - 0
CHANGES.txt

@@ -57,6 +57,9 @@ Release 0.16.2 - Unreleased
 
     HADOOP-3111. Remove HBase from Hadoop contrib
 
+    HADOOP-3103. [HOD] Hadoop.tmp.dir should not be set to cluster 
+    directory. (Vinod Kumar Vavilapalli via ddas)
+
 Release 0.16.1 - 2008-03-13
 
   INCOMPATIBLE CHANGES

+ 5 - 0
src/contrib/hod/hodlib/GridServices/hdfs.py

@@ -186,6 +186,7 @@ class Hdfs(MasterSlave):
 
   def _setWorkDirs(self, workDirs, envs, attrs, parentDirs, subDir):
     namedir = None
+    hadooptmpdir = None
     datadir = []
 
     for p in parentDirs:
@@ -193,6 +194,9 @@ class Hdfs(MasterSlave):
       workDirs.append(os.path.join(p, subDir))
       dir = os.path.join(p, subDir, 'dfs-data')
       datadir.append(dir)
+      if not hadooptmpdir:
+        # Not used currently, generating hadooptmpdir just in case
+        hadooptmpdir = os.path.join(p, subDir, 'hadoop-tmp')
 
       if not namedir:
         namedir = os.path.join(p, subDir, 'dfs-name')
@@ -202,6 +206,7 @@ class Hdfs(MasterSlave):
 
     # FIXME!! use csv
     attrs['dfs.name.dir'] = namedir
+    attrs['hadoop.tmp.dir'] = hadooptmpdir
     attrs['dfs.data.dir'] = ','.join(datadir)
     # FIXME -- change dfs.client.buffer.dir
     envs['HADOOP_ROOT_LOGGER'] = "INFO,DRFA"
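
A minimal sketch (not part of the patch) of the directory layout the patched _setWorkDirs now produces, assuming hypothetical parentDirs and subDir values: each parent work directory gains a dedicated hadoop-tmp subdirectory, so hadoop.tmp.dir no longer has to point at the HOD cluster directory.

# Sketch of the per-parent-dir layout built by the patched hdfs.py hunk.
# parentDirs and subDir below are hypothetical example values.
import os

parentDirs = ['/scratch/a', '/scratch/b']   # assumed work-dir roots
subDir = 'hod-cluster-dir'                  # assumed per-cluster sub-directory

namedir = None
hadooptmpdir = None
datadir = []

for p in parentDirs:
    datadir.append(os.path.join(p, subDir, 'dfs-data'))
    if not hadooptmpdir:
        # first parent dir wins, mirroring the patched logic
        hadooptmpdir = os.path.join(p, subDir, 'hadoop-tmp')
    if not namedir:
        namedir = os.path.join(p, subDir, 'dfs-name')

attrs = {
    'dfs.name.dir'   : namedir,           # /scratch/a/hod-cluster-dir/dfs-name
    'hadoop.tmp.dir' : hadooptmpdir,       # /scratch/a/hod-cluster-dir/hadoop-tmp
    'dfs.data.dir'   : ','.join(datadir),  # comma-separated list of dfs-data dirs
}
print(attrs)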

+ 5 - 0
src/contrib/hod/hodlib/GridServices/mapred.py

@@ -191,6 +191,7 @@ class MapReduce(MasterSlave):
     local = []
     system = None
     temp = None
+    hadooptmpdir = None
     dfsclient = []
     
     for p in parentDirs:
@@ -202,6 +203,9 @@ class MapReduce(MasterSlave):
         system = os.path.join(p, subDir, 'mapred-system')
       if not temp:
         temp = os.path.join(p, subDir, 'mapred-temp')
+      if not hadooptmpdir:
+        # Not used currently, generating hadooptmpdir just in case
+        hadooptmpdir = os.path.join(p, subDir, 'hadoop-tmp')
       dfsclientdir = os.path.join(p, subDir, 'dfs-client')
       dfsclient.append(dfsclientdir)
       workDirs.append(dfsclientdir)
@@ -210,6 +214,7 @@ class MapReduce(MasterSlave):
     attrs['mapred.system.dir'] = 'fillindir'
     attrs['mapred.temp.dir'] = temp
     attrs['dfs.client.buffer.dir'] = ','.join(dfsclient)
+    attrs['hadoop.tmp.dir'] = hadooptmpdir
 
 
     envs['HADOOP_ROOT_LOGGER'] = "INFO,DRFA"

+ 4 - 2
src/contrib/hod/hodlib/Hod/hadoop.py

@@ -73,8 +73,10 @@ class hadoopConfig:
     description = {}
     paramsDict = {  'mapred.job.tracker'    : mapredAddr , \
                     'fs.default.name'       : hdfsAddr, \
-                    'hadoop.tmp.dir'        : confDir, \
-                    'dfs.client.buffer.dir' : tempDir, }
+                    'hadoop.tmp.dir'        : tempDir, \
+                    'dfs.client.buffer.dir' : os.path.join(tempDir, 'dfs',
+                                                                    'tmp'),
+                 }
 
     paramsDict['mapred.system.dir'] = mrSysDir
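
For context, a hedged sketch of the parameter dictionary hadoopConfig builds after this hunk; the addresses and directories below are placeholders, not values from a real cluster. The point of the change is that hadoop.tmp.dir is now derived from tempDir rather than from confDir (the cluster configuration directory), and dfs.client.buffer.dir moves under tempDir/dfs/tmp.

# Sketch of the patched paramsDict construction in hadoop.py.
# All values below are hypothetical placeholders.
import os

mapredAddr = 'jt-host:50030'           # assumed JobTracker address
hdfsAddr   = 'hdfs://nn-host:50070'    # assumed NameNode address
tempDir    = '/scratch/a/hod-tmp'      # assumed per-cluster temp directory
mrSysDir   = '/mapredsystem/hod'       # assumed mapred system dir

paramsDict = {
    'mapred.job.tracker'    : mapredAddr,
    'fs.default.name'       : hdfsAddr,
    # hadoop.tmp.dir now points under the temp dir, not the cluster conf dir
    'hadoop.tmp.dir'        : tempDir,
    'dfs.client.buffer.dir' : os.path.join(tempDir, 'dfs', 'tmp'),
}
paramsDict['mapred.system.dir'] = mrSysDir
print(paramsDict)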