
HADOOP-3103. Failed to update CHANGES.txt due to permissions. Reverting patch.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/core/trunk@642143 13f79535-47bb-0310-9956-ffa450edef68
Hemanth Yamijala
parent commit 279e337ef5

+ 0 - 5
src/contrib/hod/hodlib/GridServices/hdfs.py

@@ -149,7 +149,6 @@ class Hdfs(MasterSlave):
   
   def _setWorkDirs(self, workDirs, envs, attrs, parentDirs, subDir):
     namedir = None
-    hadooptmpdir = None
     datadir = []
 
     for p in parentDirs:
@@ -157,9 +156,6 @@ class Hdfs(MasterSlave):
       workDirs.append(os.path.join(p, subDir))
       dir = os.path.join(p, subDir, 'dfs-data')
       datadir.append(dir)
-      if not hadooptmpdir:
-        # Not used currently, generating hadooptmpdir just in case
-        hadooptmpdir = os.path.join(p, subDir, 'hadoop-tmp')
 
       if not namedir:
         namedir = os.path.join(p, subDir, 'dfs-name')
@@ -169,7 +165,6 @@ class Hdfs(MasterSlave):
 
     # FIXME!! use csv
     attrs['dfs.name.dir'] = namedir
-    attrs['hadoop.tmp.dir'] = hadooptmpdir
     attrs['dfs.data.dir'] = ','.join(datadir)
     # FIXME -- change dfs.client.buffer.dir
     envs['HADOOP_ROOT_LOGGER'] = "INFO,DRFA"
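
For context, a minimal sketch of the pattern this revert removes from Hdfs._setWorkDirs, reconstructed from the hunks above (not the full HOD method; the same change is undone in MapReduce._setWorkDirs below). The reverted patch generated an unused hadoop-tmp directory under the first parent dir and published it as hadoop.tmp.dir alongside dfs.name.dir and dfs.data.dir:

import os

def _setWorkDirs_sketch(attrs, parentDirs, subDir):
    # Reconstructed from the diff above; illustration only.
    namedir = None
    hadooptmpdir = None  # the state this revert removes
    datadir = []
    for p in parentDirs:
        datadir.append(os.path.join(p, subDir, 'dfs-data'))
        if not hadooptmpdir:
            # "Not used currently, generating hadooptmpdir just in case"
            hadooptmpdir = os.path.join(p, subDir, 'hadoop-tmp')
        if not namedir:
            namedir = os.path.join(p, subDir, 'dfs-name')
    # FIXME!! use csv (comment carried over from the source)
    attrs['dfs.name.dir'] = namedir
    attrs['hadoop.tmp.dir'] = hadooptmpdir  # dropped by this revert
    attrs['dfs.data.dir'] = ','.join(datadir)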

+ 0 - 5
src/contrib/hod/hodlib/GridServices/mapred.py

@@ -153,7 +153,6 @@ class MapReduce(MasterSlave):
     local = []
     system = None
     temp = None
-    hadooptmpdir = None
     dfsclient = []
     
     for p in parentDirs:
@@ -165,9 +164,6 @@ class MapReduce(MasterSlave):
         system = os.path.join(p, subDir, 'mapred-system')
       if not temp:
         temp = os.path.join(p, subDir, 'mapred-temp')
-      if not hadooptmpdir:
-        # Not used currently, generating hadooptmpdir just in case
-        hadooptmpdir = os.path.join(p, subDir, 'hadoop-tmp')
       dfsclientdir = os.path.join(p, subDir, 'dfs-client')
       dfsclient.append(dfsclientdir)
       workDirs.append(dfsclientdir)
@@ -176,7 +172,6 @@ class MapReduce(MasterSlave):
     attrs['mapred.system.dir'] = 'fillindir'
     attrs['mapred.temp.dir'] = temp
     attrs['dfs.client.buffer.dir'] = ','.join(dfsclient)
-    attrs['hadoop.tmp.dir'] = hadooptmpdir
 
 
     envs['HADOOP_ROOT_LOGGER'] = "INFO,DRFA"
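
Note the CSV convention visible in the context lines: multi-valued directories such as dfs.client.buffer.dir get one entry per parent dir, joined with commas (the same convention the "FIXME!! use csv" in hdfs.py above refers to for dfs.name.dir). A small worked example with hypothetical paths, assuming POSIX path separators:

import os

parentDirs = ['/grid/0', '/grid/1']   # hypothetical parent dirs
subDir = 'job-42'                     # hypothetical subdirectory name
dfsclient = [os.path.join(p, subDir, 'dfs-client') for p in parentDirs]
# One entry per parent dir, joined into a single CSV config value:
assert ','.join(dfsclient) == \
    '/grid/0/job-42/dfs-client,/grid/1/job-42/dfs-client'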

+ 2 - 4
src/contrib/hod/hodlib/Hod/hadoop.py

@@ -73,10 +73,8 @@ class hadoopConfig:
     description = {}
     paramsDict = {  'mapred.job.tracker'    : mapredAddr , \
                     'fs.default.name'       : "hdfs://" + hdfsAddr, \
-                    'hadoop.tmp.dir'        : tempDir, \
-                    'dfs.client.buffer.dir' : os.path.join(tempDir, 'dfs',
-                                                                    'tmp'),
-                 }
+                    'hadoop.tmp.dir'        : confDir, \
+                    'dfs.client.buffer.dir' : tempDir, }
 
     paramsDict['mapred.system.dir'] = mrSysDir
     

+ 0 - 3
src/contrib/hod/testing/testHadoop.py

@@ -92,9 +92,6 @@ class test_hadoopConfig(unittest.TestCase):
 
     # fs.default.name should start with hdfs://
     assert(keyvals['fs.default.name'].startswith('hdfs://'))
-    assert(keyvals['hadoop.tmp.dir'] == self.tempDir)
-    assert(keyvals['dfs.client.buffer.dir'] == os.path.join(self.tempDir,
-                                                            'dfs', 'tmp'))
 
     # TODO other tests
     pass
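
If the restored values were to be covered again, hypothetical assertions mirroring the removed ones might look like this, in the body of test_hadoopConfig (self.confDir is an assumed fixture attribute, by analogy with self.tempDir used above; neither check exists in the actual test):

# Hypothetical post-revert assertions, not present in the source:
assert keyvals['hadoop.tmp.dir'] == self.confDir
assert keyvals['dfs.client.buffer.dir'] == self.tempDir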