|
@@ -21,6 +21,8 @@ limitations under the License.
|
|
|
import os
|
|
|
import glob
|
|
|
import pwd
|
|
|
+import subprocess
|
|
|
+import AmbariConfig
|
|
|
|
|
|
class HostInfo:
|
|
|
|
|
@@ -31,10 +33,33 @@ class HostInfo:
|
|
|
return 'sym_link'
|
|
|
elif os.path.isdir(path):
|
|
|
return 'directory'
|
|
|
+ elif os.path.isfile(path):
|
|
|
+ return 'file'
|
|
|
return 'unknown'
|
|
|
|
|
|
- def hadoopDir(self):
|
|
|
- return self.dirType('/etc/hadoop')
|
|
|
def rpmInfo(self, rpmList):
  """Query the rpm database for each package named in the agent config.

  Package names come from the comma-separated 'rpms' option in the
  [heartbeat] section of AmbariConfig.config. rpmList is appended to
  in place, one dict per package:
    name      -- the configured package name
    installed -- whether `rpm -q <name>` found it
    version   -- `rpm -q` output; present only when installed is True
    available -- False; present only when launching `rpm` itself failed
  NOTE(review): the failure branch writes 'available' while the normal
  branch writes 'installed' -- confirm consumers expect both keys.
  Any config error (missing section/option) silently yields no entries.
  """
  config = AmbariConfig.config
  try:
    for name in config.get('heartbeat', 'rpms').split(','):
      name = name.strip()
      entry = {'name': name}
      try:
        proc = subprocess.Popen(["rpm", "-q", name],
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE)
        out, err = proc.communicate()
        if proc.returncode != 0 or not out.strip():
          entry['installed'] = False
        else:
          entry['installed'] = True
          entry['version'] = out.strip()
      except:
        entry['available'] = False
      rpmList.append(entry)
  except:
    # best-effort: a bad/missing config option leaves rpmList untouched
    pass
|
|
|
|
|
|
def hadoopVarRunCount(self):
|
|
|
if not os.path.exists('/var/run/hadoop'):
|
|
@@ -47,60 +72,88 @@ class HostInfo:
|
|
|
return 0
|
|
|
logs = glob.glob('/var/log/hadoop/*/*.log')
|
|
|
return len(logs)
|
|
|
+
|
|
|
def etcAlternativesConf(self, etcList):
  """Collect /etc/alternatives entries whose name ends in 'conf'.

  Appends one dict per matching entry to etcList in place:
    name   -- the /etc/alternatives/*conf path
    target -- os.path.realpath(name) when the entry is a symlink,
              otherwise the path itself
  Returns [] early when /etc/alternatives does not exist.
  """
  if not os.path.exists('/etc/alternatives'):
    return []

  for conf in glob.glob('/etc/alternatives/*conf'):
    target = os.path.realpath(conf) if os.path.islink(conf) else conf
    etcList.append({'name': conf, 'target': target})
|
|
|
+
|
|
|
def repos(self):
  """Return the raw package-repository listing for this host.

  Tries `yum -C repolist` first (CentOS/RHEL); if launching yum fails,
  falls back to `zypper repos` (SUSE). When neither command can be
  started, returns the sentinel "could_not_determine" so callers never
  receive an empty value.
  """
  # centos/redhat first, then suse -- same precedence as before
  for cmdline in (["yum", "-C", "repolist"], ["zypper", "repos"]):
    try:
      proc = subprocess.Popen(cmdline,
                              stdout=subprocess.PIPE,
                              stderr=subprocess.PIPE)
      out, err = proc.communicate()
      return out
    except:
      pass

  # default, never return empty
  return "could_not_determine"
|
|
|
+
|
|
|
|
|
|
def register(self, dict):
  """Populate dict with host inventory for the agent's registration.

  Keys written:
    varLogHadoopLogCount  -- hadoop log-file count
    varRunHadoopPidCount  -- hadoop pid-file count
    etcAlternativesConf   -- /etc/alternatives *conf entries
    paths     -- name/type for each dir listed in the [heartbeat]
                 'dirs' config option (empty on config errors)
    javaProcs -- running java processes found under /proc
    rpms      -- rpm status for the configured package names
    repoInfo  -- raw yum/zypper repository listing
  """
  dict['varLogHadoopLogCount'] = self.hadoopVarLogCount()
  dict['varRunHadoopPidCount'] = self.hadoopVarRunCount()

  etcs = []
  self.etcAlternativesConf(etcs)
  dict['etcAlternativesConf'] = etcs

  dirs = []
  config = AmbariConfig.config
  try:
    for dirName in config.get('heartbeat', 'dirs').split(','):
      name = dirName.strip()
      dirs.append({'type': self.dirType(name), 'name': name})
  except:
    # best-effort: a missing config option leaves 'paths' empty
    pass
  dict['paths'] = dirs

  java = []
  self.javaProcs(java)
  dict['javaProcs'] = java

  rpms = []
  self.rpmInfo(rpms)
  dict['rpms'] = rpms

  dict['repoInfo'] = self.repos()
|
|
|
|
|
|
def javaProcs(self, list):
  """Scan /proc for running java processes, appending one dict each to list.

  Each dict carries:
    pid     -- process id (int)
    hadoop  -- True when 'hadoop' appears in the command line
    command -- full cmdline with NUL separators replaced by spaces
    user    -- owner name resolved from the Uid: line of /proc/<pid>/status
  Any failure aborts the scan silently (best-effort inventory).
  """
  try:
    for pid in os.listdir('/proc'):
      if not pid.isdigit():
        continue
      cmd = open(os.path.join('/proc', pid, 'cmdline'), 'rb').read()
      cmd = cmd.replace('\0', ' ')
      if 'java' not in cmd:
        continue
      info = {}
      info['pid'] = int(pid)
      info['hadoop'] = 'hadoop' in cmd
      info['command'] = cmd.strip()
      for line in open(os.path.join('/proc', pid, 'status')):
        if line.startswith('Uid:'):
          uid = int(line.split()[1])
          info['user'] = pwd.getpwuid(uid).pw_name
      list.append(info)
  except:
    pass
  pass
|