Browse Source

AMBARI-2119. ambari-agent modifications to allow for Hadoop Compatible Filesystems (HCFS). (Bradley Childs via mahadev)

git-svn-id: https://svn.apache.org/repos/asf/incubator/ambari/trunk@1490167 13f79535-47bb-0310-9956-ffa450edef68
Mahadev Konar 12 years ago
parent commit 1491139ff5

+ 36 - 0
ambari-agent/src/main/puppet/modules/hdp-hadoop/manifests/hcfs.pp

@@ -0,0 +1,36 @@
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+class hdp-hadoop::hcfs_client(
+  $service_state = $hdp::params::cluster_client_state,
+  $opts = {}
+) inherits hdp-hadoop::params
+{
+  $hdp::params::service_exists['hdp-hadoop::hcfs_client'] = true
+  Hdp-hadoop::Common<||>{service_states +> $service_state}
+  Hdp-hadoop::Package<||>{include_64_bit => true}
+  Hdp-hadoop::Configfile<||>{sizes +> 64}
+  
+  if ($service_state == 'no_op') {
+  } elsif ($service_state in ['running','stopped','installed_and_configured','uninstalled']) {
+  	#adds package, users and directories, and common hadoop configs
+  	include hdp-hadoop::initialize
+  }
+}

+ 26 - 0
ambari-agent/src/main/puppet/modules/hdp-hadoop/manifests/hcfs_service_check.pp

@@ -0,0 +1,26 @@
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+class hdp-hadoop::hcfs_service_check(
+  $service_state = $hdp::params::cluster_client_state
+) inherits hdp-hadoop::params
+{
+  $hdp::params::service_exists['hdp-hadoop::hcfs'] = true
+}

+ 1 - 1
ambari-agent/src/main/puppet/modules/hdp-hadoop/manifests/init.pp

@@ -229,7 +229,7 @@ class hdp-hadoop(
     hdp::user{ $hdfs_user:
       groups => [$hdp::params::user_group]
     }
-    if ($hdfs_user != $mapred_user) {
+    if ( !defined(hdp::user[$mapred_user]) ) {
       hdp::user { $mapred_user:
         groups => [$hdp::params::user_group]
       }

+ 1 - 1
ambari-agent/src/main/puppet/modules/hdp-hadoop/manifests/params.pp

@@ -54,7 +54,7 @@ class hdp-hadoop::params(
   $dtnode_heapsize = hdp_default("dtnode_heapsize","1024m")
   $ttnode_heapsize = hdp_default("ttnode_heapsize","1024m")
 
-  $hadoop_heapsize = hdp_default("hadoop_heapsize","1024m")
+  $hadoop_heapsize = hdp_default("hadoop_heapsize","1024")
 
   $hdfs_log_dir_prefix = hdp_default("hdfs_log_dir_prefix","/var/log/hadoop")
 

+ 7 - 1
ambari-agent/src/main/python/ambari_agent/AmbariConfig.py

@@ -55,7 +55,7 @@ passphrase_env_var_name=AMBARI_PASSPHRASE
 [heartbeat]
 state_interval = 6
 dirs=/etc/hadoop,/etc/hadoop/conf,/var/run/hadoop,/var/log/hadoop
-rpms=hadoop,openssl,wget,net-snmp,ntpd,ruby,ganglia,nagios
+rpms=glusterfs,openssl,wget,net-snmp,ntpd,ruby,ganglia,nagios,glusterfs
 """
 s = StringIO.StringIO(content)
 config.readfp(s)
@@ -77,6 +77,9 @@ imports = [
 ]
 
 rolesToClass = {
+  'HCFS': 'hdp-hadoop::hcfs',
+  'HCFS_CLIENT': 'hdp-hadoop::hcfs_client',
+  'HCFS_SERVICE_CHECK': 'hdp-hadoop::hcfs_service_check',
   'NAMENODE': 'hdp-hadoop::namenode',
   'DATANODE': 'hdp-hadoop::datanode',
   'SECONDARY_NAMENODE': 'hdp-hadoop::snamenode',
@@ -136,6 +139,7 @@ serviceStates = {
 }
 
 servicesToPidNames = {
+  'HCFS' : 'glusterd.pid$',    
   'NAMENODE': 'hadoop-{USER}-namenode.pid$',
   'SECONDARY_NAMENODE': 'hadoop-{USER}-secondarynamenode.pid$',
   'DATANODE': 'hadoop-{USER}-datanode.pid$',
@@ -164,6 +168,8 @@ servicesToPidNames = {
 linuxUserPattern = '[A-Za-z0-9_-]*[$]?'
 
 pidPathesVars = [
+  {'var' : 'hcfs_pid_dir_prefix',
+   'defaultValue' : '/var/run'},      
   {'var' : 'hadoop_pid_dir_prefix',
    'defaultValue' : '/var/run/hadoop'},
   {'var' : 'hadoop_pid_dir_prefix',