
AMBARI-2728. Remove unnecessary warnings while executing puppet scripts. (Andrew Onischuk via smohanty)

Sumit Mohanty 12 years ago
parent
commit
c77941197b
25 changed files with 116 additions and 95 deletions
  1. +1 -1
      ambari-agent/src/main/puppet/modules/hdp-hadoop/manifests/hdfs/directory.pp
  2. +7 -7
      ambari-agent/src/main/puppet/modules/hdp-hadoop/manifests/init.pp
  3. +1 -1
      ambari-agent/src/main/puppet/modules/hdp-hadoop/manifests/namenode.pp
  4. +2 -2
      ambari-agent/src/main/puppet/modules/hdp-hadoop/manifests/service.pp
  5. +1 -1
      ambari-agent/src/main/puppet/modules/hdp-hadoop/templates/health_check.erb
  6. +1 -1
      ambari-agent/src/main/puppet/modules/hdp-hadoop/templates/taskcontroller.cfg.erb
  7. +1 -1
      ambari-agent/src/main/puppet/modules/hdp-hbase/templates/hbase-env.sh.erb
  8. +2 -2
      ambari-agent/src/main/puppet/modules/hdp-hcat/templates/hcat-env.sh.erb
  9. +1 -1
      ambari-agent/src/main/puppet/modules/hdp-hive/manifests/hive/service_check.pp
  10. +2 -3
      ambari-agent/src/main/puppet/modules/hdp-hive/manifests/jdbc-connector.pp
  11. +11 -6
      ambari-agent/src/main/puppet/modules/hdp-hive/manifests/service.pp
  12. +1 -1
      ambari-agent/src/main/puppet/modules/hdp-hive/templates/hive-env.sh.erb
  13. +1 -1
      ambari-agent/src/main/puppet/modules/hdp-nagios/manifests/server.pp
  14. +37 -37
      ambari-agent/src/main/puppet/modules/hdp-nagios/templates/hadoop-services.cfg.erb
  15. +1 -1
      ambari-agent/src/main/puppet/modules/hdp-nagios/templates/nagios.cfg.erb
  16. +1 -1
      ambari-agent/src/main/puppet/modules/hdp-oozie/manifests/oozie/service_check.pp
  17. +10 -8
      ambari-agent/src/main/puppet/modules/hdp-oozie/manifests/service.pp
  18. +1 -1
      ambari-agent/src/main/puppet/modules/hdp-templeton/manifests/server.pp
  19. +1 -1
      ambari-agent/src/main/puppet/modules/hdp-templeton/manifests/service.pp
  20. +10 -1
      ambari-agent/src/main/puppet/modules/hdp-templeton/manifests/templeton/service_check.pp
  21. +5 -5
      ambari-agent/src/main/puppet/modules/hdp-templeton/templates/webhcat-env.sh.erb
  22. +9 -9
      ambari-agent/src/main/puppet/modules/hdp-yarn/manifests/init.pp
  23. +7 -1
      ambari-agent/src/main/puppet/modules/hdp-zookeeper/manifests/quorum/service_check.pp
  24. +1 -1
      ambari-agent/src/main/puppet/modules/hdp/lib/puppet/parser/functions/hdp_default.rb
  25. +1 -1
      ambari-agent/src/main/puppet/modules/hdp/lib/puppet/parser/functions/hdp_host.rb

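Nearly all of the hunks below apply one pattern: Puppet 2.x resolves bare variable names through dynamic scoping and logs a deprecation warning for every such lookup, so references are rewritten either as fully qualified class variables (e.g. $hdp::params::stack_version) or as explicit top-scope lookups (e.g. $::service_state). A minimal sketch of the idea, using a hypothetical demo module rather than the Ambari code:

class demo::params {
  $stack_version = '2.0'
}

class demo {
  include demo::params

  # A bare $stack_version here would be resolved by dynamic scoping and
  # would emit a deprecation warning on every catalog compile.
  $version = $demo::params::stack_version   # fully qualified: silent

  # Variables set at top scope (facts, ENC parameters) get an explicit
  # "::" prefix instead of relying on scope climbing.
  $state = $::service_state

  notice("stack ${version}, state ${state}")
}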
+ 1 - 1
ambari-agent/src/main/puppet/modules/hdp-hadoop/manifests/hdfs/directory.pp

@@ -36,7 +36,7 @@ define hdp-hadoop::hdfs::directory(
   if ($service_state == 'running') {
 
 
-    if (hdp_get_major_stack_version($stack_version) >= 2) {
+    if (hdp_get_major_stack_version($hdp::params::stack_version) >= 2) {
       $mkdir_cmd = "fs -mkdir -p ${name}"
     } else {
       $mkdir_cmd = "fs -mkdir ${name}"

+ 7 - 7
ambari-agent/src/main/puppet/modules/hdp-hadoop/manifests/init.pp

@@ -105,8 +105,8 @@ class hdp-hadoop::initialize()
     }
   }
 
-  $task_log4j_properties_location = "${conf_dir}/task-log4j.properties"
-
+  $task_log4j_properties_location = "${hdp-hadoop::params::conf_dir}/task-log4j.properties"
+  
   file { $task_log4j_properties_location:
     owner   => $hdp-hadoop::params::mapred_user,
     group   => $hdp::params::user_group,
@@ -188,7 +188,7 @@ class hdp-hadoop::initialize()
     group => $hdp::params::user_group
   }
 
-  if (hdp_get_major_stack_version($stack_version) >= 2) {
+  if (hdp_get_major_stack_version($hdp::params::stack_version) >= 2) {
     if (hdp_is_empty($configuration) == false and hdp_is_empty($configuration['hdfs-site']) == false) {
       if (hdp_is_empty($configuration['hdfs-site']['dfs.hosts.exclude']) == false) {
         $exlude_file_path = $configuration['hdfs-site']['dfs.hosts.exclude']
@@ -232,7 +232,7 @@ class hdp-hadoop(
     }
 
     hdp::directory_recursive_create { $hadoop_config_dir:
-      service_state => $service_state,
+      service_state => $::service_state,
       force => true
     }
 
@@ -243,7 +243,7 @@ class hdp-hadoop(
 
 
     hdp::directory_recursive_create { $hadoop_config_dir:
-      service_state => $service_state,
+      service_state => $::service_state,
       force => true,
       owner => $hdfs_user,
       group => $hdp::params::user_group
@@ -323,7 +323,7 @@ class hdp-hadoop(
       }
     }
 
-    if (hdp_get_major_stack_version($stack_version) >= 2) {
+    if (hdp_get_major_stack_version($hdp::params::stack_version) >= 2) {
       hdp::directory_recursive_create { "$hadoop_tmp_dir":
         service_state => $service_state,
         force => true,
@@ -331,7 +331,7 @@ class hdp-hadoop(
       }
     }
 
-    if (hdp_get_major_stack_version($stack_version) >= 2) {
+    if (hdp_get_major_stack_version($hdp::params::stack_version) >= 2) {
       Anchor['hdp-hadoop::begin'] -> Hdp-hadoop::Package<||> ->  Hdp::User<|title == $hdfs_user or title == $mapred_user|>  ->
       Hdp::Directory_recursive_create[$hadoop_config_dir] -> Hdp-hadoop::Configfile<|tag == 'common'|> ->
       Hdp::Directory_recursive_create[$logdirprefix] -> Hdp::Directory_recursive_create[$piddirprefix] -> Hdp::Directory_recursive_create["$hadoop_tmp_dir"] -> Anchor['hdp-hadoop::end']

+ 1 - 1
ambari-agent/src/main/puppet/modules/hdp-hadoop/manifests/namenode.pp

@@ -166,7 +166,7 @@ define hdp-hadoop::namenode::create_app_directories($service_state)
       }
     }
 
-    if (hdp_get_major_stack_version($stack_version) >= 2) {
+    if (hdp_get_major_stack_version($hdp::params::stack_version) >= 2) {
       if ($hdp::params::nm_hosts != "") {
         if ($hdp::params::yarn_log_aggregation_enabled == "true") {
           $yarn_user = $hdp::params::yarn_user

+ 2 - 2
ambari-agent/src/main/puppet/modules/hdp-hadoop/manifests/service.pp

@@ -73,7 +73,7 @@ define hdp-hadoop::service(
     hdp::directory_recursive_create { $pid_dir: 
       owner       => $user,
       context_tag => 'hadoop_service',
-      service_state => $service_state,
+      service_state => $::service_state,
       force => true
     }
   }
@@ -82,7 +82,7 @@ define hdp-hadoop::service(
     hdp::directory_recursive_create { $log_dir: 
       owner       => $user,
       context_tag => 'hadoop_service',
-      service_state => $service_state,
+      service_state => $::service_state,
       force => true
     }
   }

+ 1 - 1
ambari-agent/src/main/puppet/modules/hdp-hadoop/templates/health_check.erb

@@ -53,7 +53,7 @@ function check_taskcontroller {
 
 function check_jetty {
   hname=`hostname`
-  jmx=`curl -s -S -m 5 "http://$hname:<%=scope.function_hdp_template_var("tasktracker_port")%>/jmx?qry=Hadoop:service=TaskTracker,name=ShuffleServerMetrics" 2>/dev/null` ;
+  jmx=`curl -s -S -m 5 "http://$hname:<%=scope.function_hdp_template_var("::hdp::tasktracker_port")%>/jmx?qry=Hadoop:service=TaskTracker,name=ShuffleServerMetrics" 2>/dev/null` ;
   if [ $? -eq 0 ] ; then
     e=`echo $jmx | awk '/shuffle_exceptions_caught/ {printf"%d",$2}'` ;
     e=${e:-0} # no jmx servlet ?

+ 1 - 1
ambari-agent/src/main/puppet/modules/hdp-hadoop/templates/taskcontroller.cfg.erb

@@ -16,5 +16,5 @@
 # * limitations under the License.
 # */
 mapred.local.dir=<%=scope.function_hdp_template_var("mapred_local_dir")%>
-mapreduce.tasktracker.group=<%=scope.function_hdp_default(["mapred-site/mapreduce.tasktracker.group","hadoop"])%>
+mapreduce.tasktracker.group=<%=scope.function_hdp_default(["::mapred-site/mapreduce.tasktracker.group","hadoop"])%>
 hadoop.log.dir=<%=scope.function_hdp_template_var("hdfs_log_dir_prefix")%>/<%=scope.function_hdp_template_var("mapred_user")%>

+ 1 - 1
ambari-agent/src/main/puppet/modules/hdp-hbase/templates/hbase-env.sh.erb

@@ -25,7 +25,7 @@ export JAVA_HOME=<%=scope.function_hdp_java_home()%>
 export HBASE_CONF_DIR=${HBASE_CONF_DIR:-<%=scope.function_hdp_template_var("hbase_conf_dir")%>}
 
 # Extra Java CLASSPATH elements. Optional.
-export HBASE_CLASSPATH=${HBASE_CLASSPATH}:<%=scope.function_hdp_template_var("::hdp-hadoop::params::conf_dir")%>
+export HBASE_CLASSPATH=${HBASE_CLASSPATH}:<%=scope.function_hdp_template_var("conf_dir")%>
 
 # The maximum amount of heap to use, in MB. Default is 1000.
 # export HBASE_HEAPSIZE=1000

+ 2 - 2
ambari-agent/src/main/puppet/modules/hdp-hcat/templates/hcat-env.sh.erb

@@ -15,8 +15,8 @@
 # limitations under the License.
 
 JAVA_HOME=<%=scope.function_hdp_java_home()%>
-HCAT_PID_DIR=<%=scope.function_hdp_template_var("hcat_pid_dir")%>/
-HCAT_LOG_DIR=<%=scope.function_hdp_template_var("hcat_log_dir")%>/
+HCAT_PID_DIR=<%=scope.function_hdp_template_var("::hcat_pid_dir")%>/
+HCAT_LOG_DIR=<%=scope.function_hdp_template_var("::hcat_log_dir")%>/
 HCAT_CONF_DIR=<%=scope.function_hdp_template_var("hcat_conf_dir")%>
 HADOOP_HOME=${HADOOP_HOME:-<%=scope.function_hdp_template_var("::hdp::params::hadoop_home")%>}
 #DBROOT is the path where the connector jars are downloaded

+ 1 - 1
ambari-agent/src/main/puppet/modules/hdp-hive/manifests/hive/service_check.pp

@@ -29,7 +29,7 @@ class hdp-hive::hive::service_check() inherits hdp-hive::params
   if ($security_enabled == true) {
     $kinit_cmd = "${hdp::params::kinit_path_local} -kt ${smoke_user_keytab} ${smoke_test_user};"
     $hive_principal_ext = "principal=${hdp-hive::params::hive_metatore_keytab_path}"
-    $hive_url_ext = "${hive_url}/\;${hive_principal_ext}"
+    $hive_url_ext = "${hive_url}/\\;${hive_principal_ext}"
     $smoke_cmd = "${kinit_cmd} env JAVA_HOME=${hdp::params::java64_home} ${smoke_test_path} ${hive_url_ext} ${smoke_test_sql}"
   } else {
     $smoke_cmd = "env JAVA_HOME=$hdp::params::java64_home $smoke_test_path $hive_url $smoke_test_sql"

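The doubled backslash above addresses a different warning: inside a double-quoted Puppet string, \; is an unrecognized escape sequence, which Puppet reports even though the output text is unchanged, while \\; spells the backslash explicitly and evaluates to \; silently. A small sketch with a made-up JDBC URL:

# Both spellings evaluate to ...:10000/\;principal=..., but only this
# one compiles without an "unrecognized escape sequence" warning.
$hive_url_ext = "jdbc:hive2://example.com:10000/\\;principal=hive/_HOST@EXAMPLE.COM"
notice($hive_url_ext)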
+ 2 - 3
ambari-agent/src/main/puppet/modules/hdp-hive/manifests/jdbc-connector.pp

@@ -33,10 +33,9 @@ class hdp-hive::jdbc-connector()
      require   => Anchor['hdp-hive::jdbc-connector::begin']
    }
 
-
   if ($hive_jdbc_driver == "com.mysql.jdbc.Driver"){
    hdp::exec { 'hive mkdir -p ${artifact_dir} ;  cp /usr/share/java/${jdbc_jar_name}  ${target}':
-       command => "mkdir -p ${artifact_dir} ;  cp /usr/share/java/${jdbc_jar_name}  ${target}",
+       command => "mkdir -p ${::artifact_dir} ;  cp /usr/share/java/${jdbc_jar_name}  ${target}",
        unless  => "test -f ${target}",
        creates => $target,
        path    => ["/bin","/usr/bin/"],
@@ -45,7 +44,7 @@ class hdp-hive::jdbc-connector()
    }
   } elsif ($hive_jdbc_driver == "oracle.jdbc.driver.OracleDriver") {
    hdp::exec { 'hive mkdir -p ${artifact_dir} ; curl -kf --retry 10 ${driver_curl_source} -o ${driver_curl_target} &&  cp ${driver_curl_target} ${target}':
-       command => "mkdir -p ${artifact_dir} ; curl -kf --retry 10 ${driver_curl_source} -o ${driver_curl_target} &&  cp ${driver_curl_target} ${target}",
+       command => "mkdir -p ${::artifact_dir} ; curl -kf --retry 10 ${driver_curl_source} -o ${driver_curl_target} &&  cp ${driver_curl_target} ${target}",
        unless  => "test -f ${target}",
        path    => ["/bin","/usr/bin/"],
        notify  =>  Anchor['hdp-hive::jdbc-connector::end'],

+ 11 - 6
ambari-agent/src/main/puppet/modules/hdp-hive/manifests/service.pp

@@ -25,17 +25,22 @@ class hdp-hive::service(
 {
   include $hdp-hive::params
   
-  $user = $hdp-hive::params::hive_user
+  $hive_user = $hdp-hive::params::hive_user
   $hadoop_home = $hdp::params::hadoop_home
+  $hive_pid_dir = $hdp-hive::params::hive_pid_dir
+  $hive_pid = $hdp-hive::params::hive_pid
   $hive_log_dir = $hdp-hive::params::hive_log_dir
+  $start_hiveserver2_script = $hdp-hive::params::start_hiveserver2_script
+  $start_metastore_script = $hdp-hive::params::start_metastore_script
+  $hive_var_lib = $hdp-hive::params::hive_var_lib
+  $hive_server_conf_dir = $hdp-hive::params::hive_server_conf_dir
 
   $start_hiveserver2_path = "/tmp/$start_hiveserver2_script"
   $start_metastore_path = "/tmp/$start_metastore_script"
 
   if ($service_type == 'metastore') {
-
-    $pid_file = "${hdp-hive::params::hive_pid_dir}/hive.pid" 
-    $cmd = "env HADOOP_HOME=${hadoop_home} JAVA_HOME=$hdp::params::java64_home $start_metastore_path ${hive_log_dir}/hive.out ${hive_log_dir}/hive.log $pid_file ${hive_server_conf_dir}"
+    $pid_file = "$hive_pid_dir/hive.pid" 
+    $cmd = "env HADOOP_HOME=${hadoop_home} JAVA_HOME=$hdp::params::java64_home $start_metastore_path ${hive_log_dir}/hive.out ${hive_log_dir}/hive.log $pid_file $hdp-hive::params::hive_server_conf_dir"
     
   } elsif ($service_type == 'hiveserver2') {
     $pid_file = "$hive_pid_dir/$hive_pid" 
@@ -101,9 +106,9 @@ class hdp-hive::service(
 define hdp-hive::service::directory()
 {
   hdp::directory_recursive_create { $name: 
-    owner => $hive_user,
+    owner => $hdp-hive::params::hive_user,
     mode => '0755',
-    service_state => $ensure,
+    service_state => $::ensure,
     force => true
   }
 }

+ 1 - 1
ambari-agent/src/main/puppet/modules/hdp-hive/templates/hive-env.sh.erb

@@ -36,7 +36,7 @@
 
 # The heap size of the jvm stared by hive shell script can be controlled via:
 
-export HADOOP_HEAPSIZE="<%=scope.function_hdp_template_var("::hdp::params::hadoop_heapsize")%>"
+export HADOOP_HEAPSIZE="<%=scope.function_hdp_template_var("::hadoop_heapsize")%>"
 export HADOOP_CLIENT_OPTS="-Xmx${HADOOP_HEAPSIZE}m $HADOOP_CLIENT_OPTS"
 
 # Larger heap size may be required when running queries over large number of files or partitions.

+ 1 - 1
ambari-agent/src/main/puppet/modules/hdp-nagios/manifests/server.pp

@@ -205,7 +205,7 @@ class hdp-nagios::server(
     anchor{'hdp-nagios::server::begin':}
     anchor{'hdp-nagios::server::end':}
 
-    Anchor['hdp-nagios::server::begin'] -> Class['hdp-nagios::server::packages'] -> file[$nagios_httpd_config_file] -> Class['hdp-nagios::server::enable_snmp']->
+    Anchor['hdp-nagios::server::begin'] -> Class['hdp-nagios::server::packages'] -> File[$nagios_httpd_config_file] -> Class['hdp-nagios::server::enable_snmp']->
     Hdp::Directory[$nagios_config_dir] -> Hdp::Directory[$plugins_dir] -> Hdp::Directory_recursive_create[$nagios_pid_dir] ->
     Hdp::Directory[$nagios_obj_dir] -> Hdp::Directory_Recursive_Create[$nagios_var_dir] ->
     Hdp::Directory_Recursive_Create[$check_result_path] -> Hdp::Directory_Recursive_Create[$nagios_rw_dir] ->

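The file → File change above is a different class of fix: in a chaining arrow, an already-declared resource must be referenced with a capitalized type name, since the lowercase file { ... } form declares resources and does not act as a reference. A sketch with hypothetical resources:

file { '/etc/demo.conf':   # lowercase: declares the resource
  ensure => present,
}

service { 'demo':
  ensure => running,
}

# Ordering chains take capitalized resource references;
# file['/etc/demo.conf'] would not be accepted as a reference here.
File['/etc/demo.conf'] -> Service['demo']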
+ 37 - 37
ambari-agent/src/main/puppet/modules/hdp-nagios/templates/hadoop-services.cfg.erb

@@ -35,7 +35,7 @@ define service {
         use                     hadoop-service
         service_description     NAGIOS::Nagios status log staleness
         servicegroups           NAGIOS
-        check_command           check_nagios!10!/var/nagios/status.dat!<%=nagios_lookup_daemon_str%>
+        check_command           check_nagios!10!/var/nagios/status.dat!<%=scope.function_hdp_template_var("::hdp-nagios::server::config::nagios_lookup_daemon_str")%>
         normal_check_interval   5
         retry_check_interval    0.5
         max_check_attempts      2
@@ -126,7 +126,7 @@ define service {
         use                     hadoop-service
         service_description     GANGLIA::Ganglia [gmetad] process down
         servicegroups           GANGLIA
-        check_command           check_tcp!<%=scope.function_hdp_template_var("ganglia_port")%>!-w 1 -c 1
+        check_command           check_tcp!<%=scope.function_hdp_template_var("::hdp::ganglia_port")%>!-w 1 -c 1
         normal_check_interval   0.25
         retry_check_interval    0.25
         max_check_attempts      4
@@ -137,7 +137,7 @@ define service {
         use                     hadoop-service
         service_description     GANGLIA::Ganglia Collector [gmond] process down alert for slaves
         servicegroups           GANGLIA
-        check_command           check_tcp!<%=scope.function_hdp_template_var("ganglia_collector_slaves_port")%>!-w 1 -c 1
+        check_command           check_tcp!<%=scope.function_hdp_template_var("::hdp::ganglia_collector_slaves_port")%>!-w 1 -c 1
         normal_check_interval   0.25
         retry_check_interval    0.25
         max_check_attempts      4
@@ -148,7 +148,7 @@ define service {
         use                     hadoop-service
         service_description     GANGLIA::Ganglia Collector [gmond] process down alert for NameNode
         servicegroups           GANGLIA
-        check_command           check_tcp!<%=scope.function_hdp_template_var("ganglia_collector_namenode_port")%>!-w 1 -c 1
+        check_command           check_tcp!<%=scope.function_hdp_template_var("::hdp::ganglia_collector_namenode_port")%>!-w 1 -c 1
         normal_check_interval   0.25
         retry_check_interval    0.25
         max_check_attempts      4
@@ -159,7 +159,7 @@ define service {
         use                     hadoop-service
         service_description     GANGLIA::Ganglia Collector [gmond] process down alert for JobTracker
         servicegroups           GANGLIA
-        check_command           check_tcp!<%=scope.function_hdp_template_var("ganglia_collector_jobtracker_port")%>!-w 1 -c 1
+        check_command           check_tcp!<%=scope.function_hdp_template_var("::hdp::ganglia_collector_jobtracker_port")%>!-w 1 -c 1
         normal_check_interval   0.25
         retry_check_interval    0.25
         max_check_attempts      4
@@ -171,7 +171,7 @@ define service {
         use                     hadoop-service
         service_description     GANGLIA::Ganglia Collector [gmond] process down alert for HBase Master
         servicegroups           GANGLIA
-        check_command           check_tcp!<%=scope.function_hdp_template_var("ganglia_collector_hbase_port")%>!-w 1 -c 1
+        check_command           check_tcp!<%=scope.function_hdp_template_var("::hdp::ganglia_collector_hbase_port")%>!-w 1 -c 1
         normal_check_interval   0.25
         retry_check_interval    0.25
         max_check_attempts      4
@@ -184,7 +184,7 @@ define service {
         use                     hadoop-service
         service_description     GANGLIA::Ganglia Collector [gmond] process down alert for Resource Manager
         servicegroups           GANGLIA
-        check_command           check_tcp!<%=scope.function_hdp_template_var("ganglia_collector_rm_port")%>!-w 1 -c 1
+        check_command           check_tcp!<%=scope.function_hdp_template_var("::hdp::ganglia_collector_rm_port")%>!-w 1 -c 1
         normal_check_interval   0.25
         retry_check_interval    0.25
         max_check_attempts      4
@@ -197,7 +197,7 @@ define service {
         use                     hadoop-service
         service_description     GANGLIA::Ganglia Collector [gmond] process down alert for Node Manager
         servicegroups           GANGLIA
-        check_command           check_tcp!<%=scope.function_hdp_template_var("ganglia_collector_nm_port")%>!-w 1 -c 1
+        check_command           check_tcp!<%=scope.function_hdp_template_var("::hdp::ganglia_collector_nm_port")%>!-w 1 -c 1
         normal_check_interval   0.25
         retry_check_interval    0.25
         max_check_attempts      4
@@ -210,7 +210,7 @@ define service {
         use                     hadoop-service
         service_description     GANGLIA::Ganglia Collector [gmond] process down alert for History Server 2
         servicegroups           GANGLIA
-        check_command           check_tcp!<%=scope.function_hdp_template_var("ganglia_collector_hs_port")%>!-w 1 -c 1
+        check_command           check_tcp!<%=scope.function_hdp_template_var("::hdp::ganglia_collector_hs_port")%>!-w 1 -c 1
         normal_check_interval   0.25
         retry_check_interval    0.25
         max_check_attempts      4
@@ -239,7 +239,7 @@ define service {
         use                     hadoop-service
         service_description     NAMENODE::NameNode Web UI down
         servicegroups           HDFS
-        check_command           check_webui!namenode!<%=scope.function_hdp_template_var("namenode_port")%>
+        check_command           check_webui!namenode!<%=scope.function_hdp_template_var("::hdp::namenode_port")%>
         normal_check_interval   1
         retry_check_interval    1
         max_check_attempts      3
@@ -250,7 +250,7 @@ define service {
         use                     hadoop-service
         service_description     NAMENODE::NameNode edit logs directory status
         servicegroups           HDFS
-        check_command           check_name_dir_status!<%=scope.function_hdp_template_var("namenode_port")%>
+        check_command           check_name_dir_status!<%=scope.function_hdp_template_var("::hdp::namenode_port")%>
         normal_check_interval   0.5
         retry_check_interval    0.5
         max_check_attempts      3
@@ -273,7 +273,7 @@ define service {
         use                     hadoop-service
         service_description     NAMENODE::NameNode process down
         servicegroups           HDFS
-        check_command           check_tcp!<%=scope.function_hdp_template_var("namenode_metadata_port")%>!-w 1 -c 1
+        check_command           check_tcp!<%=scope.function_hdp_template_var("::hdp::namenode_metadata_port")%>!-w 1 -c 1
         normal_check_interval   0.5
         retry_check_interval    0.25
         max_check_attempts      3
@@ -284,7 +284,7 @@ define service {
         use                     hadoop-service
         service_description     HDFS::Corrupt/Missing blocks
         servicegroups           HDFS
-        check_command           check_hdfs_blocks!<%=scope.function_hdp_template_var("namenode_port")%>!0%!0%
+        check_command           check_hdfs_blocks!<%=scope.function_hdp_template_var("::hdp::namenode_port")%>!0%!0%
         normal_check_interval   2
         retry_check_interval    1 
         max_check_attempts      1
@@ -295,7 +295,7 @@ define service {
         use                     hadoop-service
         service_description     HDFS::HDFS capacity utilization
         servicegroups           HDFS
-        check_command           check_hdfs_capacity!<%=scope.function_hdp_template_var("namenode_port")%>!80%!90%
+        check_command           check_hdfs_capacity!<%=scope.function_hdp_template_var("::hdp::namenode_port")%>!80%!90%
         normal_check_interval   10
         retry_check_interval    1 
         max_check_attempts      1
@@ -306,7 +306,7 @@ define service {
         use                     hadoop-service
         service_description     HDFS::NameNode RPC latency
         servicegroups           HDFS
-        check_command           check_rpcq_latency!NameNode!<%=scope.function_hdp_template_var("namenode_port")%>!3000!5000
+        check_command           check_rpcq_latency!NameNode!<%=scope.function_hdp_template_var("::hdp::namenode_port")%>!3000!5000
         normal_check_interval   5
         retry_check_interval    1 
         max_check_attempts      5
@@ -320,7 +320,7 @@ define service {
         use                     hadoop-service
         service_description     JOBTRACKER::JobTracker Web UI down
         servicegroups           MAPREDUCE
-        check_command           check_webui!jobtracker!<%=scope.function_hdp_template_var("jtnode_port")%>
+        check_command           check_webui!jobtracker!<%=scope.function_hdp_template_var("::hdp::jtnode_port")%>
         normal_check_interval   1
         retry_check_interval    1
         max_check_attempts      3
@@ -331,7 +331,7 @@ define service {
         use                     hadoop-service
         service_description     JOBTRACKER::JobHistory Web UI down
         servicegroups           MAPREDUCE
-        check_command           check_webui!jobhistory!<%=scope.function_hdp_template_var("jobhistory_port")%>
+        check_command           check_webui!jobhistory!<%=scope.function_hdp_template_var("::hdp::jobhistory_port")%>
         normal_check_interval   1
         retry_check_interval    1
         max_check_attempts      3
@@ -355,7 +355,7 @@ define service {
         use                     hadoop-service
         service_description     JOBTRACKER::JobTracker process down
         servicegroups           MAPREDUCE
-        check_command           check_tcp!<%=scope.function_hdp_template_var("jtnode_port")%>!-w 1 -c 1
+        check_command           check_tcp!<%=scope.function_hdp_template_var("::hdp::jtnode_port")%>!-w 1 -c 1
         normal_check_interval   0.5
         retry_check_interval    0.25
         max_check_attempts      4
@@ -366,7 +366,7 @@ define service {
         use                     hadoop-service
         service_description     MAPREDUCE::JobTracker RPC latency
         servicegroups           MAPREDUCE
-        check_command           check_rpcq_latency!JobTracker!<%=scope.function_hdp_template_var("jtnode_port")%>!3000!5000
+        check_command           check_rpcq_latency!JobTracker!<%=scope.function_hdp_template_var("::hdp::jtnode_port")%>!3000!5000
         normal_check_interval   5
         retry_check_interval    1 
         max_check_attempts      5
@@ -378,7 +378,7 @@ define service {
         use                     hadoop-service
         service_description     TASKTRACKER::TaskTracker process down
         servicegroups           MAPREDUCE
-        check_command           check_tcp!<%=scope.function_hdp_template_var("tasktracker_port")%>!-w 1 -c 1
+        check_command           check_tcp!<%=scope.function_hdp_template_var("::hdp::tasktracker_port")%>!-w 1 -c 1
         normal_check_interval   1
         retry_check_interval    0.5
         max_check_attempts      3
@@ -390,7 +390,7 @@ define service {
         use                     hadoop-service
         service_description     TASKTRACKER::Mapreduce local dir used space
         servicegroups           MAPREDUCE
-        check_command           check_mapred_local_dir_used_space!<%=scope.function_hdp_default("mapred-site/mapred.local.dir")%>!85%
+        check_command           check_mapred_local_dir_used_space!<%=scope.function_hdp_default("::hdp::mapred-site/mapred.local.dir")%>!85%
         normal_check_interval   0.5
         retry_check_interval    0.25
         max_check_attempts      3
@@ -405,7 +405,7 @@ define service {
         use                     hadoop-service
         service_description     RESOURCEMANAGER::Resource Manager Web UI down
         servicegroups           YARN
-        check_command           check_webui!resorcemanager!<%=scope.function_hdp_template_var("rm_port")%>
+        check_command           check_webui!resorcemanager!<%=scope.function_hdp_template_var("::hdp::rm_port")%>
         normal_check_interval   1
         retry_check_interval    1
         max_check_attempts      3
@@ -427,7 +427,7 @@ define service {
         use                     hadoop-service
         service_description     RESOURCEMANAGER::Resource Manager RPC latency
         servicegroups           YARN
-        check_command           check_rpcq_latency!ResorceManager!<%=scope.function_hdp_template_var("rm_port")%>!3000!5000
+        check_command           check_rpcq_latency!ResorceManager!<%=scope.function_hdp_template_var("::hdp::rm_port")%>!3000!5000
         normal_check_interval   5
         retry_check_interval    1 
         max_check_attempts      5
@@ -456,7 +456,7 @@ define service {
         use                     hadoop-service
         service_description     JOBHISTORY::History Server 2 Web UI down
         servicegroups           MAPREDUCE
-        check_command           check_webui!historyserver2!<%=scope.function_hdp_template_var("hs_port")%>
+        check_command           check_webui!historyserver2!<%=scope.function_hdp_template_var("::hdp::hs_port")%>
         normal_check_interval   1
         retry_check_interval    1
         max_check_attempts      3
@@ -478,7 +478,7 @@ define service {
         use                     hadoop-service
         service_description     JOBHISTORY::History Server 2 RPC latency
         servicegroups           MAPREDUCE
-        check_command           check_rpcq_latency!JobHistoryServer!<%=scope.function_hdp_template_var("hs_port")%>!3000!5000
+        check_command           check_rpcq_latency!JobHistoryServer!<%=scope.function_hdp_template_var("::hdp::hs_port")%>!3000!5000
         normal_check_interval   5
         retry_check_interval    1 
         max_check_attempts      5
@@ -493,7 +493,7 @@ define service {
         use                     hadoop-service
         service_description     DATANODE::DataNode process down
         servicegroups           HDFS
-        check_command           check_tcp!<%=scope.function_hdp_template_var("datanode_port")%>!-w 1 -c 1
+        check_command           check_tcp!<%=scope.function_hdp_template_var("::hdp::datanode_port")%>!-w 1 -c 1
         normal_check_interval   1
         retry_check_interval    0.5
         max_check_attempts      3
@@ -504,7 +504,7 @@ define service {
         use                     hadoop-service
         service_description     DATANODE::DataNode storage full
         servicegroups           HDFS
-        check_command           check_datanode_storage!<%=scope.function_hdp_template_var("datanode_port")%>!90%!90%
+        check_command           check_datanode_storage!<%=scope.function_hdp_template_var("::hdp::datanode_port")%>!90%!90%
         normal_check_interval   5
         retry_check_interval    1
         max_check_attempts      2
@@ -534,7 +534,7 @@ define service {
         use                     hadoop-service
         service_description     ZOOKEEPER::ZooKeeper Server process down
         servicegroups           ZOOKEEPER
-        check_command           check_tcp!<%=scope.function_hdp_template_var("clientPort")%>!-w 1 -c 1
+        check_command           check_tcp!<%=scope.function_hdp_template_var("::clientPort")%>!-w 1 -c 1
         normal_check_interval   1
         retry_check_interval    0.5
         max_check_attempts      3
@@ -548,7 +548,7 @@ define service {
         use                     hadoop-service
         service_description     REGIONSERVER::RegionServer process down
         servicegroups           HBASE
-        check_command           check_tcp!<%=scope.function_hdp_template_var("hbase_rs_port")%>!-w 1 -c 1
+        check_command           check_tcp!<%=scope.function_hdp_template_var("::hdp::hbase_rs_port")%>!-w 1 -c 1
         normal_check_interval   1
         retry_check_interval    0.5
         max_check_attempts      3
@@ -560,7 +560,7 @@ define service {
         use                     hadoop-service
         service_description     HBASEMASTER::HBase Master Web UI down
         servicegroups           HBASE
-        check_command           check_webui!hbase!<%=scope.function_hdp_template_var("hbase_master_port")%>
+        check_command           check_webui!hbase!<%=scope.function_hdp_template_var("::hdp::hbase_master_port")%>
         normal_check_interval   1
         retry_check_interval    1
         max_check_attempts      3
@@ -582,7 +582,7 @@ define service {
         use                     hadoop-service
         service_description     HBASEMASTER::HBase Master process down
         servicegroups           HBASE
-        check_command           check_tcp!<%=scope.function_hdp_template_var("hbase_master_port")%>!-w 1 -c 1
+        check_command           check_tcp!<%=scope.function_hdp_template_var("::hdp::hbase_master_port")%>!-w 1 -c 1
         normal_check_interval   0.5
         retry_check_interval    0.25
         max_check_attempts      4
@@ -597,9 +597,9 @@ define service {
         service_description     HIVE-METASTORE::Hive Metastore status check
         servicegroups           HIVE-METASTORE
         <%if scope.function_hdp_template_var("security_enabled")-%>
-        check_command           check_hive_metastore_status!<%=scope.function_hdp_template_var("hive_metastore_port")%>!<%=scope.function_hdp_template_var("java64_home")%>!true!<%=scope.function_hdp_template_var("nagios_keytab_path")%>!<%=scope.function_hdp_template_var("nagios_principal_name")%>!<%=scope.function_hdp_template_var("kinit_path_local")%>
+        check_command           check_hive_metastore_status!<%=scope.function_hdp_template_var("::hive_metastore_port")%>!<%=scope.function_hdp_template_var("java64_home")%>!true!<%=scope.function_hdp_template_var("nagios_keytab_path")%>!<%=scope.function_hdp_template_var("nagios_principal_name")%>!<%=scope.function_hdp_template_var("kinit_path_local")%>
         <%else-%>
-        check_command           check_hive_metastore_status!<%=scope.function_hdp_template_var("hive_metastore_port")%>!<%=scope.function_hdp_template_var("java64_home")%>!false
+        check_command           check_hive_metastore_status!<%=scope.function_hdp_template_var("::hive_metastore_port")%>!<%=scope.function_hdp_template_var("java64_home")%>!false
         <%end-%>
         normal_check_interval   0.5
         retry_check_interval    0.5
@@ -614,9 +614,9 @@ define service {
         service_description     OOZIE::Oozie Server status check
         servicegroups           OOZIE
         <%if scope.function_hdp_template_var("security_enabled")-%>
-        check_command           check_oozie_status!<%=scope.function_hdp_template_var("oozie_server_port")%>!<%=scope.function_hdp_template_var("java64_home")%>!true!<%=scope.function_hdp_template_var("nagios_keytab_path")%>!<%=scope.function_hdp_template_var("nagios_principal_name")%>!<%=scope.function_hdp_template_var("kinit_path_local")%>
+        check_command           check_oozie_status!<%=scope.function_hdp_template_var("::hdp::oozie_server_port")%>!<%=scope.function_hdp_template_var("java64_home")%>!true!<%=scope.function_hdp_template_var("nagios_keytab_path")%>!<%=scope.function_hdp_template_var("nagios_principal_name")%>!<%=scope.function_hdp_template_var("kinit_path_local")%>
         <%else-%>
-        check_command           check_oozie_status!<%=scope.function_hdp_template_var("oozie_server_port")%>!<%=scope.function_hdp_template_var("java64_home")%>!false
+        check_command           check_oozie_status!<%=scope.function_hdp_template_var("::hdp::oozie_server_port")%>!<%=scope.function_hdp_template_var("java64_home")%>!false
         <%end-%>
         normal_check_interval   1
         retry_check_interval    1
@@ -631,9 +631,9 @@ define service {
         service_description     WEBHCAT::WebHCat Server status check
         servicegroups           WEBHCAT 
         <%if scope.function_hdp_template_var("security_enabled")-%>
-        check_command           check_templeton_status!<%=scope.function_hdp_template_var("templeton_port")%>!v1!true!<%=scope.function_hdp_template_var("nagios_keytab_path")%>!<%=scope.function_hdp_template_var("nagios_principal_name")%>!<%=scope.function_hdp_template_var("kinit_path_local")%>
+        check_command           check_templeton_status!<%=scope.function_hdp_template_var("::hdp::templeton_port")%>!v1!true!<%=scope.function_hdp_template_var("nagios_keytab_path")%>!<%=scope.function_hdp_template_var("nagios_principal_name")%>!<%=scope.function_hdp_template_var("kinit_path_local")%>
         <%else-%>
-        check_command           check_templeton_status!<%=scope.function_hdp_template_var("templeton_port")%>!v1!false
+        check_command           check_templeton_status!<%=scope.function_hdp_template_var("::hdp::templeton_port")%>!v1!false
         <%end-%>
         normal_check_interval   1
         retry_check_interval    0.5

+ 1 - 1
ambari-agent/src/main/puppet/modules/hdp-nagios/templates/nagios.cfg.erb

@@ -1141,7 +1141,7 @@ date_format=us
 # embedded Perl interpreter) is located.  If you didn't compile
 # Nagios with embedded Perl support, this option has no effect.
 
-p1_file = <%=nagios_p1_pl %>
+p1_file = <%=scope.function_hdp_template_var("::hdp-nagios::server::nagios_p1_pl") %>
 
 
 

+ 1 - 1
ambari-agent/src/main/puppet/modules/hdp-oozie/manifests/oozie/service_check.pp

@@ -24,7 +24,7 @@ class hdp-oozie::oozie::service_check()
 
   $smoke_shell_files = ['oozieSmoke.sh']
 
-  if (hdp_get_major_stack_version($stack_version) >= 2) {
+  if (hdp_get_major_stack_version($hdp::params::stack_version) >= 2) {
     $smoke_test_file_name = 'oozieSmoke2.sh'
   } else {
     $smoke_test_file_name = 'oozieSmoke.sh'

+ 10 - 8
ambari-agent/src/main/puppet/modules/hdp-oozie/manifests/service.pp

@@ -33,12 +33,14 @@ class hdp-oozie::service(
   $cmd = "env HADOOP_HOME=${hadoop_home} /usr/sbin/oozie_server.sh"
   $pid_file = "${hdp-oozie::params::oozie_pid_dir}/oozie.pid" 
   $jar_location = $hdp::params::hadoop_jar_location
-  if (hdp_get_major_stack_version($stack_version) >= 2) {
+  if (hdp_get_major_stack_version($hdp::params::stack_version) >= 2) {
     $ext_js_path = "/usr/share/HDP-oozie/ext-2.2.zip"
   } else {
     $ext_js_path = "/usr/share/HDP-oozie/ext.zip"
   }
 
+  $lzo_enabled = $hdp::params::lzo_enabled
+
   $security = $hdp::params::security_enabled
   $oozie_keytab = $hdp-oozie::params::oozie_service_keytab
   $oozie_principal = $configuration['oozie-site']['oozie.service.HadoopAccessorService.kerberos.principal']
@@ -94,7 +96,7 @@ class hdp-oozie::service(
   $cmd2 =  "cd /usr/lib/oozie && mkdir -p ${oozie_tmp}"
   $cmd3 =  "cd /usr/lib/oozie && chown ${user}:${hdp::params::user_group} ${oozie_tmp}" 
      
-  if (hdp_get_major_stack_version($stack_version) >= 2) {
+  if (hdp_get_major_stack_version($hdp::params::stack_version) >= 2) {
     $cmd4 = $jdbc_driver_name ? {
         /(com.mysql.jdbc.Driver|oracle.jdbc.driver.OracleDriver)/ => "cd ${oozie_tmp} && /usr/lib/oozie/bin/oozie-setup.sh -hadoop 2.x /usr/lib/ -extjs $ext_js_path $jar_option $jar_path",
         default            => "cd ${oozie_tmp} && /usr/lib/oozie/bin/oozie-setup.sh -hadoop 2.x /usr/lib/ -extjs $ext_js_path $jar_option $jar_path",
@@ -160,7 +162,7 @@ define hdp-oozie::service::directory()
   hdp::directory_recursive_create { $name: 
     owner => $hdp-oozie::params::oozie_user,
     mode => '0755',
-    service_state => $ensure,
+    service_state => $hdp-oozie::service::ensure,
     force => true
   }
 }
@@ -174,20 +176,20 @@ define hdp-oozie::service::createsymlinks()
 
 define hdp-oozie::service::exec_sh()
 {
-  $no_op_test = "ls ${pid_file} >/dev/null 2>&1 && ps `cat ${pid_file}` >/dev/null 2>&1"
+  $no_op_test = "ls ${hdp-oozie::service::pid_file} >/dev/null 2>&1 && ps `cat ${hdp-oozie::service::pid_file}` >/dev/null 2>&1"
   hdp::exec { "exec $name":
     command => "/bin/sh -c '$name'",
     unless  => $no_op_test,
-    initial_wait => $initial_wait
+    initial_wait => $hdp-oozie::service::initial_wait
   }
 }
 
 define hdp-oozie::service::exec_user()
 {
-  $no_op_test = "ls ${pid_file} >/dev/null 2>&1 && ps `cat ${pid_file}` >/dev/null 2>&1"
+  $no_op_test = "ls ${hdp-oozie::service::pid_file} >/dev/null 2>&1 && ps `cat ${hdp-oozie::service::pid_file}` >/dev/null 2>&1"
   hdp::exec { "exec $name":
-    command => "su - ${user} -c '$name'",
+    command => "su - ${hdp-oozie::service::user} -c '$name'",
     unless  => $no_op_test,
-    initial_wait => $initial_wait
+    initial_wait => $hdp-oozie::service::initial_wait
   }
 }

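The exec_sh and exec_user rewrites above show the same scoping rule applied inside defines: a define does not share the scope of the class that declared it, so bare $pid_file, $user, and $initial_wait were dynamic lookups into the caller. The fix names the declaring class explicitly. A sketch with a hypothetical demo module:

class demo::service {
  $pid_file = '/var/run/demo.pid'
  demo::service::check { 'demo check': }
}

define demo::service::check() {
  # A bare $pid_file would climb scopes dynamically (and warn);
  # the fully qualified form is unambiguous and silent.
  $no_op_test = "ls ${demo::service::pid_file} >/dev/null 2>&1"
  notice("check '${name}' uses: ${no_op_test}")
}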
+ 1 - 1
ambari-agent/src/main/puppet/modules/hdp-templeton/manifests/server.pp

@@ -95,7 +95,7 @@ class hdp-templeton::copy-hdfs-directories($service_state)
     path => ['/bin']
   }
 
-  if (hdp_get_major_stack_version($stack_version) >= 2) {
+  if (hdp_get_major_stack_version($hdp::params::stack_version) >= 2) {
     hdp-hadoop::hdfs::copyfromlocal { '/usr/lib/hadoop-mapreduce/hadoop-streaming*.jar':
       service_state => $service_state,
       owner => $webhcat_user,

+ 1 - 1
ambari-agent/src/main/puppet/modules/hdp-templeton/manifests/service.pp

@@ -60,7 +60,7 @@ define hdp-templeton::service::directory()
   hdp::directory_recursive_create { $name: 
     owner => $hdp-templeton::params::webhcat_user,
     mode => '0755',
-    service_state => $ensure,
+    service_state => $hdp-templeton::service::ensure,
     force => true
   }
 }

+ 10 - 1
ambari-agent/src/main/puppet/modules/hdp-templeton/manifests/templeton/service_check.pp

@@ -44,12 +44,21 @@ class hdp-templeton::templeton::service_check()
 
 define hdp-templeton::smoke_shell_file()
 {
+  $smoke_test_user = $hdp::params::smokeuser
+    
+  $security = $hdp-templeton::templeton::service_check::security
+
+  $kinit_path = $hdp::params::kinit_path_local
+  $smoke_user_keytab = $hdp::params::smokeuser_keytab
+
+  $templeton_host = $hdp::params::webhcat_server_host
+
   file { '/tmp/templetonSmoke.sh':
     ensure => present,
     source => "puppet:///modules/hdp-templeton/templetonSmoke.sh",
     mode => '0755'
   }
-
+  
   exec { '/tmp/templetonSmoke.sh':
     command   => "sh /tmp/templetonSmoke.sh ${templeton_host} ${smoke_test_user} ${smoke_user_keytab} ${security} ${kinit_path}",
     tries     => 3,

+ 5 - 5
ambari-agent/src/main/puppet/modules/hdp-templeton/templates/webhcat-env.sh.erb

@@ -21,18 +21,18 @@
 #
 
 # The file containing the running pid
-PID_FILE=<%=scope.function_hdp_template_var("hcat_pid_dir")%>/webhcat.pid
+PID_FILE=<%=scope.function_hdp_template_var("::hcat_pid_dir")%>/webhcat.pid
 
-TEMPLETON_LOG_DIR=<%=scope.function_hdp_template_var("hcat_log_dir")%>/
+TEMPLETON_LOG_DIR=<%=scope.function_hdp_template_var("::hcat_log_dir")%>/
 
 
-WEBHCAT_LOG_DIR=<%=scope.function_hdp_template_var("hcat_log_dir")%>/
+WEBHCAT_LOG_DIR=<%=scope.function_hdp_template_var("::hcat_log_dir")%>/
 
 # The console error log
-ERROR_LOG=<%=scope.function_hdp_template_var("hcat_log_dir")%>/webhcat-console-error.log
+ERROR_LOG=<%=scope.function_hdp_template_var("::hcat_log_dir")%>/webhcat-console-error.log
 
 # The console log
-CONSOLE_LOG=<%=scope.function_hdp_template_var("hcat_log_dir")%>/webhcat-console.log
+CONSOLE_LOG=<%=scope.function_hdp_template_var("::hcat_log_dir")%>/webhcat-console.log
 
 #TEMPLETON_JAR=<%=scope.function_hdp_template_var("templeton_jar_name")%>
 

+ 9 - 9
ambari-agent/src/main/puppet/modules/hdp-yarn/manifests/init.pp

@@ -48,41 +48,41 @@ define hdp-yarn::generate_common_configs() {
   $yarn_config_dir = $hdp-yarn::params::conf_dir
 
   # Generate configs
-  if has_key($configuration, 'mapred-site') {
+  if has_key($::configuration, 'mapred-site') {
     configgenerator::configfile{'mapred-site': 
       modulespath => $yarn_config_dir,
       filename => 'mapred-site.xml',
       module => 'hdp-yarn',
-      configuration => $configuration['mapred-site'],
-      owner => $yarn_user,
+      configuration => $::configuration['mapred-site'],
+      owner => $hdp-yarn::params::yarn_user,
       mode => 755
     }
   } else { # Manually overriding ownership of file installed by hadoop package
     file { "${yarn_config_dir}/mapred-site.xml":
-      owner => $yarn_user,
+      owner => $hdp-yarn::params::yarn_user,
       mode => 755
     }
   }
   
-  if has_key($configuration, 'yarn-site') {
+  if has_key($::configuration, 'yarn-site') {
     configgenerator::configfile{'yarn-site': 
       modulespath => $yarn_config_dir,
       filename => 'yarn-site.xml',
       module => 'hdp-yarn',
-      configuration => $configuration['yarn-site'],
-      owner => $yarn_user,
+      configuration => $::configuration['yarn-site'],
+      owner => $hdp-yarn::params::yarn_user,
       mode => 755
     }
   } else { # Manually overriding ownership of file installed by hadoop package
     file { "${yarn_config_dir}/yarn-site.xml":
-      owner => $yarn_user,
+      owner => $hdp-yarn::params::yarn_user,
       mode => 755
     }
   }
 
   hdp::configfile {"${yarn_config_dir}/yarn-env.sh":
     component      => 'yarn',
-    owner          => $yarn_user,
+    owner          => $hdp-yarn::params::yarn_user,
     mode           => 755
   }
 }

+ 7 - 1
ambari-agent/src/main/puppet/modules/hdp-zookeeper/manifests/quorum/service_check.pp

@@ -38,6 +38,12 @@ class hdp-zookeeper::quorum::service_check()
 
 define hdp-zookeeper::quorum_smoke_shell_file()
 {
+  $conf_dir = $hdp-zookeeper::params::conf_dir
+  $smoke_test_user = $hdp::params::smokeuser
+  $smoke_script = $hdp::params::zk_smoke_test_script
+  $smoke_user_keytab = $hdp::params::smokeuser_keytab
+  $kinit_path = $hdp::params::kinit_path_local
+
   file { '/tmp/zkSmoke.sh':
     ensure => present,
     source => "puppet:///modules/hdp-zookeeper/zkSmoke.sh",
@@ -45,7 +51,7 @@ define hdp-zookeeper::quorum_smoke_shell_file()
   }
 
   exec { '/tmp/zkSmoke.sh':
-    command   => "sh /tmp/zkSmoke.sh ${smoke_script} ${smoke_test_user} ${conf_dir} ${clientPort} ${security_enabled} ${kinit_path} ${smoke_user_keytab}",
+   command   => "sh /tmp/zkSmoke.sh ${smoke_script} ${smoke_test_user} ${conf_dir} ${::clientPort} ${::security_enabled} ${kinit_path} ${smoke_user_keytab}",
     tries     => 3,
     try_sleep => 5,
     require   => File['/tmp/zkSmoke.sh'],

+ 1 - 1
ambari-agent/src/main/puppet/modules/hdp/lib/puppet/parser/functions/hdp_default.rb

@@ -27,7 +27,7 @@ module Puppet::Parser::Functions
     default = args[1]    
     val = lookupvar("::#{var_name}")    
     # Lookup value inside a hash map.
-    if var_parts.length > 1 and function_hdp_is_empty(val) and function_hdp_is_empty(lookupvar("configuration")) == false and function_hdp_is_empty(lookupvar("#{var_parts[-2]}")) == false
+    if var_parts.length > 1 and function_hdp_is_empty(val) and function_hdp_is_empty(lookupvar("::configuration")) == false and function_hdp_is_empty(lookupvar("#{var_parts[-2]}")) == false
       keyHash = var_parts[-2]
       hashMap = lookupvar("#{keyHash}") 
       val = hashMap.fetch(var_name, default.to_s)

+ 1 - 1
ambari-agent/src/main/puppet/modules/hdp/lib/puppet/parser/functions/hdp_host.rb

@@ -22,7 +22,7 @@ module Puppet::Parser::Functions
   newfunction(:hdp_host, :type => :rvalue) do |args|
     args = function_hdp_args_as_array(args)
     var = args[0]
-    val = lookupvar(var)
+    val = lookupvar("::"+var)
     function_hdp_is_empty(val) ? "" : val 
   end
 end