
AMBARI-1211. Ability to configure the same username for all the services in Ambari. (mahadev)

git-svn-id: https://svn.apache.org/repos/asf/incubator/ambari/trunk@1435415 13f79535-47bb-0310-9956-ffa450edef68
Mahadev Konar, 12 years ago
Commit 2d68dc5ee7

+ 5 - 0
CHANGES.txt

@@ -62,6 +62,11 @@ Trunk (unreleased changes):
 AMBARI-1178. Fix use of ip address for JMX metrics request. (tbeerbower
  via mahadev)
 
+ AMBARI-1191. Datatable API needs work. (Billie Rinaldi via mahadev)
+
+ AMBARI-1211. Ability to configure the same username for all the services in
+ Ambari. (mahadev)
+
 AMBARI-1.2.0 branch:
 
  INCOMPATIBLE CHANGES

+ 3 - 1
ambari-agent/src/main/puppet/modules/hdp-hadoop/manifests/init.pp

@@ -163,7 +163,9 @@ class hdp-hadoop(
     }
  
     hdp::user{ $hdfs_user:}
-    hdp::user { $mapred_user:}
+    if ($hdfs_user != $mapred_user) {
+      hdp::user { $mapred_user:}
+    }
 
     $logdirprefix = $hdp-hadoop::params::hdfs_log_dir_prefix
     hdp::directory_recursive_create { $logdirprefix: 
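
This guard exists because Puppet refuses to declare the same resource title twice in one catalog; with a single username configured for every service, $hdfs_user and $mapred_user resolve to the same name, and the old unconditional declaration would abort compilation. A minimal sketch of the failure mode (usernames illustrative):

    $hdfs_user   = 'hadoop'
    $mapred_user = 'hadoop'

    hdp::user { $hdfs_user: }
    # A second unconditional declaration would fail with a
    # "Duplicate definition: Hdp::User[hadoop]" error, hence the guard:
    if ($hdfs_user != $mapred_user) {
      hdp::user { $mapred_user: }
    }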

+ 7 - 4
ambari-agent/src/main/puppet/modules/hdp-hadoop/manifests/namenode.pp

@@ -113,7 +113,8 @@ define hdp-hadoop::namenode::create_app_directories($service_state)
 {
   if ($service_state == 'running') {
     $smoke_test_user = $hdp::params::smokeuser
-    hdp-hadoop::hdfs::directory{ "/user/${smoke_test_user}":
+    $smoke_hdfs_user_dir = $hdp::params::smoke_hdfs_user_dir
+    hdp-hadoop::hdfs::directory{ $smoke_hdfs_user_dir:
       service_state => $service_state,
       owner => $smoke_test_user,
       mode  => '770',
@@ -153,7 +154,7 @@ define hdp-hadoop::namenode::create_app_directories($service_state)
         mode             => '777',
         recursive_chmod  => true
       }
-      hdp-hadoop::hdfs::directory{ "/user/${hive_user}":
+      hdp-hadoop::hdfs::directory{ $hive_hdfs_user_dir:
         service_state => $service_state,
         owner         => $hive_user
       }
@@ -161,7 +162,8 @@ define hdp-hadoop::namenode::create_app_directories($service_state)
 
     if ($hdp::params::oozie_server != "") {
       $oozie_user = $hdp::params::oozie_user
-      hdp-hadoop::hdfs::directory{ "/user/${oozie_user}":
+      $oozie_hdfs_user_dir = $hdp::params::oozie_hdfs_user_dir
+      hdp-hadoop::hdfs::directory{ $oozie_hdfs_user_dir:
         service_state => $service_state,
         owner => $oozie_user,
         mode  => '775',
@@ -171,7 +173,8 @@ define hdp-hadoop::namenode::create_app_directories($service_state)
     
     if ($hdp::params::webhcat_server_host != "") {
       $templeton_user = $hdp::params::templeton_user
-      hdp-hadoop::hdfs::directory{ '/user/hcat':
+      $hcat_hdfs_user_dir = $hdp::params::hcat_hdfs_user_dir
+      hdp-hadoop::hdfs::directory{ $hcat_hdfs_user_dir:
         service_state => $service_state,
         owner => $templeton_user,
         mode  => '755',
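
Each hard-coded "/user/<name>" path above is replaced by a lookup from hdp::params (added later in this commit), so a cluster that maps every service onto one username also gets a single, consistently-owned HDFS home directory. With the shipped defaults the resource titles are unchanged; a sketch of the default case for Oozie:

    # The default lookup resolves to the old hard-coded path.
    $oozie_hdfs_user_dir = hdp_default("oozie_hdfs_user_dir", "/user/oozie")
    hdp-hadoop::hdfs::directory { $oozie_hdfs_user_dir:  # title: "/user/oozie"
      service_state => 'running',
      owner         => $oozie_user,
      mode          => '775',
    }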

+ 3 - 2
ambari-agent/src/main/puppet/modules/hdp-oozie/manifests/service.pp

@@ -29,6 +29,7 @@ class hdp-oozie::service(
   $user = "$hdp-oozie::params::oozie_user"
   $hadoop_home = $hdp-oozie::params::hadoop_prefix
   $oozie_tmp = $hdp-oozie::params::oozie_tmp_dir
+  $oozie_hdfs_user_dir = $hdp::params::oozie_hdfs_user_dir
   $cmd = "env HADOOP_HOME=${hadoop_home} /usr/sbin/oozie_server.sh"
   $pid_file = "${hdp-oozie::params::oozie_pid_dir}/oozie.pid" 
   $jar_location = $hdp::params::hadoop_jar_location
@@ -42,10 +43,10 @@ class hdp-oozie::service(
 
   $cmd1 = "cd /usr/lib/oozie && tar -xvf oozie-sharelib.tar.gz"
   $cmd2 =  "cd /usr/lib/oozie && mkdir -p ${oozie_tmp}"
-  $cmd3 =  "cd /usr/lib/oozie && chown ${user}:hadoop ${oozie_tmp}"    
+  $cmd3 =  "cd /usr/lib/oozie && chown ${user}:${hdp::params::user_group} ${oozie_tmp}"    
   $cmd4 =  "cd ${oozie_tmp} && /usr/lib/oozie/bin/oozie-setup.sh -hadoop 0.20.200 $jar_location -extjs $ext_js_path $lzo_jar_suffix"
   $cmd5 =  "cd ${oozie_tmp} && /usr/lib/oozie/bin/ooziedb.sh create -sqlfile oozie.sql -run ; echo 0"
-  $cmd6 =  "su - ${user} -c 'hadoop dfs -put /usr/lib/oozie/share share ; hadoop dfs -chmod -R 755 /user/${user}/share'"
+  $cmd6 =  "su - ${user} -c 'hadoop dfs -put /usr/lib/oozie/share ${oozie_hdfs_user_dir} ; hadoop dfs -chmod -R 755 ${oozie_hdfs_user_dir}/share'"
   #$cmd7 = "/usr/lib/oozie/bin/oozie-start.sh"
 
   if ($ensure == 'installed_and_configured') {
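
Under the shipped defaults both commands behave exactly as before; they diverge only when user_group or oozie_hdfs_user_dir is overridden. Assuming the Oozie user is named oozie and the defaults from hdp::params, $cmd6 expands to:

    su - oozie -c 'hadoop dfs -put /usr/lib/oozie/share /user/oozie ; hadoop dfs -chmod -R 755 /user/oozie/share'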

+ 15 - 19
ambari-agent/src/main/puppet/modules/hdp/manifests/init.pp

@@ -98,28 +98,26 @@ class hdp::pre_install_pkgs
 
 class hdp::create_smoke_user()
 {
+
   $smoke_group = $hdp::params::smoke_user_group
   $smoke_user = $hdp::params::smokeuser
   $security_enabled = $hdp::params::security_enabled
 
-  
   if ( $smoke_group != $proxyuser_group) {
     group { $smoke_group :
       ensure => present
     }
   }
-
-  group { $proxyuser_group :
-    ensure => present
+  
+  if ($hdp::params::user_group != $proxyuser_group) {
+    group { $proxyuser_group :
+      ensure => present
+    }
   }
-
-  hdp::user { $smoke_user: gid => $proxyuser_group}
-
-  $cmd = "usermod -g  $smoke_group  $smoke_user"
-  $check_group_cmd = "id -gn $smoke_user | grep $smoke_group"
-  hdp::exec{ $cmd:
-     command => $cmd,
-     unless => $check_group_cmd
+  
+  hdp::user { $smoke_user: 
+              gid    => $hdp::params::user_group,
+              groups => ["$proxyuser_group"]
   }
 
   if ($security_enabled == true) {
@@ -133,11 +131,7 @@ class hdp::create_smoke_user()
      }
   }
 
-  if ( $smoke_group != $proxyuser_group) {
-    Group[$smoke_group] -> Group[$proxyuser_group] -> Hdp::User[$smoke_user] -> Hdp::Exec[$cmd]
-  } else {
-    Group[$smoke_group] -> Hdp::User[$smoke_user] -> Hdp::Exec[$cmd]
-  }
+  Group<||> -> Hdp::User[$smoke_user]
 }
 
 
@@ -153,7 +147,8 @@ class hdp::set_selinux()
 
 define hdp::user(
   $gid = $hdp::params::user_group,
-  $just_validate = undef
+  $just_validate = undef,
+  $groups = undef
 )
 {
   $user_info = $hdp::params::user_info[$name]
@@ -175,7 +170,8 @@ define hdp::user(
       ensure     => present,
       managehome => true,
       gid        => $gid, #TODO either remove this to support LDAP env or fix it
-      shell      => '/bin/bash'
+      shell      => '/bin/bash',
+      groups     => $groups 
     }
   }
 }
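
Two simplifications here: hdp::user now sets the primary group (gid) and supplementary groups (groups) natively, which retires the usermod exec and its unless check, and the collector Group<||> matches every group resource in the catalog, so a single ordering constraint replaces the two explicit chains. A minimal sketch with illustrative names:

    group { 'hadoop': ensure => present }
    group { 'users':  ensure => present }

    user { 'ambari_qa':
      ensure => present,
      gid    => 'hadoop',    # primary group, formerly fixed up via usermod
      groups => ['users'],   # supplementary groups
    }

    # Every Group resource is applied before the user is created.
    Group<||> -> User['ambari_qa']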

+ 7 - 2
ambari-agent/src/main/puppet/modules/hdp/manifests/params.pp

@@ -110,8 +110,6 @@ class hdp::params()
     $public_webhcat_server_host = hdp_default("webhcat_server_host")
   }
 
-  ############ Hdfs directories
-  $hbase_hdfs_root_dir = hdp_default("hadoop/hbase-site/hbase_hdfs_root_dir","/apps/hbase/data")
 
   ############ users
   $user_info = hdp_default("user_info",{})
@@ -133,6 +131,13 @@ class hdp::params()
 
   $smokeuser = hdp_default("smokeuser","ambari_qa")
   $smoke_user_group = hdp_default("smoke_user_group","users")
+  
+  ############ Hdfs directories
+  $hbase_hdfs_root_dir = hdp_default("hadoop/hbase-site/hbase_hdfs_root_dir","/apps/hbase/data")
+  $oozie_hdfs_user_dir = hdp_default("oozie_hdfs_user_dir", "/user/oozie")
+  $hcat_hdfs_user_dir = hdp_default("hcat_hdfs_user_dir", "/user/hcat")
+  $hive_hdfs_user_dir = hdp_default("hive_hdfs_user_dir", "/user/hive")
+  $smoke_hdfs_user_dir = hdp_default("smoke_hdfs_user_dir", "/user/${smokeuser}")
 
   #because of Puppet user resource issue make sure that $hadoop_user is different from user_group
   if ($security_enabled == true) {
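
Note that the HDFS-directory block did not just move for tidiness: $smoke_hdfs_user_dir interpolates $smokeuser, and Puppet resolves variables at the point of reference, so the lookup has to follow the $smokeuser assignment:

    $smokeuser           = hdp_default("smokeuser", "ambari_qa")
    # Referencing $smokeuser before this point would interpolate to "",
    # yielding the bogus default path "/user/".
    $smoke_hdfs_user_dir = hdp_default("smoke_hdfs_user_dir", "/user/${smokeuser}")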

+ 2 - 2
ambari-agent/src/test/python/TestGrep.py

@@ -80,8 +80,8 @@ debug: Finishing transaction 70171639726240
     fragment = self.grep.tail(self.string_good, 3)
     desired = """
 debug: Finishing transaction 70060456663980
-debug: Received report to process from ambari-dmi.cybervisiontech.com.ua
-debug: Processing report from ambari-dmi.cybervisiontech.com.ua with processor Puppet::Reports::Store
+debug: Received report to process from ambari-dmi
+debug: Processing report from ambari-dmi with processor Puppet::Reports::Store
 """.strip()
     self.assertEquals(fragment, desired, "Grep tail function should return only last 3 lines of file")
 

+ 2 - 2
ambari-agent/src/test/python/dummy_puppet_output_error.txt

@@ -41,5 +41,5 @@ debug: Storing state
 debug: Stored state in 0.01 seconds
 notice: Finished catalog run in 0.23 seconds
 debug: Finishing transaction 70171638871060
-debug: Received report to process from ambari-dmi.cybervisiontech.com.ua
-debug: Processing report from ambari-dmi.cybervisiontech.com.ua with processor Puppet::Reports::Store
+debug: Received report to process from ambari-dmi
+debug: Processing report from ambari-dmi with processor Puppet::Reports::Store

+ 2 - 2
ambari-agent/src/test/python/dummy_puppet_output_good.txt

@@ -43,5 +43,5 @@ debug: Storing state
 debug: Stored state in 0.01 seconds
 notice: Finished catalog run in 0.59 seconds
 debug: Finishing transaction 70060456663980
-debug: Received report to process from ambari-dmi.cybervisiontech.com.ua
-debug: Processing report from ambari-dmi.cybervisiontech.com.ua with processor Puppet::Reports::Store
+debug: Received report to process from ambari-dmi
+debug: Processing report from ambari-dmi with processor Puppet::Reports::Store

+ 7 - 0
ambari-server/src/main/java/org/apache/ambari/eventdb/db/PostgresConnector.java

@@ -245,6 +245,13 @@ public class PostgresConnector implements DBConnector {
     DataTable table = new DataTable();
     table.setiTotalRecords(total);
     table.setiTotalDisplayRecords(summary.getNumRows());
+    if (workflows.isEmpty()) {
+      table.setStartIndex(-1);
+      table.setEndIndex(-1);
+    } else {
+      table.setStartIndex(offset);
+      table.setEndIndex(offset + workflows.size() - 1);
+    }
     table.setAaData(workflows);
     table.setsEcho(echo);
     table.setSummary(summary);
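
The new fields report the absolute row range of the page just fetched: n rows read at offset span [offset, offset + n - 1], and an empty result is flagged with the -1 sentinel rather than a meaningless range. A standalone sketch of the same bookkeeping (hypothetical helper, not part of the Ambari API):

    // Hypothetical helper mirroring the index logic above.
    final class PageRange {
      static int[] of(int offset, int rowCount) {
        if (rowCount == 0) {
          return new int[] { -1, -1 };  // empty page: no valid indices
        }
        return new int[] { offset, offset + rowCount - 1 };
      }
    }
    // PageRange.of(40, 10) -> {40, 49}; PageRange.of(40, 0) -> {-1, -1}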

+ 18 - 0
ambari-server/src/main/java/org/apache/ambari/eventdb/model/DataTable.java

@@ -33,6 +33,8 @@ public class DataTable {
   int sEcho;
   int iTotalRecords;
   int iTotalDisplayRecords;
+  int startIndex;
+  int endIndex;
   List<WorkflowDBEntry> aaData;
   Summary summary;
   
@@ -206,6 +208,22 @@ public class DataTable {
     this.iTotalDisplayRecords = iTotalDisplayRecords;
   }
   
+  public int getStartIndex() {
+    return startIndex;
+  }
+  
+  public void setStartIndex(int startIndex) {
+    this.startIndex = startIndex;
+  }
+  
+  public int getEndIndex() {
+    return endIndex;
+  }
+  
+  public void setEndIndex(int endIndex) {
+    this.endIndex = endIndex;
+  }
+  
   public List<WorkflowDBEntry> getAaData() {
     return aaData;
   }
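
Since DataTable is a plain bean, this getter/setter pair is all that is needed for the new fields to appear in the serialized response alongside sEcho and the record counts. Illustrative use (values made up; the emitted field names assume default bean mapping):

    DataTable table = new DataTable();
    table.setStartIndex(40);   // first row of this page
    table.setEndIndex(49);     // last row of this page
    // typical serialized form: {"startIndex":40,"endIndex":49,...}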