
AMBARI-6955. Allow Customers To Define The Sqoop User (dlysnichenko)

Lisnichenko Dmitro, 11 years ago
parent commit 65417f715c
24 changed files with 94 additions and 19 deletions
   1. +3 -1    ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/params.py
   2. +7 -0    ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/shared_initialization.py
   3. +5 -0    ambari-server/src/main/resources/stacks/HDP/1.3.2/services/SQOOP/configuration/sqoop-env.xml
   4. +1 -1    ambari-server/src/main/resources/stacks/HDP/1.3.2/services/SQOOP/package/scripts/params.py
   5. +2 -0    ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/params.py
   6. +8 -1    ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/shared_initialization.py
   7. +5 -1    ambari-server/src/main/resources/stacks/HDP/2.0.6/services/SQOOP/configuration/sqoop-env.xml
   8. +1 -1    ambari-server/src/main/resources/stacks/HDP/2.0.6/services/SQOOP/package/scripts/params.py
   9. +1 -1    ambari-server/src/main/resources/stacks/HDP/2.0.6/services/SQOOP/package/scripts/sqoop.py
  10. +2 -1    ambari-server/src/test/python/stacks/1.3.2/configs/default.hbasedecom.json
  11. +2 -1    ambari-server/src/test/python/stacks/1.3.2/configs/default.json
  12. +2 -1    ambari-server/src/test/python/stacks/1.3.2/configs/default.non_gmetad_host.json
  13. +2 -1    ambari-server/src/test/python/stacks/1.3.2/configs/secured.json
  14. +5 -0    ambari-server/src/test/python/stacks/1.3.2/hooks/before-INSTALL/test_before_install.py
  15. +2 -1    ambari-server/src/test/python/stacks/2.0.6/configs/default.json
  16. +2 -1    ambari-server/src/test/python/stacks/2.0.6/configs/default.non_gmetad_host.json
  17. +3 -2    ambari-server/src/test/python/stacks/2.0.6/configs/ha_default.json
  18. +3 -2    ambari-server/src/test/python/stacks/2.0.6/configs/ha_secured.json
  19. +2 -1    ambari-server/src/test/python/stacks/2.0.6/configs/secured.json
  20. +5 -0    ambari-server/src/test/python/stacks/2.0.6/hooks/before-INSTALL/test_before_install.py
  21. +2 -1    ambari-server/src/test/python/stacks/2.1/configs/default.json
  22. +2 -1    ambari-server/src/test/python/stacks/2.1/configs/secured.json
  23. +14 -0   ambari-web/app/data/HDP2/site_properties.js
  24. +13 -0   ambari-web/app/data/site_properties.js

+ 3 - 1
ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/params.py

@@ -80,6 +80,7 @@ hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
 zk_user = config['configurations']['zookeeper-env']['zk_user']
 gmetad_user = config['configurations']['ganglia-env']["gmetad_user"]
 gmond_user = config['configurations']['ganglia-env']["gmond_user"]
+sqoop_user = config['configurations']['sqoop-env']['sqoop_user']
 
 user_group = config['configurations']['hadoop-env']['user_group']
 proxyuser_group =  config['configurations']['hadoop-env']['proxyuser_group']
@@ -103,6 +104,7 @@ namenode_host = default("/clusterHostInfo/namenode_host", [])
 zk_hosts = default("/clusterHostInfo/zookeeper_hosts", [])
 ganglia_server_hosts = default("/clusterHostInfo/ganglia_server_host", [])
 
+has_sqoop_client = 'sqoop-env' in config['configurations']
 has_resourcemanager = not len(rm_host) == 0
 has_namenode = not len(namenode_host) == 0
 has_jt = not len(jtnode_host) == 0
@@ -130,4 +132,4 @@ ignore_groupsusers_create = default("/configurations/hadoop-env/ignore_groupsuse
 
 #repo params
 repo_info = config['hostLevelParams']['repo_info']
-service_repo_info = default("/hostLevelParams/service_repo_info",None)
+service_repo_info = default("/hostLevelParams/service_repo_info",None)
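
A note on the pattern above: required values are read straight out of config['configurations'] (a missing sqoop-env would raise a KeyError at install time), optional data goes through the default() helper, and the new has_sqoop_client flag only tests whether the sqoop-env config type was delivered at all. A minimal standalone sketch of that lookup logic, with a hypothetical config dict standing in for the command JSON Ambari hands these scripts:

# Minimal sketch of the lookup pattern, assuming a hypothetical `config`
# dict in place of the command JSON Ambari delivers to these scripts.
config = {
    'configurations': {
        'hadoop-env': {'user_group': 'hadoop'},
        'sqoop-env': {'sqoop_user': 'sqoop'},
    }
}

def default(path, default_value):
    # Walk an '/a/b/c' path through nested dicts; fall back when absent.
    node = config
    for key in path.strip('/').split('/'):
        if not isinstance(node, dict) or key not in node:
            return default_value
        node = node[key]
    return node

# Required key: raises KeyError if sqoop-env was never delivered.
sqoop_user = config['configurations']['sqoop-env']['sqoop_user']
# Optional data: an absent path falls back to the supplied default.
service_repo_info = default('/hostLevelParams/service_repo_info', None)
# Guard: only true when the SQOOP service contributed its config type.
has_sqoop_client = 'sqoop-env' in config['configurations']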

+ 7 - 0
ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/shared_initialization.py

@@ -124,6 +124,13 @@ def setup_users():
          ignore_failures = params.ignore_groupsusers_create
     )
 
+  if params.has_sqoop_client:
+    User(params.sqoop_user,
+         gid=params.user_group,
+         groups=[params.user_group],
+         ignore_failures=params.ignore_groupsusers_create
+    )
+
 def set_uid(user, user_dirs):
   """
   user_dirs - comma separated directories
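
The guarded block added above only creates the account when a Sqoop client is actually part of the cluster. A rough standalone imitation of the control flow, with a dry-run user() function standing in for the resource_management User resource and a hypothetical params object:

# Rough imitation of the guarded block above. user() is a dry-run stand-in
# for the resource_management User resource; params is a hypothetical
# stand-in for the generated params module.
def user(name, gid=None, groups=None, ignore_failures=False):
    # A real agent would execute this command and, with ignore_failures=True,
    # swallow a non-zero exit instead of failing the install.
    cmd = ['useradd', '-g', gid or '', '-G', ','.join(groups or []), name]
    print('would run:', ' '.join(cmd))

class params:
    has_sqoop_client = True            # 'sqoop-env' present in configurations
    sqoop_user = 'sqoop'               # sqoop-env/sqoop_user
    user_group = 'hadoop'              # hadoop-env/user_group
    ignore_groupsusers_create = False

if params.has_sqoop_client:
    user(params.sqoop_user,
         gid=params.user_group,
         groups=[params.user_group],
         ignore_failures=params.ignore_groupsusers_create)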

+ 5 - 0
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/SQOOP/configuration/sqoop-env.xml

@@ -45,5 +45,10 @@ export ZOOCFGDIR=${ZOOCFGDIR:-/etc/zookeeper/conf}
 export SQOOP_USER_CLASSPATH="`ls ${HIVE_HOME}/lib/libthrift-*.jar 2> /dev/null`:${SQOOP_USER_CLASSPATH}"
     </value>
   </property>
+  <property>
+    <name>sqoop_user</name>
+    <description>User to run Sqoop as</description>
+    <value>sqoop</value>
+  </property>
   
 </configuration>
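
The new property gives the stack a declared default of sqoop that the UI and the params scripts can override per cluster. Purely as an illustration, the declared default can be pulled out of such a snippet with the standard library:

# Illustration only: extract the declared default from a sqoop-env.xml
# snippet using the standard library.
import xml.etree.ElementTree as ET

SQOOP_ENV_XML = """
<configuration>
  <property>
    <name>sqoop_user</name>
    <description>User to run Sqoop as</description>
    <value>sqoop</value>
  </property>
</configuration>
"""

root = ET.fromstring(SQOOP_ENV_XML)
defaults = {p.findtext('name'): p.findtext('value')
            for p in root.iter('property')}
print(defaults['sqoop_user'])  # -> sqoop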

+ 1 - 1
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/SQOOP/package/scripts/params.py

@@ -31,7 +31,7 @@ hbase_home = "/usr"
 hive_home = "/usr"
 zoo_conf_dir = "/etc/zookeeper"
 sqoop_lib = "/usr/lib/sqoop/lib"
-sqoop_user = "sqoop"
+sqoop_user = config['configurations']['sqoop-env']['sqoop_user']
 
 keytab_path = config['configurations']['hadoop-env']['keytab_path']
 smoke_user_keytab = config['configurations']['hadoop-env']['smokeuser_keytab']

+ 2 - 0
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/params.py

@@ -41,6 +41,7 @@ gmond_user = config['configurations']['ganglia-env']["gmond_user"]
 storm_user = config['configurations']['storm-env']['storm_user']
 tez_user = config['configurations']['tez-env']['tez_user']
 falcon_user = config['configurations']['falcon-env']['falcon_user']
+sqoop_user = config['configurations']['sqoop-env']['sqoop_user']
 
 user_group = config['configurations']['hadoop-env']['user_group']
 proxyuser_group =  config['configurations']['hadoop-env']['proxyuser_group']
@@ -66,6 +67,7 @@ ganglia_server_hosts = default("/clusterHostInfo/ganglia_server_host", [])
 storm_server_hosts = default("/clusterHostInfo/nimbus_hosts", [])
 falcon_host =  default('/clusterHostInfo/falcon_server_hosts', [])
 
+has_sqoop_client = 'sqoop-env' in config['configurations']
 has_namenode = not len(namenode_host) == 0
 has_hs = not len(hs_host) == 0
 has_resourcemanager = not len(rm_host) == 0

+ 8 - 1
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/shared_initialization.py

@@ -144,13 +144,20 @@ def setup_users():
          ignore_failures = params.ignore_groupsusers_create
     )
     
-  if params.has_tez:  
+  if params.has_tez:
     User(params.tez_user,
       gid=params.user_group,
       groups=[params.proxyuser_group],
       ignore_failures = params.ignore_groupsusers_create
     )
 
+  if params.has_sqoop_client:
+    User(params.sqoop_user,
+         gid=params.user_group,
+         groups=[params.user_group],
+         ignore_failures=params.ignore_groupsusers_create
+    )
+
 def set_uid(user, user_dirs):
   """
   user_dirs - comma separated directories

+ 5 - 1
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/SQOOP/configuration/sqoop-env.xml

@@ -45,5 +45,9 @@ export ZOOCFGDIR=${ZOOCFGDIR:-/etc/zookeeper/conf}
 export SQOOP_USER_CLASSPATH="`ls ${HIVE_HOME}/lib/libthrift-*.jar 2> /dev/null`:${SQOOP_USER_CLASSPATH}"
     </value>
   </property>
-  
+  <property>
+    <name>sqoop_user</name>
+    <description>User to run Sqoop as</description>
+    <value>sqoop</value>
+  </property>
 </configuration>

+ 1 - 1
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/SQOOP/package/scripts/params.py

@@ -32,7 +32,7 @@ hbase_home = "/usr"
 hive_home = "/usr"
 zoo_conf_dir = "/etc/zookeeper"
 sqoop_lib = "/usr/lib/sqoop/lib"
-sqoop_user = "sqoop"
+sqoop_user = config['configurations']['sqoop-env']['sqoop_user']
 
 keytab_path = config['configurations']['hadoop-env']['keytab_path']
 smoke_user_keytab = config['configurations']['hadoop-env']['smokeuser_keytab']

+ 1 - 1
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/SQOOP/package/scripts/sqoop.py

@@ -24,7 +24,7 @@ def sqoop(type=None):
   import params
   Link(params.sqoop_lib + "/mysql-connector-java.jar",
        to = '/usr/share/java/mysql-connector-java.jar'
-  )
+  ) 
   Directory(params.sqoop_conf_dir,
             owner = params.sqoop_user,
             group = params.user_group
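
The change in this file is whitespace-only, but the surrounding resource is where the configurable user takes effect: the Directory resource hands the Sqoop conf dir to params.sqoop_user instead of a hard-coded account. A rough stand-in sketch (hypothetical path, assumes sufficient privileges):

# Rough stand-in for the Directory resource above: the configured user,
# not a hard-coded 'sqoop', owns the conf dir. Hypothetical path; the
# chown requires sufficient privileges.
import os
import shutil

sqoop_conf_dir = '/etc/sqoop/conf'   # assumed value of params.sqoop_conf_dir
sqoop_user = 'sqoop'                 # whatever sqoop-env/sqoop_user resolves to
user_group = 'hadoop'

os.makedirs(sqoop_conf_dir, exist_ok=True)
shutil.chown(sqoop_conf_dir, user=sqoop_user, group=user_group)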

+ 2 - 1
ambari-server/src/test/python/stacks/1.3.2/configs/default.hbasedecom.json

@@ -296,7 +296,8 @@
             "content": "\nJAVA_HOME={{java64_home}}\nHADOOP_HOME=${HADOOP_HOME:-{{hadoop_home}}}\n    "
         }, 
         "sqoop-env": {
-            "content": "\n# Set Hadoop-specific environment variables here.\n\n#Set path to where bin/hadoop is available\n#Set path to where bin/hadoop is available\nexport HADOOP_HOME=${HADOOP_HOME:-/usr/lib/hadoop}\n\n#set the path to where bin/hbase is available\nexport HBASE_HOME=${HBASE_HOME:-/usr/lib/hbase}\n\n#Set the path to where bin/hive is available\nexport HIVE_HOME=${HIVE_HOME:-/usr/lib/hive}\n\n#Set the path for where zookeper config dir is\nexport ZOOCFGDIR=${ZOOCFGDIR:-/etc/zookeeper/conf}\n\n# add libthrift in hive to sqoop class path first so hive imports work\nexport SQOOP_USER_CLASSPATH=\"`ls ${HIVE_HOME}/lib/libthrift-*.jar 2> /dev/null`:${SQOOP_USER_CLASSPATH}\"\n    "
+            "content": "\n# Set Hadoop-specific environment variables here.\n\n#Set path to where bin/hadoop is available\n#Set path to where bin/hadoop is available\nexport HADOOP_HOME=${HADOOP_HOME:-/usr/lib/hadoop}\n\n#set the path to where bin/hbase is available\nexport HBASE_HOME=${HBASE_HOME:-/usr/lib/hbase}\n\n#Set the path to where bin/hive is available\nexport HIVE_HOME=${HIVE_HOME:-/usr/lib/hive}\n\n#Set the path for where zookeper config dir is\nexport ZOOCFGDIR=${ZOOCFGDIR:-/etc/zookeeper/conf}\n\n# add libthrift in hive to sqoop class path first so hive imports work\nexport SQOOP_USER_CLASSPATH=\"`ls ${HIVE_HOME}/lib/libthrift-*.jar 2> /dev/null`:${SQOOP_USER_CLASSPATH}\"\n    ",
+            "sqoop_user": "sqoop"
         }, 
         "mapred-env": {
             "mapreduce_userlog_retainhours": "24", 

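The remaining test fixtures receive the same one-line addition, so each stack's mocked command JSON now carries the key that params.py reads. A quick illustrative check against one fixture (path relative to the repository root; run location is an assumption):

# Illustrative check that a fixture now carries the key params.py reads.
# Path is relative to the Ambari source tree; run location is an assumption.
import json

path = 'ambari-server/src/test/python/stacks/1.3.2/configs/default.json'
with open(path) as f:
    cfg = json.load(f)

sqoop_env = cfg['configurations'].get('sqoop-env', {})
assert sqoop_env.get('sqoop_user') == 'sqoop', sqoop_env
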
+ 2 - 1
ambari-server/src/test/python/stacks/1.3.2/configs/default.json

@@ -296,7 +296,8 @@
             "content": "\nJAVA_HOME={{java64_home}}\nHADOOP_HOME=${HADOOP_HOME:-{{hadoop_home}}}\n    "
         }, 
         "sqoop-env": {
-            "content": "\n# Set Hadoop-specific environment variables here.\n\n#Set path to where bin/hadoop is available\n#Set path to where bin/hadoop is available\nexport HADOOP_HOME=${HADOOP_HOME:-/usr/lib/hadoop}\n\n#set the path to where bin/hbase is available\nexport HBASE_HOME=${HBASE_HOME:-/usr/lib/hbase}\n\n#Set the path to where bin/hive is available\nexport HIVE_HOME=${HIVE_HOME:-/usr/lib/hive}\n\n#Set the path for where zookeper config dir is\nexport ZOOCFGDIR=${ZOOCFGDIR:-/etc/zookeeper/conf}\n\n# add libthrift in hive to sqoop class path first so hive imports work\nexport SQOOP_USER_CLASSPATH=\"`ls ${HIVE_HOME}/lib/libthrift-*.jar 2> /dev/null`:${SQOOP_USER_CLASSPATH}\"\n    "
+            "content": "\n# Set Hadoop-specific environment variables here.\n\n#Set path to where bin/hadoop is available\n#Set path to where bin/hadoop is available\nexport HADOOP_HOME=${HADOOP_HOME:-/usr/lib/hadoop}\n\n#set the path to where bin/hbase is available\nexport HBASE_HOME=${HBASE_HOME:-/usr/lib/hbase}\n\n#Set the path to where bin/hive is available\nexport HIVE_HOME=${HIVE_HOME:-/usr/lib/hive}\n\n#Set the path for where zookeper config dir is\nexport ZOOCFGDIR=${ZOOCFGDIR:-/etc/zookeeper/conf}\n\n# add libthrift in hive to sqoop class path first so hive imports work\nexport SQOOP_USER_CLASSPATH=\"`ls ${HIVE_HOME}/lib/libthrift-*.jar 2> /dev/null`:${SQOOP_USER_CLASSPATH}\"\n    ",
+            "sqoop_user": "sqoop"
         }, 
         "mapred-env": {
             "mapreduce_userlog_retainhours": "24", 

+ 2 - 1
ambari-server/src/test/python/stacks/1.3.2/configs/default.non_gmetad_host.json

@@ -296,7 +296,8 @@
             "content": "\nJAVA_HOME={{java64_home}}\nHADOOP_HOME=${HADOOP_HOME:-{{hadoop_home}}}\n    "
         }, 
         "sqoop-env": {
-            "content": "\n# Set Hadoop-specific environment variables here.\n\n#Set path to where bin/hadoop is available\n#Set path to where bin/hadoop is available\nexport HADOOP_HOME=${HADOOP_HOME:-/usr/lib/hadoop}\n\n#set the path to where bin/hbase is available\nexport HBASE_HOME=${HBASE_HOME:-/usr/lib/hbase}\n\n#Set the path to where bin/hive is available\nexport HIVE_HOME=${HIVE_HOME:-/usr/lib/hive}\n\n#Set the path for where zookeper config dir is\nexport ZOOCFGDIR=${ZOOCFGDIR:-/etc/zookeeper/conf}\n\n# add libthrift in hive to sqoop class path first so hive imports work\nexport SQOOP_USER_CLASSPATH=\"`ls ${HIVE_HOME}/lib/libthrift-*.jar 2> /dev/null`:${SQOOP_USER_CLASSPATH}\"\n    "
+            "content": "\n# Set Hadoop-specific environment variables here.\n\n#Set path to where bin/hadoop is available\n#Set path to where bin/hadoop is available\nexport HADOOP_HOME=${HADOOP_HOME:-/usr/lib/hadoop}\n\n#set the path to where bin/hbase is available\nexport HBASE_HOME=${HBASE_HOME:-/usr/lib/hbase}\n\n#Set the path to where bin/hive is available\nexport HIVE_HOME=${HIVE_HOME:-/usr/lib/hive}\n\n#Set the path for where zookeper config dir is\nexport ZOOCFGDIR=${ZOOCFGDIR:-/etc/zookeeper/conf}\n\n# add libthrift in hive to sqoop class path first so hive imports work\nexport SQOOP_USER_CLASSPATH=\"`ls ${HIVE_HOME}/lib/libthrift-*.jar 2> /dev/null`:${SQOOP_USER_CLASSPATH}\"\n    ",
+            "sqoop_user": "sqoop"
         }, 
         "mapred-env": {
             "mapreduce_userlog_retainhours": "24", 

+ 2 - 1
ambari-server/src/test/python/stacks/1.3.2/configs/secured.json

@@ -474,7 +474,8 @@
             "content": "\nJAVA_HOME={{java64_home}}\nHADOOP_HOME=${HADOOP_HOME:-{{hadoop_home}}}\n    "
         }, 
         "sqoop-env": {
-            "content": "\n# Set Hadoop-specific environment variables here.\n\n#Set path to where bin/hadoop is available\n#Set path to where bin/hadoop is available\nexport HADOOP_HOME=${HADOOP_HOME:-/usr/lib/hadoop}\n\n#set the path to where bin/hbase is available\nexport HBASE_HOME=${HBASE_HOME:-/usr/lib/hbase}\n\n#Set the path to where bin/hive is available\nexport HIVE_HOME=${HIVE_HOME:-/usr/lib/hive}\n\n#Set the path for where zookeper config dir is\nexport ZOOCFGDIR=${ZOOCFGDIR:-/etc/zookeeper/conf}\n\n# add libthrift in hive to sqoop class path first so hive imports work\nexport SQOOP_USER_CLASSPATH=\"`ls ${HIVE_HOME}/lib/libthrift-*.jar 2> /dev/null`:${SQOOP_USER_CLASSPATH}\"\n    "
+            "content": "\n# Set Hadoop-specific environment variables here.\n\n#Set path to where bin/hadoop is available\n#Set path to where bin/hadoop is available\nexport HADOOP_HOME=${HADOOP_HOME:-/usr/lib/hadoop}\n\n#set the path to where bin/hbase is available\nexport HBASE_HOME=${HBASE_HOME:-/usr/lib/hbase}\n\n#Set the path to where bin/hive is available\nexport HIVE_HOME=${HIVE_HOME:-/usr/lib/hive}\n\n#Set the path for where zookeper config dir is\nexport ZOOCFGDIR=${ZOOCFGDIR:-/etc/zookeeper/conf}\n\n# add libthrift in hive to sqoop class path first so hive imports work\nexport SQOOP_USER_CLASSPATH=\"`ls ${HIVE_HOME}/lib/libthrift-*.jar 2> /dev/null`:${SQOOP_USER_CLASSPATH}\"\n    ",
+            "sqoop_user": "sqoop"
         }, 
         "mapred-env": {
             "mapreduce_userlog_retainhours": "24", 

+ 5 - 0
ambari-server/src/test/python/stacks/1.3.2/hooks/before-INSTALL/test_before_install.py

@@ -133,5 +133,10 @@ class TestHookBeforeInstall(RMFTestCase):
         gid = 'hadoop',
         ignore_failures = False,
     )
+    self.assertResourceCalled('User', 'sqoop',
+        gid = 'hadoop',
+        ignore_failures = False,
+        groups = ['hadoop'],
+    )
     self.assertResourceCalled('Package', 'unzip',)
     self.assertNoMoreResources()

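The new assertion verifies that the hook schedules a User resource for the configured sqoop account alongside the existing users. assertResourceCalled and assertNoMoreResources come from Ambari's RMFTestCase harness; the toy imitation below only sketches the record-then-drain pattern they rely on, it is not the real harness:

# Toy imitation of the record-then-drain assertion pattern; not the
# real RMFTestCase harness.
import unittest

RECORDED = []  # resources in declaration order

def record(resource_type, name, **kwargs):
    RECORDED.append((resource_type, name, kwargs))

class ToyResourceTest(unittest.TestCase):
    def assertResourceCalled(self, resource_type, name, **kwargs):
        self.assertTrue(RECORDED, "expected another resource")
        self.assertEqual(RECORDED.pop(0), (resource_type, name, kwargs))

    def assertNoMoreResources(self):
        self.assertEqual(RECORDED, [])

    def test_sqoop_user_created(self):
        # What the hook would have recorded for the new guarded block.
        record('User', 'sqoop', gid='hadoop',
               ignore_failures=False, groups=['hadoop'])
        self.assertResourceCalled('User', 'sqoop', gid='hadoop',
                                  ignore_failures=False, groups=['hadoop'])
        self.assertNoMoreResources()

if __name__ == '__main__':
    unittest.main()
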
+ 2 - 1
ambari-server/src/test/python/stacks/2.0.6/configs/default.json

@@ -543,7 +543,8 @@
             "content": "\nJAVA_HOME={{java64_home}}\nHADOOP_HOME=${HADOOP_HOME:-{{hadoop_home}}}\n\nif [ -d \"/usr/lib/tez\" ]; then\n  PIG_OPTS=\"$PIG_OPTS -Dmapreduce.framework.name=yarn\"\nfi"
         }, 
         "sqoop-env": {
-            "content": "\n# Set Hadoop-specific environment variables here.\n\n#Set path to where bin/hadoop is available\n#Set path to where bin/hadoop is available\nexport HADOOP_HOME=${HADOOP_HOME:-/usr/lib/hadoop}\n\n#set the path to where bin/hbase is available\nexport HBASE_HOME=${HBASE_HOME:-/usr/lib/hbase}\n\n#Set the path to where bin/hive is available\nexport HIVE_HOME=${HIVE_HOME:-/usr/lib/hive}\n\n#Set the path for where zookeper config dir is\nexport ZOOCFGDIR=${ZOOCFGDIR:-/etc/zookeeper/conf}\n\n# add libthrift in hive to sqoop class path first so hive imports work\nexport SQOOP_USER_CLASSPATH=\"`ls ${HIVE_HOME}/lib/libthrift-*.jar 2> /dev/null`:${SQOOP_USER_CLASSPATH}\""
+            "content": "\n# Set Hadoop-specific environment variables here.\n\n#Set path to where bin/hadoop is available\n#Set path to where bin/hadoop is available\nexport HADOOP_HOME=${HADOOP_HOME:-/usr/lib/hadoop}\n\n#set the path to where bin/hbase is available\nexport HBASE_HOME=${HBASE_HOME:-/usr/lib/hbase}\n\n#Set the path to where bin/hive is available\nexport HIVE_HOME=${HIVE_HOME:-/usr/lib/hive}\n\n#Set the path for where zookeper config dir is\nexport ZOOCFGDIR=${ZOOCFGDIR:-/etc/zookeeper/conf}\n\n# add libthrift in hive to sqoop class path first so hive imports work\nexport SQOOP_USER_CLASSPATH=\"`ls ${HIVE_HOME}/lib/libthrift-*.jar 2> /dev/null`:${SQOOP_USER_CLASSPATH}\"",
+            "sqoop_user": "sqoop"
         }, 
         "hdfs-log4j": {
             "content": "log4jproperties\nline2"

+ 2 - 1
ambari-server/src/test/python/stacks/2.0.6/configs/default.non_gmetad_host.json

@@ -529,7 +529,8 @@
             "content": "\nJAVA_HOME={{java64_home}}\nHADOOP_HOME=${HADOOP_HOME:-{{hadoop_home}}}\n\nif [ -d \"/usr/lib/tez\" ]; then\n  PIG_OPTS=\"$PIG_OPTS -Dmapreduce.framework.name=yarn\"\nfi"
         }, 
         "sqoop-env": {
-            "content": "\n# Set Hadoop-specific environment variables here.\n\n#Set path to where bin/hadoop is available\n#Set path to where bin/hadoop is available\nexport HADOOP_HOME=${HADOOP_HOME:-/usr/lib/hadoop}\n\n#set the path to where bin/hbase is available\nexport HBASE_HOME=${HBASE_HOME:-/usr/lib/hbase}\n\n#Set the path to where bin/hive is available\nexport HIVE_HOME=${HIVE_HOME:-/usr/lib/hive}\n\n#Set the path for where zookeper config dir is\nexport ZOOCFGDIR=${ZOOCFGDIR:-/etc/zookeeper/conf}\n\n# add libthrift in hive to sqoop class path first so hive imports work\nexport SQOOP_USER_CLASSPATH=\"`ls ${HIVE_HOME}/lib/libthrift-*.jar 2> /dev/null`:${SQOOP_USER_CLASSPATH}\""
+            "content": "\n# Set Hadoop-specific environment variables here.\n\n#Set path to where bin/hadoop is available\n#Set path to where bin/hadoop is available\nexport HADOOP_HOME=${HADOOP_HOME:-/usr/lib/hadoop}\n\n#set the path to where bin/hbase is available\nexport HBASE_HOME=${HBASE_HOME:-/usr/lib/hbase}\n\n#Set the path to where bin/hive is available\nexport HIVE_HOME=${HIVE_HOME:-/usr/lib/hive}\n\n#Set the path for where zookeper config dir is\nexport ZOOCFGDIR=${ZOOCFGDIR:-/etc/zookeeper/conf}\n\n# add libthrift in hive to sqoop class path first so hive imports work\nexport SQOOP_USER_CLASSPATH=\"`ls ${HIVE_HOME}/lib/libthrift-*.jar 2> /dev/null`:${SQOOP_USER_CLASSPATH}\"",
+            "sqoop_user": "sqoop"
         }, 
         "hdfs-log4j": {
             "content": "log4jproperties\nline2"

+ 3 - 2
ambari-server/src/test/python/stacks/2.0.6/configs/ha_default.json

@@ -485,7 +485,8 @@
             "content": "\nJAVA_HOME={{java64_home}}\nHADOOP_HOME=${HADOOP_HOME:-{{hadoop_home}}}\n\nif [ -d \"/usr/lib/tez\" ]; then\n  PIG_OPTS=\"$PIG_OPTS -Dmapreduce.framework.name=yarn\"\nfi"
         }, 
         "sqoop-env": {
-            "content": "\n# Set Hadoop-specific environment variables here.\n\n#Set path to where bin/hadoop is available\n#Set path to where bin/hadoop is available\nexport HADOOP_HOME=${HADOOP_HOME:-/usr/lib/hadoop}\n\n#set the path to where bin/hbase is available\nexport HBASE_HOME=${HBASE_HOME:-/usr/lib/hbase}\n\n#Set the path to where bin/hive is available\nexport HIVE_HOME=${HIVE_HOME:-/usr/lib/hive}\n\n#Set the path for where zookeper config dir is\nexport ZOOCFGDIR=${ZOOCFGDIR:-/etc/zookeeper/conf}\n\n# add libthrift in hive to sqoop class path first so hive imports work\nexport SQOOP_USER_CLASSPATH=\"`ls ${HIVE_HOME}/lib/libthrift-*.jar 2> /dev/null`:${SQOOP_USER_CLASSPATH}\""
+            "content": "\n# Set Hadoop-specific environment variables here.\n\n#Set path to where bin/hadoop is available\n#Set path to where bin/hadoop is available\nexport HADOOP_HOME=${HADOOP_HOME:-/usr/lib/hadoop}\n\n#set the path to where bin/hbase is available\nexport HBASE_HOME=${HBASE_HOME:-/usr/lib/hbase}\n\n#Set the path to where bin/hive is available\nexport HIVE_HOME=${HIVE_HOME:-/usr/lib/hive}\n\n#Set the path for where zookeper config dir is\nexport ZOOCFGDIR=${ZOOCFGDIR:-/etc/zookeeper/conf}\n\n# add libthrift in hive to sqoop class path first so hive imports work\nexport SQOOP_USER_CLASSPATH=\"`ls ${HIVE_HOME}/lib/libthrift-*.jar 2> /dev/null`:${SQOOP_USER_CLASSPATH}\"",
+            "sqoop_user": "sqoop"
         }
     },
     "configuration_attributes": {
@@ -616,4 +617,4 @@
             "c6402.ambari.apache.org"
         ]
     }
-}
+}

+ 3 - 2
ambari-server/src/test/python/stacks/2.0.6/configs/ha_secured.json

@@ -510,7 +510,8 @@
             "content": "\nJAVA_HOME={{java64_home}}\nHADOOP_HOME=${HADOOP_HOME:-{{hadoop_home}}}\n\nif [ -d \"/usr/lib/tez\" ]; then\n  PIG_OPTS=\"$PIG_OPTS -Dmapreduce.framework.name=yarn\"\nfi"
         }, 
         "sqoop-env": {
-            "content": "\n# Set Hadoop-specific environment variables here.\n\n#Set path to where bin/hadoop is available\n#Set path to where bin/hadoop is available\nexport HADOOP_HOME=${HADOOP_HOME:-/usr/lib/hadoop}\n\n#set the path to where bin/hbase is available\nexport HBASE_HOME=${HBASE_HOME:-/usr/lib/hbase}\n\n#Set the path to where bin/hive is available\nexport HIVE_HOME=${HIVE_HOME:-/usr/lib/hive}\n\n#Set the path for where zookeper config dir is\nexport ZOOCFGDIR=${ZOOCFGDIR:-/etc/zookeeper/conf}\n\n# add libthrift in hive to sqoop class path first so hive imports work\nexport SQOOP_USER_CLASSPATH=\"`ls ${HIVE_HOME}/lib/libthrift-*.jar 2> /dev/null`:${SQOOP_USER_CLASSPATH}\""
+            "content": "\n# Set Hadoop-specific environment variables here.\n\n#Set path to where bin/hadoop is available\n#Set path to where bin/hadoop is available\nexport HADOOP_HOME=${HADOOP_HOME:-/usr/lib/hadoop}\n\n#set the path to where bin/hbase is available\nexport HBASE_HOME=${HBASE_HOME:-/usr/lib/hbase}\n\n#Set the path to where bin/hive is available\nexport HIVE_HOME=${HIVE_HOME:-/usr/lib/hive}\n\n#Set the path for where zookeper config dir is\nexport ZOOCFGDIR=${ZOOCFGDIR:-/etc/zookeeper/conf}\n\n# add libthrift in hive to sqoop class path first so hive imports work\nexport SQOOP_USER_CLASSPATH=\"`ls ${HIVE_HOME}/lib/libthrift-*.jar 2> /dev/null`:${SQOOP_USER_CLASSPATH}\"",
+            "sqoop_user": "sqoop"
         }
     },
     "configuration_attributes": {
@@ -641,4 +642,4 @@
             "c6402.ambari.apache.org"
         ]
     }
-}
+}

+ 2 - 1
ambari-server/src/test/python/stacks/2.0.6/configs/secured.json

@@ -572,7 +572,8 @@
             "content": "\nJAVA_HOME={{java64_home}}\nHADOOP_HOME=${HADOOP_HOME:-{{hadoop_home}}}\n\nif [ -d \"/usr/lib/tez\" ]; then\n  PIG_OPTS=\"$PIG_OPTS -Dmapreduce.framework.name=yarn\"\nfi"
         }, 
         "sqoop-env": {
-            "content": "\n# Set Hadoop-specific environment variables here.\n\n#Set path to where bin/hadoop is available\n#Set path to where bin/hadoop is available\nexport HADOOP_HOME=${HADOOP_HOME:-/usr/lib/hadoop}\n\n#set the path to where bin/hbase is available\nexport HBASE_HOME=${HBASE_HOME:-/usr/lib/hbase}\n\n#Set the path to where bin/hive is available\nexport HIVE_HOME=${HIVE_HOME:-/usr/lib/hive}\n\n#Set the path for where zookeper config dir is\nexport ZOOCFGDIR=${ZOOCFGDIR:-/etc/zookeeper/conf}\n\n# add libthrift in hive to sqoop class path first so hive imports work\nexport SQOOP_USER_CLASSPATH=\"`ls ${HIVE_HOME}/lib/libthrift-*.jar 2> /dev/null`:${SQOOP_USER_CLASSPATH}\""
+            "content": "\n# Set Hadoop-specific environment variables here.\n\n#Set path to where bin/hadoop is available\n#Set path to where bin/hadoop is available\nexport HADOOP_HOME=${HADOOP_HOME:-/usr/lib/hadoop}\n\n#set the path to where bin/hbase is available\nexport HBASE_HOME=${HBASE_HOME:-/usr/lib/hbase}\n\n#Set the path to where bin/hive is available\nexport HIVE_HOME=${HIVE_HOME:-/usr/lib/hive}\n\n#Set the path for where zookeper config dir is\nexport ZOOCFGDIR=${ZOOCFGDIR:-/etc/zookeeper/conf}\n\n# add libthrift in hive to sqoop class path first so hive imports work\nexport SQOOP_USER_CLASSPATH=\"`ls ${HIVE_HOME}/lib/libthrift-*.jar 2> /dev/null`:${SQOOP_USER_CLASSPATH}\"",
+            "sqoop_user": "sqoop"
         },
         "hdfs-log4j": {
             "content": "log4jproperties\nline2"

+ 5 - 0
ambari-server/src/test/python/stacks/2.0.6/hooks/before-INSTALL/test_before_install.py

@@ -154,4 +154,9 @@ class TestHookBeforeInstall(RMFTestCase):
         ignore_failures = False,
         groups = ['users'],
     )
+    self.assertResourceCalled('User', 'sqoop',
+        gid = 'hadoop',
+        ignore_failures = False,
+        groups = ['hadoop'],
+    )
     self.assertNoMoreResources()

+ 2 - 1
ambari-server/src/test/python/stacks/2.1/configs/default.json

@@ -659,7 +659,8 @@
             "content": "\nJAVA_HOME={{java64_home}}\nHADOOP_HOME=${HADOOP_HOME:-{{hadoop_home}}}\n\nif [ -d \"/usr/lib/tez\" ]; then\n  PIG_OPTS=\"$PIG_OPTS -Dmapreduce.framework.name=yarn\"\nfi"
         }, 
         "sqoop-env": {
-            "content": "\n# Set Hadoop-specific environment variables here.\n\n#Set path to where bin/hadoop is available\n#Set path to where bin/hadoop is available\nexport HADOOP_HOME=${HADOOP_HOME:-/usr/lib/hadoop}\n\n#set the path to where bin/hbase is available\nexport HBASE_HOME=${HBASE_HOME:-/usr/lib/hbase}\n\n#Set the path to where bin/hive is available\nexport HIVE_HOME=${HIVE_HOME:-/usr/lib/hive}\n\n#Set the path for where zookeper config dir is\nexport ZOOCFGDIR=${ZOOCFGDIR:-/etc/zookeeper/conf}\n\n# add libthrift in hive to sqoop class path first so hive imports work\nexport SQOOP_USER_CLASSPATH=\"`ls ${HIVE_HOME}/lib/libthrift-*.jar 2> /dev/null`:${SQOOP_USER_CLASSPATH}\""
+            "content": "\n# Set Hadoop-specific environment variables here.\n\n#Set path to where bin/hadoop is available\n#Set path to where bin/hadoop is available\nexport HADOOP_HOME=${HADOOP_HOME:-/usr/lib/hadoop}\n\n#set the path to where bin/hbase is available\nexport HBASE_HOME=${HBASE_HOME:-/usr/lib/hbase}\n\n#Set the path to where bin/hive is available\nexport HIVE_HOME=${HIVE_HOME:-/usr/lib/hive}\n\n#Set the path for where zookeper config dir is\nexport ZOOCFGDIR=${ZOOCFGDIR:-/etc/zookeeper/conf}\n\n# add libthrift in hive to sqoop class path first so hive imports work\nexport SQOOP_USER_CLASSPATH=\"`ls ${HIVE_HOME}/lib/libthrift-*.jar 2> /dev/null`:${SQOOP_USER_CLASSPATH}\"",
+            "sqoop_user": "sqoop"
         },
         "hdfs-log4j": {
             "property1": "value1"

+ 2 - 1
ambari-server/src/test/python/stacks/2.1/configs/secured.json

@@ -640,7 +640,8 @@
             "content": "\nJAVA_HOME={{java64_home}}\nHADOOP_HOME=${HADOOP_HOME:-{{hadoop_home}}}\n\nif [ -d \"/usr/lib/tez\" ]; then\n  PIG_OPTS=\"$PIG_OPTS -Dmapreduce.framework.name=yarn\"\nfi"
         }, 
         "sqoop-env": {
-            "content": "\n# Set Hadoop-specific environment variables here.\n\n#Set path to where bin/hadoop is available\n#Set path to where bin/hadoop is available\nexport HADOOP_HOME=${HADOOP_HOME:-/usr/lib/hadoop}\n\n#set the path to where bin/hbase is available\nexport HBASE_HOME=${HBASE_HOME:-/usr/lib/hbase}\n\n#Set the path to where bin/hive is available\nexport HIVE_HOME=${HIVE_HOME:-/usr/lib/hive}\n\n#Set the path for where zookeper config dir is\nexport ZOOCFGDIR=${ZOOCFGDIR:-/etc/zookeeper/conf}\n\n# add libthrift in hive to sqoop class path first so hive imports work\nexport SQOOP_USER_CLASSPATH=\"`ls ${HIVE_HOME}/lib/libthrift-*.jar 2> /dev/null`:${SQOOP_USER_CLASSPATH}\""
+            "content": "\n# Set Hadoop-specific environment variables here.\n\n#Set path to where bin/hadoop is available\n#Set path to where bin/hadoop is available\nexport HADOOP_HOME=${HADOOP_HOME:-/usr/lib/hadoop}\n\n#set the path to where bin/hbase is available\nexport HBASE_HOME=${HBASE_HOME:-/usr/lib/hbase}\n\n#Set the path to where bin/hive is available\nexport HIVE_HOME=${HIVE_HOME:-/usr/lib/hive}\n\n#Set the path for where zookeper config dir is\nexport ZOOCFGDIR=${ZOOCFGDIR:-/etc/zookeeper/conf}\n\n# add libthrift in hive to sqoop class path first so hive imports work\nexport SQOOP_USER_CLASSPATH=\"`ls ${HIVE_HOME}/lib/libthrift-*.jar 2> /dev/null`:${SQOOP_USER_CLASSPATH}\"",
+            "sqoop_user": "sqoop"
         }, 
         "hdfs-log4j": {
             "property1": "value1"

+ 14 - 0
ambari-web/app/data/HDP2/site_properties.js

@@ -3597,6 +3597,20 @@ module.exports =
       "belongsToService": ["HDFS"],
       "index": 17
     },
+    {
+      "id": "puppet var",
+      "name": "sqoop_user",
+      "displayName": "Sqoop User",
+      "isReconfigurable": false,
+      "displayType": "user",
+      "isOverridable": false,
+      "isVisible": true,
+      "serviceName": "MISC",
+      "filename": "sqoop-env.xml",
+      "category": "Users and Groups",
+      "belongsToService": ["SQOOP"],
+      "index": 17
+    },
     {
       "id": "puppet var",
       "name": "rrdcached_base_dir",

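Both site_properties files gain the same entry, which surfaces the value as a non-reconfigurable "Sqoop User" field under Users and Groups in the install wizard. The entry is a plain object, so a Python mirror of it can be sanity-checked for the keys a property definition of this kind carries (the required set below is an assumption, not Ambari Web's actual contract):

# Hypothetical sanity check; REQUIRED is an assumed key set, not the
# actual contract enforced by Ambari Web.
SQOOP_USER_PROPERTY = {
    "id": "puppet var",
    "name": "sqoop_user",
    "displayName": "Sqoop User",
    "isReconfigurable": False,
    "displayType": "user",
    "isOverridable": False,
    "isVisible": True,
    "serviceName": "MISC",
    "filename": "sqoop-env.xml",
    "category": "Users and Groups",
    "belongsToService": ["SQOOP"],
}

REQUIRED = {"id", "name", "displayName", "displayType",
            "serviceName", "filename", "category", "belongsToService"}
missing = REQUIRED - SQOOP_USER_PROPERTY.keys()
assert not missing, "missing keys: %s" % sorted(missing)
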
+ 13 - 0
ambari-web/app/data/site_properties.js

@@ -2252,6 +2252,19 @@ module.exports =
       "category": "Users and Groups",
       "belongsToService": ["HDFS"]
     },
+    {
+      "id": "puppet var",
+      "name": "sqoop_user",
+      "displayName": "Sqoop User",
+      "isReconfigurable": false,
+      "displayType": "user",
+      "isOverridable": false,
+      "isVisible": true,
+      "serviceName": "MISC",
+      "filename": "sqoop-env.xml",
+      "category": "Users and Groups",
+      "belongsToService": ["SQOOP"]
+    },
     {
       "id": "puppet var",
       "name": "mapred_user",
       "name": "mapred_user",