Browse source code

AMBARI-17391: Spark thriftserver fails to start when umask = 027 due to permission issues on java-opts (jluniya)

Jayush Luniya 9 years ago
parent
commit
6afb23b211

+ 6 - 2
ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/setup_spark.py

@@ -64,6 +64,7 @@ def setup_spark(env, type, upgrade_type=None, action=None, config_dir=None):
     key_value_delimiter = " ",
     owner=params.spark_user,
     group=params.spark_group,
+    mode=0644
   )
 
   # create spark-env.sh in etc/conf dir
@@ -86,13 +87,15 @@ def setup_spark(env, type, upgrade_type=None, action=None, config_dir=None):
   File(os.path.join(config_dir, 'metrics.properties'),
        owner=params.spark_user,
        group=params.spark_group,
-       content=InlineTemplate(params.spark_metrics_properties)
+       content=InlineTemplate(params.spark_metrics_properties),
+       mode=0644
   )
 
   File(os.path.join(params.spark_conf, 'java-opts'),
       owner=params.spark_user,
       group=params.spark_group,
-      content=InlineTemplate(params.spark_javaopts_properties)
+      content=InlineTemplate(params.spark_javaopts_properties),
+      mode=0644
   )
 
   Directory(params.spark_logs_dir,
@@ -115,6 +118,7 @@ def setup_spark(env, type, upgrade_type=None, action=None, config_dir=None):
       owner = params.hive_user,
       group = params.user_group,
       key_value_delimiter = " ",
+      mode=0644
     )
 
   effective_version = params.version if upgrade_type is not None else params.stack_version_formatted

+ 4 - 1
ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/setup_spark.py

@@ -53,6 +53,7 @@ def setup_spark(env, type, upgrade_type = None, action = None):
     key_value_delimiter = " ",
     owner=params.spark_user,
     group=params.spark_group,
+    mode=0644
   )
 
   # create spark-env.sh in etc/conf dir
@@ -75,7 +76,8 @@ def setup_spark(env, type, upgrade_type = None, action = None):
   File(os.path.join(params.spark_conf, 'metrics.properties'),
        owner=params.spark_user,
        group=params.spark_group,
-       content=InlineTemplate(params.spark_metrics_properties)
+       content=InlineTemplate(params.spark_metrics_properties),
+       mode=0644
   )
 
   if params.is_hive_installed:
@@ -92,6 +94,7 @@ def setup_spark(env, type, upgrade_type = None, action = None):
       owner = params.hive_user,
       group = params.user_group,
       key_value_delimiter = " ",
+      mode=0644
     )
 
   effective_version = params.version if upgrade_type is not None else params.stack_version_formatted

+ 6 - 0
ambari-server/src/test/python/stacks/2.2/SPARK/test_job_history_server.py

@@ -207,6 +207,7 @@ class TestJobHistoryServer(RMFTestCase):
         key_value_delimiter = ' ',
         group = 'spark',
         properties = self.getConfig()['configurations']['spark-defaults'],
+        mode = 0644
     )
     self.assertResourceCalled('File', '/usr/hdp/current/spark-client/conf/spark-env.sh',
         content = InlineTemplate(self.getConfig()['configurations']['spark-env']['content']),
@@ -224,11 +225,13 @@ class TestJobHistoryServer(RMFTestCase):
         content = InlineTemplate(self.getConfig()['configurations']['spark-metrics-properties']['content']),
         owner = 'spark',
         group = 'spark',
+        mode = 0644
     )
     self.assertResourceCalled('File', '/usr/hdp/current/spark-client/conf/java-opts',
         content = InlineTemplate(' '),
         owner = 'spark',
         group = 'spark',
+        mode = 0644
     )
     self.assertResourceCalled('Directory', '/usr/hdp/current/spark-client/logs',
         owner = 'spark',
@@ -285,6 +288,7 @@ class TestJobHistoryServer(RMFTestCase):
         key_value_delimiter = ' ',
         group = 'spark',
         properties = self.getConfig()['configurations']['spark-defaults'],
+        mode = 0644
     )
     self.assertResourceCalled('File', '/usr/hdp/current/spark-client/conf/spark-env.sh',
         content = InlineTemplate(self.getConfig()['configurations']['spark-env']['content']),
@@ -302,11 +306,13 @@ class TestJobHistoryServer(RMFTestCase):
         content = InlineTemplate(self.getConfig()['configurations']['spark-metrics-properties']['content']),
         owner = 'spark',
         group = 'spark',
+        mode = 0644
     )
     self.assertResourceCalled('File', '/usr/hdp/current/spark-client/conf/java-opts',
         content = InlineTemplate(' '),
         owner = 'spark',
         group = 'spark',
+        mode = 0644
     )
     self.assertResourceCalled('Directory', '/usr/hdp/current/spark-client/logs',
         owner = 'spark',

+ 6 - 0
ambari-server/src/test/python/stacks/2.2/SPARK/test_spark_client.py

@@ -69,6 +69,7 @@ class TestSparkClient(RMFTestCase):
         key_value_delimiter = ' ',
         group = 'spark',
         properties = self.getConfig()['configurations']['spark-defaults'],
+        mode = 0644
     )
     self.assertResourceCalled('File', '/usr/hdp/current/spark-client/conf/spark-env.sh',
         content = InlineTemplate(self.getConfig()['configurations']['spark-env']['content']),
@@ -86,11 +87,13 @@ class TestSparkClient(RMFTestCase):
         content = InlineTemplate(self.getConfig()['configurations']['spark-metrics-properties']['content']),
         owner = 'spark',
         group = 'spark',
+        mode = 0644
     )
     self.assertResourceCalled('File', '/usr/hdp/current/spark-client/conf/java-opts',
         content = InlineTemplate(' '),
         owner = 'spark',
         group = 'spark',
+        mode = 0644
     )
     self.assertResourceCalled('Directory', '/usr/hdp/current/spark-client/logs',
         owner = 'spark',
@@ -117,6 +120,7 @@ class TestSparkClient(RMFTestCase):
         key_value_delimiter = ' ',
         group = 'spark',
         properties = self.getConfig()['configurations']['spark-defaults'],
+        mode = 0644
     )
     self.assertResourceCalled('File', '/usr/hdp/current/spark-client/conf/spark-env.sh',
         content = InlineTemplate(self.getConfig()['configurations']['spark-env']['content']),
@@ -134,11 +138,13 @@ class TestSparkClient(RMFTestCase):
         content = InlineTemplate(self.getConfig()['configurations']['spark-metrics-properties']['content']),
         owner = 'spark',
         group = 'spark',
+        mode = 0644
     )
     self.assertResourceCalled('File', '/usr/hdp/current/spark-client/conf/java-opts',
         content = InlineTemplate(' '),
         owner = 'spark',
         group = 'spark',
+        mode = 0644
     )
     self.assertResourceCalled('Directory', '/usr/hdp/current/spark-client/logs',
         owner = 'spark',

+ 5 - 1
ambari-server/src/test/python/stacks/2.3/SPARK/test_spark_thrift_server.py

@@ -127,6 +127,7 @@ class TestSparkThriftServer(RMFTestCase):
         key_value_delimiter = ' ',
         group = 'spark',
         properties = self.getConfig()['configurations']['spark-defaults'],
+        mode = 0644
     )
     self.assertResourceCalled('File', '/usr/hdp/current/spark-client/conf/spark-env.sh',
         content = InlineTemplate(self.getConfig()['configurations']['spark-env']['content']),
@@ -144,11 +145,13 @@ class TestSparkThriftServer(RMFTestCase):
         content = InlineTemplate(self.getConfig()['configurations']['spark-metrics-properties']['content']),
         owner = 'spark',
         group = 'spark',
+        mode = 0644
     )
     self.assertResourceCalled('File', '/usr/hdp/current/spark-client/conf/java-opts',
         content = InlineTemplate(' '),
         owner = 'spark',
         group = 'spark',
+        mode = 0644
     )
     self.assertResourceCalled('Directory', '/usr/hdp/current/spark-client/logs',
         owner = 'spark',
@@ -159,7 +162,8 @@ class TestSparkThriftServer(RMFTestCase):
         key_value_delimiter = ' ',
         owner = 'hive',
         group = 'hadoop',
-        properties = self.getConfig()['configurations']['spark-thrift-sparkconf']
+        properties = self.getConfig()['configurations']['spark-thrift-sparkconf'],
+        mode = 0644
     )
 
   @patch("resource_management.libraries.functions.copy_tarball.copy_to_hdfs")