
AMBARI-13899. Remove hdp.version configuration in Ambari Spark definition (Saisai Shao via smohanty)
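This change stops Ambari from injecting -Dhdp.version={{hdp_full_version}} into Spark's JVM options: the spark.driver.extraJavaOptions and spark.yarn.am.extraJavaOptions defaults are dropped from spark-defaults.xml and spark-thrift-sparkconf.xml, the spark-javaopts-properties config type and the java-opts file it generated are removed, and params.py no longer appends the flag when it is missing. The stack metainfo.xml files and the Python tests are updated to match.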

Sumit Mohanty committed 9 years ago
parent commit cacbb55326

+ 0 - 22
ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/configuration/spark-defaults.xml

@@ -115,28 +115,6 @@
    </description>
  </property>

-  <property>
-    <name>spark.driver.extraJavaOptions</name>
-    <value>-Dhdp.version={{hdp_full_version}}</value>
-    <description>
-      Specifies parameters that are passed to the JVM of the Spark driver.
-    </description>
-    <value-attributes>
-      <empty-value-valid>true</empty-value-valid>
-    </value-attributes>
-  </property>
-
-  <property>
-    <name>spark.yarn.am.extraJavaOptions</name>
-    <value>-Dhdp.version={{hdp_full_version}}</value>
-    <description>
-      Specifies the parameters that are passed to the JVM of the Spark Application Master.
-    </description>
-    <value-attributes>
-      <empty-value-valid>true</empty-value-valid>
-    </value-attributes>
-  </property>
-
  <property>
    <name>spark.history.kerberos.principal</name>
    <value>none</value>

+ 0 - 30
ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/configuration/spark-javaopts-properties.xml

@@ -1,30 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
--->
-<configuration supports_final="true">
-  <property>
-    <name>content</name>
-    <description>Spark-javaopts-properties</description>
-    <value> </value>
-    <value-attributes>
-      <show-property-name>false</show-property-name>
-    </value-attributes>
-  </property>
-</configuration>
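Note: the deleted file defined a single free-form content property that setup_spark.py rendered into a java-opts file in the Spark conf dir (see the File resource removed below); the metainfo.xml changes below drop the matching spark-javaopts-properties config-type references.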

+ 0 - 1
ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/metainfo.xml

@@ -131,7 +131,6 @@
        <config-type>spark-env</config-type>
        <config-type>spark-log4j-properties</config-type>
        <config-type>spark-metrics-properties</config-type>
-        <config-type>spark-javaopts-properties</config-type>
      </configuration-dependencies>

      <commandScript>

+ 0 - 15
ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py

@@ -112,25 +112,10 @@ spark_history_ui_port = config['configurations']['spark-defaults']['spark.histor
 spark_env_sh = config['configurations']['spark-env']['content']
 spark_log4j_properties = config['configurations']['spark-log4j-properties']['content']
 spark_metrics_properties = config['configurations']['spark-metrics-properties']['content']
-spark_javaopts_properties = config['configurations']['spark-javaopts-properties']['content']

 hive_server_host = default("/clusterHostInfo/hive_server_host", [])
 is_hive_installed = not len(hive_server_host) == 0

-hdp_full_version = functions.get_hdp_version('spark-client')
-
-spark_driver_extraJavaOptions = str(config['configurations']['spark-defaults']['spark.driver.extraJavaOptions'])
-if spark_driver_extraJavaOptions.find('-Dhdp.version') == -1:
-  spark_driver_extraJavaOptions = spark_driver_extraJavaOptions + ' -Dhdp.version=' + str(hdp_full_version)
-
-spark_yarn_am_extraJavaOptions = str(config['configurations']['spark-defaults']['spark.yarn.am.extraJavaOptions'])
-if spark_yarn_am_extraJavaOptions.find('-Dhdp.version') == -1:
-  spark_yarn_am_extraJavaOptions = spark_yarn_am_extraJavaOptions + ' -Dhdp.version=' + str(hdp_full_version)
-
-spark_javaopts_properties = str(spark_javaopts_properties)
-if spark_javaopts_properties.find('-Dhdp.version') == -1:
-  spark_javaopts_properties = spark_javaopts_properties+ ' -Dhdp.version=' + str(hdp_full_version)
-
 security_enabled = config['configurations']['cluster-env']['security_enabled']
 kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
 spark_kerberos_keytab =  config['configurations']['spark-defaults']['spark.history.kerberos.keytab']
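The removed lines implement an append-if-missing pattern for a JVM system property. A minimal standalone sketch of that pattern (plain Python; the function name and the sample version string are illustrative, not part of this codebase):

def ensure_java_option(opts, key, value):
    # Append '-D<key>=<value>' only when the flag is not already present.
    if opts.find('-D' + key) == -1:
        return opts + ' -D' + key + '=' + str(value)
    return opts

print(ensure_java_option('-Xmx512m', 'hdp.version', '2.3.0.0-1597'))
# -Xmx512m -Dhdp.version=2.3.0.0-1597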

+ 4 - 10
ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/setup_spark.py

@@ -44,12 +44,12 @@ def setup_spark(env, type, action = None):
                       mode=0775
    )
    params.HdfsResource(None, action="execute")
-    
+
  PropertiesFile(format("{spark_conf}/spark-defaults.conf"),
    properties = params.config['configurations']['spark-defaults'],
-    key_value_delimiter = " ", 
+    key_value_delimiter = " ",
    owner=params.spark_user,
-    group=params.spark_group,              
+    group=params.spark_group,
  )

  # create spark-env.sh in etc/conf dir
@@ -73,12 +73,6 @@ def setup_spark(env, type, action = None):
       content=InlineTemplate(params.spark_metrics_properties)
  )

-  File(os.path.join(params.spark_conf, 'java-opts'),
-       owner=params.spark_user,
-       group=params.spark_group,
-       content=params.spark_javaopts_properties
-  )
-
  if params.is_hive_installed:
    XmlConfig("hive-site.xml",
          conf_dir=params.spark_conf,
@@ -92,5 +86,5 @@ def setup_spark(env, type, action = None):
      and 'spark-thrift-sparkconf' in params.config['configurations']:
    PropertiesFile(params.spark_thrift_server_conf_file,
      properties = params.config['configurations']['spark-thrift-sparkconf'],
-      key_value_delimiter = " ",             
+      key_value_delimiter = " ",
    )
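Aside from deleting the java-opts File resource, these hunks only strip trailing whitespace; key_value_delimiter = " " stays because spark-defaults.conf and spark-thrift-sparkconf.conf use space-separated pairs rather than key=value. A minimal sketch of that rendering (plain Python; render_properties is illustrative, the real work is done by Ambari's PropertiesFile resource):

def render_properties(props, delimiter=' '):
    # Emit one 'key<delimiter>value' line per property, sorted for stable output.
    return '\n'.join(k + delimiter + str(v) for k, v in sorted(props.items()))

print(render_properties({'spark.history.kerberos.principal': 'none'}))
# spark.history.kerberos.principal none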

+ 0 - 1
ambari-server/src/main/resources/common-services/SPARK/1.3.1.2.3/metainfo.xml

@@ -125,7 +125,6 @@
        <config-type>spark-env</config-type>
        <config-type>spark-log4j-properties</config-type>
        <config-type>spark-metrics-properties</config-type>
-        <config-type>spark-javaopts-properties</config-type>
      </configuration-dependencies>

      <commandScript>

+ 0 - 1
ambari-server/src/main/resources/common-services/SPARK/1.4.1.2.3/metainfo.xml

@@ -68,7 +68,6 @@
        <config-type>spark-env</config-type>
        <config-type>spark-log4j-properties</config-type>
        <config-type>spark-metrics-properties</config-type>
-        <config-type>spark-javaopts-properties</config-type>
        <config-type>spark-thrift-sparkconf</config-type>
        <config-type>spark-hive-site-override</config-type>
      </configuration-dependencies>

+ 0 - 16
ambari-server/src/main/resources/stacks/HDP/2.3/services/SPARK/configuration/spark-thrift-sparkconf.xml

@@ -91,22 +91,6 @@
    </description>
  </property>

-  <property>
-    <name>spark.driver.extraJavaOptions</name>
-    <value>-Dhdp.version={{hdp_full_version}}</value>
-    <description>
-      Specifies parameters that are passed to the JVM of the Spark driver.
-    </description>
-  </property>
-
-  <property>
-    <name>spark.yarn.am.extraJavaOptions</name>
-    <value>-Dhdp.version={{hdp_full_version}}</value>
-    <description>
-      Specifies the parameters that are passed to the JVM of the Spark Application Master.
-    </description>
-  </property>
-
  <property>
    <name>spark.yarn.max.executor.failures</name>
    <value>3</value>

+ 0 - 1
ambari-server/src/main/resources/stacks/HDP/2.3/services/SPARK/metainfo.xml

@@ -58,7 +58,6 @@
            <config-type>spark-env</config-type>
            <config-type>spark-log4j-properties</config-type>
            <config-type>spark-metrics-properties</config-type>
-            <config-type>spark-javaopts-properties</config-type>
            <config-type>spark-thrift-sparkconf</config-type>
            <config-type>spark-hive-site-override</config-type>
          </configuration-dependencies>

+ 2 - 11
ambari-server/src/test/python/stacks/2.2/SPARK/test_job_history_server.py

@@ -215,12 +215,7 @@ class TestJobHistoryServer(RMFTestCase):
        owner = 'spark',
        group = 'spark',
    )
-    self.assertResourceCalled('File', '/usr/hdp/current/spark-client/conf/java-opts',
-        content = '  -Dhdp.version=2.3.0.0-1597',
-        owner = 'spark',
-        group = 'spark',
-    )
-      
+
  def assert_configure_secured(self):
    self.assertResourceCalled('Directory', '/var/run/spark',
        owner = 'spark',
@@ -282,11 +277,7 @@ class TestJobHistoryServer(RMFTestCase):
        owner = 'spark',
        group = 'spark',
    )
-    self.assertResourceCalled('File', '/usr/hdp/current/spark-client/conf/java-opts',
-        content = '  -Dhdp.version=2.3.0.0-1597',
-        owner = 'spark',
-        group = 'spark',
-    )
+

  @patch("resource_management.libraries.functions.copy_tarball.copy_to_hdfs")
  def test_pre_upgrade_restart_23(self, copy_to_hdfs_mock):

+ 2 - 10
ambari-server/src/test/python/stacks/2.2/SPARK/test_spark_client.py

@@ -83,11 +83,7 @@ class TestSparkClient(RMFTestCase):
        owner = 'spark',
        group = 'spark',
    )
-    self.assertResourceCalled('File', '/usr/hdp/current/spark-client/conf/java-opts',
-        content = '  -Dhdp.version=2.3.0.0-1597',
-        owner = 'spark',
-        group = 'spark',
-    )
+

  def assert_configure_secured(self):
    self.assertResourceCalled('Directory', '/var/run/spark',
@@ -121,11 +117,7 @@ class TestSparkClient(RMFTestCase):
        owner = 'spark',
        group = 'spark',
    )
-    self.assertResourceCalled('File', '/usr/hdp/current/spark-client/conf/java-opts',
-        content = '  -Dhdp.version=2.3.0.0-1597',
-        owner = 'spark',
-        group = 'spark',
-    )
+

  def test_pre_upgrade_restart_23(self):
    config_file = self.get_src_folder()+"/test/python/stacks/2.2/configs/default.json"

+ 0 - 5
ambari-server/src/test/python/stacks/2.3/SPARK/test_spark_thrift_server.py

@@ -138,11 +138,6 @@ class TestSparkThriftServer(RMFTestCase):
        owner = 'spark',
        group = 'spark',
    )
-    self.assertResourceCalled('File', '/usr/hdp/current/spark-client/conf/java-opts',
-        content = '  -Dhdp.version=2.3.2.0-1597',
-        owner = 'spark',
-        group = 'spark',
-    )
    self.assertResourceCalled('PropertiesFile', '/usr/hdp/current/spark-client/conf/spark-thrift-sparkconf.conf',
        key_value_delimiter = ' ',
        properties = self.getConfig()['configurations']['spark-thrift-sparkconf']