Source file list

AMBARI-17954: Fix Spark hdp.version issues in upgrading and fresh install (Saisai Shao via jluniya)

Jayush Luniya 9 years ago
parent
commit
ea99e7ae6c
16 changed files with 181 additions and 55 deletions
  1. 1 0
      ambari-common/src/main/python/resource_management/libraries/functions/constants.py
  2. 2 2
      ambari-server/src/main/resources/common-services/SPARK/1.2.1/configuration/spark-defaults.xml
  3. 9 1
      ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/params.py
  4. 14 9
      ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/setup_spark.py
  5. 16 0
      ambari-server/src/main/resources/common-services/SPARK/1.5.2/configuration/spark-thrift-sparkconf.xml
  6. 6 0
      ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_features.json
  7. 26 1
      ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/config-upgrade.xml
  8. 16 4
      ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.4.xml
  9. 17 5
      ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.5.xml
  10. 8 1
      ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.4.xml
  11. 8 1
      ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.5.xml
  12. 0 2
      ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/configuration/spark-defaults.xml
  13. 32 0
      ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/configuration/spark-thrift-sparkconf.xml
  14. 10 11
      ambari-server/src/test/python/stacks/2.2/SPARK/test_job_history_server.py
  15. 10 12
      ambari-server/src/test/python/stacks/2.2/SPARK/test_spark_client.py
  16. 6 6
      ambari-server/src/test/python/stacks/2.3/SPARK/test_spark_thrift_server.py

+ 1 - 0
ambari-common/src/main/python/resource_management/libraries/functions/constants.py

@@ -95,3 +95,4 @@ class StackFeature:
   RANGER_KMS_PID_SUPPORT = "ranger_kms_pid_support"
   RANGER_ADMIN_PASSWD_CHANGE = "ranger_admin_password_change"
   STORM_METRICS_APACHE_CLASSES = "storm_metrics_apache_classes"
+  SPARK_JAVA_OPTS_SUPPORT = "spark_java_opts_support"

+ 2 - 2
ambari-server/src/main/resources/common-services/SPARK/1.2.1/configuration/spark-defaults.xml

@@ -121,7 +121,7 @@
   </property>
   <property>
     <name>spark.driver.extraJavaOptions</name>
-    <value></value>
+    <value>-Dhdp.version={{full_stack_version}}</value>
     <description>
       Specifies parameters that are passed to the JVM of the Spark driver.
     </description>
@@ -132,7 +132,7 @@
   </property>
   <property>
     <name>spark.yarn.am.extraJavaOptions</name>
-    <value></value>
+    <value>-Dhdp.version={{full_stack_version}}</value>
     <description>
       Specifies the parameters that are passed to the JVM of the Spark Application Master.
     </description>

+ 9 - 1
ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/params.py

@@ -125,8 +125,16 @@ hive_server_host = default("/clusterHostInfo/hive_server_host", [])
 is_hive_installed = not len(hive_server_host) == 0
 
 full_stack_version = get_stack_version('spark-client')
-spark_javaopts_properties = default("/configurations/spark-javaopts-properties/content", " ")
 
+spark_javaopts_properties = default("/configurations/spark-javaopts-properties/content", " ")
+if spark_javaopts_properties.find('-Dhdp.version') == -1:
+  spark_javaopts_properties = spark_javaopts_properties+ ' -Dhdp.version=' + str(full_stack_version)
+else:
+  lists = spark_javaopts_properties.split(" ")
+  for idx, val in enumerate(lists):
+    if (val.startswith("-Dhdp.version=")):
+        lists[idx] = "-Dhdp.version=" + str(full_stack_version)
+  spark_javaopts_properties = " ".join(lists)
 
 security_enabled = config['configurations']['cluster-env']['security_enabled']
 kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))

+ 14 - 9
ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/setup_spark.py

@@ -91,17 +91,10 @@ def setup_spark(env, type, upgrade_type=None, action=None, config_dir=None):
        mode=0644
   )
 
-  File(os.path.join(params.spark_conf, 'java-opts'),
-      owner=params.spark_user,
-      group=params.spark_group,
-      content=InlineTemplate(params.spark_javaopts_properties),
-      mode=0644
-  )
-
   Directory(params.spark_logs_dir,
        owner=params.spark_user,
        group=params.spark_group,
-       mode=0755,   
+       mode=0755,
   )
 
   if params.is_hive_installed:
@@ -125,6 +118,18 @@ def setup_spark(env, type, upgrade_type=None, action=None, config_dir=None):
   if effective_version:
     effective_version = format_stack_version(effective_version)
 
+  if effective_version and check_stack_feature(StackFeature.SPARK_JAVA_OPTS_SUPPORT, effective_version):
+    File(os.path.join(params.spark_conf, 'java-opts'),
+      owner=params.spark_user,
+      group=params.spark_group,
+      content=InlineTemplate(params.spark_javaopts_properties),
+      mode=0644
+    )
+  else:
+    File(os.path.join(params.spark_conf, 'java-opts'),
+      action="delete"
+    )
+
   if params.spark_thrift_fairscheduler_content and effective_version and check_stack_feature(StackFeature.SPARK_16PLUS, effective_version):
     # create spark-thrift-fairscheduler.xml
     File(os.path.join(config_dir,"spark-thrift-fairscheduler.xml"),
@@ -132,4 +137,4 @@ def setup_spark(env, type, upgrade_type=None, action=None, config_dir=None):
       group=params.spark_group,
       mode=0755,
       content=InlineTemplate(params.spark_thrift_fairscheduler_content)
-    )
+    )

+ 16 - 0
ambari-server/src/main/resources/common-services/SPARK/1.5.2/configuration/spark-thrift-sparkconf.xml

@@ -37,6 +37,22 @@
     </description>
     <on-ambari-upgrade add="true"/>
   </property>
+  <property>
+    <name>spark.driver.extraJavaOptions</name>
+    <value>-Dhdp.version={{full_stack_version}}</value>
+    <description>
+      Specifies parameters that are passed to the JVM of the Spark driver.
+    </description>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>spark.yarn.am.extraJavaOptions</name>
+    <value>-Dhdp.version={{full_stack_version}}</value>
+    <description>
+      Specifies the parameters that are passed to the JVM of the Spark Application Master.
+    </description>
+    <on-ambari-upgrade add="true"/>
+  </property>
   <property>
     <name>spark.yarn.driver.memoryOverhead</name>
     <value>384</value>

+ 6 - 0
ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_features.json

@@ -276,6 +276,12 @@
       "name": "storm_metrics_apache_classes",
       "description": "Metrics sink for Storm that uses Apache class names",
       "min_version": "2.5.0.0"
+    },
+    {
+      "name": "spark_java_opts_support",
+      "description": "Allow Spark to generate java-opts file",
+      "min_version": "2.2.0.0",
+      "max_version": "2.4.0.0"
     }
   ]
 }

+ 26 - 1
ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/config-upgrade.xml

@@ -363,8 +363,27 @@
           <definition xsi:type="configure" id="hdp_2_4_0_0_spark_jobhistoryserver">
             <type>spark-defaults</type>
             <transfer operation="delete" delete-key="spark.yarn.services" />
+            <transfer operation="delete" delete-key="spark.driver.extraJavaOptions" />
+            <transfer operation="delete" delete-key="spark.yarn.am.extraJavaOptions" />
             <set key="spark.history.provider" value="org.apache.spark.deploy.history.FsHistoryProvider"/>
           </definition>
+          <definition xsi:type="configure" id="hdp_2_4_0_0_spark_java_opts">
+            <type>spark-javaopts-properties</type>
+            <transfer operation="delete" delete-key="content" />
+          </definition>
+        </changes>
+      </component>
+      <component name="SPARK_CLIENT">
+        <changes>
+          <definition xsi:type="configure" id="hdp_2_4_0_0_remove_spark_properties_extraJavaOptions">
+            <type>spark-defaults</type>
+            <transfer operation="delete" delete-key="spark.driver.extraJavaOptions" />
+            <transfer operation="delete" delete-key="spark.yarn.am.extraJavaOptions" />
+          </definition>
+          <definition xsi:type="configure" id="hdp_2_4_0_0_spark_java_opts">
+            <type>spark-javaopts-properties</type>
+            <transfer operation="delete" delete-key="content" />
+          </definition>
         </changes>
       </component>
       <component name="SPARK_THRIFTSERVER">
@@ -379,6 +398,12 @@
             <transfer operation="delete" delete-key="spark.yarn.submit.file.replication" />
             <transfer operation="delete" delete-key="spark.yarn.preserve.staging.files" />
             <transfer operation="delete" delete-key="spark.yarn.max.executor.failures" />
+            <transfer operation="delete" delete-key="spark.driver.extraJavaOptions" />
+            <transfer operation="delete" delete-key="spark.yarn.am.extraJavaOptions" />
+          </definition>
+          <definition xsi:type="configure" id="hdp_2_4_0_0_spark_java_opts">
+            <type>spark-javaopts-properties</type>
+            <transfer operation="delete" delete-key="content" />
           </definition>
         </changes>
       </component>
@@ -442,7 +467,7 @@
             <transfer operation="delete" delete-key="xasecure.audit.credential.provider.file" />
             <transfer operation="delete" delete-key="xasecure.audit.destination.db.batch.filespool.dir" />
           </definition>
-          
+
           <definition xsi:type="configure" id="hdp_2_5_0_0_upgrade_storm_1.0">
             <type>storm-site</type>
             <replace key="_storm.thrift.nonsecure.transport" find="backtype.storm.security.auth.SimpleTransportPlugin"

+ 16 - 4
ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.4.xml

@@ -71,7 +71,7 @@
       <service name="FLUME">
         <component>FLUME_HANDLER</component>
       </service>
-      
+
       <service name="ACCUMULO">
         <component>ACCUMULO_TRACER</component>
         <component>ACCUMULO_GC</component>
@@ -304,9 +304,21 @@
       <execute-stage service="SPARK" component="SPARK_JOBHISTORYSERVER" title="Apply config changes for Spark JobHistoryServer">
         <task xsi:type="configure" id="hdp_2_4_0_0_spark_jobhistoryserver"/>
       </execute-stage>
+      <execute-stage service="SPARK" component="SPARK_JOBHISTORYSERVER" title="Apply config changes for Spark JobHistoryServer">
+        <task xsi:type="configure" id="hdp_2_4_0_0_spark_java_opts"/>
+      </execute-stage>
       <execute-stage service="SPARK" component="SPARK_THRIFTSERVER" title="Apply config changes for Spark ThriftServer">
         <task xsi:type="configure" id="hdp_2_4_0_0_spark_thriftserver"/>
       </execute-stage>
+      <execute-stage service="SPARK" component="SPARK_THRIFTSERVER" title="Apply config changes for Spark ThriftServer">
+        <task xsi:type="configure" id="hdp_2_4_0_0_spark_java_opts"/>
+      </execute-stage>
+      <execute-stage service="SPARK" component="SPARK_CLIENT" title="Apply config changes for Spark">
+        <task xsi:type="configure" id="hdp_2_4_0_0_remove_spark_properties_extraJavaOptions"/>
+      </execute-stage>
+      <execute-stage service="SPARK" component="SPARK_CLIENT" title="Apply config changes for Spark">
+        <task xsi:type="configure" id="hdp_2_4_0_0_spark_java_opts"/>
+      </execute-stage>
     </group>
 
     <!--
@@ -618,7 +630,7 @@
         <component>FLUME_HANDLER</component>
       </service>
     </group>
-    
+
     <group xsi:type="restart" name="ACCUMULO" title="Accumulo">
       <service-check>false</service-check>
       <skippable>true</skippable>
@@ -648,7 +660,7 @@
 
     <group xsi:type="cluster" name="FINALIZE_PRE_CHECK" title="Finalize {{direction.text.proper}} Pre-Check">
       <direction>UPGRADE</direction>
-      
+
       <execute-stage title="Check Component Versions">
         <task xsi:type="server_action" class="org.apache.ambari.server.serveraction.upgrades.ComponentVersionCheckAction" />
       </execute-stage>
@@ -1050,7 +1062,7 @@
             <function>delete_storm_local_data</function>
           </task>
         </pre-downgrade>
-        
+
         <upgrade>
           <task xsi:type="restart-task"/>
         </upgrade>

+ 17 - 5
ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.5.xml

@@ -80,7 +80,7 @@
       <service name="FLUME">
         <component>FLUME_HANDLER</component>
       </service>
-      
+
       <service name="ACCUMULO">
         <component>ACCUMULO_TRACER</component>
         <component>ACCUMULO_GC</component>
@@ -364,9 +364,21 @@
       <execute-stage service="SPARK" component="SPARK_JOBHISTORYSERVER" title="Apply config changes for Spark JobHistoryServer">
         <task xsi:type="configure" id="hdp_2_4_0_0_spark_jobhistoryserver"/>
       </execute-stage>
+      <execute-stage service="SPARK" component="SPARK_JOBHISTORYSERVER" title="Apply config changes for Spark JobHistoryServer">
+        <task xsi:type="configure" id="hdp_2_4_0_0_spark_java_opts"/>
+      </execute-stage>
       <execute-stage service="SPARK" component="SPARK_THRIFTSERVER" title="Apply config changes for Spark ThriftServer">
         <task xsi:type="configure" id="hdp_2_4_0_0_spark_thriftserver"/>
       </execute-stage>
+      <execute-stage service="SPARK" component="SPARK_THRIFTSERVER" title="Apply config changes for Spark ThriftServer">
+        <task xsi:type="configure" id="hdp_2_4_0_0_spark_java_opts"/>
+      </execute-stage>
+      <execute-stage service="SPARK" component="SPARK_CLIENT" title="Apply config changes for Spark">
+        <task xsi:type="configure" id="hdp_2_4_0_0_remove_spark_properties_extraJavaOptions"/>
+      </execute-stage>
+      <execute-stage service="SPARK" component="SPARK_CLIENT" title="Apply config changes for Spark">
+        <task xsi:type="configure" id="hdp_2_4_0_0_spark_java_opts"/>
+      </execute-stage>
 
       <!-- RANGER -->
       <execute-stage service="RANGER" component="RANGER_ADMIN" title="Apply config changes for Ranger Admin">
@@ -437,7 +449,7 @@
         </task>
       </execute-stage>
     </group>
-    
+
     <!-- Now, restart all of the services. -->
     <group xsi:type="restart" name="ZOOKEEPER" title="ZooKeeper">
       <service-check>false</service-check>
@@ -732,7 +744,7 @@
         <component>FLUME_HANDLER</component>
       </service>
     </group>
-    
+
     <group xsi:type="restart" name="ACCUMULO" title="Accumulo">
       <service-check>false</service-check>
       <skippable>true</skippable>
@@ -762,7 +774,7 @@
 
     <group xsi:type="cluster" name="FINALIZE_PRE_CHECK" title="Finalize {{direction.text.proper}} Pre-Check">
       <direction>UPGRADE</direction>
-      
+
       <execute-stage title="Check Component Versions">
         <task xsi:type="server_action" class="org.apache.ambari.server.serveraction.upgrades.ComponentVersionCheckAction" />
       </execute-stage>
@@ -1160,7 +1172,7 @@
             <function>delete_storm_local_data</function>
           </task>
         </pre-downgrade>
-        
+
         <upgrade>
           <task xsi:type="restart-task"/>
         </upgrade>

+ 8 - 1
ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.4.xml

@@ -573,7 +573,7 @@
           <task xsi:type="restart-task" />
         </upgrade>
       </component>
-      
+
       <component name="NFS_GATEWAY">
         <upgrade>
           <task xsi:type="restart-task" />
@@ -787,6 +787,7 @@
         <pre-downgrade /> <!--  no-op to prevent config changes on downgrade -->
         <pre-upgrade>
           <task xsi:type="configure" id="hdp_2_4_0_0_spark_jobhistoryserver"/>
+          <task xsi:type="configure" id="hdp_2_4_0_0_spark_java_opts"/>
         </pre-upgrade>
         <upgrade>
           <task xsi:type="restart-task" />
@@ -796,12 +797,18 @@
         <pre-downgrade /> <!--  no-op to prevent config changes on downgrade -->
         <pre-upgrade>
           <task xsi:type="configure" id="hdp_2_4_0_0_spark_thriftserver"/>
+          <task xsi:type="configure" id="hdp_2_4_0_0_spark_java_opts"/>
         </pre-upgrade>
         <upgrade>
           <task xsi:type="restart-task"/>
         </upgrade>
       </component>
       <component name="SPARK_CLIENT">
+        <pre-downgrade /> <!--  no-op to prevent config changes on downgrade -->
+        <pre-upgrade>
+          <task xsi:type="configure" id="hdp_2_4_0_0_remove_spark_properties_extraJavaOptions"/>
+          <task xsi:type="configure" id="hdp_2_4_0_0_spark_java_opts"/>
+        </pre-upgrade>
         <upgrade>
           <task xsi:type="restart-task" />
         </upgrade>

+ 8 - 1
ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.5.xml

@@ -639,7 +639,7 @@
           <task xsi:type="restart-task" />
         </upgrade>
       </component>
-      
+
       <component name="NFS_GATEWAY">
         <upgrade>
           <task xsi:type="restart-task" />
@@ -874,6 +874,7 @@
         <pre-downgrade /> <!--  no-op to prevent config changes on downgrade -->
         <pre-upgrade>
           <task xsi:type="configure" id="hdp_2_4_0_0_spark_jobhistoryserver"/>
+          <task xsi:type="configure" id="hdp_2_4_0_0_spark_java_opts"/>
         </pre-upgrade>
         <upgrade>
           <task xsi:type="restart-task" />
@@ -883,12 +884,18 @@
         <pre-downgrade /> <!--  no-op to prevent config changes on downgrade -->
         <pre-upgrade>
           <task xsi:type="configure" id="hdp_2_4_0_0_spark_thriftserver"/>
+          <task xsi:type="configure" id="hdp_2_4_0_0_spark_java_opts"/>
         </pre-upgrade>
         <upgrade>
           <task xsi:type="restart-task"/>
         </upgrade>
       </component>
       <component name="SPARK_CLIENT">
+        <pre-downgrade /> <!--  no-op to prevent config changes on downgrade -->
+        <pre-upgrade>
+          <task xsi:type="configure" id="hdp_2_4_0_0_remove_spark_properties_extraJavaOptions"/>
+          <task xsi:type="configure" id="hdp_2_4_0_0_spark_java_opts"/>
+        </pre-upgrade>
         <upgrade>
           <task xsi:type="restart-task" />
         </upgrade>

+ 0 - 2
ambari-server/src/main/resources/stacks/HDP/2.5/services/SPARK/configuration/spark-defaults.xml → ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/configuration/spark-defaults.xml

@@ -21,13 +21,11 @@
 <configuration supports_final="true">
   <property>
     <name>spark.driver.extraJavaOptions</name>
-    <value></value>
     <deleted>true</deleted>
     <on-ambari-upgrade add="false"/>
   </property>
   <property>
     <name>spark.yarn.am.extraJavaOptions</name>
-    <value></value>
     <deleted>true</deleted>
     <on-ambari-upgrade add="false"/>
   </property>

+ 32 - 0
ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/configuration/spark-thrift-sparkconf.xml

@@ -0,0 +1,32 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+<configuration supports_final="true">
+  <property>
+    <name>spark.driver.extraJavaOptions</name>
+    <deleted>true</deleted>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
+    <name>spark.yarn.am.extraJavaOptions</name>
+    <deleted>true</deleted>
+    <on-ambari-upgrade add="false"/>
+  </property>
+</configuration>

+ 10 - 11
ambari-server/src/test/python/stacks/2.2/SPARK/test_job_history_server.py

@@ -227,16 +227,16 @@ class TestJobHistoryServer(RMFTestCase):
         group = 'spark',
         mode = 0644
     )
-    self.assertResourceCalled('File', '/usr/hdp/current/spark-client/conf/java-opts',
-        content = InlineTemplate(' '),
+    self.assertResourceCalled('Directory', '/usr/hdp/current/spark-client/logs',
         owner = 'spark',
         group = 'spark',
-        mode = 0644
+        mode = 0755,
     )
-    self.assertResourceCalled('Directory', '/usr/hdp/current/spark-client/logs',
+    self.assertResourceCalled('File', '/usr/hdp/current/spark-client/conf/java-opts',
+        content = InlineTemplate('  -Dhdp.version=None'),
         owner = 'spark',
         group = 'spark',
-        mode = 0755,
+        mode = 0644
     )
 
   def assert_configure_secured(self):
@@ -308,19 +308,18 @@ class TestJobHistoryServer(RMFTestCase):
         group = 'spark',
         mode = 0644
     )
-    self.assertResourceCalled('File', '/usr/hdp/current/spark-client/conf/java-opts',
-        content = InlineTemplate(' '),
+    self.assertResourceCalled('Directory', '/usr/hdp/current/spark-client/logs',
         owner = 'spark',
         group = 'spark',
-        mode = 0644
+        mode = 0755,
     )
-    self.assertResourceCalled('Directory', '/usr/hdp/current/spark-client/logs',
+    self.assertResourceCalled('File', '/usr/hdp/current/spark-client/conf/java-opts',
+        content = InlineTemplate('  -Dhdp.version=None'),
         owner = 'spark',
         group = 'spark',
-        mode = 0755,
+        mode = 0644
     )
 
-
   @patch("resource_management.libraries.functions.copy_tarball.copy_to_hdfs")
   def test_pre_upgrade_restart_23(self, copy_to_hdfs_mock):
     config_file = self.get_src_folder()+"/test/python/stacks/2.2/configs/default.json"

+ 10 - 12
ambari-server/src/test/python/stacks/2.2/SPARK/test_spark_client.py

@@ -89,19 +89,18 @@ class TestSparkClient(RMFTestCase):
         group = 'spark',
         mode = 0644
     )
-    self.assertResourceCalled('File', '/usr/hdp/current/spark-client/conf/java-opts',
-        content = InlineTemplate(' '),
+    self.assertResourceCalled('Directory', '/usr/hdp/current/spark-client/logs',
         owner = 'spark',
         group = 'spark',
-        mode = 0644
+        mode = 0755,
     )
-    self.assertResourceCalled('Directory', '/usr/hdp/current/spark-client/logs',
+    self.assertResourceCalled('File', '/usr/hdp/current/spark-client/conf/java-opts',
+        content = InlineTemplate('  -Dhdp.version=None'),
         owner = 'spark',
         group = 'spark',
-        mode = 0755,
+        mode = 0644
     )
 
-
   def assert_configure_secured(self):
     self.assertResourceCalled('Directory', '/var/run/spark',
         owner = 'spark',
@@ -140,19 +139,18 @@ class TestSparkClient(RMFTestCase):
         group = 'spark',
         mode = 0644
     )
-    self.assertResourceCalled('File', '/usr/hdp/current/spark-client/conf/java-opts',
-        content = InlineTemplate(' '),
+    self.assertResourceCalled('Directory', '/usr/hdp/current/spark-client/logs',
         owner = 'spark',
         group = 'spark',
-        mode = 0644
+        mode = 0755,
     )
-    self.assertResourceCalled('Directory', '/usr/hdp/current/spark-client/logs',
+    self.assertResourceCalled('File', '/usr/hdp/current/spark-client/conf/java-opts',
+        content = InlineTemplate('  -Dhdp.version=None'),
         owner = 'spark',
         group = 'spark',
-        mode = 0755,
+        mode = 0644
     )
 
-
   def test_pre_upgrade_restart_23(self):
     config_file = self.get_src_folder()+"/test/python/stacks/2.2/configs/default.json"
     with open(config_file, "r") as f:

+ 6 - 6
ambari-server/src/test/python/stacks/2.3/SPARK/test_spark_thrift_server.py

@@ -147,12 +147,6 @@ class TestSparkThriftServer(RMFTestCase):
         group = 'spark',
         mode = 0644
     )
-    self.assertResourceCalled('File', '/usr/hdp/current/spark-client/conf/java-opts',
-        content = InlineTemplate(' '),
-        owner = 'spark',
-        group = 'spark',
-        mode = 0644
-    )
     self.assertResourceCalled('Directory', '/usr/hdp/current/spark-client/logs',
         owner = 'spark',
         group = 'spark',
@@ -165,6 +159,12 @@ class TestSparkThriftServer(RMFTestCase):
         properties = self.getConfig()['configurations']['spark-thrift-sparkconf'],
         mode = 0644
     )
+    self.assertResourceCalled('File', '/usr/hdp/current/spark-client/conf/java-opts',
+        content = InlineTemplate('  -Dhdp.version=None'),
+        owner = 'spark',
+        group = 'spark',
+        mode = 0644
+    )
 
   @patch("resource_management.libraries.functions.copy_tarball.copy_to_hdfs")
   def test_pre_upgrade_restart_23(self, copy_to_hdfs_mock):