Kaynağa Gözat

AMBARI-14299. Ranger env files are incorrectly sourced on EU (ncole)

Nate Cole 9 yıl önce
ebeveyn
işleme
e90fd73010

+ 7 - 3
ambari-common/src/main/python/resource_management/libraries/functions/version.py

@@ -60,14 +60,18 @@ def format_hdp_stack_version(input):
   return ""
   return ""
 
 
 
 
def compare_versions(version1, version2, format=False):
  """
  Used to compare either Ambari Versions, or Stack versions
  E.g., Ambari version 1.6.1 vs 1.7.0,
  Stack Version 2.0.6.0 vs 2.2.0.0
  :param version1: First parameter for version
  :param version2: Second parameter for version
  :param format: optionally format the versions via format_hdp_stack_version before comparing them
  :return: Returns -1 if version1 is before version2, 0 if they are equal, and 1 if version1 is after version2
  """
  # Optionally canonicalize both inputs first so short forms like "2.3"
  # compare correctly against full stack versions like "2.3.4.0".
  v1 = version1 if not format else format_hdp_stack_version(version1)
  v2 = version2 if not format else format_hdp_stack_version(version2)

  # Normalize both to the same number of dot-separated segments before
  # comparing, so differing segment counts don't skew the result.
  max_segments = max(len(v1.split(".")), len(v2.split(".")))
  return cmp(_normalize(v1, desired_segments=max_segments), _normalize(v2, desired_segments=max_segments))

+ 13 - 1
ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/setup_ranger_hdfs.py

@@ -17,13 +17,18 @@ See the License for the specific language governing permissions and
 limitations under the License.
 limitations under the License.
 
 
 """
 """
+import os
 from resource_management.core.logger import Logger
 from resource_management.core.logger import Logger
+from resource_management.core.resources.system import Execute
+from resource_management.libraries.functions.constants import Direction
+from resource_management.libraries.functions.format import format
+from resource_management.libraries.functions.version import compare_versions
 
 
 def setup_ranger_hdfs(upgrade_type=None):
 def setup_ranger_hdfs(upgrade_type=None):
   import params
   import params
 
 
   if params.has_ranger_admin:
   if params.has_ranger_admin:
-    
+
     if params.xml_configurations_supported:
     if params.xml_configurations_supported:
       from resource_management.libraries.functions.setup_ranger_plugin_xml import setup_ranger_plugin
       from resource_management.libraries.functions.setup_ranger_plugin_xml import setup_ranger_plugin
     else:
     else:
@@ -54,6 +59,13 @@ def setup_ranger_hdfs(upgrade_type=None):
                         credential_file=params.credential_file, xa_audit_db_password=params.xa_audit_db_password, 
                         credential_file=params.credential_file, xa_audit_db_password=params.xa_audit_db_password, 
                         ssl_truststore_password=params.ssl_truststore_password, ssl_keystore_password=params.ssl_keystore_password,
                         ssl_truststore_password=params.ssl_truststore_password, ssl_keystore_password=params.ssl_keystore_password,
                         hdp_version_override = hdp_version, skip_if_rangeradmin_down= not params.retryAble)
                         hdp_version_override = hdp_version, skip_if_rangeradmin_down= not params.retryAble)
+
+    if hdp_version and params.upgrade_direction == Direction.UPGRADE:
+      # when upgrading to 2.3+, this env file must be removed
+      if compare_versions(hdp_version, '2.3', format=True) > 0:
+        source_file = os.path.join(params.hadoop_conf_dir, 'set-hdfs-plugin-env.sh')
+        target_file = source_file + ".bak"
+        Execute(("mv", source_file, target_file), sudo=True, only_if=format("test -f {source_file}"))
   else:
   else:
     Logger.info('Ranger admin not installed')
     Logger.info('Ranger admin not installed')
 
 

+ 37 - 1
ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py

@@ -1426,6 +1426,42 @@ class TestNamenode(RMFTestCase):
     self.assertTrue(len(calls) >= 1)
     self.assertTrue(len(calls) >= 1)
     self.assertTrue(calls[0].startsWith("conf-select create-conf-dir --package hadoop --stack-version 2.3.2.0-2844 --conf-version 0"))
     self.assertTrue(calls[0].startsWith("conf-select create-conf-dir --package hadoop --stack-version 2.3.2.0-2844 --conf-version 0"))
 
 
+
+  @patch("hdfs_namenode.is_active_namenode")
+  @patch("resource_management.libraries.functions.setup_ranger_plugin_xml.setup_ranger_plugin")
+  @patch("utils.get_namenode_states")
+  def test_upgrade_restart_eu_with_ranger(self, get_namenode_states_mock, setup_ranger_plugin_mock, is_active_nn_mock):
+    is_active_nn_mock.return_value = True
+
+    config_file = self.get_src_folder()+"/test/python/stacks/2.0.6/configs/nn_eu.json"
+    with open(config_file, "r") as f:
+      json_content = json.load(f)
+    version = '2.3.4.0-1111'
+    json_content['commandParams']['version'] = version
+
+    active_namenodes = [('nn1', 'c6401.ambari.apache.org:50070')]
+    standby_namenodes = [('nn2', 'c6402.ambari.apache.org:50070')]
+    unknown_namenodes = []
+
+    mocks_dict = {}
+    get_namenode_states_mock.return_value = active_namenodes, standby_namenodes, unknown_namenodes
+    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
+                       classname = "NameNode",
+                       command = "start",
+                       command_args=["nonrolling"],
+                       config_dict = json_content,
+                       hdp_stack_version = self.STACK_VERSION,
+                       target = RMFTestCase.TARGET_COMMON_SERVICES,
+                       call_mocks = [(0, None, ''), (0, None)],
+                       mocks_dict=mocks_dict)
+
+    self.assertTrue(setup_ranger_plugin_mock.called)
+
+    self.assertResourceCalledByIndex(7, 'Execute',
+      ('mv', '/usr/hdp/2.3.4.0-1111/hadoop/conf/set-hdfs-plugin-env.sh', '/usr/hdp/2.3.4.0-1111/hadoop/conf/set-hdfs-plugin-env.sh.bak'),
+      only_if='test -f /usr/hdp/2.3.4.0-1111/hadoop/conf/set-hdfs-plugin-env.sh',
+      sudo=True)
+
   def test_pre_upgrade_restart(self):
   def test_pre_upgrade_restart(self):
     config_file = self.get_src_folder()+"/test/python/stacks/2.0.6/configs/default.json"
     config_file = self.get_src_folder()+"/test/python/stacks/2.0.6/configs/default.json"
     with open(config_file, "r") as f:
     with open(config_file, "r") as f:
@@ -1440,7 +1476,7 @@ class TestNamenode(RMFTestCase):
                        target = RMFTestCase.TARGET_COMMON_SERVICES)
                        target = RMFTestCase.TARGET_COMMON_SERVICES)
     self.assertResourceCalled('Execute',
     self.assertResourceCalled('Execute',
                               ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hadoop-hdfs-namenode', version), sudo=True)
                               ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hadoop-hdfs-namenode', version), sudo=True)
-    self.assertNoMoreResources()
+
 
 
   @patch("resource_management.core.shell.call")
   @patch("resource_management.core.shell.call")
   def test_pre_upgrade_restart_23(self, call_mock):
   def test_pre_upgrade_restart_23(self, call_mock):

Dosya farkı çok büyük olduğundan ihmal edildi
+ 173 - 0
ambari-server/src/test/python/stacks/2.0.6/configs/nn_eu.json


Bu fark içinde çok fazla dosya değişikliği olduğu için bazı dosyalar gösterilmiyor