Browse Source

AMBARI-10055. RU failed on RESTART Oozie Server because hadoop-lzo should only be copied if lzo compression is enabled (alejandro)

Alejandro Fernandez 10 years ago
parent
commit
5c4d8bb1df

+ 10 - 8
ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params.py

@@ -266,26 +266,28 @@ HdfsDirectory = functools.partial(
   bin_dir = hadoop_bin_dir
 )
 
-io_compression_codecs = config['configurations']['core-site']['io.compression.codecs']
-lzo_enabled = "com.hadoop.compression.lzo" in io_compression_codecs
+# The logic for LZO also exists in OOZIE's params.py
+io_compression_codecs = default("/configurations/core-site/io.compression.codecs", None)
+lzo_enabled = io_compression_codecs is not None and "com.hadoop.compression.lzo" in io_compression_codecs.lower()
+
 # stack_is_hdp22_or_further
-underscorred_version = stack_version_unformatted.replace('.', '_')
+underscored_version = stack_version_unformatted.replace('.', '_')
 dashed_version = stack_version_unformatted.replace('.', '-')
 lzo_packages_to_family = {
-  "any": ["hadoop-lzo"],
+  "any": ["hadoop-lzo", ],
   "redhat": ["lzo", "hadoop-lzo-native"],
   "suse": ["lzo", "hadoop-lzo-native"],
-  "ubuntu": ["liblzo2-2"]
+  "ubuntu": ["liblzo2-2", ]
 }
 
 if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:
-  lzo_packages_to_family["redhat"] += [format("hadooplzo_{underscorred_version}_*")]
-  lzo_packages_to_family["suse"] += [format("hadooplzo_{underscorred_version}_*")]
+  lzo_packages_to_family["redhat"] += [format("hadooplzo_{underscored_version}_*")]
+  lzo_packages_to_family["suse"] += [format("hadooplzo_{underscored_version}_*")]
   lzo_packages_to_family["ubuntu"] += [format("hadooplzo_{dashed_version}_*")]
 
 lzo_packages_for_current_host = lzo_packages_to_family['any'] + lzo_packages_to_family[System.get_instance().os_family]
 all_lzo_packages = set(itertools.chain(*lzo_packages_to_family.values()))
- 
+
 exclude_packages = []
 if not lzo_enabled:
   exclude_packages += all_lzo_packages

+ 1 - 1
ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py

@@ -182,7 +182,7 @@ def oozie_server_specific():
     Execute(format('{sudo} chown {oozie_user}:{user_group} {oozie_libext_dir}/falcon-oozie-el-extension-*.jar'),
       not_if  = no_op_test,
     )
-  if params.lzo_enabled:
+  if params.lzo_enabled and len(params.lzo_packages_for_current_host) > 0:
     Package(params.lzo_packages_for_current_host)
     Execute(format('{sudo} cp {hadoop_lib_home}/hadoop-lzo*.jar {oozie_lib_dir}'),
       not_if  = no_op_test,

+ 7 - 4
ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server_upgrade.py

@@ -119,7 +119,10 @@ def prepare_libext_directory():
   # /usr/hdp/current/hadoop-client ; we must use params.version directly
   # however, this only works when upgrading beyond 2.2.0.0; don't do this
   # for downgrade to 2.2.0.0 since hadoop-lzo will not be present
-  if params.upgrade_direction == Direction.UPGRADE or target_version_needs_compression_libraries:
+  # This can also be called during a Downgrade.
+ # When a version is Installed, it is responsible for downloading the hadoop-lzo packages
+  # if lzo is enabled.
+  if params.lzo_enabled and (params.upgrade_direction == Direction.UPGRADE or target_version_needs_compression_libraries):
     hadoop_lzo_pattern = 'hadoop-lzo*.jar'
     hadoop_client_new_lib_dir = format("/usr/hdp/{version}/hadoop/lib")
 
@@ -132,9 +135,9 @@ def prepare_libext_directory():
     files_copied = False
     for file in files:
       if os.path.isfile(file):
-        files_copied = True
         Logger.info("Copying {0} to {1}".format(str(file), params.oozie_libext_customer_dir))
-        shutil.copy(file, params.oozie_libext_customer_dir)
+        shutil.copy2(file, params.oozie_libext_customer_dir)
+        files_copied = True
 
     if not files_copied:
       raise Fail("There are no files at {0} matching {1}".format(
@@ -146,7 +149,7 @@ def prepare_libext_directory():
     raise Fail("Unable to copy {0} because it does not exist".format(oozie_ext_zip_file))
 
   Logger.info("Copying {0} to {1}".format(oozie_ext_zip_file, params.oozie_libext_customer_dir))
-  shutil.copy(oozie_ext_zip_file, params.oozie_libext_customer_dir)
+  shutil.copy2(oozie_ext_zip_file, params.oozie_libext_customer_dir)
 
 
 def upgrade_oozie():

+ 9 - 11
ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params.py

@@ -205,25 +205,23 @@ HdfsDirectory = functools.partial(
   bin_dir = hadoop_bin_dir
 )
 
-#LZO support
-
-io_compression_codecs = config['configurations']['core-site']['io.compression.codecs']
-lzo_enabled = "com.hadoop.compression.lzo" in io_compression_codecs
+# The logic for LZO also exists in HDFS' params.py
+io_compression_codecs = default("/configurations/core-site/io.compression.codecs", None)
+lzo_enabled = io_compression_codecs is not None and "com.hadoop.compression.lzo" in io_compression_codecs.lower()
 # stack_is_hdp22_or_further
-underscorred_version = stack_version_unformatted.replace('.', '_')
+underscored_version = stack_version_unformatted.replace('.', '_')
 dashed_version = stack_version_unformatted.replace('.', '-')
 lzo_packages_to_family = {
-  "any": ["hadoop-lzo"],
+  "any": ["hadoop-lzo", ],
   "redhat": ["lzo", "hadoop-lzo-native"],
   "suse": ["lzo", "hadoop-lzo-native"],
-  "ubuntu": ["liblzo2-2"]
+  "ubuntu": ["liblzo2-2", ]
 }
 
 if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:
-  lzo_packages_to_family["redhat"] += [format("hadooplzo_{underscorred_version}_*")]
-  lzo_packages_to_family["suse"] += [format("hadooplzo_{underscorred_version}_*")]
+  lzo_packages_to_family["redhat"] += [format("hadooplzo_{underscored_version}_*")]
+  lzo_packages_to_family["suse"] += [format("hadooplzo_{underscored_version}_*")]
   lzo_packages_to_family["ubuntu"] += [format("hadooplzo_{dashed_version}_*")]
 
 lzo_packages_for_current_host = lzo_packages_to_family['any'] + lzo_packages_to_family[System.get_instance().os_family]
-all_lzo_packages = set(itertools.chain(*lzo_packages_to_family.values()))
-
+all_lzo_packages = set(itertools.chain(*lzo_packages_to_family.values()))

+ 10 - 4
ambari-server/src/main/resources/custom_actions/scripts/install_packages.py

@@ -204,6 +204,14 @@ class InstallPackages(Script):
     :return: filtered package_list
     """
     filtered_package_list = []
+
+    # hadoop-lzo package is installed only if LZO compression is enabled
+    lzo_packages = ['hadoop-lzo', 'lzo', 'hadoop-lzo-native', 'liblzo2-2', 'hadooplzo']
+    has_lzo = False
+    io_compression_codecs = default("/configurations/core-site/io.compression.codecs", None)
+    if io_compression_codecs:
+      has_lzo = "com.hadoop.compression.lzo" in io_compression_codecs.lower()
+
     for package in package_list:
       skip_package = False
       # mysql* package logic is managed at HIVE scripts
@@ -212,10 +220,8 @@ class InstallPackages(Script):
       # Ambari metrics packages should not be upgraded during RU
       if package['name'].startswith('ambari-metrics'):
         skip_package = True
-      # hadooplzo package is installed only if LZO comperession is enabled
-      io_compression_codecs = default("/configurations/core-site/io.compression.codecs", None)
-      if not io_compression_codecs or "com.hadoop.compression.lzo" not in io_compression_codecs:
-        lzo_packages = ['hadoop-lzo', 'lzo', 'hadoop-lzo-native', 'liblzo2-2', 'hadooplzo']
+
+      if not has_lzo:
         for lzo_package in lzo_packages:
           if package['name'].startswith(lzo_package):
             skip_package = True

+ 3 - 3
ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py

@@ -793,7 +793,7 @@ class TestOozieServer(RMFTestCase):
   @patch("os.chmod")
   @patch("shutil.rmtree", new = MagicMock())
   @patch("glob.iglob")
-  @patch("shutil.copy", new = MagicMock())
+  @patch("shutil.copy2", new = MagicMock())
   @patch.object(shell, "call")
   def test_upgrade(self, call_mock, glob_mock, chmod_mock, remove_mock,
       isfile_mock, exists_mock, isdir_mock, tarfile_open_mock):
@@ -845,7 +845,7 @@ class TestOozieServer(RMFTestCase):
   @patch("os.remove")
   @patch("os.chmod")
   @patch("shutil.rmtree", new = MagicMock())
-  @patch("shutil.copy", new = MagicMock())
+  @patch("shutil.copy2", new = MagicMock())
   @patch.object(shell, "call")
   def test_downgrade_no_compression_library_copy(self, call_mock, chmod_mock, remove_mock,
       isfile_mock, exists_mock, isdir_mock, tarfile_open_mock):
@@ -891,7 +891,7 @@ class TestOozieServer(RMFTestCase):
   @patch("os.chmod")
   @patch("shutil.rmtree", new = MagicMock())
   @patch("glob.iglob", new = MagicMock(return_value=["/usr/hdp/2.2.1.0-2187/hadoop/lib/hadoop-lzo-0.6.0.2.2.1.0-2187.jar"]))
-  @patch("shutil.copy")
+  @patch("shutil.copy2")
   @patch.object(shell, "call")
   def test_upgrade_failed_prepare_war(self, call_mock, shutil_copy_mock, chmod_mock, remove_mock,
       isfile_mock, exists_mock, isdir_mock, tarfile_open_mock):

+ 1 - 1
ambari-server/src/test/python/stacks/2.2/configs/oozie-downgrade.json

@@ -94,7 +94,7 @@
             "fs.trash.interval": "360", 
             "hadoop.proxyuser.hive.groups": "users", 
             "ipc.server.tcpnodelay": "true", 
-            "io.compression.codecs": "org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.SnappyCodec", 
+            "io.compression.codecs": "org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.SnappyCodec,com.hadoop.compression.lzo.LzoCodec",
             "ipc.client.idlethreshold": "8000", 
             "io.file.buffer.size": "131072", 
             "io.serializations": "org.apache.hadoop.io.serializer.WritableSerialization", 

+ 1 - 1
ambari-server/src/test/python/stacks/2.2/configs/oozie-upgrade.json

@@ -93,7 +93,7 @@
             "fs.trash.interval": "360", 
             "hadoop.proxyuser.hive.groups": "users", 
             "ipc.server.tcpnodelay": "true", 
-            "io.compression.codecs": "org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.SnappyCodec", 
+            "io.compression.codecs": "org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.SnappyCodec,com.hadoop.compression.lzo.LzoCodec",
             "ipc.client.idlethreshold": "8000", 
             "io.file.buffer.size": "131072", 
             "io.serializations": "org.apache.hadoop.io.serializer.WritableSerialization",