@@ -131,8 +131,8 @@ def hive(name=None):
   # *********************************
   # HDP 2.2 or higher, copy mapreduce.tar.gz to HDFS
   if params.hdp_stack_version_major != "" and compare_versions(params.hdp_stack_version_major, '2.2') >= 0:
-    copy_to_hdfs("mapreduce", params.user_group, params.hdfs_user)
-    copy_to_hdfs("tez", params.user_group, params.hdfs_user)
+    copy_to_hdfs("mapreduce", params.user_group, params.hdfs_user, host_sys_prepped=params.host_sys_prepped)
+    copy_to_hdfs("tez", params.user_group, params.hdfs_user, host_sys_prepped=params.host_sys_prepped)
 
   # Always copy pig.tar.gz and hive.tar.gz using the appropriate mode.
   # This can use a different source and dest location to account for both HDP 2.1 and 2.2
@@ -141,13 +141,15 @@ def hive(name=None):
                params.hdfs_user,
                file_mode=params.tarballs_mode,
                custom_source_file=params.pig_tar_source,
-               custom_dest_file=params.pig_tar_dest_file)
+               custom_dest_file=params.pig_tar_dest_file,
+               host_sys_prepped=params.host_sys_prepped)
   copy_to_hdfs("hive",
                params.user_group,
                params.hdfs_user,
                file_mode=params.tarballs_mode,
                custom_source_file=params.hive_tar_source,
-               custom_dest_file=params.hive_tar_dest_file)
+               custom_dest_file=params.hive_tar_dest_file,
+               host_sys_prepped=params.host_sys_prepped)
 
   wildcard_tarballs = ["sqoop", "hadoop_streaming"]
   for tarball_name in wildcard_tarballs:
@@ -167,7 +169,8 @@ def hive(name=None):
                    params.hdfs_user,
                    file_mode=params.tarballs_mode,
                    custom_source_file=source_file,
-                   custom_dest_file=dest_file)
+                   custom_dest_file=dest_file,
+                   host_sys_prepped=params.host_sys_prepped)
 
   # ******* End Copy Tarballs *******
   # *********************************
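
Every hunk above makes the same mechanical change: each existing copy_to_hdfs call gains a host_sys_prepped=params.host_sys_prepped keyword argument. The helper itself is not shown in the diff, so the following is only a minimal sketch of how such a flag might be honored, under the assumption (not confirmed by the diff) that a sys-prepped host already has the tarballs staged in HDFS and the copy can be skipped:

# Sketch only -- this is NOT Ambari's real copy_tarball module. It assumes
# a sys-prepped host already has the tarballs staged in HDFS, so the
# helper short-circuits instead of re-uploading them.
def copy_to_hdfs(name, user_group, owner, file_mode=0o444,
                 custom_source_file=None, custom_dest_file=None,
                 host_sys_prepped=False):
  """Upload the named tarball to HDFS unless the host is sys-prepped."""
  if host_sys_prepped:
    # Hypothetical short-circuit: sys-prepped images are assumed to ship
    # with the tarballs pre-staged, so re-uploading would be redundant.
    print("host is sys-prepped, skipping HDFS copy of %s" % name)
    return False
  source = custom_source_file or "/usr/hdp/current/%s.tar.gz" % name
  dest = custom_dest_file or "/hdp/apps/%s.tar.gz" % name
  print("copying %s to %s (owner=%s, group=%s, mode=%o)"
        % (source, dest, owner, user_group, file_mode))
  # The real upload (e.g. an HdfsResource or WebHDFS call) would go here.
  return True

Passing the flag as a keyword argument (presumably defaulting to False in the real helper) keeps the existing positional signature intact, so any call site that was not updated retains its old always-copy behavior.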