@@ -23,7 +23,10 @@ from resource_management.libraries.script.script import Script
 from resource_management.libraries.resources.hdfs_resource import HdfsResource
 from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
+from resource_management.libraries.functions import StackFeature
+from resource_management.libraries.functions.stack_features import check_stack_feature
 from resource_management.libraries.functions.version import format_stack_version
+from resource_management.libraries.functions.format import format
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions import get_kinit_path
 from resource_management.libraries.functions.get_not_managed_resources import get_not_managed_resources
@@ -33,6 +36,7 @@ config = Script.get_config()
 tmp_dir = Script.get_tmp_dir()
 
 stack_name = default("/hostLevelParams/stack_name", None)
+stack_root = Script.get_stack_root()
 
 stack_version_unformatted = config['hostLevelParams']['stack_version']
 stack_version_formatted = format_stack_version(stack_version_unformatted)
@@ -47,11 +51,11 @@ pig_conf_dir = "/etc/pig/conf"
 hadoop_home = '/usr'
 pig_bin_dir = ""
 
-# hadoop parameters for 2.2+
-if Script.is_stack_greater_or_equal("2.2"):
-  pig_conf_dir = "/usr/hdp/current/pig-client/conf"
+# hadoop parameters for stack versions supporting rolling_upgrade
+if stack_version_formatted and check_stack_feature(StackFeature.ROLLING_UPGRADE, stack_version_formatted):
+  pig_conf_dir = format("{stack_root}/current/pig-client/conf")
   hadoop_home = stack_select.get_hadoop_dir("home")
-  pig_bin_dir = '/usr/hdp/current/pig-client/bin'
+  pig_bin_dir = format("{stack_root}/current/pig-client/bin")
 
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
 hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
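
Not part of the patch: a minimal standalone sketch of what the new feature-gated block computes, assuming stack_root resolves to "/usr/hdp" (the HDP default) and that the running stack supports the rolling_upgrade feature. The real code uses Script.get_stack_root(), check_stack_feature(StackFeature.ROLLING_UPGRADE, ...) and the resource_management format() helper, which interpolates variables from the caller's scope; plain str.format() stands in for it here.

# Illustrative sketch only; values below are assumptions, not patch code.
stack_root = "/usr/hdp"            # what Script.get_stack_root() returns on HDP
rolling_upgrade_supported = True   # result of check_stack_feature(StackFeature.ROLLING_UPGRADE, ...)

# defaults from the unchanged part of params
pig_conf_dir = "/etc/pig/conf"
pig_bin_dir = ""

if rolling_upgrade_supported:
  # the patch replaces the hard-coded /usr/hdp/... paths with {stack_root}-based ones
  pig_conf_dir = "{0}/current/pig-client/conf".format(stack_root)
  pig_bin_dir = "{0}/current/pig-client/bin".format(stack_root)

print(pig_conf_dir)  # /usr/hdp/current/pig-client/conf
print(pig_bin_dir)   # /usr/hdp/current/pig-client/bin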