- """
- Licensed to the Apache Software Foundation (ASF) under one
- or more contributor license agreements. See the NOTICE file
- distributed with this work for additional information
- regarding copyright ownership. The ASF licenses this file
- to you under the Apache License, Version 2.0 (the
- "License"); you may not use this file except in compliance
- with the License. You may obtain a copy of the License at
- http://www.apache.org/licenses/LICENSE-2.0
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
- Ambari Agent
- """
- from resource_management import *
- from resource_management.libraries.functions import conf_select
- from resource_management.libraries.functions import stack_select
- from resource_management.libraries.functions import format
- from resource_management.libraries.functions.version import format_stack_version
- from resource_management.libraries.functions.default import default
- from resource_management.libraries.functions import get_kinit_path
- from resource_management.libraries.script.script import Script
- # server configurations
- config = Script.get_config()
- tmp_dir = Script.get_tmp_dir()
- stack_name = default("/hostLevelParams/stack_name", None)
- host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
- stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
- stack_version_formatted = format_stack_version(stack_version_unformatted)
- # New Cluster Stack Version that is defined during the RESTART of a Rolling Upgrade
- version = default("/commandParams/version", None)
- #mahout params
- mahout_home = "/usr/hdp/current/mahout-client"
- mahout_conf_dir = "/usr/hdp/current/mahout-client/conf"
- mahout_user = config['configurations']['mahout-env']['mahout_user']
- yarn_log_dir_prefix = config['configurations']['yarn-env']['yarn_log_dir_prefix']
- #hadoop params
- hadoop_bin_dir = stack_select.get_hadoop_dir("bin")
- hadoop_home = stack_select.get_hadoop_dir("home")
- # the configuration direction for HDFS/YARN/MapR is the hadoop config
- # directory, which is symlinked by hadoop-client only
- hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
- hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
- yarn_user = config['configurations']['yarn-env']['yarn_user']
- hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
- hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
- smokeuser = config['configurations']['cluster-env']['smokeuser']
- smokeuser_principal = config['configurations']['cluster-env']['smokeuser_principal_name']
- user_group = config['configurations']['cluster-env']['user_group']
- security_enabled = config['configurations']['cluster-env']['security_enabled']
- smoke_user_keytab = config['configurations']['cluster-env']['smokeuser_keytab']
- kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
- # not supporting 32 bit jdk.
- java64_home = config['hostLevelParams']['java_home']
- log4j_props = config['configurations']['mahout-log4j']['content']
- hdfs_site = config['configurations']['hdfs-site']
- default_fs = config['configurations']['core-site']['fs.defaultFS']
- dfs_type = default("/commandParams/dfs_type", "")
- import functools
- #create partial functions with common arguments for every HdfsResource call
- #to create/delete hdfs directory/file/copyfromlocal we need to call params.HdfsResource in code
- HdfsResource = functools.partial(
- HdfsResource,
- user=hdfs_user,
- hdfs_resource_ignore_file = "/var/lib/ambari-agent/data/.hdfs_resource_ignore",
- security_enabled = security_enabled,
- keytab = hdfs_user_keytab,
- kinit_path_local = kinit_path_local,
- hadoop_bin_dir = hadoop_bin_dir,
- hadoop_conf_dir = hadoop_conf_dir,
- principal_name = hdfs_principal_name,
- hdfs_site = hdfs_site,
- default_fs = default_fs,
- dfs_type = dfs_type
- )