# params.py
#!/usr/bin/env python
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import functools

from functions import calc_xmn_from_xms
from resource_management import *
import functions
import status_params
  20. # server configurations
  21. config = Script.get_config()
  22. exec_tmp_dir = Script.get_tmp_dir()
  23. #AMS data
  24. ams_user = status_params.ams_user
  25. ams_pid_dir = status_params.ams_collector_pid_dir
  26. ams_collector_script = "/usr/sbin/ambari-metrics-collector"
  27. ams_collector_conf_dir = "/etc/ambari-metrics-collector/conf"
  28. ams_collector_pid_dir = status_params.ams_collector_pid_dir
  29. ams_collector_hosts = default("/clusterHostInfo/metric_collector_hosts", [])
  30. ams_collector_host_single = ams_collector_hosts[0] #TODO cardinality is 1+ so we can have more than one host
  31. metric_collector_port = default("/configurations/ams-site/timeline.metrics.service.webapp.address", "0.0.0.0:8188")
  32. if metric_collector_port and metric_collector_port.find(':') != -1:
  33. metric_collector_port = metric_collector_port.split(':')[1]
  34. pass
  35. ams_collector_log_dir = config['configurations']['ams-env']['ams_collector_log_dir']
  36. ams_monitor_log_dir = config['configurations']['ams-env']['ams_monitor_log_dir']
  37. ams_monitor_conf_dir = "/etc/ambari-metrics-monitor/conf/"
  38. ams_monitor_dir = "/usr/lib/python2.6/site-packages/resource_monitoring"
  39. ams_monitor_pid_dir = status_params.ams_monitor_pid_dir
  40. ams_monitor_script = "/usr/sbin/ambari-metrics-monitor"
  41. #RPM versioning support
  42. rpm_version = default("/configurations/hadoop-env/rpm_version", None)
  43. #hadoop params
  44. if rpm_version is not None:
  45. #RPM versioning support
  46. rpm_version = default("/configurations/hadoop-env/rpm_version", None)
  47. #hadoop params
  48. if rpm_version is not None:
  49. hadoop_native_lib = format("/usr/hdp/current/hadoop-client/lib/native/")
  50. hadoop_bin_dir = format("/usr/hdp/current/hadoop/bin")
  51. daemon_script = format('/usr/hdp/current/hbase/bin/hbase-daemon.sh')
  52. region_mover = format('/usr/hdp/current/hbase/bin/region_mover.rb')
  53. region_drainer = format('/usr/hdp/current/hbase/bin/draining_servers.rb')
  54. hbase_cmd = format('/usr/hdp/current/hbase/bin/hbase')
  55. else:
  56. hadoop_native_lib = format("/usr/lib/hadoop/lib/native")
  57. hadoop_bin_dir = "/usr/bin"
  58. daemon_script = "/usr/lib/hbase/bin/hbase-daemon.sh"
  59. region_mover = "/usr/lib/hbase/bin/region_mover.rb"
  60. region_drainer = "/usr/lib/hbase/bin/draining_servers.rb"
  61. hbase_cmd = "/usr/lib/hbase/bin/hbase"
  62. hadoop_conf_dir = "/etc/hadoop/conf"
  63. #hbase_conf_dir = "/etc/ams-hbase/conf"
  64. hbase_conf_dir = "/etc/ams-hbase/conf"
  65. hbase_excluded_hosts = config['commandParams']['excluded_hosts']
  66. hbase_drain_only = config['commandParams']['mark_draining_only']
  67. hbase_included_hosts = config['commandParams']['included_hosts']
  68. hbase_user = status_params.hbase_user
  69. smokeuser = config['configurations']['cluster-env']['smokeuser']
  70. hbase_hdfs_root_dir = config['configurations']['ams-hbase-site']['hbase.rootdir']
  71. is_hbase_distributed = hbase_hdfs_root_dir.startswith('hdfs://')
  72. # security is disabled for embedded mode, when HBase is backed by file
  73. security_enabled = False if not is_hbase_distributed else config['configurations']['cluster-env']['security_enabled']
  74. # this is "hadoop-metrics.properties" for 1.x stacks
  75. metric_prop_file_name = "hadoop-metrics2-hbase.properties"
  76. # not supporting 32 bit jdk.
  77. java64_home = config['hostLevelParams']['java_home']
  78. hbase_log_dir = config['configurations']['ams-hbase-env']['hbase_log_dir']
  79. master_heapsize = config['configurations']['ams-hbase-env']['hbase_master_heapsize']
  80. regionserver_heapsize = config['configurations']['ams-hbase-env']['hbase_regionserver_heapsize']
  81. regionserver_xmn_max = config['configurations']['ams-hbase-env']['hbase_regionserver_xmn_max']
  82. regionserver_xmn_percent = config['configurations']['ams-hbase-env']['hbase_regionserver_xmn_ratio']
  83. regionserver_xmn_size = calc_xmn_from_xms(regionserver_heapsize, regionserver_xmn_percent, regionserver_xmn_max)
  84. # For embedded mode
  85. hbase_heapsize = master_heapsize
  86. ams_checkpoint_dir = config['configurations']['ams-site']['timeline.metrics.aggregator.checkpoint.dir']
  87. hbase_pid_dir = status_params.hbase_pid_dir
  88. hbase_tmp_dir = config['configurations']['ams-hbase-site']['hbase.tmp.dir']
  89. # TODO UPGRADE default, update site during upgrade
  90. _local_dir_conf = default('/configurations/ams-hbase-site/hbase.local.dir', "${hbase.tmp.dir}/local")
  91. local_dir = substitute_vars(_local_dir_conf, config['configurations']['ams-hbase-site'])
  92. client_jaas_config_file = format("{hbase_conf_dir}/hbase_client_jaas.conf")
  93. master_jaas_config_file = format("{hbase_conf_dir}/hbase_master_jaas.conf")
  94. regionserver_jaas_config_file = format("{hbase_conf_dir}/hbase_regionserver_jaas.conf")
  95. rs_hosts = ["localhost"]
  96. smoke_test_user = config['configurations']['cluster-env']['smokeuser']
  97. smokeuser_permissions = "RWXCA"
  98. service_check_data = functions.get_unique_id_and_date()
  99. user_group = config['configurations']['cluster-env']["user_group"]
  100. if security_enabled:
  101. _hostname_lowercase = config['hostname'].lower()
  102. master_jaas_princ = default('/configurations/ams-hbase-site/hbase.master.kerberos.principal', 'hbase/_HOST@EXAMPLE.COM').replace('_HOST',_hostname_lowercase)
  103. regionserver_jaas_princ = default('/configurations/ams-hbase-site/hbase.regionserver.kerberos.principal', 'hbase/_HOST@EXAMPLE.COM').replace('_HOST',_hostname_lowercase)
  104. master_keytab_path = config['configurations']['ams-hbase-site']['hbase.master.keytab.file']
  105. regionserver_keytab_path = config['configurations']['ams-hbase-site']['hbase.regionserver.keytab.file']
  106. smoke_user_keytab = config['configurations']['cluster-env']['smokeuser_keytab']
  107. hbase_user_keytab = config['configurations']['ams-hbase-env']['hbase_user_keytab']
  108. kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
  109. if security_enabled:
  110. kinit_cmd = format("{kinit_path_local} -kt {hbase_user_keytab} {hbase_user};")
  111. else:
  112. kinit_cmd = ""
  113. #log4j.properties
  114. if (('ams-hbase-log4j' in config['configurations']) and ('content' in config['configurations']['ams-hbase-log4j'])):
  115. hbase_log4j_props = config['configurations']['ams-hbase-log4j']['content']
  116. else:
  117. hbase_log4j_props = None
  118. if (('ams-log4j' in config['configurations']) and ('content' in config['configurations']['ams-log4j'])):
  119. log4j_props = config['configurations']['ams-log4j']['content']
  120. else:
  121. log4j_props = None
  122. hbase_env_sh_template = config['configurations']['ams-hbase-env']['content']
  123. ams_env_sh_template = config['configurations']['ams-env']['content']
  124. hbase_staging_dir = "/apps/hbase/staging"
  125. #for create_hdfs_directory
  126. hostname = config["hostname"]
  127. hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
  128. hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
  129. hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
  130. kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
  131. import functools
  132. #create partial functions with common arguments for every HdfsDirectory call
  133. #to create hdfs directory we need to call params.HdfsDirectory in code
  134. # HdfsDirectory = functools.partial(
  135. # HdfsDirectory,
  136. # conf_dir=hadoop_conf_dir,
  137. # hdfs_user=hdfs_user,
  138. # security_enabled = security_enabled,
  139. # keytab = hdfs_user_keytab,
  140. # kinit_path_local = kinit_path_local,
  141. # bin_dir = hadoop_bin_dir
  142. # )