datanode.py

  1. """
  2. Licensed to the Apache Software Foundation (ASF) under one
  3. or more contributor license agreements. See the NOTICE file
  4. distributed with this work for additional information
  5. regarding copyright ownership. The ASF licenses this file
  6. to you under the Apache License, Version 2.0 (the
  7. "License"); you may not use this file except in compliance
  8. with the License. You may obtain a copy of the License at
  9. http://www.apache.org/licenses/LICENSE-2.0
  10. Unless required by applicable law or agreed to in writing, software
  11. distributed under the License is distributed on an "AS IS" BASIS,
  12. WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13. See the License for the specific language governing permissions and
  14. limitations under the License.
  15. """
import datanode_upgrade

from hdfs_datanode import datanode
from resource_management import *
from resource_management.libraries.functions.version import compare_versions, format_hdp_stack_version
from resource_management.libraries.functions.security_commons import build_expectations, \
  cached_kinit_executor, get_params_from_filesystem, validate_security_config_properties, FILE_TYPE_XML
from hdfs import hdfs
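

# DataNode is the Ambari management script for the HDFS DataNode component.
# It wires the component lifecycle commands (install, configure, start, stop,
# status) to the shared hdfs/hdfs_datanode helpers, and adds rolling-upgrade
# hooks plus a Kerberos security-status report.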
class DataNode(Script):

  def get_stack_to_component(self):
    return {"HDP": "hadoop-hdfs-datanode"}

  def install(self, env):
    import params
    self.install_packages(env, params.exclude_packages)
    env.set_params(params)
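
  # Rolling-upgrade hook: before the DataNode is restarted, switch the
  # hdp-select pointer for hadoop-hdfs-datanode to the target stack version
  # (only applies when upgrading to HDP 2.2.0.0 or newer).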
  def pre_rolling_restart(self, env):
    Logger.info("Executing DataNode Rolling Upgrade pre-restart")
    import params
    env.set_params(params)

    if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
      Execute(format("hdp-select set hadoop-hdfs-datanode {version}"))

  def post_rolling_restart(self, env):
    Logger.info("Executing DataNode Rolling Upgrade post-restart")
    import params
    env.set_params(params)

    # ensure the DataNode has started and rejoined the cluster
    datanode_upgrade.post_upgrade_check()
  def start(self, env, rolling_restart=False):
    import params
    env.set_params(params)
    self.configure(env)
    datanode(action="start")

  def stop(self, env, rolling_restart=False):
    import params
    env.set_params(params)

    # pre-upgrade steps shut down the DataNode, so there's no need to call
    # action=stop
    if rolling_restart:
      datanode_upgrade.pre_upgrade_shutdown()
    else:
      datanode(action="stop")

  def configure(self, env):
    import params
    env.set_params(params)
    hdfs()
    datanode(action="configure")
  def status(self, env):
    import status_params
    env.set_params(status_params)
    check_process_status(status_params.datanode_pid_file)
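
  # Security audit: if core-site reports Kerberos authentication, validate the
  # expected core-site/hdfs-site properties and attempt a (cached) kinit with
  # the DataNode keytab and principal, then report the resulting securityState
  # (SECURED_KERBEROS, UNSECURED, or ERROR) as structured output.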
  def security_status(self, env):
    import status_params
    env.set_params(status_params)

    props_value_check = {"hadoop.security.authentication": "kerberos",
                         "hadoop.security.authorization": "true"}
    props_empty_check = ["hadoop.security.auth_to_local"]
    props_read_check = None
    core_site_expectations = build_expectations('core-site', props_value_check, props_empty_check,
                                                props_read_check)
    props_value_check = None
    props_empty_check = ['dfs.datanode.keytab.file',
                         'dfs.datanode.kerberos.principal']
    props_read_check = ['dfs.datanode.keytab.file']
    hdfs_site_expectations = build_expectations('hdfs-site', props_value_check, props_empty_check,
                                                props_read_check)

    hdfs_expectations = {}
    hdfs_expectations.update(core_site_expectations)
    hdfs_expectations.update(hdfs_site_expectations)

    security_params = get_params_from_filesystem(status_params.hadoop_conf_dir,
                                                 {'core-site.xml': FILE_TYPE_XML,
                                                  'hdfs-site.xml': FILE_TYPE_XML})
    if 'core-site' in security_params and 'hadoop.security.authentication' in security_params['core-site'] and \
        security_params['core-site']['hadoop.security.authentication'].lower() == 'kerberos':
      result_issues = validate_security_config_properties(security_params, hdfs_expectations)
      if not result_issues:  # If all validations passed successfully
        try:
          # Double-check the dict before calling execute
          if ('hdfs-site' not in security_params or
              'dfs.datanode.keytab.file' not in security_params['hdfs-site'] or
              'dfs.datanode.kerberos.principal' not in security_params['hdfs-site']):
            self.put_structured_out({"securityState": "UNSECURED"})
            self.put_structured_out(
              {"securityIssuesFound": "Keytab file or principal are not set properly."})
            return

          cached_kinit_executor(status_params.kinit_path_local,
                                status_params.hdfs_user,
                                security_params['hdfs-site']['dfs.datanode.keytab.file'],
                                security_params['hdfs-site']['dfs.datanode.kerberos.principal'],
                                status_params.hostname,
                                status_params.tmp_dir)
          self.put_structured_out({"securityState": "SECURED_KERBEROS"})
        except Exception as e:
          self.put_structured_out({"securityState": "ERROR"})
          self.put_structured_out({"securityStateErrorInfo": str(e)})
      else:
        issues = []
        for cf in result_issues:
          issues.append("Configuration file %s did not pass the validation. Reason: %s" % (cf, result_issues[cf]))
        self.put_structured_out({"securityIssuesFound": ". ".join(issues)})
        self.put_structured_out({"securityState": "UNSECURED"})
    else:
      self.put_structured_out({"securityState": "UNSECURED"})
  117. if __name__ == "__main__":
  118. DataNode().execute()