
AMBARI-18165. Alert on Atlas after adding it to a secure cluster as HBase table initialization fails (Vitalyi Brodetskyi via alejandro)

Alejandro Fernandez, 9 years ago
commit e662dcb7c8

+ 1 - 0
ambari-common/src/main/python/resource_management/libraries/functions/constants.py

@@ -99,3 +99,4 @@ class StackFeature:
   RANGER_ADMIN_PASSWD_CHANGE = "ranger_admin_password_change"
   STORM_METRICS_APACHE_CLASSES = "storm_metrics_apache_classes"
   SPARK_JAVA_OPTS_SUPPORT = "spark_java_opts_support"
+  ATLAS_HBASE_SETUP="atlas_hbase_setup"
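This constant is paired with the atlas_hbase_setup feature entry added to stack_features.json further down; metadata_server.py uses it to gate the new HBase setup step. A minimal sketch of that check, assuming the import paths used elsewhere in Ambari's service scripts and a hypothetical effective stack version:

    from resource_management.libraries.functions.constants import StackFeature
    from resource_management.libraries.functions.stack_features import check_stack_feature

    # "2.5.0.0" is a hypothetical effective stack version; the feature's min_version
    # (declared in stack_features.json in this same patch) is 2.5.0.0, so this is True.
    if check_stack_feature(StackFeature.ATLAS_HBASE_SETUP, "2.5.0.0"):
        pass  # metadata_server.py runs atlas_hbase_setup.rb via `hbase shell -n` here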

+ 8 - 4
ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/metadata.py

@@ -19,7 +19,7 @@ limitations under the License.
 """
 
 from resource_management.core.resources.system import Directory, File
-from resource_management.core.source import StaticFile, InlineTemplate
+from resource_management.core.source import StaticFile, InlineTemplate, Template
 from resource_management.core.exceptions import Fail
 from resource_management.libraries.functions.format import format
 from resource_management.libraries.functions.decorator import retry
@@ -108,6 +108,10 @@ def metadata(type='server'):
          group=params.user_group
     )
 
+    if params.security_enabled:
+      TemplateConfig(format(params.atlas_jaas_file),
+                     owner=params.metadata_user)
+
     if type == 'server' and params.search_backend_solr and params.has_infra_solr:
       solr_cloud_util.setup_solr_client(params.config)
       check_znode()
@@ -118,9 +122,9 @@ def metadata(type='server'):
       create_collection('edge_index', 'basic_configs', jaasFile)
       create_collection('fulltext_index', 'basic_configs', jaasFile)
 
-    if params.security_enabled:
-        TemplateConfig(format(params.atlas_jaas_file),
-                         owner=params.metadata_user)
+    File(params.atlas_hbase_setup,
+         content=Template("atlas_hbase_setup.rb.j2")
+    )
 
 def upload_conf_set(config_set, jaasFile):
   import params
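For reference, TemplateConfig renders the <basename>.j2 template matching the target path and writes it there with the given owner; with the test configuration used later in this patch, the moved call above is roughly equivalent to the following sketch (illustrative only, and only meaningful inside an Ambari resource Environment):

    from resource_management.core.resources.system import File
    from resource_management.core.source import Template

    File('/etc/atlas/conf/atlas_jaas.conf',        # params.atlas_jaas_file in the test configs
         content=Template('atlas_jaas.conf.j2'),   # template resolved from the path's basename
         owner='atlas')                            # params.metadata_user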

+ 19 - 1
ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/metadata_server.py

@@ -19,7 +19,7 @@ limitations under the License.
 from metadata import metadata
 from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
-from resource_management import Execute, File, check_process_status, Script
+from resource_management import Execute, File, check_process_status, Script, format_stack_version
 from resource_management.libraries.functions import format
 from resource_management.libraries.functions.security_commons import build_expectations, \
   get_params_from_filesystem, validate_security_config_properties, \
@@ -68,6 +68,8 @@ class MetadataServer(Script):
 
     daemon_cmd = format('source {params.conf_dir}/atlas-env.sh ; {params.metadata_start_script}')
     no_op_test = format('ls {params.pid_file} >/dev/null 2>&1 && ps -p `cat {params.pid_file}` >/dev/null 2>&1')
+    atlas_hbase_setup_command = format("cat {atlas_hbase_setup} | hbase shell -n")
+    secure_atlas_hbase_setup_command = format("kinit -kt {hbase_user_keytab} {hbase_principal_name}; ") + atlas_hbase_setup_command
 
     if params.stack_supports_atlas_ranger_plugin:
       Logger.info('Atlas plugin is enabled, configuring Atlas plugin.')
@@ -76,6 +78,22 @@ class MetadataServer(Script):
       Logger.info('Atlas plugin is not supported or enabled.')
 
     try:
+      effective_version = format_stack_version(params.version) if upgrade_type is not None else params.stack_version_formatted
+
+      if check_stack_feature(StackFeature.ATLAS_HBASE_SETUP, effective_version):
+        if params.security_enabled and params.has_hbase_master:
+          Execute(secure_atlas_hbase_setup_command,
+                  tries = 5,
+                  try_sleep = 10,
+                  user=params.hbase_user
+          )
+        elif params.has_ranger_admin and params.has_hbase_master and not params.security_enabled:
+          Execute(atlas_hbase_setup_command,
+                  tries = 5,
+                  try_sleep = 10,
+                  user=params.hbase_user
+          )
+
       Execute(daemon_cmd,
               user=params.metadata_user,
               not_if=no_op_test
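With the params introduced in this patch, the two command strings expand at start time roughly as sketched below (keytab and principal are hypothetical example values). Execute retries each command up to 5 times with a 10-second sleep, running as the hbase user so the grant statements in the script are permitted:

    # Illustrative expansion of the command strings built with format() above:
    exec_tmp_dir = "/tmp"
    atlas_hbase_setup = "{0}/atlas_hbase_setup.rb".format(exec_tmp_dir)
    hbase_user_keytab = "/etc/security/keytabs/hbase.headless.keytab"  # hypothetical
    hbase_principal_name = "hbase-cl1@EXAMPLE.COM"                     # hypothetical

    atlas_hbase_setup_command = "cat {0} | hbase shell -n".format(atlas_hbase_setup)
    secure_atlas_hbase_setup_command = ("kinit -kt {0} {1}; ".format(hbase_user_keytab, hbase_principal_name)
                                        + atlas_hbase_setup_command)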

+ 14 - 0
ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/params.py

@@ -70,6 +70,7 @@ def configs_for_ha(atlas_hosts, metadata_port, is_atlas_ha_enabled):
 
 # server configurations
 config = Script.get_config()
+exec_tmp_dir = Script.get_tmp_dir()
 stack_root = Script.get_stack_root()
 
 # Needed since this is an Atlas Hook service.
@@ -225,6 +226,19 @@ enable_ranger_atlas = False
 atlas_server_xmx = default("configurations/atlas-env/atlas_server_xmx", 2048)
 atlas_server_max_new_size = default("configurations/atlas-env/atlas_server_max_new_size", 614)
 
+hbase_master_hosts = default('/clusterHostInfo/hbase_master_hosts', [])
+has_hbase_master = not len(hbase_master_hosts) == 0
+
+ranger_admin_hosts = default('/clusterHostInfo/ranger_admin_hosts', [])
+has_ranger_admin = not len(ranger_admin_hosts) == 0
+
+atlas_hbase_setup = format("{exec_tmp_dir}/atlas_hbase_setup.rb")
+atlas_graph_storage_hbase_table = default('/configurations/application-properties/atlas.graph.storage.hbase.table', None)
+atlas_audit_hbase_tablename = default('/configurations/application-properties/atlas.audit.hbase.tablename', None)
+
+hbase_user_keytab = default('/configurations/hbase-env/hbase_user_keytab', None)
+hbase_principal_name = default('/configurations/hbase-env/hbase_principal_name', None)
+
 if has_ranger_admin and stack_supports_atlas_ranger_plugin:
   # for create_hdfs_directory
   namenode_host = set(default("/clusterHostInfo/namenode_host", []))
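default() reads a value from the command's configuration JSON by path and falls back to the supplied default when the key is absent, so the two has_* flags end up False on clusters without those components and the new Execute calls in metadata_server.py are skipped there. A self-contained sketch of the derivation over a plain dict (not Ambari's actual default() implementation):

    cluster_host_info = {"hbase_master_hosts": ["c6401.ambari.apache.org"]}  # hypothetical command JSON fragment

    hbase_master_hosts = cluster_host_info.get("hbase_master_hosts", [])
    ranger_admin_hosts = cluster_host_info.get("ranger_admin_hosts", [])

    has_hbase_master = len(hbase_master_hosts) > 0   # True in this example
    has_ranger_admin = len(ranger_admin_hosts) > 0   # False: no Ranger admin hosts present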

+ 1 - 0
ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/status_params.py

@@ -45,6 +45,7 @@ pid_dir = default("/configurations/atlas-env/metadata_pid_dir", "/var/run/atlas"
 pid_file = format("{pid_dir}/atlas.pid")
 
 metadata_user = default("/configurations/atlas-env/metadata_user", None)
+hbase_user = default("/configurations/hbase-env/hbase_user", None)
 
 # Security related/required params
 hostname = config['hostname']

+ 24 - 0
ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/templates/atlas_hbase_setup.rb.j2

@@ -0,0 +1,24 @@
+_tbl_titan = '{{atlas_graph_storage_hbase_table}}'
+_tbl_audit = '{{atlas_audit_hbase_tablename}}'
+_usr_atlas = '{{metadata_user}}'
+
+
+if not list.include? _tbl_titan
+  begin
+    create _tbl_titan,{NAME => 'e',DATA_BLOCK_ENCODING => 'FAST_DIFF', COMPRESSION =>'GZ', BLOOMFILTER =>'ROW'},{NAME => 'g',DATA_BLOCK_ENCODING => 'FAST_DIFF', COMPRESSION =>'GZ', BLOOMFILTER =>'ROW'},{NAME => 'i',DATA_BLOCK_ENCODING => 'FAST_DIFF', COMPRESSION =>'GZ', BLOOMFILTER =>'ROW'},{NAME => 's',DATA_BLOCK_ENCODING => 'FAST_DIFF', COMPRESSION =>'GZ', BLOOMFILTER =>'ROW'},{NAME => 'm',DATA_BLOCK_ENCODING => 'FAST_DIFF', COMPRESSION =>'GZ', BLOOMFILTER =>'ROW'},{NAME => 'l',DATA_BLOCK_ENCODING => 'FAST_DIFF', COMPRESSION =>'GZ', BLOOMFILTER =>'ROW', TTL => 604800, KEEP_DELETED_CELLS =>false}
+  rescue RuntimeError => e
+    raise e if not e.message.include? "Table already exists"
+  end
+end
+
+
+if not list.include? _tbl_audit
+  begin
+    create _tbl_audit, {NAME => 'dt', DATA_BLOCK_ENCODING => 'FAST_DIFF', COMPRESSION =>'GZ', BLOOMFILTER =>'ROW'}
+  rescue RuntimeError => e
+    raise e if not e.message.include? "Table already exists"
+  end
+end
+
+grant _usr_atlas, 'RWCA', _tbl_titan
+grant _usr_atlas, 'RWCA', _tbl_audit
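The {{...}} placeholders are filled from params.py when the File resource in metadata.py renders this template. A minimal standalone sketch of that rendering, assuming plain Jinja2 semantics and hypothetical table names:

    import jinja2

    with open("atlas_hbase_setup.rb.j2") as f:
        template = jinja2.Template(f.read())

    script = template.render(
        atlas_graph_storage_hbase_table="atlas_titan",            # hypothetical atlas.graph.storage.hbase.table
        atlas_audit_hbase_tablename="ATLAS_ENTITY_AUDIT_EVENTS",  # hypothetical atlas.audit.hbase.tablename
        metadata_user="atlas")

    with open("/tmp/atlas_hbase_setup.rb", "w") as f:
        f.write(script)   # later piped to `hbase shell -n` by metadata_server.py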

+ 0 - 26
ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_regionserver.py

@@ -86,32 +86,6 @@ class HbaseRegionServerDefault(HbaseRegionServer):
   def post_start(self, env, upgrade_type=None):
     import params
 
-    self.apply_atlas_acl(params.hbase_user)
-
-  def apply_atlas_acl(self, hbase_user):
-    import params
-
-    if params.security_enabled and params.has_atlas and params.atlas_with_managed_hbase:
-      current_host = params.hostname.lower()
-      sorted_rs_hosts = sorted([host.lower() for host in params.rs_hosts])
-
-      # if list of rs_hosts are empty, try to apply permissions regardless of host
-      if len(sorted_rs_hosts) == 0:
-        can_apply_permissions = True
-      else:
-        can_apply_permissions = current_host == sorted_rs_hosts[0]
-
-      if can_apply_permissions:
-        kinit_cmd = format("{kinit_path_local} -kt {regionserver_keytab_path} {regionserver_jaas_princ}")
-        permissions_cmd = [
-          format("echo \"grant '{metadata_user}', 'RWXCA', '{atlas_graph_storage_hbase_table}'\" | hbase shell -n"),
-          format("echo \"grant '{metadata_user}', 'RWXCA', '{atlas_audit_hbase_tablename}'\" | hbase shell -n"),
-        ]
-
-        # no additional logging required, as it supported by checked_call itself
-        # re-tries needed to suffer fails on Kerberos wizard as RegionServer update security features status over some time
-        for perm_cmd in permissions_cmd:
-          shell.checked_call(format("{kinit_cmd}; {perm_cmd}"), user=params.hbase_user, tries=10, try_sleep=10)
 
   def start(self, env, upgrade_type=None):
     import params

+ 5 - 0
ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_features.json

@@ -298,6 +298,11 @@
       "description": "Allow Spark to generate java-opts file",
       "min_version": "2.2.0.0",
       "max_version": "2.4.0.0"
+    },
+    {
+      "name": "atlas_hbase_setup",
+      "description": "Use script to create Atlas tables in Hbase and set permissions for Atlas user.",
+      "min_version": "2.5.0.0"
     }
   ]
 }
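check_stack_feature looks this entry up by name and compares the effective stack version against min_version (and max_version when one is declared). The comparison behaves roughly like a tuple-wise version compare; an illustrative stand-in, not Ambari's implementation:

    def version_tuple(v):
        return tuple(int(part) for part in v.split("."))

    def feature_enabled(effective_version, min_version, max_version=None):
        if version_tuple(effective_version) < version_tuple(min_version):
            return False
        return max_version is None or version_tuple(effective_version) < version_tuple(max_version)

    print(feature_enabled("2.5.0.0", "2.5.0.0"))  # True: atlas_hbase_setup applies from HDP 2.5 on
    print(feature_enabled("2.4.2.0", "2.5.0.0"))  # False: older stacks keep the previous behavior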

+ 36 - 0
ambari-server/src/main/resources/stacks/HDP/2.5/services/ATLAS/metainfo.xml

@@ -22,6 +22,42 @@
       <name>ATLAS</name>
       <version>0.7.0.2.5</version>
       <extends>common-services/ATLAS/0.7.0.2.5</extends>
+      <components>
+        <component>
+          <name>ATLAS_SERVER</name>
+          <displayName>Atlas Metadata Server</displayName>
+          <category>MASTER</category>
+          <cardinality>1</cardinality>
+          <versionAdvertised>false</versionAdvertised>
+          <dependencies>
+            <dependency>
+              <name>HBASE/HBASE_CLIENT</name>
+              <scope>host</scope>
+              <auto-deploy>
+                <enabled>true</enabled>
+              </auto-deploy>
+            </dependency>
+            <dependency>
+              <name>HDFS/HDFS_CLIENT</name>
+              <scope>host</scope>
+              <auto-deploy>
+                <enabled>true</enabled>
+              </auto-deploy>
+            </dependency>
+          </dependencies>
+          <commandScript>
+            <script>scripts/metadata_server.py</script>
+            <scriptType>PYTHON</scriptType>
+            <timeout>1200</timeout>
+          </commandScript>
+          <logs>
+            <log>
+              <logId>atlas_app</logId>
+              <primary>true</primary>
+            </log>
+          </logs>
+        </component>
+      </components>
       <osSpecifics>
         <osSpecific>
           <osFamily>redhat7,amazon2015,redhat6,suse11,suse12</osFamily>

+ 0 - 48
ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_regionserver.py

@@ -91,54 +91,6 @@ class TestHbaseRegionServer(RMFTestCase):
     self.assert_configure_secured()
     self.assertNoMoreResources()
 
-  def test_atlas_apply_acl(self):
-    config_file = self.get_src_folder()+"/test/python/stacks/2.0.6/configs/secured.json"
-    with open(config_file, "r") as f:
-      json_content = json.load(f)
-
-    json_content['clusterHostInfo']['atlas_server_hosts'] = [
-      "c6401.ambari.apache.org",
-      "c6402.ambari.apache.org"
-    ]
-    json_content['clusterHostInfo']['hbase_rs_hosts'] = [
-      "c6401.ambari.apache.org",
-      "c6402.ambari.apache.org"
-    ]
-    json_content['configurations']['atlas-env'] = {
-        "metadata_user": "atlas"
-    }
-    if 'hbase-site' in json_content['configurations']:
-        json_content['configurations']['hbase-site']["hbase.zookeeper.quorum"] = ",".join(json_content['clusterHostInfo']['atlas_server_hosts'])
-    else:
-        json_content['configurations']['hbase-site'] = {
-            "hbase.zookeeper.quorum": ",".join(json_content['clusterHostInfo']['atlas_server_hosts'])
-        }
-
-    json_content['configurations']['application-properties'] = {
-        "atlas.graph.storage.hostname": json_content['configurations']['hbase-site']['hbase.zookeeper.quorum'],
-        "atlas.graph.storage.hbase.table": "test"
-    }
-
-    mock_dicts = {}
-    self.executeScript(
-      self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hbase_regionserver.py",
-      classname="HbaseRegionServer",
-      command="start",
-      config_dict=json_content,
-      stack_version=self.STACK_VERSION,
-      target=RMFTestCase.TARGET_COMMON_SERVICES,
-      mocks_dict=mock_dicts
-    )
-    permission_apply_call_found = False
-    pattern_search = "/usr/bin/kinit -kt /etc/security/keytabs/hbase.service.keytab hbase/c6401.ambari.apache.org@EXAMPLE.COM; echo \"grant 'atlas', 'RWXCA', 'test'\" | hbase shell -n"
-    if "checked_call" in mock_dicts:
-        for _call in mock_dicts["checked_call"].call_args_list:
-            if len(_call) > 0 and isinstance(_call[0], tuple) and len(_call[0]) > 0 and \
-                    _call[0][0] == pattern_search:
-                permission_apply_call_found = True
-
-    self.assertEquals(True, permission_apply_call_found)
-
 
   def test_start_secured(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hbase_regionserver.py",

+ 156 - 4
ambari-server/src/test/python/stacks/2.3/ATLAS/test_metadata_server.py

@@ -167,6 +167,149 @@ class TestMetadataServer(RMFTestCase):
       self.assertResourceCalledRegexp('^Execute$', '^ambari-sudo.sh JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /usr/lib/ambari-infra-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181/infra-solr --create-collection --collection edge_index --config-set basic_configs --shards 1 --replication 1 --max-shards 1 --retry 5 --interval 10')
       self.assertResourceCalledRegexp('^Execute$', '^ambari-sudo.sh JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /usr/lib/ambari-infra-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181/infra-solr --create-collection --collection fulltext_index --config-set basic_configs --shards 1 --replication 1 --max-shards 1 --retry 5 --interval 10')
 
+  def configureResourcesCalledSecure(self):
+    # Both server and client
+    self.assertResourceCalled('Directory', '/etc/atlas/conf',
+                              owner='atlas',
+                              group='hadoop',
+                              create_parents = True,
+                              cd_access='a',
+                              mode=0755
+    )
+
+    # Pid dir
+    self.assertResourceCalled('Directory', '/var/run/atlas',
+                              owner='atlas',
+                              group='hadoop',
+                              create_parents = True,
+                              cd_access='a',
+                              mode=0755
+    )
+    self.assertResourceCalled('Directory', '/etc/atlas/conf/solr',
+                              owner='atlas',
+                              group='hadoop',
+                              create_parents = True,
+                              cd_access='a',
+                              mode=0755,
+                              recursive_ownership = True
+    )
+    # Log dir
+    self.assertResourceCalled('Directory', '/var/log/atlas',
+                              owner='atlas',
+                              group='hadoop',
+                              create_parents = True,
+                              cd_access='a',
+                              mode=0755
+    )
+    # Data dir
+    self.assertResourceCalled('Directory', '/usr/hdp/current/atlas-server/data',
+                              owner='atlas',
+                              group='hadoop',
+                              create_parents = True,
+                              cd_access='a',
+                              mode=0644
+    )
+    # Expanded war dir
+    self.assertResourceCalled('Directory', '/usr/hdp/current/atlas-server/server/webapp',
+                              owner='atlas',
+                              group='hadoop',
+                              create_parents = True,
+                              cd_access='a',
+                              mode=0644
+    )
+    self.assertResourceCalled('File', '/usr/hdp/current/atlas-server/server/webapp/atlas.war',
+                              content = StaticFile('/usr/hdp/current/atlas-server/server/webapp/atlas.war'),
+                              )
+    host_name = u"c6401.ambari.apache.org"
+    app_props =  dict(self.getConfig()['configurations']['application-properties'])
+    app_props['atlas.server.bind.address'] = host_name
+
+    metadata_protocol = "https" if app_props["atlas.enableTLS"] is True else "http"
+    metadata_port = app_props["atlas.server.https.port"] if metadata_protocol == "https" else app_props["atlas.server.http.port"]
+    app_props["atlas.rest.address"] = u'%s://%s:%s' % (metadata_protocol, host_name, metadata_port)
+    app_props["atlas.server.ids"] = "id1"
+    app_props["atlas.server.address.id1"] = u"%s:%s" % (host_name, metadata_port)
+    app_props["atlas.server.ha.enabled"] = "false"
+
+    self.assertResourceCalled('File', '/etc/atlas/conf/atlas-log4j.xml',
+                              content=InlineTemplate(
+                                self.getConfig()['configurations'][
+                                  'atlas-log4j']['content']),
+                              owner='atlas',
+                              group='hadoop',
+                              mode=0644,
+                              )
+    self.assertResourceCalled('File', '/etc/atlas/conf/atlas-env.sh',
+                              content=InlineTemplate(
+                                self.getConfig()['configurations'][
+                                  'atlas-env']['content']),
+                              owner='atlas',
+                              group='hadoop',
+                              mode=0755,
+                              )
+    self.assertResourceCalled('File', '/etc/atlas/conf/solr/solrconfig.xml',
+                              content=InlineTemplate(
+                                self.getConfig()['configurations'][
+                                  'atlas-solrconfig']['content']),
+                              owner='atlas',
+                              group='hadoop',
+                              mode=0644,
+                              )
+    # application.properties file
+    self.assertResourceCalled('PropertiesFile',
+                              '/etc/atlas/conf/application.properties',
+                              properties=app_props,
+                              owner=u'atlas',
+                              group=u'hadoop',
+                              mode=0644,
+                              )
+
+    self.assertResourceCalled('TemplateConfig', '/etc/atlas/conf/atlas_jaas.conf',
+                              owner = 'atlas',
+                              )
+
+    self.assertResourceCalled('Directory', '/var/log/ambari-infra-solr-client',
+                              create_parents = True,
+                              cd_access='a',
+                              mode=0755
+    )
+
+    self.assertResourceCalled('Directory', '/usr/lib/ambari-infra-solr-client',
+                              create_parents = True,
+                              recursive_ownership = True,
+                              cd_access='a',
+                              mode=0755
+    )
+    self.assertResourceCalled('File', '/usr/lib/ambari-infra-solr-client/solrCloudCli.sh',
+                              content=StaticFile('/usr/lib/ambari-infra-solr-client/solrCloudCli.sh'),
+                              mode=0755,
+                              )
+    self.assertResourceCalled('File', '/usr/lib/ambari-infra-solr-client/log4j.properties',
+                              content=InlineTemplate(self.getConfig()['configurations'][
+                                'infra-solr-client-log4j']['content']),
+                              mode=0644,
+                              )
+    self.assertResourceCalled('File', '/var/log/ambari-infra-solr-client/solr-client.log',
+                              mode=0664,
+                              content=''
+    )
+    self.assertResourceCalledRegexp('^Execute$', '^ambari-sudo.sh JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /usr/lib/ambari-infra-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181 --znode /infra-solr --check-znode --retry 5 --interval 10')
+    self.assertResourceCalledRegexp('^Execute$', '^ambari-sudo.sh JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /usr/lib/ambari-infra-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181/infra-solr --download-config --config-dir /tmp/solr_config_basic_configs_0.[0-9]* --config-set basic_configs --retry 30 --interval 5')
+    self.assertResourceCalledRegexp('^File$', '^/tmp/solr_config_basic_configs_0.[0-9]*',
+                                    content=InlineTemplate(self.getConfig()['configurations']['atlas-solrconfig']['content']),
+                                    only_if='test -d /tmp/solr_config_basic_configs_0.[0-9]*')
+    self.assertResourceCalledRegexp('^Execute$', '^ambari-sudo.sh JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /usr/lib/ambari-infra-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181/infra-solr --upload-config --config-dir /tmp/solr_config_basic_configs_0.[0-9]* --config-set basic_configs --retry 30 --interval 5',
+                                    only_if='test -d /tmp/solr_config_basic_configs_0.[0-9]*')
+    self.assertResourceCalledRegexp('^Execute$', '^ambari-sudo.sh JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /usr/lib/ambari-infra-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181/infra-solr --upload-config --config-dir /etc/atlas/conf/solr --config-set basic_configs --retry 30 --interval 5',
+                                    not_if='test -d /tmp/solr_config_basic_configs_0.[0-9]*')
+    self.assertResourceCalledRegexp('^Directory$', '^/tmp/solr_config_basic_configs_0.[0-9]*',
+                                    action=['delete'],
+                                    create_parents=True)
+
+    self.assertResourceCalledRegexp('^Execute$', '^ambari-sudo.sh JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /usr/lib/ambari-infra-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181/infra-solr --create-collection --collection vertex_index --config-set basic_configs --shards 1 --replication 1 --max-shards 1 --retry 5 --interval 10')
+    self.assertResourceCalledRegexp('^Execute$', '^ambari-sudo.sh JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /usr/lib/ambari-infra-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181/infra-solr --create-collection --collection edge_index --config-set basic_configs --shards 1 --replication 1 --max-shards 1 --retry 5 --interval 10')
+    self.assertResourceCalledRegexp('^Execute$', '^ambari-sudo.sh JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /usr/lib/ambari-infra-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181/infra-solr --create-collection --collection fulltext_index --config-set basic_configs --shards 1 --replication 1 --max-shards 1 --retry 5 --interval 10')
+
   def test_configure_default(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/metadata_server.py",
                        classname = "MetadataServer",
@@ -177,6 +320,10 @@ class TestMetadataServer(RMFTestCase):
     )
 
     self.configureResourcesCalled()
+
+    self.assertResourceCalled('File', '/tmp/atlas_hbase_setup.rb',
+                              content=Template("atlas_hbase_setup.rb.j2"))
+
     self.assertNoMoreResources()
 
   def test_configure_secure(self):
@@ -188,10 +335,11 @@ class TestMetadataServer(RMFTestCase):
                        target = RMFTestCase.TARGET_COMMON_SERVICES
                        )
 
-    self.configureResourcesCalled()
-    self.assertResourceCalled('TemplateConfig', '/etc/atlas/conf/atlas_jaas.conf',
-                              owner = 'atlas',
-                              )
+    self.configureResourcesCalledSecure()
+
+    self.assertResourceCalled('File', '/tmp/atlas_hbase_setup.rb',
+                              content=Template("atlas_hbase_setup.rb.j2"))
+
     self.assertNoMoreResources()
 
   def test_start_default(self):
@@ -203,6 +351,10 @@ class TestMetadataServer(RMFTestCase):
                        target = RMFTestCase.TARGET_COMMON_SERVICES
     )
     self.configureResourcesCalled()
+
+    self.assertResourceCalled('File', '/tmp/atlas_hbase_setup.rb',
+                              content=Template("atlas_hbase_setup.rb.j2"))
+
     self.assertResourceCalled('Execute', 'source /etc/atlas/conf/atlas-env.sh ; /usr/hdp/current/atlas-server/bin/atlas_start.py',
                               not_if = 'ls /var/run/atlas/atlas.pid >/dev/null 2>&1 && ps -p `cat /var/run/atlas/atlas.pid` >/dev/null 2>&1',
                               user = 'atlas',

+ 4 - 0
ambari-server/src/test/python/stacks/2.5/ATLAS/test_atlas_server.py

@@ -176,6 +176,10 @@ class TestAtlasServer(RMFTestCase):
                        )
 
     self.configureResourcesCalled()
+
+    self.assertResourceCalled('File', '/tmp/atlas_hbase_setup.rb',
+                              content=Template("atlas_hbase_setup.rb.j2"))
+
     self.assertNoMoreResources()
 
   @patch("resource_management.libraries.functions.security_commons.build_expectations")