AMBARI-17678. Misc Atlas fixes, remove conf dir from classpath of Falcon and Storm in HDP 2.5, add more security properties to Atlas Hooks, and delete deprecated configs (alejandro)

Alejandro Fernandez, 9 years ago
Parent commit b30d3c3a5d

+ 1 - 0
ambari-common/src/main/python/resource_management/libraries/functions/constants.py

@@ -90,6 +90,7 @@ class StackFeature:
   HBASE_HOME_DIRECTORY = "hbase_home_directory"
   ATLAS_RANGER_PLUGIN_SUPPORT = "atlas_ranger_plugin_support"
   ATLAS_UPGRADE_SUPPORT = "atlas_upgrade_support"
+  ATLAS_CONF_DIR_IN_PATH = "atlas_conf_dir_in_path"
   RANGER_PID_SUPPORT = "ranger_pid_support"
   RANGER_KMS_PID_SUPPORT = "ranger_kms_pid_support"
   RANGER_ADMIN_PASSWD_CHANGE = "ranger_admin_password_change"

+ 6 - 2
ambari-common/src/main/python/resource_management/libraries/functions/setup_atlas_hook.py

@@ -73,14 +73,18 @@ SHARED_ATLAS_HOOK_CONFIGS = set(
   "atlas.notification.topics",
   "atlas.notification.kafka.service.principal",
   "atlas.notification.kafka.keytab.location",
+  "atlas.cluster.name",
+
+  # Security properties
+  "atlas.kafka.sasl.kerberos.service.name",
+  "atlas.kafka.security.protocol",
   "atlas.jaas.KafkaClient.loginModuleName",
   "atlas.jaas.KafkaClient.loginModuleControlFlag",
   "atlas.jaas.KafkaClient.option.useKeyTab",
   "atlas.jaas.KafkaClient.option.storeKey",
   "atlas.jaas.KafkaClient.option.serviceName",
   "atlas.jaas.KafkaClient.option.keyTab",
-  "atlas.jaas.KafkaClient.option.principal",
-  "atlas.cluster.name"]
+  "atlas.jaas.KafkaClient.option.principal"]
 )
 
 def has_atlas_in_cluster():
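For reference, a minimal sketch of how a shared-key whitelist like SHARED_ATLAS_HOOK_CONFIGS is typically applied when the hook properties are written out for Falcon, Storm, Hive, and Sqoop; the merge helper below is illustrative, not the actual setup_atlas_hook code.

# Illustrative sketch: copy only the whitelisted Atlas properties (now including
# the Kafka security settings added above) from application-properties into a
# service's hook config. merge_atlas_hook_configs is a hypothetical helper.
def merge_atlas_hook_configs(application_properties, hook_properties, shared_keys):
  merged = dict(hook_properties)
  for key in shared_keys:
    if key in application_properties:
      merged[key] = application_properties[key]
  return merged

# e.g. merge_atlas_hook_configs(app_props, storm_atlas_application_properties,
#                               SHARED_ATLAS_HOOK_CONFIGS)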

+ 1 - 1
ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/configuration/atlas-log4j.xml

@@ -28,7 +28,7 @@
   </property>
   <property>
     <name>audit_log_level</name>
-    <value>OFF</value>
+    <value>info</value>
     <description>Log level for audit logging</description>
     <on-ambari-upgrade add="true"/>
   </property>

+ 6 - 5
ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/params.py

@@ -127,14 +127,18 @@ else:
 
 metadata_host = config['hostname']
 
+atlas_hosts = sorted(default('/clusterHostInfo/atlas_server_hosts', []))
+metadata_server_host = atlas_hosts[0] if len(atlas_hosts) > 0 else "UNKNOWN_HOST"
+metadata_server_url = format('{metadata_protocol}://{metadata_server_host}:{metadata_port}')
+
 # application properties
 application_properties = dict(config['configurations']['application-properties'])
-application_properties['atlas.server.bind.address'] = metadata_host
+application_properties["atlas.server.bind.address"] = metadata_host
+application_properties["atlas.rest.address"] = metadata_server_url
 
 # Atlas HA should populate
 # atlas.server.ids = id1,id2,...,idn
 # atlas.server.address.id# = host#:port
-atlas_hosts = default('/clusterHostInfo/atlas_server_hosts', [])
 # User should not have to modify this property, but still allow overriding it to False if multiple Atlas servers exist
 # This can be None, True, or False
 is_atlas_ha_enabled = default("/configurations/application-properties/atlas.server.ha.enabled", None)
@@ -256,9 +260,6 @@ if has_ranger_admin and stack_supports_atlas_ranger_plugin:
   enable_ranger_atlas = config['configurations']['ranger-atlas-plugin-properties']['ranger-atlas-plugin-enabled']
   enable_ranger_atlas = not is_empty(enable_ranger_atlas) and enable_ranger_atlas.lower() == 'yes'
   policymgr_mgr_url = config['configurations']['admin-properties']['policymgr_external_url']
-  atlas_hosts = sorted(default('/clusterHostInfo/atlas_server_hosts', []))
-  metadata_server_host = atlas_hosts[0]
-  metadata_server_url = format('{metadata_protocol}://{metadata_server_host}:{metadata_port}')
 
   downloaded_custom_connector = None
   driver_curl_source = None
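The metadata_server_url hoisted above follows the same protocol/port selection asserted in the test changes below; a hedged sketch, where the atlas.enableTLS and port lookups are assumptions based on those tests rather than the verbatim params.py code.

# Sketch of the atlas.rest.address derivation; property names mirror the test
# expectations further down, the helper itself is illustrative.
def build_metadata_server_url(application_properties, atlas_hosts):
  enable_tls = str(application_properties.get("atlas.enableTLS", "false")).lower() == "true"
  metadata_protocol = "https" if enable_tls else "http"
  port_key = "atlas.server.https.port" if enable_tls else "atlas.server.http.port"
  metadata_server_host = sorted(atlas_hosts)[0] if atlas_hosts else "UNKNOWN_HOST"
  return "%s://%s:%s" % (metadata_protocol, metadata_server_host, application_properties[port_key])

# build_metadata_server_url({"atlas.enableTLS": "false", "atlas.server.http.port": "21000"},
#                           ["c6401.ambari.apache.org"])
# -> "http://c6401.ambari.apache.org:21000"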

+ 28 - 12
ambari-server/src/main/resources/common-services/ATLAS/0.7.0.2.5/configuration/application-properties.xml

@@ -102,12 +102,6 @@
     <description>Path for the Atlas policy file.</description>
     <on-ambari-upgrade add="false"/>
   </property>
-  <property>
-    <name>atlas.kafka.data</name>
-    <value>{{metadata_home}}/data/kafka</value>
-    <description>The Kafka data directory.</description>
-    <on-ambari-upgrade add="false"/>
-  </property>
 
   <!-- Start: Shared Atlas Hooks that are also written out to configs for Falcon, Storm, Hive, and Sqoop.
   There are several more properties for when Atlas is Kerberized.
@@ -149,12 +143,6 @@
     <description></description>
     <on-ambari-upgrade add="false"/>
   </property>
-  <property>
-    <name>atlas.kafka.entities.group.id</name>
-    <value>entities</value>
-    <description>Kafka group id for the entity topic.</description>
-    <on-ambari-upgrade add="false"/>
-  </property>
   <property>
     <name>atlas.kafka.auto.commit.enable</name>
     <value>false</value>
@@ -237,6 +225,34 @@
     <deleted>true</deleted>
     <on-ambari-upgrade add="false"/>
   </property>
+
+  <property>
+    <name>atlas.graph.index.search.directory</name>
+    <deleted>true</deleted>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
+    <name>atlas.graph.index.search.elasticsearch.client-only</name>
+    <deleted>true</deleted>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
+    <name>atlas.graph.index.search.elasticsearch.local-mode</name>
+    <deleted>true</deleted>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
+    <name>atlas.graph.storage.directory</name>
+    <deleted>true</deleted>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
+    <name>atlas.kafka.entities.group.id</name>
+    <description>Kafka group id for the entity topic.</description>
+    <deleted>true</deleted>
+    <on-ambari-upgrade add="false"/>
+  </property>
+
   <property>
     <name>atlas.solr.kerberos.enable</name>
     <value>false</value>

+ 2 - 1
ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params_linux.py

@@ -154,7 +154,8 @@ atlas_hook_cp = ""
 if has_atlas_in_cluster():
   atlas_hook_filename = default('/configurations/atlas-env/metadata_conf_file', 'atlas-application.properties')
 
-  if has_atlas_server_on_host:
+  # Only append /etc/atlas/conf to classpath if on HDP 2.4.* and atlas server is running on this host.
+  if has_atlas_server_on_host and check_stack_feature(StackFeature.ATLAS_CONF_DIR_IN_PATH, stack_version_formatted):
     atlas_conf_dir = os.environ['METADATA_CONF'] if 'METADATA_CONF' in os.environ else format('{stack_root}/current/atlas-server/conf')
     atlas_home_dir = os.environ['METADATA_HOME_DIR'] if 'METADATA_HOME_DIR' in os.environ else format('{stack_root}/current/atlas-server')
     atlas_hook_cp = atlas_conf_dir + os.pathsep + os.path.join(atlas_home_dir, "hook", "falcon", "*") + os.pathsep
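On an HDP 2.4.x host that also runs the Atlas server, the resulting classpath prefix would look roughly like the value below; the paths assume the defaults from the hunk above (no METADATA_CONF or METADATA_HOME_DIR overrides).

import os

# Illustrative default values taken from the hunk above; the actual values
# depend on the METADATA_CONF and METADATA_HOME_DIR environment variables.
atlas_conf_dir = "/usr/hdp/current/atlas-server/conf"
atlas_home_dir = "/usr/hdp/current/atlas-server"
atlas_hook_cp = atlas_conf_dir + os.pathsep + os.path.join(atlas_home_dir, "hook", "falcon", "*") + os.pathsep
# -> "/usr/hdp/current/atlas-server/conf:/usr/hdp/current/atlas-server/hook/falcon/*:"
# On HDP 2.5 the stack-feature check above is False, so atlas_hook_cp stays empty.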

+ 5 - 3
ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/params_linux.py

@@ -194,7 +194,6 @@ metric_collector_legacy_sink_jar = "/usr/lib/storm/lib/ambari-metrics-storm-sink
 
 jar_jvm_opts = ''
 
-
 ########################################################
 ############# Atlas related params #####################
 ########################################################
@@ -203,8 +202,11 @@ storm_atlas_application_properties = default('/configurations/storm-atlas-applic
 
 if has_atlas_in_cluster():
   atlas_hook_filename = default('/configurations/atlas-env/metadata_conf_file', 'atlas-application.properties')
-  atlas_conf_dir = os.environ['METADATA_CONF'] if 'METADATA_CONF' in os.environ else '/etc/atlas/conf'
-  jar_jvm_opts += '-Datlas.conf=' + atlas_conf_dir
+
+  # Only append /etc/atlas/conf to classpath if on HDP 2.4.*
+  if check_stack_feature(StackFeature.ATLAS_CONF_DIR_IN_PATH, stack_version_formatted):
+    atlas_conf_dir = os.environ['METADATA_CONF'] if 'METADATA_CONF' in os.environ else '/etc/atlas/conf'
+    jar_jvm_opts += '-Datlas.conf=' + atlas_conf_dir
 #endregion
 
 

+ 6 - 0
ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_features.json

@@ -246,6 +246,12 @@
       "description": "Atlas Ranger plugin support",
       "min_version": "2.5.0.0"
     },
+    {
+      "name": "atlas_conf_dir_in_path",
+      "description": "Prepend the Atlas conf dir (/etc/atlas/conf) to the classpath of Storm and Falcon",
+      "min_version": "2.3.0.0",
+      "max_version": "2.4.99.99"
+    },
     {
       "name": "atlas_upgrade_support",
       "description": "Atlas supports express and rolling upgrades",

+ 11 - 3
ambari-server/src/test/python/stacks/2.3/ATLAS/test_metadata_server.py

@@ -83,8 +83,16 @@ class TestMetadataServer(RMFTestCase):
       self.assertResourceCalled('File', '/usr/hdp/current/atlas-server/server/webapp/atlas.war',
           content = StaticFile('/usr/hdp/current/atlas-server/server/webapp/atlas.war'),
       )
+      host_name = u"c6401.ambari.apache.org"
       app_props =  dict(self.getConfig()['configurations']['application-properties'])
-      app_props['atlas.server.bind.address'] = 'c6401.ambari.apache.org'
+      app_props['atlas.server.bind.address'] = host_name
+
+      metadata_protocol = "https" if app_props["atlas.enableTLS"] is True else "http"
+      metadata_port = app_props["atlas.server.https.port"] if metadata_protocol == "https" else app_props["atlas.server.http.port"]
+      app_props["atlas.rest.address"] = u'%s://%s:%s' % (metadata_protocol, host_name, metadata_port)
+      app_props["atlas.server.ids"] = "id1"
+      app_props["atlas.server.address.id1"] = u"%s:%s" % (host_name, metadata_port)
+      app_props["atlas.server.ha.enabled"] = "false"
 
       self.assertResourceCalled('File', '/etc/atlas/conf/atlas-log4j.xml',
                           content=InlineTemplate(
@@ -114,8 +122,8 @@ class TestMetadataServer(RMFTestCase):
       self.assertResourceCalled('PropertiesFile',
                                 '/etc/atlas/conf/application.properties',
                                 properties=app_props,
-                                owner='atlas',
-                                group='hadoop',
+                                owner=u'atlas',
+                                group=u'hadoop',
                                 mode=0644,
       )
       self.assertResourceCalled('Directory', '/var/log/ambari-logsearch-solr-client',

+ 13 - 10
ambari-server/src/test/python/stacks/2.3/configs/default.json

@@ -336,17 +336,20 @@
             "c6401.ambari.apache.org",
             "c6402.ambari.apache.org"
         ],
-      "knox_gateway_hosts": [
-        "jaimin-knox-1.c.pramod-thangali.internal"
-      ],
-      "kafka_broker_hosts": [
-        "c6401.ambari.apache.org"
-      ],
-      "logsearch_solr_hosts": [
-        "c6401.ambari.apache.org"
-      ],
-       "zookeeper_hosts": [
+        "knox_gateway_hosts": [
+          "jaimin-knox-1.c.pramod-thangali.internal"
+        ],
+        "kafka_broker_hosts": [
+          "c6401.ambari.apache.org"
+        ],
+        "logsearch_solr_hosts": [
+          "c6401.ambari.apache.org"
+        ],
+        "zookeeper_hosts": [
          "c6401.ambari.apache.org"
+        ],
+        "atlas_server_hosts": [
+          "c6401.ambari.apache.org"
         ]
 
 }

+ 3 - 0
ambari-server/src/test/python/stacks/2.3/configs/secure.json

@@ -338,6 +338,9 @@
     ],
     "zookeeper_hosts": [
       "c6401.ambari.apache.org"
+    ],
+    "atlas_server_hosts": [
+      "c6401.ambari.apache.org"
     ]
 
   }

+ 10 - 3
ambari-server/src/test/python/stacks/2.5/ATLAS/test_atlas_server.py

@@ -79,10 +79,17 @@ class TestAtlasServer(RMFTestCase):
     self.assertResourceCalled('File', '/usr/hdp/current/atlas-server/server/webapp/atlas.war',
                               content = StaticFile('/usr/hdp/current/atlas-server/server/webapp/atlas.war'),
     )
+    host_name = u"c6401.ambari.apache.org"
     app_props =  dict(self.getConfig()['configurations'][
                        'application-properties'])
     app_props['atlas.http.authentication.kerberos.name.rules'] = ' \\ \n'.join(app_props['atlas.http.authentication.kerberos.name.rules'].splitlines())
-    app_props['atlas.server.bind.address'] = 'c6401.ambari.apache.org'
+    app_props['atlas.server.bind.address'] = host_name
+    metadata_protocol = "https" if app_props["atlas.enableTLS"] is True else "http"
+    metadata_port = app_props["atlas.server.https.port"] if metadata_protocol == "https" else app_props["atlas.server.http.port"]
+    app_props["atlas.rest.address"] = u'%s://%s:%s' % (metadata_protocol, host_name, metadata_port)
+    app_props["atlas.server.ids"] = "id1"
+    app_props["atlas.server.address.id1"] = u"%s:%s" % (host_name, metadata_port)
+    app_props["atlas.server.ha.enabled"] = "false"
 
     self.assertResourceCalled('File', '/etc/atlas/conf/atlas-log4j.xml',
                           content=InlineTemplate(
@@ -112,8 +119,8 @@ class TestAtlasServer(RMFTestCase):
     self.assertResourceCalled('PropertiesFile',
                               '/etc/atlas/conf/atlas-application.properties',
                               properties=app_props,
-                              owner='atlas',
-                              group='hadoop',
+                              owner=u'atlas',
+                              group=u'hadoop',
                               mode=0644,
                               )
     self.assertResourceCalled('Directory', '/var/log/ambari-logsearch-solr-client',

+ 3 - 0
ambari-server/src/test/python/stacks/2.5/configs/default.json

@@ -383,6 +383,9 @@
     ],
     "ranger_tagsync_hosts": [
       "c6401.ambari.apache.org"
+    ],
+    "atlas_server_hosts": [
+      "c6401.ambari.apache.org"
     ]
 
   }