Browse Source

AMBARI-14853 - Atlas Integration: Support deploying latest Atlas(which depends on kafka) using Ambari (tbeerbower)

tbeerbower committed 9 years ago
parent
commit
1d9f1bb069

+ 35 - 1
ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/configuration/application-properties.xml

@@ -130,6 +130,41 @@
       DEFAULT</value>
     <description></description>
   </property>
+  <property>
+    <name>atlas.notification.embedded</name>
+    <value>false</value>
+    <description>Indicates whether or not the notification service should be embedded.</description>
+  </property>
+  <property>
+    <name>atlas.kafka.data</name>
+    <value>{{metadata_home}}/data/kafka</value>
+    <description>The Kafka data directory.</description>
+  </property>
+  <property>
+    <name>atlas.kafka.bootstrap.servers</name>
+    <value>{{kafka_bootstrap_servers}}</value>
+    <description>Comma separated list of Kafka broker endpoints in host:port form</description>
+  </property>
+  <property>
+    <name>atlas.kafka.zookeeper.connect</name>
+    <value>{{kafka_zookeeper_connect}}</value>
+    <description>Comma separated list of servers forming Zookeeper quorum used by Kafka.</description>
+  </property>
+  <property>
+    <name>atlas.kafka.hook.group.id</name>
+    <value>atlas</value>
+    <description>Kafka group id for the hook topic.</description>
+  </property>
+  <property>
+    <name>atlas.kafka.entities.group.id</name>
+    <value>entities</value>
+    <description>Kafka group id for the entity topic.</description>
+  </property>
+  <property>
+    <name>atlas.cluster.name</name>
+    <value>{{cluster_name}}</value>
+    <description>The cluster name.</description>
+  </property>
   <property>
     <name>atlas.server.http.port</name>
     <value>21000</value>
@@ -140,5 +175,4 @@
     <value>21443</value>
     <description></description>
   </property>
-
 </configuration>

+ 4 - 0
ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/metainfo.xml

@@ -92,6 +92,10 @@
         <timeout>300</timeout>
       </commandScript>
 
+      <requiredServices>
+        <service>KAFKA</service>
+      </requiredServices>
+
       <configuration-dependencies>
         <config-type>application-properties</config-type>
         <config-type>atlas-env</config-type>

+ 11 - 0
ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/params.py

@@ -28,6 +28,8 @@ import status_params
 # server configurations
 config = Script.get_config()
 
+cluster_name = config['clusterName']
+
 # security enabled
 security_enabled = status_params.security_enabled
 
@@ -112,3 +114,12 @@ if security_enabled:
     smoke_cmd = format('curl --negotiate -u : -b ~/cookiejar.txt -c ~/cookiejar.txt -s -o /dev/null -w "%{{http_code}}" http://{metadata_host}:{metadata_port}/')
 else:
     smoke_cmd = format('curl -s -o /dev/null -w "%{{http_code}}" http://{metadata_host}:{metadata_port}/')
+
+# kafka
+kafka_bootstrap_servers = ""
+kafka_broker_hosts = config['clusterHostInfo']['kafka_broker_hosts']
+if not len(kafka_broker_hosts) == 0:
+  kafka_broker_port = default("/configurations/kafka-broker/port", 6667)
+  kafka_bootstrap_servers = kafka_broker_hosts[0] + ":" + str(kafka_broker_port)
+
+kafka_zookeeper_connect = default("/configurations/kafka-broker/zookeeper.connect", None)

+ 2 - 0
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/configuration/hive-env.xml

@@ -226,6 +226,8 @@ else
 fi
 export METASTORE_PORT={{hive_metastore_port}}
 
+export HADOOP_CLASSPATH={{atlas_conf_dir}}:{{atlas_home_dir}}/hook/hive:${HADOOP_CLASSPATH}
+
     </value>
     <value-attributes>
       <type>content</type>

+ 2 - 0
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py

@@ -398,6 +398,8 @@ atlas_plugin_package = "atlas-metadata*-hive-plugin"
 atlas_ubuntu_plugin_package = "atlas-metadata.*-hive-plugin"
 
 if has_atlas:
+  atlas_home_dir = os.environ['METADATA_HOME_DIR'] if 'METADATA_HOME_DIR' in os.environ else '/usr/hdp/current/atlas-server'
+  atlas_conf_dir = os.environ['METADATA_CONF'] if 'METADATA_CONF' in os.environ else '/etc/atlas/conf'
   # client.properties
   atlas_client_props = {}
   auth_enabled = config['configurations']['application-properties'].get(

+ 2 - 0
ambari-server/src/main/resources/stacks/HDP/2.3/services/HIVE/configuration/hive-env.xml

@@ -69,6 +69,8 @@ fi
 
 export METASTORE_PORT={{hive_metastore_port}}
 
+export HADOOP_CLASSPATH={{atlas_conf_dir}}:{{atlas_home_dir}}/hook/hive:${HADOOP_CLASSPATH}
+
 {% if sqla_db_used or lib_dir_available %}
 export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:{{jdbc_libs_dir}}"
 export JAVA_LIBRARY_PATH="$JAVA_LIBRARY_PATH:{{jdbc_libs_dir}}"

+ 8 - 1
ambari-server/src/test/python/stacks/2.3/configs/default.json

@@ -164,6 +164,7 @@
         "log.retention.hours": "168"
       },
       "application-properties": {
+        "atlas.cluster.name" : "c2",
         "atlas.graph.storage.backend": "berkeleyje",
         "atlas.graph.storage.directory": "data/berkley",
         "atlas.graph.index.search.backend": "elasticsearch",
@@ -185,7 +186,13 @@
         "atlas.http.authentication.kerberos.principal": "HTTP/_HOST@EXAMPLE.COM",
         "atlas.http.authentication.kerberos.keytab": "/etc/security/keytabs/spnego.service.keytab",
         "atlas.http.authentication.kerberos.name.rules": "DEFAULT",
-        "atlas.server.http.port" : "21000"
+        "atlas.server.http.port" : "21000",
+        "atlas.notification.embedded" : false,
+        "atlas.kafka.bootstrap.servers" : "c6401.ambari.apache.org:6667",
+        "atlas.kafka.data" : "/usr/hdp/current/atlas-server/data/kafka",
+        "atlas.kafka.entities.group.id" : "entities",
+        "atlas.kafka.hook.group.id" : "atlas",
+        "atlas.kafka.zookeeper.connect" : "c6401.ambari.apache.org:2181"
       },
       "atlas-env": {
         "content": "# The java implementation to use. If JAVA_HOME is not found we expect java and jar to be in path\nexport JAVA_HOME={{java64_home}}\n# any additional java opts you want to set. This will apply to both client and server operations\nexport METADATA_OPTS={{metadata_opts}}\n# metadata configuration directory\nexport METADATA_CONF={{conf_dir}}\n# Where log files are stored. Defatult is logs directory under the base install location\nexport METADATA_LOG_DIR={{log_dir}}\n# additional classpath entries\nexport METADATACPPATH={{metadata_classpath}}\n# data dir\nexport METADATA_DATA_DIR={{data_dir}}\n# Where do you want to expand the war file. By Default it is in /server/webapp dir under the base install dir.\nexport METADATA_EXPANDED_WEBAPP_DIR={{expanded_war_dir}}",