فهرست منبع

AMBARI-7814 Flume agent on Ambari uses the default Java on machine (dsen)

Dmytro Sen 10 سال پیش
والد
کامیت
d1e4a81cb2

+ 38 - 0
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/configuration/flume-env.xml

@@ -37,4 +37,42 @@
     <property-type>USER</property-type>
     <description>Flume User</description>
   </property>
+
+  <!-- flume-env.sh -->
+  <property>
+    <name>content</name>
+    <description>This is the jinja template for flume-env.sh file</description>
+    <value>
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# If this file is placed at FLUME_CONF_DIR/flume-env.sh, it will be sourced
+# during Flume startup.
+
+# Environment variables can be set here.
+
+export JAVA_HOME={{java_home}}
+
+# Give Flume more memory and pre-allocate, enable remote monitoring via JMX
+# export JAVA_OPTS="-Xms100m -Xmx2000m -Dcom.sun.management.jmxremote"
+
+# Note that the Flume conf directory is always included in the classpath.
+#FLUME_CLASSPATH=""
+
+# export HIVE_HOME=/usr/lib/hive
+# export HCAT_HOME=/usr/lib/hive-hcatalog
+    </value>
+  </property>
 </configuration>

+ 5 - 0
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/package/scripts/flume.py

@@ -33,6 +33,11 @@ def flume(action = None):
     Directory(params.flume_conf_dir, recursive=True)
     Directory(params.flume_log_dir, owner=params.flume_user)
 
+    File(format("{flume_conf_dir}/flume-env.sh"),
+         owner=params.flume_user,
+         content=InlineTemplate(params.flume_env_sh_template)
+    )
+
     flume_agents = {}
     if params.flume_conf_content is not None:
       flume_agents = build_flume_topology(params.flume_conf_content)

+ 2 - 0
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/package/scripts/params.py

@@ -58,6 +58,8 @@ else:
 targets = default('/commandParams/flume_handler', None)
 flume_command_targets = [] if targets is None else targets.split(',')
 
+flume_env_sh_template = config['configurations']['flume-env']['content']
+
 ganglia_server_hosts = default('/clusterHostInfo/ganglia_server_host', [])
 ganglia_server_host = None
 if 0 != len(ganglia_server_hosts):

+ 10 - 1
ambari-server/src/test/python/stacks/2.0.6/FLUME/test_flume.py

@@ -22,7 +22,6 @@ from mock.mock import MagicMock, call, patch
 from stacks.utils.RMFTestCase import *
 import resource_management.core.source
 
-@patch.object(resource_management.core.source, "InlineTemplate", new = MagicMock(return_value='InlineTemplateMock'))
 class TestFlumeHandler(RMFTestCase):
 
   def test_configure_default(self):
@@ -161,6 +160,11 @@ class TestFlumeHandler(RMFTestCase):
 
     self.assertResourceCalled('Directory', '/var/log/flume', owner = 'flume')
 
+    self.assertResourceCalled('File', "/etc/flume/conf/flume-env.sh",
+      owner="flume",
+      content=InlineTemplate(self.getConfig()['configurations']['flume-env']['content'])
+    )
+
     self.assertResourceCalled('Directory', '/etc/flume/conf/a1')
 
     self.assertResourceCalled('PropertiesFile', '/etc/flume/conf/a1/flume.conf',
@@ -184,6 +188,11 @@ class TestFlumeHandler(RMFTestCase):
 
     self.assertResourceCalled('Directory', '/var/log/flume', owner = 'flume')
 
+    self.assertResourceCalled('File', "/etc/flume/conf/flume-env.sh",
+         owner="flume",
+         content=InlineTemplate(self.getConfig()['configurations']['flume-env']['content'])
+    )
+
     top = build_flume(self.getConfig()['configurations']['flume-conf']['content'])
 
     # a1

+ 6 - 2
ambari-server/src/test/python/stacks/2.0.6/configs/default.json

@@ -512,8 +512,12 @@
         "tez-env": {
             "content": "\n# Tez specific configuration\nexport TEZ_CONF_DIR={{config_dir}}\n\n# Set HADOOP_HOME to point to a specific hadoop install directory\nexport HADOOP_HOME=${HADOOP_HOME:-{{hadoop_home}}}\n\n# The java implementation to use.\nexport JAVA_HOME={{java64_home}}", 
             "tez_user": "tez"
-        }, 
-        "storm-env": {
+        },
+        "flume-env": {
+             "content": "export JAVA_HOME={{java64_home}}",
+             "flume_user": "flume"
+        },
+      "storm-env": {
             "content": "\n#!/bin/bash\n\n# Set Storm specific environment variables here.\n\n# The java implementation to use.\nexport JAVA_HOME={{java_home}}\n\n# export STORM_CONF_DIR=\"\"", 
             "storm_log_dir": "/var/log/storm", 
             "storm_pid_dir": "/var/run/storm", 

+ 5 - 1
ambari-server/src/test/python/stacks/2.0.6/configs/default.non_gmetad_host.json

@@ -529,7 +529,11 @@
         }, 
         "pig-env": {
             "content": "\nJAVA_HOME={{java64_home}}\nHADOOP_HOME=${HADOOP_HOME:-{{hadoop_home}}}\n\nif [ -d \"/usr/lib/tez\" ]; then\n  PIG_OPTS=\"$PIG_OPTS -Dmapreduce.framework.name=yarn\"\nfi"
-        }, 
+        },
+        "flume-env": {
+          "content": "export JAVA_HOME={{java64_home}}",
+          "flume_user": "flume"
+        },
         "sqoop-env": {
             "content": "\n# Set Hadoop-specific environment variables here.\n\n#Set path to where bin/hadoop is available\n#Set path to where bin/hadoop is available\nexport HADOOP_HOME=${HADOOP_HOME:-/usr/lib/hadoop}\n\n#set the path to where bin/hbase is available\nexport HBASE_HOME=${HBASE_HOME:-/usr/lib/hbase}\n\n#Set the path to where bin/hive is available\nexport HIVE_HOME=${HIVE_HOME:-/usr/lib/hive}\n\n#Set the path for where zookeper config dir is\nexport ZOOCFGDIR=${ZOOCFGDIR:-/etc/zookeeper/conf}\n\n# add libthrift in hive to sqoop class path first so hive imports work\nexport SQOOP_USER_CLASSPATH=\"`ls ${HIVE_HOME}/lib/libthrift-*.jar 2> /dev/null`:${SQOOP_USER_CLASSPATH}\"",
             "sqoop_user": "sqoop"

+ 4 - 0
ambari-server/src/test/python/stacks/2.0.6/configs/flume_target.json

@@ -485,6 +485,10 @@
             "yarn.resourcemanager.admin.address": "c6402.ambari.apache.org:8141", 
             "yarn.nodemanager.aux-services.mapreduce_shuffle.class": "org.apache.hadoop.mapred.ShuffleHandler"
         },
+        "flume-env": {
+          "content": "export JAVA_HOME={{java64_home}}",
+          "flume_user": "flume"
+        },
         "tez-site": {
             "tez.am.log.level": "WARN",
             "tez.lib.uris": "hdfs:///apps/tez/,hdfs:///apps/tez/lib/",

+ 4 - 0
ambari-server/src/test/python/stacks/2.0.6/configs/secured.json

@@ -522,6 +522,10 @@
             "hbase_log_dir": "/var/log/hbase",
             "hbase_user_keytab": "/etc/security/keytabs/hbase.headless.keytab"
         },
+        "flume-env": {
+            "content": "export JAVA_HOME={{java64_home}}",
+            "flume_user": "flume"
+        },
         "ganglia-env": {
             "gmond_user": "nobody", 
             "ganglia_runtime_dir": "/var/run/ganglia/hdp",