
AMBARI-19161. Hadoop native library location need to be changed in mapred-site.xml for PPC (aonishuk)

Andrew Onishuk, 8 years ago
Parent
Commit
0c89587a9c
17 changed files with 64 additions and 12 deletions
  1. +37 -0  ambari-common/src/main/python/resource_management/libraries/functions/get_architecture.py
  2. +1 -1  ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/configuration/hadoop-env.xml
  3. +3 -0  ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py
  4. +1 -1  ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/configuration/oozie-env.xml
  5. +3 -0  ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py
  6. +1 -1  ambari-server/src/main/resources/common-services/OOZIE/4.2.0.2.3/configuration/oozie-env.xml
  7. +1 -1  ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/configuration/tez-site.xml
  8. +3 -0  ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params_linux.py
  9. +1 -1  ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/configuration-mapred/mapred-site.xml
  10. +3 -0  ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
  11. +3 -0  ambari-server/src/main/resources/common-services/YARN/3.0.0/package/scripts/params_linux.py
  12. +1 -1  ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/configuration/oozie-env.xml
  13. +1 -1  ambari-server/src/main/resources/stacks/HDP/2.2/services/OOZIE/configuration/oozie-env.xml
  14. +2 -2  ambari-server/src/main/resources/stacks/HDP/2.2/services/TEZ/configuration/tez-site.xml
  15. +1 -1  ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/configuration-mapred/mapred-site.xml
  16. +1 -1  ambari-server/src/main/resources/stacks/HDP/2.3/services/OOZIE/configuration/oozie-env.xml
  17. +1 -1  ambari-server/src/main/resources/stacks/HDP/3.0/services/YARN/configuration-mapred/mapred-site.xml

+ 37 - 0
ambari-common/src/main/python/resource_management/libraries/functions/get_architecture.py

@@ -0,0 +1,37 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+
+"""
+
+__all__ = ["get_architecture"]
+
+import platform
+from resource_management.libraries.functions.default import default
+
+def get_architecture():
+  architecture = default("/configurations/hadoop-env/architecture", None)
+
+  if architecture:
+    return architecture
+
+  if platform.processor() == 'powerpc' or platform.machine().startswith('ppc'):
+    return 'ppc64le'
+
+  return 'amd64'
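
For illustration only (not part of this commit): a minimal standalone sketch that mirrors the decision order of get_architecture() above without the Ambari resource_management dependency, and then shows how its result lands in the Linux-{{architecture}}-64 paths edited below. Ambari itself resolves that placeholder through its normal config and template machinery; the final print is plain string substitution standing in for that step.

#!/usr/bin/env python
# Standalone sketch: same decision order as get_architecture() above.
import platform

def detect_architecture(configured=None):
    # An explicit hadoop-env 'architecture' value, when present, wins
    # (get_architecture() reads it via default(); here it is a parameter).
    if configured:
        return configured
    # POWER hosts report 'powerpc' / 'ppc*'; everything else falls back to amd64.
    if platform.processor() == 'powerpc' or platform.machine().startswith('ppc'):
        return 'ppc64le'
    return 'amd64'

if __name__ == '__main__':
    arch = detect_architecture()
    # e.g. /usr/lib/hadoop/lib/native/Linux-amd64-64 on x86_64,
    #      /usr/lib/hadoop/lib/native/Linux-ppc64le-64 on POWER.
    print('/usr/lib/hadoop/lib/native/Linux-%s-64' % arch)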

+ 1 - 1
ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/configuration/hadoop-env.xml

@@ -367,7 +367,7 @@ fi
 export HADOOP_LIBEXEC_DIR={{hadoop_libexec_dir}}
 
 #Mostly required for hadoop 2.0
-export JAVA_LIBRARY_PATH=${JAVA_LIBRARY_PATH}:/usr/lib/hadoop/lib/native/Linux-amd64-64
+export JAVA_LIBRARY_PATH=${JAVA_LIBRARY_PATH}:/usr/lib/hadoop/lib/native/Linux-{{architecture}}-64
 
 {% if is_datanode_max_locked_memory_set %}
 # Fix temporary bug, when ulimit from conf files is not picked up, without full relogin.

+ 3 - 0
ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py

@@ -43,11 +43,14 @@ from resource_management.libraries.functions.format_jvm_option import format_jvm
 from resource_management.libraries.functions.get_lzo_packages import get_lzo_packages
 from resource_management.libraries.functions.hdfs_utils import is_https_enabled_in_hdfs
 from resource_management.libraries.functions import is_empty
+from resource_management.libraries.functions.get_architecture import get_architecture
 
 
 config = Script.get_config()
 tmp_dir = Script.get_tmp_dir()
 
+architecture = get_architecture()
+
 stack_name = status_params.stack_name
 stack_root = Script.get_stack_root()
 upgrade_direction = default("/commandParams/upgrade_direction", None)

+ 1 - 1
ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/configuration/oozie-env.xml

@@ -197,7 +197,7 @@ export OOZIE_ADMIN_PORT={{oozie_server_admin_port}}
 # The base URL for callback URLs to Oozie
 #
 # export OOZIE_BASE_URL="http://${OOZIE_HTTP_HOSTNAME}:${OOZIE_HTTP_PORT}/oozie"
-export JAVA_LIBRARY_PATH={{hadoop_lib_home}}/native/Linux-amd64-64
+export JAVA_LIBRARY_PATH={{hadoop_lib_home}}/native/Linux-{{architecture}}-64
    </value>
     <value-attributes>
       <type>content</type>

+ 3 - 0
ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py

@@ -33,6 +33,7 @@ from resource_management.libraries.script.script import Script
 
 from resource_management.libraries.functions.get_lzo_packages import get_lzo_packages
 from resource_management.libraries.functions.expect import expect
+from resource_management.libraries.functions.get_architecture import get_architecture
 
 from urlparse import urlparse
 
@@ -44,6 +45,8 @@ config = Script.get_config()
 tmp_dir = Script.get_tmp_dir()
 sudo = AMBARI_SUDO_BINARY
 
+architecture = get_architecture()
+
 
 # Needed since this writes out the Atlas Hive Hook config file.
 cluster_name = config['clusterName']

+ 1 - 1
ambari-server/src/main/resources/common-services/OOZIE/4.2.0.2.3/configuration/oozie-env.xml

@@ -88,7 +88,7 @@ export OOZIE_ADMIN_PORT={{oozie_server_admin_port}}
 # The base URL for callback URLs to Oozie
 #
 # export OOZIE_BASE_URL="http://${OOZIE_HTTP_HOSTNAME}:${OOZIE_HTTP_PORT}/oozie"
-export JAVA_LIBRARY_PATH={{hadoop_lib_home}}/native/Linux-amd64-64
+export JAVA_LIBRARY_PATH={{hadoop_lib_home}}/native/Linux-{{architecture}}-64
 
 # At least 1 minute of retry time to account for server downtime during
 # upgrade/downgrade

+ 1 - 1
ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/configuration/tez-site.xml

@@ -156,7 +156,7 @@
   </property>
   <property>
     <name>tez.am.env</name>
-    <value>LD_LIBRARY_PATH=/usr/lib/hadoop/lib/native:/usr/lib/hadoop/lib/native/Linux-amd64-64</value>
+    <value>LD_LIBRARY_PATH=/usr/lib/hadoop/lib/native:/usr/lib/hadoop/lib/native/Linux-{{architecture}}-64</value>
    <description>
        Additional execution environment entries for tez. This is not an additive property. You must preserve the original value if
        you want to have access to native libraries.

+ 3 - 0
ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params_linux.py

@@ -29,11 +29,14 @@ from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions import get_kinit_path
 from resource_management.libraries.functions.get_not_managed_resources import get_not_managed_resources
 from resource_management.libraries.script.script import Script
+from resource_management.libraries.functions.get_architecture import get_architecture
 
 # server configurations
 config = Script.get_config()
 tmp_dir = Script.get_tmp_dir()
 
+architecture = get_architecture()
+
 stack_name = default("/hostLevelParams/stack_name", None)
 stack_root = Script.get_stack_root()
 

+ 1 - 1
ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/configuration-mapred/mapred-site.xml

@@ -438,7 +438,7 @@
   </property>
   <property>
     <name>mapreduce.admin.user.env</name>
-    <value>LD_LIBRARY_PATH=/usr/lib/hadoop/lib/native:/usr/lib/hadoop/lib/native/Linux-amd64-64</value>
+    <value>LD_LIBRARY_PATH=/usr/lib/hadoop/lib/native:/usr/lib/hadoop/lib/native/Linux-{{architecture}}-64</value>
    <description>
      Additional execution environment entries for map and reduce task processes.
      This is not an additive property. You must preserve the original value if

+ 3 - 0
ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py

@@ -34,6 +34,7 @@ from resource_management.libraries.functions.version import format_stack_version
 from resource_management.libraries.functions.default import default
 from resource_management.libraries import functions
 from resource_management.libraries.functions import is_empty
+from resource_management.libraries.functions.get_architecture import get_architecture
 
 import status_params
 
@@ -55,6 +56,8 @@ YARN_SERVER_ROLE_DIRECTORY_MAP = {
 config = Script.get_config()
 tmp_dir = Script.get_tmp_dir()
 
+architecture = get_architecture()
+
 stack_name = status_params.stack_name
 stack_root = Script.get_stack_root()
 tarball_map = default("/configurations/cluster-env/tarball_map", None)

+ 3 - 0
ambari-server/src/main/resources/common-services/YARN/3.0.0/package/scripts/params_linux.py

@@ -34,6 +34,7 @@ from resource_management.libraries.functions.version import format_stack_version
 from resource_management.libraries.functions.default import default
 from resource_management.libraries import functions
 from resource_management.libraries.functions import is_empty
+from resource_management.libraries.functions.get_architecture import get_architecture
 
 import status_params
 
@@ -55,6 +56,8 @@ YARN_SERVER_ROLE_DIRECTORY_MAP = {
 config = Script.get_config()
 tmp_dir = Script.get_tmp_dir()
 
+architecture = get_architecture()
+
 stack_name = status_params.stack_name
 stack_root = Script.get_stack_root()
 tarball_map = default("/configurations/cluster-env/tarball_map", None)

+ 1 - 1
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/configuration/oozie-env.xml

@@ -88,7 +88,7 @@ export OOZIE_ADMIN_PORT={{oozie_server_admin_port}}
 # The base URL for callback URLs to Oozie
 #
 # export OOZIE_BASE_URL="http://${OOZIE_HTTP_HOSTNAME}:${OOZIE_HTTP_PORT}/oozie"
-export JAVA_LIBRARY_PATH={{hadoop_lib_home}}/native/Linux-amd64-64
+export JAVA_LIBRARY_PATH={{hadoop_lib_home}}/native/Linux-{{architecture}}-64
 
 # Set Hadoop-related properties
 export HADOOP_OPTS="-Dhdp.version=${HDP_VERSION} ${HADOOP_OPTS}"

+ 1 - 1
ambari-server/src/main/resources/stacks/HDP/2.2/services/OOZIE/configuration/oozie-env.xml

@@ -85,7 +85,7 @@ export OOZIE_ADMIN_PORT={{oozie_server_admin_port}}
 # The base URL for callback URLs to Oozie
 #
 # export OOZIE_BASE_URL="http://${OOZIE_HTTP_HOSTNAME}:${OOZIE_HTTP_PORT}/oozie"
-export JAVA_LIBRARY_PATH={{hadoop_lib_home}}/native/Linux-amd64-64
+export JAVA_LIBRARY_PATH={{hadoop_lib_home}}/native/Linux-{{architecture}}-64
 
 # At least 1 minute of retry time to account for server downtime during
 # upgrade/downgrade

+ 2 - 2
ambari-server/src/main/resources/stacks/HDP/2.2/services/TEZ/configuration/tez-site.xml

@@ -77,7 +77,7 @@
   </property>
   <property>
     <name>tez.am.launch.env</name>
-    <value>LD_LIBRARY_PATH=/usr/hdp/${hdp.version}/hadoop/lib/native:/usr/hdp/${hdp.version}/hadoop/lib/native/Linux-amd64-64</value>
+    <value>LD_LIBRARY_PATH=/usr/hdp/${hdp.version}/hadoop/lib/native:/usr/hdp/${hdp.version}/hadoop/lib/native/Linux-{{architecture}}-64</value>
    <description>
        Additional execution environment entries for tez. This is not an additive property. You must preserve the original value if
        you want to have access to native libraries.
@@ -119,7 +119,7 @@
   </property>
   <property>
     <name>tez.task.launch.env</name>
-    <value>LD_LIBRARY_PATH=/usr/hdp/${hdp.version}/hadoop/lib/native:/usr/hdp/${hdp.version}/hadoop/lib/native/Linux-amd64-64</value>
+    <value>LD_LIBRARY_PATH=/usr/hdp/${hdp.version}/hadoop/lib/native:/usr/hdp/${hdp.version}/hadoop/lib/native/Linux-{{architecture}}-64</value>
    <description>
      Additional execution environment entries for tez. This is not an additive property. You must preserve the original value if
      you want to have access to native libraries.

+ 1 - 1
ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/configuration-mapred/mapred-site.xml

@@ -20,7 +20,7 @@
 <configuration xmlns:xi="http://www.w3.org/2001/XInclude" supports_final="true">
   <property>
     <name>mapreduce.admin.user.env</name>
-    <value>LD_LIBRARY_PATH=/usr/hdp/${hdp.version}/hadoop/lib/native:/usr/hdp/${hdp.version}/hadoop/lib/native/Linux-amd64-64</value>
+    <value>LD_LIBRARY_PATH=/usr/hdp/${hdp.version}/hadoop/lib/native:/usr/hdp/${hdp.version}/hadoop/lib/native/Linux-{{architecture}}-64</value>
    <description>
      Additional execution environment entries for map and reduce task processes.
      This is not an additive property. You must preserve the original value if

+ 1 - 1
ambari-server/src/main/resources/stacks/HDP/2.3/services/OOZIE/configuration/oozie-env.xml

@@ -88,7 +88,7 @@ export OOZIE_ADMIN_PORT={{oozie_server_admin_port}}
 # The base URL for callback URLs to Oozie
 #
 # export OOZIE_BASE_URL="http://${OOZIE_HTTP_HOSTNAME}:${OOZIE_HTTP_PORT}/oozie"
-export JAVA_LIBRARY_PATH={{hadoop_lib_home}}/native/Linux-amd64-64
+export JAVA_LIBRARY_PATH={{hadoop_lib_home}}/native/Linux-{{architecture}}-64
 
 # At least 1 minute of retry time to account for server downtime during
 # upgrade/downgrade

+ 1 - 1
ambari-server/src/main/resources/stacks/HDP/3.0/services/YARN/configuration-mapred/mapred-site.xml

@@ -31,7 +31,7 @@
   <!-- These configs were inherited from HDP 2.2 -->
   <property>
     <name>mapreduce.admin.user.env</name>
-    <value>LD_LIBRARY_PATH=/usr/hdp/${hdp.version}/hadoop/lib/native:/usr/hdp/${hdp.version}/hadoop/lib/native/Linux-amd64-64</value>
+    <value>LD_LIBRARY_PATH=/usr/hdp/${hdp.version}/hadoop/lib/native:/usr/hdp/${hdp.version}/hadoop/lib/native/Linux-{{architecture}}-64</value>
    <description>
      Additional execution environment entries for map and reduce task processes.
      This is not an additive property. You must preserve the original value if
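
For illustration, a minimal standalone rendering sketch (assuming plain jinja2 rather than Ambari's own template pipeline): it shows how the {{architecture}} placeholder introduced across these config files resolves once the params_linux.py scripts define architecture.

# Illustrative sketch, not part of this commit: render the templated
# mapreduce.admin.user.env value for both supported architectures.
from jinja2 import Template

value = Template(
    "LD_LIBRARY_PATH=/usr/hdp/${hdp.version}/hadoop/lib/native:"
    "/usr/hdp/${hdp.version}/hadoop/lib/native/Linux-{{architecture}}-64"
)

# x86_64 hosts keep the previous default path; POWER hosts get the ppc64le
# path that AMBARI-19161 addresses.
print(value.render(architecture='amd64'))
print(value.render(architecture='ppc64le'))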