Revert "AMBARI-15612: Add Livy to HDP 2.5 as slave component of Spark ( Jeff Zhang via jluniya )"

This reverts commit 80d8d7eabdabb3f3b96c72ec0a953869cb4aec3b.
Jayush Luniya committed 9 years ago
parent
commit abfb30ea16
18 changed files with 5 additions and 901 deletions
  1. +0 -1    ambari-common/src/main/python/resource_management/libraries/functions/constants.py
  2. +0 -5    ambari-common/src/main/python/resource_management/libraries/functions/stack_features.py
  3. +0 -68   ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/livy_server.py
  4. +0 -46   ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/livy_service.py
  5. +2 -41   ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/params.py
  6. +0 -200  ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/params.py.orig
  7. +1 -11   ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/service_check.py
  8. +0 -79   ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/setup_livy.py
  9. +1 -7    ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/status_params.py
 10. +0 -5    ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_features.json
 11. +0 -7    ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
 12. +1 -3    ambari-server/src/main/resources/stacks/HDP/2.5/role_command_order.json
 13. +0 -59   ambari-server/src/main/resources/stacks/HDP/2.5/services/SPARK/configuration/livy-conf.xml
 14. +0 -92   ambari-server/src/main/resources/stacks/HDP/2.5/services/SPARK/configuration/livy-env.xml
 15. +0 -41   ambari-server/src/main/resources/stacks/HDP/2.5/services/SPARK/configuration/livy-log4j-properties.xml
 16. +0 -40   ambari-server/src/main/resources/stacks/HDP/2.5/services/SPARK/configuration/livy-spark-blacklist.xml
 17. +0 -114  ambari-server/src/main/resources/stacks/HDP/2.5/services/SPARK/kerberos.json
 18. +0 -82   ambari-server/src/main/resources/stacks/HDP/2.5/services/SPARK/metainfo.xml

+ 0 - 1
ambari-common/src/main/python/resource_management/libraries/functions/constants.py

@@ -55,7 +55,6 @@ class StackFeature:
   COPY_TARBALL_TO_HDFS = "copy_tarball_to_hdfs"
   SPARK_16PLUS = "spark_16plus"
   SPARK_THRIFTSERVER = "spark_thriftserver"
-  SPARK_LIVY = "spark_livy"
   STORM_KERBEROS = "storm_kerberos"
   STORM_AMS = "storm_ams"
   CREATE_KAFKA_BROKER_ID = "create_kafka_broker_id"

+ 0 - 5
ambari-common/src/main/python/resource_management/libraries/functions/stack_features.py

@@ -248,11 +248,6 @@ _DEFAULT_STACK_FEATURES = {
       "name": "hbase_home_directory",
       "description": "Hbase home directory in HDFS needed for HBASE backup",
       "min_version": "2.5.0.0"
-    },
-    {
-      "name": "spark_livy",
-      "description": "Livy as slave component of spark",
-      "min_version": "2.5.0.0"
     }
   ]
 }
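
The `spark_livy` flag removed here (together with its `StackFeature.SPARK_LIVY` key in the previous diff) was consumed through `check_stack_feature`, as the `params.py` diff below shows. A self-contained sketch of the `min_version` gating that such an entry drives (a simplified stand-in, not the actual resource_management implementation):

    # Simplified sketch of stack-feature gating; the real check_stack_feature
    # reads the feature catalog (stack_features.json) from cluster configuration.
    _FEATURES = {
        "spark_livy": {"min_version": "2.5.0.0"},  # entry deleted by this revert
    }

    def _version_tuple(version):
        return tuple(int(part) for part in version.split("."))

    def check_stack_feature(feature_name, stack_version):
        """True when stack_version is at or above the feature's min_version."""
        entry = _FEATURES.get(feature_name)
        if entry is None:
            return False
        return _version_tuple(stack_version) >= _version_tuple(entry["min_version"])

    print(check_stack_feature("spark_livy", "2.5.0.0"))  # True (pre-revert)
    print(check_stack_feature("spark_livy", "2.4.2.0"))  # False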

+ 0 - 68
ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/livy_server.py

@@ -1,68 +0,0 @@
-#!/usr/bin/python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions.check_process_status import check_process_status
-
-from livy_service import livy_service
-from setup_livy import setup_livy
-
-class LivyServer(Script):
-
-  def install(self, env):
-    import params
-    env.set_params(params)
-
-    self.install_packages(env)
-
-  def configure(self, env, upgrade_type=None):
-    import params
-    env.set_params(params)
-
-    setup_livy(env, 'server', upgrade_type=upgrade_type, action = 'config')
-
-  def start(self, env, upgrade_type=None):
-    import params
-    env.set_params(params)
-
-    self.configure(env)
-    livy_service('server', upgrade_type=upgrade_type, action='start')
-
-  def stop(self, env, upgrade_type=None):
-    import params
-    env.set_params(params)
-
-    livy_service('server', upgrade_type=upgrade_type, action='stop')
-
-  def status(self, env):
-    import status_params
-    env.set_params(status_params)
-
-    check_process_status(status_params.livy_server_pid_file)
-
-
-  def get_component_name(self):
-    return "livy-server"
-
-  def pre_upgrade_restart(self, env, upgrade_type=None):
-    pass
-
-if __name__ == "__main__":
-  LivyServer().execute()

+ 0 - 46
ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/livy_service.py

@@ -1,46 +0,0 @@
-#!/usr/bin/env python
-
-'''
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-'''
-
-from resource_management.libraries.functions import format
-from resource_management.core.resources.system import File, Execute
-import threading
-
-def livy_service(name, upgrade_type=None, action=None):
-  import params
-
-  if action == 'start':
-    livyserver_no_op_test = format(
-      'ls {livy_server_pid_file} >/dev/null 2>&1 && ps -p `cat {livy_server_pid_file}` >/dev/null 2>&1')
-    Execute(format('{livy_server_start}'),
-            user=params.livy_user,
-            environment={'JAVA_HOME': params.java_home},
-            not_if=livyserver_no_op_test)
-
-  elif action == 'stop':
-    Execute(format('{livy_server_stop}'),
-            user=params.livy_user,
-            environment={'JAVA_HOME': params.java_home}
-            )
-    File(params.livy_server_pid_file,
-         action="delete"
-         )
-
-
-
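
The start branch above relied on `not_if`: resource_management skips the Execute when the guard command succeeds, that is, when the pid file exists and names a live process, which makes repeated start commands idempotent. A self-contained Python sketch of the same check (a hypothetical helper, not Ambari code):

    import os

    def livy_server_is_running(pid_file):
        """Python equivalent of `ls {pid_file} && ps -p $(cat {pid_file})`."""
        try:
            with open(pid_file) as handle:
                pid = int(handle.read().strip())
        except (IOError, ValueError):
            return False  # no pid file, or unparseable contents
        try:
            os.kill(pid, 0)  # signal 0 checks existence without sending anything
        except OSError:
            return False  # stale pid file: the process is gone
        return True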

+ 2 - 41
ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/params.py

@@ -28,6 +28,7 @@ import resource_management.libraries.functions
 from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions import format
+from resource_management.libraries.functions.get_stack_version import get_stack_version
 from resource_management.libraries.functions.version import format_stack_version
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions import get_kinit_path
@@ -40,9 +41,7 @@ from resource_management.libraries.script.script import Script
 SERVER_ROLE_DIRECTORY_MAP = {
   'SPARK_JOBHISTORYSERVER' : 'spark-historyserver',
   'SPARK_CLIENT' : 'spark-client',
-  'SPARK_THRIFTSERVER' : 'spark-thriftserver',
-  'LIVY_SERVER' : 'livy-server',
-  'LIVY_CLIENT' : 'livy-client'
+  'SPARK_THRIFTSERVER' : 'spark-thriftserver'
 }
 
 component_directory = Script.get_component_from_role(SERVER_ROLE_DIRECTORY_MAP, "SPARK_CLIENT")
@@ -181,44 +180,6 @@ hdfs_site = config['configurations']['hdfs-site']
 
 dfs_type = default("/commandParams/dfs_type", "")
 
-# livy related config
-
-# livy is only supported from HDP 2.5
-has_livyserver = False
-
-if stack_version_formatted and check_stack_feature(StackFeature.SPARK_LIVY, stack_version_formatted):
-  livy_component_directory = Script.get_component_from_role(SERVER_ROLE_DIRECTORY_MAP, "LIVY_SERVER")
-  livy_conf = format("{stack_root}/current/{livy_component_directory}/conf")
-  livy_log_dir = config['configurations']['livy-env']['livy_log_dir']
-  livy_pid_dir = status_params.livy_pid_dir
-  livy_home = format("{stack_root}/current/{livy_component_directory}")
-  livy_user = status_params.livy_user
-  livy_group = status_params.livy_group
-  user_group = status_params.user_group
-  livy_hdfs_user_dir = format("/user/{livy_user}")
-  livy_server_pid_file = status_params.livy_server_pid_file
-
-  livy_server_start = format("{livy_home}/bin/livy-server start")
-  livy_server_stop = format("{livy_home}/bin/livy-server stop")
-  livy_logs_dir = format("{livy_home}/logs")
-
-  livy_env_sh = config['configurations']['livy-env']['content']
-  livy_log4j_properties = config['configurations']['livy-log4j-properties']['content']
-  livy_spark_blacklist_properties = config['configurations']['livy-spark-blacklist']['content']
-
-  livy_kerberos_keytab =  config['configurations']['livy-conf']['livy.server.kerberos.keytab']
-  livy_kerberos_principal = config['configurations']['livy-conf']['livy.server.kerberos.principal']
-
-  livy_livyserver_hosts = default("/clusterHostInfo/livy_server_hosts", [])
-
-  if len(livy_livyserver_hosts) > 0:
-    livy_livyserver_host = livy_livyserver_hosts[0]
-    has_livyserver = True
-
-  livy_livyserver_port = default('configurations/livy-conf/livy.server.port',8998)
-
-
-
 import functools
 #create partial functions with common arguments for every HdfsResource call
 #to create/delete hdfs directory/file/copyfromlocal we need to call params.HdfsResource in code

+ 0 - 200
ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/params.py.orig

@@ -1,200 +0,0 @@
-#!/usr/bin/python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-
-import status_params
-from resource_management.libraries.functions.stack_features import check_stack_feature
-from resource_management.libraries.functions import StackFeature
-from setup_spark import *
-
-import resource_management.libraries.functions
-from resource_management.libraries.functions import conf_select
-from resource_management.libraries.functions import stack_select
-from resource_management.libraries.functions import format
-from resource_management.libraries.functions.get_stack_version import get_stack_version
-from resource_management.libraries.functions.version import format_stack_version
-from resource_management.libraries.functions.default import default
-from resource_management.libraries.functions import get_kinit_path
-from resource_management.libraries.functions.get_not_managed_resources import get_not_managed_resources
-
-from resource_management.libraries.script.script import Script
-
-# a map of the Ambari role to the component name
-# for use with <stack-root>/current/<component>
-SERVER_ROLE_DIRECTORY_MAP = {
-  'SPARK_JOBHISTORYSERVER' : 'spark-historyserver',
-  'SPARK_CLIENT' : 'spark-client',
-  'SPARK_THRIFTSERVER' : 'spark-thriftserver'
-}
-
-component_directory = Script.get_component_from_role(SERVER_ROLE_DIRECTORY_MAP, "SPARK_CLIENT")
-
-config = Script.get_config()
-tmp_dir = Script.get_tmp_dir()
-
-stack_name = status_params.stack_name
-stack_root = Script.get_stack_root()
-stack_version_unformatted = config['hostLevelParams']['stack_version']
-stack_version_formatted = format_stack_version(stack_version_unformatted)
-host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
-
-# New Cluster Stack Version that is defined during the RESTART of a Stack Upgrade
-version = default("/commandParams/version", None)
-
-spark_conf = '/etc/spark/conf'
-hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
-hadoop_bin_dir = stack_select.get_hadoop_dir("bin")
-
-if stack_version_formatted and check_stack_feature(StackFeature.ROLLING_UPGRADE, stack_version_formatted):
-  hadoop_home = stack_select.get_hadoop_dir("home")
-  spark_conf = format("{stack_root}/current/{component_directory}/conf")
-  spark_log_dir = config['configurations']['spark-env']['spark_log_dir']
-  spark_daemon_memory = config['configurations']['spark-env']['spark_daemon_memory']
-  spark_pid_dir = status_params.spark_pid_dir
-  spark_home = format("{stack_root}/current/{component_directory}")
-
-spark_thrift_server_conf_file = spark_conf + "/spark-thrift-sparkconf.conf"
-java_home = config['hostLevelParams']['java_home']
-
-hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
-hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
-hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
-user_group = config['configurations']['cluster-env']['user_group']
-
-spark_user = status_params.spark_user
-hive_user = status_params.hive_user
-spark_group = status_params.spark_group
-user_group = status_params.user_group
-spark_hdfs_user_dir = format("/user/{spark_user}")
-spark_history_dir = default('/configurations/spark-defaults/spark.history.fs.logDirectory', "hdfs:///spark-history")
-
-spark_history_server_pid_file = status_params.spark_history_server_pid_file
-spark_thrift_server_pid_file = status_params.spark_thrift_server_pid_file
-
-spark_history_server_start = format("{spark_home}/sbin/start-history-server.sh")
-spark_history_server_stop = format("{spark_home}/sbin/stop-history-server.sh")
-
-spark_thrift_server_start = format("{spark_home}/sbin/start-thriftserver.sh")
-spark_thrift_server_stop = format("{spark_home}/sbin/stop-thriftserver.sh")
-spark_logs_dir = format("{spark_home}/logs")
-spark_hadoop_lib_native = format("{stack_root}/current/hadoop-client/lib/native")
-
-spark_submit_cmd = format("{spark_home}/bin/spark-submit")
-spark_smoke_example = "org.apache.spark.examples.SparkPi"
-spark_service_check_cmd = format(
-  "{spark_submit_cmd} --class {spark_smoke_example}  --master yarn-cluster  --num-executors 1 --driver-memory 256m  --executor-memory 256m   --executor-cores 1  {spark_home}/lib/spark-examples*.jar 1")
-
-spark_jobhistoryserver_hosts = default("/clusterHostInfo/spark_jobhistoryserver_hosts", [])
-
-if len(spark_jobhistoryserver_hosts) > 0:
-  spark_history_server_host = spark_jobhistoryserver_hosts[0]
-else:
-  spark_history_server_host = "localhost"
-
-# spark-defaults params
-spark_yarn_historyServer_address = default(spark_history_server_host, "localhost")
-
-spark_history_ui_port = config['configurations']['spark-defaults']['spark.history.ui.port']
-
-spark_env_sh = config['configurations']['spark-env']['content']
-spark_log4j_properties = config['configurations']['spark-log4j-properties']['content']
-spark_metrics_properties = config['configurations']['spark-metrics-properties']['content']
-
-hive_server_host = default("/clusterHostInfo/hive_server_host", [])
-is_hive_installed = not len(hive_server_host) == 0
-
-security_enabled = config['configurations']['cluster-env']['security_enabled']
-kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
-spark_kerberos_keytab =  config['configurations']['spark-defaults']['spark.history.kerberos.keytab']
-spark_kerberos_principal =  config['configurations']['spark-defaults']['spark.history.kerberos.principal']
-
-spark_thriftserver_hosts = default("/clusterHostInfo/spark_thriftserver_hosts", [])
-has_spark_thriftserver = not len(spark_thriftserver_hosts) == 0
-
-# hive-site params
-spark_hive_properties = {
-  'hive.metastore.uris': config['configurations']['hive-site']['hive.metastore.uris']
-}
-
-# security settings
-if security_enabled:
-  spark_principal = spark_kerberos_principal.replace('_HOST',spark_history_server_host.lower())
-
-  if is_hive_installed:
-    spark_hive_properties.update({
-      'hive.metastore.sasl.enabled': str(config['configurations']['hive-site']['hive.metastore.sasl.enabled']).lower(),
-      'hive.metastore.kerberos.keytab.file': config['configurations']['hive-site']['hive.metastore.kerberos.keytab.file'],
-      'hive.server2.authentication.spnego.principal': config['configurations']['hive-site']['hive.server2.authentication.spnego.principal'],
-      'hive.server2.authentication.spnego.keytab': config['configurations']['hive-site']['hive.server2.authentication.spnego.keytab'],
-      'hive.metastore.kerberos.principal': config['configurations']['hive-site']['hive.metastore.kerberos.principal'],
-      'hive.server2.authentication.kerberos.principal': config['configurations']['hive-site']['hive.server2.authentication.kerberos.principal'],
-      'hive.server2.authentication.kerberos.keytab': config['configurations']['hive-site']['hive.server2.authentication.kerberos.keytab'],
-      'hive.server2.authentication': config['configurations']['hive-site']['hive.server2.authentication'],
-    })
-
-    hive_kerberos_keytab = config['configurations']['hive-site']['hive.server2.authentication.kerberos.keytab']
-    hive_kerberos_principal = config['configurations']['hive-site']['hive.server2.authentication.kerberos.principal']
-
-# thrift server support - available on HDP 2.3 or higher
-spark_thrift_sparkconf = None
-spark_thrift_cmd_opts_properties = ''
-spark_thrift_fairscheduler_content = None
-spark_thrift_master = "yarn-client"
-if 'nm_hosts' in config['clusterHostInfo'] and len(config['clusterHostInfo']['nm_hosts']) == 1:
-  # use local mode when there's only one nodemanager
-  spark_thrift_master = "local[4]"
-
-if has_spark_thriftserver and 'spark-thrift-sparkconf' in config['configurations']:
-  spark_thrift_sparkconf = config['configurations']['spark-thrift-sparkconf']
-  spark_thrift_cmd_opts_properties = config['configurations']['spark-env']['spark_thrift_cmd_opts']
-  if is_hive_installed:
-    # update default metastore client properties (async wait for metastore component) it is useful in case of
-    # blueprint provisioning when hive-metastore and spark-thriftserver is not on the same host.
-    spark_hive_properties.update({
-      'hive.metastore.client.socket.timeout' : config['configurations']['hive-site']['hive.metastore.client.socket.timeout']
-    })
-    spark_hive_properties.update(config['configurations']['spark-hive-site-override'])
-
-  if 'spark-thrift-fairscheduler' in config['configurations'] and 'fairscheduler_content' in config['configurations']['spark-thrift-fairscheduler']:
-    spark_thrift_fairscheduler_content = config['configurations']['spark-thrift-fairscheduler']['fairscheduler_content']
-
-default_fs = config['configurations']['core-site']['fs.defaultFS']
-hdfs_site = config['configurations']['hdfs-site']
-
-dfs_type = default("/commandParams/dfs_type", "")
-
-import functools
-#create partial functions with common arguments for every HdfsResource call
-#to create/delete hdfs directory/file/copyfromlocal we need to call params.HdfsResource in code
-HdfsResource = functools.partial(
-  HdfsResource,
-  user=hdfs_user,
-  hdfs_resource_ignore_file = "/var/lib/ambari-agent/data/.hdfs_resource_ignore",
-  security_enabled = security_enabled,
-  keytab = hdfs_user_keytab,
-  kinit_path_local = kinit_path_local,
-  hadoop_bin_dir = hadoop_bin_dir,
-  hadoop_conf_dir = hadoop_conf_dir,
-  principal_name = hdfs_principal_name,
-  hdfs_site = hdfs_site,
-  default_fs = default_fs,
-  immutable_paths = get_not_managed_resources(),
-  dfs_type = dfs_type
- )

+ 1 - 11
ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/service_check.py

@@ -32,22 +32,12 @@ class SparkServiceCheck(Script):
     if params.security_enabled:
       spark_kinit_cmd = format("{kinit_path_local} -kt {spark_kerberos_keytab} {spark_principal}; ")
       Execute(spark_kinit_cmd, user=params.spark_user)
-      if (params.has_livyserver):
-        livy_kinit_cmd = format("{kinit_path_local} -kt {livy_kerberos_keytab} {livy_kerberos_principal}; ")
-        Execute(livy_kinit_cmd, user=params.livy_user)
 
     Execute(format("curl -s -o /dev/null -w'%{{http_code}}' --negotiate -u: -k http://{spark_history_server_host}:{spark_history_ui_port} | grep 200"),
-      tries=5,
+      tries = 10,
       try_sleep=3,
       logoutput=True
     )
-    if params.has_livyserver and params.livy_livyserver_host != "localhost" and params.livy_livyserver_host != "0.0.0.0":
-      Execute(format("curl -s -o /dev/null -w'%{{http_code}}' --negotiate -u: -k http://{livy_livyserver_host}:{livy_livyserver_port}/sessions | grep 200"),
-              tries=5,
-              try_sleep=3,
-              logoutput=True,
-              user=params.livy_user
-              )
 
 if __name__ == "__main__":
   SparkServiceCheck().execute()
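
Note that the revert also restores the history-server probe's retry count from 5 back to 10. Execute reruns the curl-and-grep pipeline up to `tries` times, sleeping `try_sleep` seconds between attempts; a rough self-contained equivalent of that polling loop (illustrative only, written for the Python 2 these scripts use):

    import time
    import urllib2  # these Ambari scripts are Python 2

    def wait_for_http_200(url, tries=10, try_sleep=3):
        """Poll url until it answers HTTP 200, like Execute(tries=, try_sleep=)."""
        for attempt in range(tries):
            try:
                if urllib2.urlopen(url).getcode() == 200:
                    return True
            except Exception:
                pass  # connection refused or non-2xx response; retry below
            if attempt < tries - 1:
                time.sleep(try_sleep)
        return False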

+ 0 - 79
ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/setup_livy.py

@@ -1,79 +0,0 @@
-#!/usr/bin/python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-import os
-from resource_management import Directory, File, PropertiesFile, InlineTemplate, format
-
-
-def setup_livy(env, type, upgrade_type = None, action = None):
-  import params
-
-  Directory([params.livy_pid_dir, params.livy_log_dir],
-            owner=params.livy_user,
-            group=params.user_group,
-            mode=0775,
-            create_parents = True
-  )
-  if type == 'server' and action == 'config':
-    params.HdfsResource(params.livy_hdfs_user_dir,
-                       type="directory",
-                       action="create_on_execute",
-                       owner=params.livy_user,
-                       mode=0775
-    )
-    params.HdfsResource(None, action="execute")
-
-  # create livy-env.sh in etc/conf dir
-  File(os.path.join(params.livy_conf, 'livy-env.sh'),
-       owner=params.livy_user,
-       group=params.livy_group,
-       content=InlineTemplate(params.livy_env_sh),
-       mode=0644,
-       )
-
-  # create livy.conf in etc/conf dir
-  PropertiesFile(format("{livy_conf}/livy.conf"),
-    properties = params.config['configurations']['livy-conf'],
-    key_value_delimiter = " ",
-    owner=params.livy_user,
-    group=params.livy_group,
-  )
-
-  # create log4j.properties in etc/conf dir
-  File(os.path.join(params.livy_conf, 'log4j.properties'),
-       owner=params.livy_user,
-       group=params.livy_group,
-       content=params.livy_log4j_properties,
-       mode=0644,
-  )
-
-  # create spark-blacklist.properties in etc/conf dir
-  File(os.path.join(params.livy_conf, 'spark-blacklist.properties'),
-       owner=params.livy_user,
-       group=params.livy_group,
-       content=params.livy_spark_blacklist_properties,
-       mode=0644,
-       )
-
-  Directory(params.livy_logs_dir,
-       owner=params.livy_user,
-       group=params.livy_group,
-       mode=0755,
-  )
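
Among the removed resources, livy.conf was rendered through PropertiesFile with key_value_delimiter set to a single space, so entries come out as `key value` lines rather than the usual `key=value`. A minimal sketch of that rendering effect (a hypothetical helper; the sorted key order here is an assumption, not something the diff shows):

    def render_properties(props, delimiter=" "):
        """Render a dict the way PropertiesFile(key_value_delimiter=" ") would."""
        return "\n".join("%s%s%s" % (key, delimiter, value)
                         for key, value in sorted(props.items()))

    print(render_properties({"livy.server.port": "8998",
                             "livy.impersonation.enabled": "true"}))
    # livy.impersonation.enabled true
    # livy.server.port 8998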

+ 1 - 7
ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/status_params.py

@@ -36,10 +36,4 @@ else:
 spark_pid_dir = config['configurations']['spark-env']['spark_pid_dir']
 spark_history_server_pid_file = format("{spark_pid_dir}/spark-{spark_user}-org.apache.spark.deploy.history.HistoryServer-1.pid")
 spark_thrift_server_pid_file = format("{spark_pid_dir}/spark-{hive_user}-org.apache.spark.sql.hive.thriftserver.HiveThriftServer2-1.pid")
-stack_name = default("/hostLevelParams/stack_name", None)
-
-if "livy-env" in config['configurations']:
-  livy_user = config['configurations']['livy-env']['livy_user']
-  livy_group = config['configurations']['livy-env']['livy_group']
-  livy_pid_dir = config['configurations']['livy-env']['livy_pid_dir']
-  livy_server_pid_file = format("{livy_pid_dir}/livy-{livy_user}-server.pid")
+stack_name = default("/hostLevelParams/stack_name", None)

+ 0 - 5
ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_features.json

@@ -230,11 +230,6 @@
       "name": "hbase_home_directory",
       "description": "Hbase home directory in HDFS needed for HBASE backup",
       "min_version": "2.5.0.0"
-    },
-    {
-      "name": "spark_livy",
-      "description": "Livy as slave component of spark",
-      "min_version": "2.5.0.0"
     }
   ]
 }

+ 0 - 7
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py

@@ -303,13 +303,6 @@ class HDP206StackAdvisor(DefaultStackAdvisor):
         if not falconUser in users and falconUser is not None:
           users[falconUser] = {"propertyHosts" : "*","propertyGroups" : "*", "config" : "falcon-env", "propertyName" : "falcon_user"}
 
-    if "SPARK" in servicesList:
-      livyUser = None
-      if "livy-env" in services["configurations"] and "livy_user" in services["configurations"]["livy-env"]["properties"]:
-        livyUser = services["configurations"]["livy-env"]["properties"]["livy_user"]
-        if not livyUser in users and livyUser is not None:
-          users[livyUser] = {"propertyHosts" : "*","propertyGroups" : "*", "config" : "livy-env", "propertyName" : "livy_user"}
-
     putCoreSiteProperty = self.putProperty(configurations, "core-site", services)
     putCoreSitePropertyAttribute = self.putPropertyAttribute(configurations, "core-site")
 

+ 1 - 3
ambari-server/src/main/resources/stacks/HDP/2.5/role_command_order.json

@@ -8,8 +8,6 @@
     "HIVE_SERVER_INTERACTIVE-START": ["NODEMANAGER-START", "MYSQL_SERVER-START"],
     "HIVE_SERVER_INTERACTIVE-RESTART": ["NODEMANAGER-RESTART", "MYSQL_SERVER-RESTART"],
     "HIVE_SERVICE_CHECK-SERVICE_CHECK": ["HIVE_SERVER-START", "HIVE_METASTORE-START", "WEBHCAT_SERVER-START", "HIVE_SERVER_INTERACTIVE-START"],
-    "RANGER_ADMIN-START": ["ZOOKEEPER_SERVER-START", "LOGSEARCH_SOLR-START"],
-    "LIVY_SERVER-START" : ["NAMENODE-START", "DATANODE-START"],
-    "SPARK_SERVICE_CHECK-SERVICE_CHECK" : ["SPARK_JOBHISTORYSERVER-START", "APP_TIMELINE_SERVER-START","LIVY_SERVER-START"]
+    "RANGER_ADMIN-START": ["ZOOKEEPER_SERVER-START", "LOGSEARCH_SOLR-START"]
   }
 }

+ 0 - 59
ambari-server/src/main/resources/stacks/HDP/2.5/services/SPARK/configuration/livy-conf.xml

@@ -1,59 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
--->
-
-<configuration supports_final="true">
-
-    <property>
-        <name>livy.environment</name>
-        <value>production</value>
-        <description>
-            Specifies Livy's environment. May either be "production" or "development". In "development"
-            mode, Livy will enable debugging options, such as reporting possible routes on a 404.
-            defaults to development
-        </description>
-    </property>
-
-    <property>
-        <name>livy.server.port</name>
-        <value>8998</value>
-        <description>
-            What port to start the server on. Defaults to 8998.
-        </description>
-    </property>
-
-    <property>
-        <name>livy.server.session.timeout</name>
-        <value>3600000</value>
-        <description>
-            Time in milliseconds on how long Livy will wait before timing out an idle session.
-            Default is one hour.
-        </description>
-    </property>
-
-    <property>
-        <name>livy.impersonation.enabled</name>
-        <value>true</value>
-        <description>
-            If livy should use proxy users when submitting a job.
-        </description>
-    </property>
-
-</configuration>

+ 0 - 92
ambari-server/src/main/resources/stacks/HDP/2.5/services/SPARK/configuration/livy-env.xml

@@ -1,92 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-<!--
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
--->
-
-<configuration supports_adding_forbidden="true">
-    <property>
-        <name>livy_user</name>
-        <display-name>Livy User</display-name>
-        <value>livy</value>
-        <property-type>USER</property-type>
-        <value-attributes>
-            <type>user</type>
-            <overridable>false</overridable>
-        </value-attributes>
-    </property>
-
-    <property>
-        <name>livy_group</name>
-        <display-name>Livy Group</display-name>
-        <value>livy</value>
-        <property-type>GROUP</property-type>
-        <description>livy group</description>
-        <value-attributes>
-            <type>user</type>
-        </value-attributes>
-    </property>
-
-    <property>
-        <name>livy_log_dir</name>
-        <value>/var/log/livy</value>
-        <description>Livy Log Dir</description>
-        <value-attributes>
-            <type>directory</type>
-        </value-attributes>
-    </property>
-
-    <property>
-        <name>livy_pid_dir</name>
-        <value>/var/run/livy</value>
-        <value-attributes>
-            <type>directory</type>
-        </value-attributes>
-    </property>
-
-    <property>
-        <name>spark_home</name>
-        <value>/usr/hdp/current/spark-client</value>
-        <value-attributes>
-            <type>directory</type>
-        </value-attributes>
-    </property>
-
-    <!-- livy-env.sh -->
-    <property>
-        <name>content</name>
-        <description>This is the jinja template for livy-env.sh file</description>
-        <value>
-            #!/usr/bin/env bash
-
-            # - SPARK_HOME      Spark which you would like to use in livy
-            # - LIVY_LOG_DIR    Where log files are stored.  (Default: ${LIVY_HOME}/logs)
-            # - LIVY_PID_DIR    Where the pid file is stored. (Default: /tmp)
-            # - LIVY_SERVER_JAVA_OPTS  Java Opts for running livy server (You can set jvm related setting here, like jvm memory/gc algorithm and etc.)
-        export SPARK_HOME=/usr/hdp/current/spark-client
-        export LIVY_LOG_DIR={{livy_log_dir}}
-        export LIVY_PID_DIR={{livy_pid_dir}}
-        export LIVY_SERVER_JAVA_OPTS="-Xmx2g"
-        </value>
-        <value-attributes>
-            <type>content</type>
-        </value-attributes>
-    </property>
-
-</configuration>

+ 0 - 41
ambari-server/src/main/resources/stacks/HDP/2.5/services/SPARK/configuration/livy-log4j-properties.xml

@@ -1,41 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
--->
-
-<configuration supports_final="false" supports_adding_forbidden="true">
-    <property>
-        <name>content</name>
-        <description>Livy-log4j-Properties</description>
-        <value>
-            # Set everything to be logged to the console
-            log4j.rootCategory=INFO, console
-            log4j.appender.console=org.apache.log4j.ConsoleAppender
-            log4j.appender.console.target=System.err
-            log4j.appender.console.layout=org.apache.log4j.PatternLayout
-            log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n
-
-            log4j.logger.org.eclipse.jetty=WARN
-        </value>
-        <value-attributes>
-            <type>content</type>
-            <show-property-name>false</show-property-name>
-        </value-attributes>
-    </property>
-</configuration>

+ 0 - 40
ambari-server/src/main/resources/stacks/HDP/2.5/services/SPARK/configuration/livy-spark-blacklist.xml

@@ -1,40 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
--->
-
-<configuration supports_final="false" supports_adding_forbidden="true">
-    <property>
-        <name>content</name>
-        <description>spark-blacklist.properties</description>
-        <value>
-            #
-            # Configuration override / blacklist. Defines a list of properties that users are not allowed
-            # to override when starting Spark sessions.
-            #
-            # This file takes a list of property names (one per line). Empty lines and lines starting with "#"
-            # are ignored.
-            #
-        </value>
-        <value-attributes>
-            <type>content</type>
-            <show-property-name>false</show-property-name>
-        </value-attributes>
-    </property>
-</configuration>

+ 0 - 114
ambari-server/src/main/resources/stacks/HDP/2.5/services/SPARK/kerberos.json

@@ -1,114 +0,0 @@
-{
-  "services": [
-    {
-      "name": "SPARK",
-      "identities": [
-        {
-          "name": "/smokeuser"
-        },
-        {
-          "name": "sparkuser",
-          "principal": {
-            "value": "${spark-env/spark_user}-${cluster_name}@${realm}",
-            "type" : "user",
-            "configuration": "spark-defaults/spark.history.kerberos.principal",
-            "local_username" : "${spark-env/spark_user}"
-          },
-          "keytab": {
-            "file": "${keytab_dir}/spark.headless.keytab",
-            "owner": {
-              "name": "${spark-env/spark_user}",
-              "access": "r"
-            },
-            "group": {
-              "name": "${cluster-env/user_group}",
-               "access": ""
-            },
-            "configuration": "spark-defaults/spark.history.kerberos.keytab"
-           }
-        },
-        {
-          "name": "livyuser",
-          "principal": {
-            "value": "livy@${realm}",
-            "type" : "user",
-            "configuration": "livy-conf/livy.server.kerberos.principal",
-            "local_username": "${livy-env/livy_user}"
-          },
-          "keytab": {
-            "file": "${keytab_dir}/livy.keytab",
-            "owner": {
-              "name": "${livy-env/livy_user}",
-              "access": "r"
-            },
-            "group": {
-              "name": "${cluster-env/user_group}",
-              "access": ""
-            },
-            "configuration": "livy-conf/livy.server.kerberos.keytab"
-          }
-        },
-        {
-          "name": "/spnego",
-          "principal": {
-            "configuration": "livy-conf/livy.server.auth.kerberos.principal"
-          },
-          "keytab": {
-            "configuration": "livy-conf/livy.server.auth.kerberos.keytab"
-          }
-        }
-      ],
-      "configurations": [
-        {
-          "spark-defaults": {
-            "spark.history.kerberos.enabled": "true"
-          }
-        },
-        {
-          "livy-conf": {
-            "livy.server.auth.type": "kerberos",
-            "livy.impersonation.enabled": "true"
-          }
-        },
-        {
-          "core-site": {
-            "hadoop.proxyuser.${livy-env/livy_user}.groups": "*",
-            "hadoop.proxyuser.${livy-env/livy_user}.hosts": "*"
-          }
-        }
-      ],
-      "components": [
-        {
-          "name": "SPARK_JOBHISTORYSERVER",
-          "identities": [
-            {
-              "name": "/HDFS/NAMENODE/hdfs"
-            }
-          ]
-        },
-        {
-          "name": "SPARK_CLIENT"
-        },
-        {
-          "name": "SPARK_THRIFTSERVER",
-          "identities": [
-            {
-              "name": "/HDFS/NAMENODE/hdfs"
-            },
-            {
-              "name": "/HIVE/HIVE_SERVER/hive_server_hive"
-            }
-          ]
-        },
-        {
-          "name": "LIVY_SERVER",
-          "identities": [
-            {
-              "name": "/HDFS/NAMENODE/hdfs"
-            }
-          ]
-        }
-      ]
-    }
-  ]
-}

+ 0 - 82
ambari-server/src/main/resources/stacks/HDP/2.5/services/SPARK/metainfo.xml

@@ -24,88 +24,6 @@
     <service>
       <name>SPARK</name>
       <version>1.6.x.2.5</version>
-      <components>
-        <component>
-          <name>LIVY_SERVER</name>
-          <displayName>Livy Server</displayName>
-          <category>SLAVE</category>
-          <cardinality>0+</cardinality>
-          <versionAdvertised>true</versionAdvertised>
-          <dependencies>
-            <dependency>
-              <name>SPARK/SPARK_CLIENT</name>
-              <scope>host</scope>
-              <auto-deploy>
-                <enabled>true</enabled>
-              </auto-deploy>
-            </dependency>
-            <dependency>
-              <name>HDFS/HDFS_CLIENT</name>
-              <scope>host</scope>
-              <auto-deploy>
-                <enabled>true</enabled>
-              </auto-deploy>
-            </dependency>
-            <dependency>
-              <name>YARN/YARN_CLIENT</name>
-              <scope>host</scope>
-              <auto-deploy>
-                <enabled>true</enabled>
-              </auto-deploy>
-            </dependency>
-          </dependencies>
-          <commandScript>
-            <script>scripts/livy_server.py</script>
-            <scriptType>PYTHON</scriptType>
-            <timeout>600</timeout>
-          </commandScript>
-        </component>
-      </components>
-
-      <configuration-dependencies>
-        <config-type>spark-defaults</config-type>
-        <config-type>spark-env</config-type>
-        <config-type>spark-log4j-properties</config-type>
-        <config-type>spark-metrics-properties</config-type>
-        <config-type>spark-thrift-sparkconf</config-type>
-        <config-type>spark-hive-site-override</config-type>
-        <config-type>spark-thrift-fairscheduler</config-type>
-        <config-type>livy-conf</config-type>
-        <config-type>livy-env</config-type>
-        <config-type>livy-log4j-properties</config-type>
-        <config-type>livy-spark-blacklist</config-type>
-      </configuration-dependencies>
-
-      <osSpecifics>
-        <osSpecific>
-          <osFamily>redhat7,amazon2015,redhat6,suse11</osFamily>
-          <packages>
-            <package>
-              <name>spark_${stack_version}</name>
-            </package>
-            <package>
-              <name>spark_${stack_version}-python</name>
-            </package>
-            <package>
-              <name>livy_${stack_version}</name>
-            </package>
-          </packages>
-        </osSpecific>
-        <osSpecific>
-          <osFamily>debian7,ubuntu12,ubuntu14</osFamily>
-          <packages>
-            <package>
-              <name>spark-${stack_version}</name>
-            </package>
-            <package>
-              <name>spark-${stack_version}-python</name>
-            </package>
-            <package>
-              <name>livy-${stack_version}</name>
-            </package>
-          </packages>
-        </osSpecific>
-      </osSpecifics>
     </service>
   </services>
 </metainfo>