
AMBARI-15612: Add Livy to HDP 2.5 as slave component of Spark (Jeff Zhang via jluniya)

Jayush Luniya committed 9 years ago
parent commit 16a257f9db
18 changed files with 703 additions and 7 deletions
  1. +1 -0 ambari-common/src/main/python/resource_management/libraries/functions/constants.py
  2. +5 -0 ambari-common/src/main/python/resource_management/libraries/functions/stack_features.py
  3. +68 -0 ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/livy_server.py
  4. +46 -0 ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/livy_service.py
  5. +41 -2 ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/params.py
  6. +11 -1 ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/service_check.py
  7. +79 -0 ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/setup_livy.py
  8. +7 -1 ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/status_params.py
  9. +5 -0 ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_features.json
  10. +7 -0 ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
  11. +3 -1 ambari-server/src/main/resources/stacks/HDP/2.5/role_command_order.json
  12. +59 -0 ambari-server/src/main/resources/stacks/HDP/2.5/services/SPARK/configuration/livy-conf.xml
  13. +92 -0 ambari-server/src/main/resources/stacks/HDP/2.5/services/SPARK/configuration/livy-env.xml
  14. +41 -0 ambari-server/src/main/resources/stacks/HDP/2.5/services/SPARK/configuration/livy-log4j-properties.xml
  15. +40 -0 ambari-server/src/main/resources/stacks/HDP/2.5/services/SPARK/configuration/livy-spark-blacklist.xml
  16. +114 -0 ambari-server/src/main/resources/stacks/HDP/2.5/services/SPARK/kerberos.json
  17. +82 -0 ambari-server/src/main/resources/stacks/HDP/2.5/services/SPARK/metainfo.xml
  18. +2 -2 ambari-server/src/test/python/stacks/2.2/SPARK/test_spark_service_check.py

+ 1 - 0
ambari-common/src/main/python/resource_management/libraries/functions/constants.py

@@ -55,6 +55,7 @@ class StackFeature:
   COPY_TARBALL_TO_HDFS = "copy_tarball_to_hdfs"
   SPARK_16PLUS = "spark_16plus"
   SPARK_THRIFTSERVER = "spark_thriftserver"
+  SPARK_LIVY = "spark_livy"
   STORM_KERBEROS = "storm_kerberos"
   STORM_AMS = "storm_ams"
   CREATE_KAFKA_BROKER_ID = "create_kafka_broker_id"
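
Scripts gate Livy logic on this new constant via check_stack_feature(StackFeature.SPARK_LIVY, version) (see params.py below). A minimal sketch of that version gate, assuming simple dotted version strings; the parsing helper is illustrative, not Ambari's actual implementation:

def version_tuple(v):
    """Turn '2.5.0.0' into (2, 5, 0, 0) for ordered comparison."""
    return tuple(int(part) for part in v.split("."))

def supports_livy(stack_version, min_version="2.5.0.0"):
    # spark_livy declares min_version 2.5.0.0 (see stack_features below),
    # so any equal-or-newer stack version qualifies.
    return version_tuple(stack_version) >= version_tuple(min_version)

print(supports_livy("2.4.0.0"))  # False: Livy is absent before HDP 2.5
print(supports_livy("2.5.0.0"))  # True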

+ 5 - 0
ambari-common/src/main/python/resource_management/libraries/functions/stack_features.py

@@ -248,6 +248,11 @@ _DEFAULT_STACK_FEATURES = {
       "name": "hbase_home_directory",
       "description": "Hbase home directory in HDFS needed for HBASE backup",
       "min_version": "2.5.0.0"
+    },
+    {
+      "name": "spark_livy",
+      "description": "Livy as slave component of spark",
+      "min_version": "2.5.0.0"
     }
   ]
 }

+ 68 - 0
ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/livy_server.py

@@ -0,0 +1,68 @@
+#!/usr/bin/python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+from resource_management.libraries.script.script import Script
+from resource_management.libraries.functions.check_process_status import check_process_status
+
+from livy_service import livy_service
+from setup_livy import setup_livy
+
+class LivyServer(Script):
+
+  def install(self, env):
+    import params
+    env.set_params(params)
+
+    self.install_packages(env)
+
+  def configure(self, env, upgrade_type=None):
+    import params
+    env.set_params(params)
+
+    setup_livy(env, 'server', upgrade_type=upgrade_type, action='config')
+
+  def start(self, env, upgrade_type=None):
+    import params
+    env.set_params(params)
+
+    self.configure(env)
+    livy_service('server', upgrade_type=upgrade_type, action='start')
+
+  def stop(self, env, upgrade_type=None):
+    import params
+    env.set_params(params)
+
+    livy_service('server', upgrade_type=upgrade_type, action='stop')
+
+  def status(self, env):
+    import status_params
+    env.set_params(status_params)
+
+    check_process_status(status_params.livy_server_pid_file)
+
+  def get_component_name(self):
+    return "livy-server"
+
+  def pre_upgrade_restart(self, env, upgrade_type=None):
+    pass
+
+if __name__ == "__main__":
+  LivyServer().execute()
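
For context, Ambari's Script.execute() maps the incoming command (INSTALL, START, STOP, STATUS) onto the matching lowercase method of the class above. A rough, simplified sketch of that dispatch with hypothetical names; the real resource_management implementation also parses the command JSON and sets up logging:

class MiniScript(object):
    def execute(self, command):
        # 'START' from the Ambari server becomes a call to self.start(env).
        getattr(self, command.lower())(env={})

class DemoLivyServer(MiniScript):
    def start(self, env):
        print("configure, then launch the livy-server process")

DemoLivyServer().execute("START")  # prints the start message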

+ 46 - 0
ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/livy_service.py

@@ -0,0 +1,46 @@
+#!/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+
+from resource_management.libraries.functions import format
+from resource_management.core.resources.system import File, Execute
+
+def livy_service(name, upgrade_type=None, action=None):
+  import params
+
+  if action == 'start':
+    livyserver_no_op_test = format(
+      'ls {livy_server_pid_file} >/dev/null 2>&1 && ps -p `cat {livy_server_pid_file}` >/dev/null 2>&1')
+    Execute(format('{livy_server_start}'),
+            user=params.livy_user,
+            environment={'JAVA_HOME': params.java_home},
+            not_if=livyserver_no_op_test)
+
+  elif action == 'stop':
+    Execute(format('{livy_server_stop}'),
+            user=params.livy_user,
+            environment={'JAVA_HOME': params.java_home}
+            )
+    File(params.livy_server_pid_file,
+         action="delete"
+         )
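
The not_if guard above makes the start idempotent: Execute skips livy_server_start when the pid file exists and the recorded process is still alive. A self-contained Python sketch of the same liveness test; the pid-file path is illustrative (it matches the format() in status_params.py with the default settings):

import os

def livy_is_running(pid_file="/var/run/livy/livy-livy-server.pid"):
    try:
        with open(pid_file) as f:
            pid = int(f.read().strip())
    except (IOError, OSError, ValueError):
        return False  # no pid file, or unreadable contents
    try:
        os.kill(pid, 0)  # signal 0 only checks existence, like `ps -p`
        return True
    except OSError:
        return False

print("skip start" if livy_is_running() else "run livy-server start")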

+ 41 - 2
ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/params.py

@@ -28,7 +28,6 @@ import resource_management.libraries.functions
 from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions import format
-from resource_management.libraries.functions.get_stack_version import get_stack_version
 from resource_management.libraries.functions.version import format_stack_version
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions import get_kinit_path
@@ -41,7 +40,9 @@ from resource_management.libraries.script.script import Script
 SERVER_ROLE_DIRECTORY_MAP = {
   'SPARK_JOBHISTORYSERVER' : 'spark-historyserver',
   'SPARK_CLIENT' : 'spark-client',
-  'SPARK_THRIFTSERVER' : 'spark-thriftserver'
+  'SPARK_THRIFTSERVER' : 'spark-thriftserver',
+  'LIVY_SERVER' : 'livy-server',
+  'LIVY_CLIENT' : 'livy-client'
 }
 
 component_directory = Script.get_component_from_role(SERVER_ROLE_DIRECTORY_MAP, "SPARK_CLIENT")
@@ -179,6 +180,44 @@ hdfs_site = config['configurations']['hdfs-site']
 
 dfs_type = default("/commandParams/dfs_type", "")
 
+# livy related config
+
+# livy is only supported from HDP 2.5
+has_livyserver = False
+
+if stack_version_formatted and check_stack_feature(StackFeature.SPARK_LIVY, stack_version_formatted):
+  livy_component_directory = Script.get_component_from_role(SERVER_ROLE_DIRECTORY_MAP, "LIVY_SERVER")
+  livy_conf = format("{stack_root}/current/{livy_component_directory}/conf")
+  livy_log_dir = config['configurations']['livy-env']['livy_log_dir']
+  livy_pid_dir = status_params.livy_pid_dir
+  livy_home = format("{stack_root}/current/{livy_component_directory}")
+  livy_user = status_params.livy_user
+  livy_group = status_params.livy_group
+  user_group = status_params.user_group
+  livy_hdfs_user_dir = format("/user/{livy_user}")
+  livy_server_pid_file = status_params.livy_server_pid_file
+
+  livy_server_start = format("{livy_home}/bin/livy-server start")
+  livy_server_stop = format("{livy_home}/bin/livy-server stop")
+  livy_logs_dir = format("{livy_home}/logs")
+
+  livy_env_sh = config['configurations']['livy-env']['content']
+  livy_log4j_properties = config['configurations']['livy-log4j-properties']['content']
+  livy_spark_blacklist_properties = config['configurations']['livy-spark-blacklist']['content']
+
+  livy_kerberos_keytab =  config['configurations']['livy-conf']['livy.server.kerberos.keytab']
+  livy_kerberos_principal = config['configurations']['livy-conf']['livy.server.kerberos.principal']
+
+  livy_livyserver_hosts = default("/clusterHostInfo/livy_server_hosts", [])
+
+  if len(livy_livyserver_hosts) > 0:
+    livy_livyserver_host = livy_livyserver_hosts[0]
+    has_livyserver = True
+
+  livy_livyserver_port = default('/configurations/livy-conf/livy.server.port', 8998)
+
 import functools
 #create partial functions with common arguments for every HdfsResource call
 #to create/delete hdfs directory/file/copyfromlocal we need to call params.HdfsResource in code
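
The default() helper used throughout this block returns the value at a '/'-separated path into the command configuration, or the supplied fallback when any key along the path is missing. An approximate sketch (Ambari's real helper reads the config internally rather than taking it as an argument):

def default(path, fallback, config):
    node = config
    for key in path.strip("/").split("/"):
        if not isinstance(node, dict) or key not in node:
            return fallback
        node = node[key]
    return node

cfg = {"clusterHostInfo": {"livy_server_hosts": ["c6401.ambari.apache.org"]}}
print(default("/clusterHostInfo/livy_server_hosts", [], cfg))
print(default("/configurations/livy-conf/livy.server.port", 8998, cfg))  # 8998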

+ 11 - 1
ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/service_check.py

@@ -32,12 +32,22 @@ class SparkServiceCheck(Script):
     if params.security_enabled:
       spark_kinit_cmd = format("{kinit_path_local} -kt {spark_kerberos_keytab} {spark_principal}; ")
       Execute(spark_kinit_cmd, user=params.spark_user)
+      if params.has_livyserver:
+        livy_kinit_cmd = format("{kinit_path_local} -kt {livy_kerberos_keytab} {livy_kerberos_principal}; ")
+        Execute(livy_kinit_cmd, user=params.livy_user)
 
     Execute(format("curl -s -o /dev/null -w'%{{http_code}}' --negotiate -u: -k http://{spark_history_server_host}:{spark_history_ui_port} | grep 200"),
-      tries = 10,
+      tries=5,
       try_sleep=3,
       logoutput=True
     )
+    if params.has_livyserver and params.livy_livyserver_host != "localhost" and params.livy_livyserver_host != "0.0.0.0":
+      Execute(format("curl -s -o /dev/null -w'%{{http_code}}' --negotiate -u: -k http://{livy_livyserver_host}:{livy_livyserver_port}/sessions | grep 200"),
+              tries=5,
+              try_sleep=3,
+              logoutput=True,
+              user=params.livy_user
+              )
 
 if __name__ == "__main__":
   SparkServiceCheck().execute()
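
The Livy probe passes when GET /sessions returns HTTP 200. An illustrative, non-Kerberized Python equivalent of that curl pipeline (the real check also negotiates SPNEGO via --negotiate -u: and retries five times); host and port here are example values:

try:
    from urllib.request import urlopen   # Python 3
except ImportError:
    from urllib2 import urlopen          # Python 2

def livy_responds(host="localhost", port=8998):
    try:
        return urlopen("http://%s:%d/sessions" % (host, port)).getcode() == 200
    except Exception:
        return False

print(livy_responds())  # True once the Livy REST API is up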

+ 79 - 0
ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/setup_livy.py

@@ -0,0 +1,79 @@
+#!/usr/bin/python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+import os
+from resource_management import Directory, File, PropertiesFile, InlineTemplate, format
+
+
+def setup_livy(env, type, upgrade_type=None, action=None):
+  import params
+
+  Directory([params.livy_pid_dir, params.livy_log_dir],
+            owner=params.livy_user,
+            group=params.user_group,
+            mode=0775,
+            create_parents=True
+  )
+  if type == 'server' and action == 'config':
+    params.HdfsResource(params.livy_hdfs_user_dir,
+                       type="directory",
+                       action="create_on_execute",
+                       owner=params.livy_user,
+                       mode=0775
+    )
+    params.HdfsResource(None, action="execute")
+
+  # create livy-env.sh in etc/conf dir
+  File(os.path.join(params.livy_conf, 'livy-env.sh'),
+       owner=params.livy_user,
+       group=params.livy_group,
+       content=InlineTemplate(params.livy_env_sh),
+       mode=0644,
+       )
+
+  # create livy.conf in etc/conf dir
+  PropertiesFile(format("{livy_conf}/livy.conf"),
+    properties=params.config['configurations']['livy-conf'],
+    key_value_delimiter=" ",
+    owner=params.livy_user,
+    group=params.livy_group,
+  )
+
+  # create log4j.properties in etc/conf dir
+  File(os.path.join(params.livy_conf, 'log4j.properties'),
+       owner=params.livy_user,
+       group=params.livy_group,
+       content=params.livy_log4j_properties,
+       mode=0644,
+  )
+
+  # create spark-blacklist.properties in etc/conf dir
+  File(os.path.join(params.livy_conf, 'spark-blacklist.properties'),
+       owner=params.livy_user,
+       group=params.livy_group,
+       content=params.livy_spark_blacklist_properties,
+       mode=0644,
+       )
+
+  Directory(params.livy_logs_dir,
+       owner=params.livy_user,
+       group=params.livy_group,
+       mode=0755,
+  )
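
Note the key_value_delimiter: unlike a standard .properties file, livy.conf separates keys from values with a space. A sketch of what the PropertiesFile resource above emits, using the stack defaults from livy-conf.xml further down:

livy_conf = {
    "livy.environment": "production",
    "livy.server.port": "8998",
    "livy.server.session.timeout": "3600000",
    "livy.impersonation.enabled": "true",
}
for key in sorted(livy_conf):
    print("%s %s" % (key, livy_conf[key]))
# livy.environment production
# livy.impersonation.enabled true
# livy.server.port 8998
# livy.server.session.timeout 3600000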

+ 7 - 1
ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/status_params.py

@@ -36,4 +36,10 @@ else:
 spark_pid_dir = config['configurations']['spark-env']['spark_pid_dir']
 spark_history_server_pid_file = format("{spark_pid_dir}/spark-{spark_user}-org.apache.spark.deploy.history.HistoryServer-1.pid")
 spark_thrift_server_pid_file = format("{spark_pid_dir}/spark-{hive_user}-org.apache.spark.sql.hive.thriftserver.HiveThriftServer2-1.pid")
-stack_name = default("/hostLevelParams/stack_name", None)
+stack_name = default("/hostLevelParams/stack_name", None)
+
+if "livy-env" in config['configurations']:
+  livy_user = config['configurations']['livy-env']['livy_user']
+  livy_group = config['configurations']['livy-env']['livy_group']
+  livy_pid_dir = config['configurations']['livy-env']['livy_pid_dir']
+  livy_server_pid_file = format("{livy_pid_dir}/livy-{livy_user}-server.pid")
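
With the livy-env defaults defined further down (livy_user=livy, livy_pid_dir=/var/run/livy), the format() call resolves to the path that the server's status() command probes:

livy_user = "livy"
livy_pid_dir = "/var/run/livy"
print("{0}/livy-{1}-server.pid".format(livy_pid_dir, livy_user))
# /var/run/livy/livy-livy-server.pid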

+ 5 - 0
ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_features.json

@@ -230,6 +230,11 @@
       "name": "hbase_home_directory",
       "description": "Hbase home directory in HDFS needed for HBASE backup",
       "min_version": "2.5.0.0"
+    },
+    {
+      "name": "spark_livy",
+      "description": "Livy as slave component of spark",
+      "min_version": "2.5.0.0"
     }
   ]
 }

+ 7 - 0
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py

@@ -303,6 +303,13 @@ class HDP206StackAdvisor(DefaultStackAdvisor):
         if not falconUser in users and falconUser is not None:
           users[falconUser] = {"propertyHosts" : "*","propertyGroups" : "*", "config" : "falcon-env", "propertyName" : "falcon_user"}
 
+    if "SPARK" in servicesList:
+      livyUser = None
+      if "livy-env" in services["configurations"] and "livy_user" in services["configurations"]["livy-env"]["properties"]:
+        livyUser = services["configurations"]["livy-env"]["properties"]["livy_user"]
+        if not livyUser in users and livyUser is not None:
+          users[livyUser] = {"propertyHosts" : "*","propertyGroups" : "*", "config" : "livy-env", "propertyName" : "livy_user"}
+
     putCoreSiteProperty = self.putProperty(configurations, "core-site", services)
     putCoreSitePropertyAttribute = self.putPropertyAttribute(configurations, "core-site")
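
Registering livy_user in the users dict feeds the advisor's core-site recommendations: each entry becomes a pair of wildcard proxyuser properties, matching the kerberos.json defaults below. A simplified sketch of that expansion:

users = {"livy": {"propertyHosts": "*", "propertyGroups": "*"}}
core_site = {}
for user, attrs in users.items():
    core_site["hadoop.proxyuser.%s.hosts" % user] = attrs["propertyHosts"]
    core_site["hadoop.proxyuser.%s.groups" % user] = attrs["propertyGroups"]
print(core_site)
# {'hadoop.proxyuser.livy.hosts': '*', 'hadoop.proxyuser.livy.groups': '*'}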
 

+ 3 - 1
ambari-server/src/main/resources/stacks/HDP/2.5/role_command_order.json

@@ -8,6 +8,8 @@
     "HIVE_SERVER_INTERACTIVE-START": ["NODEMANAGER-START", "MYSQL_SERVER-START"],
     "HIVE_SERVER_INTERACTIVE-RESTART": ["NODEMANAGER-RESTART", "MYSQL_SERVER-RESTART"],
     "HIVE_SERVICE_CHECK-SERVICE_CHECK": ["HIVE_SERVER-START", "HIVE_METASTORE-START", "WEBHCAT_SERVER-START", "HIVE_SERVER_INTERACTIVE-START"],
-    "RANGER_ADMIN-START": ["ZOOKEEPER_SERVER-START", "LOGSEARCH_SOLR-START"]
+    "RANGER_ADMIN-START": ["ZOOKEEPER_SERVER-START", "LOGSEARCH_SOLR-START"],
+    "LIVY_SERVER-START" : ["NAMENODE-START", "DATANODE-START"],
+    "SPARK_SERVICE_CHECK-SERVICE_CHECK" : ["SPARK_JOBHISTORYSERVER-START", "APP_TIMELINE_SERVER-START","LIVY_SERVER-START"]
   }
 }

+ 59 - 0
ambari-server/src/main/resources/stacks/HDP/2.5/services/SPARK/configuration/livy-conf.xml

@@ -0,0 +1,59 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+
+<configuration supports_final="true">
+
+    <property>
+        <name>livy.environment</name>
+        <value>production</value>
+        <description>
+            Specifies Livy's environment. May be either "production" or "development". In "development"
+            mode, Livy enables debugging options, such as reporting possible routes on a 404.
+            Defaults to "development".
+        </description>
+    </property>
+
+    <property>
+        <name>livy.server.port</name>
+        <value>8998</value>
+        <description>
+            The port the server listens on. Defaults to 8998.
+        </description>
+    </property>
+
+    <property>
+        <name>livy.server.session.timeout</name>
+        <value>3600000</value>
+        <description>
+            Time in milliseconds that Livy will wait before timing out an idle session.
+            Defaults to one hour.
+        </description>
+    </property>
+
+    <property>
+        <name>livy.impersonation.enabled</name>
+        <value>true</value>
+        <description>
+            Whether Livy should use proxy users when submitting a job.
+        </description>
+    </property>
+
+</configuration>

+ 92 - 0
ambari-server/src/main/resources/stacks/HDP/2.5/services/SPARK/configuration/livy-env.xml

@@ -0,0 +1,92 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+
+<configuration supports_adding_forbidden="true">
+    <property>
+        <name>livy_user</name>
+        <display-name>Livy User</display-name>
+        <value>livy</value>
+        <property-type>USER</property-type>
+        <value-attributes>
+            <type>user</type>
+            <overridable>false</overridable>
+        </value-attributes>
+    </property>
+
+    <property>
+        <name>livy_group</name>
+        <display-name>Livy Group</display-name>
+        <value>livy</value>
+        <property-type>GROUP</property-type>
+        <description>The group for the Livy user.</description>
+        <value-attributes>
+            <type>user</type>
+        </value-attributes>
+    </property>
+
+    <property>
+        <name>livy_log_dir</name>
+        <value>/var/log/livy</value>
+        <description>Livy Log Dir</description>
+        <value-attributes>
+            <type>directory</type>
+        </value-attributes>
+    </property>
+
+    <property>
+        <name>livy_pid_dir</name>
+        <value>/var/run/livy</value>
+        <value-attributes>
+            <type>directory</type>
+        </value-attributes>
+    </property>
+
+    <property>
+        <name>spark_home</name>
+        <value>/usr/hdp/current/spark-client</value>
+        <value-attributes>
+            <type>directory</type>
+        </value-attributes>
+    </property>
+
+    <!-- livy-env.sh -->
+    <property>
+        <name>content</name>
+        <description>This is the jinja template for livy-env.sh file</description>
+        <value>
+            #!/usr/bin/env bash
+
+            # - SPARK_HOME              The Spark installation Livy should use.
+            # - LIVY_LOG_DIR            Where log files are stored. (Default: ${LIVY_HOME}/logs)
+            # - LIVY_PID_DIR            Where the pid file is stored. (Default: /tmp)
+            # - LIVY_SERVER_JAVA_OPTS   Java opts for the Livy server (JVM settings such as memory and GC).
+            export SPARK_HOME=/usr/hdp/current/spark-client
+            export LIVY_LOG_DIR={{livy_log_dir}}
+            export LIVY_PID_DIR={{livy_pid_dir}}
+            export LIVY_SERVER_JAVA_OPTS="-Xmx2g"
+        </value>
+        <value-attributes>
+            <type>content</type>
+        </value-attributes>
+    </property>
+
+</configuration>
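
setup_livy.py renders the content template above with InlineTemplate. A minimal stand-in for that jinja substitution, using the stack defaults for the two placeholders:

template = (
    "export SPARK_HOME=/usr/hdp/current/spark-client\n"
    "export LIVY_LOG_DIR={{livy_log_dir}}\n"
    "export LIVY_PID_DIR={{livy_pid_dir}}\n"
    "export LIVY_SERVER_JAVA_OPTS=\"-Xmx2g\"\n"
)
values = {"livy_log_dir": "/var/log/livy", "livy_pid_dir": "/var/run/livy"}
for name, value in values.items():
    template = template.replace("{{%s}}" % name, value)
print(template)  # the livy-env.sh body that lands in the conf dir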

+ 41 - 0
ambari-server/src/main/resources/stacks/HDP/2.5/services/SPARK/configuration/livy-log4j-properties.xml

@@ -0,0 +1,41 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+
+<configuration supports_final="false" supports_adding_forbidden="true">
+    <property>
+        <name>content</name>
+        <description>Livy log4j properties</description>
+        <value>
+            # Set everything to be logged to the console
+            log4j.rootCategory=INFO, console
+            log4j.appender.console=org.apache.log4j.ConsoleAppender
+            log4j.appender.console.target=System.err
+            log4j.appender.console.layout=org.apache.log4j.PatternLayout
+            log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n
+
+            log4j.logger.org.eclipse.jetty=WARN
+        </value>
+        <value-attributes>
+            <type>content</type>
+            <show-property-name>false</show-property-name>
+        </value-attributes>
+    </property>
+</configuration>

+ 40 - 0
ambari-server/src/main/resources/stacks/HDP/2.5/services/SPARK/configuration/livy-spark-blacklist.xml

@@ -0,0 +1,40 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+
+<configuration supports_final="false" supports_adding_forbidden="true">
+    <property>
+        <name>content</name>
+        <description>spark-blacklist.properties</description>
+        <value>
+            #
+            # Configuration override / blacklist. Defines a list of properties that users are not allowed
+            # to override when starting Spark sessions.
+            #
+            # This file takes a list of property names (one per line). Empty lines and lines starting with "#"
+            # are ignored.
+            #
+        </value>
+        <value-attributes>
+            <type>content</type>
+            <show-property-name>false</show-property-name>
+        </value-attributes>
+    </property>
+</configuration>

+ 114 - 0
ambari-server/src/main/resources/stacks/HDP/2.5/services/SPARK/kerberos.json

@@ -0,0 +1,114 @@
+{
+  "services": [
+    {
+      "name": "SPARK",
+      "identities": [
+        {
+          "name": "/smokeuser"
+        },
+        {
+          "name": "sparkuser",
+          "principal": {
+            "value": "${spark-env/spark_user}-${cluster_name}@${realm}",
+            "type" : "user",
+            "configuration": "spark-defaults/spark.history.kerberos.principal",
+            "local_username" : "${spark-env/spark_user}"
+          },
+          "keytab": {
+            "file": "${keytab_dir}/spark.headless.keytab",
+            "owner": {
+              "name": "${spark-env/spark_user}",
+              "access": "r"
+            },
+            "group": {
+              "name": "${cluster-env/user_group}",
+               "access": ""
+            },
+            "configuration": "spark-defaults/spark.history.kerberos.keytab"
+           }
+        },
+        {
+          "name": "livyuser",
+          "principal": {
+            "value": "livy@${realm}",
+            "type" : "service",
+            "configuration": "livy-conf/livy.server.kerberos.principal",
+            "local_username": "${livy-env/livy_user}"
+          },
+          "keytab": {
+            "file": "${keytab_dir}/livy.keytab",
+            "owner": {
+              "name": "${livy-env/livy_user}",
+              "access": "r"
+            },
+            "group": {
+              "name": "${cluster-env/user_group}",
+              "access": ""
+            },
+            "configuration": "livy-conf/livy.server.kerberos.keytab"
+          }
+        },
+        {
+          "name": "/spnego",
+          "principal": {
+            "configuration": "livy-conf/livy.server.auth.kerberos.principal"
+          },
+          "keytab": {
+            "configuration": "livy-conf/livy.server.auth.kerberos.keytab"
+          }
+        }
+      ],
+      "configurations": [
+        {
+          "spark-defaults": {
+            "spark.history.kerberos.enabled": "true"
+          }
+        },
+        {
+          "livy-conf": {
+            "livy.server.auth.type": "kerberos",
+            "livy.impersonation.enabled": "true"
+          }
+        },
+        {
+          "core-site": {
+            "hadoop.proxyuser.${livy-env/livy_user}.groups": "*",
+            "hadoop.proxyuser.${livy-env/livy_user}.hosts": "*"
+          }
+        }
+      ],
+      "components": [
+        {
+          "name": "SPARK_JOBHISTORYSERVER",
+          "identities": [
+            {
+              "name": "/HDFS/NAMENODE/hdfs"
+            }
+          ]
+        },
+        {
+          "name": "SPARK_CLIENT"
+        },
+        {
+          "name": "SPARK_THRIFTSERVER",
+          "identities": [
+            {
+              "name": "/HDFS/NAMENODE/hdfs"
+            },
+            {
+              "name": "/HIVE/HIVE_SERVER/hive_server_hive"
+            }
+          ]
+        },
+        {
+          "name": "LIVY_SERVER",
+          "identities": [
+            {
+              "name": "/HDFS/NAMENODE/hdfs"
+            }
+          ]
+        }
+      ]
+    }
+  ]
+}

+ 82 - 0
ambari-server/src/main/resources/stacks/HDP/2.5/services/SPARK/metainfo.xml

@@ -24,6 +24,88 @@
     <service>
       <name>SPARK</name>
       <version>1.6.x.2.5</version>
+      <components>
+        <component>
+          <name>LIVY_SERVER</name>
+          <displayName>Livy Server</displayName>
+          <category>SLAVE</category>
+          <cardinality>0+</cardinality>
+          <versionAdvertised>true</versionAdvertised>
+          <dependencies>
+            <dependency>
+              <name>SPARK/SPARK_CLIENT</name>
+              <scope>host</scope>
+              <auto-deploy>
+                <enabled>true</enabled>
+              </auto-deploy>
+            </dependency>
+            <dependency>
+              <name>HDFS/HDFS_CLIENT</name>
+              <scope>host</scope>
+              <auto-deploy>
+                <enabled>true</enabled>
+              </auto-deploy>
+            </dependency>
+            <dependency>
+              <name>YARN/YARN_CLIENT</name>
+              <scope>host</scope>
+              <auto-deploy>
+                <enabled>true</enabled>
+              </auto-deploy>
+            </dependency>
+          </dependencies>
+          <commandScript>
+            <script>scripts/livy_server.py</script>
+            <scriptType>PYTHON</scriptType>
+            <timeout>600</timeout>
+          </commandScript>
+        </component>
+      </components>
+
+      <configuration-dependencies>
+        <config-type>spark-defaults</config-type>
+        <config-type>spark-env</config-type>
+        <config-type>spark-log4j-properties</config-type>
+        <config-type>spark-metrics-properties</config-type>
+        <config-type>spark-thrift-sparkconf</config-type>
+        <config-type>spark-hive-site-override</config-type>
+        <config-type>spark-thrift-fairscheduler</config-type>
+        <config-type>livy-conf</config-type>
+        <config-type>livy-env</config-type>
+        <config-type>livy-log4j-properties</config-type>
+        <config-type>livy-spark-blacklist</config-type>
+      </configuration-dependencies>
+
+      <osSpecifics>
+        <osSpecific>
+          <osFamily>redhat7,amazon2015,redhat6,suse11</osFamily>
+          <packages>
+            <package>
+              <name>spark_${stack_version}</name>
+            </package>
+            <package>
+              <name>spark_${stack_version}-python</name>
+            </package>
+            <package>
+              <name>livy_${stack_version}</name>
+            </package>
+          </packages>
+        </osSpecific>
+        <osSpecific>
+          <osFamily>debian7,ubuntu12,ubuntu14</osFamily>
+          <packages>
+            <package>
+              <name>spark-${stack_version}</name>
+            </package>
+            <package>
+              <name>spark-${stack_version}-python</name>
+            </package>
+            <package>
+              <name>livy-${stack_version}</name>
+            </package>
+          </packages>
+        </osSpecific>
+      </osSpecifics>
     </service>
   </services>
 </metainfo>

+ 2 - 2
ambari-server/src/test/python/stacks/2.2/SPARK/test_spark_service_check.py

@@ -37,7 +37,7 @@ class TestServiceCheck(RMFTestCase):
                         target = RMFTestCase.TARGET_COMMON_SERVICES
     )
     self.assertResourceCalled('Execute', "curl -s -o /dev/null -w'%{http_code}' --negotiate -u: -k http://localhost:18080 | grep 200",
-        tries = 10,
+        tries = 5,
         try_sleep = 3,
         logoutput = True
     )
@@ -56,7 +56,7 @@ class TestServiceCheck(RMFTestCase):
         user = 'spark',
     )
     self.assertResourceCalled('Execute', "curl -s -o /dev/null -w'%{http_code}' --negotiate -u: -k http://localhost:18080 | grep 200",
-        tries = 10,
+        tries = 5,
         try_sleep = 3,
         logoutput = True
     )