
AMBARI-9251 : Support SQL Server on Linux via Ambari Blueprints for Hive And Oozie (jluniya)

Jayush Luniya · 10 years ago
commit 8ebade7508
24 changed files with 131 additions and 221 deletions
  1. +0 -2    ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
  2. +1 -1    ambari-server/src/main/python/ambari-server.py
  3. +9 -3    ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params.py
  4. +4 -1    ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py
  5. +1 -0    ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_service.py
  6. +4 -1    ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params.py
  7. +2 -2    ambari-server/src/main/resources/custom_actions/scripts/check_host.py
  8. +1 -1    ambari-server/src/test/python/custom_actions/TestCheckHost.py
  9. +8 -19   ambari-server/src/test/python/stacks/2.0.6/configs/client-upgrade.json
  10. +7 -18  ambari-server/src/test/python/stacks/2.0.6/configs/hbase-2.2.json
  11. +7 -18  ambari-server/src/test/python/stacks/2.0.6/configs/hbase-check-2.2.json
  12. +1 -3   ambari-server/src/test/python/stacks/2.0.6/configs/hbase-preupgrade.json
  13. +7 -18  ambari-server/src/test/python/stacks/2.0.6/configs/hbase-rs-2.2.json
  14. +8 -19  ambari-server/src/test/python/stacks/2.0.6/configs/zk-service_check_2.2.json
  15. +8 -19  ambari-server/src/test/python/stacks/2.1/configs/client-upgrade.json
  16. +8 -19  ambari-server/src/test/python/stacks/2.2/configs/falcon-upgrade.json
  17. +2 -13  ambari-server/src/test/python/stacks/2.2/configs/hive-upgrade.json
  18. +1 -12  ambari-server/src/test/python/stacks/2.2/configs/oozie-upgrade.json
  19. +6 -6   ambari-web/app/controllers/main/service/info/configs.js
  20. +2 -2   ambari-web/app/controllers/wizard/step7_controller.js
  21. +20 -20 ambari-web/app/controllers/wizard/step8_controller.js
  22. +6 -6   ambari-web/app/data/HDP2/site_properties.js
  23. +12 -12 ambari-web/app/views/wizard/controls_view.js
  24. +6 -6   ambari-web/test/views/wizard/controls_view_test.js

+ 0 - 2
ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java

@@ -276,8 +276,6 @@ public class Configuration {
   private static final String SERVER_JDBC_USER_PASSWD_DEFAULT = "bigdata";
   private static final String SERVER_JDBC_RCA_USER_NAME_DEFAULT = "mapred";
   private static final String SERVER_JDBC_RCA_USER_PASSWD_DEFAULT = "mapred";
-  private static final String SCOM_JDBC_SINK_USER_NAME_DEFAULT = "hadoop";
-  private static final String SCOM_JDBC_SINK_USER_PASSWD_DEFAULT = "hadoop";
   private static final String SRVR_TWO_WAY_SSL_DEFAULT = "false";
   private static final String SRVR_KSTR_DIR_DEFAULT = ".";
   private static final String API_CSRF_PREVENTION_DEFAULT = "true";

+ 1 - 1
ambari-server/src/main/python/ambari-server.py

@@ -390,7 +390,7 @@ ORACLE_UPGRADE_STACK_ARGS = "-S -L '{0}/{1}@(description=(address=(protocol=TCP)
 
 JDBC_PATTERNS = {"oracle": "*ojdbc*.jar", "mysql": "*mysql*.jar"}
 DATABASE_FULL_NAMES = {"oracle": "Oracle", "mysql": "MySQL", "postgres": "PostgreSQL"}
-JDBC_DB_OPTION_VALUES = ["postgres", "mysql", "oracle"]
+JDBC_DB_OPTION_VALUES = ["postgres", "mysql", "oracle", "mssql"]
 JDBC_DB_DEFAULT_DRIVER = {"postgresql" : "postgresql-jdbc.jar", "mysql" : "mysql-connector-java.jar", "oracle" : "ojdbc6.jar"}
 ORACLE_DB_ID_TYPES = ["Service Name", "SID"]
 

+ 9 - 3
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params.py

@@ -106,12 +106,18 @@ hive_jdbc_connection_url = config['configurations']['hive-site']['javax.jdo.opti
 
 hive_metastore_user_passwd = config['configurations']['hive-site']['javax.jdo.option.ConnectionPassword']
 hive_metastore_db_type = config['configurations']['hive-env']['hive_database_type']
+#HACK Temporarily use dbType=azuredb while invoking schematool
+if hive_metastore_db_type == "mssql":
+  hive_metastore_db_type = "azuredb"
 
 #users
 hive_user = config['configurations']['hive-env']['hive_user']
 #JDBC driver jar name
 hive_jdbc_driver = config['configurations']['hive-site']['javax.jdo.option.ConnectionDriverName']
-if hive_jdbc_driver == "com.mysql.jdbc.Driver":
+if hive_jdbc_driver == "com.microsoft.sqlserver.jdbc.SQLServerDriver":
+  jdbc_jar_name = "sqljdbc4.jar"
+  jdbc_symlink_name = "mssql-jdbc-driver.jar"
+elif hive_jdbc_driver == "com.mysql.jdbc.Driver":
   jdbc_jar_name = "mysql-connector-java.jar"
   jdbc_symlink_name = "mysql-jdbc-driver.jar"
 elif hive_jdbc_driver == "org.postgresql.Driver":
@@ -123,7 +129,7 @@ elif hive_jdbc_driver == "oracle.jdbc.driver.OracleDriver":
 
 check_db_connection_jar_name = "DBConnectionVerification.jar"
 check_db_connection_jar = format("/usr/lib/ambari-agent/{check_db_connection_jar_name}")
-hive_jdbc_drivers_list = ["com.mysql.jdbc.Driver","org.postgresql.Driver","oracle.jdbc.driver.OracleDriver"]
+hive_jdbc_drivers_list = ["com.microsoft.sqlserver.jdbc.SQLServerDriver","com.mysql.jdbc.Driver","org.postgresql.Driver","oracle.jdbc.driver.OracleDriver"]
 downloaded_custom_connector = format("{tmp_dir}/{jdbc_jar_name}")
 prepackaged_ojdbc_symlink = format("{hive_lib}/ojdbc6.jar")
 
@@ -331,4 +337,4 @@ if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >=0:
     if  user_input.lower() == 'yes':
       enable_ranger_hive = True
     elif user_input.lower() == 'no':
-      enable_ranger_hive = False
+      enable_ranger_hive = False
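
Note: the params.py hunk above carries the core of the Hive-side change: the SQL Server driver class is mapped to the sqljdbc4.jar/mssql-jdbc-driver.jar names the agent downloads, and the metastore db type is temporarily rewritten to "azuredb" when schematool is invoked. A minimal standalone sketch of that selection logic, with the function name and plain arguments as illustrative stand-ins for the cluster-config lookups (the PostgreSQL and Oracle branches of the real script are not repeated here):

    # Sketch only; mirrors the branch this patch adds for SQL Server.
    def resolve_hive_jdbc_artifacts(hive_jdbc_driver, hive_metastore_db_type):
        # HACK carried over from the patch: pass dbType=azuredb to schematool
        # until it understands "mssql" directly.
        if hive_metastore_db_type == "mssql":
            hive_metastore_db_type = "azuredb"

        if hive_jdbc_driver == "com.microsoft.sqlserver.jdbc.SQLServerDriver":
            jdbc_jar_name, jdbc_symlink_name = "sqljdbc4.jar", "mssql-jdbc-driver.jar"
        elif hive_jdbc_driver == "com.mysql.jdbc.Driver":
            jdbc_jar_name, jdbc_symlink_name = "mysql-connector-java.jar", "mysql-jdbc-driver.jar"
        else:
            raise ValueError("branch not shown in this sketch: " + hive_jdbc_driver)
        return hive_metastore_db_type, jdbc_jar_name, jdbc_symlink_name

    # Example: resolve_hive_jdbc_artifacts("com.microsoft.sqlserver.jdbc.SQLServerDriver", "mssql")
    # returns ("azuredb", "sqljdbc4.jar", "mssql-jdbc-driver.jar"). The OOZIE params.py
    # further below applies the same driver-to-jar mapping for the Oozie JPA service.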

+ 4 - 1
ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py

@@ -81,6 +81,7 @@ def oozie(is_server=False # TODO: see if see can remove this
   }
 
   if params.jdbc_driver_name == "com.mysql.jdbc.Driver" or \
+     params.jdbc_driver_name == "com.microsoft.sqlserver.jdbc.SQLServerDriver" or \
      params.jdbc_driver_name == "org.postgresql.Driver" or \
      params.jdbc_driver_name == "oracle.jdbc.driver.OracleDriver":
     Execute(format("/bin/sh -c 'cd /usr/lib/ambari-agent/ &&\
@@ -153,7 +154,9 @@ def oozie_server_specific():
     sudo = True,
   )
 
-  if params.jdbc_driver_name=="com.mysql.jdbc.Driver" or params.jdbc_driver_name=="oracle.jdbc.driver.OracleDriver":
+  if params.jdbc_driver_name=="com.mysql.jdbc.Driver" or \
+     params.jdbc_driver_name == "com.microsoft.sqlserver.jdbc.SQLServerDriver" or \
+     params.jdbc_driver_name=="oracle.jdbc.driver.OracleDriver":
 
     environment = {
       "no_proxy": format("{ambari_server_hostname}")

+ 1 - 0
ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_service.py

@@ -45,6 +45,7 @@ def oozie_service(action = 'start', rolling_restart=False):
     start_cmd = format("cd {oozie_tmp_dir} && {oozie_home}/bin/oozie-start.sh")
 
     if params.jdbc_driver_name == "com.mysql.jdbc.Driver" or \
+       params.jdbc_driver_name == "com.microsoft.sqlserver.jdbc.SQLServerDriver" or \
        params.jdbc_driver_name == "org.postgresql.Driver" or \
        params.jdbc_driver_name == "oracle.jdbc.driver.OracleDriver":
       db_connection_check_command = format("{java_home}/bin/java -cp {check_db_connection_jar}:{target} org.apache.ambari.server.DBConnectionVerification '{oozie_jdbc_connection_url}' {oozie_metastore_user_name} {oozie_metastore_user_passwd!p} {jdbc_driver_name}")

+ 4 - 1
ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params.py

@@ -138,7 +138,10 @@ else:
   
 jdbc_driver_name = default("/configurations/oozie-site/oozie.service.JPAService.jdbc.driver", "")
 
-if jdbc_driver_name == "com.mysql.jdbc.Driver":
+if jdbc_driver_name == "com.microsoft.sqlserver.jdbc.SQLServerDriver":
+  jdbc_driver_jar = "sqljdbc4.jar"
+  jdbc_symlink_name = "mssql-jdbc-driver.jar"
+elif jdbc_driver_name == "com.mysql.jdbc.Driver":
   jdbc_driver_jar = "mysql-connector-java.jar"
   jdbc_symlink_name = "mysql-jdbc-driver.jar"
 elif jdbc_driver_name == "org.postgresql.Driver":

+ 2 - 2
ambari-server/src/main/resources/custom_actions/scripts/check_host.py

@@ -219,7 +219,7 @@ class CheckHost(Script):
     # download jdbc driver from ambari-server resources
     try:
       download_file(jdbc_url, jdbc_path)
-      if db_name == DB_MSSQL:
+      if db_name == DB_MSSQL and OSCheck.is_windows_family():
         jdbc_auth_path = os.path.join(agent_cache_dir, JDBC_AUTH_SYMLINK_MSSQL)
         jdbc_auth_url = jdk_location + JDBC_AUTH_SYMLINK_MSSQL
         download_file(jdbc_auth_url, jdbc_auth_path)
@@ -235,7 +235,7 @@
   
     # try to connect to db
     db_connection_check_command = format("{java_exec} -cp {check_db_connection_path}{class_path_delimiter}" \
-           "{jdbc_path} -Djava.library.path={agent_cache_dir} org.apache.ambari.server.DBConnectionVerification {db_connection_url} " \
+           "{jdbc_path} -Djava.library.path={agent_cache_dir} org.apache.ambari.server.DBConnectionVerification \"{db_connection_url}\" " \
            "{user_name} {user_passwd!p} {jdbc_driver}")
     print "INFO db_connection_check_command: " + db_connection_check_command
     process = subprocess.Popen(db_connection_check_command,
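
Note: the quoting added around {db_connection_url} matters mainly for SQL Server, whose JDBC URLs carry extra properties after semicolons. Assuming the check command string is executed through a shell (as the subprocess call here suggests), an unquoted ';' would terminate the java invocation early. A small illustration with a made-up URL; none of this is code from the patch:

    # Made-up URL, for illustration only.
    db_connection_url = "jdbc:sqlserver://dbhost:1433;databaseName=hivedb"

    # Unquoted, a POSIX shell treats ';' as a command separator, so the URL is cut
    # off at "...:1433" and "databaseName=hivedb" would start a second command.
    unquoted = "java -cp check.jar org.apache.ambari.server.DBConnectionVerification %s user pwd driver" % db_connection_url

    # Quoted (what the patch does), the URL is passed through as a single argument.
    quoted = 'java -cp check.jar org.apache.ambari.server.DBConnectionVerification "%s" user pwd driver' % db_connection_url

    print(unquoted)
    print(quoted)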

+ 1 - 1
ambari-server/src/test/python/custom_actions/TestCheckHost.py

@@ -148,7 +148,7 @@ class TestCheckHost(TestCase):
                                                                                     'exit_code': 1}})
     self.assertEquals(format_mock.call_args[0][0],'{java_exec} -cp '\
             '{check_db_connection_path}{class_path_delimiter}{jdbc_path} -Djava.library.path={agent_cache_dir} '\
-            'org.apache.ambari.server.DBConnectionVerification {db_connection_url} '\
+            'org.apache.ambari.server.DBConnectionVerification \"{db_connection_url}\" '\
             '{user_name} {user_passwd!p} {jdbc_driver}')
 
     # test, db connection success

+ 8 - 19
ambari-server/src/test/python/stacks/2.0.6/configs/client-upgrade.json

@@ -544,34 +544,23 @@
             "zk_user": "zookeeper"
         }, 
         "cluster-env": {
-            "security_enabled": "false", 
-            "sink_existing_mssql_server_2_database": "MSSQL", 
+            "security_enabled": "false",
             "pig_tar_source": "/usr/hdp/current/pig-client/pig.tar.gz", 
-            "hadoop-streaming_tar_source": "/usr/hdp/current/hadoop-mapreduce-client/hadoop-streaming.jar", 
-            "sink_existing_mssql_server_database": "MSSQL", 
+            "hadoop-streaming_tar_source": "/usr/hdp/current/hadoop-mapreduce-client/hadoop-streaming.jar",
             "sqoop_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/sqoop/", 
-            "hadoop-streaming_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/", 
-            "sink.dbservername": "", 
+            "hadoop-streaming_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/",
             "ignore_groupsusers_create": "false", 
-            "kerberos_domain": "EXAMPLE.COM", 
-            "sink.dblogin": "", 
+            "kerberos_domain": "EXAMPLE.COM",
             "hive_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/hive/", 
             "tez_tar_source": "/usr/hdp/current/tez-client/lib/tez.tar.gz", 
-            "mapreduce_tar_source": "/usr/hdp/current/hadoop-client/mapreduce.tar.gz", 
-            "hadoop.user.name": "hadoop", 
-            "hive_tar_source": "/usr/hdp/current/hive-client/hive.tar.gz", 
-            "sink.db.schema.name": "", 
-            "sink.jdbc.url": "", 
+            "mapreduce_tar_source": "/usr/hdp/current/hadoop-client/mapreduce.tar.gz",
+            "hive_tar_source": "/usr/hdp/current/hive-client/hive.tar.gz",
             "tez_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/tez/", 
             "user_group": "hadoop", 
-            "sqoop_tar_source": "/usr/hdp/current/sqoop-client/sqoop.tar.gz", 
-            "hadoop.user.password": "", 
-            "sink.jdbc.driver": "", 
+            "sqoop_tar_source": "/usr/hdp/current/sqoop-client/sqoop.tar.gz",
             "smokeuser": "ambari-qa", 
             "mapreduce_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/", 
-            "pig_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/pig/", 
-            "sink.dbpassword": "", 
-            "sink_database": "Existing MSSQL Server database with sql auth"
+            "pig_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/pig/"
        }
    }, 
    "configurationTags": {

+ 7 - 18
ambari-server/src/test/python/stacks/2.0.6/configs/hbase-2.2.json

@@ -563,36 +563,25 @@
             "zk_user": "zookeeper"
         }, 
         "cluster-env": {
-            "security_enabled": "false", 
-            "sink_existing_mssql_server_2_database": "MSSQL", 
-            "sink.dbservername": "", 
-            "hadoop-streaming_tar_source": "/usr/hdp/current/hadoop-mapreduce-client/hadoop-streaming.jar", 
-            "sink_existing_mssql_server_database": "MSSQL", 
+            "security_enabled": "false",
+            "hadoop-streaming_tar_source": "/usr/hdp/current/hadoop-mapreduce-client/hadoop-streaming.jar",
             "sqoop_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/sqoop/", 
             "oozie_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/oozie/", 
             "hadoop-streaming_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/", 
             "pig_tar_source": "/usr/hdp/current/pig-client/pig.tar.gz", 
             "ignore_groupsusers_create": "false", 
-            "kerberos_domain": "EXAMPLE.COM", 
-            "sink.dblogin": "", 
+            "kerberos_domain": "EXAMPLE.COM",
             "hive_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/hive/", 
             "tez_tar_source": "/usr/hdp/current/tez-client/lib/tez.tar.gz", 
-            "mapreduce_tar_source": "/usr/hdp/current/hadoop-client/mapreduce.tar.gz", 
-            "hadoop.user.name": "hadoop", 
-            "hive_tar_source": "/usr/hdp/current/hive-client/hive.tar.gz", 
-            "sink.db.schema.name": "", 
-            "sink.jdbc.url": "", 
+            "mapreduce_tar_source": "/usr/hdp/current/hadoop-client/mapreduce.tar.gz",
+            "hive_tar_source": "/usr/hdp/current/hive-client/hive.tar.gz",
             "tez_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/tez/", 
             "user_group": "hadoop", 
-            "sqoop_tar_source": "/usr/hdp/current/sqoop-client/sqoop.tar.gz", 
-            "hadoop.user.password": "", 
-            "sink.jdbc.driver": "", 
+            "sqoop_tar_source": "/usr/hdp/current/sqoop-client/sqoop.tar.gz",
             "smokeuser": "ambari-qa", 
             "oozie_tar_source": "/usr/hdp/current/oozie-client/oozie-sharelib.tar.gz", 
             "mapreduce_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/", 
-            "pig_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/pig/", 
-            "sink.dbpassword": "", 
-            "sink_database": "Existing MSSQL Server database with sql auth"
+            "pig_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/pig/"
        }
    }, 
    "configurationTags": {

+ 7 - 18
ambari-server/src/test/python/stacks/2.0.6/configs/hbase-check-2.2.json

@@ -558,36 +558,25 @@
             "zk_user": "zookeeper"
         }, 
         "cluster-env": {
-            "security_enabled": "false", 
-            "sink_existing_mssql_server_2_database": "MSSQL", 
-            "sink.dbservername": "", 
-            "hadoop-streaming_tar_source": "/usr/hdp/current/hadoop-mapreduce-client/hadoop-streaming.jar", 
-            "sink_existing_mssql_server_database": "MSSQL", 
+            "security_enabled": "false",
+            "hadoop-streaming_tar_source": "/usr/hdp/current/hadoop-mapreduce-client/hadoop-streaming.jar",
             "sqoop_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/sqoop/", 
             "oozie_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/oozie/", 
             "hadoop-streaming_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/", 
             "pig_tar_source": "/usr/hdp/current/pig-client/pig.tar.gz", 
             "ignore_groupsusers_create": "false", 
-            "kerberos_domain": "EXAMPLE.COM", 
-            "sink.dblogin": "", 
+            "kerberos_domain": "EXAMPLE.COM",
             "hive_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/hive/", 
             "tez_tar_source": "/usr/hdp/current/tez-client/lib/tez.tar.gz", 
-            "mapreduce_tar_source": "/usr/hdp/current/hadoop-client/mapreduce.tar.gz", 
-            "hadoop.user.name": "hadoop", 
-            "hive_tar_source": "/usr/hdp/current/hive-client/hive.tar.gz", 
-            "sink.db.schema.name": "", 
-            "sink.jdbc.url": "", 
+            "mapreduce_tar_source": "/usr/hdp/current/hadoop-client/mapreduce.tar.gz",
+            "hive_tar_source": "/usr/hdp/current/hive-client/hive.tar.gz",
             "tez_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/tez/", 
             "user_group": "hadoop", 
-            "sqoop_tar_source": "/usr/hdp/current/sqoop-client/sqoop.tar.gz", 
-            "hadoop.user.password": "", 
-            "sink.jdbc.driver": "", 
+            "sqoop_tar_source": "/usr/hdp/current/sqoop-client/sqoop.tar.gz",
             "smokeuser": "ambari-qa", 
             "oozie_tar_source": "/usr/hdp/current/oozie-client/oozie-sharelib.tar.gz", 
             "mapreduce_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/", 
-            "pig_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/pig/", 
-            "sink.dbpassword": "", 
-            "sink_database": "Existing MSSQL Server database with sql auth"
+            "pig_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/pig/"
        }
    }, 
    "configurationTags": {

+ 1 - 3
ambari-server/src/test/python/stacks/2.0.6/configs/hbase-preupgrade.json

@@ -102,9 +102,7 @@
             "pig_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/pig/", 
             "user_group": "hadoop", 
             "mapreduce_tar_source": "/usr/hdp/current/hadoop-client/mapreduce.tar.gz", 
-            "sqoop_tar_source": "/usr/hdp/current/sqoop-client/sqoop.tar.gz", 
-            "hadoop.user.password": "", 
-            "hadoop.user.name": "hadoop"
+            "sqoop_tar_source": "/usr/hdp/current/sqoop-client/sqoop.tar.gz"
        }
    }, 
    "commandParams": {

+ 7 - 18
ambari-server/src/test/python/stacks/2.0.6/configs/hbase-rs-2.2.json

@@ -563,36 +563,25 @@
             "zk_user": "zookeeper"
         }, 
         "cluster-env": {
-            "security_enabled": "false", 
-            "sink_existing_mssql_server_2_database": "MSSQL", 
-            "sink.dbservername": "", 
-            "hadoop-streaming_tar_source": "/usr/hdp/current/hadoop-mapreduce-client/hadoop-streaming.jar", 
-            "sink_existing_mssql_server_database": "MSSQL", 
+            "security_enabled": "false",
+            "hadoop-streaming_tar_source": "/usr/hdp/current/hadoop-mapreduce-client/hadoop-streaming.jar",
             "sqoop_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/sqoop/", 
             "oozie_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/oozie/", 
             "hadoop-streaming_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/", 
             "pig_tar_source": "/usr/hdp/current/pig-client/pig.tar.gz", 
             "ignore_groupsusers_create": "false", 
-            "kerberos_domain": "EXAMPLE.COM", 
-            "sink.dblogin": "", 
+            "kerberos_domain": "EXAMPLE.COM",
             "hive_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/hive/", 
             "tez_tar_source": "/usr/hdp/current/tez-client/lib/tez.tar.gz", 
-            "mapreduce_tar_source": "/usr/hdp/current/hadoop-client/mapreduce.tar.gz", 
-            "hadoop.user.name": "hadoop", 
-            "hive_tar_source": "/usr/hdp/current/hive-client/hive.tar.gz", 
-            "sink.db.schema.name": "", 
-            "sink.jdbc.url": "", 
+            "mapreduce_tar_source": "/usr/hdp/current/hadoop-client/mapreduce.tar.gz",
+            "hive_tar_source": "/usr/hdp/current/hive-client/hive.tar.gz",
             "tez_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/tez/", 
             "user_group": "hadoop", 
-            "sqoop_tar_source": "/usr/hdp/current/sqoop-client/sqoop.tar.gz", 
-            "hadoop.user.password": "", 
-            "sink.jdbc.driver": "", 
+            "sqoop_tar_source": "/usr/hdp/current/sqoop-client/sqoop.tar.gz",
             "smokeuser": "ambari-qa", 
             "oozie_tar_source": "/usr/hdp/current/oozie-client/oozie-sharelib.tar.gz", 
             "mapreduce_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/", 
-            "pig_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/pig/", 
-            "sink.dbpassword": "", 
-            "sink_database": "Existing MSSQL Server database with sql auth"
+            "pig_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/pig/"
        }
    }, 
    "configurationTags": {

+ 8 - 19
ambari-server/src/test/python/stacks/2.0.6/configs/zk-service_check_2.2.json

@@ -51,34 +51,23 @@
             "zk_user": "zookeeper"
         }, 
         "cluster-env": {
-            "security_enabled": "false", 
-            "sink_existing_mssql_server_2_database": "MSSQL", 
+            "security_enabled": "false",
             "pig_tar_source": "/usr/hdp/current/pig-client/pig.tar.gz", 
-            "hadoop-streaming_tar_source": "/usr/hdp/current/hadoop-mapreduce-client/hadoop-streaming.jar", 
-            "sink_existing_mssql_server_database": "MSSQL", 
+            "hadoop-streaming_tar_source": "/usr/hdp/current/hadoop-mapreduce-client/hadoop-streaming.jar",
             "sqoop_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/sqoop/", 
-            "hadoop-streaming_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/", 
-            "sink.dbservername": "", 
+            "hadoop-streaming_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/",
             "ignore_groupsusers_create": "false", 
-            "kerberos_domain": "EXAMPLE.COM", 
-            "sink.dblogin": "", 
+            "kerberos_domain": "EXAMPLE.COM",
             "hive_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/hive/", 
             "tez_tar_source": "/usr/hdp/current/tez-client/lib/tez.tar.gz", 
-            "mapreduce_tar_source": "/usr/hdp/current/hadoop-client/mapreduce.tar.gz", 
-            "hadoop.user.name": "hadoop", 
-            "hive_tar_source": "/usr/hdp/current/hive-client/hive.tar.gz", 
-            "sink.db.schema.name": "", 
-            "sink.jdbc.url": "", 
+            "mapreduce_tar_source": "/usr/hdp/current/hadoop-client/mapreduce.tar.gz",
+            "hive_tar_source": "/usr/hdp/current/hive-client/hive.tar.gz",
             "tez_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/tez/", 
             "user_group": "hadoop", 
-            "sqoop_tar_source": "/usr/hdp/current/sqoop-client/sqoop.tar.gz", 
-            "hadoop.user.password": "", 
-            "sink.jdbc.driver": "", 
+            "sqoop_tar_source": "/usr/hdp/current/sqoop-client/sqoop.tar.gz",
             "smokeuser": "ambari-qa", 
             "mapreduce_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/", 
-            "pig_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/pig/", 
-            "sink.dbpassword": "", 
-            "sink_database": "Existing MSSQL Server database with sql auth"
+            "pig_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/pig/"
        }
    }, 
    "commandId": "25-1", 

+ 8 - 19
ambari-server/src/test/python/stacks/2.1/configs/client-upgrade.json

@@ -538,34 +538,23 @@
             "zk_user": "zookeeper"
         }, 
         "cluster-env": {
-            "security_enabled": "false", 
-            "sink_existing_mssql_server_2_database": "MSSQL", 
+            "security_enabled": "false",
             "pig_tar_source": "/usr/hdp/current/pig-client/pig.tar.gz", 
-            "hadoop-streaming_tar_source": "/usr/hdp/current/hadoop-mapreduce-client/hadoop-streaming.jar", 
-            "sink_existing_mssql_server_database": "MSSQL", 
+            "hadoop-streaming_tar_source": "/usr/hdp/current/hadoop-mapreduce-client/hadoop-streaming.jar",
             "sqoop_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/sqoop/", 
-            "hadoop-streaming_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/", 
-            "sink.dbservername": "", 
+            "hadoop-streaming_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/",
             "ignore_groupsusers_create": "false", 
-            "kerberos_domain": "EXAMPLE.COM", 
-            "sink.dblogin": "", 
+            "kerberos_domain": "EXAMPLE.COM",
             "hive_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/hive/", 
             "tez_tar_source": "/usr/hdp/current/tez-client/lib/tez.tar.gz", 
-            "mapreduce_tar_source": "/usr/hdp/current/hadoop-client/mapreduce.tar.gz", 
-            "hadoop.user.name": "hadoop", 
-            "hive_tar_source": "/usr/hdp/current/hive-client/hive.tar.gz", 
-            "sink.db.schema.name": "", 
-            "sink.jdbc.url": "", 
+            "mapreduce_tar_source": "/usr/hdp/current/hadoop-client/mapreduce.tar.gz",
+            "hive_tar_source": "/usr/hdp/current/hive-client/hive.tar.gz",
             "tez_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/tez/", 
             "user_group": "hadoop", 
-            "sqoop_tar_source": "/usr/hdp/current/sqoop-client/sqoop.tar.gz", 
-            "hadoop.user.password": "", 
-            "sink.jdbc.driver": "", 
+            "sqoop_tar_source": "/usr/hdp/current/sqoop-client/sqoop.tar.gz",
             "smokeuser": "ambari-qa", 
             "mapreduce_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/", 
-            "pig_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/pig/", 
-            "sink.dbpassword": "", 
-            "sink_database": "Existing MSSQL Server database with sql auth"
+            "pig_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/pig/"
        }
    }, 
    "configurationTags": {

+ 8 - 19
ambari-server/src/test/python/stacks/2.2/configs/falcon-upgrade.json

@@ -219,34 +219,23 @@
             "namenode_opt_permsize": "128m"
         }, 
         "cluster-env": {
-            "security_enabled": "false", 
-            "sink_existing_mssql_server_2_database": "MSSQL", 
+            "security_enabled": "false",
             "pig_tar_source": "/usr/hdp/current/pig-client/pig.tar.gz", 
-            "hadoop-streaming_tar_source": "/usr/hdp/current/hadoop-mapreduce-client/hadoop-streaming.jar", 
-            "sink_existing_mssql_server_database": "MSSQL", 
+            "hadoop-streaming_tar_source": "/usr/hdp/current/hadoop-mapreduce-client/hadoop-streaming.jar",
             "sqoop_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/sqoop/", 
-            "hadoop-streaming_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/", 
-            "sink.dbservername": "", 
+            "hadoop-streaming_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/",
             "ignore_groupsusers_create": "false", 
-            "kerberos_domain": "EXAMPLE.COM", 
-            "sink.dblogin": "", 
+            "kerberos_domain": "EXAMPLE.COM",
             "hive_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/hive/", 
             "tez_tar_source": "/usr/hdp/current/tez-client/lib/tez.tar.gz", 
-            "mapreduce_tar_source": "/usr/hdp/current/hadoop-client/mapreduce.tar.gz", 
-            "hadoop.user.name": "hadoop", 
-            "hive_tar_source": "/usr/hdp/current/hive-client/hive.tar.gz", 
-            "sink.db.schema.name": "", 
-            "sink.jdbc.url": "", 
+            "mapreduce_tar_source": "/usr/hdp/current/hadoop-client/mapreduce.tar.gz",
+            "hive_tar_source": "/usr/hdp/current/hive-client/hive.tar.gz",
             "tez_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/tez/", 
             "user_group": "hadoop", 
-            "sqoop_tar_source": "/usr/hdp/current/sqoop-client/sqoop.tar.gz", 
-            "hadoop.user.password": "", 
-            "sink.jdbc.driver": "", 
+            "sqoop_tar_source": "/usr/hdp/current/sqoop-client/sqoop.tar.gz",
             "smokeuser": "ambari-qa", 
             "mapreduce_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/", 
-            "pig_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/pig/", 
-            "sink.dbpassword": "", 
-            "sink_database": "Existing MSSQL Server database with sql auth"
+            "pig_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/pig/"
        }
    }, 
    "configurationTags": {

+ 2 - 13
ambari-server/src/test/python/stacks/2.2/configs/hive-upgrade.json

@@ -373,33 +373,22 @@
        },
        "cluster-env": {
            "security_enabled": "false",
-            "sink_existing_mssql_server_2_database": "MSSQL",
            "pig_tar_source": "/usr/hdp/current/pig-client/pig.tar.gz",
            "hadoop-streaming_tar_source": "/usr/hdp/current/hadoop-mapreduce-client/hadoop-streaming.jar",
-            "sink_existing_mssql_server_database": "MSSQL",
            "sqoop_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/sqoop/",
            "hadoop-streaming_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/",
-            "sink.dbservername": "",
            "ignore_groupsusers_create": "false",
            "kerberos_domain": "EXAMPLE.COM",
-            "sink.dblogin": "",
            "hive_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/hive/",
            "tez_tar_source": "/usr/hdp/current/tez-client/lib/tez.tar.gz",
            "mapreduce_tar_source": "/usr/hdp/current/hadoop-client/mapreduce.tar.gz",
-            "hadoop.user.name": "hadoop",
            "hive_tar_source": "/usr/hdp/current/hive-client/hive.tar.gz",
-            "sink.db.schema.name": "",
-            "sink.jdbc.url": "",
            "tez_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/tez/",
            "user_group": "hadoop",
            "sqoop_tar_source": "/usr/hdp/current/sqoop-client/sqoop.tar.gz",
-            "hadoop.user.password": "",
-            "sink.jdbc.driver": "",
            "smokeuser": "ambari-qa",
            "mapreduce_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/",
-            "pig_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/pig/",
-            "sink.dbpassword": "",
-            "sink_database": "Existing MSSQL Server database with sql auth"
+            "pig_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/pig/"
        },
        "ranger-hive-plugin-properties" : {
            "ranger-hive-plugin-enabled":"yes"
@@ -508,4 +497,4 @@
            "c6402.ambari.apache.org"
        ]
    }
-}
+}

+ 1 - 12
ambari-server/src/test/python/stacks/2.2/configs/oozie-upgrade.json

@@ -171,33 +171,22 @@
        },
        "cluster-env": {
            "security_enabled": "false",
-            "sink_existing_mssql_server_2_database": "MSSQL",
            "pig_tar_source": "/usr/hdp/current/pig-client/pig.tar.gz",
            "hadoop-streaming_tar_source": "/usr/hdp/current/hadoop-mapreduce-client/hadoop-streaming.jar",
-            "sink_existing_mssql_server_database": "MSSQL",
            "sqoop_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/sqoop/",
            "hadoop-streaming_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/",
-            "sink.dbservername": "",
            "ignore_groupsusers_create": "false",
            "kerberos_domain": "EXAMPLE.COM",
-            "sink.dblogin": "",
            "hive_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/hive/",
            "tez_tar_source": "/usr/hdp/current/tez-client/lib/tez.tar.gz",
            "mapreduce_tar_source": "/usr/hdp/current/hadoop-client/mapreduce.tar.gz",
-            "hadoop.user.name": "hadoop",
            "hive_tar_source": "/usr/hdp/current/hive-client/hive.tar.gz",
-            "sink.db.schema.name": "",
-            "sink.jdbc.url": "",
            "tez_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/tez/",
            "user_group": "hadoop",
            "sqoop_tar_source": "/usr/hdp/current/sqoop-client/sqoop.tar.gz",
-            "hadoop.user.password": "",
-            "sink.jdbc.driver": "",
            "smokeuser": "ambari-qa",
            "mapreduce_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/",
-            "pig_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/pig/",
-            "sink.dbpassword": "",
-            "sink_database": "Existing MSSQL Server database with sql auth"
+            "pig_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/pig/"
        }
    },
    "configurationTags": {

+ 6 - 6
ambari-web/app/controllers/main/service/info/configs.js

@@ -1574,7 +1574,7 @@ App.MainServiceInfoConfigsController = Em.Controller.extend(App.ServerValidatorM
        configs = configs.without(configs.findProperty('name', 'hive_existing_mssql_server_host'));
        configs = configs.without(configs.findProperty('name', 'hive_existing_mssql_server_2_database'));
        configs = configs.without(configs.findProperty('name', 'hive_existing_mssql_server_2_host'));
-      } else if (hiveDb.value === 'Existing MSSQL Server database with integrated authentication') {
+      } else if (hiveDb.value === 'Existing MSSQL Server database with SQL authentication') {
        var existingMSSQLServerHost = configs.findProperty('name', 'hive_existing_mssql_server_host');
        if (existingMSSQLServerHost) {
          dbHostPropertyName = 'hive_existing_mssql_server_host';
@@ -1588,7 +1588,7 @@ App.MainServiceInfoConfigsController = Em.Controller.extend(App.ServerValidatorM
        configs = configs.without(configs.findProperty('name', 'hive_existing_oracle_database'));
        configs = configs.without(configs.findProperty('name', 'hive_existing_mssql_server_2_database'));
        configs = configs.without(configs.findProperty('name', 'hive_existing_mssql_server_2_host'));
-      } else if (hiveDb.value === 'Existing MSSQL Server database with sql auth') {
+      } else if (hiveDb.value === 'Existing MSSQL Server database with integrated authentication') {
        var existingMSSQL2ServerHost = configs.findProperty('name', 'hive_existing_mssql_server_2_host');
        if (existingMSSQL2ServerHost) {
          dbHostPropertyName = 'hive_existing_mssql_server_2_host';
@@ -1697,7 +1697,7 @@ App.MainServiceInfoConfigsController = Em.Controller.extend(App.ServerValidatorM
        configs = configs.without(configs.findProperty('name', 'oozie_existing_mssql_server_host'));
        configs = configs.without(configs.findProperty('name', 'oozie_existing_mssql_server_2_database'));
        configs = configs.without(configs.findProperty('name', 'oozie_existing_mssql_server_2_host'));
-      } else if (oozieDb.value === 'Existing MSSQL Server database with integrated authentication') {
+      } else if (oozieDb.value === 'Existing MSSQL Server database with SQL authentication') {
        var existingMySqlServerHost = configs.findProperty('name', 'oozie_existing_mssql_server_host');
        if (existingMySqlServerHost) {
          dbHostPropertyName = 'oozie_existing_mssql_server_host';
@@ -1713,7 +1713,7 @@ App.MainServiceInfoConfigsController = Em.Controller.extend(App.ServerValidatorM
        configs = configs.without(configs.findProperty('name', 'oozie_existing_mysql_database'));
        configs = configs.without(configs.findProperty('name', 'oozie_existing_mssql_server_2_database'));
        configs = configs.without(configs.findProperty('name', 'oozie_existing_mssql_server_2_host'));
-      } else if (oozieDb.value === 'Existing MSSQL Server database with sql auth') {
+      } else if (oozieDb.value === 'Existing MSSQL Server database with integrated authentication') {
        var existingMySql2ServerHost = configs.findProperty('name', 'oozie_existing_mssql_server_2_host');
        if (existingMySql2ServerHost) {
          dbHostPropertyName = 'oozie_existing_mssql_server_2_host';
@@ -2293,13 +2293,13 @@ App.MainServiceInfoConfigsController = Em.Controller.extend(App.ServerValidatorM
 
    if (serviceName === 'HIVE') {
      var hiveDb = configs.findProperty('name', 'hive_database').value;
-      if (['Existing MySQL Database', 'Existing Oracle Database', 'Existing PostgreSQL Database', 'Existing MSSQL Server database with integrated authentication', 'Existing MSSQL Server database with sql auth'].contains(hiveDb)) {
+      if (['Existing MySQL Database', 'Existing Oracle Database', 'Existing PostgreSQL Database', 'Existing MSSQL Server database with SQL authentication', 'Existing MSSQL Server database with integrated authentication'].contains(hiveDb)) {
        configs.findProperty('name', 'hive_hostname').isVisible = true;
      }
    }
    if (serviceName === 'OOZIE') {
      var oozieDb = configs.findProperty('name', 'oozie_database').value;
-      if (['Existing MySQL Database', 'Existing Oracle Database', 'Existing PostgreSQL Database', 'Existing MSSQL Server database with integrated authentication', 'Existing MSSQL Server database with sql auth'].contains(oozieDb)) {
+      if (['Existing MySQL Database', 'Existing Oracle Database', 'Existing PostgreSQL Database', 'Existing MSSQL Server database with SQL authentication', 'Existing MSSQL Server database with integrated authentication'].contains(oozieDb)) {
        configs.findProperty('name', 'oozie_hostname').isVisible = true;
      }
    }

+ 2 - 2
ambari-web/app/controllers/wizard/step7_controller.js

@@ -904,9 +904,9 @@ App.WizardStep7Controller = Em.Controller.extend(App.ServerValidatorMixin, {
      var dbPrefix = database.toLowerCase();
      if (database.toLowerCase() == 'mssql') {
        if (/integrated/gi.test(dbTypeConfig.value)) {
-          dbPrefix = 'mssql_server';
-        } else {
          dbPrefix = 'mssql_server_2';
+        } else {
+          dbPrefix = 'mssql_server';
        }
      }
      var propertyName = propertyPrefix + '_existing_' + dbPrefix + '_host';
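
Note: the web UI edits in configs.js above, step7_controller.js above, and step8_controller.js below all correct the same pairing: the "Existing MSSQL Server database with SQL authentication" option should drive the plain mssql_server properties, while the integrated-authentication option should drive the mssql_server_2 ones (the old code had the two swapped and used the label "sql auth"). A tiny illustrative mapping, written in Python for brevity even though the real code is the Ember controllers shown here:

    # Illustration only: display name -> property prefix, as wired up by these fixes.
    MSSQL_DB_PREFIX = {
        "Existing MSSQL Server database with SQL authentication": "mssql_server",
        "Existing MSSQL Server database with integrated authentication": "mssql_server_2",
    }

    def existing_db_host_property(service, db_display_name):
        # e.g. ("hive", "...integrated authentication") -> "hive_existing_mssql_server_2_host"
        return "%s_existing_%s_host" % (service, MSSQL_DB_PREFIX[db_display_name])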

+ 20 - 20
ambari-web/app/controllers/wizard/step8_controller.js

@@ -278,16 +278,16 @@ App.WizardStep8Controller = Em.Controller.extend(App.AddSecurityConfigs, App.wiz
          hive_properties = Em.A(['hive_ambari_database', 'hive_existing_oracle_database', 'hive_existing_mysql_database',
            'hive_existing_mssql_server_database', 'hive_existing_mssql_server_2_database']);
          break;
+        case 'Existing MSSQL Server database with SQL authentication':
+          configs.findProperty('name', 'hive_hostname').value = configs.findProperty('name', 'hive_existing_mssql_server_host').value;
+          hive_properties = Em.A(['hive_ambari_database', 'hive_existing_oracle_database', 'hive_existing_postgresql_database',
+            'hive_existing_mysql_database', 'hive_existing_mssql_server_database', 'hive_existing_mssql_server_database']);
+          break;
        case 'Existing MSSQL Server database with integrated authentication':
          configs.findProperty('name', 'hive_hostname').value = configs.findProperty('name', 'hive_existing_mssql_server_2_host').value;
          hive_properties = Em.A(['hive_ambari_database', 'hive_existing_oracle_database', 'hive_existing_postgresql_database',
            'hive_existing_mysql_database', 'hive_existing_mssql_server_database', 'hive_existing_mssql_server_2_database']);
          break;
-        case 'Existing MSSQL Server database with sql auth':
-          configs.findProperty('name', 'hive_hostname').value = configs.findProperty('name', 'hive_existing_mssql_server_host').value;
-          hive_properties = Em.A(['hive_ambari_database', 'hive_existing_oracle_database', 'hive_existing_postgresql_database',
-            'hive_existing_mysql_database', 'hive_existing_mssql_server_database', 'hive_existing_mssql_server_database']);
-          break;
        default:
          configs.findProperty('name', 'hive_hostname').value = configs.findProperty('name', 'hive_existing_oracle_host').value;
          hive_properties = Em.A(['hive_ambari_database',  'hive_existing_mysql_database', 'hive_existing_postgresql_database',
@@ -330,13 +330,13 @@ App.WizardStep8Controller = Em.Controller.extend(App.AddSecurityConfigs, App.wiz
          oozie_properties = Em.A(['oozie_ambari_database', 'oozie_existing_oracle_database', 'oozie_existing_mysql_database',
            'oozie_existing_mssql_server_database', 'oozie_existing_mssql_server_2_database']);
          break;
-        case 'Existing MSSQL Server database with integrated authentication':
-          configs.findProperty('name', 'oozie_hostname').value = configs.findProperty('name', 'oozie_existing_mysql_host').value;
+        case 'Existing MSSQL Server database with SQL authentication':
+          configs.findProperty('name', 'oozie_hostname').value = configs.findProperty('name', 'oozie_existing_mssql_server_host').value;
          oozie_properties = Em.A(['oozie_existing_oracle_database', 'oozie_existing_postgresql_database',
            'oozie_existing_mysql_database', 'oozie_existing_mssql_server_database', 'oozie_existing_mssql_server_2_database']);
          break;
-        case 'Existing MSSQL Server database with sql auth':
-          configs.findProperty('name', 'oozie_hostname').value = configs.findProperty('name', 'oozie_existing_mysql_host').value;
+        case 'Existing MSSQL Server database with integrated authentication':
+          configs.findProperty('name', 'oozie_hostname').value = configs.findProperty('name', 'oozie_existing_mssql_server_2_host').value;
          oozie_properties = Em.A(['oozie_existing_oracle_database', 'oozie_existing_postgresql_database',
            'oozie_existing_mysql_database', 'oozie_existing_mssql_server_database', 'oozie_existing_mssql_server_database']);
          break;
@@ -737,8 +737,8 @@ App.WizardStep8Controller = Em.Controller.extend(App.AddSecurityConfigs, App.wiz
   * @method loadHiveDbValue
   */
  loadHiveDbValue: function () {
-    var db, serviceConfigPreoprties = this.get('wizardController').getDBProperty('serviceConfigProperties'),
-      hiveDb = serviceConfigPreoprties.findProperty('name', 'hive_database');
+    var db, serviceConfigProperties = this.get('wizardController').getDBProperty('serviceConfigProperties'),
+      hiveDb = serviceConfigProperties.findProperty('name', 'hive_database');
    if (hiveDb.value === 'New MySQL Database') {
      return 'MySQL (New Database)';
    } else if (hiveDb.value === 'New PostgreSQL Database') {
@@ -746,27 +746,27 @@ App.WizardStep8Controller = Em.Controller.extend(App.AddSecurityConfigs, App.wiz
    }
    else {
      if (hiveDb.value === 'Existing MySQL Database') {
-        db = serviceConfigPreoprties.findProperty('name', 'hive_existing_mysql_database');
+        db = serviceConfigProperties.findProperty('name', 'hive_existing_mysql_database');
        return db.value + ' (' + hiveDb.value + ')';
      }
      else {
        if (hiveDb.value === Em.I18n.t('services.service.config.hive.oozie.postgresql')) {
-          db = serviceConfigPreoprties.findProperty('name', 'hive_existing_postgresql_database');
+          db = serviceConfigProperties.findProperty('name', 'hive_existing_postgresql_database');
          return db.value + ' (' + hiveDb.value + ')';
        }
        else {
-          if (hiveDb.value === 'Existing MSSQL Server database with integrated authentication') {
-            db = serviceConfigPreoprties.findProperty('name', 'hive_existing_mssql_server_database');
+          if (hiveDb.value === 'Existing MSSQL Server database with SQL authentication') {
+            db = serviceConfigProperties.findProperty('name', 'hive_existing_mssql_server_database');
            return db.value + ' (' + hiveDb.value + ')';
          }
          else {
-            if (hiveDb.value === 'Existing MSSQL Server database with sql auth') {
-              db = serviceConfigPreoprties.findProperty('name', 'hive_existing_mssql_server_2_database');
+            if (hiveDb.value === 'Existing MSSQL Server database with integrated authentication') {
+              db = serviceConfigProperties.findProperty('name', 'hive_existing_mssql_server_2_database');
              return db.value + ' (' + hiveDb.value + ')';
            }
            else {
              // existing oracle database
-              db = serviceConfigPreoprties.findProperty('name', 'hive_existing_oracle_database');
+              db = serviceConfigProperties.findProperty('name', 'hive_existing_oracle_database');
              return db.value + ' (' + hiveDb.value + ')';
            }
          }
@@ -826,12 +826,12 @@ App.WizardStep8Controller = Em.Controller.extend(App.AddSecurityConfigs, App.wiz
          return db.value + ' (' + oozieDb.value + ')';
        }
        else {
-          if (oozieDb.value === 'Existing MSSQL Server database with integrated authentication') {
+          if (oozieDb.value === 'Existing MSSQL Server database with SQL authentication') {
            db = this.get('wizardController').getDBProperty('serviceConfigProperties').findProperty('name', 'oozie_existing_mssql_server_database');
            return db.value + ' (' + oozieDb.value + ')';
          }
          else {
-            if (oozieDb.value === 'Existing MSSQL Server database with sql auth') {
+            if (oozieDb.value === 'Existing MSSQL Server database with integrated authentication') {
              db = this.get('wizardController').getDBProperty('serviceConfigProperties').findProperty('name', 'oozie_existing_mssql_server_2_database');
              return db.value + ' (' + oozieDb.value + ')';
            }

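Note on the step8_controller.js hunks above: the SQL-authentication cases now read the `*_existing_mssql_server_host` properties and the integrated-authentication cases read `*_existing_mssql_server_2_host`, instead of falling back to the MySQL host. A sketch of the intended lookup, with names taken directly from the cases above:

```js
// Illustrative table (not part of the patch) summarizing the host property per option.
var mssqlHostProperty = {
  'Existing MSSQL Server database with SQL authentication': {
    HIVE:  'hive_existing_mssql_server_host',
    OOZIE: 'oozie_existing_mssql_server_host'
  },
  'Existing MSSQL Server database with integrated authentication': {
    HIVE:  'hive_existing_mssql_server_2_host',
    OOZIE: 'oozie_existing_mssql_server_2_host'
  }
};
```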
+ 6 - 6
ambari-web/app/data/HDP2/site_properties.js

@@ -3011,12 +3011,12 @@ var hdp2properties = [
        hidden: App.get('isHadoopWindowsStack')
      },
      {
-        displayName: 'Existing MSSQL Server database with integrated authentication',
+        displayName: 'Existing MSSQL Server database with SQL authentication',
        foreignKeys: ['hive_existing_mssql_server_database', 'hive_existing_mssql_server_host'],
        hidden: !App.get('isHadoopWindowsStack')
      },
      {
-        displayName: 'Existing MSSQL Server database with sql auth',
+        displayName: 'Existing MSSQL Server database with integrated authentication',
        foreignKeys: ['hive_existing_mssql_server_2_database', 'hive_existing_mssql_server_2_host'],
        hidden: !App.get('isHadoopWindowsStack')
      }
@@ -3385,7 +3385,7 @@ var hdp2properties = [
     "displayName": "Database Type",
     "displayName": "Database Type",
     "value": "",
     "value": "",
     "defaultValue": "MSSQL",
     "defaultValue": "MSSQL",
-    "description": "Using an existing MSSQL database with integrated authentication for Oozie Metastore",
+    "description": "Using an existing MSSQL database with SQL authentication for Oozie Metastore",
     "displayType": "masterHost",
     "displayType": "masterHost",
     "isVisible": false,
     "isVisible": false,
     "isReconfigurable": false,
     "isReconfigurable": false,
@@ -3401,7 +3401,7 @@ var hdp2properties = [
     "displayName": "Database Type",
     "displayName": "Database Type",
     "value": "",
     "value": "",
     "defaultValue": "MSSQL",
     "defaultValue": "MSSQL",
-    "description": "Using an existing MSSQL database with sql auth for Oozie Metastore",
+    "description": "Using an existing MSSQL database with integrated authentication for Oozie Metastore",
     "displayType": "masterHost",
     "displayType": "masterHost",
     "isVisible": false,
     "isVisible": false,
     "isReconfigurable": false,
     "isReconfigurable": false,
@@ -3439,12 +3439,12 @@ var hdp2properties = [
        hidden: App.get('isHadoopWindowsStack')
      },
      {
-        displayName: 'Existing MSSQL Server database with integrated authentication',
+        displayName: 'Existing MSSQL Server database with SQL authentication',
        foreignKeys: ['oozie_existing_mssql_server_database', 'oozie_existing_mssql_server_host'],
        hidden: !App.get('isHadoopWindowsStack')
      },
      {
-        displayName: 'Existing MSSQL Server database with sql auth',
+        displayName: 'Existing MSSQL Server database with integrated authentication',
        foreignKeys: ['oozie_existing_mssql_server_2_database', 'oozie_existing_mssql_server_2_host'],
        hidden: !App.get('isHadoopWindowsStack')
      }

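Note on the site_properties.js hunks above: only the display names and descriptions are swapped; the foreign keys stay put, so the first MSSQL radio entry (now labeled SQL authentication) still maps to the `*_mssql_server_*` properties and the second (integrated authentication) to `*_mssql_server_2_*`. For example, the Hive entries read as follows after the change (abridged restatement, not new code in the patch):

```js
// Resulting entries; the foreignKeys values are unchanged by this patch.
{
  displayName: 'Existing MSSQL Server database with SQL authentication',
  foreignKeys: ['hive_existing_mssql_server_database', 'hive_existing_mssql_server_host'],
  hidden: !App.get('isHadoopWindowsStack')
},
{
  displayName: 'Existing MSSQL Server database with integrated authentication',
  foreignKeys: ['hive_existing_mssql_server_2_database', 'hive_existing_mssql_server_2_host'],
  hidden: !App.get('isHadoopWindowsStack')
}
```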
+ 12 - 12
ambari-web/app/views/wizard/controls_view.js

@@ -258,13 +258,13 @@ App.ServiceConfigRadioButtons = Ember.View.extend(App.ServiceConfigCalculateId,
              dbClass.set('value', "oracle.jdbc.driver.OracleDriver");
              Em.set(hiveDbType, 'value', 'oracle');
              break;
-            case 'Existing MSSQL Server database with integrated authentication':
-              connectionUrl.set('value', "jdbc:sqlserver://" + this.get('hostName') + ";databaseName=" + this.get('databaseName') + ";integratedSecurity=true");
+            case 'Existing MSSQL Server database with SQL authentication':
+              connectionUrl.set('value', "jdbc:sqlserver://" + this.get('hostName') + ";databaseName=" + this.get('databaseName'));
              dbClass.set('value', "com.microsoft.sqlserver.jdbc.SQLServerDriver");
              Em.set(hiveDbType, 'value', 'mssql');
              break;
-            case 'Existing MSSQL Server database with sql auth':
-              connectionUrl.set('value', "jdbc:sqlserver://" + this.get('hostName') + ";databaseName=" + this.get('databaseName'));
+            case 'Existing MSSQL Server database with integrated authentication':
+              connectionUrl.set('value', "jdbc:sqlserver://" + this.get('hostName') + ";databaseName=" + this.get('databaseName') + ";integratedSecurity=true");
              dbClass.set('value', "com.microsoft.sqlserver.jdbc.SQLServerDriver");
              Em.set(hiveDbType, 'value', 'mssql');
              break;
@@ -296,12 +296,12 @@ App.ServiceConfigRadioButtons = Ember.View.extend(App.ServiceConfigCalculateId,
              connectionUrl.set('value', "jdbc:oracle:thin:@//" + this.get('hostName') + ":1521/" + this.get('databaseName'));
              dbClass.set('value', "oracle.jdbc.driver.OracleDriver");
              break;
-            case 'Existing MSSQL Server database with integrated authentication':
-              connectionUrl.set('value', "jdbc:sqlserver://" + this.get('hostName') + ";databaseName=" + this.get('databaseName') + ";integratedSecurity=true");
+            case 'Existing MSSQL Server database with SQL authentication':
+              connectionUrl.set('value', "jdbc:sqlserver://" + this.get('hostName') + ";databaseName=" + this.get('databaseName'));
              dbClass.set('value', "com.microsoft.sqlserver.jdbc.SQLServerDriver");
              break;
-            case 'Existing MSSQL Server database with sql auth':
-              connectionUrl.set('value', "jdbc:sqlserver://" + this.get('hostName') + ";databaseName=" + this.get('databaseName'));
+            case 'Existing MSSQL Server database with integrated authentication':
+              connectionUrl.set('value', "jdbc:sqlserver://" + this.get('hostName') + ";databaseName=" + this.get('databaseName') + ";integratedSecurity=true");
              dbClass.set('value', "com.microsoft.sqlserver.jdbc.SQLServerDriver");
              break;
          }
@@ -355,10 +355,10 @@ App.ServiceConfigRadioButtons = Ember.View.extend(App.ServiceConfigCalculateId,
        case 'Existing Oracle Database':
          hostname = this.get('categoryConfigsAll').findProperty('name', 'hive_existing_oracle_host');
          break;
-        case 'Existing MSSQL Server database with integrated authentication':
+        case 'Existing MSSQL Server database with SQL authentication':
          hostname = this.get('categoryConfigsAll').findProperty('name', 'hive_existing_mssql_server_host');
          break;
-        case 'Existing MSSQL Server database with sql auth':
+        case 'Existing MSSQL Server database with integrated authentication':
          hostname = this.get('categoryConfigsAll').findProperty('name', 'hive_existing_mssql_server_2_host');
          break;
      }
@@ -381,10 +381,10 @@ App.ServiceConfigRadioButtons = Ember.View.extend(App.ServiceConfigCalculateId,
        case 'Existing Oracle Database':
          hostname = this.get('categoryConfigsAll').findProperty('name', 'oozie_existing_oracle_host');
          break;
-        case 'Existing MSSQL Server database with integrated authentication':
+        case 'Existing MSSQL Server database with SQL authentication':
          hostname = this.get('categoryConfigsAll').findProperty('name', 'oozie_existing_mssql_server_host');
          break;
-        case 'Existing MSSQL Server database with sql auth':
+        case 'Existing MSSQL Server database with integrated authentication':
          hostname = this.get('categoryConfigsAll').findProperty('name', 'oozie_existing_mssql_server_2_host');
          break;
      }

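Note on the controls_view.js hunks above: each label now builds the matching JDBC URL, with SQL authentication using a plain SQL Server URL and integrated authentication appending `integratedSecurity=true`; both variants keep the same driver class. A sketch of the resulting values, using a placeholder host and database name:

```js
// Placeholder host/database; URL formats match the connectionUrl branches above.
var sqlAuthUrl    = "jdbc:sqlserver://dbhost;databaseName=hivedb";
var integratedUrl = "jdbc:sqlserver://dbhost;databaseName=hivedb;integratedSecurity=true";
var driverClass   = "com.microsoft.sqlserver.jdbc.SQLServerDriver"; // unchanged for both modes
```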
+ 6 - 6
ambari-web/test/views/wizard/controls_view_test.js

@@ -220,12 +220,12 @@ describe('App.ServiceConfigRadioButtons', function () {
        },
        {
          serviceName: 'HIVE',
-          value: 'Existing MSSQL Server database with integrated authentication',
+          value: 'Existing MSSQL Server database with SQL authentication',
          expected: 'h4'
        },
        {
          serviceName: 'HIVE',
-          value: 'Existing MSSQL Server database with sql auth',
+          value: 'Existing MSSQL Server database with integrated authentication',
          expected: 'h5'
        },
        {
@@ -255,12 +255,12 @@ describe('App.ServiceConfigRadioButtons', function () {
        },
        {
          serviceName: 'OOZIE',
-          value: 'Existing MSSQL Server database with integrated authentication',
+          value: 'Existing MSSQL Server database with SQL authentication',
          expected: 'h14'
        },
        {
          serviceName: 'OOZIE',
-          value: 'Existing MSSQL Server database with sql auth',
+          value: 'Existing MSSQL Server database with integrated authentication',
          expected: 'h15'
        },
        {
@@ -356,7 +356,7 @@ describe('App.ServiceConfigRadioButtons', function () {
        },
        {
          serviceName: 'HIVE',
-          serviceConfigValue: 'Existing MSSQL Server database with sql auth',
+          serviceConfigValue: 'Existing MSSQL Server database with SQL authentication',
          databaseName: 'db3',
          hostName: 'h3',
          connectionUrlValue: 'jdbc:sqlserver://h3;databaseName=db3',
@@ -413,7 +413,7 @@ describe('App.ServiceConfigRadioButtons', function () {
        },
        {
          serviceName: 'OOZIE',
-          serviceConfigValue: 'Existing MSSQL Server database with sql auth',
+          serviceConfigValue: 'Existing MSSQL Server database with SQL authentication',
          databaseName: 'db9',
          hostName: 'h9',
          connectionUrlValue: 'jdbc:sqlserver://h9;databaseName=db9',
           connectionUrlValue: 'jdbc:sqlserver://h9;databaseName=db9',