
AMBARI-8944. HIVE_METASTORE start failed on Ubuntu12.(vbrodetskyi)

Vitaly Brodetskyi 10 years ago
parent
commit
6271c9c41f

+ 12 - 8
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py

@@ -281,24 +281,28 @@ def jdbc_connector():
       "no_proxy": format("{ambari_server_hostname}")
     }
 
-    Execute(('curl', '-kf', '-x', "", '--retry', '10', params.driver_curl_source, '-o', params.driver_curl_target),
-            not_if=format("test -f {target}"),
+    # TODO: should be removed after ranger_hive_plugin will not provide jdbc
+    Execute(('rm', '-f', params.prepackaged_ojdbc_symlink),
+            path=["/bin", "/usr/bin/"],
+            sudo = True)
+
+    Execute(('curl', '-kf', '-x', "", '--retry', '10', params.driver_curl_source, '-o',
+             params.downloaded_custom_connector),
+            not_if=format("test -f {downloaded_custom_connector}"),
             path=["/bin", "/usr/bin/"],
             environment=environment,
             sudo = True)
 
 
-    Execute(('cp', params.driver_curl_target, params.target),
-            not_if=format("test -f {target}"),
-            creates=params.target,
+    Execute(('cp', '--remove-destination', params.downloaded_custom_connector, params.target),
+            #creates=params.target, TODO: uncomment after ranger_hive_plugin will not provide jdbc
             path=["/bin", "/usr/bin/"],
             sudo = True)
 
   else:
     #for default hive db (Mysql)
-    Execute(('cp', format('/usr/share/java/{jdbc_jar_name}'), params.target),
-            not_if=format("test -f {target}"),
-            creates=params.target,
+    Execute(('cp', '--remove-destination', format('/usr/share/java/{jdbc_jar_name}'), params.target),
+            #creates=params.target, TODO: uncomment after ranger_hive_plugin will not provide jdbc
             path=["/bin", "/usr/bin/"],
             sudo=True
     )
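Net effect in hive.py: instead of curl-ing the connector straight to driver_curl_target and skipping both steps whenever the target already exists, the custom-driver branch now removes the ojdbc6.jar symlink that (per the TODO) ranger_hive_plugin pre-packages under hive_lib, downloads the connector into the agent tmp dir, and force-copies it over the target. A condensed sketch of the resulting sequence, collapsed for readability; it runs inside jdbc_connector(), which already has Execute, format, params and environment in scope, so nothing below is new API:

    # 1) Drop the pre-packaged ojdbc6.jar symlink so the copy below never lands on a stale link.
    Execute(('rm', '-f', params.prepackaged_ojdbc_symlink),
            path=["/bin", "/usr/bin/"], sudo=True)

    # 2) Fetch the custom connector into the agent tmp dir; the guard now checks the
    #    downloaded file, not the install target.
    Execute(('curl', '-kf', '-x', "", '--retry', '10',
             params.driver_curl_source, '-o', params.downloaded_custom_connector),
            not_if=format("test -f {downloaded_custom_connector}"),
            path=["/bin", "/usr/bin/"], environment=environment, sudo=True)

    # 3) Install unconditionally; cp --remove-destination unlinks an existing file or
    #    symlink at the destination instead of writing through it.
    Execute(('cp', '--remove-destination', params.downloaded_custom_connector, params.target),
            path=["/bin", "/usr/bin/"], sudo=True)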

+ 2 - 0
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params.py

@@ -155,6 +155,8 @@ elif hive_jdbc_driver == "oracle.jdbc.driver.OracleDriver":
 check_db_connection_jar_name = "DBConnectionVerification.jar"
 check_db_connection_jar = format("/usr/lib/ambari-agent/{check_db_connection_jar_name}")
 hive_jdbc_drivers_list = ["com.mysql.jdbc.Driver","org.postgresql.Driver","oracle.jdbc.driver.OracleDriver"]
+downloaded_custom_connector = format("{tmp_dir}/{jdbc_jar_name}")
+prepackaged_ojdbc_symlink = format("{hive_lib}/ojdbc6.jar")
 
 #common
 hive_metastore_host = config['clusterHostInfo']['hive_metastore_host'][0]
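The two new params are simple compositions of variables already defined in this module. For an Oracle-backed metastore they expand roughly as below; the jar name, tmp_dir and hive_lib values are assumed defaults shown only for illustration (format here is resource_management's format, which interpolates names from the surrounding scope):

    jdbc_jar_name = "ojdbc6.jar"            # assumed value for the OracleDriver branch in the hunk header
    tmp_dir = "/var/lib/ambari-agent/tmp"   # assumed agent tmp dir
    hive_lib = "/usr/lib/hive/lib"          # assumed Hive lib dir

    downloaded_custom_connector = format("{tmp_dir}/{jdbc_jar_name}")
    # -> /var/lib/ambari-agent/tmp/ojdbc6.jar   (where hive.py now downloads the driver)
    prepackaged_ojdbc_symlink = format("{hive_lib}/ojdbc6.jar")
    # -> /usr/lib/hive/lib/ojdbc6.jar           (symlink provided by ranger_hive_plugin; removed in hive.py)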

+ 5 - 6
ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py

@@ -161,20 +161,19 @@ def oozie_server_specific():
       "no_proxy": format("{ambari_server_hostname}")
     }
 
-    Execute(('curl', '-kf', '-x', "", '--retry', '10', params.driver_curl_source, '-o', params.driver_curl_target),
-            not_if=format("test -f {target}"),
+    Execute(('curl', '-kf', '-x', "", '--retry', '10', params.driver_curl_source, '-o',
+             params.downloaded_custom_connector),
+            not_if=format("test -f {downloaded_custom_connector}"),
             path=["/bin", "/usr/bin/"],
             environment=environment,
             sudo = True)
 
 
-    Execute(('cp', params.driver_curl_target, params.target),
-            not_if=format("test -f {target}"),
-            creates=params.target,
+    Execute(('cp', '--remove-destination', params.downloaded_custom_connector, params.target),
+            #creates=params.target, TODO: uncomment after ranger_hive_plugin will not provide jdbc
             path=["/bin", "/usr/bin/"],
             sudo = True)
 
-
   #falcon el extension
   if params.has_falcon_host:
     Execute(format('sudo cp {falcon_home}/oozie/ext/falcon-oozie-el-extension-*.jar {oozie_libext_dir}'),

+ 2 - 2
ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_service.py

@@ -39,8 +39,8 @@ def oozie_service(action = 'start'): # 'start' or 'stop'
     cmd1 =  format("cd {oozie_tmp_dir} && {oozie_home}/bin/ooziedb.sh create -sqlfile oozie.sql -run")
     cmd2 =  format("{kinit_if_needed} {put_shared_lib_to_hdfs_cmd} ; hadoop --config {hadoop_conf_dir} dfs -chmod -R 755 {oozie_hdfs_user_dir}/share")
 
-    if not os.path.isfile(params.jdbc_driver_jar) and params.jdbc_driver_name == "org.postgresql.Driver":
-      print format("ERROR: jdbc file {jdbc_driver_jar} is unavailable. Please, follow next steps:\n" \
+    if not os.path.isfile(params.target) and params.jdbc_driver_name == "org.postgresql.Driver":
+      print format("ERROR: jdbc file {target} is unavailable. Please, follow next steps:\n" \
         "1) Download postgresql-9.0-801.jdbc4.jar.\n2) Create needed directory: mkdir -p {oozie_home}/libserver/\n" \
         "3) Copy postgresql-9.0-801.jdbc4.jar to newly created dir: cp /path/to/jdbc/postgresql-9.0-801.jdbc4.jar " \
         "{oozie_home}/libserver/\n4) Copy postgresql-9.0-801.jdbc4.jar to libext: cp " \

+ 1 - 0
ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params.py

@@ -125,6 +125,7 @@ else:
 
 driver_curl_source = format("{jdk_location}/{jdbc_symlink_name}")
 driver_curl_target = format("{java_share_dir}/{jdbc_driver_jar}")
+downloaded_custom_connector = format("{tmp_dir}/{jdbc_driver_jar}")
 if jdbc_driver_name == "org.postgresql.Driver":
   target = jdbc_driver_jar
 else:

+ 12 - 8
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/hive.py

@@ -166,24 +166,28 @@ def jdbc_connector():
       "no_proxy": format("{ambari_server_hostname}")
     }
 
-    Execute(('curl', '-kf', '-x', "", '--retry', '10', params.driver_curl_source, '-o', params.driver_curl_target),
-            not_if=format("test -f {target}"),
+    # TODO: should be removed after ranger_hive_plugin will not provide jdbc
+    Execute(('rm', '-f', params.prepackaged_ojdbc_symlink),
+            path=["/bin", "/usr/bin/"],
+            sudo = True)
+
+    Execute(('curl', '-kf', '-x', "", '--retry', '10', params.driver_curl_source, '-o',
+             params.downloaded_custom_connector),
+            not_if=format("test -f {downloaded_custom_connector}"),
             path=["/bin", "/usr/bin/"],
             environment=environment,
             sudo = True)
 
 
-    Execute(('cp', params.driver_curl_target, params.target),
-            not_if=format("test -f {target}"),
-            creates=params.target,
+    Execute(('cp', '--remove-destination', params.downloaded_custom_connector, params.target),
+            #creates=params.target, TODO: uncomment after ranger_hive_plugin will not provide jdbc
             path=["/bin", "/usr/bin/"],
             sudo = True)
 
   else:
     #for default hive db (Mysql)
-    Execute(('cp', format('/usr/share/java/{jdbc_jar_name}'), params.target),
-            not_if=format("test -f {target}"),
-            creates=params.target,
+    Execute(('cp', '--remove-destination', format('/usr/share/java/{jdbc_jar_name}'), params.target),
+            #creates=params.target, TODO: uncomment after ranger_hive_plugin will not provide jdbc
             path=["/bin", "/usr/bin/"],
             sudo=True
     )

+ 2 - 0
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/params.py

@@ -50,6 +50,8 @@ elif hive_jdbc_driver == "oracle.jdbc.driver.OracleDriver":
 check_db_connection_jar_name = "DBConnectionVerification.jar"
 check_db_connection_jar = format("/usr/lib/ambari-agent/{check_db_connection_jar_name}")
 hive_jdbc_drivers_list = ["com.mysql.jdbc.Driver","org.postgresql.Driver","oracle.jdbc.driver.OracleDriver"]
+downloaded_custom_connector = format("{tmp_dir}/{jdbc_jar_name}")
+prepackaged_ojdbc_symlink = format("{hive_lib}/ojdbc6.jar")
 
 #common
 hive_metastore_port = get_port_from_url(config['configurations']['hive-site']['hive.metastore.uris']) #"9083"

+ 5 - 6
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/oozie.py

@@ -146,20 +146,19 @@ def oozie_server_specific(
       "no_proxy": format("{ambari_server_hostname}")
     }
 
-    Execute(('curl', '-kf', '-x', "", '--retry', '10', params.driver_curl_source, '-o', params.driver_curl_target),
-            not_if=format("test -f {target}"),
+    Execute(('curl', '-kf', '-x', "", '--retry', '10', params.driver_curl_source, '-o',
+             params.downloaded_custom_connector),
+            not_if=format("test -f {downloaded_custom_connector}"),
             path=["/bin", "/usr/bin/"],
             environment=environment,
             sudo = True)
 
 
-    Execute(('cp', params.driver_curl_target, params.target),
-            not_if=format("test -f {target}"),
-            creates=params.target,
+    Execute(('cp', '--remove-destination', params.downloaded_custom_connector, params.target),
+            #creates=params.target, TODO: uncomment after ranger_hive_plugin will not provide jdbc
             path=["/bin", "/usr/bin/"],
             sudo = True)
 
-
   oozie_setup_cmd = format("cd {oozie_tmp_dir} && /usr/lib/oozie/bin/oozie-setup.sh -hadoop 0.20.200 {hadoop_jar_location} -extjs {ext_js_path} {jar_option} {jar_path}")
   Execute( oozie_setup_cmd,
     user = params.oozie_user,

+ 5 - 4
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/params.py

@@ -71,13 +71,13 @@ jdbc_driver_name = default("/configurations/oozie-site/oozie.service.JPAService.
 oozie_env_sh_template = config['configurations']['oozie-env']['content']
 
 if jdbc_driver_name == "com.mysql.jdbc.Driver":
-  jdbc_driver_jar = "/usr/share/java/mysql-connector-java.jar"
+  jdbc_driver_jar = "mysql-connector-java.jar"
   jdbc_symlink_name = "mysql-jdbc-driver.jar"
 elif jdbc_driver_name == "org.postgresql.Driver":
   jdbc_driver_jar = format("{oozie_home}/libtools/postgresql-9.0-801.jdbc4.jar") #oozie using it's own postgres jdbc
   jdbc_symlink_name = "postgres-jdbc-driver.jar"
 elif jdbc_driver_name == "oracle.jdbc.driver.OracleDriver":
-  jdbc_driver_jar = "/usr/share/java/ojdbc6.jar"
+  jdbc_driver_jar = "ojdbc.jar"
   jdbc_symlink_name = "oracle-jdbc-driver.jar"
 else:
   jdbc_driver_jar = ""
@@ -85,6 +85,7 @@ else:
 
 driver_curl_source = format("{jdk_location}/{jdbc_symlink_name}")
 driver_curl_target = format("{java_share_dir}/{jdbc_driver_jar}")
+downloaded_custom_connector = format("{tmp_dir}/{jdbc_driver_jar}")
 if jdbc_driver_name == "org.postgresql.Driver":
   target = jdbc_driver_jar
 else:
@@ -98,9 +99,9 @@ else:
 lzo_jar_suffix = '`LZO_JARS=($(find /usr/lib/hadoop/lib/ -name "hadoop-lzo-*")); echo ${LZO_JARS[0]}`' if lzo_enabled else ""
   
 if lzo_enabled and jdbc_driver_name:
-    jar_path = format("{lzo_jar_suffix}:{jdbc_driver_jar}")        
+    jar_path = format("{lzo_jar_suffix}:{target}")
 else:
-    jar_path = "{lzo_jar_suffix}{jdbc_driver_jar}"
+    jar_path = "{lzo_jar_suffix}{target}"
 
 #oozie-log4j.properties
 if (('oozie-log4j' in config['configurations']) and ('content' in config['configurations']['oozie-log4j'])):
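Normalizing the MySQL and Oracle entries to bare jar names matters because the derived paths concatenate them onto directories: with the old absolute values, driver_curl_target already doubled the path and the new downloaded_custom_connector would have too. The jar_path handed to oozie-setup now also references target, the driver actually copied into place, rather than the raw jdbc_driver_jar name. An illustrative expansion for the MySQL case (jdk_location, java_share_dir and tmp_dir are assumed defaults):

    jdbc_driver_jar = "mysql-connector-java.jar"              # bare name after this change
    jdbc_symlink_name = "mysql-jdbc-driver.jar"
    jdk_location = "http://<ambari-server>:8080/resources"    # assumed
    java_share_dir = "/usr/share/java"                        # assumed
    tmp_dir = "/var/lib/ambari-agent/tmp"                     # assumed

    driver_curl_source = format("{jdk_location}/{jdbc_symlink_name}")
    # -> http://<ambari-server>:8080/resources/mysql-jdbc-driver.jar   (what curl fetches)
    driver_curl_target = format("{java_share_dir}/{jdbc_driver_jar}")
    # -> /usr/share/java/mysql-connector-java.jar                      (no doubled path anymore)
    downloaded_custom_connector = format("{tmp_dir}/{jdbc_driver_jar}")
    # -> /var/lib/ambari-agent/tmp/mysql-connector-java.jar            (curl -o destination in oozie.py)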

+ 4 - 8
ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_metastore.py

@@ -218,11 +218,9 @@ class TestHiveMetastore(RMFTestCase):
                               owner = 'hive',
                               group = 'hadoop',
                               )
-    self.assertResourceCalled('Execute', ('cp', '/usr/share/java/mysql-connector-java.jar', '/usr/lib/hive/lib//mysql-connector-java.jar'),
+    self.assertResourceCalled('Execute', ('cp', '--remove-destination', '/usr/share/java/mysql-connector-java.jar', '/usr/lib/hive/lib//mysql-connector-java.jar'),
                               path = ['/bin', '/usr/bin/'],
-                              creates = '/usr/lib/hive/lib//mysql-connector-java.jar',
-                              sudo = True,
-                              not_if = 'test -f /usr/lib/hive/lib//mysql-connector-java.jar',
+                              sudo = True
                               )
     self.assertResourceCalled('Execute', '/bin/sh -c \'cd /usr/lib/ambari-agent/ && curl -kf -x "" --retry 5 http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar -o DBConnectionVerification.jar\'',
         environment = {'no_proxy': 'c6401.ambari.apache.org'},
@@ -334,11 +332,9 @@ class TestHiveMetastore(RMFTestCase):
                               owner = 'hive',
                               group = 'hadoop',
                               )
-    self.assertResourceCalled('Execute', ('cp', '/usr/share/java/mysql-connector-java.jar', '/usr/lib/hive/lib//mysql-connector-java.jar'),
+    self.assertResourceCalled('Execute', ('cp', '--remove-destination', '/usr/share/java/mysql-connector-java.jar', '/usr/lib/hive/lib//mysql-connector-java.jar'),
                               path = ['/bin', '/usr/bin/'],
-                              creates = '/usr/lib/hive/lib//mysql-connector-java.jar',
-                              sudo = True,
-                              not_if = 'test -f /usr/lib/hive/lib//mysql-connector-java.jar',
+                              sudo = True
                               )
     self.assertResourceCalled('Execute', '/bin/sh -c \'cd /usr/lib/ambari-agent/ && curl -kf -x "" --retry 5 http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar -o DBConnectionVerification.jar\'',
         environment = {'no_proxy': 'c6401.ambari.apache.org'},

+ 4 - 8
ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py

@@ -402,11 +402,9 @@ class TestHiveServer(RMFTestCase):
                               owner = 'hive',
                               group = 'hadoop',
                               )
-    self.assertResourceCalled('Execute', ('cp', '/usr/share/java/mysql-connector-java.jar', '/usr/lib/hive/lib//mysql-connector-java.jar'),
+    self.assertResourceCalled('Execute', ('cp', '--remove-destination', '/usr/share/java/mysql-connector-java.jar', '/usr/lib/hive/lib//mysql-connector-java.jar'),
                               path = ['/bin', '/usr/bin/'],
-                              creates = '/usr/lib/hive/lib//mysql-connector-java.jar',
-                              sudo = True,
-                              not_if = 'test -f /usr/lib/hive/lib//mysql-connector-java.jar',
+                              sudo = True
                               )
     self.assertResourceCalled('Execute', '/bin/sh -c \'cd /usr/lib/ambari-agent/ && curl -kf -x "" --retry 5 http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar -o DBConnectionVerification.jar\'',
         environment = {'no_proxy': 'c6401.ambari.apache.org'},
@@ -594,11 +592,9 @@ class TestHiveServer(RMFTestCase):
                               owner = 'hive',
                               group = 'hadoop',
                               )
-    self.assertResourceCalled('Execute', ('cp', '/usr/share/java/mysql-connector-java.jar', '/usr/lib/hive/lib//mysql-connector-java.jar'),
+    self.assertResourceCalled('Execute', ('cp', '--remove-destination', '/usr/share/java/mysql-connector-java.jar', '/usr/lib/hive/lib//mysql-connector-java.jar'),
                               path = ['/bin', '/usr/bin/'],
-                              creates = '/usr/lib/hive/lib//mysql-connector-java.jar',
-                              sudo = True,
-                              not_if = 'test -f /usr/lib/hive/lib//mysql-connector-java.jar',
+                              sudo = True
                               )
     self.assertResourceCalled('Execute', '/bin/sh -c \'cd /usr/lib/ambari-agent/ && curl -kf -x "" --retry 5 http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar -o DBConnectionVerification.jar\'',
         environment = {'no_proxy': 'c6401.ambari.apache.org'},

+ 4 - 8
ambari-server/src/test/python/stacks/2.1/HIVE/test_hive_metastore.py

@@ -195,11 +195,9 @@ class TestHiveMetastore(RMFTestCase):
                               owner = 'hive',
                               group = 'hadoop',
                               )
-    self.assertResourceCalled('Execute', ('cp', '/usr/share/java/mysql-connector-java.jar', '/usr/lib/hive/lib//mysql-connector-java.jar'),
+    self.assertResourceCalled('Execute', ('cp', '--remove-destination', '/usr/share/java/mysql-connector-java.jar', '/usr/lib/hive/lib//mysql-connector-java.jar'),
                               path = ['/bin', '/usr/bin/'],
-                              creates = '/usr/lib/hive/lib//mysql-connector-java.jar',
-                              sudo = True,
-                              not_if = 'test -f /usr/lib/hive/lib//mysql-connector-java.jar',
+                              sudo = True
                               )
     self.assertResourceCalled('Execute', '/bin/sh -c \'cd /usr/lib/ambari-agent/ && curl -kf -x "" --retry 5 http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar -o DBConnectionVerification.jar\'',
         environment = {'no_proxy': 'c6401.ambari.apache.org'},
@@ -290,11 +288,9 @@ class TestHiveMetastore(RMFTestCase):
                               owner = 'hive',
                               group = 'hadoop',
                               )
-    self.assertResourceCalled('Execute', ('cp', '/usr/share/java/mysql-connector-java.jar', '/usr/lib/hive/lib//mysql-connector-java.jar'),
+    self.assertResourceCalled('Execute', ('cp', '--remove-destination', '/usr/share/java/mysql-connector-java.jar', '/usr/lib/hive/lib//mysql-connector-java.jar'),
         path = ['/bin', '/usr/bin/'],
-        creates = '/usr/lib/hive/lib//mysql-connector-java.jar',
-        sudo = True,
-        not_if = 'test -f /usr/lib/hive/lib//mysql-connector-java.jar',
+        sudo = True
     )
     self.assertResourceCalled('Execute', '/bin/sh -c \'cd /usr/lib/ambari-agent/ && curl -kf -x "" --retry 5 http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar -o DBConnectionVerification.jar\'',
         environment = {'no_proxy': 'c6401.ambari.apache.org'},