@@ -16,43 +16,17 @@
* limitations under the License.
*/

-package org.apache.ambari.view.huetoambarimigration.service.hive;
+package org.apache.ambari.view.huetoambarimigration.migration.hive.savedquery;

-import java.nio.charset.Charset;
-import java.security.PrivilegedExceptionAction;
-import java.sql.Connection;
-import java.sql.DriverManager;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.Statement;
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.io.BufferedInputStream;
-import java.io.BufferedReader;
-import java.io.BufferedWriter;
-import java.io.ByteArrayInputStream;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileWriter;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.util.ArrayList;
-import java.util.Calendar;
-import java.util.Date;
-import java.util.GregorianCalendar;
-import java.util.Scanner;
-import java.io.*;
-import java.net.URISyntaxException;
-import java.net.URL;
-
-import org.apache.ambari.view.huetoambarimigration.service.configurationcheck.ConfFileReader;
+import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.HiveModel;
+import org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.savedqueryset.QuerySetAmbariDB;
+import org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.hive.savedqueryset.QuerySet;
+import org.apache.ambari.view.huetoambarimigration.migration.configuration.ConfigurationCheckImplementation;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
import org.apache.log4j.Logger;
import org.jdom.Attribute;
import org.jdom.Document;
@@ -61,16 +35,23 @@ import org.jdom.JDOMException;
import org.jdom.input.SAXBuilder;
import org.jdom.output.Format;
import org.jdom.output.XMLOutputter;
-import org.json.JSONArray;
import org.json.JSONObject;
-import org.apache.hadoop.security.authentication.client.AuthenticatedURL;
-import org.apache.hadoop.security.UserGroupInformation;

-import org.apache.ambari.view.huetoambarimigration.model.*;
+import java.io.*;
+import java.net.URISyntaxException;
+import java.nio.charset.Charset;
+import java.security.PrivilegedExceptionAction;
+import java.sql.*;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Calendar;
+import java.util.Date;
+import java.util.GregorianCalendar;

-public class HiveSavedQueryImpl {
+public class HiveSavedQueryMigrationImplementation {

- static final Logger logger = Logger.getLogger(HiveSavedQueryImpl.class);
+ static final Logger logger = Logger.getLogger(HiveSavedQueryMigrationImplementation.class);

private static String readAll(Reader rd) throws IOException {
StringBuilder sb = new StringBuilder();
@@ -92,7 +73,7 @@ public class HiveSavedQueryImpl {

xmlOutput.setFormat(Format.getPrettyFormat());

- File xmlfile = new File(ConfFileReader.getHomeDir() + "RevertChange.xml");
+ File xmlfile = new File(ConfigurationCheckImplementation.getHomeDir() + "RevertChangesService.xml");

if (xmlfile.exists()) {
String iteration = Integer.toString(i + 1);
@@ -105,18 +86,17 @@ public class HiveSavedQueryImpl {

Element record = new Element("RevertRecord");
record.setAttribute(new Attribute("id", iteration));
- record.addContent(new Element("datetime").setText(currentDate
- .toString()));
+ record.addContent(new Element("datetime").setText(currentDate.toString()));
record.addContent(new Element("dirname").setText(dirname));
record.addContent(new Element("instance").setText(instance));
record.addContent(new Element("query").setText(content));

rootNode.addContent(record);
- xmlOutput.output(doc, new FileWriter(ConfFileReader.getHomeDir() + "RevertChange.xml"));
+ xmlOutput.output(doc, new FileWriter(ConfigurationCheckImplementation.getHomeDir() + "RevertChangesService.xml"));

} catch (JDOMException e) {
// TODO Auto-generated catch block
- logger.error("JDOMException: " , e);
+ logger.error("JDOMException: ", e);
}

} else {
@@ -137,7 +117,7 @@ public class HiveSavedQueryImpl {

doc.getRootElement().addContent(record);

- xmlOutput.output(doc, new FileWriter(ConfFileReader.getHomeDir() + "RevertChange.xml"));
+ xmlOutput.output(doc, new FileWriter(ConfigurationCheckImplementation.getHomeDir() + "RevertChangesService.xml"));

} catch (IOException io) {
@@ -147,25 +127,18 @@ public class HiveSavedQueryImpl {

}

- public int fetchMaxidforSavedQueryHive(String driverName, Connection c, int id)
- throws SQLException {
+ public int fetchMaxidforSavedQueryHive(Connection c, int id, QuerySetAmbariDB ambaridatabase) throws SQLException {

String ds_id = null;
- Statement stmt = null;
- stmt = c.createStatement();
ResultSet rs = null;
+ PreparedStatement prSt = null;

- if (driverName.contains("postgresql")) {
- rs = stmt.executeQuery("select MAX(cast(ds_id as integer)) as max from ds_savedquery_" + id + ";");
- } else if (driverName.contains("mysql")) {
- rs = stmt.executeQuery("select max(cast(ds_id as unsigned) ) as max from DS_SAVEDQUERY_" + id + ";");
- } else if (driverName.contains("oracle")) {
- rs = stmt.executeQuery("select MAX(cast(ds_id as integer)) as max from ds_savedquery_" + id + ";");
- }
+ prSt = ambaridatabase.getMaxDsIdFromTableIdSavedquery(c, id);
+
+ rs = prSt.executeQuery();

while (rs.next()) {
ds_id = rs.getString("max");
-
}

int num;
@@ -174,85 +147,70 @@ public class HiveSavedQueryImpl {
} else {
num = Integer.parseInt(ds_id);
}
-
return num;
}

- public int fetchInstancetablenameForSavedqueryHive(String driverName, Connection c,
- String instance) throws SQLException {
+ public int fetchInstancetablenameForSavedqueryHive(Connection c, String instance, QuerySetAmbariDB ambaridatabase) throws SQLException {

String ds_id = new String();
int id = 0;
Statement stmt = null;
+ PreparedStatement prSt = null;
+

- stmt = c.createStatement();
ResultSet rs = null;

- if (driverName.contains("oracle")) {
- rs = stmt
- .executeQuery("select * from viewentity where class_name LIKE 'org.apache.ambari.view.hive.resources.savedQueries.SavedQuery' and view_instance_name='"
- + instance + "'");
- } else {
- rs = stmt
- .executeQuery("select * from viewentity where class_name LIKE 'org.apache.ambari.view.hive.resources.savedQueries.SavedQuery' and view_instance_name='"
- + instance + "';");
- }

+ prSt = ambaridatabase.getTableIdFromInstanceNameSavedquery(c, instance);
+
+ logger.info("sql statement to fetch is from ambari instance:= = " + prSt);
+
+ rs = prSt.executeQuery();

while (rs.next()) {
id = rs.getInt("id");
-
}
-
return id;
}

- public int fetchInstanceTablenameHiveHistory(String driverName, Connection c,
- String instance) throws SQLException {
+ public int fetchInstanceTablenameHiveHistory(Connection c, String instance, QuerySetAmbariDB ambaridatabase) throws SQLException {
String ds_id = new String();
int id = 0;
Statement stmt = null;
+ PreparedStatement prSt = null;

- stmt = c.createStatement();
ResultSet rs = null;

- if (driverName.contains("oracle")) {
- rs = stmt.executeQuery("select id from viewentity where class_name LIKE 'org.apache.ambari.view.hive.resources.jobs.viewJobs.JobImpl' and view_instance_name='" + instance + "'");
- } else {
- rs = stmt.executeQuery("select id from viewentity where class_name LIKE 'org.apache.ambari.view.hive.resources.jobs.viewJobs.JobImpl' and view_instance_name='" + instance + "';");
- }

+ prSt = ambaridatabase.getTableIdFromInstanceNameHistoryquery(c, instance);
+
+ logger.info("sql statement to fetch is from ambari instance:= = " + prSt);
+
+ rs = prSt.executeQuery();

while (rs.next()) {
id = rs.getInt("id");
- System.out.println("id is " + id);
-
}
-
return id;

}

- public int fetchMaxdsidFromHiveHistory(String driverName, Connection c, int id)
+ public int fetchMaxdsidFromHiveHistory(Connection c, int id, QuerySetAmbariDB ambaridatabase)
throws SQLException {

String ds_id = null;
- Statement stmt = null;
-
- stmt = c.createStatement();
ResultSet rs = null;
+ PreparedStatement prSt = null;
+
+ prSt = ambaridatabase.getMaxDsIdFromTableIdHistoryquery(c, id);
+
+ rs = prSt.executeQuery();

- if (driverName.contains("postgresql")) {
- rs = stmt.executeQuery("select MAX(cast(ds_id as integer)) as max from ds_jobimpl_" + id + ";");
- } else if (driverName.contains("mysql")) {
- rs = stmt.executeQuery("select max( cast(ds_id as unsigned) ) as max from DS_JOBIMPL_" + id + ";");
- } else if (driverName.contains("oracle")) {
- rs = stmt.executeQuery("select MAX(cast(ds_id as integer)) as max from ds_jobimpl_" + id);
- }
while (rs.next()) {
ds_id = rs.getString("max");
}
+
int num;
if (ds_id == null) {
num = 1;
@@ -264,92 +222,50 @@ public class HiveSavedQueryImpl {

/**/
- public void insertRowHiveHistory(String driverName, String dirname, int maxcount,
- long epochtime, Connection c, int id, String instance, int i)
+ public void insertRowHiveHistory(String dirname, int maxcount, long epochtime, Connection c, int id, String instance, int i, QuerySetAmbariDB ambaridatabase)
throws SQLException, IOException {
- String maxcount1 = Integer.toString(maxcount);

+ String maxcount1 = Integer.toString(maxcount);
String epochtime1 = Long.toString(epochtime);
+ PreparedStatement prSt = null;
+ String revsql = null;

- String ds_id = new String();
- Statement stmt = null;
+ prSt = ambaridatabase.insertToHiveHistory(c, id, maxcount1, epochtime, dirname);

- stmt = c.createStatement();
- String sql = "";
- String revsql = "";
-
- if (driverName.contains("mysql")) {
- sql = "INSERT INTO DS_JOBIMPL_" + id + " values ('" + maxcount1
- + "','','','','','default'," + epochtime1 + ",0,'','','"
- + dirname + "logs','admin','" + dirname
- + "query.hql','','job','','','Unknown','" + dirname
- + "','','Worksheet');";
-
- revsql = "delete from DS_JOBIMPL_" + id + " where ds_id='"
- + maxcount1 + "';";
-
- } else if (driverName.contains("postgresql")) {
- sql = "INSERT INTO ds_jobimpl_" + id + " values ('" + maxcount1
- + "','','','','','default'," + epochtime1 + ",0,'','','"
- + dirname + "logs','admin','" + dirname
- + "query.hql','','job','','','Unknown','" + dirname
- + "','','Worksheet');";
-
- revsql = "delete from ds_jobimpl_" + id + " where ds_id='"
- + maxcount1 + "';";
-
- } else if (driverName.contains("oracle")) {
- sql = "INSERT INTO ds_jobimpl_" + id + " values ('" + maxcount1
- + "','','','','','default'," + epochtime1 + ",0,'','','"
- + dirname + "logs','admin','" + dirname
- + "query.hql','','job','','','Unknown','" + dirname
- + "','','Worksheet')";
- revsql = "delete from ds_jobimpl_" + id + " where ds_id='"
- + maxcount1 + "'";
+ System.out.println("the actual query is " + prSt);
+
+ logger.info("The actual insert statement is " + prSt);
+
+ prSt.executeUpdate();
+
+ revsql = ambaridatabase.revertSqlHistoryQuery(id, maxcount1);
+
+ logger.info("adding revert sqlsavedquery in hivehistory ");

- }
- stmt.executeUpdate(sql);
wrtitetoalternatesqlfile(dirname, revsql, instance, i);
}

- public void insertRowinSavedQuery(String driverName, int maxcount, String database,
- String dirname, String query, String name, Connection c, int id,
- String instance, int i) throws SQLException, IOException {
+ public void insertRowinSavedQuery(int maxcount, String database, String dirname, String query, String name, Connection c, int id, String instance, int i, QuerySetAmbariDB ambaridatabase) throws SQLException, IOException {
+
String maxcount1 = Integer.toString(maxcount);
+ String revsql = null;

- String ds_id = new String();
- Statement stmt = null;
- String sql = "";
- String revsql = "";
- stmt = c.createStatement();
+ PreparedStatement prSt = null;

- if (driverName.contains("mysql")) {
- sql = "INSERT INTO DS_SAVEDQUERY_" + id + " values ('"
- + maxcount1 + "','" + database + "','" + "admin" + "','"
- + dirname + "query.hql','" + query + "','" + name + "');";
+ prSt = ambaridatabase.insertToHiveSavedQuery(c, id, maxcount1, database, dirname, query, name);

- revsql = "delete from DS_SAVEDQUERY_" + id + " where ds_id='"
- + maxcount1 + "';";
+ System.out.println("the actual query is " + prSt);

- } else if (driverName.contains("postgresql")) {
- sql = "INSERT INTO ds_savedquery_" + id + " values ('"
- + maxcount1 + "','" + database + "','" + "admin" + "','"
- + dirname + "query.hql','" + query + "','" + name + "');";
+ logger.info("The actual insert statement is " + prSt);

- revsql = "delete from ds_savedquery_" + id + " where ds_id='"
- + maxcount1 + "';";
+ prSt.executeUpdate();

- } else if (driverName.contains("oracle")) {
- sql = "INSERT INTO ds_savedquery_" + id + " values ('"
- + maxcount1 + "','" + database + "','" + "admin" + "','"
- + dirname + "query.hql','" + query + "','" + name + "')";
+ revsql = ambaridatabase.revertSqlSavedQuery(id, maxcount1);

- revsql = "delete from ds_savedquery_" + id + " where ds_id='"
- + maxcount1 + "'";
+ logger.info("adding revert sqlsavedquery ");

- }
wrtitetoalternatesqlfile(dirname, revsql, instance, i);
- stmt.executeUpdate(sql);
+
}

public long getEpochTime() throws ParseException {
@@ -386,89 +302,68 @@ public class HiveSavedQueryImpl {

}

- public ArrayList<PojoHive> fetchFromHuedb(String username,
- String startdate, String endtime, Connection connection)
+ public ArrayList<HiveModel> fetchFromHuedb(String username, String startdate, String endtime, Connection connection, QuerySet huedatabase)
throws ClassNotFoundException, IOException {
int id = 0;
int i = 0;
String[] query = new String[100];
- ArrayList<PojoHive> hiveArrayList = new ArrayList<PojoHive>();
+ ArrayList<HiveModel> hiveArrayList = new ArrayList<HiveModel>();
ResultSet rs1 = null;

try {
Statement statement = connection.createStatement();
+ connection.setAutoCommit(false);
+ PreparedStatement prSt = null;
+ ResultSet rs;
if (username.equals("all")) {
} else {
- ResultSet rs = statement
- .executeQuery("select id from auth_user where username='"
- + username + "';");
- while (rs.next()) {

- id = rs.getInt("id");
+ prSt = huedatabase.getUseridfromUserName(connection, username);

- }
+ rs = prSt.executeQuery();

+ while (rs.next()) {
+ id = rs.getInt("id");
+ }
}
+
if (startdate.equals("") && endtime.equals("")) {
if (username.equals("all")) {
- rs1 = statement
- .executeQuery("select data,name,owner_id from beeswax_savedquery;");
-
+ prSt = huedatabase.getQueriesNoStartDateNoEndDateAllUser(connection);
} else {
- rs1 = statement
- .executeQuery("select data,name,owner_id from beeswax_savedquery where name!='My saved query'and owner_id ="
- + id + ";");
+ prSt = huedatabase.getQueriesNoStartDateNoEndDate(connection, id);
+
}

- } else if (!(startdate.equals("")) && !(endtime.equals(""))) {
+ } else if ((startdate.equals("")) && !(endtime.equals(""))) {
if (username.equals("all")) {
- rs1 = statement
- .executeQuery("select data,name,owner_id from beeswax_savedquery where name!='My saved query' AND mtime >= date('"
- + startdate
- + "') AND mtime <= date('"
- + endtime + "');");
+ prSt = huedatabase.getQueriesNoStartDateYesEndDateAllUser(connection, endtime);
} else {
- rs1 = statement
- .executeQuery("select data,name,owner_id from beeswax_savedquery where name!='My saved query'and owner_id ="
- + id
- + " AND mtime >= date('"
- + startdate
- + "') AND mtime <= date('"
- + endtime
- + "');");
- }
+ prSt = huedatabase.getQueriesNoStartDateYesEndDate(connection, id, endtime);
+
+ }
} else if (!(startdate.equals("")) && (endtime.equals(""))) {
if (username.equals("all")) {
- rs1 = statement
- .executeQuery("select data,name,owner_id from beeswax_savedquery where name!='My saved query'and mtime >= date('"
- + startdate + "');");
+ prSt = huedatabase.getQueriesYesStartDateNoEndDateAllUser(connection, startdate);
} else {
- rs1 = statement
- .executeQuery("select data,name,owner_id from beeswax_savedquery where name!='My saved query'and owner_id ="
- + id
- + " AND mtime >= date('"
- + startdate
- + "');");
+ prSt = huedatabase.getQueriesYesStartDateNoEndDate(connection, id, startdate);
+
}

- } else if ((startdate.equals("")) && !(endtime.equals(""))) {
+ } else if (!(startdate.equals("")) && !(endtime.equals(""))) {
if (username.equals("all")) {
- rs1 = statement
- .executeQuery("select data,name,owner_id from beeswax_savedquery where name!='My saved query' AND mtime <= date('"
- + endtime + "');");
+ prSt = huedatabase.getQueriesYesStartDateYesEndDateAllUser(connection, startdate, endtime);
} else {
- rs1 = statement
- .executeQuery("select data,name,owner_id from beeswax_savedquery where name!='My saved query'and owner_id ="
- + id
- + " AND mtime <= date('"
- + endtime
- + "');");
+ prSt = huedatabase.getQueriesYesStartDateYesEndDate(connection, id, startdate, endtime);
}

}
+
+ rs1 = prSt.executeQuery();
+
+
while (rs1.next()) {
- PojoHive hivepojo = new PojoHive();
+ HiveModel hivepojo = new HiveModel();
String name = rs1.getString("name");
String temp = rs1.getString("data");
InputStream is = new ByteArrayInputStream(temp.getBytes());
@@ -490,16 +385,16 @@ public class HiveSavedQueryImpl {
i++;
}

- } catch (SQLException e) {
- // if the error message is "out of memory",
- // it probably means no database file is found
- System.err.println(e.getMessage());
- } finally {
+ } catch (SQLException e2) {
+ e2.printStackTrace();
+ } finally
+
+ {
try {
if (connection != null)
connection.close();
} catch (SQLException e) {
- logger.error("sql connection exception" , e);
+ logger.error("sql connection exception", e);
}
}

@@ -521,42 +416,42 @@ public class HiveSavedQueryImpl {
bw.close();

} catch (IOException e) {
- logger.error("IOException: " , e);
+ logger.error("IOException: ", e);
}

}

public void deleteFileQueryhql(String homedir) {
- try{
+ try {
File file = new File(homedir + "query.hql");

- if(file.delete()){
+ if (file.delete()) {
logger.info("temporary hql file deleted");
- }else{
+ } else {
logger.info("temporary hql file delete failed");
}

- }catch(Exception e){
+ } catch (Exception e) {

- logger.error("File Exception ",e);
+ logger.error("File Exception ", e);

}

}

public void deleteFileQueryLogs(String homedir) {
- try{
+ try {
File file = new File(homedir + "logs");

- if(file.delete()){
+ if (file.delete()) {
logger.info("temporary logs file deleted");
- }else{
+ } else {
logger.info("temporary logs file delete failed");
}

- }catch(Exception e){
+ } catch (Exception e) {

- logger.error("File Exception ",e);
+ logger.error("File Exception ", e);

}

@@ -580,7 +475,7 @@ public class HiveSavedQueryImpl {
bw.close();

} catch (IOException e) {
- logger.error("IOException: " , e);
+ logger.error("IOException: ", e);
}

}
@@ -615,7 +510,7 @@ public class HiveSavedQueryImpl {
}
});
} catch (Exception e) {
- logger.error("Webhdfs: " , e);
+ logger.error("Webhdfs: ", e);
}
}
@@ -649,7 +544,7 @@ public class HiveSavedQueryImpl {
}
});
} catch (Exception e) {
- logger.error("Webhdfs: " , e);
+ logger.error("Webhdfs: ", e);
}
}
@@ -706,7 +601,7 @@ public class HiveSavedQueryImpl {
}
});
} catch (Exception e) {
- logger.error("Webhdfs exception" , e);
+ logger.error("Webhdfs exception", e);
}

}
@@ -770,7 +665,7 @@ public class HiveSavedQueryImpl {
}
});
} catch (Exception e) {
- logger.error("Webhdfs exception" , e);
+ logger.error("Webhdfs exception", e);
}

}