
AMBARI-17079. Moved Hue to Ambari migrator to standard view architecture (Pradarttana Panda via dipayanb)

Dipayan Bhowmick 9 years ago
parent
commit
f6e8637c14
100 changed files with 6439 additions and 1694 deletions
  1. + 81 - 61
      contrib/views/hueambarimigration/pom.xml
  2. + 0 - 182
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/controller/configurationcheck/ConfigurationCheck.java
  3. + 0 - 54
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/controller/configurationcheck/ProgressBarStatus.java
  4. + 0 - 222
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/controller/hive/HiveHistoryMigration.java
  5. + 0 - 231
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/controller/hive/HiveSavedQueryMigration.java
  6. + 0 - 208
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/controller/pig/PigScriptMigration.java
  7. + 0 - 2
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/DataSourceAmbariDatabase.java
  8. + 0 - 2
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/DataSourceHueDatabase.java
  9. + 46 - 0
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/historyqueryset/MysqlQuerySetAmbariDB.java
  10. + 44 - 0
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/historyqueryset/OracleQuerySetAmbariDB.java
  11. + 22 - 0
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/historyqueryset/PostgressQuerySetAmbariDB.java
  12. + 79 - 0
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/historyqueryset/QuerySetAmbariDB.java
  13. + 23 - 0
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/instancedetail/MysqlQuerySetAmbariDB.java
  14. + 31 - 0
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/instancedetail/OracleQuerySetAmbariDB.java
  15. + 22 - 0
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/instancedetail/PostgressQuerySetAmbariDB.java
  16. + 48 - 0
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/instancedetail/QuerySetAmbariDB.java
  17. + 65 - 0
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/savedqueryset/MysqlQuerySetAmbariDB.java
  18. + 58 - 0
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/savedqueryset/OracleQuerySetAmbariDB.java
  19. + 22 - 0
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/savedqueryset/PostgressQuerySetAmbariDB.java
  20. + 131 - 0
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/savedqueryset/QuerySetAmbariDB.java
  21. + 23 - 0
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/instancedetail/MysqlQuerySetAmbariDB.java
  22. + 30 - 0
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/instancedetail/OracleQuerySetAmbariDB.java
  23. + 22 - 0
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/instancedetail/PostgressQuerySetAmbariDB.java
  24. + 39 - 0
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/instancedetail/QuerySetAmbariDB.java
  25. + 43 - 0
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/jobqueryset/MysqlQuerySetAmbariDB.java
  26. + 41 - 0
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/jobqueryset/OracleQuerySetAmbariDB.java
  27. + 22 - 0
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/jobqueryset/PostgressQuerySetAmbariDB.java
  28. + 80 - 0
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/jobqueryset/QuerySetAmbariDB.java
  29. + 43 - 0
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/savedscriptqueryset/MysqlQuerySetAmbariDB.java
  30. + 41 - 0
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/savedscriptqueryset/OracleQuerySetAmbariDB.java
  31. + 22 - 0
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/savedscriptqueryset/PostgressQuerySetAmbariDB.java
  32. + 70 - 0
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/savedscriptqueryset/QuerySetAmbariDB.java
  33. + 23 - 0
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/hive/historyqueryset/MysqlQuerySet.java
  34. + 61 - 0
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/hive/historyqueryset/OracleQuerySet.java
  35. + 22 - 0
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/hive/historyqueryset/PostgressQuerySet.java
  36. + 130 - 0
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/hive/historyqueryset/QuerySet.java
  37. + 22 - 0
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/hive/historyqueryset/SqliteQuerySet.java
  38. + 23 - 0
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/hive/savedqueryset/MysqlQuerySet.java
  39. + 65 - 0
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/hive/savedqueryset/OracleQuerySet.java
  40. + 22 - 0
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/hive/savedqueryset/PostgressQuerySet.java
  41. + 134 - 0
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/hive/savedqueryset/QuerySet.java
  42. + 22 - 0
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/hive/savedqueryset/SqliteQuerySet.java
  43. + 22 - 0
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/pig/jobqueryset/MysqlQuerySet.java
  44. + 65 - 0
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/pig/jobqueryset/OracleQuerySet.java
  45. + 22 - 0
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/pig/jobqueryset/PostgressQuerySet.java
  46. + 132 - 0
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/pig/jobqueryset/QuerySet.java
  47. + 22 - 0
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/pig/jobqueryset/SqliteQuerySet.java
  48. + 22 - 0
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/pig/savedscriptqueryset/MysqlQuerySet.java
  49. + 60 - 0
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/pig/savedscriptqueryset/OracleQuerySet.java
  50. + 67 - 0
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/pig/savedscriptqueryset/PostgressQuerySet.java
  51. + 135 - 0
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/pig/savedscriptqueryset/QuerySet.java
  52. + 24 - 0
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/pig/savedscriptqueryset/SqliteQuerySet.java
  53. + 24 - 0
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/userdetails/MysqlQuerySet.java
  54. + 28 - 0
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/userdetails/OracleQuerySet.java
  55. + 22 - 0
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/userdetails/PostgressQuerySet.java
  56. + 42 - 0
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/userdetails/QuerySet.java
  57. + 22 - 0
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/userdetails/SqliteQuerySet.java
  58. + 85 - 0
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/CreateJobId.java
  59. + 84 - 0
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/CreateJobIdRevertChange.java
  60. + 102 - 0
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/InitiateJobMigration.java
  61. + 85 - 0
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/InitiateJobMigrationforRevertchange.java
  62. + 59 - 0
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/AllInstanceDetailsAmbari.java
  63. + 62 - 0
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/AmbariDatabaseCheck.java
  64. + 60 - 0
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/AmbariWebHdfsCheck.java
  65. + 70 - 0
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/CheckProgresStatus.java
  66. + 134 - 0
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/ConfigurationCheckImplementation.java
  67. + 53 - 0
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/HiveInstanceDetailsAmbari.java
  68. + 106 - 0
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/HiveInstanceDetailsUtility.java
  69. + 60 - 0
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/HueDatabaseCheck.java
  70. + 60 - 0
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/HueHttpUrlCheck.java
  71. + 60 - 0
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/HueWebHdfsCheck.java
  72. + 61 - 0
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/PigInstanceDetailsAmbari.java
  73. + 79 - 0
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/PigInstanceDetailsUtility.java
  74. + 58 - 0
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/UserDetailHue.java
  75. + 80 - 0
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/UserDetailsUtility.java
  76. + 255 - 0
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/historyquery/HiveHistoryMigrationUtility.java
  77. + 100 - 111
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/historyquery/HiveHistoryQueryMigrationImplementation.java
  78. + 74 - 0
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/historyquery/HiveHistoryStartJob.java
  79. + 126 - 231
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryMigrationImplementation.java
  80. + 281 - 0
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryMigrationUtility.java
  81. + 77 - 0
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryStartJob.java
  82. + 70 - 101
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigjob/PigJobMigrationImplementation.java
  83. + 101 - 64
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigjob/PigJobMigrationUtility.java
  84. + 68 - 0
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigjob/PigJobStartJob.java
  85. + 70 - 0
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigscript/PigSavedScriptStartJob.java
  86. + 78 - 174
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigscript/PigScriptMigrationImplementation.java
  87. + 229 - 0
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigscript/PigScriptMigrationUtility.java
  88. + 70 - 0
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/revertchange/RevertChangeStartJob.java
  89. + 59 - 51
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/revertchange/RevertChangeUtility.java
  90. + 151 - 0
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/persistence/DataStoreStorage.java
  91. + 132 - 0
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/persistence/InstanceKeyValueStorage.java
  92. + 162 - 0
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/persistence/KeyValueStorage.java
  93. + 70 - 0
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/persistence/LocalKeyValueStorage.java
  94. + 52 - 0
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/persistence/PersistentConfiguration.java
  95. + 45 - 0
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/persistence/SmokeTestEntity.java
  96. + 78 - 0
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/persistence/Storage.java
  97. + 260 - 0
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/persistence/utils/ContextConfigurationAdapter.java
  98. + 31 - 0
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/persistence/utils/FilteringStrategy.java
  99. + 36 - 0
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/persistence/utils/Indexed.java
  100. + 25 - 0
      contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/persistence/utils/ItemNotFound.java

+ 81 - 61
contrib/views/hueambarimigration/pom.xml

@@ -16,24 +16,32 @@
 -->
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
          xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>org.apache.ambari.contrib.views</groupId>
+  <artifactId>hueambarimigration</artifactId>
+  <version>1.0.0.0-SNAPSHOT</version>
+  <name>hueambarimigration</name>
+
   <parent>
     <groupId>org.apache.ambari.contrib.views</groupId>
     <artifactId>ambari-contrib-views</artifactId>
     <version>2.4.0.0.0</version>
   </parent>
-  <modelVersion>4.0.0</modelVersion>
-  <artifactId>huetoambari-view</artifactId>
-  <packaging>jar</packaging>
-  <name>Hue To Ambari Migration-view</name>
-  <version>2.4.0.0.0</version>
-  <url>http://maven.apache.org</url>
+
   <properties>
     <ambari.dir>${project.parent.parent.parent.basedir}</ambari.dir>
   </properties>
-
-
   <dependencies>
-
+    <dependency>
+      <groupId>com.google.code.gson</groupId>
+      <artifactId>gson</artifactId>
+      <version>2.6.2</version>
+    </dependency>
+    <dependency>
+      <groupId>com.jayway.jsonpath</groupId>
+      <artifactId>json-path</artifactId>
+      <version>2.0.0</version>
+    </dependency>
     <dependency>
       <groupId>org.easymock</groupId>
       <artifactId>easymock</artifactId>
@@ -43,14 +51,12 @@
     <dependency>
       <groupId>org.apache.ambari</groupId>
       <artifactId>ambari-views</artifactId>
-      <version>2.4.0.0.0</version>
+      <version>[1.7.0.0,)</version>
     </dependency>
-
     <dependency>
       <groupId>javax.servlet</groupId>
       <artifactId>servlet-api</artifactId>
       <version>2.5</version>
-
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
@@ -110,9 +116,41 @@
       <groupId>c3p0</groupId>
       <artifactId>c3p0</artifactId>
       <version>0.9.1.2</version>
-
     </dependency>
-
+    <dependency>
+      <groupId>com.jayway.jsonpath</groupId>
+      <artifactId>json-path</artifactId>
+      <version>2.0.0</version>
+    </dependency>
+    <dependency>
+      <groupId>com.google.inject</groupId>
+      <artifactId>guice</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>com.sun.jersey.contribs</groupId>
+      <artifactId>jersey-multipart</artifactId>
+      <version>1.18</version>
+    </dependency>
+    <dependency>
+      <groupId>com.sun.jersey</groupId>
+      <artifactId>jersey-client</artifactId>
+      <version>1.8</version>
+    </dependency>
+    <dependency>
+      <groupId>com.sun.jersey</groupId>
+      <artifactId>jersey-core</artifactId>
+      <version>1.18.1</version>
+    </dependency>
+    <dependency>
+      <groupId>com.sun.jersey</groupId>
+      <artifactId>jersey-json</artifactId>
+      <version>1.9</version>
+    </dependency>
+    <dependency>
+      <groupId>com.googlecode.json-simple</groupId>
+      <artifactId>json-simple</artifactId>
+      <version>1.1.1</version>
+    </dependency>
   </dependencies>
 
   <build>
@@ -134,7 +172,7 @@
         <configuration>
           <nodeVersion>v0.12.2</nodeVersion>
           <npmVersion>1.4.8</npmVersion>
-          <workingDirectory>${project.basedir}/src/main/resources/ui</workingDirectory>
+          <workingDirectory>${project.basedir}/src/main/resources/ui/hueambarimigration-view/</workingDirectory>
         </configuration>
         <executions>
           <execution>
@@ -151,7 +189,30 @@
               <goal>npm</goal>
             </goals>
             <configuration>
-              <arguments>install --python="${project.basedir}/src/main/unix/ambari-python-wrap" --unsafe-perm
+              <arguments>install --python="${project.basedir}/../src/main/unix/ambari-python-wrap" --unsafe-perm
+              </arguments>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+
+      <plugin>
+        <artifactId>exec-maven-plugin</artifactId>
+        <groupId>org.codehaus.mojo</groupId>
+        <version>1.3.2</version>
+        <executions>
+          <execution>
+            <id>Hueambarimigration-build</id>
+            <phase>generate-sources</phase>
+            <goals>
+              <goal>exec</goal>
+            </goals>
+            <configuration>
+              <workingDirectory>${basedir}/src/main/resources/ui/hueambarimigration-view</workingDirectory>
+              <executable>node/node</executable>
+              <arguments>
+                <argument>node_modules/.bin/ember</argument>
+                <argument>build</argument>
               </arguments>
             </configuration>
           </execution>
@@ -181,60 +242,19 @@
 
       <resource>
         <directory>src/main/resources</directory>
-        <filtering>true</filtering>
+        <filtering>false</filtering>
         <includes>
-          <include>index.jsp</include>
-          <include>image/*.*</include>
+          <include>META-INF/**/*</include>
           <include>view.xml</include>
           <include>view.log4j.properties</include>
-          <include>ui/*.*</include>
-          <include>WEB-INF/web.xml</include>
         </includes>
       </resource>
 
-
       <resource>
-        <directory>src/main/resources/ui/bower_components/bootstrap/dist/css/</directory>
+        <directory>src/main/resources/ui/hueambarimigration-view/dist</directory>
         <filtering>false</filtering>
-        <targetPath>${project.build.outputDirectory}/css</targetPath>
       </resource>
 
-      <resource>
-        <directory>src/main/resources/ui/bower_components/bootstrap/dist/fonts/</directory>
-        <filtering>false</filtering>
-        <targetPath>${project.build.outputDirectory}/fonts</targetPath>
-      </resource>
-
-
-      <resource>
-        <directory>src/main/resources/ui/bower_components/eonasdan-bootstrap-datetimepicker/build/css</directory>
-        <filtering>false</filtering>
-        <targetPath>${project.build.outputDirectory}/css</targetPath>
-      </resource>
-
-      <resource>
-        <directory>src/main/resources/ui//bower_components/moment/min/</directory>
-        <filtering>false</filtering>
-        <targetPath>${project.build.outputDirectory}/js</targetPath>
-      </resource>
-
-
-      <resource>
-        <directory>src/main/resources/ui/bower_components/eonasdan-bootstrap-datetimepicker/build/js</directory>
-        <filtering>false</filtering>
-        <targetPath>${project.build.outputDirectory}/js</targetPath>
-      </resource>
-
-      <resource>
-        <directory>src/main/resources/ui/bower_components/jquery/dist/</directory>
-        <filtering>false</filtering>
-        <targetPath>${project.build.outputDirectory}/js</targetPath>
-      </resource>
-      <resource>
-        <directory>src/main/resources/ui/bower_components/bootstrap/dist/js</directory>
-        <filtering>false</filtering>
-        <targetPath>${project.build.outputDirectory}/js</targetPath>
-      </resource>
       <resource>
         <targetPath>WEB-INF/lib</targetPath>
         <filtering>false</filtering>
@@ -243,4 +263,4 @@
     </resources>
   </build>
 
-</project>
+</project>

+ 0 - 182
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/controller/configurationcheck/ConfigurationCheck.java

@@ -1,182 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-
-package org.apache.ambari.view.huetoambarimigration.controller.configurationcheck;
-
-import java.io.IOException;
-import java.io.PrintWriter;
-import java.net.URISyntaxException;
-import java.sql.Connection;
-
-import org.apache.ambari.view.ViewContext;
-
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServlet;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-import javax.servlet.ServletConfig;
-import javax.servlet.ServletContext;
-
-import org.apache.ambari.view.huetoambarimigration.service.*;
-import org.apache.ambari.view.huetoambarimigration.service.configurationcheck.ConfFileReader;
-import org.apache.log4j.Logger;
-
-
-public class ConfigurationCheck extends HttpServlet {
-  private static final long serialVersionUID = 1L;
-
-  ViewContext view;
-
-  @Override
-  public void init(ServletConfig config) throws ServletException {
-
-    super.init(config);
-    ServletContext context = config.getServletContext();
-    view = (ViewContext) context.getAttribute(ViewContext.CONTEXT_ATTRIBUTE);
-
-  }
-
-  protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
-
-    final Logger logger = Logger.getLogger(ConfigurationCheck.class);
-    response.setContentType("text/html");
-    PrintWriter out = response.getWriter();
-
-    out.println("<table class=\"table\">");
-    out.println("<thead><tr><th>Service</th><th>Status</th></tr></thead>");
-    out.println("<tbody>");
-
-    if (ConfFileReader.checkConfigurationForHue(view.getProperties().get("Hue_URL"))) {
-      logger.info("Hue URl connection:- Success");
-      out.println("<tr class=\"success\">");
-      out.println("<td><h6>" + "Ambari" + "</h6></td>");
-      out.println("<td><h6>" + "OK" + "</h6></td>");
-      out.println("</tr>");
-    } else {
-      logger.info("Hue URl connection:- Failed");
-      out.println("<tr class=\"danger\">");
-      out.println("<td><h6>" + "Ambari" + "</h6></td>");
-      out.println("<td><h6>" + "ERROR" + "</h6></td>");
-      out.println("</tr>");
-    }
-
-    if (ConfFileReader.checkConfigurationForAmbari(view.getProperties().get("Ambari_URL"))) {
-
-      logger.info("Ambari URl connection:- Success");
-      out.println("<tr class=\"success\">");
-      out.println("<td><h6>" + "Hue" + "</h6></td>");
-      out.println("<td><h6>" + "OK" + "</h6></td>");
-      out.println("</tr>");
-
-    } else {
-
-      logger.info("Ambari URl connection:- Failed");
-      out.println("<tr class=\"danger\">");
-      out.println("<td><h6>" + "Hue" + "</h6></td>");
-      out.println("<td><h6>" + "ERROR" + "</h6></td>");
-      out.println("</tr>");
-
-    }
-
-    if (ConfFileReader.checkAmbariDatbaseConection(view.getProperties().get("ambaridrivername"), view.getProperties().get("ambarijdbcurl"), view.getProperties().get("ambaridbusername"), view.getProperties().get("ambaridbpassword"))) {
-
-      logger.info("Ambari Database connection:- Success");
-      out.println("<tr class=\"success\">");
-      out.println("<td><h6>" + "Ambari Database" + "</h6></td>");
-      out.println("<td><h6>" + "OK" + "</h6></td>");
-      out.println("</tr>");
-
-    } else {
-
-      logger.info("Ambari Database connection:- Failed");
-      out.println("<tr class=\"danger\">");
-      out.println("<td><h6>" + "Ambari Database" + "</h6></td>");
-      out.println("<td><h6>" + "ERROR" + "</h6></td>");
-      out.println("</tr>");
-
-    }
-    if (ConfFileReader.checkHueDatabaseConnection(view.getProperties().get("huedrivername"), view.getProperties().get("huejdbcurl"), view.getProperties().get("huedbusername"), view.getProperties().get("huedbpassword"))) {
-
-      logger.info("Hue Database connection:- Success");
-      out.println("<tr class=\"success\">");
-      out.println("<td><h6>" + "Hue Database" + "</h6></td>");
-      out.println("<td><h6>" + "OK" + "</h6></td>");
-      out.println("</tr>");
-
-    } else {
-
-      logger.info("Hue Database connection:- Failed");
-      out.println("<tr class=\"danger\">");
-      out.println("<td><h6>" + "Hue Database" + "</h6></td>");
-      out.println("<td><h6>" + "ERROR" + "</h6></td>");
-      out.println("</tr>");
-
-    }
-
-    try {
-
-      if (ConfFileReader.checkNamenodeURIConnectionforambari(view.getProperties().get("namenode_URI_Ambari"))) {
-
-        logger.info("Web hdfs Access to ambari:- Success");
-        out.println("<tr class=\"success\">");
-        out.println("<td><h6>" + "namenodeURIAmbari" + "</h6></td>");
-        out.println("<td><h6>" + "OK" + "</h6></td>");
-        out.println("</tr>");
-
-      } else {
-
-        logger.info("Web hdfs Access to ambari:- Failed");
-        out.println("<tr class=\"danger\">");
-        out.println("<td><h6>" + "namenodeURIAmbari" + "</h6></td>");
-        out.println("<td><h6>" + "ERROR" + "</h6></td>");
-        out.println("</tr>");
-
-      }
-    } catch (URISyntaxException e) {
-      logger.error("Error in accessing Webhdfs of Ambari: ", e);
-    }
-
-    try {
-      if (ConfFileReader.checkNamenodeURIConnectionforHue(view.getProperties().get("namenode_URI_Hue"))) {
-
-        logger.info("Web hdfs Access to hue:- Success");
-        out.println("<tr class=\"success\">");
-        out.println("<td><h6>" + "namenodeURIHue" + "</h6></td>");
-        out.println("<td><h6>" + "OK" + "</h6></td>");
-        out.println("</tr>");
-
-      } else {
-
-        logger.info("Web hdfs Access to hue:- Failed");
-        out.println("<tr class=\"danger\">");
-        out.println("<td><h6>" + "namenodeURIHue" + "</h6></td>");
-        out.println("<td><h6>" + "ERROR" + "</h6></td>");
-        out.println("</tr>");
-
-      }
-    } catch (URISyntaxException e) {
-      logger.error("Error in accessing Webhdfs of Hue: " , e);
-    }
-
-    out.println("</tbody></table>");
-
-  }
-
-
-}

+ 0 - 54
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/controller/configurationcheck/ProgressBarStatus.java

@@ -1,54 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-
-package org.apache.ambari.view.huetoambarimigration.controller.configurationcheck;
-
-
-import java.io.IOException;
-import java.io.PrintWriter;
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServlet;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-import javax.servlet.http.HttpSession;
-
-
-public class ProgressBarStatus extends HttpServlet {
-
-  private static final long serialVersionUID = 1L;
-
-  public static String TASK_PROGRESS_VARIABLE = "task_progress_session";
-
-
-  protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
-
-    HttpSession session = request.getSession(true);
-    Integer param = (Integer) session.getAttribute(TASK_PROGRESS_VARIABLE);
-
-    if (param == null) {
-      param = 0;
-    }
-
-    response.setContentType("text/html");
-    PrintWriter out = response.getWriter();
-    out.println(param + "%");
-
-  }
-
-}

+ 0 - 222
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/controller/hive/HiveHistoryMigration.java

@@ -1,222 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-
-package org.apache.ambari.view.huetoambarimigration.controller.hive;
-
-import java.beans.PropertyVetoException;
-import java.io.IOException;
-import java.io.PrintWriter;
-import java.net.URISyntaxException;
-import java.sql.Connection;
-import java.sql.SQLException;
-import java.text.ParseException;
-
-
-import javax.servlet.ServletConfig;
-import javax.servlet.ServletContext;
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServlet;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-import javax.servlet.http.HttpSession;
-
-import org.apache.ambari.view.ViewContext;
-import org.apache.ambari.view.huetoambarimigration.controller.configurationcheck.ConfigurationCheck;
-import org.apache.ambari.view.huetoambarimigration.controller.configurationcheck.ProgressBarStatus;
-import org.apache.log4j.Logger;
-
-import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceAmbariDatabase;
-import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceHueDatabase;
-import org.apache.ambari.view.huetoambarimigration.service.configurationcheck.ConfFileReader;
-import org.apache.ambari.view.huetoambarimigration.service.hive.HiveHistoryQueryImpl;
-
-public class HiveHistoryMigration extends HttpServlet {
-
-
-  private static final long serialVersionUID = 1031422249396784970L;
-  ViewContext view;
-
-  private String startDate;
-  private String endDate;
-  private String instance;
-  private String username;
-
-  @Override
-  public void init(ServletConfig config) throws ServletException {
-
-    super.init(config);
-    ServletContext context = config.getServletContext();
-    view = (ViewContext) context.getAttribute(ViewContext.CONTEXT_ATTRIBUTE);
-
-  }
-
-  public void doGet(HttpServletRequest req, HttpServletResponse resp)
-    throws ServletException, IOException {
-
-    HttpSession session = req.getSession(true);
-    final Logger logger = Logger.getLogger(HiveHistoryMigration.class);
-    Connection connectionHuedb = null;
-    Connection connectionAmbaridb = null;
-
-    /* fetching the variable from the client */
-    username = req.getParameter("username");
-    startDate = req.getParameter("startdate");
-    endDate = req.getParameter("enddate");
-    instance = req.getParameter("instance");
-
-    logger.info("--------------------------------------");
-    logger.info("Hive History query Migration started");
-    logger.info("--------------------------------------");
-    logger.info("start date: " + startDate);
-    logger.info("enddate date: " + endDate);
-    logger.info("instance is: " + username);
-    logger.info("hue username is : " + instance);
-
-    int maxCountOfAmbariDb, i = 0;
-    String time = null;
-    Long epochTime = null;
-    String dirNameforHiveHistroy;
-
-    HiveHistoryQueryImpl hiveHistoryQueryImpl = new HiveHistoryQueryImpl();// creating objects of HiveHistroy implementation
-
-    String[] hiveQuery = new String[1000000];
-
-    try {
-
-      connectionHuedb = DataSourceHueDatabase.getInstance(view.getProperties().get("huedrivername"), view.getProperties().get("huejdbcurl"), view.getProperties().get("huedbusername"), view.getProperties().get("huedbpassword")).getConnection();
-
-      hiveQuery = hiveHistoryQueryImpl.fetchFromHue(username, startDate, endDate, connectionHuedb);
-
-		   /* if No hive query selected from Hue Database according to our search criteria */
-
-      if (hiveQuery[i] == null) {
-
-        logger.info("No queries has been selected acccording to your criteria");
-        resp.setContentType("text/html");
-        PrintWriter out = resp.getWriter();
-        out.println("<br>");
-        out.println("<h4>No queries selected according to your criteria</h4>");
-
-      } else {
-        /* If Hive queries are selected based on our search criteria */
-
-        connectionAmbaridb = DataSourceAmbariDatabase.getInstance(view.getProperties().get("ambaridrivername"), view.getProperties().get("ambarijdbcurl"), view.getProperties().get("ambaridbusername"), view.getProperties().get("ambaridbpassword")).getConnection();// connecting to ambari db
-        connectionAmbaridb.setAutoCommit(false);
-
-        // for each queries fetched from Hue database//
-
-        for (i = 0; hiveQuery[i] != null; i++) {
-
-          float calc = ((float) (i + 1)) / hiveQuery.length * 100;
-          int progressPercentage = Math.round(calc);
-
-          session.setAttribute(ProgressBarStatus.TASK_PROGRESS_VARIABLE, progressPercentage);
-
-          logger.info("_____________________");
-          logger.info("Loop No." + (i + 1));
-          logger.info("_____________________");
-          logger.info("Hue query that has been fetched" + hiveQuery[i]);
-          int id = 0;
-
-          id = hiveHistoryQueryImpl.fetchInstanceTablename(view.getProperties().get("ambaridrivername"), connectionAmbaridb, instance); // feching table name according to the given instance name
-
-          logger.info("Table name has been fetched from intance name");
-
-          hiveHistoryQueryImpl.writetoFileQueryhql(hiveQuery[i], ConfFileReader.getHomeDir());// writing to .hql file to a temp file on local disk
-
-          logger.info(".hql file created in Temp directory");
-
-          hiveHistoryQueryImpl.writetoFileLogs(ConfFileReader.getHomeDir());// writing to logs file to a temp file on local disk
-
-          logger.info("Log file created in Temp directory");
-
-          maxCountOfAmbariDb = (hiveHistoryQueryImpl.fetchMaximumIdfromAmbaridb(view.getProperties().get("ambaridrivername"), connectionAmbaridb, id) + 1);// fetching the maximum count for ambari db to insert
-
-          time = hiveHistoryQueryImpl.getTime();// getting the system current time.
-
-          epochTime = hiveHistoryQueryImpl.getEpochTime();// getting system time as epoch format
-
-          dirNameforHiveHistroy = "/user/admin/hive/jobs/hive-job-" + maxCountOfAmbariDb + "-" + time + "/";// creating the directory name
-
-          logger.info("Directory name where .hql will be saved: " + dirNameforHiveHistroy);
-
-          hiveHistoryQueryImpl.insertRowinAmbaridb(view.getProperties().get("ambaridrivername"), dirNameforHiveHistroy, maxCountOfAmbariDb, epochTime, connectionAmbaridb, id, instance, i);// inserting in ambari database
-
-          if (view.getProperties().get("KerberoseEnabled").equals("y")) {
-
-            logger.info("kerberose enabled");
-            hiveHistoryQueryImpl.createDirKerberorisedSecured(dirNameforHiveHistroy, view.getProperties().get("namenode_URI_Ambari"));// creating directory in kerborized secured hdfs
-            logger.info("Directory created in hdfs");
-            hiveHistoryQueryImpl.putFileinHdfsKerborizedSecured(ConfFileReader.getHomeDir() + "query.hql", dirNameforHiveHistroy, view.getProperties().get("namenode_URI_Ambari"));// copying the .hql file to kerborized hdfs
-            hiveHistoryQueryImpl.putFileinHdfsKerborizedSecured(ConfFileReader.getHomeDir() + "logs", dirNameforHiveHistroy, view.getProperties().get("namenode_URI_Ambari"));// copying the log file to kerborized hdfs
-          } else {
-
-            logger.info("kerberose not enabled");
-            hiveHistoryQueryImpl.createDir(dirNameforHiveHistroy, view.getProperties().get("namenode_URI_Ambari"));// creating directory in hdfs
-            logger.info("Directory created in hdfs");
-            hiveHistoryQueryImpl.putFileinHdfs(ConfFileReader.getHomeDir() + "query.hql", dirNameforHiveHistroy, view.getProperties().get("namenode_URI_Ambari"));// copying the .hql file to hdfs
-            hiveHistoryQueryImpl.putFileinHdfs(ConfFileReader.getHomeDir() + "logs", dirNameforHiveHistroy, view.getProperties().get("namenode_URI_Ambari"));// copying the log file to hdfs
-          }
-
-        }
-        connectionAmbaridb.commit();
-
-      }
-    } catch (SQLException e) {
-      logger.error("Sql exception in ambari database: ", e);
-      try {
-        connectionAmbaridb.rollback();
-        logger.error("Sql statement are Rolledback");
-      } catch (SQLException e1) {
-        logger.error("Sql rollback exception in ambari database",
-          e1);
-      }
-    } catch (ClassNotFoundException e) {
-      logger.error("Class not found :- " ,e);
-    } catch (ParseException e) {
-      logger.error("Parse Exception : " ,e);
-    } catch (URISyntaxException e) {
-      logger.error("URI Syntax Exception: " ,e);
-    } catch (PropertyVetoException e) {
-      logger.error("PropertyVetoException: " ,e);
-    } finally {
-      if (connectionAmbaridb != null) try {
-        connectionAmbaridb.close();
-      } catch (SQLException e) {
-        logger.error("Exception in closing the connection :" ,e);
-      }
-    }
-    //deleteing the temprary files that are created while execution
-    hiveHistoryQueryImpl.deleteFileQueryhql(ConfFileReader.getHomeDir());
-    hiveHistoryQueryImpl.deleteFileQueryLogs(ConfFileReader.getHomeDir());
-
-    session.setAttribute(ProgressBarStatus.TASK_PROGRESS_VARIABLE, 0);
-    logger.info("------------------------------");
-    logger.info("Hive History query Migration Ends");
-    logger.info("------------------------------");
-
-    /* servlet returned to client */
-    resp.setContentType("text/html");
-    PrintWriter out = resp.getWriter();
-    out.println("<br>");
-    out.println("<h4>" + i + " Query has been migrated to  " + instance + "</h4>");
-
-  }
-
-}

+ 0 - 231
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/controller/hive/HiveSavedQueryMigration.java

@@ -1,231 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-
-package org.apache.ambari.view.huetoambarimigration.controller.hive;
-
-import java.beans.PropertyVetoException;
-import java.io.IOException;
-import java.io.PrintWriter;
-import java.net.URISyntaxException;
-import java.sql.Connection;
-import java.sql.SQLException;
-import java.text.ParseException;
-import java.util.ArrayList;
-
-import javax.servlet.ServletConfig;
-import javax.servlet.ServletContext;
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServlet;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-import javax.servlet.*;
-import javax.servlet.http.*;
-
-import org.apache.ambari.view.ViewContext;
-import org.apache.ambari.view.huetoambarimigration.controller.configurationcheck.ProgressBarStatus;
-import org.apache.log4j.Logger;
-
-import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceAmbariDatabase;
-import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceHueDatabase;
-import org.apache.ambari.view.huetoambarimigration.model.*;
-import org.apache.ambari.view.huetoambarimigration.service.configurationcheck.ConfFileReader;
-import org.apache.ambari.view.huetoambarimigration.service.hive.HiveSavedQueryImpl;
-
-public class HiveSavedQueryMigration extends HttpServlet {
-
-  private static final long serialVersionUID = 1031422249396784970L;
-
-  ViewContext view;
-  private String startDate;
-  private String endDate;
-  private String instance;
-  private String userName;
-
-  @Override
-  public void init(ServletConfig config) throws ServletException {
-    super.init(config);
-    ServletContext context = config.getServletContext();
-    view = (ViewContext) context.getAttribute(ViewContext.CONTEXT_ATTRIBUTE);
-  }
-
-  public void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
-
-    HttpSession session = req.getSession(true);
-    final Logger logger = Logger.getLogger(HiveSavedQueryMigration.class);
-
-    Connection connectionAmbaridb = null;
-    Connection connectionHuedb = null;
-
-    /* fetching from servlet */
-    userName = req.getParameter("username");
-    startDate = req.getParameter("startdate");
-    endDate = req.getParameter("enddate");
-    instance = req.getParameter("instance");
-
-    int i = 0;
-
-    logger.info("-------------------------------------");
-    logger.info("Hive saved query Migration started");
-    logger.info("-------------------------------------");
-    logger.info("start date: " + startDate);
-    logger.info("enddate date: " + endDate);
-    logger.info("instance is: " + instance);
-    logger.info("hue username is : " + userName);
-
-    HiveSavedQueryImpl hivesavedqueryimpl = new HiveSavedQueryImpl();/* creating Implementation object  */
-
-    int maxcountForHivehistroryAmbaridb, maxCountforSavequeryAmbaridb;
-    String time = null;
-    Long epochtime = null;
-    String dirNameforHiveSavedquery;
-    ArrayList<PojoHive> dbpojoHiveSavedQuery = new ArrayList<PojoHive>();
-
-    try {
-
-      connectionHuedb = DataSourceHueDatabase.getInstance(view.getProperties().get("huedrivername"), view.getProperties().get("huejdbcurl"), view.getProperties().get("huedbusername"), view.getProperties().get("huedbpassword")).getConnection(); /* fetching connection to hue DB */
-
-      dbpojoHiveSavedQuery = hivesavedqueryimpl.fetchFromHuedb(userName, startDate, endDate, connectionHuedb); /* fetching data from hue db and storing it in to a model */
-
-      if (dbpojoHiveSavedQuery.size() == 0) /* if no data has been fetched from hue db according to search criteria */ {
-
-        logger.info("no Hive saved query has been selected from hue according to your criteria of searching");
-        resp.setContentType("text/html");
-        PrintWriter out = resp.getWriter();
-        out.println("<br>");
-        out.println("<h4>No queries selected according to your criteria</h4>");
-
-      } else {
-
-        connectionAmbaridb = DataSourceAmbariDatabase.getInstance(view.getProperties().get("ambaridrivername"), view.getProperties().get("ambarijdbcurl"), view.getProperties().get("ambaridbusername"), view.getProperties().get("ambaridbpassword")).getConnection();/* connecting to ambari DB */
-        connectionAmbaridb.setAutoCommit(false);
-
-        for (i = 0; i < dbpojoHiveSavedQuery.size(); i++) {
-
-          logger.info("_____________________");
-          logger.info("Loop No." + (i + 1));
-          logger.info("_____________________");
-
-          float calc = ((float) (i + 1)) / dbpojoHiveSavedQuery.size() * 100;
-          int progressPercentage = Math.round(calc);
-
-          session.setAttribute(ProgressBarStatus.TASK_PROGRESS_VARIABLE, progressPercentage);
-
-          logger.info("query fetched from hue:-  " + dbpojoHiveSavedQuery.get(i).getQuery());
-
-          int tableIdSavedQuery = hivesavedqueryimpl.fetchInstancetablenameForSavedqueryHive(view.getProperties().get("ambaridrivername"), connectionAmbaridb, instance); /* fetching the instance table name for hive saved query  from the given instance name */
-
-          int tableIdHistoryHive = hivesavedqueryimpl.fetchInstanceTablenameHiveHistory(view.getProperties().get("ambaridrivername"), connectionAmbaridb, instance); /* fetching the instance table name for hive history query from the given instance name */
-
-          logger.info("Table name are fetched from instance name.");
-
-          hivesavedqueryimpl.writetoFilequeryHql(dbpojoHiveSavedQuery.get(i).getQuery(), ConfFileReader.getHomeDir()); /* writing hive query to a local file*/
-
-          hivesavedqueryimpl.writetoFileLogs(ConfFileReader.getHomeDir());/* writing logs to localfile */
-
-          logger.info(".hql and logs file are saved in temporary directory");
-
-          maxcountForHivehistroryAmbaridb = (hivesavedqueryimpl.fetchMaxdsidFromHiveHistory(view.getProperties().get("ambaridrivername"), connectionAmbaridb, tableIdHistoryHive) + 1);/* fetching the maximum ds_id from hive history table*/
-
-          maxCountforSavequeryAmbaridb = (hivesavedqueryimpl.fetchMaxidforSavedQueryHive(view.getProperties().get("ambaridrivername"), connectionAmbaridb, tableIdSavedQuery) + 1);/* fetching the maximum ds_id from hive saved query table*/
-
-          time = hivesavedqueryimpl.getTime();/* getting system time */
-
-          epochtime = hivesavedqueryimpl.getEpochTime();/* getting epoch time */
-
-
-          dirNameforHiveSavedquery = "/user/admin/hive/jobs/hive-job-" + maxcountForHivehistroryAmbaridb + "-"
-            + time + "/"; // creating hdfs directory name
-
-          logger.info("Directory will be creted in HDFS" + dirNameforHiveSavedquery);
-
-
-          hivesavedqueryimpl.insertRowHiveHistory(view.getProperties().get("ambaridrivername"), dirNameforHiveSavedquery, maxcountForHivehistroryAmbaridb, epochtime, connectionAmbaridb, tableIdHistoryHive, instance, i);// inserting to hive history table
-
-          logger.info("Row inserted in Hive History table.");
-
-          if (view.getProperties().get("KerberoseEnabled").equals("y")) {
-
-            logger.info("Kerberose Enabled");
-            hivesavedqueryimpl.createDirHiveSecured(dirNameforHiveSavedquery, view.getProperties().get("namenode_URI_Ambari"));// creating directory in hdfs in kerborized cluster
-            hivesavedqueryimpl.putFileinHdfsSecured(ConfFileReader.getHomeDir() + "query.hql", dirNameforHiveSavedquery, view.getProperties().get("namenode_URI_Ambari"));// putting .hql file in hdfs in kerberoroized cluster
-            hivesavedqueryimpl.putFileinHdfsSecured(ConfFileReader.getHomeDir() + "logs", dirNameforHiveSavedquery, view.getProperties().get("namenode_URI_Ambari"));// putting logs file in hdfs in kerberoroized cluster
-
-          } else {
-
-            logger.info("Kerberose Not Enabled");
-            hivesavedqueryimpl.createDirHive(dirNameforHiveSavedquery, view.getProperties().get("namenode_URI_Ambari"));// creating directory in hdfs
-            hivesavedqueryimpl.putFileinHdfs(ConfFileReader.getHomeDir() + "query.hql", dirNameforHiveSavedquery, view.getProperties().get("namenode_URI_Ambari"));// putting .hql file in hdfs directory
-            hivesavedqueryimpl.putFileinHdfs(ConfFileReader.getHomeDir() + "logs", dirNameforHiveSavedquery, view.getProperties().get("namenode_URI_Ambari"));// putting logs file in hdfs
-          }
-
-          //inserting into hived saved query table
-          hivesavedqueryimpl.insertRowinSavedQuery(view.getProperties().get("ambaridrivername"), maxCountforSavequeryAmbaridb, dbpojoHiveSavedQuery.get(i).getDatabase(), dirNameforHiveSavedquery, dbpojoHiveSavedQuery.get(i).getQuery(), dbpojoHiveSavedQuery.get(i).getOwner(), connectionAmbaridb, tableIdSavedQuery, instance, i);
-
-        }
-        connectionAmbaridb.commit();
-
-      }
-
-
-    } catch (SQLException e) {
-
-      logger.error("SQL exception: ", e);
-      try {
-        connectionAmbaridb.rollback();
-        logger.info("roll back done");
-      } catch (SQLException e1) {
-        logger.error("Rollback error: ", e1);
-
-      }
-    } catch (ClassNotFoundException e1) {
-      logger.error("Class not found : " , e1);
-    } catch (ParseException e) {
-      logger.error("ParseException: " , e);
-    } catch (URISyntaxException e) {
-      logger.error("URISyntaxException: " , e);
-    } catch (PropertyVetoException e) {
-      logger.error("PropertyVetoException:" , e);
-    } finally {
-      if (null != connectionAmbaridb)
-        try {
-          connectionAmbaridb.close();
-        } catch (SQLException e) {
-          logger.error("Error in connection close", e);
-        }
-    }
-
-
-    hivesavedqueryimpl.deleteFileQueryhql(ConfFileReader.getHomeDir());
-    hivesavedqueryimpl.deleteFileQueryLogs(ConfFileReader.getHomeDir());
-    session.setAttribute(ProgressBarStatus.TASK_PROGRESS_VARIABLE, 0);
-
-    logger.info("-------------------------------");
-    logger.info("Hive saved query Migration end");
-    logger.info("--------------------------------");
-
-    resp.setContentType("text/html");
-    PrintWriter out = resp.getWriter();
-    out.println("<br>");
-    out.println("<h4>" + i + " Saved query has been migrated to  " + instance + "</h4>");
-  }
-}
-
-
-
-

+ 0 - 208
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/controller/pig/PigScriptMigration.java

@@ -1,208 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-
-package org.apache.ambari.view.huetoambarimigration.controller.pig;
-
-import java.beans.PropertyVetoException;
-import java.io.IOException;
-import java.io.PrintWriter;
-import java.sql.Connection;
-import java.sql.SQLException;
-import java.text.ParseException;
-import java.util.ArrayList;
-
-import javax.servlet.ServletConfig;
-import javax.servlet.ServletContext;
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServlet;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-import javax.servlet.http.HttpSession;
-
-import org.apache.ambari.view.ViewContext;
-import org.apache.ambari.view.huetoambarimigration.controller.configurationcheck.ProgressBarStatus;
-import org.apache.log4j.Logger;
-
-import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceAmbariDatabase;
-import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceHueDatabase;
-import org.apache.ambari.view.huetoambarimigration.model.*;
-import org.apache.ambari.view.huetoambarimigration.service.configurationcheck.ConfFileReader;
-import org.apache.ambari.view.huetoambarimigration.service.pig.PigScriptImpl;
-
-public class PigScriptMigration extends HttpServlet {
-
-
-  private static final long serialVersionUID = 1031422249396784970L;
-  ViewContext view;
-  private String startDate;
-  private String endDate;
-  private String instance;
-  private String userName;
-
-  @Override
-  public void init(ServletConfig config) throws ServletException {
-
-    super.init(config);
-    ServletContext context = config.getServletContext();
-    view = (ViewContext) context.getAttribute(ViewContext.CONTEXT_ATTRIBUTE);
-
-  }
-
-  public void doGet(HttpServletRequest req, HttpServletResponse resp)
-    throws ServletException, IOException {
-
-    HttpSession session = req.getSession(true);
-    final Logger logger = Logger.getLogger(PigScriptMigration.class);
-    Connection connectionHuedb = null;
-    Connection connectionAmbaridb = null;
-
-    logger.info("-------------------------------------");
-    logger.info("Pig saved script Migration started");
-    logger.info("-------------------------------------");
-
-    // fetching data from the client
-
-    userName = req.getParameter("username");
-    startDate = req.getParameter("startdate");
-    endDate = req.getParameter("enddate");
-    instance = req.getParameter("instance");
-    int i = 0;
-
-    logger.info("start date: " + startDate);
-    logger.info("enddate date: " + endDate);
-    logger.info("instance is: " + userName);
-    logger.info("hue username is : " + instance);
-
-    // Pig script migration service implementation
-    PigScriptImpl pigsavedscriptmigration = new PigScriptImpl();
-
-    int maxcountforsavequery = 0, maxcountforpigsavedscript;
-    String time = null, timetobeInorder = null;
-    Long epochTime = null;
-    String dirNameForPigScript, completeDirandFilePath, pigscriptFilename="";
-    int pigInstanceTableName;
-
-    ArrayList<PojoPig> dbpojoPigSavedscript = new ArrayList<PojoPig>();
-
-    try {
-      connectionHuedb = DataSourceHueDatabase.getInstance(view.getProperties().get("huedrivername"), view.getProperties().get("huejdbcurl"), view.getProperties().get("huedbusername"), view.getProperties().get("huedbpassword")).getConnection();//connection to Hue DB
-      dbpojoPigSavedscript = pigsavedscriptmigration.fetchFromHueDatabase(userName, startDate, endDate, connectionHuedb, view.getProperties().get("huedrivername"));// Fetching Pig script details from Hue DB
-
-      /* If no Pig script was fetched from the Hue DB for the given search criteria */
-      if (dbpojoPigSavedscript.size() == 0) {
-
-        logger.info("no Pig script was selected from Hue according to the search criteria");
-        resp.setContentType("text/html");
-        PrintWriter out = resp.getWriter();
-        out.println("<br>");
-        out.println("<h4>No Pig Script selected according to your criteria</h4>");
-
-      } else {
-
-        connectionAmbaridb = DataSourceAmbariDatabase.getInstance(view.getProperties().get("ambaridrivername"), view.getProperties().get("ambarijdbcurl"), view.getProperties().get("ambaridbusername"), view.getProperties().get("ambaridbpassword")).getConnection();// connecting to ambari db
-        connectionAmbaridb.setAutoCommit(false);
-        logger.info("loop will continue for " + dbpojoPigSavedscript.size() + "times");
-
-        //for each pig script found in Hue Database
-
-        for (i = 0; i < dbpojoPigSavedscript.size(); i++) {
-
-
-          float calc = ((float) (i + 1)) / dbpojoPigSavedscript.size() * 100;
-          int progressPercentage = Math.round(calc);
-
-          session.setAttribute(ProgressBarStatus.TASK_PROGRESS_VARIABLE, progressPercentage);
-
-          logger.info("Loop No." + (i + 1));
-          logger.info("________________");
-          logger.info("the title of script:  " + dbpojoPigSavedscript.get(i).getTitle());
-
-          pigInstanceTableName = pigsavedscriptmigration.fetchInstanceTablenamePigScript(view.getProperties().get("ambaridrivername"), connectionAmbaridb, instance);// finding the table name in ambari from the given instance
-
-          maxcountforpigsavedscript = (pigsavedscriptmigration.fetchmaxIdforPigSavedScript(view.getProperties().get("ambaridrivername"), connectionAmbaridb, pigInstanceTableName) + 1);// next primary key value for the Pig script table
-
-          time = pigsavedscriptmigration.getTime();
-
-          timetobeInorder = pigsavedscriptmigration.getTimeInorder();
-
-          epochTime = pigsavedscriptmigration.getEpochTime();
-
-          dirNameForPigScript = "/user/admin/pig/scripts/";
-
-          pigscriptFilename = dbpojoPigSavedscript.get(i).getTitle() + "-" + time + ".pig";
-
-          completeDirandFilePath = dirNameForPigScript + pigscriptFilename;
-
-          pigsavedscriptmigration.writetPigScripttoLocalFile(dbpojoPigSavedscript.get(i).getScript(), dbpojoPigSavedscript.get(i).getTitle(), dbpojoPigSavedscript.get(i).getDt(), ConfFileReader.getHomeDir(), pigscriptFilename);
-
-          pigsavedscriptmigration.insertRowForPigScript(view.getProperties().get("ambaridrivername"), completeDirandFilePath, maxcountforsavequery, maxcountforpigsavedscript, time, timetobeInorder, epochTime, dbpojoPigSavedscript.get(i).getTitle(), connectionAmbaridb, pigInstanceTableName, instance, i);
-
-          if (view.getProperties().get("KerberoseEnabled").equals("y")) {
-            pigsavedscriptmigration.putFileinHdfsSecured(ConfFileReader.getHomeDir() + pigscriptFilename, dirNameForPigScript, view.getProperties().get("namenode_URI_Ambari"));
-          } else {
-            pigsavedscriptmigration.putFileinHdfs(ConfFileReader.getHomeDir() + pigscriptFilename, dirNameForPigScript, view.getProperties().get("namenode_URI_Ambari"));
-          }
-
-          logger.info(dbpojoPigSavedscript.get(i).getTitle() + " migrated to Ambari");
-
-          pigsavedscriptmigration.deletePigScriptLocalFile(ConfFileReader.getHomeDir(), pigscriptFilename);
-
-        }
-        connectionAmbaridb.commit();
-
-      }
-
-
-    } catch (SQLException e) {
-      logger.error("Sql exception in ambari database", e);
-      try {
-        connectionAmbaridb.rollback();
-        logger.info("rollback done");
-      } catch (SQLException e1) {
-        logger.error("Sql exception while doing roll back", e);
-      }
-    } catch (ClassNotFoundException e2) {
-      logger.error("class not found exception", e2);
-    } catch (ParseException e) {
-      logger.error("ParseException: " , e);
-    } catch (PropertyVetoException e) {
-      logger.error("PropertyVetoException: " , e);
-    } finally {
-      if (null != connectionAmbaridb)
-        try {
-          connectionAmbaridb.close();
-        } catch (SQLException e) {
-          logger.error("connection close exception: ", e);
-        }
-    }
-
-    session.setAttribute(ProgressBarStatus.TASK_PROGRESS_VARIABLE, 0);
-
-    resp.setContentType("text/html");
-    PrintWriter out = resp.getWriter();
-    out.println("<br>");
-    out.println("<h4>" + i + " Pig Script has been migrated to " + instance + "</h4>");
-
-    logger.info("----------------------------------");
-    logger.info("Pig saved script Migration ends");
-    logger.info("----------------------------------");
-  }
-
-
-}

+ 0 - 2
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/DataSourceAmbariDatabase.java

@@ -18,8 +18,6 @@
 
 package org.apache.ambari.view.huetoambarimigration.datasource;
 
-import org.apache.ambari.view.huetoambarimigration.controller.configurationcheck.ConfigurationCheck;
-import org.apache.ambari.view.huetoambarimigration.service.configurationcheck.ConfFileReader;
 import com.mchange.v2.c3p0.ComboPooledDataSource;
 
 import java.beans.PropertyVetoException;

+ 0 - 2
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/DataSourceHueDatabase.java

@@ -18,8 +18,6 @@
 
 package org.apache.ambari.view.huetoambarimigration.datasource;
 
-import org.apache.ambari.view.huetoambarimigration.controller.configurationcheck.ConfigurationCheck;
-import org.apache.ambari.view.huetoambarimigration.service.configurationcheck.ConfFileReader;
 import com.mchange.v2.c3p0.ComboPooledDataSource;
 
 import java.beans.PropertyVetoException;

+ 46 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/historyqueryset/MysqlQuerySetAmbariDB.java

@@ -0,0 +1,46 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.historyqueryset;
+
+/**
+ * Overriding methods for MySQL-specific queries
+ */
+
+public class MysqlQuerySetAmbariDB extends QuerySetAmbariDB {
+
+  @Override
+  protected String getSqlMaxDSidFromTableId(int id) {
+    return "select max( cast(ds_id as unsigned) ) as max from DS_JOBIMPL_" + id + ";";
+  }
+
+  @Override
+  protected String getTableIdSqlFromInstanceName() {
+    return "select id from viewentity where class_name LIKE 'org.apache.ambari.view.hive.resources.jobs.viewJobs.JobImpl' and view_instance_name=?;";
+  }
+
+  @Override
+  protected String getSqlInsertHiveHistory(int id) {
+    return "INSERT INTO DS_JOBIMPL_" + id + " values (?,'','','','','default',?,0,'','',?,'admin',?,'','job','','','Unknown',?,'','Worksheet');";
+  }
+
+  @Override
+  protected String getRevSql(int id, String maxcount) {
+    return "delete from  DS_JOBIMPL_" + id + " where ds_id='" + maxcount + "';";
+  }
+
+}

+ 44 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/historyqueryset/OracleQuerySetAmbariDB.java

@@ -0,0 +1,44 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.historyqueryset;
+
+/**
+ * Overriding methods for Oracle-specific queries
+ */
+
+public class OracleQuerySetAmbariDB extends QuerySetAmbariDB {
+
+  @Override
+  protected String getSqlMaxDSidFromTableId(int id) {
+    return "select MAX(cast(ds_id as integer)) as max from ds_jobimpl_" + id + "";
+  }
+  @Override
+  protected String getTableIdSqlFromInstanceName() {
+    return "select id from viewentity where class_name LIKE 'org.apache.ambari.view.hive.resources.jobs.viewJobs.JobImpl' and view_instance_name=?";
+  }
+  @Override
+  protected String getSqlInsertHiveHistory(int id) {
+    return "INSERT INTO ds_jobimpl_" + id + " values (?,'','','','','default',?,0,'','',?,'admin',?,'','job','','','Unknown',?,'','Worksheet')";
+  }
+  @Override
+  protected String getRevSql(int id, String maxcount) {
+    return "delete from  ds_jobimpl_" + id + " where ds_id='" + maxcount + "'";
+  }
+
+}

+ 22 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/historyqueryset/PostgressQuerySetAmbariDB.java

@@ -0,0 +1,22 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.historyqueryset;
+
+
+public class PostgressQuerySetAmbariDB extends QuerySetAmbariDB {
+}

+ 79 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/historyqueryset/QuerySetAmbariDB.java

@@ -0,0 +1,79 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.historyqueryset;
+
+import java.sql.Connection;
+import java.sql.PreparedStatement;
+import java.sql.SQLException;
+
+/**
+ * History query prepared statements
+ */
+
+public abstract class QuerySetAmbariDB {
+
+  public PreparedStatement getTableIdFromInstanceName(Connection connection, String instance) throws SQLException {
+    PreparedStatement prSt = connection.prepareStatement(getTableIdSqlFromInstanceName());
+    prSt.setString(1, instance);
+    return prSt;
+  }
+
+  public PreparedStatement getMaxDsIdFromTableId(Connection connection, int id) throws SQLException {
+
+    PreparedStatement prSt = connection.prepareStatement(getSqlMaxDSidFromTableId(id));
+
+    return prSt;
+  }
+
+  public PreparedStatement insertToHiveHistory(Connection connection, int id, String maxcount, long epochtime, String dirname) throws SQLException {
+
+    String Logfile=  dirname + "logs";
+    String queryHqlFile= dirname + "query.hql";
+
+    PreparedStatement prSt = connection.prepareStatement(getSqlInsertHiveHistory(id));
+
+    prSt.setString(1, maxcount);
+    prSt.setLong(2, epochtime);
+    prSt.setString(3, Logfile);
+    prSt.setString(4, queryHqlFile);
+    prSt.setString(5, dirname);
+
+    return prSt;
+  }
+
+  public String RevertSql(int id, String maxcount) throws SQLException {
+    return getRevSql(id, maxcount);
+  }
+
+  protected String getSqlMaxDSidFromTableId(int id) {
+    return "select MAX(cast(ds_id as integer)) as max from ds_jobimpl_" + id + ";";
+  }
+
+  protected String getTableIdSqlFromInstanceName() {
+    return "select id from viewentity where class_name LIKE 'org.apache.ambari.view.hive.resources.jobs.viewJobs.JobImpl' and view_instance_name=?;";
+  }
+
+  protected String getSqlInsertHiveHistory(int id) {
+    return "INSERT INTO ds_jobimpl_" + id + " values (?,'','','','','default',?,0,'','',?,'admin',?,'','job','','','Unknown',?,'','Worksheet');";
+  }
+
+  protected String getRevSql(int id, String maxcount) {
+    return "delete from  ds_jobimpl_" + id + " where ds_id='" + maxcount + "';";
+  }
+
+}
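
The dialect classes above follow a template-method layout: the abstract base builds and binds every PreparedStatement, while the MySQL/Oracle/Postgres subclasses override only the SQL text. A minimal caller-side sketch, assuming it lives in the same package as these query sets; the forDriver helper and the driver-name checks are illustrative assumptions, not part of this patch:

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

public class HistoryQuerySetSketch {

  // Hypothetical factory: pick the dialect from the configured Ambari JDBC driver name.
  static QuerySetAmbariDB forDriver(String driverName) {
    if (driverName.contains("mysql")) {
      return new MysqlQuerySetAmbariDB();
    }
    if (driverName.contains("oracle")) {
      return new OracleQuerySetAmbariDB();
    }
    return new PostgressQuerySetAmbariDB(); // Postgres keeps the base-class SQL unchanged
  }

  // Resolve the numeric suffix of DS_JOBIMPL_<id> for a given Hive view instance.
  static int fetchTableId(Connection ambariConn, String driverName, String instance) throws SQLException {
    QuerySetAmbariDB querySet = forDriver(driverName);
    try (PreparedStatement prSt = querySet.getTableIdFromInstanceName(ambariConn, instance);
         ResultSet rs = prSt.executeQuery()) {
      return rs.next() ? rs.getInt("id") : -1;
    }
  }
}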

+ 23 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/instancedetail/MysqlQuerySetAmbariDB.java

@@ -0,0 +1,23 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.instancedetail;
+
+
+public class MysqlQuerySetAmbariDB extends QuerySetAmbariDB {
+
+}

+ 31 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/instancedetail/OracleQuerySetAmbariDB.java

@@ -0,0 +1,31 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.instancedetail;
+
+
+public class OracleQuerySetAmbariDB extends QuerySetAmbariDB {
+
+  @Override
+  protected String getHiveInstanceSql(){
+    return "select distinct(view_instance_name) as instancename from viewentity where view_name='HIVE{1.0.0}'";
+  }
+  @Override
+  protected String getAllInstanceDetailSql(){
+    return "select distinct(view_instance_name) as instancename from viewentity where view_name='HIVE{1.0.0}' or view_name='PIG{1.0.0}';";
+  }
+}

+ 22 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/instancedetail/PostgressQuerySetAmbariDB.java

@@ -0,0 +1,22 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.instancedetail;
+
+
+public class PostgressQuerySetAmbariDB extends QuerySetAmbariDB {
+}

+ 48 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/instancedetail/QuerySetAmbariDB.java

@@ -0,0 +1,48 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.instancedetail;
+
+import java.sql.Connection;
+import java.sql.PreparedStatement;
+import java.sql.SQLException;
+
+/**
+ * Service class to get instance detail
+ */
+
+public abstract class QuerySetAmbariDB {
+
+  public PreparedStatement getHiveInstanceDeatil(Connection connection) throws SQLException {
+    PreparedStatement prSt = connection.prepareStatement(getHiveInstanceSql());
+    return prSt;
+  }
+
+  public PreparedStatement getAllInstanceDeatil(Connection connection) throws SQLException {
+    PreparedStatement prSt = connection.prepareStatement(getAllInstanceDetailSql());
+    return prSt;
+  }
+
+  protected String getHiveInstanceSql(){
+    return "select distinct(view_instance_name) as instancename from viewentity where view_name='HIVE{1.0.0}';";
+  }
+
+  protected String getAllInstanceDetailSql(){
+    return "select distinct(view_instance_name) as instancename from viewentity where view_name='HIVE{1.0.0}' or view_name='PIG{1.0.0}';";
+  }
+
+}
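
For context, a short sketch of how the instance-detail statements might be consumed (hypothetical helper under a same-package assumption; the method name getHiveInstanceDeatil is kept exactly as declared above):

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;

public class InstanceDetailSketch {

  // Collect the distinct Hive view instance names registered in the Ambari DB.
  static List<String> hiveInstances(Connection ambariConn, QuerySetAmbariDB querySet) throws SQLException {
    List<String> names = new ArrayList<String>();
    try (PreparedStatement prSt = querySet.getHiveInstanceDeatil(ambariConn);
         ResultSet rs = prSt.executeQuery()) {
      while (rs.next()) {
        names.add(rs.getString("instancename"));
      }
    }
    return names;
  }
}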

+ 65 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/savedqueryset/MysqlQuerySetAmbariDB.java

@@ -0,0 +1,65 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.savedqueryset;
+
+/**
+ * Overriding methods specific to MySQL
+ */
+
+public class MysqlQuerySetAmbariDB extends QuerySetAmbariDB {
+
+  @Override
+  protected String getSqlMaxDSidFromTableIdSavedQuery(int id) {
+    return "select max( cast(ds_id as unsigned) ) as max from DS_SAVEDQUERY_" + id + ";";
+  }
+
+  @Override
+  protected String getTableIdSqlFromInstanceNameSavedQuery() {
+    return "select id from viewentity where class_name LIKE 'org.apache.ambari.view.hive.resources.savedQueries.SavedQuery' and view_instance_name=?;";
+  }
+
+  @Override
+  protected String getSqlMaxDSidFromTableIdHistoryQuery(int id) {
+    return "select MAX(cast(ds_id as integer)) as max from DS_JOBIMPL_" + id + ";";
+  }
+
+  @Override
+  protected String getTableIdSqlFromInstanceNameHistoryQuery() {
+    return "select id from viewentity where class_name LIKE 'org.apache.ambari.view.hive.resources.jobs.viewJobs.JobImpl' and view_instance_name=?;";
+  }
+
+  @Override
+  protected String getSqlInsertHiveHistory(int id) {
+    return "INSERT INTO DS_JOBIMPL_" + id + " values (?,'','','','','default',?,0,'','',?,'admin',?,'','job','','','Unknown',?,'','Worksheet');";
+  }
+
+  @Override
+  protected String getSqlInsertSavedQuery(int id) {
+    return "INSERT INTO DS_SAVEDQUERY_" + id + " values (?,?,'" + "admin" + "',?,?,?);";
+  }
+
+  @Override
+  protected String getRevSqlSavedQuery(int id, String maxcount) {
+    return "delete from  DS_SAVEDQUERY_" + id + " where ds_id='" + maxcount + "';";
+  }
+
+  @Override
+  protected String getRevSqlHistoryQuery(int id, String maxcount) {
+    return "delete from  DS_JOBIMPL_" + id + " where ds_id='" + maxcount + "';";
+  }
+}

+ 58 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/savedqueryset/OracleQuerySetAmbariDB.java

@@ -0,0 +1,58 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.savedqueryset;
+
+public class OracleQuerySetAmbariDB extends QuerySetAmbariDB {
+
+  @Override
+  protected String getSqlMaxDSidFromTableIdSavedQuery(int id) {
+    return "select MAX(cast(ds_id as integer)) as max from ds_savedquery_" + id + "";
+  }
+
+  @Override
+  protected String getTableIdSqlFromInstanceNameSavedQuery() {
+    return "select id from viewentity where class_name LIKE 'org.apache.ambari.view.hive.resources.savedQueries.SavedQuery' and view_instance_name=?";
+  }
+
+  @Override
+  protected String getSqlMaxDSidFromTableIdHistoryQuery(int id) {
+    return "select MAX(cast(ds_id as integer)) as max from ds_jobimpl_" + id + "";
+  }
+  @Override
+  protected String getTableIdSqlFromInstanceNameHistoryQuery() {
+    return "select id from viewentity where class_name LIKE 'org.apache.ambari.view.hive.resources.jobs.viewJobs.JobImpl' and view_instance_name=?";
+  }
+
+  @Override
+  protected String getSqlInsertHiveHistory(int id) {
+    return "INSERT INTO ds_jobimpl_" + id + " values (?,'','','','','default',?,0,'','',?,'admin',?,'','job','','','Unknown',?,'','Worksheet')";
+  }
+  @Override
+  protected String getSqlInsertSavedQuery(int id) {
+    return "INSERT INTO ds_savedquery_" + id + " values (?,?,'" + "admin" + "',?,?,?)";
+  }
+
+  @Override
+  protected String getRevSqlSavedQuery(int id, String maxcount) {
+    return "delete from  ds_savedquery_" + id + " where ds_id='" + maxcount + "'";
+  }
+  @Override
+  protected String getRevSqlHistoryQuery(int id, String maxcount) {
+    return "delete from  ds_jobimpl_" + id + " where ds_id='" + maxcount + "'";
+  }
+}

+ 22 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/savedqueryset/PostgressQuerySetAmbariDB.java

@@ -0,0 +1,22 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.savedqueryset;
+
+
+public class PostgressQuerySetAmbariDB extends QuerySetAmbariDB {
+}

+ 131 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/savedqueryset/QuerySetAmbariDB.java

@@ -0,0 +1,131 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.savedqueryset;
+
+import java.sql.Connection;
+import java.sql.PreparedStatement;
+import java.sql.SQLException;
+
+/**
+ * Prepared statements for saved queries
+ */
+
+public abstract class QuerySetAmbariDB {
+
+  public PreparedStatement getTableIdFromInstanceNameSavedquery(Connection connection, String instance) throws SQLException {
+
+    PreparedStatement prSt = connection.prepareStatement(getTableIdSqlFromInstanceNameSavedQuery());
+    prSt.setString(1, instance);
+    return prSt;
+  }
+
+  public PreparedStatement getTableIdFromInstanceNameHistoryquery(Connection connection, String instance) throws SQLException {
+
+    PreparedStatement prSt = connection.prepareStatement(getTableIdSqlFromInstanceNameHistoryQuery());
+    prSt.setString(1, instance);
+    return prSt;
+  }
+
+  public PreparedStatement getMaxDsIdFromTableIdHistoryquery(Connection connection, int id) throws SQLException {
+
+    PreparedStatement prSt = connection.prepareStatement(getSqlMaxDSidFromTableIdHistoryQuery(id));
+    return prSt;
+  }
+
+  public PreparedStatement getMaxDsIdFromTableIdSavedquery(Connection connection, int id) throws SQLException {
+
+    PreparedStatement prSt = connection.prepareStatement(getSqlMaxDSidFromTableIdSavedQuery(id));
+
+    return prSt;
+  }
+
+  public PreparedStatement insertToHiveHistory(Connection connection, int id, String maxcount, long epochtime, String dirname) throws SQLException {
+
+    String Logfile = dirname + "logs";
+    String queryHqlFile = dirname + "query.hql";
+
+    PreparedStatement prSt = connection.prepareStatement(getSqlInsertHiveHistory(id));
+
+    prSt.setString(1, maxcount);
+    prSt.setLong(2, epochtime);
+    prSt.setString(3, Logfile);
+    prSt.setString(4, queryHqlFile);
+    prSt.setString(5, dirname);
+
+    return prSt;
+  }
+
+  public PreparedStatement insertToHiveSavedQuery(Connection connection, int id, String maxcount, String database, String dirname, String query, String name) throws SQLException {
+
+    String Logfile = dirname + "logs";
+    String queryHqlFile = dirname + "query.hql";
+
+    PreparedStatement prSt = connection.prepareStatement(getSqlInsertSavedQuery(id));
+
+    prSt.setString(1, maxcount);
+    prSt.setString(2, database);
+    prSt.setString(3, queryHqlFile);
+    prSt.setString(4, query);
+    prSt.setString(5, name);
+
+    return prSt;
+  }
+
+  public String revertSqlHistoryQuery(int id, String maxcount) throws SQLException {
+
+    return getRevSqlHistoryQuery(id, maxcount);
+  }
+
+  public String revertSqlSavedQuery(int id, String maxcount) throws SQLException {
+
+    return getRevSqlSavedQuery(id, maxcount);
+  }
+
+  protected String getSqlMaxDSidFromTableIdSavedQuery(int id) {
+    return "select MAX(cast(ds_id as integer)) as max from ds_savedquery_" + id + ";";
+  }
+
+  protected String getTableIdSqlFromInstanceNameSavedQuery() {
+    return "select id from viewentity where class_name LIKE 'org.apache.ambari.view.hive.resources.savedQueries.SavedQuery' and view_instance_name=?;";
+  }
+
+  protected String getSqlMaxDSidFromTableIdHistoryQuery(int id) {
+    return "select MAX(cast(ds_id as integer)) as max from ds_jobimpl_" + id + ";";
+  }
+
+  protected String getTableIdSqlFromInstanceNameHistoryQuery() {
+    return "select id from viewentity where class_name LIKE 'org.apache.ambari.view.hive.resources.jobs.viewJobs.JobImpl' and view_instance_name=?;";
+  }
+
+  protected String getSqlInsertHiveHistory(int id) {
+    return "INSERT INTO ds_jobimpl_" + id + " values (?,'','','','','default',?,0,'','',?,'admin',?,'','job','','','Unknown',?,'','Worksheet');";
+  }
+
+  protected String getSqlInsertSavedQuery(int id) {
+    return "INSERT INTO ds_savedquery_" + id + " values (?,?,'" + "admin" + "',?,?,?);";
+  }
+
+  protected String getRevSqlSavedQuery(int id, String maxcount) {
+    return "delete from  ds_savedquery_" + id + " where ds_id='" + maxcount + "';";
+  }
+
+  protected String getRevSqlHistoryQuery(int id, String maxcount) {
+    return "delete from  ds_jobimpl_" + id + " where ds_id='" + maxcount + "';";
+  }
+
+}
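
The revert SQL builders above pair with the manual-commit flow used by the migration servlets: once a row is committed, a later non-SQL failure (for example the HDFS copy) can be compensated with a delete by ds_id. A sketch under those assumptions; copyQueryFilesToHdfs is a hypothetical stand-in, not part of this patch:

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Statement;

public class SavedQueryInsertSketch {

  // Hypothetical stand-in for the HDFS copy that follows the DB insert.
  static void copyQueryFilesToHdfs(String dirname) throws Exception { /* ... */ }

  static void migrateOne(Connection ambariConn, QuerySetAmbariDB querySet, int tableId,
                         String maxcount, String database, String dirname,
                         String query, String name) throws Exception {
    ambariConn.setAutoCommit(false);
    try (PreparedStatement prSt = querySet.insertToHiveSavedQuery(
        ambariConn, tableId, maxcount, database, dirname, query, name)) {
      prSt.executeUpdate();
      ambariConn.commit();
    }
    try {
      copyQueryFilesToHdfs(dirname);
    } catch (Exception e) {
      // Row is already committed; undo it with the compensating delete by ds_id.
      try (Statement st = ambariConn.createStatement()) {
        st.executeUpdate(querySet.revertSqlSavedQuery(tableId, maxcount));
        ambariConn.commit();
      }
      throw e;
    }
  }
}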

+ 23 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/instancedetail/MysqlQuerySetAmbariDB.java

@@ -0,0 +1,23 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.pig.instancedetail;
+
+
+public class MysqlQuerySetAmbariDB extends QuerySetAmbariDB {
+
+}

+ 30 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/instancedetail/OracleQuerySetAmbariDB.java

@@ -0,0 +1,30 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.pig.instancedetail;
+
+/**
+ * Overriding method for the Oracle-specific Pig instance query
+ */
+public class OracleQuerySetAmbariDB extends QuerySetAmbariDB {
+
+  @Override
+  protected String getPigInstanceSql() {
+    return "select distinct(view_instance_name) as instancename from viewentity where view_name='PIG{1.0.0}'";
+  }
+
+}

+ 22 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/instancedetail/PostgressQuerySetAmbariDB.java

@@ -0,0 +1,22 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.pig.instancedetail;
+
+
+public class PostgressQuerySetAmbariDB extends QuerySetAmbariDB {
+}

+ 39 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/instancedetail/QuerySetAmbariDB.java

@@ -0,0 +1,39 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.pig.instancedetail;
+
+import java.sql.Connection;
+import java.sql.PreparedStatement;
+import java.sql.SQLException;
+
+/**
+ * Prepared statements for Pig instance details
+ */
+
+public abstract class QuerySetAmbariDB {
+
+  public PreparedStatement getAllPigInstance(Connection connection) throws SQLException {
+    PreparedStatement prSt = connection.prepareStatement(getPigInstanceSql());
+    return prSt;
+  }
+
+  protected String getPigInstanceSql() {
+    return "select distinct(view_instance_name) as instancename from viewentity where view_name='PIG{1.0.0}';";
+  }
+
+}

+ 43 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/jobqueryset/MysqlQuerySetAmbariDB.java

@@ -0,0 +1,43 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.pig.jobqueryset;
+
+/**
+ * Overriding methods specific to MySQL
+ */
+
+public class MysqlQuerySetAmbariDB extends QuerySetAmbariDB {
+
+  @Override
+  protected String getSqlMaxDSidFromTableId(int id) {
+    return "select max( cast(ds_id as unsigned) ) as max from DS_PIGJOB_" + id + ";";
+  }
+
+  @Override
+  protected String getTableIdSqlFromInstanceName() {
+    return "select id from viewentity where class_name LIKE 'org.apache.ambari.view.pig.resources.jobs.models.PigJob' and view_instance_name=?;";
+  }
+  @Override
+  protected String getSqlinsertToPigJob(int id) {
+    return "INSERT INTO DS_PIGJOB_" + id + " values (?,?,0,'','f','','','admin',0,?,'',?,'','',?,?,'',?);";
+  }
+  @Override
+  protected String getRevSql(int id, String maxcount) {
+    return "delete from  DS_PIGJOB_" + id + " where ds_id='" + maxcount + "';";
+  }
+}

+ 41 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/jobqueryset/OracleQuerySetAmbariDB.java

@@ -0,0 +1,41 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.pig.jobqueryset;
+
+/**
+ * Override methods for Oracle
+ */
+public class OracleQuerySetAmbariDB extends QuerySetAmbariDB {
+
+  @Override
+  protected String getSqlMaxDSidFromTableId(int id) {
+    return "select MAX(cast(ds_id as integer)) as max from ds_pigjob_" + id + "";
+  }
+  @Override
+  protected String getTableIdSqlFromInstanceName() {
+    return "select id from viewentity where class_name LIKE 'org.apache.ambari.view.pig.resources.jobs.models.PigJob' and view_instance_name=?";
+  }
+  @Override
+  protected String getSqlinsertToPigJob(int id) {
+    return "INSERT INTO ds_pigjob_" + id + " values (?,?,0,'','f','','','admin',0,?,'',?,'','',?,?,'',?)";
+  }
+  @Override
+  protected String getRevSql(int id, String maxcount) {
+    return "delete from  ds_pigjob_" + id + " where ds_id='" + maxcount + "'";
+  }
+}

+ 22 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/jobqueryset/PostgressQuerySetAmbariDB.java

@@ -0,0 +1,22 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.pig.jobqueryset;
+
+
+public class PostgressQuerySetAmbariDB extends QuerySetAmbariDB {
+}

+ 80 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/jobqueryset/QuerySetAmbariDB.java

@@ -0,0 +1,80 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.pig.jobqueryset;
+
+import java.sql.Connection;
+import java.sql.PreparedStatement;
+import java.sql.SQLException;
+
+/**
+ * Pig job prepared statements
+ */
+
+public abstract class QuerySetAmbariDB {
+
+  public PreparedStatement getTableIdFromInstanceName(Connection connection, String instance) throws SQLException {
+
+    PreparedStatement prSt = connection.prepareStatement(getTableIdSqlFromInstanceName());
+    prSt.setString(1, instance);
+    return prSt;
+  }
+
+  public PreparedStatement getMaxDsIdFromTableId(Connection connection, int id) throws SQLException {
+
+    PreparedStatement prSt = connection.prepareStatement(getSqlMaxDSidFromTableId(id));
+    return prSt;
+  }
+
+  public PreparedStatement insertToPigJob(String dirname, String maxcountforpigjob, long epochtime1, String title, Connection connection, int id, String status) throws SQLException {
+
+    String pigScriptFile = dirname + "script.pig";
+
+    PreparedStatement prSt = connection.prepareStatement(getSqlinsertToPigJob(id));
+
+    prSt.setString(1, maxcountforpigjob);
+    prSt.setLong(2, epochtime1);
+    prSt.setString(3, pigScriptFile);
+    prSt.setString(4, maxcountforpigjob);
+    prSt.setString(5, status);
+    prSt.setString(6, dirname);
+    prSt.setString(7, title);
+
+    return prSt;
+  }
+
+  public String revertSql(int id, String maxcount) throws SQLException {
+    return getRevSql(id, maxcount);
+  }
+
+  protected String getSqlMaxDSidFromTableId(int id) {
+    return "select MAX(cast(ds_id as integer)) as max from ds_pigjob_" + id + ";";
+  }
+
+  protected String getTableIdSqlFromInstanceName() {
+    return "select id from viewentity where class_name LIKE 'org.apache.ambari.view.pig.resources.jobs.models.PigJob' and view_instance_name=?;";
+  }
+
+  protected String getSqlinsertToPigJob(int id) {
+    return "INSERT INTO ds_pigjob_" + id + " values (?,?,0,'','f','','','admin',0,?,'',?,'','',?,?,'',?);";
+  }
+
+  protected String getRevSql(int id, String maxcount) {
+    return "delete from  ds_pigjob_" + id + " where ds_id='" + maxcount + "';";
+  }
+
+}
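
insertToPigJob binds its seven arguments positionally: the max-count (ds_id), the epoch time, the script.pig path, the max-count again, the status, the job directory, and the title (see the setters in the method body above). A call-site sketch with illustrative values that are not taken from this patch:

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;

public class PigJobInsertSketch {

  static void insertMigratedJob(Connection ambariConn, QuerySetAmbariDB querySet,
                                int tableId) throws SQLException {
    String dirname = "/user/admin/pig/jobs/job-sample/"; // illustrative HDFS job directory
    String maxcount = "17";                              // next free ds_id in DS_PIGJOB_<tableId>
    long epochtime = System.currentTimeMillis() / 1000L;

    // The PreparedStatement itself is built by the dialect-specific getSqlinsertToPigJob(id).
    try (PreparedStatement prSt = querySet.insertToPigJob(
        dirname, maxcount, epochtime, "sample-script", ambariConn, tableId, "COMPLETED")) {
      prSt.executeUpdate();
    }
  }
}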

+ 43 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/savedscriptqueryset/MysqlQuerySetAmbariDB.java

@@ -0,0 +1,43 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.pig.savedscriptqueryset;
+
+/**
+ * Overrides methods with MySQL-specific queries
+ */
+
+public class MysqlQuerySetAmbariDB extends QuerySetAmbariDB {
+
+  @Override
+  protected String getSqlMaxDSidFromTableId(int id) {
+    return "select max( cast(ds_id as unsigned) ) as max from DS_PIGSCRIPT_" + id + ";";
+  }
+  @Override
+  protected String getTableIdSqlFromInstanceName() {
+    return "select id from viewentity where class_name LIKE 'org.apache.ambari.view.pig.resources.scripts.models.PigScript' and view_instance_name=?;";
+  }
+  @Override
+  protected String getSqlinsertToPigScript(int id) {
+    return  "INSERT INTO DS_PIGSCRIPT_" + id + " values (?,'1970-01-17 20:28:55.586000 +00:00:00',0,'admin',?,'','',?);";
+  }
+  @Override
+  protected String getRevSql(int id, String maxcount) {
+    return "delete from  DS_PIGSCRIPT_" + id + " where ds_id='" + maxcount + "';";
+  }
+
+}

+ 41 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/savedscriptqueryset/OracleQuerySetAmbariDB.java

@@ -0,0 +1,41 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.pig.savedscriptqueryset;
+
+/**
+ * Overriding methods for Oracle-specific queries
+ */
+public class OracleQuerySetAmbariDB extends QuerySetAmbariDB {
+
+  @Override
+  protected String getSqlMaxDSidFromTableId(int id) {
+    return "select MAX(cast(ds_id as integer)) as max from ds_pigscript_" + id + "";
+  }
+
+  @Override
+  protected String getTableIdSqlFromInstanceName() {
+    return "select id from viewentity where class_name LIKE 'org.apache.ambari.view.pig.resources.scripts.models.PigScript' and view_instance_name=?";
+  }
+
+  @Override
+  protected String getSqlinsertToPigScript(int id) {
+    return "INSERT INTO ds_pigscript_" + id + " values (?,'1970-01-17 20:28:55.586000 +00:00:00','f','admin',?,'','',?)";
+  }
+
+  @Override
+  protected String getRevSql(int id, String maxcount) {
+    return "delete from  ds_pigscript_" + id + " where ds_id='" + maxcount + "'";
+  }
+
+}

+ 22 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/savedscriptqueryset/PostgressQuerySetAmbariDB.java

@@ -0,0 +1,22 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.pig.savedscriptqueryset;
+
+
+public class PostgressQuerySetAmbariDB extends QuerySetAmbariDB {
+}

+ 70 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/savedscriptqueryset/QuerySetAmbariDB.java

@@ -0,0 +1,70 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.pig.savedscriptqueryset;
+
+import java.sql.Connection;
+import java.sql.PreparedStatement;
+import java.sql.SQLException;
+
+/**
+ * Pig Script prepared statement
+ */
+public abstract class QuerySetAmbariDB {
+
+  public PreparedStatement getTableIdFromInstanceName(Connection connection, String instance) throws SQLException {
+    PreparedStatement prSt = connection.prepareStatement(getTableIdSqlFromInstanceName());
+    prSt.setString(1, instance);
+    return prSt;
+  }
+
+  public PreparedStatement getMaxDsIdFromTableId(Connection connection, int id) throws SQLException {
+
+    PreparedStatement prSt = connection.prepareStatement(getSqlMaxDSidFromTableId(id));
+    return prSt;
+  }
+
+  public PreparedStatement insertToPigScript(Connection connection, int id, String maxcount1, String dirname, String title) throws SQLException {
+
+    PreparedStatement prSt = connection.prepareStatement(getSqlinsertToPigScript(id));
+    prSt.setString(1, maxcount1);
+    prSt.setString(2, dirname);
+    prSt.setString(3, title);
+
+    return prSt;
+  }
+
+  public String revertSql(int id, String maxcount) throws SQLException {
+    return getRevSql(id, maxcount);
+  }
+
+  protected String getSqlMaxDSidFromTableId(int id) {
+    return "select MAX(cast(ds_id as integer)) as max from ds_pigscript_" + id + ";";
+  }
+
+  protected String getTableIdSqlFromInstanceName() {
+    return "select id from viewentity where class_name LIKE 'org.apache.ambari.view.pig.resources.scripts.models.PigScript' and view_instance_name=?;";
+  }
+
+  protected String getSqlinsertToPigScript(int id) {
+    return "INSERT INTO ds_pigscript_" + id + " values (?,'1970-01-17 20:28:55.586000 +00:00:00','f','admin',?,'','',?);";
+  }
+
+  protected String getRevSql(int id, String maxcount) {
+    return "delete from  ds_pigscript_" + id + " where ds_id='" + maxcount + "';";
+  }
+}
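Taken together with the Postgress subclass above, this abstract class is a small template-method pattern: the public methods prepare the statements, while the protected get*Sql() hooks let each database dialect swap in its own SQL text. A minimal usage sketch follows; the driver class, connection URL, and instance name are hypothetical, and only QuerySetAmbariDB and PostgressQuerySetAmbariDB come from this patch. Note that getSqlinsertToPigScript hard-codes the owner ('admin') and a placeholder timestamp.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;

// Hypothetical driver, assumed to live in the same package as QuerySetAmbariDB.
public class PigScriptInsertSketch {
  public static void main(String[] args) throws Exception {
    Connection conn = DriverManager.getConnection(
        "jdbc:postgresql://ambari-db/ambari", "ambari", "secret"); // placeholders
    QuerySetAmbariDB querySet = new PostgressQuerySetAmbariDB();

    // Resolve the dynamic table suffix for this Pig view instance.
    PreparedStatement idSt = querySet.getTableIdFromInstanceName(conn, "PIG_INSTANCE");
    ResultSet idRs = idSt.executeQuery();
    idRs.next();
    int tableId = idRs.getInt("id");

    // Compute the next ds_id and insert the migrated script row.
    ResultSet maxRs = querySet.getMaxDsIdFromTableId(conn, tableId).executeQuery();
    String nextId = maxRs.next() ? String.valueOf(maxRs.getInt("max") + 1) : "1";
    querySet.insertToPigScript(conn, tableId, nextId,
        "/user/admin/pig/scripts/demo.pig", "Migrated script").executeUpdate();

    // revertSql(tableId, nextId) returns the DELETE used to roll the row back.
  }
}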

+ 23 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/hive/historyqueryset/MysqlQuerySet.java

@@ -0,0 +1,23 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.hive.historyqueryset;
+
+
+public class MysqlQuerySet extends QuerySet {
+
+}

+ 61 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/hive/historyqueryset/OracleQuerySet.java

@@ -0,0 +1,61 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.hive.historyqueryset;
+
+
+public class OracleQuerySet extends QuerySet {
+
+  @Override
+  protected String fetchuserIdfromUsernameSql() {
+    return "select id from auth_user where username=?";
+  }
+  @Override
+  protected String fetchHueQueriesNoStartdateNoEnddateSql() {
+    return "select query from beeswax_queryhistory where owner_id =?";
+  }
+  @Override
+  protected String fetchHueQueriesNoStartdateYesEnddateSql() {
+    return "select query from beeswax_queryhistory where owner_id =? AND submission_date <= date(?)";
+  }
+  @Override
+  protected String fetchHueQueriesYesStartdateNoEnddateSql() {
+    return "select query from beeswax_queryhistory where owner_id =? AND submission_date >= date(?)";
+  }
+  @Override
+  protected String fetchHueQueriesYesStartdateYesEnddateSql() {
+    return "select query from beeswax_queryhistory where owner_id =? AND submission_date >= date(?) AND submission_date <= date(?)";
+  }
+  @Override
+  protected String fetchHueQueriesNoStartdateNoEnddateYesallUserSql() {
+    return "select query from beeswax_queryhistory";
+  }
+  @Override
+  protected String fetchHueQueriesNoStartdateYesEnddateYesallUserSql() {
+    return "select query from beeswax_queryhistory where submission_date <= date(?)";
+  }
+  @Override
+  protected String fetchHueQueriesYesStartdateNoEnddateYesallUserSql() {
+    return "select query from beeswax_queryhistory where submission_date >= date(?)";
+
+  }
+  @Override
+  protected String fetchHueQueriesYesStartdateYesEnddateYesallUserSql() {
+    return "select query from beeswax_queryhistory where submission_date >= date(?) AND submission_date <= date(?)";
+
+  }
+}
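The Oracle override returns the same statements as the base class below minus the trailing semicolon, which the Oracle JDBC driver rejects (ORA-00911). A hypothetical selector, not part of this patch, shows how a caller might pick the dialect; the actual migration presumably wires this choice from its view configuration.

// Hypothetical dialect selection by JDBC URL; the class names are from this patch.
public class HueHistoryQuerySetFactory {
  public static QuerySet forJdbcUrl(String url) {
    if (url.startsWith("jdbc:oracle:")) {
      return new OracleQuerySet();      // same SQL, no trailing ';'
    } else if (url.startsWith("jdbc:mysql:")) {
      return new MysqlQuerySet();
    } else if (url.startsWith("jdbc:sqlite:")) {
      return new SqliteQuerySet();
    }
    return new PostgressQuerySet();     // the base SQL works for Postgres
  }
}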

+ 22 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/hive/historyqueryset/PostgressQuerySet.java

@@ -0,0 +1,22 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.hive.historyqueryset;
+
+
+public class PostgressQuerySet extends QuerySet {
+}

+ 130 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/hive/historyqueryset/QuerySet.java

@@ -0,0 +1,130 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.hive.historyqueryset;
+
+import java.sql.Connection;
+import java.sql.PreparedStatement;
+import java.sql.SQLException;
+
+
+public abstract class QuerySet {
+
+
+  public PreparedStatement getUseridfromUserName(Connection connection, String username) throws SQLException {
+    PreparedStatement prSt = connection.prepareStatement(fetchuserIdfromUsernameSql());
+    prSt.setString(1, username);
+    return prSt;
+  }
+
+  public PreparedStatement getQueriesNoStartDateNoEndDate(Connection connection, int id) throws SQLException {
+    PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesNoStartdateNoEnddateSql());
+    prSt.setInt(1, id);
+    return prSt;
+  }
+
+  public PreparedStatement getQueriesNoStartDateYesEndDate(Connection connection, int id, String enddate) throws SQLException {
+    PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesNoStartdateYesEnddateSql());
+    prSt.setInt(1, id);
+    prSt.setString(2, enddate);
+    return prSt;
+  }
+
+  public PreparedStatement getQueriesYesStartDateNoEndDate(Connection connection, int id, String startdate) throws SQLException {
+    PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesYesStartdateNoEnddateSql());
+    prSt.setInt(1, id);
+    prSt.setString(2, startdate);
+    return prSt;
+  }
+
+  public PreparedStatement getQueriesYesStartDateYesEndDate(Connection connection, int id, String startdate, String endate) throws SQLException {
+    PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesYesStartdateYesEnddateSql());
+    prSt.setInt(1, id);
+    prSt.setString(2, startdate);
+    prSt.setString(3, endate);
+    return prSt;
+  }
+
+  /**
+   * Variants that fetch queries for all users.
+   */
+  public PreparedStatement getQueriesNoStartDateNoEndDateAllUser(Connection connection) throws SQLException {
+    PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesNoStartdateNoEnddateYesallUserSql());
+    return prSt;
+  }
+
+  public PreparedStatement getQueriesNoStartDateYesEndDateAllUser(Connection connection, String enddate) throws SQLException {
+    PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesNoStartdateYesEnddateYesallUserSql());
+    prSt.setString(1, enddate);
+    return prSt;
+  }
+
+  public PreparedStatement getQueriesYesStartDateNoEndDateAllUser(Connection connection, String startdate) throws SQLException {
+    PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesYesStartdateNoEnddateYesallUserSql());
+    prSt.setString(1, startdate);
+    return prSt;
+  }
+
+  public PreparedStatement getQueriesYesStartDateYesEndDateAllUser(Connection connection, String startdate, String endate) throws SQLException {
+    PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesYesStartdateYesEnddateYesallUserSql());
+    prSt.setString(1, startdate);
+    prSt.setString(2, endate);
+    return prSt;
+  }
+
+
+  protected String fetchuserIdfromUsernameSql() {
+    return "select id from auth_user where username=?;";
+
+  }
+
+  protected String fetchHueQueriesNoStartdateNoEnddateSql() {
+    return "select query from beeswax_queryhistory where owner_id =?;";
+  }
+
+  protected String fetchHueQueriesNoStartdateYesEnddateSql() {
+    return "select query from beeswax_queryhistory where owner_id =? AND submission_date <= date(?);";
+  }
+
+  protected String fetchHueQueriesYesStartdateNoEnddateSql() {
+    return "select query from beeswax_queryhistory where owner_id =? AND submission_date >= date(?);";
+  }
+
+  protected String fetchHueQueriesYesStartdateYesEnddateSql() {
+    return "select query from beeswax_queryhistory where owner_id =? AND submission_date >= date(?) AND submission_date <= date(?);";
+  }
+
+  protected String fetchHueQueriesNoStartdateNoEnddateYesallUserSql() {
+    return "select query from beeswax_queryhistory;";
+  }
+
+  protected String fetchHueQueriesNoStartdateYesEnddateYesallUserSql() {
+    return "select query from beeswax_queryhistory where submission_date <= date(?);";
+  }
+
+  protected String fetchHueQueriesYesStartdateNoEnddateYesallUserSql() {
+    return "select query from beeswax_queryhistory where submission_date >= date(?);";
+
+  }
+
+  protected String fetchHueQueriesYesStartdateYesEnddateYesallUserSql() {
+    return "select query from beeswax_queryhistory where submission_date >= date(?) AND submission_date <= date(?);";
+
+  }
+
+
+}
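A short, hypothetical caller for the owner-scoped date-range variant; the user name, dates, and connection handling are illustrative only. Hue's beeswax_queryhistory stores submission_date as a timestamp, so the date(?) comparisons bound whole days.

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;

// Hypothetical helper, assumed to live in the same package as QuerySet.
public class HiveHistoryFetchSketch {
  static void printUserHistory(Connection hueConn, String hueUser) throws SQLException {
    QuerySet qs = new MysqlQuerySet();                        // dialect from this patch
    ResultSet userRs = qs.getUseridfromUserName(hueConn, hueUser).executeQuery();
    if (!userRs.next()) {
      return;                                                 // unknown Hue user
    }
    int ownerId = userRs.getInt(1);
    ResultSet queries = qs.getQueriesYesStartDateYesEndDate(
        hueConn, ownerId, "2016-01-01", "2016-06-01").executeQuery();
    while (queries.next()) {
      System.out.println(queries.getString("query"));         // raw HiveQL text
    }
  }
}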

+ 22 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/hive/historyqueryset/SqliteQuerySet.java

@@ -0,0 +1,22 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.hive.historyqueryset;
+
+
+public class SqliteQuerySet extends QuerySet {
+}

+ 23 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/hive/savedqueryset/MysqlQuerySet.java

@@ -0,0 +1,23 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.hive.savedqueryset;
+
+
+public class MysqlQuerySet extends QuerySet {
+}

+ 65 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/hive/savedqueryset/OracleQuerySet.java

@@ -0,0 +1,65 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.hive.savedqueryset;
+
+
+public class OracleQuerySet extends QuerySet {
+
+  @Override
+  protected String fetchuserIdfromUsernameSql() {
+    return "select id from auth_user where username=?";
+
+  }
+  @Override
+  protected String fetchHueQueriesNoStartdateNoEnddateSql() {
+    return "select data,name,owner_id from beeswax_savedquery where name!='My saved query'and owner_id =?";
+  }
+  @Override
+  protected String fetchHueQueriesNoStartdateYesEnddateSql() {
+    return "select data,name,owner_id from beeswax_savedquery where name!='My saved query'and owner_id =? AND mtime <= date(?)";
+  }
+  @Override
+  protected String fetchHueQueriesYesStartdateNoEnddateSql() {
+    return "select data,name,owner_id from beeswax_savedquery where name!='My saved query'and owner_id =? AND mtime >= date(?)";
+
+  }
+  @Override
+  protected String fetchHueQueriesYesStartdateYesEnddateSql() {
+    return "select data,name,owner_id from beeswax_savedquery where name!='My saved query'and owner_id =? AND mtime >= date(?) AND mtime <= date(?)";
+
+  }
+  @Override
+  protected String fetchHueQueriesNoStartdateNoEnddateYesallUserSql() {
+    return "select data,name,owner_id from beeswax_savedquery where name!='My saved query'";
+  }
+  @Override
+  protected String fetchHueQueriesNoStartdateYesEnddateYesallUserSql() {
+    return "select data,name,owner_id from beeswax_savedquery where name!='My saved query' AND mtime <= date(?)";
+
+  }
+  @Override
+  protected String fetchHueQueriesYesStartdateNoEnddateYesallUserSql() {
+    return "select data,name,owner_id from beeswax_savedquery where name!='My saved query' AND mtime >= date(?)";
+
+  }
+  @Override
+  protected String fetchHueQueriesYesStartdateYesEnddateYesallUserSql() {
+    return "select data,name,owner_id from beeswax_savedquery where name!='My saved query' AND mtime >= date(?) AND mtime <= date(?)";
+
+  }
+}

+ 22 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/hive/savedqueryset/PostgressQuerySet.java

@@ -0,0 +1,22 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.hive.savedqueryset;
+
+
+public class PostgressQuerySet extends QuerySet {
+}

+ 134 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/hive/savedqueryset/QuerySet.java

@@ -0,0 +1,134 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.hive.savedqueryset;
+
+import java.sql.Connection;
+import java.sql.PreparedStatement;
+import java.sql.SQLException;
+
+
+public abstract class QuerySet {
+
+
+  public PreparedStatement getUseridfromUserName(Connection connection, String username) throws SQLException {
+    PreparedStatement prSt = connection.prepareStatement(fetchuserIdfromUsernameSql());
+    prSt.setString(1, username);
+    return prSt;
+  }
+
+  public PreparedStatement getQueriesNoStartDateNoEndDate(Connection connection, int id) throws SQLException {
+    PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesNoStartdateNoEnddateSql());
+    prSt.setInt(1, id);
+    return prSt;
+  }
+
+  public PreparedStatement getQueriesNoStartDateYesEndDate(Connection connection, int id, String enddate) throws SQLException {
+    PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesNoStartdateYesEnddateSql());
+    prSt.setInt(1, id);
+    prSt.setString(2, enddate);
+    return prSt;
+  }
+
+  public PreparedStatement getQueriesYesStartDateNoEndDate(Connection connection, int id, String startdate) throws SQLException {
+    PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesYesStartdateNoEnddateSql());
+    prSt.setInt(1, id);
+    prSt.setString(2, startdate);
+    return prSt;
+  }
+
+  public PreparedStatement getQueriesYesStartDateYesEndDate(Connection connection, int id, String startdate, String endate) throws SQLException {
+    PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesYesStartdateYesEnddateSql());
+    prSt.setInt(1, id);
+    prSt.setString(2, startdate);
+    prSt.setString(3, endate);
+    return prSt;
+  }
+
+  /**
+   * Variants that fetch queries for all users.
+   */
+  public PreparedStatement getQueriesNoStartDateNoEndDateAllUser(Connection connection) throws SQLException {
+    PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesNoStartdateNoEnddateYesallUserSql());
+    return prSt;
+  }
+
+  public PreparedStatement getQueriesNoStartDateYesEndDateAllUser(Connection connection, String enddate) throws SQLException {
+    PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesNoStartdateYesEnddateYesallUserSql());
+    prSt.setString(1, enddate);
+    return prSt;
+  }
+
+  public PreparedStatement getQueriesYesStartDateNoEndDateAllUser(Connection connection, String startdate) throws SQLException {
+    PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesYesStartdateNoEnddateYesallUserSql());
+    prSt.setString(1, startdate);
+    return prSt;
+  }
+
+  public PreparedStatement getQueriesYesStartDateYesEndDateAllUser(Connection connection, String startdate, String endate) throws SQLException {
+    PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesYesStartdateYesEnddateYesallUserSql());
+    prSt.setString(1, startdate);
+    prSt.setString(2, endate);
+    return prSt;
+  }
+
+
+  protected String fetchuserIdfromUsernameSql() {
+    return "select id from auth_user where username=?;";
+
+  }
+
+  protected String fetchHueQueriesNoStartdateNoEnddateSql() {
+    return "select data,name,owner_id from beeswax_savedquery where name!='My saved query'and owner_id =?;";
+  }
+
+  protected String fetchHueQueriesNoStartdateYesEnddateSql() {
+    return "select data,name,owner_id from beeswax_savedquery where name!='My saved query'and owner_id =? AND mtime <= date(?);";
+  }
+
+  protected String fetchHueQueriesYesStartdateNoEnddateSql() {
+    return "select data,name,owner_id from beeswax_savedquery where name!='My saved query'and owner_id =? AND mtime >= date(?);";
+
+  }
+
+  protected String fetchHueQueriesYesStartdateYesEnddateSql() {
+    return "select data,name,owner_id from beeswax_savedquery where name!='My saved query'and owner_id =? AND mtime >= date(?) AND mtime <= date(?);";
+
+  }
+
+  protected String fetchHueQueriesNoStartdateNoEnddateYesallUserSql() {
+    return "select data,name,owner_id from beeswax_savedquery where name!='My saved query';";
+  }
+
+  protected String fetchHueQueriesNoStartdateYesEnddateYesallUserSql() {
+    return "select data,name,owner_id from beeswax_savedquery where name!='My saved query' AND mtime <= date(?);";
+
+  }
+
+  protected String fetchHueQueriesYesStartdateNoEnddateYesallUserSql() {
+    return "select data,name,owner_id from beeswax_savedquery where name!='My saved query' AND mtime >= date(?);";
+
+  }
+
+  protected String fetchHueQueriesYesStartdateYesEnddateYesallUserSql() {
+    return "select data,name,owner_id from beeswax_savedquery where name!='My saved query' AND mtime >= date(?) AND mtime <= date(?);";
+
+  }
+
+
+}
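This saved-query variant mirrors the history set but projects data, name, owner_id and filters out Hue's default 'My saved query' entries. The *AllUser methods skip the per-user lookup entirely; below is a hypothetical sketch of a cut-off-date dump (class name and date are placeholders, assumed same package as QuerySet).

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;

// Hypothetical: list every user's saved queries older than a cut-off date.
public class SavedQueryDumpSketch {
  static void dumpBefore(Connection hueConn, String endDate) throws SQLException {
    QuerySet qs = new PostgressQuerySet();                    // dialect from this patch
    ResultSet rs = qs.getQueriesNoStartDateYesEndDateAllUser(hueConn, endDate).executeQuery();
    while (rs.next()) {
      // 'data' carries the saved-query payload; 'name' is its title in Hue.
      System.out.println(rs.getString("name") + " (owner " + rs.getInt("owner_id") + ")");
    }
  }
}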

+ 22 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/hive/savedqueryset/SqliteQuerySet.java

@@ -0,0 +1,22 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.hive.savedqueryset;
+
+
+public class SqliteQuerySet extends QuerySet {
+}

+ 22 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/pig/jobqueryset/MysqlQuerySet.java

@@ -0,0 +1,22 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.pig.jobqueryset;
+
+
+public class MysqlQuerySet extends QuerySet {
+}

+ 65 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/pig/jobqueryset/OracleQuerySet.java

@@ -0,0 +1,65 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.pig.jobqueryset;
+
+
+public class OracleQuerySet extends QuerySet {
+  @Override
+  protected String fetchuserIdfromUsernameSql() {
+    return   "select id from auth_user where username=?";
+
+  }
+  @Override
+  protected String fetchHueQueriesNoStartdateNoEnddateSql() {
+    return "select status,start_time,statusdir,script_title,user_id from pig_job where user_id =?";
+  }
+  @Override
+  protected String fetchHueQueriesNoStartdateYesEnddateSql() {
+    return "select status,start_time,statusdir,script_title,user_id from pig_job where user_id =?  AND start_time <= date(?)";
+
+  }
+  @Override
+  protected String fetchHueQueriesYesStartdateNoEnddateSql() {
+    return "select status,start_time,statusdir,script_title,user_id from pig_job where user_id =? AND start_time >= date(?)";
+
+  }
+  @Override
+  protected String fetchHueQueriesYesStartdateYesEnddateSql() {
+    return "select status,start_time,statusdir,script_title,user_id from pig_job where user_id =? AND start_time >= date(?) AND start_time <= date(?)";
+
+  }
+  @Override
+  protected String fetchHueQueriesNoStartdateNoEnddateYesallUserSql() {
+    return "select status,start_time,statusdir,script_title,user_id from pig_job ";
+  }
+  @Override
+  protected String fetchHueQueriesNoStartdateYesEnddateYesallUserSql() {
+    return "select status,start_time,statusdir,script_title,user_id from pig_job where  start_time <= date(?)";
+
+  }
+  @Override
+  protected String fetchHueQueriesYesStartdateNoEnddateYesallUserSql() {
+    return "select status,start_time,statusdir,script_title,user_id from pig_job where  start_time >= date(?)";
+
+  }
+  @Override
+  protected String fetchHueQueriesYesStartdateYesEnddateYesallUserSql() {
+    return "select status,start_time,statusdir,script_title,user_id from pig_job where  start_time >= date(?) AND start_time <= date(?)";
+
+  }
+}

+ 22 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/pig/jobqueryset/PostgressQuerySet.java

@@ -0,0 +1,22 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.pig.jobqueryset;
+
+
+public class PostgressQuerySet extends QuerySet {
+}

+ 132 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/pig/jobqueryset/QuerySet.java

@@ -0,0 +1,132 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.pig.jobqueryset;
+
+import java.sql.Connection;
+import java.sql.PreparedStatement;
+import java.sql.SQLException;
+
+
+public abstract class QuerySet {
+
+  public PreparedStatement getUseridfromUserName(Connection connection, String username) throws SQLException {
+    PreparedStatement prSt = connection.prepareStatement(fetchuserIdfromUsernameSql());
+    prSt.setString(1, username);
+    return prSt;
+  }
+
+  public PreparedStatement getQueriesNoStartDateNoEndDate(Connection connection, int id) throws SQLException {
+    PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesNoStartdateNoEnddateSql());
+    prSt.setInt(1, id);
+    return prSt;
+  }
+
+  public PreparedStatement getQueriesNoStartDateYesEndDate(Connection connection, int id, String enddate) throws SQLException {
+    PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesNoStartdateYesEnddateSql());
+    prSt.setInt(1, id);
+    prSt.setString(2, enddate);
+    return prSt;
+  }
+
+  public PreparedStatement getQueriesYesStartDateNoEndDate(Connection connection, int id, String startdate) throws SQLException {
+    PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesYesStartdateNoEnddateSql());
+    prSt.setInt(1, id);
+    prSt.setString(2, startdate);
+    return prSt;
+  }
+
+  public PreparedStatement getQueriesYesStartDateYesEndDate(Connection connection, int id, String startdate, String endate) throws SQLException {
+    PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesYesStartdateYesEnddateSql());
+    prSt.setInt(1, id);
+    prSt.setString(2, startdate);
+    prSt.setString(3, endate);
+    return prSt;
+  }
+
+  /**
+   * Variants that fetch jobs for all users.
+   */
+  public PreparedStatement getQueriesNoStartDateNoEndDateAllUser(Connection connection) throws SQLException {
+    PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesNoStartdateNoEnddateYesallUserSql());
+    return prSt;
+  }
+
+  public PreparedStatement getQueriesNoStartDateYesEndDateAllUser(Connection connection, String enddate) throws SQLException {
+    PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesNoStartdateYesEnddateYesallUserSql());
+    prSt.setString(1, enddate);
+    return prSt;
+  }
+
+  public PreparedStatement getQueriesYesStartDateNoEndDateAllUser(Connection connection, String startdate) throws SQLException {
+    PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesYesStartdateNoEnddateYesallUserSql());
+    prSt.setString(1, startdate);
+    return prSt;
+  }
+
+  public PreparedStatement getQueriesYesStartDateYesEndDateAllUser(Connection connection, String startdate, String endate) throws SQLException {
+    PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesYesStartdateYesEnddateYesallUserSql());
+    prSt.setString(1, startdate);
+    prSt.setString(2, endate);
+    return prSt;
+  }
+
+  protected String fetchuserIdfromUsernameSql() {
+    return "select id from auth_user where username=?;";
+
+  }
+
+  protected String fetchHueQueriesNoStartdateNoEnddateSql() {
+    return "select status,start_time,statusdir,script_title,user_id from pig_job where user_id =?;";
+  }
+
+  protected String fetchHueQueriesNoStartdateYesEnddateSql() {
+    return "select status,start_time,statusdir,script_title,user_id from pig_job where user_id =?  AND start_time <= date(?);";
+
+  }
+
+  protected String fetchHueQueriesYesStartdateNoEnddateSql() {
+    return "select status,start_time,statusdir,script_title,user_id from pig_job where user_id =? AND start_time >= date(?);";
+
+  }
+
+  protected String fetchHueQueriesYesStartdateYesEnddateSql() {
+    return "select status,start_time,statusdir,script_title,user_id from pig_job where user_id =? AND start_time >= date(?) AND start_time <= date(?);";
+
+  }
+
+  protected String fetchHueQueriesNoStartdateNoEnddateYesallUserSql() {
+    return "select status,start_time,statusdir,script_title,user_id from pig_job ;";
+  }
+
+  protected String fetchHueQueriesNoStartdateYesEnddateYesallUserSql() {
+    return "select status,start_time,statusdir,script_title,user_id from pig_job where  start_time <= date(?);";
+
+  }
+
+  protected String fetchHueQueriesYesStartdateNoEnddateYesallUserSql() {
+    return "select status,start_time,statusdir,script_title,user_id from pig_job where  start_time >= date(?);";
+
+  }
+
+  protected String fetchHueQueriesYesStartdateYesEnddateYesallUserSql() {
+    return "select status,start_time,statusdir,script_title,user_id from pig_job where  start_time >= date(?) AND start_time <= date(?);";
+
+  }
+
+
+}
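The Pig job set projects a different column list (status, start_time, statusdir, script_title, user_id); statusdir is the directory holding the job's output and logs. A hypothetical reader, with column handling following the SELECT list above and everything else illustrative:

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;

// Hypothetical: walk one user's Pig jobs and report where their output lives.
public class PigJobFetchSketch {
  static void printJobs(Connection hueConn, int hueUserId) throws SQLException {
    QuerySet qs = new MysqlQuerySet();                         // dialect from this patch
    ResultSet rs = qs.getQueriesNoStartDateNoEndDate(hueConn, hueUserId).executeQuery();
    while (rs.next()) {
      System.out.printf("%s  %s  %s%n",
          rs.getString("script_title"),
          rs.getString("status"),
          rs.getString("statusdir"));                          // job status directory
    }
  }
}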

+ 22 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/pig/jobqueryset/SqliteQuerySet.java

@@ -0,0 +1,22 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.pig.jobqueryset;
+
+
+public class SqliteQuerySet extends QuerySet {
+}

+ 22 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/pig/savedscriptqueryset/MysqlQuerySet.java

@@ -0,0 +1,22 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.pig.savedscriptqueryset;
+
+
+public class MysqlQuerySet extends QuerySet {
+}

+ 60 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/pig/savedscriptqueryset/OracleQuerySet.java

@@ -0,0 +1,60 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.pig.savedscriptqueryset;
+
+
+public class OracleQuerySet extends QuerySet {
+
+  @Override
+  protected String fetchuserIdfromUsernameSql() {
+    return "select id from auth_user where username=?";
+  }
+  @Override
+  protected String fetchHueQueriesNoStartdateNoEnddateSql() {
+    return "select pig_script,title,date_created,saved,arguments from pig_pigscript where saved=1 AND user_id =?";
+  }
+  @Override
+  protected String fetchHueQueriesNoStartdateYesEnddateSql() {
+    return "select pig_script,title,date_created,saved,arguments from pig_pigscript where saved=1 AND user_id =? AND  date_created <= date(?)";
+  }
+  @Override
+  protected String fetchHueQueriesYesStartdateNoEnddateSql() {
+    return "select pig_script,title,date_created,saved,arguments from pig_pigscript where saved=1 AND user_id =? AND date_created >= date(?)";
+  }
+  @Override
+  protected String fetchHueQueriesYesStartdateYesEnddateSql() {
+    return "select pig_script,title,date_created,saved,arguments from pig_pigscript where saved=1 AND user_id =? AND date_created >= date(?) AND date_created <= date(?)";
+  }
+  @Override
+  protected String fetchHueQueriesNoStartdateNoEnddateYesallUserSql() {
+    return "select pig_script,title,date_created,saved,arguments from pig_pigscript where saved=1 ";
+  }
+  @Override
+  protected String fetchHueQueriesNoStartdateYesEnddateYesallUserSql() {
+    return "select pig_script,title,date_created,saved,arguments from pig_pigscript where saved=1  AND  date_created <= date(?)";
+  }
+  @Override
+  protected String fetchHueQueriesYesStartdateNoEnddateYesallUserSql() {
+    return "select pig_script,title,date_created,saved,arguments from pig_pigscript where saved=1  AND date_created >= date(?)";
+  }
+  @Override
+  protected String fetchHueQueriesYesStartdateYesEnddateYesallUserSql() {
+    return "select pig_script,title,date_created,saved,arguments from pig_pigscript where saved=1  AND date_created >= date(?) AND date_created <= date(?)";
+  }
+
+}

+ 67 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/pig/savedscriptqueryset/PostgressQuerySet.java

@@ -0,0 +1,67 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.pig.savedscriptqueryset;
+
+
+public class PostgressQuerySet extends QuerySet {
+
+  @Override
+  protected String fetchuserIdfromUsernameSql() {
+    return "select id from auth_user where username=?;";
+
+  }
+  @Override
+  protected String fetchHueQueriesNoStartdateNoEnddateSql() {
+    return "select pig_script,title,date_created,saved,arguments from pig_pigscript where saved='true' AND user_id =?;";
+  }
+  @Override
+  protected String fetchHueQueriesNoStartdateYesEnddateSql() {
+    return "select pig_script,title,date_created,saved,arguments from pig_pigscript where saved='true' AND user_id =? AND  date_created <= date(?);";
+
+  }
+  @Override
+  protected String fetchHueQueriesYesStartdateNoEnddateSql() {
+    return "select pig_script,title,date_created,saved,arguments from pig_pigscript where saved='true' AND user_id =? AND date_created >= date(?);";
+
+
+  }
+  @Override
+  protected String fetchHueQueriesYesStartdateYesEnddateSql() {
+    return "select pig_script,title,date_created,saved,arguments from pig_pigscript where saved='true' AND user_id =? AND date_created >= date(?) AND date_created <= date(?);";
+
+  }
+  @Override
+  protected String fetchHueQueriesNoStartdateNoEnddateYesallUserSql() {
+    return "select pig_script,title,date_created,saved,arguments from pig_pigscript where saved='true' ;";
+  }
+  @Override
+  protected String fetchHueQueriesNoStartdateYesEnddateYesallUserSql() {
+    return "select pig_script,title,date_created,saved,arguments from pig_pigscript where saved='true'  AND  date_created <= date(?);";
+
+  }
+  @Override
+  protected String fetchHueQueriesYesStartdateNoEnddateYesallUserSql() {
+    return "select pig_script,title,date_created,saved,arguments from pig_pigscript where saved='true'  AND date_created >= date(?);";
+
+  }
+  @Override
+  protected String fetchHueQueriesYesStartdateYesEnddateYesallUserSql() {
+    return "select pig_script,title,date_created,saved,arguments from pig_pigscript where saved='true'  AND date_created >= date(?) AND date_created <= date(?);";
+
+  }
+}

+ 135 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/pig/savedscriptqueryset/QuerySet.java

@@ -0,0 +1,135 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.pig.savedscriptqueryset;
+
+import java.sql.Connection;
+import java.sql.PreparedStatement;
+import java.sql.SQLException;
+
+
+public abstract class QuerySet {
+
+  public PreparedStatement getUseridfromUserName(Connection connection,String username) throws SQLException {
+    PreparedStatement prSt = connection.prepareStatement(fetchuserIdfromUsernameSql());
+    prSt.setString(1, username);
+    return prSt;
+  }
+
+  public PreparedStatement getQueriesNoStartDateNoEndDate(Connection connection,int id) throws SQLException {
+    PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesNoStartdateNoEnddateSql());
+    prSt.setInt(1, id);
+    return prSt;
+  }
+
+  public PreparedStatement getQueriesNoStartDateYesEndDate(Connection connection,int id,String enddate) throws SQLException {
+    PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesNoStartdateYesEnddateSql());
+    prSt.setInt(1, id);
+    prSt.setString(2, enddate);
+    return prSt;
+  }
+
+  public PreparedStatement getQueriesYesStartDateNoEndDate(Connection connection,int id,String startdate) throws SQLException {
+    PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesYesStartdateNoEnddateSql());
+    prSt.setInt(1, id);
+    prSt.setString(2, startdate);
+    return prSt;
+  }
+
+  public PreparedStatement getQueriesYesStartDateYesEndDate(Connection connection,int id,String startdate,String endate) throws SQLException {
+    PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesYesStartdateYesEnddateSql());
+    prSt.setInt(1, id);
+    prSt.setString(2, startdate);
+    prSt.setString(3, endate);
+    return prSt;
+  }
+
+  /**
+   * Variants that fetch scripts for all users.
+   */
+  public PreparedStatement getQueriesNoStartDateNoEndDateAllUser(Connection connection) throws SQLException {
+    PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesNoStartdateNoEnddateYesallUserSql());
+    return prSt;
+  }
+
+  public PreparedStatement getQueriesNoStartDateYesEndDateAllUser(Connection connection,String enddate) throws SQLException {
+    PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesNoStartdateYesEnddateYesallUserSql());
+    prSt.setString(1, enddate);
+    return prSt;
+  }
+
+  public PreparedStatement getQueriesYesStartDateNoEndDateAllUser(Connection connection,String startdate) throws SQLException {
+    PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesYesStartdateNoEnddateYesallUserSql());
+    prSt.setString(1, startdate);
+    return prSt;
+  }
+
+  public PreparedStatement getQueriesYesStartDateYesEndDateAllUser(Connection connection,String startdate,String endate) throws SQLException {
+    PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesYesStartdateYesEnddateYesallUserSql());
+    prSt.setString(1, startdate);
+    prSt.setString(2, endate);
+    return prSt;
+  }
+
+
+  protected String fetchuserIdfromUsernameSql() {
+    return "select id from auth_user where username=?;";
+
+  }
+
+  protected String fetchHueQueriesNoStartdateNoEnddateSql() {
+    return "select pig_script,title,date_created,saved,arguments from pig_pigscript where saved=1 AND user_id =?;";
+  }
+
+  protected String fetchHueQueriesNoStartdateYesEnddateSql() {
+    return "select pig_script,title,date_created,saved,arguments from pig_pigscript where saved=1 AND user_id =? AND  date_created <= date(?);";
+
+  }
+
+  protected String fetchHueQueriesYesStartdateNoEnddateSql() {
+    return "select pig_script,title,date_created,saved,arguments from pig_pigscript where saved=1 AND user_id =? AND date_created >= date(?);";
+
+
+  }
+
+  protected String fetchHueQueriesYesStartdateYesEnddateSql() {
+    return "select pig_script,title,date_created,saved,arguments from pig_pigscript where saved=1 AND user_id =? AND date_created >= date(?) AND date_created <= date(?);";
+
+  }
+
+  protected String fetchHueQueriesNoStartdateNoEnddateYesallUserSql() {
+    return "select pig_script,title,date_created,saved,arguments from pig_pigscript where saved=1 ;";
+  }
+
+  protected String fetchHueQueriesNoStartdateYesEnddateYesallUserSql() {
+    return "select pig_script,title,date_created,saved,arguments from pig_pigscript where saved=1  AND  date_created <= date(?);";
+
+  }
+
+  protected String fetchHueQueriesYesStartdateNoEnddateYesallUserSql() {
+    return "select pig_script,title,date_created,saved,arguments from pig_pigscript where saved=1  AND date_created >= date(?);";
+
+  }
+
+  protected String fetchHueQueriesYesStartdateYesEnddateYesallUserSql() {
+    return "select pig_script,title,date_created,saved,arguments from pig_pigscript where saved=1  AND date_created >= date(?) AND date_created <= date(?);";
+
+  }
+
+
+
+}
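One dialect wrinkle worth noting: the base class (used for MySQL and SQLite) and the Oracle override test saved=1, while the Postgress override tests saved='true', presumably matching how each engine stores the boolean column. A caller is otherwise identical across dialects; a hypothetical sketch (assumed same package as QuerySet):

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;

// Hypothetical: fetch one user's saved Pig scripts regardless of date.
public class SavedScriptFetchSketch {
  static void printScripts(Connection hueConn, int hueUserId) throws SQLException {
    QuerySet qs = new SqliteQuerySet();                        // dialect from this patch
    ResultSet rs = qs.getQueriesNoStartDateNoEndDate(hueConn, hueUserId).executeQuery();
    while (rs.next()) {
      // pig_script holds the script body; arguments its saved launch arguments.
      System.out.println(rs.getString("title") + ": "
          + rs.getString("pig_script").length() + " chars");
    }
  }
}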

+ 24 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/pig/savedscriptqueryset/SqliteQuerySet.java

@@ -0,0 +1,24 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.pig.savedscriptqueryset;
+
+
+public class SqliteQuerySet extends QuerySet {
+
+
+}

+ 24 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/userdetails/MysqlQuerySet.java

@@ -0,0 +1,24 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.userdetails;
+
+
+public class MysqlQuerySet extends QuerySet {
+
+
+}

+ 28 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/userdetails/OracleQuerySet.java

@@ -0,0 +1,28 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.userdetails;
+
+
+public class OracleQuerySet extends QuerySet {
+
+
+  @Override
+  protected String fetchUserDetailSql() {
+    return "select * from auth_user";
+  }
+}

+ 22 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/userdetails/PostgressQuerySet.java

@@ -0,0 +1,22 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.userdetails;
+
+
+public class PostgressQuerySet extends QuerySet {
+}

+ 42 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/userdetails/QuerySet.java

@@ -0,0 +1,42 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.userdetails;
+
+import java.sql.Connection;
+import java.sql.PreparedStatement;
+import java.sql.SQLException;
+
+
+public abstract class QuerySet {
+
+
+  public PreparedStatement getUserDetails(Connection connection) throws SQLException {
+    PreparedStatement prSt = connection.prepareStatement(fetchUserDetailSql());
+    return prSt;
+  }
+
+
+
+  protected String fetchUserDetailSql() {
+    return "select * from auth_user;";
+  }
+
+
+
+
+}
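The user-details set is the simplest of the family: a single unfiltered SELECT over Hue's auth_user table (Django's user model, whose columns include id and username). A hypothetical listing sketch, assumed to live in the same package:

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;

// Hypothetical: enumerate Hue users so an operator can map them to Ambari users.
public class HueUserListSketch {
  static void listUsers(Connection hueConn) throws SQLException {
    ResultSet rs = new MysqlQuerySet().getUserDetails(hueConn).executeQuery();
    while (rs.next()) {
      System.out.println(rs.getInt("id") + "\t" + rs.getString("username"));
    }
  }
}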

+ 22 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/userdetails/SqliteQuerySet.java

@@ -0,0 +1,22 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.userdetails;
+
+
+public class SqliteQuerySet extends QuerySet {
+}

+ 85 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/CreateJobId.java

@@ -0,0 +1,85 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.huetoambarimigration.migration;
+
+import com.google.inject.Inject;
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.JobReturnIdModel;
+import org.apache.ambari.view.huetoambarimigration.resources.PersonalCRUDResourceManager;
+import org.apache.ambari.view.huetoambarimigration.resources.scripts.MigrationResourceManager;
+import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.MigrationResponse;
+import org.json.simple.JSONObject;
+
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+import javax.ws.rs.Produces;
+import javax.ws.rs.QueryParam;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import java.io.IOException;
+import java.lang.reflect.InvocationTargetException;
+
+@Path("/returnjobids")
+
+public class CreateJobId {
+
+
+
+
+  @Inject
+  ViewContext view;
+
+
+  protected MigrationResourceManager resourceManager = null;
+
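+  // Lazily create the per-user resource manager that stores migration job records.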
+  public synchronized PersonalCRUDResourceManager<MigrationResponse> getResourceManager() {
+    if (resourceManager == null) {
+      resourceManager = new MigrationResourceManager(view);
+    }
+    return resourceManager;
+  }
+
+  @GET
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response getIdOfMigrationObject(@QueryParam("username") String username,@QueryParam("instance") String instance,@QueryParam("startdate") String startdate,@QueryParam("enddate") String enddate,@QueryParam("jobtype") String jobtype) throws IOException, InvocationTargetException, IllegalAccessException {
+
+    System.out.println("username is "+username+ "instance is "+ instance);
+    MigrationResponse migrationresult=new MigrationResponse();
+
+    migrationresult.setIntanceName(instance);
+    migrationresult.setUserNameofhue(username);
+    migrationresult.setProgressPercentage(0);
+    migrationresult.setJobtype(jobtype);
+
+    getResourceManager().create(migrationresult);
+
+    JSONObject response = new JSONObject();
+
+    JobReturnIdModel model=new JobReturnIdModel();
+
+    model.setIdforJob(migrationresult.getId());
+    model.setId(0);
+
+    response.put("returnjobid",model);
+
+    return Response.ok(response).build();
+
+
+  }
+
+}

+ 84 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/CreateJobIdRevertChange.java

@@ -0,0 +1,84 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.huetoambarimigration.migration;
+
+import com.google.inject.Inject;
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.JobReturnIdModel;
+import org.apache.ambari.view.huetoambarimigration.resources.PersonalCRUDResourceManager;
+import org.apache.ambari.view.huetoambarimigration.resources.scripts.MigrationResourceManager;
+import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.MigrationResponse;
+import org.json.simple.JSONObject;
+
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+import javax.ws.rs.Produces;
+import javax.ws.rs.QueryParam;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import java.io.IOException;
+import java.lang.reflect.InvocationTargetException;
+
+@Path("/returnjobidforrevertchanges")
+
+public class CreateJobIdRevertChange {
+
+
+
+
+  @Inject
+  ViewContext view;
+
+
+  protected MigrationResourceManager resourceManager = null;
+
+  public synchronized PersonalCRUDResourceManager<MigrationResponse> getResourceManager() {
+    if (resourceManager == null) {
+      resourceManager = new MigrationResourceManager(view);
+    }
+    return resourceManager;
+  }
+
+  @GET
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response getIdOfMigrationObject(@QueryParam("instance") String instance,@QueryParam("revertdate") String revertdate) throws IOException, InvocationTargetException, IllegalAccessException {
+
+
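+    // Persist a revert-change job stub; the generated id lets the client track the revert.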
+    MigrationResponse migrationresult=new MigrationResponse();
+
+    migrationresult.setIntanceName(instance);
+    migrationresult.setProgressPercentage(0);
+    migrationresult.setJobtype("revertchange");
+
+    getResourceManager().create(migrationresult);
+
+    JSONObject response = new JSONObject();
+
+    JobReturnIdModel model=new JobReturnIdModel();
+
+    model.setIdforJob(migrationresult.getId());
+    model.setId(0);
+
+    response.put("returnjobidforrevertchanges",model);
+
+    return Response.ok(response).build();
+
+
+  }
+
+}

+ 102 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/InitiateJobMigration.java

@@ -0,0 +1,102 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.huetoambarimigration.migration;
+
+import com.google.inject.Inject;
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.huetoambarimigration.migration.hive.historyquery.HiveHistoryStartJob;
+import org.apache.ambari.view.huetoambarimigration.resources.PersonalCRUDResourceManager;
+import org.apache.ambari.view.huetoambarimigration.resources.scripts.MigrationResourceManager;
+import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.MigrationResponse;
+import org.apache.ambari.view.huetoambarimigration.migration.hive.savedquery.HiveSavedQueryStartJob;
+import org.apache.ambari.view.huetoambarimigration.migration.pig.pigjob.PigJobStartJob;
+import org.apache.ambari.view.huetoambarimigration.migration.pig.pigscript.PigSavedScriptStartJob;
+import org.json.simple.JSONObject;
+
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+import javax.ws.rs.Produces;
+import javax.ws.rs.QueryParam;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import java.io.IOException;
+import java.lang.reflect.InvocationTargetException;
+
+@Path("/startmigrations")
+
+public class InitiateJobMigration implements Runnable {
+
+  MigrationResponse migrationresult = new MigrationResponse();
+
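+  // No-op: the migrations themselves run on the *StartJob threads launched from initiateJob() below.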
+  public void run() {
+
+  }
+
+
+  @Inject
+  ViewContext view;
+
+
+  protected MigrationResourceManager resourceManager = null;
+
+  public synchronized PersonalCRUDResourceManager<MigrationResponse> getResourceManager() {
+    if (resourceManager == null) {
+      resourceManager = new MigrationResourceManager(view);
+    }
+    return resourceManager;
+  }
+
+  @GET
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response initiateJob(@QueryParam("username") String username, @QueryParam("instance") String instance, @QueryParam("startdate") String startdate, @QueryParam("enddate") String enddate, @QueryParam("jobid") String jobid, @QueryParam("jobtype") String jobtype) throws IOException, InvocationTargetException, IllegalAccessException {
+
+    System.out.println("username is " + username + ", instance is " + instance);
+
+    JSONObject response = new JSONObject();
+
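+    // Start the matching migration job thread for the requested job type.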
+    if (jobtype.contains("hivehistoryquerymigration")) {
+
+      new HiveHistoryStartJob(username, instance, startdate, enddate, jobid, view).start();
+    } else if (jobtype.contains("hivesavedquerymigration")) {
+
+      new HiveSavedQueryStartJob(username, instance, startdate, enddate, jobid, view).start();
+
+    } else if (jobtype.contains("pigjobmigration")) {
+
+      new PigJobStartJob(username, instance, startdate, enddate, jobid, view).start();
+
+    } else if (jobtype.contains("pigsavedscriptmigration")) {
+
+      new PigSavedScriptStartJob(username, instance, startdate, enddate, jobid, view).start();
+
+    }
+
+
+    migrationresult.setId(jobid);
+    migrationresult.setProgressPercentage(0);
+
+
+    response.put("startmigration", migrationresult);
+
+    return Response.ok(response).build();
+
+  }
+
+
+}

+ 85 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/InitiateJobMigrationforRevertchange.java

@@ -0,0 +1,85 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.huetoambarimigration.migration;
+
+import com.google.inject.Inject;
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.huetoambarimigration.resources.PersonalCRUDResourceManager;
+import org.apache.ambari.view.huetoambarimigration.resources.scripts.MigrationResourceManager;
+import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.MigrationResponse;
+import org.apache.ambari.view.huetoambarimigration.migration.revertchange.RevertChangeStartJob;
+import org.json.simple.JSONObject;
+
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+import javax.ws.rs.Produces;
+import javax.ws.rs.QueryParam;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import java.io.IOException;
+import java.lang.reflect.InvocationTargetException;
+
+@Path("/startrevertchanges")
+
+public class InitiateJobMigrationforRevertchange implements Runnable{
+
+  MigrationResponse migrationresult = new MigrationResponse();
+
+  public void run() {
+  }
+
+
+  @Inject
+  ViewContext view;
+
+
+  protected MigrationResourceManager resourceManager = null;
+
+  public synchronized PersonalCRUDResourceManager<MigrationResponse> getResourceManager() {
+    if (resourceManager == null) {
+      resourceManager = new MigrationResourceManager(view);
+    }
+    return resourceManager;
+  }
+
+  @GET
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response revertChangeJob(@QueryParam("instance") String instance, @QueryParam("revertdate") String revertdate, @QueryParam("jobid") String jobid) throws IOException, InvocationTargetException, IllegalAccessException {
+
+    JSONObject response = new JSONObject();
+
+    migrationresult.setId(jobid);
+    migrationresult.setProgressPercentage(0);
+    migrationresult.setJobtype("revertchange");
+    migrationresult.setIntanceName(instance);
+
+    new RevertChangeStartJob(instance, revertdate, jobid, view).start();
+
+    response.put("startrevertchanges", migrationresult);
+
+    return Response.ok(response).build();
+
+  }
+
+
+
+}

+ 59 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/AllInstanceDetailsAmbari.java

@@ -0,0 +1,59 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.huetoambarimigration.migration.configuration;
+
+
+import com.google.inject.Inject;
+import org.apache.ambari.view.ViewContext;
+import org.json.simple.JSONObject;
+
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+import javax.ws.rs.Produces;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import java.beans.PropertyVetoException;
+import java.io.IOException;
+import java.sql.SQLException;
+
+/**
+ * Service class for fetching
+ * all the instance details
+ */
+
+@Path("/allinstancedetails")
+
+public class AllInstanceDetailsAmbari {
+
+  @Inject
+  ViewContext view;
+
+  @GET
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response instancelist() throws IOException, PropertyVetoException, SQLException {
+
+    HiveInstanceDetailsUtility instance = new HiveInstanceDetailsUtility();
+
+    JSONObject response = new JSONObject();
+    response.put("allinstancedetails", instance.getAllInstancedetails(view));
+    return Response.ok(response).build();
+
+  }
+
+}

+ 62 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/AmbariDatabaseCheck.java

@@ -0,0 +1,62 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.huetoambarimigration.migration.configuration;
+
+
+import com.google.inject.Inject;
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.huetoambarimigration.utils.ServiceFormattedException;
+import org.json.simple.JSONObject;
+
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+import javax.ws.rs.Produces;
+import javax.ws.rs.WebApplicationException;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import java.io.IOException;
+
+/**
+ * Service class to check the Ambari database connection
+ */
+
+@Path("/ambaridatabases")
+
+public class AmbariDatabaseCheck {
+
+  @Inject
+  ViewContext view;
+
+  @GET
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response ambariDatabase() throws IOException {
+    JSONObject response = new JSONObject();
+    try {
+      response.put("ambaridatabase", ConfigurationCheckImplementation.checkAmbariDatbaseConection(view.getProperties().get("ambaridrivername"), view.getProperties().get("ambarijdbcurl"), view.getProperties().get("ambaridbusername"), view.getProperties().get("ambaridbpassword")));
+      return Response.ok(response).build();
+    } catch (WebApplicationException ex) {
+      throw ex;
+    } catch (Exception ex) {
+      throw new ServiceFormattedException(ex.getMessage(), ex);
+    }
+
+  }
+
+
+}

+ 60 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/AmbariWebHdfsCheck.java

@@ -0,0 +1,60 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.huetoambarimigration.migration.configuration;
+
+
+import com.google.inject.Inject;
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.huetoambarimigration.utils.ServiceFormattedException;
+import org.json.simple.JSONObject;
+
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+import javax.ws.rs.Produces;
+import javax.ws.rs.WebApplicationException;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import java.io.IOException;
+import java.net.URISyntaxException;
+
+/**
+ * Service class for the Ambari WebHDFS check
+ */
+@Path("/ambariwebhdfsurls")
+public class AmbariWebHdfsCheck {
+
+  @Inject
+  ViewContext view;
+
+  @GET
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response ambariWebHdfs() throws IOException, URISyntaxException {
+    JSONObject response = new JSONObject();
+    try {
+      response.put("ambariwebhdfsurl", ConfigurationCheckImplementation.checkNamenodeURIConnectionforambari(view.getProperties().get("namenode_URI_Ambari")));
+      return Response.ok(response).build();
+    } catch (WebApplicationException ex) {
+      throw ex;
+    } catch (Exception ex) {
+      throw new ServiceFormattedException(ex.getMessage(), ex);
+    }
+
+  }
+
+}

+ 70 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/CheckProgresStatus.java

@@ -0,0 +1,70 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.huetoambarimigration.migration.configuration;
+
+import com.google.inject.Inject;
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.huetoambarimigration.persistence.utils.ItemNotFound;
+import org.apache.ambari.view.huetoambarimigration.resources.PersonalCRUDResourceManager;
+import org.apache.ambari.view.huetoambarimigration.resources.scripts.MigrationResourceManager;
+import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.MigrationResponse;
+import org.json.simple.JSONObject;
+
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+import javax.ws.rs.Produces;
+import javax.ws.rs.QueryParam;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import java.io.IOException;
+import java.lang.reflect.InvocationTargetException;
+
+/**
+ * Service class to check progress
+ */
+
+@Path("/checkprogresses")
+
+public class CheckProgresStatus {
+
+  @Inject
+  ViewContext view;
+
+  protected MigrationResourceManager resourceManager = null;
+
+  public synchronized PersonalCRUDResourceManager<MigrationResponse> getResourceManager() {
+    if (resourceManager == null) {
+      resourceManager = new MigrationResourceManager(view);
+    }
+    return resourceManager;
+  }
+
+  @GET
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response checkProgressStatus(@QueryParam("jobid") String jobid) throws IOException, InvocationTargetException, IllegalAccessException, ItemNotFound {
+
+    MigrationResponse mr = getResourceManager().read(jobid);
+    JSONObject object = new JSONObject();
+    object.put("checkprogress", mr);
+    return Response.ok(object).build();
+
+  }
+
+}

+ 134 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/ConfigurationCheckImplementation.java

@@ -0,0 +1,134 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.huetoambarimigration.migration.configuration;
+
+import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceAmbariDatabase;
+import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceHueDatabase;
+import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.ConfigurationModel;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.hdfs.web.WebHdfsFileSystem;
+import org.apache.log4j.Logger;
+
+import java.beans.PropertyVetoException;
+import java.io.IOException;
+import java.net.*;
+import java.sql.Connection;
+import java.sql.SQLException;
+
+/**
+ * Configuration check implementation class
+ */
+
+public class ConfigurationCheckImplementation {
+
+  static final Logger logger = Logger.getLogger(ConfigurationCheckImplementation.class);
+
+  private static String homeDir = System.getProperty("java.io.tmpdir") + "/";
+
+  public static ConfigurationModel checkConfigurationForHue(String hueURL) throws IOException {
+
+    ConfigurationModel hueHttpUrl = new ConfigurationModel();
+    hueHttpUrl.setId(1);
+    hueHttpUrl.setConfigParameter("hueHtttpUrl");
+    URL url = new URL(hueURL);
+
+    HttpURLConnection connection = (HttpURLConnection) url.openConnection();
+    connection.setRequestMethod("GET");  // a HEAD request would also work for this check
+    connection.connect();
+    int responseCode = connection.getResponseCode();
+    if (responseCode == 200) {
+      hueHttpUrl.setConfigStatus("Success");
+    } else {
+      hueHttpUrl.setConfigStatus("Failed");
+    }
+    return hueHttpUrl;
+  }
+
+  public static ConfigurationModel checkHueDatabaseConnection(String hueDBDriver, String hueJdbcUrl, String huedbUsername, String huedbPassword) throws IOException, PropertyVetoException, SQLException {
+
+    ConfigurationModel configmodelHueDB = new ConfigurationModel();
+    configmodelHueDB.setId(4);
+    configmodelHueDB.setConfigParameter("huedb");
+    Connection con = DataSourceHueDatabase.getInstance(hueDBDriver, hueJdbcUrl, huedbUsername, huedbPassword).getConnection();
+    configmodelHueDB.setConfigStatus("Success");
+    return configmodelHueDB;
+  }
+
+  public static ConfigurationModel checkAmbariDatabaseConnection(String ambariDBDriver, String ambariDBJdbcUrl, String ambariDbUsername, String ambariDbPassword) throws IOException, PropertyVetoException, SQLException {
+
+    ConfigurationModel configmodelAmbariDB = new ConfigurationModel();
+    configmodelAmbariDB.setId(5);
+    configmodelAmbariDB.setConfigParameter("ambaridb");
+    Connection con = DataSourceAmbariDatabase.getInstance(ambariDBDriver, ambariDBJdbcUrl, ambariDbUsername, ambariDbPassword).getConnection();
+    configmodelAmbariDB.setConfigStatus("Success");
+    return configmodelAmbariDB;
+  }
+
+  public static String getHomeDir() {
+    return homeDir;
+  }
+
+  public static ConfigurationModel checkNamenodeURIConnectionforambari(String ambariServerNameNode) throws Exception {
+
+    ConfigurationModel configmodelWebhdfsAmbari = new ConfigurationModel();
+    configmodelWebhdfsAmbari.setId(6);
+    configmodelWebhdfsAmbari.setConfigParameter("ambariwebhdfsurl");
+    Configuration conf = new Configuration();
+    conf.set("fs.hdfs.impl",
+      org.apache.hadoop.hdfs.DistributedFileSystem.class.getName());
+    conf.set("fs.file.impl",
+      org.apache.hadoop.fs.LocalFileSystem.class.getName()
+    );
+    FileSystem fileSystem = FileSystem.get(new URI(ambariServerNameNode), conf);
+
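+    // The check passes only if the URI resolves to a WebHDFS file system.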
+    if (fileSystem instanceof WebHdfsFileSystem) {
+      configmodelWebhdfsAmbari.setConfigStatus("Success");
+    } else {
+      configmodelWebhdfsAmbari.setConfigStatus("Failed");
+      throw new Exception("Ambari NameNode URI did not resolve to a WebHDFS file system");
+    }
+    return configmodelWebhdfsAmbari;
+  }
+
+  public static ConfigurationModel checkNamenodeURIConnectionforHue(String hueServerNamenodeURI) throws Exception {
+
+    ConfigurationModel configmodelWebhdfsHue = new ConfigurationModel();
+    configmodelWebhdfsHue.setId(7);
+    configmodelWebhdfsHue.setConfigParameter("huewebhdfsurl");
+    Configuration conf = new Configuration();
+    conf.set("fs.hdfs.impl",
+      org.apache.hadoop.hdfs.DistributedFileSystem.class.getName()
+    );
+    conf.set("fs.file.impl",
+      org.apache.hadoop.fs.LocalFileSystem.class.getName()
+    );
+    FileSystem fileSystem = FileSystem.get(new URI(hueServerNamenodeURI), conf);
+
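+    // As with the Ambari check, the URI must resolve to a WebHDFS file system.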
+    if (fileSystem instanceof WebHdfsFileSystem) {
+      configmodelWebhdfsHue.setConfigStatus("Success");
+    } else {
+      configmodelWebhdfsHue.setConfigStatus("Failed");
+      throw new Exception("Hue NameNode URI did not resolve to a WebHDFS file system");
+    }
+    return configmodelWebhdfsHue;
+  }
+}

+ 53 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/HiveInstanceDetailsAmbari.java

@@ -0,0 +1,53 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.huetoambarimigration.migration.configuration;
+
+import com.google.inject.Inject;
+import org.apache.ambari.view.ViewContext;
+import org.json.simple.JSONObject;
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+import javax.ws.rs.Produces;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import java.beans.PropertyVetoException;
+import java.io.IOException;
+import java.sql.SQLException;
+
+/**
+ * Service class to fetch Hive instance details
+ */
+
+@Path("/hiveinstancedetails")
+
+public class HiveInstanceDetailsAmbari {
+
+  @Inject
+  ViewContext view;
+
+  @GET
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response instancelist() throws IOException, PropertyVetoException, SQLException {
+    HiveInstanceDetailsUtility instance = new HiveInstanceDetailsUtility();
+    JSONObject response = new JSONObject();
+    response.put("hiveinstancedetails", instance.getInstancedetails(view));
+    return Response.ok(response).build();
+  }
+
+}

+ 106 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/HiveInstanceDetailsUtility.java

@@ -0,0 +1,106 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.huetoambarimigration.migration.configuration;
+
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceAmbariDatabase;
+import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.InstanceModel;
+
+import java.beans.PropertyVetoException;
+import java.io.IOException;
+import java.sql.*;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.instancedetail.*;
+
+
+public class HiveInstanceDetailsUtility {
+
+  public List<InstanceModel> getInstancedetails(ViewContext view) throws PropertyVetoException, SQLException, IOException {
+
+    List<InstanceModel> instancelist = new ArrayList<>();
+    Connection conn = null;
+    conn = DataSourceAmbariDatabase.getInstance(view.getProperties().get("ambaridrivername"), view.getProperties().get("ambarijdbcurl"), view.getProperties().get("ambaridbusername"), view.getProperties().get("ambaridbpassword")).getConnection();
+    conn.setAutoCommit(false);
+    PreparedStatement prSt;
+
+    QuerySetAmbariDB ambaridatabase = null;
+
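+    // Choose the query set that matches the configured Ambari JDBC driver.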
+    if (view.getProperties().get("ambaridrivername").contains("mysql")) {
+      ambaridatabase = new MysqlQuerySetAmbariDB();
+    } else if (view.getProperties().get("ambaridrivername").contains("postgresql")) {
+      ambaridatabase = new PostgressQuerySetAmbariDB();
+    } else if (view.getProperties().get("ambaridrivername").contains("oracle")) {
+      ambaridatabase = new OracleQuerySetAmbariDB();
+    }
+
+    ResultSet rs1 = null;
+    prSt = ambaridatabase.getHiveInstanceDeatil(conn);
+    rs1 = prSt.executeQuery();
+    int i = 0;
+
+    while (rs1.next()) {
+      InstanceModel instanceModel = new InstanceModel();
+      instanceModel.setInstanceName(rs1.getString(1));
+      instanceModel.setId(i);
+      instancelist.add(instanceModel);
+      i++;
+    }
+    rs1.close();
+    return instancelist;
+
+  }
+
+  public List<InstanceModel> getAllInstancedetails(ViewContext view) throws PropertyVetoException, SQLException, IOException {
+
+    List<InstanceModel> instancelist = new ArrayList<>();
+    Connection conn = null;
+    conn = DataSourceAmbariDatabase.getInstance(view.getProperties().get("ambaridrivername"), view.getProperties().get("ambarijdbcurl"), view.getProperties().get("ambaridbusername"), view.getProperties().get("ambaridbpassword")).getConnection();
+    conn.setAutoCommit(false);
+    PreparedStatement prSt;
+
+    QuerySetAmbariDB ambaridatabase = null;
+
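+    // Same driver-based dispatch as above.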
+    if (view.getProperties().get("ambaridrivername").contains("mysql")) {
+      ambaridatabase = new MysqlQuerySetAmbariDB();
+    } else if (view.getProperties().get("ambaridrivername").contains("postgresql")) {
+      ambaridatabase = new PostgressQuerySetAmbariDB();
+    } else if (view.getProperties().get("ambaridrivername").contains("oracle")) {
+      ambaridatabase = new OracleQuerySetAmbariDB();
+    }
+
+    ResultSet rs1 = null;
+    int i = 0;
+    prSt = ambaridatabase.getAllInstanceDeatil(conn);
+    rs1 = prSt.executeQuery();
+
+    while (rs1.next()) {
+      InstanceModel instanceModel = new InstanceModel();
+      instanceModel.setInstanceName(rs1.getString(1));
+      instanceModel.setId(i);
+      instancelist.add(instanceModel);
+      i++;
+    }
+    rs1.close();
+    return instancelist;
+
+  }
+
+
+}

+ 60 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/HueDatabaseCheck.java

@@ -0,0 +1,60 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.huetoambarimigration.migration.configuration;
+
+
+import com.google.inject.Inject;
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.huetoambarimigration.utils.ServiceFormattedException;
+import org.json.simple.JSONObject;
+
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+import javax.ws.rs.Produces;
+import javax.ws.rs.WebApplicationException;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import java.io.IOException;
+
+/**
+ * Service class to check the Hue database connection
+ */
+@Path("/huedatabases")
+
+public class HueDatabaseCheck {
+
+  @Inject
+  ViewContext view;
+
+  @GET
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response hueDatabase() throws IOException {
+    JSONObject response = new JSONObject();
+    try {
+      response.put("huedatabase", ConfigurationCheckImplementation.checkHueDatabaseConnection(view.getProperties().get("huedrivername"), view.getProperties().get("huejdbcurl"), view.getProperties().get("huedbusername"), view.getProperties().get("huedbpassword")));
+      return Response.ok(response).build();
+    } catch (WebApplicationException ex) {
+      throw ex;
+    } catch (Exception ex) {
+      throw new ServiceFormattedException(ex.getMessage(), ex);
+    }
+
+  }
+
+}

+ 60 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/HueHttpUrlCheck.java

@@ -0,0 +1,60 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.huetoambarimigration.migration.configuration;
+
+
+import com.google.inject.Inject;
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.huetoambarimigration.utils.ServiceFormattedException;
+import org.json.simple.JSONObject;
+
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+import javax.ws.rs.Produces;
+import javax.ws.rs.WebApplicationException;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+
+/**
+ * Service class to check the Hue HTTP URL
+ */
+@Path("/huehttpurls")
+
+public class HueHttpUrlCheck {
+
+  @Inject
+  ViewContext view;
+
+  @GET
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response hueHttpUrl() {
+
+    JSONObject response = new JSONObject();
+    try {
+      response.put("huehttpurl", ConfigurationCheckImplementation.checkConfigurationForHue(view.getProperties().get("Hue_URL")));
+      return Response.ok(response).build();
+
+    } catch (WebApplicationException ex) {
+      throw ex;
+    } catch (Exception ex) {
+      throw new ServiceFormattedException(ex.getMessage(), ex);
+    }
+  }
+
+}

+ 60 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/HueWebHdfsCheck.java

@@ -0,0 +1,60 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.huetoambarimigration.migration.configuration;
+
+
+import com.google.inject.Inject;
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.huetoambarimigration.utils.ServiceFormattedException;
+import org.json.simple.JSONObject;
+
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+import javax.ws.rs.Produces;
+import javax.ws.rs.WebApplicationException;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import java.io.IOException;
+import java.net.URISyntaxException;
+
+/**
+ * Service class to check Hue WebHDFS
+ */
+
+@Path("/huewebhdfsurls")
+
+public class HueWebHdfsCheck {
+
+  @Inject
+  ViewContext view;
+
+  @GET
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response hueWebHdfs() throws IOException, URISyntaxException {
+
+    JSONObject response = new JSONObject();
+    try {
+      response.put("huewebhdfsurl", ConfigurationCheckImplementation.checkNamenodeURIConnectionforHue(view.getProperties().get("namenode_URI_Hue")));
+      return Response.ok(response).build();
+    } catch (WebApplicationException ex) {
+      throw ex;
+    } catch (Exception ex) {
+      throw new ServiceFormattedException(ex.getMessage(), ex);
+    }
+  }
+}

+ 61 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/PigInstanceDetailsAmbari.java

@@ -0,0 +1,61 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.huetoambarimigration.migration.configuration;
+
+
+import com.google.inject.Inject;
+import org.apache.ambari.view.ViewContext;
+import org.json.simple.JSONObject;
+
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+import javax.ws.rs.Produces;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import java.beans.PropertyVetoException;
+import java.io.IOException;
+import java.sql.SQLException;
+
+/**
+ * Service class to fetch Pig instance details
+ */
+
+@Path("/piginstancedetails")
+
+public class PigInstanceDetailsAmbari {
+
+  @Inject
+  ViewContext view;
+
+  @GET
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response instancelist() throws IOException, PropertyVetoException, SQLException {
+
+    PigInstanceDetailsUtility instance = new PigInstanceDetailsUtility();
+
+    JSONObject response = new JSONObject();
+    response.put("piginstancedetails", instance.getInstancedetails(view));
+    return Response.ok(response).build();
+  }
+
+
+}

+ 79 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/PigInstanceDetailsUtility.java

@@ -0,0 +1,79 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.huetoambarimigration.migration.configuration;
+
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceAmbariDatabase;
+import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.InstanceModel;
+
+import java.beans.PropertyVetoException;
+import java.io.IOException;
+import java.sql.*;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.pig.instancedetail.*;
+
+/**
+ * Utility class to fetch Pig Instance details
+ */
+
+public class PigInstanceDetailsUtility {
+
+  public List<InstanceModel> getInstancedetails(ViewContext view) throws PropertyVetoException, SQLException, IOException {
+
+    List<InstanceModel> instancelist = new ArrayList<>();
+    Connection conn = null;
+    Statement stmt = null;
+    PreparedStatement prSt;
+    conn = DataSourceAmbariDatabase.getInstance(view.getProperties().get("ambaridrivername"), view.getProperties().get("ambarijdbcurl"), view.getProperties().get("ambaridbusername"), view.getProperties().get("ambaridbpassword")).getConnection();
+    conn.setAutoCommit(false);
+    stmt = conn.createStatement();
+    int i = 0;
+
+    QuerySetAmbariDB ambaridatabase = null;
+
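+    // Choose the query set that matches the configured Ambari JDBC driver.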
+    if (view.getProperties().get("ambaridrivername").contains("mysql")) {
+      ambaridatabase = new MysqlQuerySetAmbariDB();
+    } else if (view.getProperties().get("ambaridrivername").contains("postgresql")) {
+      ambaridatabase = new PostgressQuerySetAmbariDB();
+    } else if (view.getProperties().get("ambaridrivername").contains("oracle")) {
+      ambaridatabase = new OracleQuerySetAmbariDB();
+    }
+
+    ResultSet rs1 = null;
+
+    prSt = ambaridatabase.getAllPigInstance(conn);
+
+    rs1 = prSt.executeQuery();
+
+    while (rs1.next()) {
+      InstanceModel instanceModel = new InstanceModel();
+      instanceModel.setInstanceName(rs1.getString(1));
+      instanceModel.setId(i);
+      instancelist.add(instanceModel);
+      i++;
+    }
+    rs1.close();
+    stmt.close();
+    return instancelist;
+
+  }
+
+
+}

+ 58 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/UserDetailHue.java

@@ -0,0 +1,58 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.huetoambarimigration.migration.configuration;
+
+
+import com.google.inject.Inject;
+import org.apache.ambari.view.ViewContext;
+import org.json.simple.JSONObject;
+
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+import javax.ws.rs.Produces;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import java.beans.PropertyVetoException;
+import java.io.IOException;
+import java.sql.SQLException;
+
+/**
+ * Service class to fetch Hue user details
+ */
+
+@Path("/usersdetails")
+
+public class UserDetailHue {
+
+  @Inject
+  ViewContext view;
+
+  @GET
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response userList() throws IOException, PropertyVetoException, SQLException {
+
+    UserDetailsUtility user = new UserDetailsUtility();
+
+    JSONObject response = new JSONObject();
+    response.put("usersdetails", user.getUserDetails(view));
+    return Response.ok(response).build();
+  }
+
+
+}

+ 80 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/UserDetailsUtility.java

@@ -0,0 +1,80 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.huetoambarimigration.migration.configuration;
+
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceHueDatabase;
+import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.UserModel;
+
+import java.beans.PropertyVetoException;
+import java.io.IOException;
+import java.sql.*;
+import java.util.ArrayList;
+import java.util.List;
+import org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.userdetails.*;
+
+
+public class UserDetailsUtility {
+
+
+  public List<UserModel> getUserDetails(ViewContext view) throws PropertyVetoException, SQLException, IOException {
+
+    List<UserModel> userlist=new ArrayList<>();
+    Connection conn = null;
+    Statement stmt = null;
+    conn = DataSourceHueDatabase.getInstance(view.getProperties().get("huedrivername"),view.getProperties().get("huejdbcurl"),view.getProperties().get("huedbusername"),view.getProperties().get("huedbpassword")).getConnection();
+    conn.setAutoCommit(false);
+    stmt = conn.createStatement();
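+    // Seed the list with a synthetic "all" entry (id -1) ahead of the real Hue users.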
+    UserModel all = new UserModel();
+    all.setId(-1);
+    all.setUsername("all");
+    userlist.add(all);
+
+    PreparedStatement prSt;
+    ResultSet rs1 = null;
+
+    QuerySet huedatabase = null;
+
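+    // Choose the query set that matches the configured Hue JDBC driver.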
+    if (view.getProperties().get("huedrivername").contains("mysql")) {
+      huedatabase = new MysqlQuerySet();
+    } else if (view.getProperties().get("huedrivername").contains("postgresql")) {
+      huedatabase = new PostgressQuerySet();
+    } else if (view.getProperties().get("huedrivername").contains("sqlite")) {
+      huedatabase = new SqliteQuerySet();
+    } else if (view.getProperties().get("huedrivername").contains("oracle")) {
+      huedatabase = new OracleQuerySet();
+    }
+
+    prSt = huedatabase.getUserDetails(conn);
+
+    rs1 = prSt.executeQuery();
+
+    while (rs1.next()) {
+      UserModel user = new UserModel();
+      user.setUsername(rs1.getString(2));
+      user.setId(rs1.getInt(1));
+      userlist.add(user);
+    }
+    rs1.close();
+    stmt.close();
+    return userlist;
+
+  }
+
+
+}

+ 255 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/historyquery/HiveHistoryMigrationUtility.java

@@ -0,0 +1,255 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package org.apache.ambari.view.huetoambarimigration.migration.hive.historyquery;
+
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceAmbariDatabase;
+import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceHueDatabase;
+import org.apache.ambari.view.huetoambarimigration.migration.InitiateJobMigration;
+import org.apache.ambari.view.huetoambarimigration.migration.configuration.ConfigurationCheckImplementation;
+import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.HiveModel;
+import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.MigrationModel;
+import org.apache.ambari.view.huetoambarimigration.persistence.utils.ItemNotFound;
+import org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.historyqueryset.MysqlQuerySetAmbariDB;
+import org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.historyqueryset.OracleQuerySetAmbariDB;
+import org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.historyqueryset.PostgressQuerySetAmbariDB;
+import org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.historyqueryset.QuerySetAmbariDB;
+import org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.hive.historyqueryset.*;
+import org.apache.ambari.view.huetoambarimigration.resources.PersonalCRUDResourceManager;
+import org.apache.ambari.view.huetoambarimigration.resources.scripts.MigrationResourceManager;
+import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.MigrationResponse;
+import org.apache.log4j.Logger;
+
+import java.beans.PropertyVetoException;
+import java.io.IOException;
+import java.net.URISyntaxException;
+import java.sql.Connection;
+import java.sql.SQLException;
+import java.text.ParseException;
+import java.util.ArrayList;
+
+
+public class HiveHistoryMigrationUtility {
+
+
+  protected MigrationResourceManager resourceManager = null;
+
+  public synchronized PersonalCRUDResourceManager<MigrationResponse> getResourceManager(ViewContext view) {
+    if (resourceManager == null) {
+      resourceManager = new MigrationResourceManager(view);
+    }
+    return resourceManager;
+  }
+
+
+  public void hiveHistoryQueryMigration(String username, String instance, String startDate, String endDate, ViewContext view, MigrationResponse migrationresult, String jobid) throws IOException, ItemNotFound {
+
+    InitiateJobMigration migrationservice = new InitiateJobMigration();
+
+    long startTime = System.currentTimeMillis();
+
+    final Logger logger = Logger.getLogger(HiveHistoryMigrationUtility.class);
+    Connection connectionHuedb = null;
+    Connection connectionAmbaridb = null;
+
+    logger.info("--------------------------------------");
+    logger.info("hive History query Migration started");
+    logger.info("--------------------------------------");
+    logger.info("start date: " + startDate);
+    logger.info("enddate date: " + endDate);
+    logger.info("instance is: " + username);
+    logger.info("hue username is : " + instance);
+
+    MigrationModel model = new MigrationModel();
+
+    int maxCountOfAmbariDb, i = 0;
+    String time = null;
+    Long epochTime = null;
+    String dirNameforHiveHistroy;
+    ArrayList<HiveModel> dbpojoHiveHistoryQuery = new ArrayList<HiveModel>();
+
+    HiveHistoryQueryMigrationImplementation hiveHistoryQueryImpl = new HiveHistoryQueryMigrationImplementation(); // creating the HiveHistory implementation object
+
+    QuerySet huedatabase = null;
+
+    /* instantiating the query set
+     * according to the driver name
+     */
+
+    if (view.getProperties().get("huedrivername").contains("mysql")) {
+      huedatabase = new MysqlQuerySet();
+    } else if (view.getProperties().get("huedrivername").contains("postgresql")) {
+      huedatabase = new PostgressQuerySet();
+    } else if (view.getProperties().get("huedrivername").contains("sqlite")) {
+      huedatabase = new SqliteQuerySet();
+    } else if (view.getProperties().get("huedrivername").contains("oracle")) {
+      huedatabase = new OracleQuerySet();
+    }
+
+
+    QuerySetAmbariDB ambaridatabase = null;
+
+
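+    // selecting the Ambari-side query set from the configured JDBC driver name; unlike the Hue side, no sqlite variant exists here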
+    if (view.getProperties().get("ambaridrivername").contains("mysql")) {
+      ambaridatabase = new MysqlQuerySetAmbariDB();
+    } else if (view.getProperties().get("ambaridrivername").contains("postgresql")) {
+      ambaridatabase = new PostgressQuerySetAmbariDB();
+    } else if (view.getProperties().get("ambaridrivername").contains("oracle")) {
+      ambaridatabase = new OracleQuerySetAmbariDB();
+    }
+
+
+    try {
+
+      connectionHuedb = DataSourceHueDatabase.getInstance(view.getProperties().get("huedrivername"), view.getProperties().get("huejdbcurl"), view.getProperties().get("huedbusername"), view.getProperties().get("huedbpassword")).getConnection();
+
+      dbpojoHiveHistoryQuery = hiveHistoryQueryImpl.fetchFromHue(username, startDate, endDate, connectionHuedb, huedatabase);
+
+      for (int j = 0; j < dbpojoHiveHistoryQuery.size(); j++) {
+        logger.info("The query fetched from Hue: " + dbpojoHiveHistoryQuery.get(j).getQuery());
+      }
+
+      /* if no migration queries were selected from the Hue database according to the search criteria */
+
+      if (dbpojoHiveHistoryQuery.size() == 0) {
+        migrationresult.setIsNoQuerySelected("yes");
+        migrationresult.setProgressPercentage(0);
+        migrationresult.setNumberOfQueryTransfered(0);
+        migrationresult.setTotalNoQuery(dbpojoHiveHistoryQuery.size());
+        getResourceManager(view).update(migrationresult, jobid);
+        logger.info("No queries has been selected acccording to your criteria");
+
+      } else {
+        /* If hive queries are selected based on our search criteria */
+
+        connectionAmbaridb = DataSourceAmbariDatabase.getInstance(view.getProperties().get("ambaridrivername"), view.getProperties().get("ambarijdbcurl"), view.getProperties().get("ambaridbusername"), view.getProperties().get("ambaridbpassword")).getConnection();// connecting to ambari db
+        connectionAmbaridb.setAutoCommit(false);
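+        // auto-commit is disabled so all inserts commit together at the end, or roll back as one on failure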
+
+        // for each query fetched from the Hue database
+
+        for (i = 0; i < dbpojoHiveHistoryQuery.size(); i++) {
+
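+          // progress is reported as the fraction of queries migrated so far, rounded to a whole percent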
+          float calc = ((float) (i + 1)) / dbpojoHiveHistoryQuery.size() * 100;
+          int progressPercentage = Math.round(calc);
+          migrationresult.setIsNoQuerySelected("no");
+          migrationresult.setProgressPercentage(progressPercentage);
+          migrationresult.setNumberOfQueryTransfered(i + 1);
+          migrationresult.setTotalNoQuery(dbpojoHiveHistoryQuery.size());
+          getResourceManager(view).update(migrationresult, jobid);
+
+          logger.info("_____________________");
+          logger.info("Loop No." + (i + 1));
+          logger.info("_____________________");
+          logger.info("Hue query that has been fetched" + dbpojoHiveHistoryQuery.get(i).getQuery());
+          int id = 0;
+
+          id = hiveHistoryQueryImpl.fetchInstanceTablename(connectionAmbaridb, instance, ambaridatabase); // fetching the table id for the given instance name
+
+          logger.info("Table id has been fetched from the instance name");
+
+          hiveHistoryQueryImpl.writetoFileQueryhql(dbpojoHiveHistoryQuery.get(i).getQuery(), ConfigurationCheckImplementation.getHomeDir());// writing the query to a temporary .hql file on local disk
+
+          logger.info(".hql file created in Temp directory");
+
+          hiveHistoryQueryImpl.writetoFileLogs(ConfigurationCheckImplementation.getHomeDir());// writing a logs file to a temporary location on local disk
+
+          logger.info("Log file created in Temp directory");
+
+          maxCountOfAmbariDb = (hiveHistoryQueryImpl.fetchMaximumIdfromAmbaridb(connectionAmbaridb, id, ambaridatabase) + 1);// computing the next ds_id to insert into the ambari db
+
+          time = hiveHistoryQueryImpl.getTime();// getting the current system time
+
+          epochTime = hiveHistoryQueryImpl.getEpochTime();// getting the current time in epoch format
+
+          dirNameforHiveHistroy = "/user/admin/migration/jobs/migration-job-" + maxCountOfAmbariDb + "-" + time + "/";// creating the directory name
+
+          logger.info("Directory name where .hql will be saved: " + dirNameforHiveHistroy);
+
+          hiveHistoryQueryImpl.insertRowinAmbaridb(dirNameforHiveHistroy, maxCountOfAmbariDb, epochTime, connectionAmbaridb, id, instance, i, ambaridatabase);// inserting in ambari database
+
+          if (view.getProperties().get("KerberoseEnabled").equals("y")) {
+
+            logger.info("kerberose enabled");
+            hiveHistoryQueryImpl.createDirKerberorisedSecured(dirNameforHiveHistroy, view.getProperties().get("namenode_URI_Ambari"));// creating directory in kerborized secured hdfs
+            logger.info("Directory created in hdfs");
+            hiveHistoryQueryImpl.putFileinHdfsKerborizedSecured(ConfigurationCheckImplementation.getHomeDir() + "query.hql", dirNameforHiveHistroy, view.getProperties().get("namenode_URI_Ambari"));// copying the .hql file to kerborized hdfs
+            hiveHistoryQueryImpl.putFileinHdfsKerborizedSecured(ConfigurationCheckImplementation.getHomeDir() + "logs", dirNameforHiveHistroy, view.getProperties().get("namenode_URI_Ambari"));// copying the log file to kerborized hdfs
+          } else {
+
+            logger.info("kerberose not enabled");
+            hiveHistoryQueryImpl.createDir(dirNameforHiveHistroy, view.getProperties().get("namenode_URI_Ambari"));// creating directory in hdfs
+            logger.info("Directory created in hdfs");
+            hiveHistoryQueryImpl.putFileinHdfs(ConfigurationCheckImplementation.getHomeDir() + "query.hql", dirNameforHiveHistroy, view.getProperties().get("namenode_URI_Ambari"));// copying the .hql file to hdfs
+            hiveHistoryQueryImpl.putFileinHdfs(ConfigurationCheckImplementation.getHomeDir() + "logs", dirNameforHiveHistroy, view.getProperties().get("namenode_URI_Ambari"));// copying the log file to hdfs
+          }
+
+        }
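+        // committing all inserted rows in a single transaction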
+        connectionAmbaridb.commit();
+
+
+      }
+    } catch (SQLException e) {
+      logger.error("Sql exception in ambari database: ", e);
+      try {
+        connectionAmbaridb.rollback();
+        model.setIfSuccess(false);
+        logger.error("Sql statement are Rolledback");
+      } catch (SQLException e1) {
+        logger.error("Sql rollback exception in ambari database",
+          e1);
+      }
+    } catch (ClassNotFoundException e) {
+      logger.error("Class not found :- ", e);
+    } catch (ParseException e) {
+      logger.error("Parse Exception : ", e);
+    } catch (URISyntaxException e) {
+      logger.error("URI Syntax Exception: ", e);
+    } catch (PropertyVetoException e) {
+      logger.error("PropertyVetoException: ", e);
+    } catch (ItemNotFound itemNotFound) {
+      itemNotFound.printStackTrace();
+    } finally {
+      if (connectionAmbaridb != null) try {
+        connectionAmbaridb.close();
+      } catch (SQLException e) {
+        logger.error("Exception in closing the connection :", e);
+      }
+    }
+    // deleting the temporary files created during execution
+    hiveHistoryQueryImpl.deleteFileQueryhql(ConfigurationCheckImplementation.getHomeDir());
+    hiveHistoryQueryImpl.deleteFileQueryLogs(ConfigurationCheckImplementation.getHomeDir());
+
+    //session.setAttribute(ProgressBarStatus.TASK_PROGRESS_VARIABLE, 0);
+    logger.info("------------------------------");
+    logger.info("hive History query Migration Ends");
+    logger.info("------------------------------");
+
+    long stopTime = System.currentTimeMillis();
+    long elapsedTime = stopTime - startTime;
+
+    migrationresult.setJobtype("hivehistoryquerymigration");
+    migrationresult.setTotalTimeTaken(String.valueOf(elapsedTime));
+    getResourceManager(view).update(migrationresult, jobid);
+
+
+  }
+
+}

+ 100 - 111
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/service/hive/HiveHistoryQueryImpl.java → contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/historyquery/HiveHistoryQueryMigrationImplementation.java

@@ -16,36 +16,17 @@
  * limitations under the License.
  */
 
-package org.apache.ambari.view.huetoambarimigration.service.hive;
+package org.apache.ambari.view.huetoambarimigration.migration.hive.historyquery;
 
-import java.net.URISyntaxException;
-import java.security.PrivilegedExceptionAction;
-import java.sql.Connection;
-import java.sql.DriverManager;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.Statement;
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.io.BufferedInputStream;
-import java.io.BufferedWriter;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileWriter;
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.Calendar;
-import java.util.Date;
-import java.util.GregorianCalendar;
-import java.util.Scanner;
-
-import org.apache.ambari.view.huetoambarimigration.service.configurationcheck.ConfFileReader;
+import org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.historyqueryset.QuerySetAmbariDB;
+import org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.hive.historyqueryset.QuerySet;
+import org.apache.ambari.view.huetoambarimigration.migration.configuration.ConfigurationCheckImplementation;
+import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.HiveModel;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
 import org.apache.log4j.Logger;
 import org.jdom.Attribute;
 import org.jdom.Document;
@@ -55,9 +36,20 @@ import org.jdom.input.SAXBuilder;
 import org.jdom.output.Format;
 import org.jdom.output.XMLOutputter;
 
-public class HiveHistoryQueryImpl {
+import java.io.*;
+import java.net.URISyntaxException;
+import java.security.PrivilegedExceptionAction;
+import java.sql.*;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Calendar;
+import java.util.Date;
+import java.util.GregorianCalendar;
+
+public class HiveHistoryQueryMigrationImplementation {
 
-  static final Logger logger = Logger.getLogger(HiveHistoryQueryImpl.class);
+  static final Logger logger = Logger.getLogger(HiveHistoryQueryMigrationImplementation.class);
 
   public void wrtitetoalternatesqlfile(String dirname, String content, String instance, int i) throws IOException {
 
@@ -68,7 +60,7 @@ public class HiveHistoryQueryImpl {
     XMLOutputter xmlOutput = new XMLOutputter();
     xmlOutput.setFormat(Format.getPrettyFormat());
 
-    File xmlfile = new File("/var/lib/huetoambari/RevertChange.xml");
+    File xmlfile = new File(ConfigurationCheckImplementation.getHomeDir() + "RevertChangesService.xml");
 
     if (xmlfile.exists()) {
       String iteration = Integer.toString(i + 1);
@@ -84,10 +76,10 @@ public class HiveHistoryQueryImpl {
         record.addContent(new Element("instance").setText(instance));
         record.addContent(new Element("query").setText(content));
         rootNode.addContent(record);
-        xmlOutput.output(doc, new FileWriter(ConfFileReader.getHomeDir() + "RevertChange.xml"));
+        xmlOutput.output(doc, new FileWriter(ConfigurationCheckImplementation.getHomeDir() + "RevertChangesService.xml"));
 
       } catch (JDOMException e) {
-        logger.error("JDOMException" ,e);
+        logger.error("JDOMException", e);
 
       }
 
@@ -106,29 +98,24 @@ public class HiveHistoryQueryImpl {
         record.addContent(new Element("instance").setText(instance));
         record.addContent(new Element("query").setText(content));
         doc.getRootElement().addContent(record);
-        xmlOutput.output(doc, new FileWriter(ConfFileReader.getHomeDir() + "RevertChange.xml"));
+        xmlOutput.output(doc, new FileWriter(ConfigurationCheckImplementation.getHomeDir() + "RevertChangesService.xml"));
       } catch (IOException io) {
-        logger.error("JDOMException" , io);
+        logger.error("JDOMException", io);
       }
 
     }
 
   }
 
-  public int fetchMaximumIdfromAmbaridb(String driverName, Connection c, int id) throws SQLException {
+  public int fetchMaximumIdfromAmbaridb(Connection c, int id, QuerySetAmbariDB ambaridatabase) throws SQLException {
 
     String ds_id = null;
-    Statement stmt = null;
-    stmt = c.createStatement();
     ResultSet rs = null;
+    PreparedStatement prSt = null;
 
-    if (driverName.contains("postgresql")) {
-      rs = stmt.executeQuery("select MAX(cast(ds_id as integer)) as max from ds_jobimpl_" + id + ";");
-    } else if (driverName.contains("mysql")) {
-      rs = stmt.executeQuery("select max( cast(ds_id as unsigned) ) as max from DS_JOBIMPL_" + id + ";");
-    } else if (driverName.contains("oracle")) {
-      rs = stmt.executeQuery("select MAX(cast(ds_id as integer)) as max from ds_jobimpl_" + id);
-    }
+    prSt = ambaridatabase.getMaxDsIdFromTableId(c, id);
+
+    rs = prSt.executeQuery();
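+    // the query set builds the dialect-specific MAX(ds_id) statement, so no per-driver branching is needed here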
 
     while (rs.next()) {
       ds_id = rs.getString("max");
@@ -143,60 +130,45 @@ public class HiveHistoryQueryImpl {
     return num;
   }
 
-  public void insertRowinAmbaridb(String driverName, String dirname, int maxcount, long epochtime, Connection c, int id, String instance, int i) throws SQLException, IOException {
+
+  public void insertRowinAmbaridb(String dirname, int maxcount, long epochtime, Connection c, int id, String instance, int i, QuerySetAmbariDB ambaridatabase) throws SQLException, IOException {
 
     String maxcount1 = Integer.toString(maxcount);
     String epochtime1 = Long.toString(epochtime);
-    String ds_id = new String();
-    Statement stmt = null;
-    String sql = "";
-    String revsql = "";
-    stmt = c.createStatement();
-
-    if (driverName.contains("mysql")) {
-      sql = "INSERT INTO DS_JOBIMPL_" + id + " values ('" + maxcount1
-        + "','','','','','default'," + epochtime1 + ",0,'','','"
-        + dirname + "logs','admin','" + dirname
-        + "query.hql','','job','','','Unknown','" + dirname
-        + "','','Worksheet');";
-      revsql = "delete from  DS_JOBIMPL_" + id + " where ds_id='" + maxcount1 + "';";
-
-    } else if (driverName.contains("postgresql")) {
-      sql = "INSERT INTO ds_jobimpl_" + id + " values ('" + maxcount1
-        + "','','','','','default'," + epochtime1 + ",0,'','','"
-        + dirname + "logs','admin','" + dirname
-        + "query.hql','','job','','','Unknown','" + dirname
-        + "','','Worksheet');";
-      revsql = "delete from  ds_jobimpl_" + id + " where ds_id='" + maxcount1 + "';";
-
-    } else if (driverName.contains("oracle")) {
-      sql = "INSERT INTO ds_jobimpl_" + id + " values ('" + maxcount1
-        + "','','','','','default'," + epochtime1 + ",0,'','','"
-        + dirname + "logs','admin','" + dirname
-        + "query.hql','','job','','','Unknown','" + dirname
-        + "','','Worksheet')";
-      revsql = "delete from  ds_jobimpl_" + id + " where ds_id='" + maxcount1 + "'";
+    PreparedStatement prSt = null;
+    String revsql = null;
+
+    prSt = ambaridatabase.insertToHiveHistory(c, id, maxcount1, epochtime, dirname);
+
+    logger.info("The actual insert statement is " + prSt);
+
+    prSt.executeUpdate();
+
+    revsql = ambaridatabase.RevertSql(id, maxcount1);
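+    // the revert statement is recorded in RevertChangesService.xml so this insert can be undone later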
+
+    logger.info("adding revert sql hive history");
 
-    }
     wrtitetoalternatesqlfile(dirname, revsql, instance, i);
 
-    stmt.executeUpdate(sql);
 
   }
 
-  public int fetchInstanceTablename(String driverName, Connection c, String instance) throws SQLException {
+  public int fetchInstanceTablename(Connection c, String instance, QuerySetAmbariDB ambaridatabase) throws SQLException {
 
     String ds_id = new String();
     int id = 0;
     Statement stmt = null;
-    stmt = c.createStatement();
+    PreparedStatement prSt = null;
+
+
     ResultSet rs = null;
 
-    if (driverName.contains("oracle")) {
-      rs = stmt.executeQuery("select id from viewentity where class_name LIKE 'org.apache.ambari.view.hive.resources.jobs.viewJobs.JobImpl' and view_instance_name='" + instance + "'");
-    } else {
-      rs = stmt.executeQuery("select id from viewentity where class_name LIKE 'org.apache.ambari.view.hive.resources.jobs.viewJobs.JobImpl' and view_instance_name='" + instance + "';");
-    }
+
+    prSt = ambaridatabase.getTableIdFromInstanceName(c, instance);
+
+    logger.info("sql statement to fetch is from ambari instance:= =  " + prSt);
+
+    rs = prSt.executeQuery();
 
     while (rs.next()) {
       id = rs.getInt("id");
@@ -236,54 +208,71 @@ public class HiveHistoryQueryImpl {
 
   }
 
-  public String[] fetchFromHue(String username, String startdate, String endtime, Connection connection) throws ClassNotFoundException, SQLException {
+  public ArrayList<HiveModel> fetchFromHue(String username, String startdate, String endtime, Connection connection, QuerySet huedatabase) throws ClassNotFoundException, SQLException {
     int id = 0;
     int i = 0;
-    String[] query = new String[100];
+    ArrayList<HiveModel> hiveArrayList = new ArrayList<HiveModel>();
+
 
     try {
       connection.setAutoCommit(false);
+      PreparedStatement prSt = null;
       Statement statement = connection.createStatement();
+      String query;
+      ResultSet rs;
 
       ResultSet rs1 = null;
       if (username.equals("all")) {
       } else {
-        ResultSet rs = statement.executeQuery("select id from auth_user where username='" + username + "';");
+
+
+        prSt = huedatabase.getUseridfromUserName(connection, username);
+
+        rs = prSt.executeQuery();
+
         while (rs.next()) {
           id = rs.getInt("id");
         }
       }
+
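+      // four cases follow: no dates, end date only, start date only, or both; each maps to a dialect-specific prepared statement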
       if (startdate.equals("") && endtime.equals("")) {
         if (username.equals("all")) {
-          rs1 = statement.executeQuery("select query from beeswax_queryhistory;");
+          prSt = huedatabase.getQueriesNoStartDateNoEndDateAllUser(connection);
         } else {
-          rs1 = statement.executeQuery("select query from beeswax_queryhistory where owner_id =" + id + ";");
+          prSt = huedatabase.getQueriesNoStartDateNoEndDate(connection, id);
+
         }
 
-      } else if (!(startdate.equals("")) && !(endtime.equals(""))) {
+      } else if ((startdate.equals("")) && !(endtime.equals(""))) {
         if (username.equals("all")) {
-          rs1 = statement.executeQuery("select query from beeswax_queryhistory where submission_date >= date('" + startdate + "') AND submission_date < date('" + endtime + "');");
+          prSt = huedatabase.getQueriesNoStartDateYesEndDateAllUser(connection, endtime);
         } else {
-          rs1 = statement.executeQuery("select query from beeswax_queryhistory where owner_id =" + id + " AND submission_date >= date('" + startdate + "') AND submission_date <= date('" + endtime + "');");
+          prSt = huedatabase.getQueriesNoStartDateYesEndDate(connection, id, endtime);
+
         }
       } else if (!(startdate.equals("")) && (endtime.equals(""))) {
         if (username.equals("all")) {
-          rs1 = statement.executeQuery("select query from beeswax_queryhistory where submission_date >= date('" + startdate + "');");
+          prSt = huedatabase.getQueriesYesStartDateNoEndDateAllUser(connection, startdate);
         } else {
-          rs1 = statement.executeQuery("select query from beeswax_queryhistory where owner_id =" + id + " AND submission_date >= date('" + startdate + "');");
+          prSt = huedatabase.getQueriesYesStartDateNoEndDate(connection, id, startdate);
+
         }
 
-      } else if ((startdate.equals("")) && !(endtime.equals(""))) {
+      } else if (!(startdate.equals("")) && !(endtime.equals(""))) {
         if (username.equals("all")) {
-          rs1 = statement.executeQuery("select query from beeswax_queryhistory where submission_date < date('" + endtime + "');");
+          prSt = huedatabase.getQueriesYesStartDateYesEndDateAllUser(connection, startdate, endtime);
         } else {
-          rs1 = statement.executeQuery("select query from beeswax_queryhistory where owner_id =" + id + " AND submission_date < date('" + endtime + "');");
+          prSt = huedatabase.getQueriesYesStartDateYesEndDate(connection, id, startdate, endtime);
         }
       }
 
+      rs1 = prSt.executeQuery();
 
       while (rs1.next()) {
-        query[i] = rs1.getString("query");
+        HiveModel hivepojo = new HiveModel();
+        query = rs1.getString("query");
+        hivepojo.setQuery(query);
+        hiveArrayList.add(hivepojo);
         i++;
       }
 
@@ -300,7 +289,7 @@ public class HiveHistoryQueryImpl {
         logger.error("Sql exception error: " + e);
       }
     }
-    return query;
+    return hiveArrayList;
 
   }
 
@@ -316,42 +305,42 @@ public class HiveHistoryQueryImpl {
       bw.write(content);
       bw.close();
     } catch (IOException e) {
-      logger.error("IOException" , e);
+      logger.error("IOException", e);
     }
 
   }
 
   public void deleteFileQueryhql(String homedir) {
-    try{
+    try {
       File file = new File(homedir + "query.hql");
 
-      if(file.delete()){
+      if (file.delete()) {
         logger.info("temporary hql file deleted");
-      }else{
+      } else {
         logger.info("temporary hql file delete failed");
       }
 
-    }catch(Exception e){
+    } catch (Exception e) {
 
-     logger.error("File Exception ",e);
+      logger.error("File Exception ", e);
 
     }
 
   }
 
   public void deleteFileQueryLogs(String homedir) {
-    try{
+    try {
       File file = new File(homedir + "logs");
 
-      if(file.delete()){
+      if (file.delete()) {
         logger.info("temporary logs file deleted");
-      }else{
+      } else {
         logger.info("temporary logs file delete failed");
       }
 
-    }catch(Exception e){
+    } catch (Exception e) {
 
-      logger.error("File Exception ",e);
+      logger.error("File Exception ", e);
 
     }
 
@@ -370,7 +359,7 @@ public class HiveHistoryQueryImpl {
       bw.write(content);
       bw.close();
     } catch (IOException e) {
-      logger.error("IOException" , e);
+      logger.error("IOException", e);
     }
 
   }
@@ -404,7 +393,7 @@ public class HiveHistoryQueryImpl {
         }
       });
     } catch (Exception e) {
-      logger.error("Exception in Webhdfs" , e);
+      logger.error("Exception in Webhdfs", e);
     }
   }
 
@@ -435,7 +424,7 @@ public class HiveHistoryQueryImpl {
         }
       });
     } catch (Exception e) {
-      logger.error("Exception in Webhdfs" , e);
+      logger.error("Exception in Webhdfs", e);
     }
   }
 
@@ -492,7 +481,7 @@ public class HiveHistoryQueryImpl {
         }
       });
     } catch (Exception e) {
-      logger.error("Webhdfs exception" , e);
+      logger.error("Webhdfs exception", e);
     }
 
   }
@@ -553,7 +542,7 @@ public class HiveHistoryQueryImpl {
         }
       });
     } catch (Exception e) {
-      logger.error("Webhdfs exception" , e);
+      logger.error("Webhdfs exception", e);
 
     }
 

+ 74 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/historyquery/HiveHistoryStartJob.java

@@ -0,0 +1,74 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.huetoambarimigration.migration.hive.historyquery;
+
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.huetoambarimigration.persistence.utils.ItemNotFound;
+import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.MigrationResponse;
+import org.json.simple.JSONObject;
+
+import java.io.IOException;
+
+
+public class HiveHistoryStartJob extends Thread {
+
+  String username;
+  String instance;
+  String startdate;
+  String enddate;
+  String jobid;
+  ViewContext view;
+
+  public HiveHistoryStartJob(String username, String instance, String startdate, String enddate, String jobid, ViewContext view) {
+    this.username = username;
+    this.instance = instance;
+    this.startdate = startdate;
+    this.enddate = enddate;
+    this.jobid = jobid;
+    this.view = view;
+  }
+
+  @Override
+  public void run() {
+
+    MigrationResponse migrationresult = new MigrationResponse();
+
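+    // seeding the shared MigrationResponse so progress can be tracked from the start of the job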
+    migrationresult.setId(jobid);
+    migrationresult.setIntanceName(instance);
+    migrationresult.setUserNameofhue(username);
+    migrationresult.setProgressPercentage(0);
+
+    JSONObject response = new JSONObject();
+
+    /**
+     * the migration itself runs on this separate worker thread
+     */
+
+    HiveHistoryMigrationUtility hivehistoryquery = new HiveHistoryMigrationUtility();
+    try {
+      hivehistoryquery.hiveHistoryQueryMigration(username, instance, startdate, enddate, view, migrationresult, jobid);
+    } catch (IOException e) {
+      e.printStackTrace();
+    } catch (ItemNotFound itemNotFound) {
+      itemNotFound.printStackTrace();
+    }
+
+  }
+
+}

+ 126 - 231
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/service/hive/HiveSavedQueryImpl.java → contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryMigrationImplementation.java

@@ -16,43 +16,17 @@
  * limitations under the License.
  */
 
-package org.apache.ambari.view.huetoambarimigration.service.hive;
+package org.apache.ambari.view.huetoambarimigration.migration.hive.savedquery;
 
-import java.nio.charset.Charset;
-import java.security.PrivilegedExceptionAction;
-import java.sql.Connection;
-import java.sql.DriverManager;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.Statement;
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.io.BufferedInputStream;
-import java.io.BufferedReader;
-import java.io.BufferedWriter;
-import java.io.ByteArrayInputStream;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileWriter;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.util.ArrayList;
-import java.util.Calendar;
-import java.util.Date;
-import java.util.GregorianCalendar;
-import java.util.Scanner;
-import java.io.*;
-import java.net.URISyntaxException;
-import java.net.URL;
-
-import org.apache.ambari.view.huetoambarimigration.service.configurationcheck.ConfFileReader;
+import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.HiveModel;
+import org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.savedqueryset.QuerySetAmbariDB;
+import org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.hive.savedqueryset.QuerySet;
+import org.apache.ambari.view.huetoambarimigration.migration.configuration.ConfigurationCheckImplementation;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
 import org.apache.log4j.Logger;
 import org.jdom.Attribute;
 import org.jdom.Document;
@@ -61,16 +35,23 @@ import org.jdom.JDOMException;
 import org.jdom.input.SAXBuilder;
 import org.jdom.output.Format;
 import org.jdom.output.XMLOutputter;
-import org.json.JSONArray;
 import org.json.JSONObject;
-import org.apache.hadoop.security.authentication.client.AuthenticatedURL;
-import org.apache.hadoop.security.UserGroupInformation;
 
-import org.apache.ambari.view.huetoambarimigration.model.*;
+import java.io.*;
+import java.net.URISyntaxException;
+import java.nio.charset.Charset;
+import java.security.PrivilegedExceptionAction;
+import java.sql.*;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Calendar;
+import java.util.Date;
+import java.util.GregorianCalendar;
 
-public class HiveSavedQueryImpl {
+public class HiveSavedQueryMigrationImplementation {
 
-  static final Logger logger = Logger.getLogger(HiveSavedQueryImpl.class);
+  static final Logger logger = Logger.getLogger(HiveSavedQueryMigrationImplementation.class);
 
   private static String readAll(Reader rd) throws IOException {
     StringBuilder sb = new StringBuilder();
@@ -92,7 +73,7 @@ public class HiveSavedQueryImpl {
 
     xmlOutput.setFormat(Format.getPrettyFormat());
 
-    File xmlfile = new File(ConfFileReader.getHomeDir() + "RevertChange.xml");
+    File xmlfile = new File(ConfigurationCheckImplementation.getHomeDir() + "RevertChangesService.xml");
 
     if (xmlfile.exists()) {
       String iteration = Integer.toString(i + 1);
@@ -105,18 +86,17 @@ public class HiveSavedQueryImpl {
 
         Element record = new Element("RevertRecord");
         record.setAttribute(new Attribute("id", iteration));
-        record.addContent(new Element("datetime").setText(currentDate
-          .toString()));
+        record.addContent(new Element("datetime").setText(currentDate.toString()));
         record.addContent(new Element("dirname").setText(dirname));
         record.addContent(new Element("instance").setText(instance));
         record.addContent(new Element("query").setText(content));
 
         rootNode.addContent(record);
-        xmlOutput.output(doc, new FileWriter(ConfFileReader.getHomeDir() + "RevertChange.xml"));
+        xmlOutput.output(doc, new FileWriter(ConfigurationCheckImplementation.getHomeDir() + "RevertChangesService.xml"));
 
       } catch (JDOMException e) {
         // TODO Auto-generated catch block
-        logger.error("JDOMException: " , e);
+        logger.error("JDOMException: ", e);
       }
 
     } else {
@@ -137,7 +117,7 @@ public class HiveSavedQueryImpl {
 
         doc.getRootElement().addContent(record);
 
-        xmlOutput.output(doc, new FileWriter(ConfFileReader.getHomeDir() + "RevertChange.xml"));
+        xmlOutput.output(doc, new FileWriter(ConfigurationCheckImplementation.getHomeDir() + "RevertChangesService.xml"));
 
       } catch (IOException io) {
 
@@ -147,25 +127,18 @@ public class HiveSavedQueryImpl {
 
   }
 
-  public int fetchMaxidforSavedQueryHive(String driverName, Connection c, int id)
-    throws SQLException {
+  public int fetchMaxidforSavedQueryHive(Connection c, int id, QuerySetAmbariDB ambaridatabase) throws SQLException {
 
     String ds_id = null;
-    Statement stmt = null;
-    stmt = c.createStatement();
     ResultSet rs = null;
+    PreparedStatement prSt = null;
 
-    if (driverName.contains("postgresql")) {
-      rs = stmt.executeQuery("select MAX(cast(ds_id as integer)) as max from ds_savedquery_" + id + ";");
-    } else if (driverName.contains("mysql")) {
-      rs = stmt.executeQuery("select max(cast(ds_id as unsigned) ) as max from DS_SAVEDQUERY_" + id + ";");
-    } else if (driverName.contains("oracle")) {
-      rs = stmt.executeQuery("select MAX(cast(ds_id as integer)) as max from ds_savedquery_" + id + ";");
-    }
+    prSt = ambaridatabase.getMaxDsIdFromTableIdSavedquery(c, id);
+
+    rs = prSt.executeQuery();
 
     while (rs.next()) {
       ds_id = rs.getString("max");
-
     }
 
     int num;
@@ -174,85 +147,70 @@ public class HiveSavedQueryImpl {
     } else {
       num = Integer.parseInt(ds_id);
     }
-
     return num;
   }
 
-  public int fetchInstancetablenameForSavedqueryHive(String driverName, Connection c,
-                                                     String instance) throws SQLException {
+  public int fetchInstancetablenameForSavedqueryHive(Connection c, String instance, QuerySetAmbariDB ambaridatabase) throws SQLException {
 
     String ds_id = new String();
     int id = 0;
     Statement stmt = null;
+    PreparedStatement prSt = null;
+
 
-    stmt = c.createStatement();
     ResultSet rs = null;
 
-    if (driverName.contains("oracle")) {
-      rs = stmt
-        .executeQuery("select * from viewentity where class_name LIKE 'org.apache.ambari.view.hive.resources.savedQueries.SavedQuery' and view_instance_name='"
-          + instance + "'");
-    } else {
-      rs = stmt
-        .executeQuery("select * from viewentity where class_name LIKE 'org.apache.ambari.view.hive.resources.savedQueries.SavedQuery' and view_instance_name='"
-          + instance + "';");
-    }
 
+    prSt = ambaridatabase.getTableIdFromInstanceNameSavedquery(c, instance);
+
+    logger.info("sql statement to fetch is from ambari instance:= =  " + prSt);
+
+    rs = prSt.executeQuery();
 
     while (rs.next()) {
       id = rs.getInt("id");
-
     }
-
     return id;
   }
 
-  public int fetchInstanceTablenameHiveHistory(String driverName, Connection c,
-                                               String instance) throws SQLException {
+  public int fetchInstanceTablenameHiveHistory(Connection c, String instance, QuerySetAmbariDB ambaridatabase) throws SQLException {
     String ds_id = new String();
     int id = 0;
     Statement stmt = null;
+    PreparedStatement prSt = null;
 
 
-    stmt = c.createStatement();
     ResultSet rs = null;
 
-    if (driverName.contains("oracle")) {
-      rs = stmt.executeQuery("select id from viewentity where class_name LIKE 'org.apache.ambari.view.hive.resources.jobs.viewJobs.JobImpl' and view_instance_name='" + instance + "'");
-    } else {
-      rs = stmt.executeQuery("select id from viewentity where class_name LIKE 'org.apache.ambari.view.hive.resources.jobs.viewJobs.JobImpl' and view_instance_name='" + instance + "';");
-    }
 
+    prSt = ambaridatabase.getTableIdFromInstanceNameHistoryquery(c, instance);
+
+    logger.info("sql statement to fetch is from ambari instance:= =  " + prSt);
+
+    rs = prSt.executeQuery();
 
     while (rs.next()) {
       id = rs.getInt("id");
-      System.out.println("id is " + id);
-
     }
-
     return id;
 
   }
 
-  public int fetchMaxdsidFromHiveHistory(String driverName, Connection c, int id)
+  public int fetchMaxdsidFromHiveHistory(Connection c, int id, QuerySetAmbariDB ambaridatabase)
     throws SQLException {
 
     String ds_id = null;
-    Statement stmt = null;
-
-    stmt = c.createStatement();
     ResultSet rs = null;
+    PreparedStatement prSt = null;
+
+    prSt = ambaridatabase.getMaxDsIdFromTableIdHistoryquery(c, id);
+
+    rs = prSt.executeQuery();
 
-    if (driverName.contains("postgresql")) {
-      rs = stmt.executeQuery("select MAX(cast(ds_id as integer)) as max from ds_jobimpl_" + id + ";");
-    } else if (driverName.contains("mysql")) {
-      rs = stmt.executeQuery("select max( cast(ds_id as unsigned) ) as max from DS_JOBIMPL_" + id + ";");
-    } else if (driverName.contains("oracle")) {
-      rs = stmt.executeQuery("select MAX(cast(ds_id as integer)) as max from ds_jobimpl_" + id);
-    }
     while (rs.next()) {
       ds_id = rs.getString("max");
     }
+
     int num;
     if (ds_id == null) {
       num = 1;
@@ -264,92 +222,50 @@ public class HiveSavedQueryImpl {
 
 
   /**/
-  public void insertRowHiveHistory(String driverName, String dirname, int maxcount,
-                                   long epochtime, Connection c, int id, String instance, int i)
+  public void insertRowHiveHistory(String dirname, int maxcount, long epochtime, Connection c, int id, String instance, int i, QuerySetAmbariDB ambaridatabase)
     throws SQLException, IOException {
-    String maxcount1 = Integer.toString(maxcount);
 
+    String maxcount1 = Integer.toString(maxcount);
     String epochtime1 = Long.toString(epochtime);
+    PreparedStatement prSt = null;
+    String revsql = null;
 
-    String ds_id = new String();
-    Statement stmt = null;
+    prSt = ambaridatabase.insertToHiveHistory(c, id, maxcount1, epochtime, dirname);
 
-    stmt = c.createStatement();
-    String sql = "";
-    String revsql = "";
-
-    if (driverName.contains("mysql")) {
-      sql = "INSERT INTO DS_JOBIMPL_" + id + " values ('" + maxcount1
-        + "','','','','','default'," + epochtime1 + ",0,'','','"
-        + dirname + "logs','admin','" + dirname
-        + "query.hql','','job','','','Unknown','" + dirname
-        + "','','Worksheet');";
-
-      revsql = "delete from  DS_JOBIMPL_" + id + " where ds_id='"
-        + maxcount1 + "';";
-
-    } else if (driverName.contains("postgresql")) {
-      sql = "INSERT INTO ds_jobimpl_" + id + " values ('" + maxcount1
-        + "','','','','','default'," + epochtime1 + ",0,'','','"
-        + dirname + "logs','admin','" + dirname
-        + "query.hql','','job','','','Unknown','" + dirname
-        + "','','Worksheet');";
-
-      revsql = "delete from  ds_jobimpl_" + id + " where ds_id='"
-        + maxcount1 + "';";
-
-    } else if (driverName.contains("oracle")) {
-      sql = "INSERT INTO ds_jobimpl_" + id + " values ('" + maxcount1
-        + "','','','','','default'," + epochtime1 + ",0,'','','"
-        + dirname + "logs','admin','" + dirname
-        + "query.hql','','job','','','Unknown','" + dirname
-        + "','','Worksheet')";
-      revsql = "delete from  ds_jobimpl_" + id + " where ds_id='"
-        + maxcount1 + "'";
+    System.out.println("the actual query is " + prSt);
+
+    logger.info("The actual insert statement is " + prSt);
+
+    prSt.executeUpdate();
+
+    revsql = ambaridatabase.revertSqlHistoryQuery(id, maxcount1);
+
+    logger.info("adding revert sqlsavedquery in hivehistory ");
 
-    }
-    stmt.executeUpdate(sql);
     wrtitetoalternatesqlfile(dirname, revsql, instance, i);
   }
 
-  public void insertRowinSavedQuery(String driverName, int maxcount, String database,
-                                    String dirname, String query, String name, Connection c, int id,
-                                    String instance, int i) throws SQLException, IOException {
+  public void insertRowinSavedQuery(int maxcount, String database, String dirname, String query, String name, Connection c, int id, String instance, int i, QuerySetAmbariDB ambaridatabase) throws SQLException, IOException {
+
     String maxcount1 = Integer.toString(maxcount);
+    String revsql = null;
 
-    String ds_id = new String();
-    Statement stmt = null;
-    String sql = "";
-    String revsql = "";
-    stmt = c.createStatement();
+    PreparedStatement prSt = null;
 
-    if (driverName.contains("mysql")) {
-      sql = "INSERT INTO DS_SAVEDQUERY_" + id + " values ('"
-        + maxcount1 + "','" + database + "','" + "admin" + "','"
-        + dirname + "query.hql','" + query + "','" + name + "');";
+    prSt = ambaridatabase.insertToHiveSavedQuery(c, id, maxcount1, database, dirname, query, name);
 
-      revsql = "delete from  DS_SAVEDQUERY_" + id + " where ds_id='"
-        + maxcount1 + "';";
+    System.out.println("the actual query is " + prSt);
 
-    } else if (driverName.contains("postgresql")) {
-      sql = "INSERT INTO ds_savedquery_" + id + " values ('"
-        + maxcount1 + "','" + database + "','" + "admin" + "','"
-        + dirname + "query.hql','" + query + "','" + name + "');";
+    logger.info("The actual insert statement is " + prSt);
 
-      revsql = "delete from  ds_savedquery_" + id + " where ds_id='"
-        + maxcount1 + "';";
+    prSt.executeUpdate();
 
-    } else if (driverName.contains("oracle")) {
-      sql = "INSERT INTO ds_savedquery_" + id + " values ('"
-        + maxcount1 + "','" + database + "','" + "admin" + "','"
-        + dirname + "query.hql','" + query + "','" + name + "')";
+    revsql = ambaridatabase.revertSqlSavedQuery(id, maxcount1);
 
-      revsql = "delete from  ds_savedquery_" + id + " where ds_id='"
-        + maxcount1 + "'";
+    logger.info("adding revert sqlsavedquery ");
 
-    }
     wrtitetoalternatesqlfile(dirname, revsql, instance, i);
-    stmt.executeUpdate(sql);
+
   }
 
   public long getEpochTime() throws ParseException {
@@ -386,89 +302,68 @@ public class HiveSavedQueryImpl {
 
   }
 
-  public ArrayList<PojoHive> fetchFromHuedb(String username,
-                                            String startdate, String endtime, Connection connection)
+  public ArrayList<HiveModel> fetchFromHuedb(String username, String startdate, String endtime, Connection connection, QuerySet huedatabase)
     throws ClassNotFoundException, IOException {
     int id = 0;
     int i = 0;
     String[] query = new String[100];
-    ArrayList<PojoHive> hiveArrayList = new ArrayList<PojoHive>();
+    ArrayList<HiveModel> hiveArrayList = new ArrayList<HiveModel>();
     ResultSet rs1 = null;
 
     try {
       Statement statement = connection.createStatement();
+      connection.setAutoCommit(false);
+      PreparedStatement prSt = null;
+      ResultSet rs;
       if (username.equals("all")) {
       } else {
-        ResultSet rs = statement
-          .executeQuery("select id from auth_user where username='"
-            + username + "';");
-        while (rs.next()) {
 
-          id = rs.getInt("id");
+        prSt = huedatabase.getUseridfromUserName(connection, username);
 
-        }
+        rs = prSt.executeQuery();
 
+        while (rs.next()) {
+          id = rs.getInt("id");
+        }
       }
+
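+      // four cases follow: no dates, end date only, start date only, or both; each maps to a dialect-specific prepared statement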
       if (startdate.equals("") && endtime.equals("")) {
         if (username.equals("all")) {
-          rs1 = statement
-            .executeQuery("select data,name,owner_id from beeswax_savedquery;");
-
+          prSt = huedatabase.getQueriesNoStartDateNoEndDateAllUser(connection);
         } else {
-          rs1 = statement
-            .executeQuery("select data,name,owner_id from beeswax_savedquery where name!='My saved query'and owner_id ="
-              + id + ";");
+          prSt = huedatabase.getQueriesNoStartDateNoEndDate(connection, id);
+
         }
 
-      } else if (!(startdate.equals("")) && !(endtime.equals(""))) {
+      } else if ((startdate.equals("")) && !(endtime.equals(""))) {
         if (username.equals("all")) {
-          rs1 = statement
-            .executeQuery("select data,name,owner_id from beeswax_savedquery where name!='My saved query' AND mtime >= date('"
-              + startdate
-              + "') AND mtime <= date('"
-              + endtime + "');");
+          prSt = huedatabase.getQueriesNoStartDateYesEndDateAllUser(connection, endtime);
         } else {
-          rs1 = statement
-            .executeQuery("select data,name,owner_id from beeswax_savedquery where name!='My saved query'and owner_id ="
-              + id
-              + " AND mtime >= date('"
-              + startdate
-              + "') AND mtime <= date('"
-              + endtime
-              + "');");
-        }
+          prSt = huedatabase.getQueriesNoStartDateYesEndDate(connection, id, endtime);
 
+        }
       } else if (!(startdate.equals("")) && (endtime.equals(""))) {
         if (username.equals("all")) {
-          rs1 = statement
-            .executeQuery("select data,name,owner_id from beeswax_savedquery where name!='My saved query'and  mtime >= date('"
-              + startdate + "');");
+          prSt = huedatabase.getQueriesYesStartDateNoEndDateAllUser(connection, startdate);
         } else {
-          rs1 = statement
-            .executeQuery("select data,name,owner_id from beeswax_savedquery where name!='My saved query'and owner_id ="
-              + id
-              + " AND mtime >= date('"
-              + startdate
-              + "');");
+          prSt = huedatabase.getQueriesYesStartDateNoEndDate(connection, id, startdate);
+
         }
 
-      } else if ((startdate.equals("")) && !(endtime.equals(""))) {
+      } else if (!(startdate.equals("")) && !(endtime.equals(""))) {
         if (username.equals("all")) {
-          rs1 = statement
-            .executeQuery("select data,name,owner_id from beeswax_savedquery where name!='My saved query' AND mtime <= date('"
-              + endtime + "');");
+          prSt = huedatabase.getQueriesYesStartDateYesEndDateAllUser(connection, startdate, endtime);
         } else {
-          rs1 = statement
-            .executeQuery("select data,name,owner_id from beeswax_savedquery where name!='My saved query'and owner_id ="
-              + id
-              + " AND mtime <= date('"
-              + endtime
-              + "');");
+          prSt = huedatabase.getQueriesYesStartDateYesEndDate(connection, id, startdate, endtime);
         }
 
       }
+
+      rs1 = prSt.executeQuery();
+
+
       while (rs1.next()) {
-        PojoHive hivepojo = new PojoHive();
+        HiveModel hivepojo = new HiveModel();
         String name = rs1.getString("name");
         String temp = rs1.getString("data");
         InputStream is = new ByteArrayInputStream(temp.getBytes());
@@ -490,16 +385,16 @@ public class HiveSavedQueryImpl {
         i++;
       }
 
-    } catch (SQLException e) {
-      // if the error message is "out of memory",
-      // it probably means no database file is found
-      System.err.println(e.getMessage());
-    } finally {
+    } catch (SQLException e2) {
+      logger.error("SQL exception while fetching from the hue database", e2);
+    } finally {
       try {
         if (connection != null)
           connection.close();
       } catch (SQLException e) {
-        logger.error("sql connection exception" , e);
+        logger.error("sql connection exception", e);
       }
     }
 
@@ -521,42 +416,42 @@ public class HiveSavedQueryImpl {
       bw.close();
 
     } catch (IOException e) {
-      logger.error("IOException: " , e);
+      logger.error("IOException: ", e);
     }
 
   }
 
   public void deleteFileQueryhql(String homedir) {
-    try{
+    try {
       File file = new File(homedir + "query.hql");
 
-      if(file.delete()){
+      if (file.delete()) {
         logger.info("temporary hql file deleted");
-      }else{
+      } else {
         logger.info("temporary hql file delete failed");
       }
 
-    }catch(Exception e){
+    } catch (Exception e) {
 
-      logger.error("File Exception ",e);
+      logger.error("File Exception ", e);
 
     }
 
   }
 
   public void deleteFileQueryLogs(String homedir) {
-    try{
+    try {
       File file = new File(homedir + "logs");
 
-      if(file.delete()){
+      if (file.delete()) {
         logger.info("temporary logs file deleted");
-      }else{
+      } else {
         logger.info("temporary logs file delete failed");
       }
 
-    }catch(Exception e){
+    } catch (Exception e) {
 
-      logger.error("File Exception ",e);
+      logger.error("File Exception ", e);
 
     }
 
@@ -580,7 +475,7 @@ public class HiveSavedQueryImpl {
       bw.close();
 
     } catch (IOException e) {
-      logger.error("IOException: " , e);
+      logger.error("IOException: ", e);
     }
 
   }
@@ -615,7 +510,7 @@ public class HiveSavedQueryImpl {
         }
       });
     } catch (Exception e) {
-      logger.error("Webhdfs: " , e);
+      logger.error("Webhdfs: ", e);
     }
   }
 
@@ -649,7 +544,7 @@ public class HiveSavedQueryImpl {
         }
       });
     } catch (Exception e) {
-      logger.error("Webhdfs: " , e);
+      logger.error("Webhdfs: ", e);
     }
   }
 
@@ -706,7 +601,7 @@ public class HiveSavedQueryImpl {
         }
       });
     } catch (Exception e) {
-      logger.error("Webhdfs exception" , e);
+      logger.error("Webhdfs exception", e);
     }
 
   }
@@ -770,7 +665,7 @@ public class HiveSavedQueryImpl {
         }
       });
     } catch (Exception e) {
-      logger.error("Webhdfs exception" , e);
+      logger.error("Webhdfs exception", e);
     }
 
   }

+ 281 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryMigrationUtility.java

@@ -0,0 +1,281 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package org.apache.ambari.view.huetoambarimigration.migration.hive.savedquery;
+
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceAmbariDatabase;
+import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceHueDatabase;
+import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.HiveModel;
+import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.MigrationModel;
+import org.apache.ambari.view.huetoambarimigration.persistence.utils.ItemNotFound;
+import org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.savedqueryset.MysqlQuerySetAmbariDB;
+import org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.savedqueryset.OracleQuerySetAmbariDB;
+import org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.savedqueryset.PostgressQuerySetAmbariDB;
+import org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.savedqueryset.QuerySetAmbariDB;
+import org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.hive.savedqueryset.*;
+import org.apache.ambari.view.huetoambarimigration.resources.PersonalCRUDResourceManager;
+import org.apache.ambari.view.huetoambarimigration.resources.scripts.MigrationResourceManager;
+import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.MigrationResponse;
+import org.apache.ambari.view.huetoambarimigration.migration.configuration.ConfigurationCheckImplementation;
+import org.apache.log4j.Logger;
+
+import java.beans.PropertyVetoException;
+import java.io.IOException;
+import java.net.URISyntaxException;
+import java.sql.Connection;
+import java.sql.SQLException;
+import java.text.ParseException;
+import java.util.ArrayList;
+
+public class HiveSavedQueryMigrationUtility {
+
+
+
+  protected MigrationResourceManager resourceManager = null;
+
+  public synchronized PersonalCRUDResourceManager<MigrationResponse> getResourceManager(ViewContext view) {
+    if (resourceManager == null) {
+      resourceManager = new MigrationResourceManager(view);
+    }
+    return resourceManager;
+  }
+
+  public MigrationModel hiveSavedQueryMigration(String username, String instance, String startDate, String endDate, ViewContext view, MigrationResponse migrationresult, String jobid) throws IOException, ItemNotFound {
+
+    long startTime = System.currentTimeMillis();
+
+    final Logger logger = Logger.getLogger(HiveSavedQueryMigrationUtility.class);
+
+    Connection connectionAmbaridb = null;
+    Connection connectionHuedb = null;
+
+    int i = 0;
+
+    logger.info("-------------------------------------");
+    logger.info("hive saved query Migration started");
+    logger.info("-------------------------------------");
+    logger.info("start date: " + startDate);
+    logger.info("enddate date: " + endDate);
+    logger.info("instance is: " + instance);
+    logger.info("hue username is : " + username);
+
+    HiveSavedQueryMigrationImplementation hivesavedqueryimpl = new HiveSavedQueryMigrationImplementation();/* creating Implementation object  */
+
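+    /* selecting the Hue-side query set implementation according to the configured JDBC driver name */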
+    QuerySet huedatabase = null;
+
+    if (view.getProperties().get("huedrivername").contains("mysql")) {
+      huedatabase = new MysqlQuerySet();
+    } else if (view.getProperties().get("huedrivername").contains("postgresql")) {
+      huedatabase = new PostgressQuerySet();
+    } else if (view.getProperties().get("huedrivername").contains("sqlite")) {
+      huedatabase = new SqliteQuerySet();
+    } else if (view.getProperties().get("huedrivername").contains("oracle")) {
+      huedatabase = new OracleQuerySet();
+    }
+
+    QuerySetAmbariDB ambaridatabase = null;
+
+    if (view.getProperties().get("ambaridrivername").contains("mysql")) {
+      ambaridatabase = new MysqlQuerySetAmbariDB();
+    } else if (view.getProperties().get("ambaridrivername").contains("postgresql")) {
+      ambaridatabase = new PostgressQuerySetAmbariDB();
+    } else if (view.getProperties().get("ambaridrivername").contains("oracle")) {
+      ambaridatabase = new OracleQuerySetAmbariDB();
+    }
+
+    int maxcountForHivehistroryAmbaridb, maxCountforSavequeryAmbaridb;
+    String time = null;
+    Long epochtime = null;
+    String dirNameforHiveSavedquery;
+    ArrayList<HiveModel> dbpojoHiveSavedQuery = new ArrayList<HiveModel>();
+
+    try {
+
+      connectionHuedb = DataSourceHueDatabase.getInstance(view.getProperties().get("huedrivername"), view.getProperties().get("huejdbcurl"), view.getProperties().get("huedbusername"), view.getProperties().get("huedbpassword")).getConnection(); /* fetching connection to hue DB */
+
+      dbpojoHiveSavedQuery = hivesavedqueryimpl.fetchFromHuedb(username, startDate, endDate, connectionHuedb, huedatabase); /* fetching data from the hue db and storing it in a model */
+
+      for (int j = 0; j < dbpojoHiveSavedQuery.size(); j++) {
+        logger.info("The query fetched from Hue: " + dbpojoHiveSavedQuery.get(j).getQuery());
+      }
+
+
+      if (dbpojoHiveSavedQuery.size() == 0) { /* no data fetched from the hue db for the given search criteria */
+
+        migrationresult.setIsNoQuerySelected("yes");
+        migrationresult.setProgressPercentage(0);
+        migrationresult.setNumberOfQueryTransfered(0);
+        migrationresult.setTotalNoQuery(dbpojoHiveSavedQuery.size());
+        getResourceManager(view).update(migrationresult, jobid);
+        logger.info("No queries has been selected acccording to your criteria");
+
+        logger.info("no hive saved query has been selected from hue according to your criteria of searching");
+
+
+      } else {
+
+        connectionAmbaridb = DataSourceAmbariDatabase.getInstance(view.getProperties().get("ambaridrivername"), view.getProperties().get("ambarijdbcurl"), view.getProperties().get("ambaridbusername"), view.getProperties().get("ambaridbpassword")).getConnection();/* connecting to ambari DB */
+        connectionAmbaridb.setAutoCommit(false);
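+        // auto-commit is disabled so all inserts commit together at the end, or roll back as one on failure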
+
+        for (i = 0; i < dbpojoHiveSavedQuery.size(); i++) {
+
+          logger.info("_____________________");
+          logger.info("Loop No." + (i + 1));
+          logger.info("_____________________");
+
+          float calc = ((float) (i + 1)) / dbpojoHiveSavedQuery.size() * 100;
+          int progressPercentage = Math.round(calc);
+
+          migrationresult.setIsNoQuerySelected("no");
+          migrationresult.setProgressPercentage(progressPercentage);
+          migrationresult.setNumberOfQueryTransfered(i+1);
+          migrationresult.setTotalNoQuery(dbpojoHiveSavedQuery.size());
+          getResourceManager(view).update(migrationresult, jobid);
+
+
+
+
+          logger.info("query fetched from hue:-  " + dbpojoHiveSavedQuery.get(i).getQuery());
+
+          int tableIdSavedQuery = hivesavedqueryimpl.fetchInstancetablenameForSavedqueryHive(connectionAmbaridb, instance, ambaridatabase); /* fetching the table id for hive saved queries from the given instance name */
+
+          int tableIdHistoryHive = hivesavedqueryimpl.fetchInstanceTablenameHiveHistory(connectionAmbaridb, instance,ambaridatabase); /* fetching the instance table name for migration history query from the given instance name */
+
+          logger.info("Table name are fetched from instance name.");
+
+          hivesavedqueryimpl.writetoFilequeryHql(dbpojoHiveSavedQuery.get(i).getQuery(), ConfigurationCheckImplementation.getHomeDir()); /* writing the query to a local .hql file */
+
+          hivesavedqueryimpl.writetoFileLogs(ConfigurationCheckImplementation.getHomeDir()); /* writing logs to a local file */
+
+          logger.info(".hql and logs file are saved in temporary directory");
+
+          maxCountForHiveHistoryAmbaridb = (hivesavedqueryimpl.fetchMaxdsidFromHiveHistory(connectionAmbaridb, tableIdHistoryHive, ambaridatabase) + 1); /* fetching the maximum ds_id from the hive history table */
+
+          maxCountForSavedQueryAmbaridb = (hivesavedqueryimpl.fetchMaxidforSavedQueryHive(connectionAmbaridb, tableIdSavedQuery, ambaridatabase) + 1); /* fetching the maximum ds_id from the hive saved query table */
+
+          time = hivesavedqueryimpl.getTime();/* getting system time */
+
+          epochtime = hivesavedqueryimpl.getEpochTime();/* getting epoch time */
+
+          dirNameforHiveSavedquery = "/user/admin/migration/jobs/migration-job-" + maxCountForHiveHistoryAmbaridb + "-"
+            + time + "/"; // building the hdfs directory name
+
+          logger.info("Directory will be created in HDFS: " + dirNameforHiveSavedquery);
+
+          hivesavedqueryimpl.insertRowHiveHistory(dirNameforHiveSavedquery, maxCountForHiveHistoryAmbaridb, epochtime, connectionAmbaridb, tableIdHistoryHive, instance, i, ambaridatabase); // inserting into the hive history table
+
+          logger.info("Row inserted in hive history table.");
+
+          if (view.getProperties().get("KerberoseEnabled").equals("y")) {
+
+            logger.info("Kerberose Enabled");
+            hivesavedqueryimpl.createDirHiveSecured(dirNameforHiveSavedquery, view.getProperties().get("namenode_URI_Ambari"));// creating directory in hdfs in kerborized cluster
+            hivesavedqueryimpl.putFileinHdfsSecured(ConfigurationCheckImplementation.getHomeDir() + "query.hql", dirNameforHiveSavedquery, view.getProperties().get("namenode_URI_Ambari"));// putting .hql file in hdfs in kerberoroized cluster
+            hivesavedqueryimpl.putFileinHdfsSecured(ConfigurationCheckImplementation.getHomeDir() + "logs", dirNameforHiveSavedquery, view.getProperties().get("namenode_URI_Ambari"));// putting logs file in hdfs in kerberoroized cluster
+
+          } else {
+
+            logger.info("Kerberose Not Enabled");
+            hivesavedqueryimpl.createDirHive(dirNameforHiveSavedquery, view.getProperties().get("namenode_URI_Ambari"));// creating directory in hdfs
+            hivesavedqueryimpl.putFileinHdfs(ConfigurationCheckImplementation.getHomeDir() + "query.hql", dirNameforHiveSavedquery, view.getProperties().get("namenode_URI_Ambari"));// putting .hql file in hdfs directory
+            hivesavedqueryimpl.putFileinHdfs(ConfigurationCheckImplementation.getHomeDir() + "logs", dirNameforHiveSavedquery, view.getProperties().get("namenode_URI_Ambari"));// putting logs file in hdfs
+          }
+
+          // inserting into the hive saved query table
+          hivesavedqueryimpl.insertRowinSavedQuery(maxCountForSavedQueryAmbaridb, dbpojoHiveSavedQuery.get(i).getDatabase(), dirNameforHiveSavedquery, dbpojoHiveSavedQuery.get(i).getQuery(), dbpojoHiveSavedQuery.get(i).getOwner(), connectionAmbaridb, tableIdSavedQuery, instance, i, ambaridatabase);
+
+        }
+        connectionAmbaridb.commit();
+
+      }
+
+
+    } catch (SQLException e) {
+
+      logger.error("SQL exception: ", e);
+      try {
+        connectionAmbaridb.rollback();
+        logger.info("roll back done");
+      } catch (SQLException e1) {
+        logger.error("Rollback error: ", e1);
+
+      }
+    } catch (ClassNotFoundException e1) {
+      logger.error("Class not found : " , e1);
+    } catch (ParseException e) {
+      logger.error("ParseException: " , e);
+    } catch (URISyntaxException e) {
+      logger.error("URISyntaxException: " , e);
+    } catch (PropertyVetoException e) {
+      logger.error("PropertyVetoException:" , e);
+    } finally {
+      if (null != connectionAmbaridb)
+        try {
+          connectionAmbaridb.close();
+        } catch (SQLException e) {
+          logger.error("Error in connection close", e);
+        }
+    }
+
+
+    hivesavedqueryimpl.deleteFileQueryhql(ConfigurationCheckImplementation.getHomeDir());
+    hivesavedqueryimpl.deleteFileQueryLogs(ConfigurationCheckImplementation.getHomeDir());
+
+    long stopTime = System.currentTimeMillis();
+    long elapsedTime = stopTime - startTime;
+
+    MigrationModel model = new MigrationModel();
+
+    migrationresult.setJobtype("hivesavedquerymigration");
+    migrationresult.setTotalTimeTaken(String.valueOf(elapsedTime));
+    getResourceManager(view).update(migrationresult, jobid);
+
+
+
+    logger.info("-------------------------------");
+    logger.info("hive saved query Migration end");
+    logger.info("--------------------------------");
+
+    return model;
+
+  }
+}
+
+
+
+
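The four-way driver-name dispatch above recurs verbatim in every *MigrationUtility class in this commit. A minimal sketch of one way to centralize it (not part of the patch; it assumes Java 8's java.util.function.Supplier is available on the view classpath):

import java.util.LinkedHashMap;
import java.util.Map;
import java.util.function.Supplier;

// Hypothetical helper, not in this patch: maps a driver-name fragment to a
// query-set constructor so each migration utility can drop its if/else chain.
class QuerySetResolver<T> {
  private final Map<String, Supplier<T>> factories = new LinkedHashMap<String, Supplier<T>>();

  QuerySetResolver<T> register(String driverFragment, Supplier<T> factory) {
    factories.put(driverFragment, factory);
    return this;
  }

  // Mirrors the contains(...) checks used throughout this patch.
  T forDriver(String driverName) {
    for (Map.Entry<String, Supplier<T>> e : factories.entrySet()) {
      if (driverName.contains(e.getKey())) {
        return e.getValue().get();
      }
    }
    throw new IllegalArgumentException("unsupported driver: " + driverName);
  }
}

A utility would then resolve its query set with, for example, new QuerySetResolver<QuerySetAmbariDB>().register("mysql", MysqlQuerySetAmbariDB::new).register("postgresql", PostgressQuerySetAmbariDB::new).register("oracle", OracleQuerySetAmbariDB::new).forDriver(view.getProperties().get("ambaridrivername")).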

+ 77 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryStartJob.java

@@ -0,0 +1,77 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.huetoambarimigration.migration.hive.savedquery;
+
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.huetoambarimigration.persistence.utils.ItemNotFound;
+import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.MigrationResponse;
+
+import java.io.IOException;
+
+
+public class HiveSavedQueryStartJob extends Thread {
+
+  String username;
+  String instance;
+  String startdate;
+  String enddate;
+  String jobid;
+  ViewContext view;
+
+  public HiveSavedQueryStartJob(String username, String instance, String startdate, String enddate, String jobid, ViewContext view) {
+    this.username = username;
+    this.instance = instance;
+    this.startdate = startdate;
+    this.enddate = enddate;
+    this.jobid = jobid;
+    this.view = view;
+  }
+
+  @Override
+  public void run() {
+
+    MigrationResponse migrationresult = new MigrationResponse();
+
+    migrationresult.setId(jobid);
+    migrationresult.setIntanceName(instance);
+    migrationresult.setUserNameofhue(username);
+    migrationresult.setProgressPercentage(0);
+
+    /* run the migration in this worker thread */
+
+    HiveSavedQueryMigrationUtility hivesavedquery = new HiveSavedQueryMigrationUtility();
+    try {
+      hivesavedquery.hiveSavedQueryMigration(username,instance,startdate,enddate,view,migrationresult,jobid);
+    }
+    catch (IOException e) {
+      e.printStackTrace();
+    } catch (ItemNotFound itemNotFound) {
+      itemNotFound.printStackTrace();
+    }
+
+  }
+
+}
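
For context, a hedged sketch of how a caller would presumably launch this worker. The REST resource that does so is not shown in this section; the job-id scheme, the instance name, and the viewContext variable are assumptions:

// Hypothetical caller of HiveSavedQueryStartJob (viewContext assumed in scope).
String jobId = java.util.UUID.randomUUID().toString();   // assumed id scheme
HiveSavedQueryStartJob job = new HiveSavedQueryStartJob(
    "all", "hiveInstance1", "", "", jobId, viewContext); // empty dates = no date filter
job.start(); // returns immediately; run() drives the migration and updates the MigrationResponse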

+ 70 - 101
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/service/pig/PigJobImpl.java → contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigjob/PigJobMigrationImplementation.java

@@ -16,36 +16,25 @@
  * limitations under the License.
  */
 
-package org.apache.ambari.view.huetoambarimigration.service.pig;
+package org.apache.ambari.view.huetoambarimigration.migration.pig.pigjob;
 
-import java.nio.charset.Charset;
 import java.security.PrivilegedExceptionAction;
-import java.sql.Connection;
-import java.sql.DriverManager;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.Statement;
+import java.sql.*;
 import java.text.ParseException;
 import java.text.SimpleDateFormat;
-import java.io.BufferedInputStream;
-import java.io.BufferedReader;
-import java.io.BufferedWriter;
-import java.io.ByteArrayInputStream;
 import java.io.File;
-import java.io.FileInputStream;
 import java.io.FileWriter;
 import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
 import java.util.ArrayList;
 import java.util.Calendar;
 import java.util.Date;
 import java.util.GregorianCalendar;
-import java.util.Scanner;
 import java.io.*;
 import java.net.URISyntaxException;
-import java.net.URL;
-
+
+import org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.pig.jobqueryset.QuerySet;
+import org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.pig.jobqueryset.*;
+import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.PigModel;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
@@ -60,15 +49,12 @@ import org.jdom.JDOMException;
 import org.jdom.input.SAXBuilder;
 import org.jdom.output.Format;
 import org.jdom.output.XMLOutputter;
-import org.json.JSONArray;
-import org.json.JSONObject;
 
-import org.apache.ambari.view.huetoambarimigration.model.*;
-import org.apache.ambari.view.huetoambarimigration.service.configurationcheck.ConfFileReader;
+import org.apache.ambari.view.huetoambarimigration.migration.configuration.ConfigurationCheckImplementation;
 
-public class PigJobImpl {
+public class PigJobMigrationImplementation {
 
-  static final Logger logger = Logger.getLogger(PigJobImpl.class);
+  static final Logger logger = Logger.getLogger(PigJobMigrationImplementation.class);
 
   private static String readAll(Reader rd) throws IOException {
     StringBuilder sb = new StringBuilder();
@@ -85,7 +71,7 @@ public class PigJobImpl {
     String currentDate = ft.format(dNow);
     XMLOutputter xmlOutput = new XMLOutputter();
     xmlOutput.setFormat(Format.getPrettyFormat());
-    File xmlfile = new File(ConfFileReader.getHomeDir() + "RevertChange.xml");
+    File xmlfile = new File(ConfigurationCheckImplementation.getHomeDir() + "RevertChangesService.xml");
     if (xmlfile.exists()) {
       String iteration = Integer.toString(i + 1);
       SAXBuilder builder = new SAXBuilder();
@@ -100,10 +86,10 @@ public class PigJobImpl {
         record.addContent(new Element("instance").setText(instance));
         record.addContent(new Element("query").setText(content));
         rootNode.addContent(record);
-        xmlOutput.output(doc, new FileWriter(ConfFileReader.getHomeDir() + "RevertChange.xml"));
+        xmlOutput.output(doc, new FileWriter(ConfigurationCheckImplementation.getHomeDir() + "RevertChangesService.xml"));
       } catch (JDOMException e) {
 
-        logger.error("Jdom Exception: " , e);
+        logger.error("Jdom Exception: ", e);
       }
 
 
@@ -121,34 +107,28 @@ public class PigJobImpl {
         record.addContent(new Element("instance").setText(instance));
         record.addContent(new Element("query").setText(content));
         doc.getRootElement().addContent(record);
-        xmlOutput.output(doc, new FileWriter(ConfFileReader.getHomeDir() + "RevertChange.xml"));
+        xmlOutput.output(doc, new FileWriter(ConfigurationCheckImplementation.getHomeDir() + "RevertChangesService.xml"));
       } catch (IOException io) {
-        logger.error("Jdom Exception: " , io);
+        logger.error("Jdom Exception: ", io);
       }
 
     }
 
   }
 
-  public int fetchMaxIdforPigJob(String driverName, Connection c, int id) throws SQLException {
+  public int fetchMaxIdforPigJob(Connection c, int id, QuerySetAmbariDB ambaridatabase) throws SQLException {
+
 
     String ds_id = null;
-    Statement stmt = null;
     ResultSet rs = null;
+    PreparedStatement prSt = null;
 
-    stmt = c.createStatement();
+    prSt = ambaridatabase.getMaxDsIdFromTableId(c, id);
 
-    if (driverName.contains("postgresql")) {
-      rs = stmt.executeQuery("select MAX(cast(ds_id as integer)) as max from ds_pigjob_" + id + ";");
-    } else if (driverName.contains("mysql")) {
-      rs = stmt.executeQuery("select max( cast(ds_id as unsigned) ) as max from DS_PIGJOB_" + id + ";");
-    } else if (driverName.contains("oracle")) {
-      rs = stmt.executeQuery("select MAX(cast(ds_id as integer)) as max from ds_pigjob_" + id);
-    }
+    rs = prSt.executeQuery();
 
     while (rs.next()) {
       ds_id = rs.getString("max");
-
     }
 
     int num;
@@ -157,60 +137,50 @@ public class PigJobImpl {
     } else {
       num = Integer.parseInt(ds_id);
     }
-
     return num;
 
   }
 
-  public int fetchInstanceTablename(String driverName, Connection c, String instance) throws SQLException {
+  public int fetchInstanceTablename(Connection c, String instance, QuerySetAmbariDB ambaridatabase) throws SQLException {
 
 
     String ds_id = new String();
     int id = 0;
     Statement stmt = null;
-    stmt = c.createStatement();
+    PreparedStatement prSt = null;
+
 
     ResultSet rs = null;
-    if (driverName.contains("oracle")) {
-      rs = stmt.executeQuery("select id from viewentity where class_name LIKE 'org.apache.ambari.view.pig.resources.jobs.models.PigJob' and view_instance_name='" + instance + "'");
-    } else {
-      rs = stmt.executeQuery("select id from viewentity where class_name LIKE 'org.apache.ambari.view.pig.resources.jobs.models.PigJob' and view_instance_name='" + instance + "';");
-    }
+
+
+    prSt = ambaridatabase.getTableIdFromInstanceName(c, instance);
+
+    logger.info("sql statement to fetch is from ambari instance:= =  " + prSt);
+
+    rs = prSt.executeQuery();
+
     while (rs.next()) {
       id = rs.getInt("id");
-
     }
-
     return id;
   }
 
-  public void insertRowPigJob(String driverName, String dirname, int maxcountforpigjob, String time, String time2, long epochtime, String title, Connection c, int id, String status, String instance, int i) throws SQLException, IOException {
+  public void insertRowPigJob(String dirname, int maxcountforpigjob, String time, String time2, long epochtime, String title, Connection c, int id, String status, String instance, int i, QuerySetAmbariDB ambaridatabase) throws SQLException, IOException {
 
     String epochtime1 = Long.toString(epochtime);
+    String maxcountforpigjob1 = Integer.toString(maxcountforpigjob);
     String ds_id = new String();
-    Statement stmt = null;
-
-    stmt = c.createStatement();
-    String sql = "";
-    String revsql = "";
+    String revSql;
 
-    if (driverName.contains("mysql")) {
-      sql = "INSERT INTO DS_PIGJOB_" + id + " values ('" + maxcountforpigjob + "'," + epochtime1 + ",0,'','f','','','admin',0,'" + dirname + "script.pig','','" + maxcountforpigjob + "','','','" + status + "','" + dirname + "','','" + title + "');";
-      revsql = "delete from  DS_PIGJOB_" + id + " where ds_id='" + maxcountforpigjob + "';";
+    PreparedStatement prSt = null;
 
-    } else if (driverName.contains("postgresql")) {
-      sql = "INSERT INTO ds_pigjob_" + id + " values ('" + maxcountforpigjob + "'," + epochtime1 + ",0,'','f','','','admin',0,'" + dirname + "script.pig','','" + maxcountforpigjob + "','','','" + status + "','" + dirname + "','','" + title + "');";
-      revsql = "delete from  ds_pigjob_" + id + " where ds_id='" + maxcountforpigjob + "';";
+    prSt = ambaridatabase.insertToPigJob(dirname, maxcountforpigjob1, epochtime, title, c, id, status);
 
-    } else if (driverName.contains("oracle")) {
-      sql = "INSERT INTO ds_pigjob_" + id + " values ('" + maxcountforpigjob + "'," + epochtime1 + ",0,'','f','','','admin',0,'" + dirname + "script.pig','','" + maxcountforpigjob + "','','','" + status + "','" + dirname + "','','" + title + "')";
-      revsql = "delete from  ds_pigjob_" + id + " where ds_id='" + maxcountforpigjob + "'";
+    prSt.executeUpdate();
 
-    }
+    revSql = ambaridatabase.revertSql(id, maxcountforpigjob1);
 
-    wrtitetoalternatesqlfile(dirname, revsql, instance, i);
-
-    stmt.executeUpdate(sql);
+    wrtitetoalternatesqlfile(dirname, revSql, instance, i);
 
   }
 
@@ -266,68 +236,68 @@ public class PigJobImpl {
     return strDate;
   }
 
-  public ArrayList<PojoPig> fetchFromHueDB(String username, String startdate, String endtime, Connection connection) throws ClassNotFoundException, IOException {
+  public ArrayList<PigModel> fetchFromHueDB(String username, String startdate, String endtime, Connection connection, QuerySet huedatabase) throws ClassNotFoundException, IOException {
     int id = 0;
     int i = 0;
     String[] query = new String[100];
-    ArrayList<PojoPig> pigjobarraylist = new ArrayList<PojoPig>();
+    ArrayList<PigModel> pigjobarraylist = new ArrayList<PigModel>();
     try {
+      connection.setAutoCommit(false);
+      PreparedStatement prSt = null;
       Statement statement = connection.createStatement();
+      ResultSet rs;
+
       ResultSet rs1 = null;
       if (username.equals("all")) {
       } else {
-        ResultSet rs = statement
-          .executeQuery("select id from auth_user where username='"
-            + username + "';");
-        while (rs.next()) {
 
-          id = rs.getInt("id");
+        prSt = huedatabase.getUseridfromUserName(connection, username);
 
-        }
+        rs = prSt.executeQuery();
 
+        while (rs.next()) {
+          id = rs.getInt("id");
+        }
       }
 
       if (startdate.equals("") && endtime.equals("")) {
         if (username.equals("all")) {
-
-          rs1 = statement.executeQuery("select status,start_time,statusdir,script_title,user_id from pig_job;");
-
+          prSt = huedatabase.getQueriesNoStartDateNoEndDateAllUser(connection);
         } else {
+          prSt = huedatabase.getQueriesNoStartDateNoEndDate(connection, id);
 
-          rs1 = statement.executeQuery("select status,start_time,statusdir,script_title,user_id from pig_job where user_id =" + id + ";");
         }
 
-      } else if (!(startdate.equals("")) && !(endtime.equals(""))) {
+      } else if ((startdate.equals("")) && !(endtime.equals(""))) {
         if (username.equals("all")) {
-
-          rs1 = statement.executeQuery("select status,start_time,statusdir,script_title,user_id from pig_job where start_time >= date('" + startdate + "') AND start_time <= date('" + endtime + "');");
+          prSt = huedatabase.getQueriesNoStartDateYesEndDateAllUser(connection, endtime);
         } else {
+          prSt = huedatabase.getQueriesNoStartDateYesEndDate(connection, id, endtime);
 
-          rs1 = statement.executeQuery("select status,start_time,statusdir,script_title,user_id from pig_job where user_id =" + id + " AND start_time >= date('" + startdate + "') AND start_time <= date('" + endtime + "');");
         }
-
       } else if (!(startdate.equals("")) && (endtime.equals(""))) {
         if (username.equals("all")) {
-
-          rs1 = statement.executeQuery("select status,start_time,statusdir,script_title,user_id from pig_job where start_time >= date('" + startdate + "');");
+          prSt = huedatabase.getQueriesYesStartDateNoEndDateAllUser(connection, startdate);
         } else {
+          prSt = huedatabase.getQueriesYesStartDateNoEndDate(connection, id, startdate);
 
-          rs1 = statement.executeQuery("select status,start_time,statusdir,script_title,user_id from pig_job where user_id =" + id + " AND start_time >= date('" + startdate + "');");
         }
 
-      } else if ((startdate.equals("")) && !(endtime.equals(""))) {
+      } else if (!(startdate.equals("")) && !(endtime.equals(""))) {
         if (username.equals("all")) {
-
-          rs1 = statement.executeQuery("select status,start_time,statusdir,script_title,user_id from pig_job where start_time <= date('" + endtime + "');");
+          prSt = huedatabase.getQueriesYesStartDateYesEndDateAllUser(connection, startdate, endtime);
         } else {
-
-          rs1 = statement.executeQuery("select status,start_time,statusdir,script_title,user_id from pig_job where user_id =" + id + " AND start_time <= date('" + endtime + "');");
+          prSt = huedatabase.getQueriesYesStartDateYesEndDate(connection, id, startdate, endtime);
         }
 
+
       }
 
+      rs1 = prSt.executeQuery();
+
+
       while (rs1.next()) {
-        PojoPig pigjjobobject = new PojoPig();
+        PigModel pigjjobobject = new PigModel();
 
         int runstatus = rs1.getInt("status");
 
@@ -356,13 +326,13 @@ public class PigJobImpl {
 
 
     } catch (SQLException e) {
-      logger.error("Sqlexception: " , e);
+      logger.error("Sqlexception: ", e);
     } finally {
       try {
         if (connection != null)
           connection.close();
       } catch (SQLException e) {
-        logger.error("Sqlexception in closing the connection: " , e);
+        logger.error("Sqlexception in closing the connection: ", e);
 
       }
     }
@@ -399,7 +369,7 @@ public class PigJobImpl {
         }
       });
     } catch (Exception e) {
-      logger.error("Webhdfs exception: " , e);
+      logger.error("Webhdfs exception: ", e);
     }
   }
 
@@ -435,7 +405,7 @@ public class PigJobImpl {
         }
       });
     } catch (Exception e) {
-      logger.error("Webhdfs exception: " , e);
+      logger.error("Webhdfs exception: ", e);
     }
   }
 
@@ -492,7 +462,7 @@ public class PigJobImpl {
         }
       });
     } catch (Exception e) {
-      logger.error("Webhdfs exception: " , e);
+      logger.error("Webhdfs exception: ", e);
     }
 
   }
@@ -521,7 +491,6 @@ public class PigJobImpl {
 
         public Void run() throws Exception {
 
-
           FileSystem fileSystemAmbari = FileSystem.get(confAmbari);
 
           FileSystem fileSystemHue = FileSystem.get(confHue);
@@ -555,7 +524,7 @@ public class PigJobImpl {
         }
       });
     } catch (Exception e) {
-      logger.error("Webhdfs exception: " , e);
+      logger.error("Webhdfs exception: ", e);
     }
 
   }
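
The refactor above moves every per-driver SQL string behind a PreparedStatement built by a QuerySetAmbariDB object; the concrete query sets are defined elsewhere in this commit. A minimal sketch of the dialect pattern, with illustrative class names and the SQL taken from the removed branches above:

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;

// Sketch of the pattern behind fetchMaxIdforPigJob: the table suffix comes from
// the view-instance id, so it cannot be a bind parameter and is formatted into
// the SQL text, exactly as in the old string-built queries.
abstract class PigJobQuerySetSketch {
  PreparedStatement getMaxDsIdFromTableId(Connection c, int id) throws SQLException {
    return c.prepareStatement(maxDsIdSql(id));
  }
  protected abstract String maxDsIdSql(int id);
}

class MysqlSketch extends PigJobQuerySetSketch {
  protected String maxDsIdSql(int id) {
    return "select max(cast(ds_id as unsigned)) as max from DS_PIGJOB_" + id;
  }
}

class PostgresSketch extends PigJobQuerySetSketch {
  protected String maxDsIdSql(int id) {
    return "select MAX(cast(ds_id as integer)) as max from ds_pigjob_" + id;
  }
}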

+ 101 - 64
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/controller/pig/PigJobMigration.java → contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigjob/PigJobMigrationUtility.java

@@ -17,99 +17,120 @@
  */
 
 
-package org.apache.ambari.view.huetoambarimigration.controller.pig;
+package org.apache.ambari.view.huetoambarimigration.migration.pig.pigjob;
 
 import java.beans.PropertyVetoException;
 import java.io.IOException;
-import java.io.PrintWriter;
 import java.net.URISyntaxException;
 import java.sql.Connection;
 import java.sql.SQLException;
 import java.text.ParseException;
-import java.text.SimpleDateFormat;
 import java.util.ArrayList;
-import java.util.Date;
-
-import javax.servlet.ServletConfig;
-import javax.servlet.ServletContext;
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServlet;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-import javax.servlet.http.HttpSession;
 
 import org.apache.ambari.view.ViewContext;
-import org.apache.ambari.view.huetoambarimigration.controller.configurationcheck.ProgressBarStatus;
+
+import org.apache.ambari.view.huetoambarimigration.persistence.utils.ItemNotFound;
+import org.apache.ambari.view.huetoambarimigration.resources.PersonalCRUDResourceManager;
+import org.apache.ambari.view.huetoambarimigration.resources.scripts.MigrationResourceManager;
+import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.MigrationResponse;
+import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.PigModel;
 import org.apache.log4j.Logger;
 
 import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceAmbariDatabase;
 import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceHueDatabase;
-import org.apache.ambari.view.huetoambarimigration.service.*;
-import org.apache.ambari.view.huetoambarimigration.model.*;
-import org.apache.ambari.view.huetoambarimigration.service.configurationcheck.ConfFileReader;
-import org.apache.ambari.view.huetoambarimigration.service.pig.PigJobImpl;
-
-public class PigJobMigration extends HttpServlet {
-
-  private static final long serialVersionUID = 1031422249396784970L;
-  ViewContext view;
-  int i = 0;
-  private String userName;
-  private String startDate;
-  private String endDate;
-  private String instance;
+import org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.pig.jobqueryset.*;
+import org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.pig.jobqueryset.*;
 
-  @Override
-  public void init(ServletConfig config) throws ServletException {
+public class PigJobMigrationUtility  {
 
-    super.init(config);
-    ServletContext context = config.getServletContext();
-    view = (ViewContext) context.getAttribute(ViewContext.CONTEXT_ATTRIBUTE);
+  protected MigrationResourceManager resourceManager = null;
 
+  public synchronized PersonalCRUDResourceManager<MigrationResponse> getResourceManager(ViewContext view) {
+    if (resourceManager == null) {
+      resourceManager = new MigrationResourceManager(view);
+    }
+    return resourceManager;
   }
 
-  public void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
+  public void pigJobMigration(String username, String instance, String startDate, String endDate, ViewContext view, MigrationResponse migrationresult, String jobid) throws IOException, ItemNotFound {
+
+    long startTime = System.currentTimeMillis();
 
-    HttpSession session = req.getSession(true);
-    final Logger logger = Logger.getLogger(PigJobMigration.class);
+    final Logger logger = Logger.getLogger(PigJobMigrationUtility.class);
     Connection connectionHuedb = null;
     Connection connectionAmbaridb = null;
 
-    // fetchinf data from the clients
-    userName = req.getParameter("username");
-    startDate = req.getParameter("startdate");
-    endDate = req.getParameter("enddate");
-    instance = req.getParameter("instance");
-
     logger.info("------------------------------");
-    logger.info("Pig Jobs Migration started");
+    logger.info("pig Jobs Migration started");
     logger.info("------------------------------");
     logger.info("start date: " + startDate);
     logger.info("enddate date: " + endDate);
-    logger.info("instance is: " + userName);
+    logger.info("instance is: " + username);
     logger.info("hue username is : " + instance);
 
-    PigJobImpl pigjobimpl = new PigJobImpl();// creating the implementation object
-    int maxCountforPigScript = 0;
+    PigJobMigrationImplementation pigjobimpl = new PigJobMigrationImplementation();// creating the implementation object
+
+    QuerySet huedatabase = null;
+
+    if (view.getProperties().get("huedrivername").contains("mysql")) {
+      huedatabase = new MysqlQuerySet();
+    } else if (view.getProperties().get("huedrivername").contains("postgresql")) {
+      huedatabase = new PostgressQuerySet();
+    } else if (view.getProperties().get("huedrivername").contains("sqlite")) {
+      huedatabase = new SqliteQuerySet();
+    } else if (view.getProperties().get("huedrivername").contains("oracle")) {
+      huedatabase = new OracleQuerySet();
+    }
+
+    QuerySetAmbariDB ambaridatabase = null;
+
+    if (view.getProperties().get("ambaridrivername").contains("mysql")) {
+      ambaridatabase = new MysqlQuerySetAmbariDB();
+    } else if (view.getProperties().get("ambaridrivername").contains("postgresql")) {
+      ambaridatabase = new PostgressQuerySetAmbariDB();
+    } else if (view.getProperties().get("ambaridrivername").contains("oracle")) {
+      ambaridatabase = new OracleQuerySetAmbariDB();
+    }
+
+    int maxCountForPigScript = 0, i = 0;
 
     String time = null, timeIndorder = null;
     Long epochtime = null;
     String pigJobDirName;
-    ArrayList<PojoPig> pigJobDbPojo = new ArrayList<PojoPig>();
+    ArrayList<PigModel> pigJobDbPojo = new ArrayList<PigModel>();
 
     try {
 
       connectionHuedb = DataSourceHueDatabase.getInstance(view.getProperties().get("huedrivername"), view.getProperties().get("huejdbcurl"), view.getProperties().get("huedbusername"), view.getProperties().get("huedbpassword")).getConnection();//connecting to hue database
-      pigJobDbPojo = pigjobimpl.fetchFromHueDB(userName, startDate, endDate, connectionHuedb);// fetching the PigJobs details from hue
 
-			/*No Pig Job details has been fetched accordring to search criteria*/
+      pigJobDbPojo = pigjobimpl.fetchFromHueDB(username, startDate, endDate, connectionHuedb, huedatabase); // fetching the Pig job details from hue
+
+      for (int j = 0; j < pigJobDbPojo.size(); j++) {
+        logger.info("the script fetched from hue: " + pigJobDbPojo.get(j).getScript());
+      }
+
+      /* no Pig job details have been fetched according to the search criteria */
       if (pigJobDbPojo.size() == 0) {
 
-        logger.info("no Pig Job has been selected from hue according to your criteria of searching");
-        resp.setContentType("text/html");
-        PrintWriter out = resp.getWriter();
-        out.println("<br>");
-        out.println("<h4>No Pig Job  selected according to your criteria</h4>");
+        migrationresult.setIsNoQuerySelected("yes");
+        migrationresult.setProgressPercentage(0);
+        migrationresult.setNumberOfQueryTransfered(0);
+        migrationresult.setTotalNoQuery(pigJobDbPojo.size());
+        getResourceManager(view).update(migrationresult, jobid);
+        logger.info("no pig Job has been selected from hue according to your criteria of searching");
 
       } else {
 
@@ -120,15 +141,22 @@ public class PigJobMigration extends HttpServlet {
 
           float calc = ((float) (i + 1)) / pigJobDbPojo.size() * 100;
           int progressPercentage = Math.round(calc);
+          migrationresult.setIsNoQuerySelected("no");
+          migrationresult.setProgressPercentage(progressPercentage);
+          migrationresult.setNumberOfQueryTransfered(i+1);
+          migrationresult.setTotalNoQuery(pigJobDbPojo.size());
+          getResourceManager(view).update(migrationresult, jobid);
+
+
 
-          session.setAttribute(ProgressBarStatus.TASK_PROGRESS_VARIABLE, progressPercentage);
 
           logger.info("Loop No." + (i + 1));
           logger.info("________________");
           logger.info("the title of script " + pigJobDbPojo.get(i).getTitle());
 
-          int fetchPigTablenameInstance = pigjobimpl.fetchInstanceTablename(view.getProperties().get("ambaridrivername"), connectionAmbaridb, instance);
-          maxCountforPigScript = (pigjobimpl.fetchMaxIdforPigJob(view.getProperties().get("ambaridrivername"), connectionAmbaridb, fetchPigTablenameInstance) + 1);
+          int fetchPigTablenameInstance = pigjobimpl.fetchInstanceTablename(connectionAmbaridb, instance, ambaridatabase);
+
+          maxCountForPigScript = (pigjobimpl.fetchMaxIdforPigJob(connectionAmbaridb, fetchPigTablenameInstance, ambaridatabase) + 1);
 
           time = pigjobimpl.getTime();
           timeIndorder = pigjobimpl.getTimeInorder();
@@ -136,7 +164,7 @@ public class PigJobMigration extends HttpServlet {
 
           pigJobDirName = "/user/admin/pig/jobs/" + pigJobDbPojo.get(i).getTitle() + "_" + time + "/";
 
-          pigjobimpl.insertRowPigJob(view.getProperties().get("ambaridrivername"), pigJobDirName, maxCountforPigScript, time, timeIndorder, epochtime, pigJobDbPojo.get(i).getTitle(), connectionAmbaridb, fetchPigTablenameInstance, pigJobDbPojo.get(i).getStatus(), instance, i);
+          pigjobimpl.insertRowPigJob(pigJobDirName, maxCountForPigScript, time, timeIndorder, epochtime, pigJobDbPojo.get(i).getTitle(), connectionAmbaridb, fetchPigTablenameInstance, pigJobDbPojo.get(i).getStatus(), instance, i, ambaridatabase);
 
           if (view.getProperties().get("KerberoseEnabled").equals("y")) {
 
@@ -186,15 +214,24 @@ public class PigJobMigration extends HttpServlet {
     }
 
     logger.info("------------------------------");
-    logger.info("Pig Job Migration End");
+    logger.info("pig Job Migration End");
     logger.info("------------------------------");
 
-    session.setAttribute(ProgressBarStatus.TASK_PROGRESS_VARIABLE, 0);
-    resp.setContentType("text/html");
-    PrintWriter out = resp.getWriter();
-    out.println("<br>");
-    out.println("<h4>" + i + " Pig jobs has been migrated to  "
-      + instance + "</h4>");
+
+    long stopTime = System.currentTimeMillis();
+    long elapsedTime = stopTime - startTime;
+
+    migrationresult.setJobtype("hivehistoryquerymigration");
+    migrationresult.setTotalTimeTaken(String.valueOf(elapsedTime));
+    getResourceManager(view).update(migrationresult, jobid);
+
+
   }
 
 }
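
For reference, the progress figure reported to the resource manager on each iteration is round(100 * (i + 1) / total). For example, after the third of eight jobs:

// Worked example of the loop's progress computation (values are illustrative).
int i = 2, total = 8;                                                 // third iteration
int progressPercentage = Math.round(((float) (i + 1)) / total * 100); // 37.5 -> 38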

+ 68 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigjob/PigJobStartJob.java

@@ -0,0 +1,68 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.huetoambarimigration.migration.pig.pigjob;
+
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.huetoambarimigration.persistence.utils.ItemNotFound;
+import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.MigrationResponse;
+
+import java.io.IOException;
+
+
+public class PigJobStartJob extends Thread {
+
+  String username;
+  String instance;
+  String startdate;
+  String enddate;
+  String jobid;
+  ViewContext view;
+
+  public PigJobStartJob(String username, String instance, String startdate, String enddate, String jobid, ViewContext view) {
+    this.username = username;
+    this.instance = instance;
+    this.startdate = startdate;
+    this.enddate = enddate;
+    this.jobid = jobid;
+    this.view = view;
+  }
+
+  @Override
+  public void run() {
+
+    MigrationResponse migrationresult = new MigrationResponse();
+
+    migrationresult.setId(jobid);
+    migrationresult.setIntanceName(instance);
+    migrationresult.setUserNameofhue(username);
+    migrationresult.setProgressPercentage(0);
+
+    PigJobMigrationUtility pigjobmigration = new PigJobMigrationUtility();
+    try {
+      pigjobmigration.pigJobMigration(username,instance,startdate,enddate,view,migrationresult,jobid);
+    }
+    catch (IOException e) {
+      e.printStackTrace();
+    } catch (ItemNotFound itemNotFound) {
+      itemNotFound.printStackTrace();
+    }
+
+  }
+
+}

+ 70 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigscript/PigSavedScriptStartJob.java

@@ -0,0 +1,70 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.huetoambarimigration.migration.pig.pigscript;
+
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.huetoambarimigration.persistence.utils.ItemNotFound;
+import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.MigrationResponse;
+
+import java.io.IOException;
+
+
+public class PigSavedScriptStartJob extends Thread {
+
+  String username;
+  String instance;
+  String startdate;
+  String enddate;
+  String jobid;
+  ViewContext view;
+
+  public PigSavedScriptStartJob(String username, String instance, String startdate, String enddate, String jobid, ViewContext view) {
+    this.username = username;
+    this.instance = instance;
+    this.startdate = startdate;
+    this.enddate = enddate;
+    this.jobid = jobid;
+    this.view = view;
+  }
+
+  @Override
+  public void run() {
+
+    MigrationResponse migrationresult = new MigrationResponse();
+
+    migrationresult.setId(jobid);
+    migrationresult.setIntanceName(instance);
+    migrationresult.setUserNameofhue(username);
+    migrationresult.setProgressPercentage(0);
+
+    PigScriptMigrationUtility pigsavedscript = new PigScriptMigrationUtility();
+    try {
+      pigsavedscript.pigScriptMigration(username,instance,startdate,enddate,view,migrationresult,jobid);
+    }
+    catch (IOException e) {
+      e.printStackTrace();
+    } catch (ItemNotFound itemNotFound) {
+      itemNotFound.printStackTrace();
+    }
+
+  }
+
+}

+ 78 - 174
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/service/pig/PigScriptImpl.java → contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigscript/PigScriptMigrationImplementation.java

@@ -17,37 +17,13 @@
  */
 
 
-package org.apache.ambari.view.huetoambarimigration.service.pig;
+package org.apache.ambari.view.huetoambarimigration.migration.pig.pigscript;
 
-import java.nio.charset.Charset;
-import java.security.PrivilegedExceptionAction;
-import java.sql.Connection;
-import java.sql.DriverManager;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.Statement;
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.io.BufferedInputStream;
-import java.io.BufferedReader;
-import java.io.BufferedWriter;
-import java.io.ByteArrayInputStream;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileWriter;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.util.ArrayList;
-import java.util.Calendar;
-import java.util.Date;
-import java.util.GregorianCalendar;
-import java.util.Scanner;
-import java.io.*;
-import java.net.URISyntaxException;
-import java.net.URL;
-
-import org.apache.ambari.view.huetoambarimigration.service.configurationcheck.ConfFileReader;
+import org.apache.ambari.view.huetoambarimigration.migration.configuration.ConfigurationCheckImplementation;
+import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.PigModel;
+import org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.pig.savedscriptqueryset.QuerySetAmbariDB;
+import org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.pig.savedscriptqueryset.QuerySet;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
@@ -61,15 +37,21 @@ import org.jdom.JDOMException;
 import org.jdom.input.SAXBuilder;
 import org.jdom.output.Format;
 import org.jdom.output.XMLOutputter;
-import org.json.JSONArray;
-import org.json.JSONObject;
 
-import org.apache.ambari.view.huetoambarimigration.model.*;
+import java.io.*;
+import java.security.PrivilegedExceptionAction;
+import java.sql.*;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Calendar;
+import java.util.Date;
+import java.util.GregorianCalendar;
 
 
-public class PigScriptImpl {
+public class PigScriptMigrationImplementation {
 
-  static final Logger logger = Logger.getLogger(PigJobImpl.class);
+  static final Logger logger = Logger.getLogger(PigScriptMigrationImplementation.class);
 
   private static String readAll(Reader rd) throws IOException {
     StringBuilder sb = new StringBuilder();
@@ -90,7 +72,7 @@ public class PigScriptImpl {
 
     xmlOutput.setFormat(Format.getPrettyFormat());
 
-    File xmlfile = new File(ConfFileReader.getHomeDir() + "RevertChange.xml");
+    File xmlfile = new File(ConfigurationCheckImplementation.getHomeDir() + "RevertChangesService.xml");
 
     if (xmlfile.exists()) {
       String iteration = Integer.toString(i + 1);
@@ -109,10 +91,10 @@ public class PigScriptImpl {
         record.addContent(new Element("query").setText(content));
 
         rootNode.addContent(record);
-        xmlOutput.output(doc, new FileWriter(ConfFileReader.getHomeDir() + "RevertChange.xml"));
+        xmlOutput.output(doc, new FileWriter(ConfigurationCheckImplementation.getHomeDir() + "RevertChangesService.xml"));
 
       } catch (JDOMException e) {
-        logger.error("JDOMException: " , e);
+        logger.error("JDOMException: ", e);
       }
 
 
@@ -133,10 +115,10 @@ public class PigScriptImpl {
 
         doc.getRootElement().addContent(record);
 
-        xmlOutput.output(doc, new FileWriter(ConfFileReader.getHomeDir() + "RevertChange.xml"));
+        xmlOutput.output(doc, new FileWriter(ConfigurationCheckImplementation.getHomeDir() + "RevertChangesService.xml"));
 
       } catch (IOException io) {
-        logger.error("IOException: " , io);
+        logger.error("IOException: ", io);
 
       }
 
@@ -145,46 +127,40 @@ public class PigScriptImpl {
 
   }
 
-  public int fetchInstanceTablenamePigScript(String driverName, Connection c, String instance) throws SQLException {
+  public int fetchInstanceTablenamePigScript(Connection c, String instance, QuerySetAmbariDB ambaridatabase) throws SQLException {
 
     String ds_id = new String();
     int id = 0;
     Statement stmt = null;
+    PreparedStatement prSt = null;
 
-    stmt = c.createStatement();
 
     ResultSet rs = null;
 
-    if (driverName.contains("oracle")) {
-      rs = stmt.executeQuery("select id from viewentity where class_name LIKE 'org.apache.ambari.view.pig.resources.scripts.models.PigScript' and view_instance_name='" + instance + "'");
-    } else {
-      rs = stmt.executeQuery("select id from viewentity where class_name LIKE 'org.apache.ambari.view.pig.resources.scripts.models.PigScript' and view_instance_name='" + instance + "';");
-    }
+
+    prSt = ambaridatabase.getTableIdFromInstanceName(c, instance);
+
+    logger.info("sql statement to fetch is from ambari instance:= =  " + prSt);
+
+    rs = prSt.executeQuery();
 
     while (rs.next()) {
       id = rs.getInt("id");
-
     }
-
     return id;
 
   }
 
-  public int fetchmaxIdforPigSavedScript(String driverName, Connection c, int id) throws SQLException {
+  public int fetchmaxIdforPigSavedScript(Connection c, int id, QuerySetAmbariDB ambaridatabase) throws SQLException {
 
-    String ds_id = null;
-    Statement stmt = null;
 
-    stmt = c.createStatement();
+    String ds_id = null;
     ResultSet rs = null;
+    PreparedStatement prSt = null;
 
-    if (driverName.contains("postgresql")) {
-      rs = stmt.executeQuery("select MAX(cast(ds_id as integer)) as max from ds_pigscript_" + id + ";");
-    } else if (driverName.contains("mysql")) {
-      rs = stmt.executeQuery("select max( cast(ds_id as unsigned) ) as max from DS_PIGSCRIPT_" + id + ";");
-    } else if (driverName.contains("oracle")) {
-      rs = stmt.executeQuery("select MAX(cast(ds_id as integer)) as max from ds_pigscript_" + id + "");
-    }
+    prSt = ambaridatabase.getMaxDsIdFromTableId(c, id);
+
+    rs = prSt.executeQuery();
 
     while (rs.next()) {
       ds_id = rs.getString("max");
@@ -192,42 +168,28 @@ public class PigScriptImpl {
 
     int num;
     if (ds_id == null) {
-      num = 0;
+      num = 1;
     } else {
       num = Integer.parseInt(ds_id);
     }
-
     return num;
   }
 
-  public void insertRowForPigScript(String driverName, String dirname, int maxcountforpigjob, int maxcount, String time, String time2, long epochtime, String title, Connection c, int id, String instance, int i) throws SQLException, IOException {
+  public void insertRowForPigScript(String dirname, int maxcountforpigjob, int maxcount, String time, String time2, long epochtime, String title, Connection c, int id, String instance, int i, QuerySetAmbariDB ambaridatabase) throws SQLException, IOException {
 
     String maxcount1 = Integer.toString(maxcount);
     String epochtime1 = Long.toString(epochtime);
-    String ds_id = new String();
-    Statement stmt = null;
-    String sql2 = "";
-    String revsql = "";
-
-    stmt = c.createStatement();
-
-    if (driverName.contains("mysql")) {
-      sql2 = "INSERT INTO DS_PIGSCRIPT_" + id + " values ('" + maxcount1 + "','1970-01-17 20:28:55.586000 +00:00:00',0,'admin','" + dirname + "','','','" + title + "');";
-      revsql = "delete from  DS_PIGSCRIPT_" + id + " where ds_id='" + maxcount1 + "';";
+    String revSql = null;
 
-    } else if (driverName.contains("postgresql")) {
-      sql2 = "INSERT INTO ds_pigscript_" + id + " values ('" + maxcount1 + "','1970-01-17 20:28:55.586000 +00:00:00','f','admin','" + dirname + "','','','" + title + "');";
-      revsql = "delete from  ds_pigscript_" + id + " where ds_id='" + maxcount1 + "';";
+    PreparedStatement prSt = null;
 
-    } else if (driverName.contains("oracle")) {
-      sql2 = "INSERT INTO ds_pigscript_" + id + " values ('" + maxcount1 + "','1970-01-17 20:28:55.586000 +00:00:00','f','admin','" + dirname + "','','','" + title + "')";
-      revsql = "delete from  ds_pigscript_" + id + " where ds_id='" + maxcount1 + "'";
+    prSt = ambaridatabase.insertToPigScript(c, id, maxcount1, dirname, title);
 
-    }
+    prSt.executeUpdate();
 
-    stmt.executeUpdate(sql2);
+    revSql = ambaridatabase.revertSql(id, maxcount1);
 
-    wrtitetoalternatesqlfile(dirname, revsql, instance, i);
+    wrtitetoalternatesqlfile(dirname, revSql, instance, i);
 
   }
 
@@ -291,126 +253,68 @@ public class PigScriptImpl {
   }
 
 
-  public ArrayList<PojoPig> fetchFromHueDatabase(String username, String startdate, String endtime, Connection connection, String driverName) throws ClassNotFoundException, IOException {
+  public ArrayList<PigModel> fetchFromHueDatabase(String username, String startdate, String endtime, Connection connection, QuerySet huedatabase) throws ClassNotFoundException, IOException {
     int id = 0;
     int i = 0;
     ResultSet rs1 = null;
     String[] query = new String[100];
-    ArrayList<PojoPig> pigArrayList = new ArrayList<PojoPig>();
+    ArrayList<PigModel> pigArrayList = new ArrayList<PigModel>();
     try {
-
       Statement statement = connection.createStatement();
-
+      connection.setAutoCommit(false);
+      PreparedStatement prSt = null;
+      ResultSet rs;
       if (username.equals("all")) {
       } else {
-        ResultSet rs = statement
-          .executeQuery("select id from auth_user where username='"
-            + username + "';");
+
+        prSt = huedatabase.getUseridfromUserName(connection, username);
+
+        rs = prSt.executeQuery();
+
         while (rs.next()) {
           id = rs.getInt("id");
         }
-
       }
 
-
       if (startdate.equals("") && endtime.equals("")) {
         if (username.equals("all")) {
-          if (driverName.contains("postgresql")) {
-
-            rs1 = statement.executeQuery("select pig_script,title,date_created,saved,arguments from pig_pigscript where saved=true;");
-
-          } else {
-
-            rs1 = statement.executeQuery("select pig_script,title,date_created,saved,arguments from pig_pigscript where saved=1;");
-          }
-
+          prSt = huedatabase.getQueriesNoStartDateNoEndDateAllUser(connection);
         } else {
-
-          if (driverName.contains("postgresql")) {
-
-            rs1 = statement.executeQuery("select pig_script,title,date_created,saved,arguments from pig_pigscript where saved='true' AND user_id =" + id + ";");
-
-          } else {
-
-            rs1 = statement.executeQuery("select pig_script,title,date_created,saved,arguments from pig_pigscript where saved=1 AND user_id =" + id + ";");
-          }
+          prSt = huedatabase.getQueriesNoStartDateNoEndDate(connection, id);
 
         }
 
-      } else if (!(startdate.equals("")) && !(endtime.equals(""))) {
+      } else if ((startdate.equals("")) && !(endtime.equals(""))) {
         if (username.equals("all")) {
-          if (driverName.contains("postgresql")) {
-
-            rs1 = statement.executeQuery("select pig_script,title,date_created,saved,arguments from pig_pigscript where saved='true' AND date_created >= date('" + startdate + "') AND date_created <= date('" + endtime + "');");
-
-          } else {
-
-            rs1 = statement.executeQuery("select pig_script,title,date_created,saved,arguments from pig_pigscript where saved=1 AND date_created >= date('" + startdate + "') AND date_created <= date('" + endtime + "');");
-          }
-
+          prSt = huedatabase.getQueriesNoStartDateYesEndDateAllUser(connection, endtime);
         } else {
-          if (driverName.contains("postgresql")) {
-
-            rs1 = statement.executeQuery("select pig_script,title,date_created,saved,arguments from pig_pigscript where saved='true' AND user_id =" + id + " AND date_created >= date('" + startdate + "') AND date_created <= date('" + endtime + "');");
-
-          } else {
-
-            rs1 = statement.executeQuery("select pig_script,title,date_created,saved,arguments from pig_pigscript where saved=1 AND user_id =" + id + " AND date_created >= date('" + startdate + "') AND date_created <= date('" + endtime + "');");
-          }
+          prSt = huedatabase.getQueriesNoStartDateYesEndDate(connection, id, endtime);
 
         }
-
       } else if (!(startdate.equals("")) && (endtime.equals(""))) {
         if (username.equals("all")) {
-          if (driverName.contains("postgresql")) {
-
-            rs1 = statement.executeQuery("select pig_script,title,date_created,saved,arguments from pig_pigscript where saved='true' AND date_created >= date('" + startdate + "');");
-
-          } else {
-
-            rs1 = statement.executeQuery("select pig_script,title,date_created,saved,arguments from pig_pigscript where saved=1 AND date_created >= date('" + startdate + "');");
-          }
-
+          prSt = huedatabase.getQueriesYesStartDateNoEndDateAllUser(connection, startdate);
         } else {
-          if (driverName.contains("postgresql")) {
-
-            rs1 = statement.executeQuery("select pig_script,title,date_created,saved,arguments from pig_pigscript where saved='true' AND user_id =" + id + " AND date_created >= date('" + startdate + "');");
-
-          } else {
-
-            rs1 = statement.executeQuery("select pig_script,title,date_created,saved,arguments from pig_pigscript where saved=1 AND user_id =" + id + " AND date_created >= date('" + startdate + "');");
-          }
+          prSt = huedatabase.getQueriesYesStartDateNoEndDate(connection, id, startdate);
 
         }
 
-      } else if ((startdate.equals("")) && !(endtime.equals(""))) {
+      } else if (!(startdate.equals("")) && !(endtime.equals(""))) {
         if (username.equals("all")) {
-          if (driverName.contains("postgresql")) {
-
-            rs1 = statement.executeQuery("select pig_script,title,date_created,saved,arguments from pig_pigscript where saved='true' AND date_created <= date('" + endtime + "');");
-
-          } else {
-
-            rs1 = statement.executeQuery("select pig_script,title,date_created,saved,arguments from pig_pigscript where saved=1 AND date_created <= date('" + endtime + "');");
-          }
-
+          prSt = huedatabase.getQueriesYesStartDateYesEndDateAllUser(connection, startdate, endtime);
         } else {
-          if (driverName.contains("postgresql")) {
+          prSt = huedatabase.getQueriesYesStartDateYesEndDate(connection, id, startdate, endtime);
+        }
 
-            rs1 = statement.executeQuery("select pig_script,title,date_created,saved,arguments from pig_pigscript where saved='true' AND user_id =" + id + " AND date_created <= date('" + endtime + "');");
 
-          } else {
+      }
 
-            rs1 = statement.executeQuery("select pig_script,title,date_created,saved,arguments from pig_pigscript where saved=1 AND user_id =" + id + " AND date_created <= date('" + endtime + "');");
-          }
+      rs1 = prSt.executeQuery();
 
-        }
 
-      }
       // rs1 = statement.executeQuery("select pig_script,title,date_created,saved,arguments from pig_pigscript where saved=1 AND user_id ="+id+" AND date_created BETWEEN '"+ startdate +"' AND '"  +endtime +"';");
       while (rs1.next()) {
-        PojoPig pojopig = new PojoPig();
-
+        PigModel pojopig = new PigModel();
         String script = rs1.getString("pig_script");
         String title = rs1.getString("title");
         Date created_data = rs1.getDate("date_created");
@@ -424,13 +328,13 @@ public class PigScriptImpl {
 
 
     } catch (SQLException e) {
-      logger.error("SQLException" , e);
+      logger.error("SQLException", e);
     } finally {
       try {
         if (connection != null)
           connection.close();
       } catch (SQLException e) {
-        logger.error("SQLException" , e);
+        logger.error("SQLException", e);
       }
     }
 
@@ -455,25 +359,25 @@ public class PigScriptImpl {
 
     } catch (IOException e) {
 
-      logger.error("IOException" , e);
+      logger.error("IOException", e);
     }
 
   }
 
   public void deletePigScriptLocalFile(String homedir, String filename2) {
-    try{
+    try {
 
       File file = new File(homedir + filename2);
 
-      if(file.delete()){
+      if (file.delete()) {
         logger.info("Temproray file deleted");
-      }else{
+      } else {
         logger.info("Temproray file delete failed");
       }
 
-    }catch(Exception e){
+    } catch (Exception e) {
 
-     logger.error("File Exception: ",e);
+      logger.error("File Exception: ", e);
 
     }
 
@@ -531,7 +435,7 @@ public class PigScriptImpl {
         }
       });
     } catch (Exception e) {
-      logger.error("Webhdfs Exception: " , e);
+      logger.error("Webhdfs Exception: ", e);
     }
 
   }
@@ -592,7 +496,7 @@ public class PigScriptImpl {
         }
       });
     } catch (Exception e) {
-      logger.error("Webhdfs Exception: " , e);
+      logger.error("Webhdfs Exception: ", e);
 
     }
 

+ 229 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigscript/PigScriptMigrationUtility.java

@@ -0,0 +1,229 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package org.apache.ambari.view.huetoambarimigration.migration.pig.pigscript;
+
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceAmbariDatabase;
+import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceHueDatabase;
+import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.PigModel;
+import org.apache.ambari.view.huetoambarimigration.persistence.utils.ItemNotFound;
+import org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.pig.savedscriptqueryset.*;
+import org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.pig.savedscriptqueryset.*;
+import org.apache.ambari.view.huetoambarimigration.resources.PersonalCRUDResourceManager;
+import org.apache.ambari.view.huetoambarimigration.resources.scripts.MigrationResourceManager;
+import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.MigrationResponse;
+import org.apache.ambari.view.huetoambarimigration.migration.configuration.ConfigurationCheckImplementation;
+import org.apache.log4j.Logger;
+
+import java.beans.PropertyVetoException;
+import java.io.IOException;
+import java.sql.Connection;
+import java.sql.SQLException;
+import java.text.ParseException;
+import java.util.ArrayList;
+
+public class PigScriptMigrationUtility {
+
+  protected MigrationResourceManager resourceManager = null;
+
+  public synchronized PersonalCRUDResourceManager<MigrationResponse> getResourceManager(ViewContext view) {
+    if (resourceManager == null) {
+      resourceManager = new MigrationResourceManager(view);
+    }
+    return resourceManager;
+  }
+
+
+  public void pigScriptMigration(String username, String instance, String startDate, String endDate, ViewContext view, MigrationResponse migrationresult, String jobid) throws IOException, ItemNotFound {
+
+    long startTime = System.currentTimeMillis();
+
+    final Logger logger = Logger.getLogger(PigScriptMigrationUtility.class);
+    Connection connectionHuedb = null;
+    Connection connectionAmbaridb = null;
+
+    logger.info("-------------------------------------");
+    logger.info("pig saved script Migration started");
+    logger.info("-------------------------------------");
+
+
+    int i = 0;
+
+    logger.info("start date: " + startDate);
+    logger.info("enddate date: " + endDate);
+    logger.info("instance is: " + username);
+    logger.info("hue username is : " + instance);
+
+    //Reading the configuration file
+    PigScriptMigrationImplementation pigsavedscriptmigration = new PigScriptMigrationImplementation();
+
+    QuerySet huedatabase = null;
+
+    if (view.getProperties().get("huedrivername").contains("mysql")) {
+      huedatabase = new MysqlQuerySet();
+    } else if (view.getProperties().get("huedrivername").contains("postgresql")) {
+      huedatabase = new PostgressQuerySet();
+    } else if (view.getProperties().get("huedrivername").contains("sqlite")) {
+
+      huedatabase = new SqliteQuerySet();
+    } else if (view.getProperties().get("huedrivername").contains("oracle")) {
+      huedatabase = new OracleQuerySet();
+    }
+
+    QuerySetAmbariDB ambaridatabase = null;
+
+
+    if (view.getProperties().get("ambaridrivername").contains("mysql")) {
+      ambaridatabase = new MysqlQuerySetAmbariDB();
+    } else if (view.getProperties().get("ambaridrivername").contains("postgresql")) {
+      ambaridatabase = new PostgressQuerySetAmbariDB();
+    } else if (view.getProperties().get("ambaridrivername").contains("oracle")) {
+      ambaridatabase = new OracleQuerySetAmbariDB();
+    }
+
+    int maxcountforsavequery = 0, maxcountforpigsavedscript;
+    String time = null, timetobeInorder = null;
+    Long epochTime = null;
+    String dirNameForPigScript, completeDirandFilePath, pigscriptFilename = "";
+    int pigInstanceTableName;
+
+    ArrayList<PigModel> dbpojoPigSavedscript = new ArrayList<PigModel>();
+
+    try {
+      connectionHuedb = DataSourceHueDatabase.getInstance(view.getProperties().get("huedrivername"), view.getProperties().get("huejdbcurl"), view.getProperties().get("huedbusername"), view.getProperties().get("huedbpassword")).getConnection();//connection to Hue DB
+      dbpojoPigSavedscript = pigsavedscriptmigration.fetchFromHueDatabase(username, startDate, endDate, connectionHuedb, huedatabase);// Fetching pig script details from Hue DB
+
+      for (int j = 0; j < dbpojoPigSavedscript.size(); j++) {
+        logger.info("the query fetched from hue=" + dbpojoPigSavedscript.get(j).getScript());
+
+      }
+
+
+      /* If No pig Script has been fetched from Hue db according to our search criteria*/
+      if (dbpojoPigSavedscript.size() == 0) {
+
+        migrationresult.setIsNoQuerySelected("yes");
+        migrationresult.setProgressPercentage(0);
+        migrationresult.setNumberOfQueryTransfered(0);
+        migrationresult.setTotalNoQuery(dbpojoPigSavedscript.size());
+        getResourceManager(view).update(migrationresult, jobid);
+
+        logger.info("no pig script has been selected from hue according to your criteria of searching");
+
+
+      } else {
+
+        connectionAmbaridb = DataSourceAmbariDatabase.getInstance(view.getProperties().get("ambaridrivername"), view.getProperties().get("ambarijdbcurl"), view.getProperties().get("ambaridbusername"), view.getProperties().get("ambaridbpassword")).getConnection();// connecting to ambari db
+        connectionAmbaridb.setAutoCommit(false);
+        logger.info("loop will continue for " + dbpojoPigSavedscript.size() + "times");
+
+        //for each pig script found in Hue Database
+
+        for (i = 0; i < dbpojoPigSavedscript.size(); i++) {
+
+
+          float calc = ((float) (i + 1)) / dbpojoPigSavedscript.size() * 100;
+          int progressPercentage = Math.round(calc);
+          migrationresult.setIsNoQuerySelected("no");
+          migrationresult.setProgressPercentage(progressPercentage);
+          migrationresult.setNumberOfQueryTransfered(i + 1);
+          migrationresult.setTotalNoQuery(dbpojoPigSavedscript.size());
+          getResourceManager(view).update(migrationresult, jobid);
+
+          logger.info("Loop No." + (i + 1));
+          logger.info("________________");
+          logger.info("the title of script:  " + dbpojoPigSavedscript.get(i).getTitle());
+
+          pigInstanceTableName = pigsavedscriptmigration.fetchInstanceTablenamePigScript(connectionAmbaridb, instance, ambaridatabase);// finding the table name in ambari from the given instance
+
+          maxcountforpigsavedscript = (pigsavedscriptmigration.fetchmaxIdforPigSavedScript(connectionAmbaridb, pigInstanceTableName, ambaridatabase) + 1);// maximum count of the primary key of pig Script table
+
+          time = pigsavedscriptmigration.getTime();
+
+          timetobeInorder = pigsavedscriptmigration.getTimeInorder();
+
+          epochTime = pigsavedscriptmigration.getEpochTime();
+
+          dirNameForPigScript = "/user/admin/pig/scripts/";
+
+          pigscriptFilename = dbpojoPigSavedscript.get(i).getTitle() + "-" + time + ".pig";
+
+          completeDirandFilePath = dirNameForPigScript + pigscriptFilename;
+
+          pigsavedscriptmigration.writetPigScripttoLocalFile(dbpojoPigSavedscript.get(i).getScript(), dbpojoPigSavedscript.get(i).getTitle(), dbpojoPigSavedscript.get(i).getDt(), ConfigurationCheckImplementation.getHomeDir(), pigscriptFilename);
+
+          pigsavedscriptmigration.insertRowForPigScript(completeDirandFilePath, maxcountforsavequery, maxcountforpigsavedscript, time, timetobeInorder, epochTime, dbpojoPigSavedscript.get(i).getTitle(), connectionAmbaridb, pigInstanceTableName, instance, i, ambaridatabase);
+
+          if (view.getProperties().get("KerberoseEnabled").equals("y")) {
+            pigsavedscriptmigration.putFileinHdfsSecured(ConfigurationCheckImplementation.getHomeDir() + pigscriptFilename, dirNameForPigScript, view.getProperties().get("namenode_URI_Ambari"));
+          } else {
+            pigsavedscriptmigration.putFileinHdfs(ConfigurationCheckImplementation.getHomeDir() + pigscriptFilename, dirNameForPigScript, view.getProperties().get("namenode_URI_Ambari"));
+          }
+
+          logger.info(dbpojoPigSavedscript.get(i).getTitle() + " migrated to Ambari");
+
+          pigsavedscriptmigration.deletePigScriptLocalFile(ConfigurationCheckImplementation.getHomeDir(), pigscriptFilename);
+
+        }
+        connectionAmbaridb.commit();
+
+      }
+
+
+    } catch (SQLException e) {
+      logger.error("Sql exception in ambari database", e);
+      try {
+        connectionAmbaridb.rollback();
+        logger.info("rollback done");
+      } catch (SQLException e1) {
+        logger.error("Sql exception while doing roll back", e);
+      }
+    } catch (ClassNotFoundException e2) {
+      logger.error("class not found exception", e2);
+    } catch (ParseException e) {
+      logger.error("ParseException: ", e);
+    } catch (PropertyVetoException e) {
+      logger.error("PropertyVetoException: ", e);
+    } finally {
+      if (null != connectionAmbaridb)
+        try {
+          connectionAmbaridb.close();
+        } catch (SQLException e) {
+          logger.error("connection close exception: ", e);
+        }
+    }
+
+    long stopTime = System.currentTimeMillis();
+    long elapsedTime = stopTime - startTime;
+
+
+    migrationresult.setJobtype("pigsavedscriptmigration");
+    migrationresult.setTotalTimeTaken(String.valueOf(elapsedTime));
+    getResourceManager(view).update(migrationresult, jobid);
+
+
+    logger.info("----------------------------------");
+    logger.info("pig saved script Migration ends");
+    logger.info("----------------------------------");
+
+  }
+
+
+}
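PigScriptMigrationUtility picks its Hue and Ambari query sets by substring-matching the configured JDBC driver names. The same dispatch, condensed into a hypothetical helper (it would only compile alongside the savedscriptqueryset classes this commit adds):

static QuerySet querySetForDriver(String driverName) {
  if (driverName.contains("mysql"))      return new MysqlQuerySet();
  if (driverName.contains("postgresql")) return new PostgressQuerySet();
  if (driverName.contains("sqlite"))     return new SqliteQuerySet();
  if (driverName.contains("oracle"))     return new OracleQuerySet();
  return null; // the utility leaves huedatabase null for unrecognized drivers
}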

+ 70 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/revertchange/RevertChangeStartJob.java

@@ -0,0 +1,70 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.huetoambarimigration.migration.revertchange;
+
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.huetoambarimigration.persistence.utils.ItemNotFound;
+import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.MigrationResponse;
+import org.json.simple.JSONObject;
+
+import java.io.IOException;
+
+
+public class RevertChangeStartJob extends Thread {
+
+
+  String instance;
+  String revertdate;
+  String jobid;
+  ViewContext view;
+
+  public RevertChangeStartJob(String instance, String revertdate, String jobid, ViewContext view) {
+
+    this.instance = instance;
+    this.revertdate = revertdate;
+    this.jobid = jobid;
+    this.view = view;
+  }
+
+
+
+  @Override
+  public void run() {
+
+    MigrationResponse migrationresult = new MigrationResponse();
+
+    migrationresult.setId(jobid);
+    migrationresult.setIntanceName(instance);
+    migrationresult.setProgressPercentage(0);
+
+    JSONObject response = new JSONObject();
+
+
+    RevertChangeUtility revertchange = new RevertChangeUtility();
+    try {
+      revertchange.revertChangeUtility(instance, revertdate, jobid, view, migrationresult);
+    } catch (IOException e) {
+      e.printStackTrace();
+    } catch (ItemNotFound itemNotFound) {
+      itemNotFound.printStackTrace();
+    }
+
+  }
+
+}
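RevertChangeStartJob is a fire-and-forget Thread wrapper around RevertChangeUtility. A hypothetical caller (the REST service that allocates jobid is outside this excerpt; the instance name and revert date are illustrative values only):

// 'view' is the hosting ViewContext supplied by Ambari.
RevertChangeStartJob job =
    new RevertChangeStartJob("pigInstance1", "2016-05-01", jobid, view);
job.start(); // run() builds a MigrationResponse and delegates to RevertChangeUtility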

+ 59 - 51
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/controller/revertchange/RevertChange.java → contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/revertchange/RevertChangeUtility.java

@@ -16,14 +16,12 @@
  * limitations under the License.
  */
 
-package org.apache.ambari.view.huetoambarimigration.controller.revertchange;
+package org.apache.ambari.view.huetoambarimigration.migration.revertchange;
 
 import java.beans.PropertyVetoException;
 import java.io.BufferedReader;
 import java.io.File;
-import java.io.FileReader;
 import java.io.IOException;
-import java.io.PrintWriter;
 import java.net.URISyntaxException;
 import java.security.PrivilegedExceptionAction;
 import java.sql.Connection;
@@ -34,50 +32,43 @@ import java.text.SimpleDateFormat;
 import java.util.Date;
 import java.util.List;
 
-import javax.servlet.ServletConfig;
-import javax.servlet.ServletContext;
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServlet;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-import javax.servlet.http.HttpSession;
-
 import org.apache.ambari.view.ViewContext;
-import org.apache.ambari.view.huetoambarimigration.controller.configurationcheck.ProgressBarStatus;
+import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.MigrationModel;
+import org.apache.ambari.view.huetoambarimigration.persistence.utils.ItemNotFound;
+import org.apache.ambari.view.huetoambarimigration.resources.PersonalCRUDResourceManager;
+import org.apache.ambari.view.huetoambarimigration.resources.scripts.MigrationResourceManager;
+import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.MigrationResponse;
 import org.apache.log4j.Logger;
-import org.jdom.Attribute;
 import org.jdom.Document;
 import org.jdom.Element;
 import org.jdom.JDOMException;
 import org.jdom.input.SAXBuilder;
-import org.jdom.output.Format;
-import org.jdom.output.XMLOutputter;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.security.UserGroupInformation;
 
 import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceAmbariDatabase;
-import org.apache.ambari.view.huetoambarimigration.service.configurationcheck.ConfFileReader;
+import org.apache.ambari.view.huetoambarimigration.migration.configuration.ConfigurationCheckImplementation;
+
 
+public class RevertChangeUtility  {
 
-public class RevertChange extends HttpServlet {
 
-  private static final long serialVersionUID = 1L;
-  ViewContext view;
 
-  @Override
-  public void init(ServletConfig config) throws ServletException {
+  protected MigrationResourceManager resourceManager = null;
 
-    super.init(config);
-    ServletContext context = config.getServletContext();
-    view = (ViewContext) context.getAttribute(ViewContext.CONTEXT_ATTRIBUTE);
+  public synchronized PersonalCRUDResourceManager<MigrationResponse> getResourceManager(ViewContext view) {
+    if (resourceManager == null) {
+      resourceManager = new MigrationResourceManager(view);
+    }
+    return resourceManager;
   }
 
   public boolean stringtoDatecompare(String datefromservlet,
                                      String datefromfile) throws ParseException {
 
-    SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd hh:mm:ss");
+    SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd");
     Date date1 = formatter.parse(datefromservlet);
     Date date2 = formatter.parse(datefromfile);
     if (date1.compareTo(date2) < 0) {
@@ -120,21 +111,19 @@ public class RevertChange extends HttpServlet {
     }
   }
 
-  protected void doGet(HttpServletRequest request,
-                       HttpServletResponse response) throws ServletException, IOException {
+  public MigrationModel revertChangeUtility(String instance, String revertDate, String jobid, ViewContext view, MigrationResponse migrationresult) throws IOException, ItemNotFound {
 
-    final Logger logger = Logger.getLogger(RevertChange.class);
+    long startTime = System.currentTimeMillis();
+
+    final Logger logger = Logger.getLogger(RevertChangeUtility.class);
 
     logger.info("------------------------------");
     logger.info("Reverting the changes Start:");
     logger.info("------------------------------");
 
-    HttpSession session = request.getSession(true);
-    String revertDate = request.getParameter("revertdate");
-    String instance = request.getParameter("instance");
-
     logger.info("Revert Date " + revertDate);
     logger.info("instance name " + instance);
+    int i = 0;
 
     BufferedReader br = null;
     Connection connectionAmbariDatabase = null;
@@ -146,32 +135,42 @@ public class RevertChange extends HttpServlet {
       Statement stmt = null;
       stmt = connectionAmbariDatabase.createStatement();
       SAXBuilder builder = new SAXBuilder();
-      File xmlFile = new File(ConfFileReader.getHomeDir() + "RevertChange.xml");
+      File xmlFile = new File(ConfigurationCheckImplementation.getHomeDir() + "RevertChangesService.xml");
       try {
 
         Document document = (Document) builder.build(xmlFile);
         Element rootNode = document.getRootElement();
         List list = rootNode.getChildren("RevertRecord");
-
-        for (int i = 0; i < list.size(); i++) {
+        logger.info("list size is = "+list.size());
+        for (i = 0; i < list.size(); i++) {
 
           float calc = ((float) (i + 1)) / list.size() * 100;
           int progressPercentage = Math.round(calc);
-          session.setAttribute(ProgressBarStatus.TASK_PROGRESS_VARIABLE, progressPercentage);
+
+          migrationresult.setIsNoQuerySelected("no");
+          migrationresult.setProgressPercentage(progressPercentage);
+          migrationresult.setNumberOfQueryTransfered(i + 1);
+          migrationresult.setTotalNoQuery(list.size());
+
+          getResourceManager(view).update(migrationresult, jobid);
 
           Element node = (Element) list.get(i);
 
           if (node.getChildText("instance").equals(instance)) {
+            logger.info("instance matched");
 
             if (stringtoDatecompare(revertDate, node.getChildText("datetime").toString())) {
-
+              logger.info("date is less query is sucess");
               String sql = node.getChildText("query");
               logger.info(sql);
               stmt.executeUpdate(sql);
               removedir(node.getChildText("dirname").toString(), view.getProperties().get("namenode_URI_Ambari"));
-              logger.info(node.getChildText("dirname").toString()+" deleted");
+              logger.info(node.getChildText("dirname").toString() + " deleted");
 
             }
+            else {
+              logger.info("date is big query is failed");
+            }
 
           }
 
@@ -179,39 +178,48 @@ public class RevertChange extends HttpServlet {
 
         connectionAmbariDatabase.commit();
 
-        response.setContentType("text/html");
-        PrintWriter out = response.getWriter();
-        out.println("<br>");
-        out.println("<h4>" + " The change has been revert back for "
-          + instance + "</h4>");
 
-        session.setAttribute(ProgressBarStatus.TASK_PROGRESS_VARIABLE, 0);
+
 
         logger.info("------------------------------");
         logger.info("Reverting the changes End");
         logger.info("------------------------------");
 
       } catch (IOException e) {
-        logger.error("IOException: ",e);
+        logger.error("IOException: ", e);
       } catch (ParseException e) {
-        logger.error("ParseException: ",e);
+        logger.error("ParseException: ", e);
       } catch (JDOMException e) {
-        logger.error("JDOMException: ",e);
+        logger.error("JDOMException: ", e);
       } catch (URISyntaxException e) {
-        logger.error("URISyntaxException:  ",e);
+        logger.error("URISyntaxException:  ", e);
       }
     } catch (SQLException e1) {
-      logger.error("SqlException  ",e1);
+      logger.error("SqlException  ", e1);
       try {
         connectionAmbariDatabase.rollback();
         logger.info("Rollback done");
       } catch (SQLException e2) {
-        logger.error("SqlException in Rollback  ",e2);
+        logger.error("SqlException in Rollback  ", e2);
       }
     } catch (PropertyVetoException e) {
-      logger.error("PropertyVetoException: ",e);
+      logger.error("PropertyVetoException: ", e);
     }
 
+    long stopTime = System.currentTimeMillis();
+    long elapsedTime = stopTime - startTime;
+
+    MigrationModel model = new MigrationModel();
+//    model.setInstanceName(instance);
+//    model.setNumberofQueryTransfered(i + 1);
+//    model.setTimeTakentotransfer(String.valueOf(elapsedTime));
+
+    return model;
   }
 
+
 }
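stringtoDatecompare returns true only when the first date is strictly earlier than the second, so a record is reverted exactly when its logged datetime falls after the requested revert date. A self-contained check of that behavior:

import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;

class DateCompareSketch {
  static boolean stringtoDatecompare(String datefromservlet, String datefromfile)
      throws ParseException {
    SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd");
    Date date1 = formatter.parse(datefromservlet);
    Date date2 = formatter.parse(datefromfile);
    return date1.compareTo(date2) < 0;
  }

  public static void main(String[] args) throws ParseException {
    // A record written on 2016-05-10 is reverted when rolling back to 2016-05-01...
    System.out.println(stringtoDatecompare("2016-05-01", "2016-05-10")); // true
    // ...while an older record is left untouched.
    System.out.println(stringtoDatecompare("2016-05-01", "2016-04-20")); // false
  }
}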

+ 151 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/persistence/DataStoreStorage.java

@@ -0,0 +1,151 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.huetoambarimigration.persistence;
+
+import org.apache.ambari.view.PersistenceException;
+import org.apache.ambari.view.ViewContext;
+
+import org.apache.ambari.view.huetoambarimigration.utils.ServiceFormattedException;
+import org.apache.ambari.view.huetoambarimigration.persistence.utils.*;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.ws.rs.WebApplicationException;
+import java.util.LinkedList;
+import java.util.List;
+
+/**
+ * Engine for storing objects to context DataStore storage
+ */
+public class DataStoreStorage implements Storage {
+  private final static Logger LOG =
+      LoggerFactory.getLogger(DataStoreStorage.class);
+  protected ViewContext context;
+
+  /**
+   * Constructor
+   * @param context View Context instance
+   */
+  public DataStoreStorage(ViewContext context) {
+    this.context = context;
+  }
+
+  @Override
+  public void store(Indexed obj) {
+    try {
+      if (obj.getId() == null) {
+        int id = nextIdForEntity(context, obj.getClass());
+        obj.setId(String.valueOf(id));
+      }
+      context.getDataStore().store(obj);
+    } catch (PersistenceException e) {
+      throw new ServiceFormattedException("Error while saving object to DataStorage", e);
+    }
+  }
+
+  private static int nextIdForEntity(ViewContext context, Class aClass) {
+    // auto increment id implementation
+    String lastId = context.getInstanceData(aClass.getName());
+    int newId;
+    if (lastId == null) {
+      newId = 1;
+    } else {
+      newId = Integer.parseInt(lastId) + 1;
+    }
+    context.putInstanceData(aClass.getName(), String.valueOf(newId));
+    return newId;
+  }
+
+  @Override
+  public  <T extends Indexed> T load(Class<T> model, int id) throws ItemNotFound {
+    LOG.debug(String.format("Loading %s #%d", model.getName(), id));
+    try {
+      T obj = context.getDataStore().find(model, String.valueOf(id));
+      if (obj != null) {
+        return obj;
+      } else {
+        throw new ItemNotFound();
+      }
+    } catch (PersistenceException e) {
+      throw new ServiceFormattedException("Error while finding object in DataStorage", e);
+    }
+  }
+
+  @Override
+  public synchronized <T extends Indexed> List<T> loadAll(Class<T> model, FilteringStrategy filter) {
+    LinkedList<T> list = new LinkedList<T>();
+    LOG.debug(String.format("Loading all %s-s", model.getName()));
+    try {
+      for(T item: context.getDataStore().findAll(model, null)) {
+        if ((filter == null) || filter.isConform(item)) {
+          list.add(item);
+        }
+      }
+    } catch (PersistenceException e) {
+      throw new ServiceFormattedException("Error while finding all objects in DataStorage", e);
+    }
+    return list;
+  }
+
+  @Override
+  public synchronized <T extends Indexed> List<T> loadAll(Class<T> model) {
+    return loadAll(model, new OnlyOwnersFilteringStrategy(this.context.getUsername()));
+  }
+
+  @Override
+  public synchronized void delete(Class model, int id) throws ItemNotFound {
+    LOG.debug(String.format("Deleting %s:%d", model.getName(), id));
+    Object obj = load(model, id);
+    try {
+      context.getDataStore().remove(obj);
+    } catch (PersistenceException e) {
+      throw new ServiceFormattedException("Error while removing object from DataStorage", e);
+    }
+  }
+
+  @Override
+  public boolean exists(Class model, int id) {
+    try {
+      return context.getDataStore().find(model, String.valueOf(id)) != null;
+    } catch (PersistenceException e) {
+      throw new ServiceFormattedException("Error while finding object in DataStorage", e);
+    }
+  }
+
+  public static void storageSmokeTest(ViewContext context) {
+    try {
+      SmokeTestEntity entity = new SmokeTestEntity();
+      entity.setData("42");
+      DataStoreStorage storage = new DataStoreStorage(context);
+      storage.store(entity);
+
+      if (entity.getId() == null) throw new ServiceFormattedException("Ambari Views instance data DB doesn't work properly (auto increment id doesn't work)", null);
+      int id = Integer.parseInt(entity.getId());
+      SmokeTestEntity entity2 = storage.load(SmokeTestEntity.class, id);
+      boolean status = entity2.getData().compareTo("42") == 0;
+      storage.delete(SmokeTestEntity.class, id);
+      if (!status) throw new ServiceFormattedException("Ambari Views instance data DB doesn't work properly", null);
+    } catch (WebApplicationException ex) {
+      throw ex;
+    } catch (Exception ex) {
+      throw new ServiceFormattedException(ex.getMessage(), ex);
+    }
+  }
+
+}
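store() hands out ids by bumping a per-class counter kept in instance data, so a freshly stored bean can be reloaded by the id it was just given. A usage sketch (exception handling is left to the caller; SmokeTestEntity is defined later in this diff):

void smokeUsage(ViewContext context) throws ItemNotFound {
  DataStoreStorage storage = new DataStoreStorage(context);
  SmokeTestEntity entity = new SmokeTestEntity();
  entity.setData("payload");
  storage.store(entity);                      // id auto-assigned, e.g. "1"
  int id = Integer.parseInt(entity.getId());
  SmokeTestEntity reloaded = storage.load(SmokeTestEntity.class, id);
  storage.delete(SmokeTestEntity.class, id);  // remove the test row again
}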

+ 132 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/persistence/InstanceKeyValueStorage.java

@@ -0,0 +1,132 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.huetoambarimigration.persistence;
+
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.huetoambarimigration.persistence.utils.*;
+import org.apache.ambari.view.huetoambarimigration.utils.ServiceFormattedException;
+import org.apache.commons.configuration.Configuration;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.ws.rs.WebApplicationException;
+
+
+/**
+ * Persistent storage engine for storing java beans to
+ * instance data
+ */
+@Deprecated
+public class InstanceKeyValueStorage extends KeyValueStorage {
+  private final static Logger LOG =
+      LoggerFactory.getLogger(InstanceKeyValueStorage.class);
+
+  private ContextConfigurationAdapter config = null;
+  private int VALUE_LENGTH_LIMIT = 254;
+
+  /**
+   * Constructor.
+   * @param context View Context instance
+   */
+  public InstanceKeyValueStorage(ViewContext context) {
+    super(context);
+  }
+
+  /**
+   * Returns config instance, adapter to Persistence API
+   * @return config instance
+   */
+  @Override
+  protected synchronized Configuration getConfig() {
+    if (config == null) {
+      config = new ContextConfigurationAdapter(context);
+    }
+    return config;
+  }
+
+  /**
+   * Value is limited to 256 symbols; this code splits the value into chunks and saves them as <key>#<chunk_id>
+   * @param modelPropName key
+   * @param json value
+   */
+  protected void write(String modelPropName, String json) {
+    int saved = 0;
+    int page = 1;
+    while (saved < json.length()) {
+      int end = Math.min(saved + VALUE_LENGTH_LIMIT, json.length());
+      String substring = json.substring(saved, end);
+      getConfig().setProperty(modelPropName + "#" + page, substring);
+      LOG.debug("Chunk saved: " + modelPropName + "#" + page + "=" + substring);
+      saved += VALUE_LENGTH_LIMIT;
+      page += 1;
+    }
+    getConfig().setProperty(modelPropName, page - 1);
+    LOG.debug("Write finished: " + modelPropName + " pages:" + (page - 1));
+  }
+
+  /**
+   * Read chunked value (keys format <key>#<chunk_id>)
+   * @param modelPropName key
+   * @return value
+   */
+  protected String read(String modelPropName) {
+    StringBuilder result = new StringBuilder();
+    int pages = getConfig().getInt(modelPropName);
+    LOG.debug("Read started: " + modelPropName + " pages:" + pages);
+
+    for(int page = 1; page <= pages; page++) {
+      String substring = getConfig().getString(modelPropName + "#" + page);
+      LOG.debug("Chunk read: " + modelPropName + "#" + page + "=" + substring);
+      if (substring != null) {
+        result.append(substring);
+      }
+    }
+
+    return result.toString();
+  }
+
+  /**
+   * Remove chunked value (keys format <key>#<chunk_id>)
+   * @param modelPropName key
+   */
+  protected void clear(String modelPropName) {
+    int pages = getConfig().getInt(modelPropName);
+    LOG.debug("Clean started: " + modelPropName + " pages:" + pages);
+
+    for(int page = 1; page <= pages; page++) {
+      getConfig().clearProperty(modelPropName + "#" + page);
+      LOG.debug("Chunk clean: " + modelPropName + "#" + page);
+    }
+    getConfig().clearProperty(modelPropName);
+  }
+
+  public static void storageSmokeTest(ViewContext context) {
+    try {
+      final String property = "test.smoke.property";
+      context.putInstanceData(property, "42");
+      boolean status = context.getInstanceData(property).equals("42");
+      context.removeInstanceData(property);
+      if (!status) throw new ServiceFormattedException("Ambari Views instance data DB doesn't work properly", null);
+    } catch (WebApplicationException ex) {
+      throw ex;
+    } catch (Exception ex) {
+      throw new ServiceFormattedException(ex.getMessage(), ex);
+    }
+  }
+}
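Because a single instance-data value is capped near 256 characters, write() stores ceil(length / 254) chunks under <key>#<page> and records the page count under the bare key, which read() and clear() then consult. For a 600-character JSON value:

// Chunk layout with VALUE_LENGTH_LIMIT = 254:
//   <key>#1 -> chars [0, 254)
//   <key>#2 -> chars [254, 508)
//   <key>#3 -> chars [508, 600)
//   <key>   -> 3  (page count)
int length = 600, limit = 254;
int pages = (length + limit - 1) / limit; // = 3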

+ 162 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/persistence/KeyValueStorage.java

@@ -0,0 +1,162 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.huetoambarimigration.persistence;
+
+import com.google.gson.Gson;
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.huetoambarimigration.persistence.utils.FilteringStrategy;
+import org.apache.ambari.view.huetoambarimigration.persistence.utils.Indexed;
+import org.apache.ambari.view.huetoambarimigration.persistence.utils.ItemNotFound;
+import org.apache.ambari.view.huetoambarimigration.persistence.utils.OnlyOwnersFilteringStrategy;
+import org.apache.commons.configuration.Configuration;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Engine for storing objects to key-value storage
+ */
+public abstract class KeyValueStorage implements Storage {
+  private final static Logger LOG =
+      LoggerFactory.getLogger(KeyValueStorage.class);
+  protected final Gson gson = new Gson();
+  protected ViewContext context;
+
+  /**
+   * Constructor
+   * @param context View Context instance
+   */
+  public KeyValueStorage(ViewContext context) {
+    this.context = context;
+  }
+
+  /**
+   * Returns config instance, adapter to Persistence API
+   * @return config instance
+   */
+  protected abstract Configuration getConfig();
+
+  @Override
+  public synchronized void store(Indexed obj) {
+    String modelIndexingPropName = getIndexPropertyName(obj.getClass());
+
+    if (obj.getId() == null) {
+      int lastIndex = getConfig().getInt(modelIndexingPropName, 0);
+      lastIndex ++;
+      getConfig().setProperty(modelIndexingPropName, lastIndex);
+      obj.setId(Integer.toString(lastIndex));
+    }
+
+    String modelPropName = getItemPropertyName(obj.getClass(), Integer.parseInt(obj.getId()));
+    String json = serialize(obj);
+    write(modelPropName, json);
+  }
+
+  @Override
+  public <T extends Indexed> T load(Class<T> model, int id) throws ItemNotFound {
+    String modelPropName = getItemPropertyName(model, id);
+    LOG.debug(String.format("Loading %s", modelPropName));
+    if (getConfig().containsKey(modelPropName)) {
+      String json = read(modelPropName);
+      LOG.debug(String.format("json: %s", json));
+      return deserialize(model, json);
+    } else {
+      throw new ItemNotFound();
+    }
+  }
+
+  /**
+   * Write json to storage
+   * @param modelPropName key
+   * @param json value
+   */
+  protected void write(String modelPropName, String json) {
+    getConfig().setProperty(modelPropName, json);
+  }
+
+  /**
+   * Read json from storage
+   * @param modelPropName key
+   * @return value
+   */
+  protected String read(String modelPropName) {
+    return getConfig().getString(modelPropName);
+  }
+
+  /**
+   * Remove line from storage
+   * @param modelPropName key
+   */
+  protected void clear(String modelPropName) {
+    getConfig().clearProperty(modelPropName);
+  }
+
+  protected String serialize(Indexed obj) {
+    return gson.toJson(obj);
+  }
+
+  protected <T extends Indexed> T deserialize(Class<T> model, String json) {
+    return gson.fromJson(json, model);
+  }
+
+  @Override
+  public synchronized <T extends Indexed> List<T> loadAll(Class<T> model, FilteringStrategy filter) {
+    ArrayList<T> list = new ArrayList<T>();
+    String modelIndexingPropName = getIndexPropertyName(model);
+    LOG.debug(String.format("Loading all %s-s", model.getName()));
+    int lastIndex = getConfig().getInt(modelIndexingPropName, 0);
+    for(int i=1; i<=lastIndex; i++) {
+      try {
+        T item = load(model, i);
+        if ((filter == null) || filter.isConform(item)) {
+          list.add(item);
+        }
+      } catch (ItemNotFound ignored) {
+      }
+    }
+    return list;
+  }
+
+  @Override
+  public synchronized <T extends Indexed> List<T> loadAll(Class<T> model) {
+    return loadAll(model, new OnlyOwnersFilteringStrategy(this.context.getUsername()));
+  }
+
+  @Override
+  public synchronized void delete(Class model, int id) {
+    LOG.debug(String.format("Deleting %s:%d", model.getName(), id));
+    String modelPropName = getItemPropertyName(model, id);
+    clear(modelPropName);
+  }
+
+  @Override
+  public boolean exists(Class model, int id) {
+    return getConfig().containsKey(getItemPropertyName(model, id));
+  }
+
+  private String getIndexPropertyName(Class model) {
+    return String.format("%s:index", model.getName());
+  }
+
+  private String getItemPropertyName(Class model, int id) {
+    return String.format("%s.%d", model.getName(), id);
+  }
+}
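Every bean class gets one index key holding the last assigned id and one item key per stored object; loadAll() walks ids 1..lastIndex and silently skips the gaps left by delete(). The key shapes produced by the private helpers above, for a bean class M (fully-qualified name shortened here):

String indexKey = String.format("%s:index", M.class.getName()); // e.g. "M:index" -> "2"
String itemKey  = String.format("%s.%d", M.class.getName(), 2); // e.g. "M.2" -> gson JSON
// delete(M, 1) clears only key M.1; a later loadAll(M) catches the
// ItemNotFound for id 1 and returns the bean stored under M.2 alone.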

+ 70 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/persistence/LocalKeyValueStorage.java

@@ -0,0 +1,70 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.huetoambarimigration.persistence;
+
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.huetoambarimigration.utils.MisconfigurationFormattedException;
+import org.apache.commons.configuration.ConfigurationException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+
+/**
+ * Persistent storage engine for storing java beans to
+ * properties file
+ * Path to file should be in 'dataworker.storagePath' parameter
+ */
+@Deprecated
+public class LocalKeyValueStorage extends KeyValueStorage {
+  private final static Logger LOG =
+      LoggerFactory.getLogger(LocalKeyValueStorage.class);
+
+  private PersistentConfiguration config = null;
+
+  /**
+   * Constructor
+   * @param context View Context instance
+   */
+  public LocalKeyValueStorage(ViewContext context) {
+    super(context);
+  }
+
+  /**
+   * Returns config instance
+   * @return config instance
+   */
+  @Override
+  protected synchronized PersistentConfiguration getConfig() {
+    if (config == null) {
+      String fileName = context.getProperties().get("dataworker.storagePath");
+      if (fileName == null) {
+        String msg = "dataworker.storagePath is not configured!";
+        LOG.error(msg);
+        throw new MisconfigurationFormattedException("dataworker.storagePath");
+      }
+      try {
+        config = new PersistentConfiguration(fileName);
+      } catch (ConfigurationException e) {
+        e.printStackTrace();
+      }
+    }
+    return config;
+  }
+
+}

+ 52 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/persistence/PersistentConfiguration.java

@@ -0,0 +1,52 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.huetoambarimigration.persistence;
+
+import org.apache.commons.configuration.ConfigurationException;
+import org.apache.commons.configuration.PropertiesConfiguration;
+import org.apache.commons.configuration.reloading.FileChangedReloadingStrategy;
+
+import java.io.File;
+
+/**
+ * Configuration enables all necessary options for PropertiesConfiguration:
+ * auto-save, auto-reloading, no delimiter parsing and other
+ */
+@Deprecated
+public class PersistentConfiguration extends PropertiesConfiguration {
+  /**
+   * Constructor
+   * @param fileName path to data file
+   * @throws ConfigurationException
+   */
+  public PersistentConfiguration(String fileName) throws ConfigurationException {
+    super();
+
+    File config = new File(fileName);
+    setFile(config);
+    this.setAutoSave(true);
+    this.setReloadingStrategy(new FileChangedReloadingStrategy());
+    this.setDelimiterParsingDisabled(true);
+    this.setListDelimiter((char) 0);
+
+    if (config.exists()) {
+      this.load();
+    }
+  }
+}
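PersistentConfiguration layers auto-save and hot-reload onto a plain properties file, so every setProperty() lands on disk immediately and external edits are picked up on the next read. A usage sketch (the file path is illustrative):

void demo() throws org.apache.commons.configuration.ConfigurationException {
  PersistentConfiguration config =
      new PersistentConfiguration("/tmp/hueambarimigration-storage.properties");
  config.setProperty("SmokeTestEntity:index", 1);      // flushed to disk by auto-save
  int lastId = config.getInt("SmokeTestEntity:index"); // 1, re-read after file changes
}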

+ 45 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/persistence/SmokeTestEntity.java

@@ -0,0 +1,45 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.huetoambarimigration.persistence;
+
+import org.apache.ambari.view.huetoambarimigration.persistence.utils.Indexed;
+
+/**
+ * DataStore entity to test whether DS interface works correctly.
+ */
+public class SmokeTestEntity implements Indexed {
+  private String id = null;
+  private String data = null;
+
+  public String getId() {
+    return id;
+  }
+
+  public void setId(String id) {
+    this.id = id;
+  }
+
+  public String getData() {
+    return data;
+  }
+
+  public void setData(String data) {
+    this.data = data;
+  }
+}

+ 78 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/persistence/Storage.java

@@ -0,0 +1,78 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.huetoambarimigration.persistence;
+
+import org.apache.ambari.view.huetoambarimigration.persistence.utils.FilteringStrategy;
+import org.apache.ambari.view.huetoambarimigration.persistence.utils.Indexed;
+import org.apache.ambari.view.huetoambarimigration.persistence.utils.ItemNotFound;
+
+import java.util.List;
+
+/**
+ * Object storage interface
+ */
+public interface Storage {
+  /**
+   * Persist object to DB. It should be Indexed
+   * @param obj object to save
+   */
+  void store(Indexed obj);
+
+  /**
+   * Load object
+   * @param model bean class
+   * @param id identifier
+   * @param <T> bean class
+   * @return bean instance
+   * @throws ItemNotFound thrown if item with id was not found in DB
+   */
+  <T extends Indexed> T load(Class<T> model, int id) throws ItemNotFound;
+
+  /**
+   * Load all objects of given bean class
+   * @param model bean class
+   * @param filter filtering strategy (return only those objects that conform condition)
+   * @param <T> bean class
+   * @return list of filtered objects
+   */
+  <T extends Indexed> List<T> loadAll(Class<T> model, FilteringStrategy filter);
+
+  /**
+   * Load all objects of given bean class
+   * @param model bean class
+   * @param <T> bean class
+   * @return list of all objects
+   */
+  <T extends Indexed> List<T> loadAll(Class<T> model);
+
+  /**
+   * Delete object
+   * @param model bean class
+   * @param id identifier
+   */
+  void delete(Class model, int id) throws ItemNotFound;
+
+  /**
+   * Check is object exists
+   * @param model bean class
+   * @param id identifier
+   * @return true if exists
+   */
+  boolean exists(Class model, int id);
+}
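Code written against Storage stays independent of the engine, so the DataStore and key-value backends in this commit are interchangeable. A hypothetical consumer (assuming MigrationResponse implements Indexed, as the resource managers in this commit require):

static MigrationResponse firstOrNull(Storage storage) {
  java.util.List<MigrationResponse> all = storage.loadAll(MigrationResponse.class);
  return all.isEmpty() ? null : all.get(0);
}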

+ 260 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/persistence/utils/ContextConfigurationAdapter.java

@@ -0,0 +1,260 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.huetoambarimigration.persistence.utils;
+
+import org.apache.ambari.view.ViewContext;
+import org.apache.commons.configuration.Configuration;
+
+import java.math.BigDecimal;
+import java.math.BigInteger;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+
+/**
+ * Persistence API to Apache Configuration adapter
+ */
+@Deprecated
+public class ContextConfigurationAdapter implements Configuration {
+  private ViewContext context;
+
+  /**
+   * Constructor of adapter
+   * @param context View Context
+   */
+  public ContextConfigurationAdapter(ViewContext context) {
+    this.context = context;
+  }
+
+  @Override
+  public Configuration subset(String prefix) {
+    throw new UnsupportedOperationException();
+  }
+
+  @Override
+  public boolean isEmpty() {
+    return context.getInstanceData().isEmpty();
+  }
+
+  @Override
+  public boolean containsKey(String s) {
+    Map<String, String> data = context.getInstanceData();
+    return data.containsKey(s);
+  }
+
+  @Override
+  public void addProperty(String s, Object o) {
+    throw new UnsupportedOperationException();
+  }
+
+  @Override
+  public void setProperty(String s, Object o) {
+    context.putInstanceData(s, o.toString());
+  }
+
+  @Override
+  public void clearProperty(String key) {
+    context.removeInstanceData(key);
+  }
+
+  @Override
+  public void clear() {
+    for (String key : context.getInstanceData().keySet())
+      context.removeInstanceData(key);
+  }
+
+  @Override
+  public Object getProperty(String key) {
+    return context.getInstanceData(key);
+  }
+
+  @Override
+  public Iterator getKeys(String s) {
+    throw new UnsupportedOperationException();
+  }
+
+  @Override
+  public Iterator getKeys() {
+    return context.getInstanceData().keySet().iterator();
+  }
+
+  @Override
+  public Properties getProperties(String s) {
+    throw new UnsupportedOperationException();
+  }
+
+  @Override
+  public boolean getBoolean(String s) {
+    return getBoolean(s, null);
+  }
+
+  @Override
+  public boolean getBoolean(String s, boolean b) {
+    return getBoolean(s, (Boolean)b);
+  }
+
+  @Override
+  public Boolean getBoolean(String s, Boolean aBoolean) {
+    String data = context.getInstanceData(s);
+    return (data != null)?Boolean.parseBoolean(data):aBoolean;
+  }
+
+  @Override
+  public byte getByte(String s) {
+    return getByte(s, null);
+  }
+
+  @Override
+  public byte getByte(String s, byte b) {
+    return getByte(s, (Byte)b);
+  }
+
+  @Override
+  public Byte getByte(String s, Byte aByte) {
+    String data = context.getInstanceData(s);
+    return (data != null)?Byte.parseByte(data):aByte;
+  }
+
+  @Override
+  public double getDouble(String s) {
+    return getDouble(s, null);
+  }
+
+  @Override
+  public double getDouble(String s, double v) {
+    return getDouble(s, (Double)v);
+  }
+
+  @Override
+  public Double getDouble(String s, Double aDouble) {
+    String data = context.getInstanceData(s);
+    return (data != null)?Double.parseDouble(data):aDouble;
+  }
+
+  @Override
+  public float getFloat(String s) {
+    return getFloat(s, null);
+  }
+
+  @Override
+  public float getFloat(String s, float v) {
+    return getFloat(s, (Float)v);
+  }
+
+  @Override
+  public Float getFloat(String s, Float aFloat) {
+    String data = context.getInstanceData(s);
+    return (data != null)?Float.parseFloat(data):aFloat;
+  }
+
+  @Override
+  public int getInt(String s) {
+    return getInteger(s, null);
+  }
+
+  @Override
+  public int getInt(String s, int i) {
+    return getInteger(s, i);
+  }
+
+  @Override
+  public Integer getInteger(String s, Integer integer) {
+    String data = context.getInstanceData(s);
+    return (data != null)?Integer.parseInt(data):integer;
+  }
+
+  @Override
+  public long getLong(String s) {
+    return getLong(s, null);
+  }
+
+  @Override
+  public long getLong(String s, long l) {
+    return getLong(s, (Long)l);
+  }
+
+  @Override
+  public Long getLong(String s, Long aLong) {
+    String data = context.getInstanceData(s);
+    return (data != null)?Long.parseLong(data):aLong;
+  }
+
+  @Override
+  public short getShort(String s) {
+    return getShort(s, null);
+  }
+
+  @Override
+  public short getShort(String s, short i) {
+    return getShort(s, (Short)i);
+  }
+
+  @Override
+  public Short getShort(String s, Short aShort) {
+    String data = context.getInstanceData(s);
+    return (data != null)?Short.parseShort(data):aShort;
+  }
+
+  @Override
+  public BigDecimal getBigDecimal(String s) {
+    throw new UnsupportedOperationException();
+  }
+
+  @Override
+  public BigDecimal getBigDecimal(String s, BigDecimal bigDecimal) {
+    throw new UnsupportedOperationException();
+  }
+
+  @Override
+  public BigInteger getBigInteger(String s) {
+    throw new UnsupportedOperationException();
+  }
+
+  @Override
+  public BigInteger getBigInteger(String s, BigInteger bigInteger) {
+    throw new UnsupportedOperationException();
+  }
+
+  @Override
+  public String getString(String s) {
+    return context.getInstanceData(s);
+  }
+
+  @Override
+  public String getString(String s, String s2) {
+    String data = getString(s);
+    return (data != null)?data:s2;
+  }
+
+  @Override
+  public String[] getStringArray(String s) {
+    throw new UnsupportedOperationException();
+  }
+
+  @Override
+  public List getList(String s) {
+    throw new UnsupportedOperationException();
+  }
+
+  @Override
+  public List getList(String s, List list) {
+    throw new UnsupportedOperationException();
+  }
+}

+ 31 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/persistence/utils/FilteringStrategy.java

@@ -0,0 +1,31 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.huetoambarimigration.persistence.utils;
+
+/**
+ * Filtering strategy for stored objects
+ */
+public interface FilteringStrategy {
+  /**
+   * Check whether item conforms chosen filter or not
+   * @param item item to check
+   * @return true if item conforms this filter
+   */
+  boolean isConform(Indexed item);
+}
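OnlyOwnersFilteringStrategy is referenced by the storage engines above but its body is not part of this excerpt. A plausible implementation of the interface, assuming beans expose their owner through a getOwner() accessor (the Owned interface below is an assumption, not part of this commit):

interface Owned { String getOwner(); }

class OnlyOwnersFilteringStrategySketch implements FilteringStrategy {
  private final String username;

  OnlyOwnersFilteringStrategySketch(String username) { this.username = username; }

  @Override
  public boolean isConform(Indexed item) {
    // Keep only items recorded as belonging to the current view user.
    return (item instanceof Owned) && username.equals(((Owned) item).getOwner());
  }
}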

+ 36 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/persistence/utils/Indexed.java

@@ -0,0 +1,36 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.huetoambarimigration.persistence.utils;
+
+/**
+ * Interface to represent item with identifier
+ */
+public interface Indexed {
+  /**
+   * Get the ID
+   * @return ID
+   */
+  String getId();
+
+  /**
+   * Set ID
+   * @param id ID
+   */
+  void setId(String id);
+}

+ 25 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/persistence/utils/ItemNotFound.java

@@ -0,0 +1,25 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.huetoambarimigration.persistence.utils;
+
+/**
+ * Thrown when item was not found in DB
+ */
+public class ItemNotFound extends Exception {
+}

Some files were not shown because too many files changed in this diff