Browse files

AMBARI-15933. Views work for Hue to Views Migration Tool (Pradarttana Panda via dipayanb)

Dipayan Bhowmick 9 years ago
parent
commit
af9654ff12
33 changed files with 6213 additions and 0 deletions
  1. contrib/views/hueambarimigration/pom.xml (+246, -0)
  2. contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/controller/configurationcheck/ConfigurationCheck.java (+182, -0)
  3. contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/controller/configurationcheck/ProgressBarStatus.java (+54, -0)
  4. contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/controller/hive/HiveHistoryMigration.java (+222, -0)
  5. contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/controller/hive/HiveSavedQueryMigration.java (+231, -0)
  6. contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/controller/pig/PigJobMigration.java (+201, -0)
  7. contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/controller/pig/PigScriptMigration.java (+208, -0)
  8. contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/controller/revertchange/RevertChange.java (+217, -0)
  9. contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/DataSourceAmbariDatabase.java (+65, -0)
  10. contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/DataSourceHueDatabase.java (+64, -0)
  11. contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/model/PojoHive.java (+51, -0)
  12. contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/model/PojoPig.java (+72, -0)
  13. contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/service/configurationcheck/ConfFileReader.java (+199, -0)
  14. contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/service/hive/HiveHistoryQueryImpl.java (+562, -0)
  15. contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/service/hive/HiveSavedQueryImpl.java (+778, -0)
  16. contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/service/pig/PigJobImpl.java (+563, -0)
  17. contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/service/pig/PigScriptImpl.java (+600, -0)
  18. contrib/views/hueambarimigration/src/main/resources/WEB-INF/web.xml (+123, -0)
  19. contrib/views/hueambarimigration/src/main/resources/image/updateimg.gif (BIN)
  20. contrib/views/hueambarimigration/src/main/resources/index.jsp (+119, -0)
  21. contrib/views/hueambarimigration/src/main/resources/ui/.gitignore (+33, -0)
  22. contrib/views/hueambarimigration/src/main/resources/ui/bower.json (+15, -0)
  23. contrib/views/hueambarimigration/src/main/resources/ui/checkconfiguration.jsp (+57, -0)
  24. contrib/views/hueambarimigration/src/main/resources/ui/hivehistoryquerymigration.jsp (+229, -0)
  25. contrib/views/hueambarimigration/src/main/resources/ui/hivesavedquerymigration.jsp (+240, -0)
  26. contrib/views/hueambarimigration/src/main/resources/ui/homepage.jsp (+31, -0)
  27. contrib/views/hueambarimigration/src/main/resources/ui/package.json (+27, -0)
  28. contrib/views/hueambarimigration/src/main/resources/ui/pigjobmigration.jsp (+233, -0)
  29. contrib/views/hueambarimigration/src/main/resources/ui/pigscriptsmigration.jsp (+227, -0)
  30. contrib/views/hueambarimigration/src/main/resources/ui/revertchange.jsp (+203, -0)
  31. contrib/views/hueambarimigration/src/main/resources/view.log4j.properties (+31, -0)
  32. contrib/views/hueambarimigration/src/main/resources/view.xml (+129, -0)
  33. contrib/views/pom.xml (+1, -0)

+ 246 - 0
contrib/views/hueambarimigration/pom.xml

@@ -0,0 +1,246 @@
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <parent>
+    <groupId>org.apache.ambari.contrib.views</groupId>
+    <artifactId>ambari-contrib-views</artifactId>
+    <version>2.0.0.0-SNAPSHOT</version>
+  </parent>
+  <modelVersion>4.0.0</modelVersion>
+  <artifactId>huetoambari-view</artifactId>
+  <packaging>jar</packaging>
+  <name>Hue To Ambari Migration-view</name>
+  <version>2.0.0.0-SNAPSHOT</version>
+  <url>http://maven.apache.org</url>
+  <properties>
+    <ambari.dir>${project.parent.parent.parent.basedir}</ambari.dir>
+  </properties>
+
+
+  <dependencies>
+
+    <dependency>
+      <groupId>org.easymock</groupId>
+      <artifactId>easymock</artifactId>
+      <version>3.1</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.ambari</groupId>
+      <artifactId>ambari-views</artifactId>
+      <version>[1.7.0.0,)</version>
+    </dependency>
+
+    <dependency>
+      <groupId>javax.servlet</groupId>
+      <artifactId>servlet-api</artifactId>
+      <version>2.5</version>
+
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-client</artifactId>
+      <version>2.3.0</version>
+    </dependency>
+
+    <dependency>
+      <groupId>org.xerial</groupId>
+      <artifactId>sqlite-jdbc</artifactId>
+      <version>3.7.2</version>
+    </dependency>
+
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <version>3.8.1</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>postgresql</groupId>
+      <artifactId>postgresql</artifactId>
+      <version>8.4-701.jdbc3</version>
+    </dependency>
+    <dependency>
+      <groupId>mysql</groupId>
+      <artifactId>mysql-connector-java</artifactId>
+      <version>5.1.35</version>
+    </dependency>
+
+    <dependency>
+      <groupId>com.googlecode.json-simple</groupId>
+      <artifactId>json-simple</artifactId>
+      <version>1.1.1</version>
+    </dependency>
+    <dependency>
+      <groupId>org.json</groupId>
+      <artifactId>json</artifactId>
+      <version>20151123</version>
+    </dependency>
+    <dependency>
+      <groupId>jdom</groupId>
+      <artifactId>jdom</artifactId>
+      <version>1.1</version>
+    </dependency>
+    <dependency>
+      <groupId>commons-configuration</groupId>
+      <artifactId>commons-configuration</artifactId>
+      <version>1.6</version>
+    </dependency>
+    <dependency>
+      <groupId>log4j</groupId>
+      <artifactId>log4j</artifactId>
+      <version>1.2.17</version>
+    </dependency>
+    <dependency>
+      <groupId>c3p0</groupId>
+      <artifactId>c3p0</artifactId>
+      <version>0.9.1.2</version>
+
+    </dependency>
+
+  </dependencies>
+
+  <build>
+    <pluginManagement>
+      <plugins>
+        <plugin>
+          <groupId>org.apache.maven.plugins</groupId>
+          <artifactId>maven-surefire-plugin</artifactId>
+        </plugin>
+      </plugins>
+    </pluginManagement>
+    <plugins>
+
+
+      <plugin>
+        <groupId>com.github.eirslett</groupId>
+        <artifactId>frontend-maven-plugin</artifactId>
+        <version>0.0.16</version>
+        <configuration>
+          <nodeVersion>v0.12.2</nodeVersion>
+          <npmVersion>1.4.8</npmVersion>
+          <workingDirectory>${project.basedir}/src/main/resources/ui</workingDirectory>
+        </configuration>
+        <executions>
+          <execution>
+            <id>install node and npm</id>
+            <phase>generate-sources</phase>
+            <goals>
+              <goal>install-node-and-npm</goal>
+            </goals>
+          </execution>
+          <execution>
+            <id>npm install</id>
+            <phase>generate-sources</phase>
+            <goals>
+              <goal>npm</goal>
+            </goals>
+            <configuration>
+              <arguments>install --python="${project.basedir}/src/main/unix/ambari-python-wrap" --unsafe-perm
+              </arguments>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+
+
+      <plugin>
+        <artifactId>maven-dependency-plugin</artifactId>
+        <executions>
+          <execution>
+            <phase>generate-resources</phase>
+            <goals>
+              <goal>copy-dependencies</goal>
+            </goals>
+            <configuration>
+              <outputDirectory>${project.build.outputDirectory}/WEB-INF/lib</outputDirectory>
+              <includeScope>runtime</includeScope>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+
+
+    </plugins>
+    <resources>
+
+      <resource>
+        <directory>src/main/resources</directory>
+        <filtering>true</filtering>
+        <includes>
+          <include>index.jsp</include>
+          <include>image/*.*</include>
+          <include>view.xml</include>
+          <include>view.log4j.properties</include>
+          <include>ui/*.*</include>
+          <include>WEB-INF/web.xml</include>
+        </includes>
+      </resource>
+
+
+      <resource>
+        <directory>src/main/resources/ui/bower_components/bootstrap/dist/css/</directory>
+        <filtering>false</filtering>
+        <targetPath>${project.build.outputDirectory}/css</targetPath>
+      </resource>
+
+      <resource>
+        <directory>src/main/resources/ui/bower_components/bootstrap/dist/fonts/</directory>
+        <filtering>false</filtering>
+        <targetPath>${project.build.outputDirectory}/fonts</targetPath>
+      </resource>
+
+
+      <resource>
+        <directory>src/main/resources/ui/bower_components/eonasdan-bootstrap-datetimepicker/build/css</directory>
+        <filtering>false</filtering>
+        <targetPath>${project.build.outputDirectory}/css</targetPath>
+      </resource>
+
+      <resource>
+        <directory>src/main/resources/ui/bower_components/moment/min/</directory>
+        <filtering>false</filtering>
+        <targetPath>${project.build.outputDirectory}/js</targetPath>
+      </resource>
+
+
+      <resource>
+        <directory>src/main/resources/ui/bower_components/eonasdan-bootstrap-datetimepicker/build/js</directory>
+        <filtering>false</filtering>
+        <targetPath>${project.build.outputDirectory}/js</targetPath>
+      </resource>
+
+      <resource>
+        <directory>src/main/resources/ui/bower_components/jquery/dist/</directory>
+        <filtering>false</filtering>
+        <targetPath>${project.build.outputDirectory}/js</targetPath>
+      </resource>
+      <resource>
+        <directory>src/main/resources/ui/bower_components/bootstrap/dist/js</directory>
+        <filtering>false</filtering>
+        <targetPath>${project.build.outputDirectory}/js</targetPath>
+      </resource>
+      <resource>
+        <targetPath>WEB-INF/lib</targetPath>
+        <filtering>false</filtering>
+        <directory>target/lib</directory>
+      </resource>
+    </resources>
+  </build>
+
+</project>
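
A note on the dependency set above: c3p0 plus the PostgreSQL, MySQL, and SQLite JDBC drivers back the DataSourceAmbariDatabase/DataSourceHueDatabase singletons used by every servlet in this commit (those files appear in the change set but not in this excerpt). A minimal sketch of such a singleton, assuming it wraps a c3p0 ComboPooledDataSource; this is illustrative, not the committed implementation:

    import java.beans.PropertyVetoException;
    import java.sql.Connection;
    import java.sql.SQLException;
    import com.mchange.v2.c3p0.ComboPooledDataSource;

    // Hypothetical pooled-datasource singleton; mirrors the call shape
    // DataSourceHueDatabase.getInstance(driver, url, user, password).getConnection()
    // seen in the servlets below.
    public final class PooledDataSourceSketch {
      private static PooledDataSourceSketch instance;
      private final ComboPooledDataSource pool = new ComboPooledDataSource();

      private PooledDataSourceSketch(String driver, String url, String user, String password)
          throws PropertyVetoException {
        pool.setDriverClass(driver); // e.g. "org.postgresql.Driver"
        pool.setJdbcUrl(url);
        pool.setUser(user);
        pool.setPassword(password);
      }

      public static synchronized PooledDataSourceSketch getInstance(
          String driver, String url, String user, String password) throws PropertyVetoException {
        if (instance == null) {
          instance = new PooledDataSourceSketch(driver, url, user, password);
        }
        return instance;
      }

      public Connection getConnection() throws SQLException {
        return pool.getConnection();
      }
    }

If the committed singletons look anything like this, it would also explain why the servlets below catch PropertyVetoException around their getConnection() calls (c3p0's setDriverClass declares it).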

+ 182 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/controller/configurationcheck/ConfigurationCheck.java

@@ -0,0 +1,182 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package org.apache.ambari.view.huetoambarimigration.controller.configurationcheck;
+
+import java.io.IOException;
+import java.io.PrintWriter;
+import java.net.URISyntaxException;
+import java.sql.Connection;
+
+import org.apache.ambari.view.ViewContext;
+
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import javax.servlet.ServletConfig;
+import javax.servlet.ServletContext;
+
+import org.apache.ambari.view.huetoambarimigration.service.*;
+import org.apache.ambari.view.huetoambarimigration.service.configurationcheck.ConfFileReader;
+import org.apache.log4j.Logger;
+
+
+public class ConfigurationCheck extends HttpServlet {
+  private static final long serialVersionUID = 1L;
+
+  ViewContext view;
+
+  @Override
+  public void init(ServletConfig config) throws ServletException {
+
+    super.init(config);
+    ServletContext context = config.getServletContext();
+    view = (ViewContext) context.getAttribute(ViewContext.CONTEXT_ATTRIBUTE);
+
+  }
+
+  protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
+
+    final Logger logger = Logger.getLogger(ConfigurationCheck.class);
+    response.setContentType("text/html");
+    PrintWriter out = response.getWriter();
+
+    out.println("<table class=\"table\">");
+    out.println("<thead><tr><th>Service</th><th>Status</th></tr></thead>");
+    out.println("<tbody>");
+
+    if (ConfFileReader.checkConfigurationForHue(view.getProperties().get("Hue_URL"))) {
+      logger.info("Hue URL connection: Success");
+      out.println("<tr class=\"success\">");
+      out.println("<td><h6>" + "Hue" + "</h6></td>");
+      out.println("<td><h6>" + "OK" + "</h6></td>");
+      out.println("</tr>");
+    } else {
+      logger.info("Hue URL connection: Failed");
+      out.println("<tr class=\"danger\">");
+      out.println("<td><h6>" + "Hue" + "</h6></td>");
+      out.println("<td><h6>" + "ERROR" + "</h6></td>");
+      out.println("</tr>");
+    }
+
+    if (ConfFileReader.checkConfigurationForAmbari(view.getProperties().get("Ambari_URL"))) {
+
+      logger.info("Ambari URL connection: Success");
+      out.println("<tr class=\"success\">");
+      out.println("<td><h6>" + "Ambari" + "</h6></td>");
+      out.println("<td><h6>" + "OK" + "</h6></td>");
+      out.println("</tr>");
+
+    } else {
+
+      logger.info("Ambari URL connection: Failed");
+      out.println("<tr class=\"danger\">");
+      out.println("<td><h6>" + "Ambari" + "</h6></td>");
+      out.println("<td><h6>" + "ERROR" + "</h6></td>");
+      out.println("</tr>");
+
+    }
+
+    if (ConfFileReader.checkAmbariDatbaseConection(view.getProperties().get("ambaridrivername"), view.getProperties().get("ambarijdbcurl"), view.getProperties().get("ambaridbusername"), view.getProperties().get("ambaridbpassword"))) {
+
+      logger.info("Ambari Database connection:- Success");
+      out.println("<tr class=\"success\">");
+      out.println("<td><h6>" + "Ambari Database" + "</h6></td>");
+      out.println("<td><h6>" + "OK" + "</h6></td>");
+      out.println("</tr>");
+
+    } else {
+
+      logger.info("Ambari Database connection:- Failed");
+      out.println("<tr class=\"danger\">");
+      out.println("<td><h6>" + "Ambari Database" + "</h6></td>");
+      out.println("<td><h6>" + "ERROR" + "</h6></td>");
+      out.println("</tr>");
+
+    }
+    if (ConfFileReader.checkHueDatabaseConnection(view.getProperties().get("huedrivername"), view.getProperties().get("huejdbcurl"), view.getProperties().get("huedbusername"), view.getProperties().get("huedbpassword"))) {
+
+      logger.info("Hue Database connection:- Success");
+      out.println("<tr class=\"success\">");
+      out.println("<td><h6>" + "Hue Database" + "</h6></td>");
+      out.println("<td><h6>" + "OK" + "</h6></td>");
+      out.println("</tr>");
+
+    } else {
+
+      logger.info("Hue Database connection:- Failed");
+      out.println("<tr class=\"danger\">");
+      out.println("<td><h6>" + "Hue Database" + "</h6></td>");
+      out.println("<td><h6>" + "ERROR" + "</h6></td>");
+      out.println("</tr>");
+
+    }
+
+    try {
+
+      if (ConfFileReader.checkNamenodeURIConnectionforambari(view.getProperties().get("namenode_URI_Ambari"))) {
+
+        logger.info("Web hdfs Access to ambari:- Success");
+        out.println("<tr class=\"success\">");
+        out.println("<td><h6>" + "namenodeURIAmbari" + "</h6></td>");
+        out.println("<td><h6>" + "OK" + "</h6></td>");
+        out.println("</tr>");
+
+      } else {
+
+        logger.info("Web hdfs Access to ambari:- Failed");
+        out.println("<tr class=\"danger\">");
+        out.println("<td><h6>" + "namenodeURIAmbari" + "</h6></td>");
+        out.println("<td><h6>" + "ERROR" + "</h6></td>");
+        out.println("</tr>");
+
+      }
+    } catch (URISyntaxException e) {
+      logger.error("Error in accessing Webhdfs of Ambari: ", e);
+    }
+
+    try {
+      if (ConfFileReader.checkNamenodeURIConnectionforHue(view.getProperties().get("namenode_URI_Hue"))) {
+
+        logger.info("Web hdfs Access to hue:- Success");
+        out.println("<tr class=\"success\">");
+        out.println("<td><h6>" + "namenodeURIHue" + "</h6></td>");
+        out.println("<td><h6>" + "OK" + "</h6></td>");
+        out.println("</tr>");
+
+      } else {
+
+        logger.info("Web hdfs Access to hue:- Failed");
+        out.println("<tr class=\"danger\">");
+        out.println("<td><h6>" + "namenodeURIHue" + "</h6></td>");
+        out.println("<td><h6>" + "ERROR" + "</h6></td>");
+        out.println("</tr>");
+
+      }
+    } catch (URISyntaxException e) {
+      logger.error("Error in accessing Webhdfs of Hue: " , e);
+    }
+
+    out.println("</tbody></table>");
+
+  }
+
+
+}
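
The ConfFileReader helpers invoked above live in service/configurationcheck/ConfFileReader.java, which this excerpt does not include. As a hedged sketch, a URL check such as checkConfigurationForHue could be as simple as an HTTP reachability probe; the method body below is an assumption, not the committed code:

    // Assumption: the check is a plain HTTP GET probe against the configured URL.
    public static boolean checkConfigurationForHue(String hueUrl) {
      try {
        java.net.HttpURLConnection conn =
            (java.net.HttpURLConnection) new java.net.URL(hueUrl).openConnection();
        conn.setConnectTimeout(5000);         // do not hang on an unreachable host
        conn.setRequestMethod("GET");
        return conn.getResponseCode() == 200; // reachable and healthy
      } catch (java.io.IOException e) {
        return false;                         // any I/O failure fails the check
      }
    }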

+ 54 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/controller/configurationcheck/ProgressBarStatus.java

@@ -0,0 +1,54 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package org.apache.ambari.view.huetoambarimigration.controller.configurationcheck;
+
+
+import java.io.IOException;
+import java.io.PrintWriter;
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import javax.servlet.http.HttpSession;
+
+
+public class ProgressBarStatus extends HttpServlet {
+
+  private static final long serialVersionUID = 1L;
+
+  public static final String TASK_PROGRESS_VARIABLE = "task_progress_session";
+
+
+  protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
+
+    HttpSession session = request.getSession(true);
+    Integer param = (Integer) session.getAttribute(TASK_PROGRESS_VARIABLE);
+
+    if (param == null) {
+      param = 0;
+    }
+
+    response.setContentType("text/html");
+    PrintWriter out = response.getWriter();
+    out.println(param + "%");
+
+  }
+
+}
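
ProgressBarStatus is the read side of a simple session-based progress protocol; the migration servlets below are the writers. The shared pattern, extracted here as a sketch (total and the loop body stand in for each servlet's work items):

    // Writer side of the progress protocol; mirrors the loops in the servlets below.
    static void migrateAll(javax.servlet.http.HttpServletRequest req, int total) {
      javax.servlet.http.HttpSession session = req.getSession(true);
      for (int i = 0; i < total; i++) {
        int progressPercentage = Math.round(((float) (i + 1)) / total * 100);
        session.setAttribute(ProgressBarStatus.TASK_PROGRESS_VARIABLE, progressPercentage);
        // ... migrate item i ...
      }
      session.setAttribute(ProgressBarStatus.TASK_PROGRESS_VARIABLE, 0); // reset when done
    }

A page polls the ProgressBarStatus endpoint and renders the returned "NN%" string while the long-running migration request is in flight.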

+ 222 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/controller/hive/HiveHistoryMigration.java

@@ -0,0 +1,222 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package org.apache.ambari.view.huetoambarimigration.controller.hive;
+
+import java.beans.PropertyVetoException;
+import java.io.IOException;
+import java.io.PrintWriter;
+import java.net.URISyntaxException;
+import java.sql.Connection;
+import java.sql.SQLException;
+import java.text.ParseException;
+
+
+import javax.servlet.ServletConfig;
+import javax.servlet.ServletContext;
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import javax.servlet.http.HttpSession;
+
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.huetoambarimigration.controller.configurationcheck.ConfigurationCheck;
+import org.apache.ambari.view.huetoambarimigration.controller.configurationcheck.ProgressBarStatus;
+import org.apache.log4j.Logger;
+
+import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceAmbariDatabase;
+import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceHueDatabase;
+import org.apache.ambari.view.huetoambarimigration.service.configurationcheck.ConfFileReader;
+import org.apache.ambari.view.huetoambarimigration.service.hive.HiveHistoryQueryImpl;
+
+public class HiveHistoryMigration extends HttpServlet {
+
+
+  private static final long serialVersionUID = 1031422249396784970L;
+  ViewContext view;
+
+  private String startDate;
+  private String endDate;
+  private String instance;
+  private String username;
+
+  @Override
+  public void init(ServletConfig config) throws ServletException {
+
+    super.init(config);
+    ServletContext context = config.getServletContext();
+    view = (ViewContext) context.getAttribute(ViewContext.CONTEXT_ATTRIBUTE);
+
+  }
+
+  public void doGet(HttpServletRequest req, HttpServletResponse resp)
+    throws ServletException, IOException {
+
+    HttpSession session = req.getSession(true);
+    final Logger logger = Logger.getLogger(HiveHistoryMigration.class);
+    Connection connectionHuedb = null;
+    Connection connectionAmbaridb = null;
+
+    /* fetching the variable from the client */
+    username = req.getParameter("username");
+    startDate = req.getParameter("startdate");
+    endDate = req.getParameter("enddate");
+    instance = req.getParameter("instance");
+
+    logger.info("--------------------------------------");
+    logger.info("Hive History query Migration started");
+    logger.info("--------------------------------------");
+    logger.info("start date: " + startDate);
+    logger.info("enddate date: " + endDate);
+    logger.info("instance is: " + username);
+    logger.info("hue username is : " + instance);
+
+    int maxCountOfAmbariDb, i = 0;
+    String time = null;
+    Long epochTime = null;
+    String dirNameforHiveHistroy;
+
+    HiveHistoryQueryImpl hiveHistoryQueryImpl = new HiveHistoryQueryImpl();// creating object of the HiveHistoryQuery implementation
+
+    String[] hiveQuery = new String[1000000];
+
+    try {
+
+      connectionHuedb = DataSourceHueDatabase.getInstance(view.getProperties().get("huedrivername"), view.getProperties().get("huejdbcurl"), view.getProperties().get("huedbusername"), view.getProperties().get("huedbpassword")).getConnection();
+
+      hiveQuery = hiveHistoryQueryImpl.fetchFromHue(username, startDate, endDate, connectionHuedb);
+
+		   /* if No hive query selected from Hue Database according to our search criteria */
+
+      if (hiveQuery[i] == null) {
+
+        logger.info("No queries has been selected acccording to your criteria");
+        resp.setContentType("text/html");
+        PrintWriter out = resp.getWriter();
+        out.println("<br>");
+        out.println("<h4>No queries selected according to your criteria</h4>");
+
+      } else {
+        /* If Hive queries are selected based on our search criteria */
+
+        connectionAmbaridb = DataSourceAmbariDatabase.getInstance(view.getProperties().get("ambaridrivername"), view.getProperties().get("ambarijdbcurl"), view.getProperties().get("ambaridbusername"), view.getProperties().get("ambaridbpassword")).getConnection();// connecting to ambari db
+        connectionAmbaridb.setAutoCommit(false);
+
+        // for each queries fetched from Hue database//
+
+        for (i = 0; hiveQuery[i] != null; i++) {
+
+          float calc = ((float) (i + 1)) / hiveQuery.length * 100;
+          int progressPercentage = Math.round(calc);
+
+          session.setAttribute(ProgressBarStatus.TASK_PROGRESS_VARIABLE, progressPercentage);
+
+          logger.info("_____________________");
+          logger.info("Loop No." + (i + 1));
+          logger.info("_____________________");
+          logger.info("Hue query that has been fetched" + hiveQuery[i]);
+          int id = 0;
+
+          id = hiveHistoryQueryImpl.fetchInstanceTablename(view.getProperties().get("ambaridrivername"), connectionAmbaridb, instance); // fetching table name according to the given instance name
+
+          logger.info("Table name has been fetched from instance name");
+
+          hiveHistoryQueryImpl.writetoFileQueryhql(hiveQuery[i], ConfFileReader.getHomeDir());// writing the query to a temporary .hql file on local disk
+
+          logger.info(".hql file created in Temp directory");
+
+          hiveHistoryQueryImpl.writetoFileLogs(ConfFileReader.getHomeDir());// writing logs to a temporary file on local disk
+
+          logger.info("Log file created in Temp directory");
+
+          maxCountOfAmbariDb = (hiveHistoryQueryImpl.fetchMaximumIdfromAmbaridb(view.getProperties().get("ambaridrivername"), connectionAmbaridb, id) + 1);// fetching the maximum count for ambari db to insert
+
+          time = hiveHistoryQueryImpl.getTime();// getting the system current time.
+
+          epochTime = hiveHistoryQueryImpl.getEpochTime();// getting system time as epoch format
+
+          dirNameforHiveHistroy = "/user/admin/hive/jobs/hive-job-" + maxCountOfAmbariDb + "-" + time + "/";// creating the directory name
+
+          logger.info("Directory name where .hql will be saved: " + dirNameforHiveHistroy);
+
+          hiveHistoryQueryImpl.insertRowinAmbaridb(view.getProperties().get("ambaridrivername"), dirNameforHiveHistroy, maxCountOfAmbariDb, epochTime, connectionAmbaridb, id, instance, i);// inserting in ambari database
+
+          if (view.getProperties().get("KerberoseEnabled").equals("y")) {
+
+            logger.info("kerberose enabled");
+            hiveHistoryQueryImpl.createDirKerberorisedSecured(dirNameforHiveHistroy, view.getProperties().get("namenode_URI_Ambari"));// creating directory in kerborized secured hdfs
+            logger.info("Directory created in hdfs");
+            hiveHistoryQueryImpl.putFileinHdfsKerborizedSecured(ConfFileReader.getHomeDir() + "query.hql", dirNameforHiveHistroy, view.getProperties().get("namenode_URI_Ambari"));// copying the .hql file to kerborized hdfs
+            hiveHistoryQueryImpl.putFileinHdfsKerborizedSecured(ConfFileReader.getHomeDir() + "logs", dirNameforHiveHistroy, view.getProperties().get("namenode_URI_Ambari"));// copying the log file to kerborized hdfs
+          } else {
+
+            logger.info("kerberose not enabled");
+            hiveHistoryQueryImpl.createDir(dirNameforHiveHistroy, view.getProperties().get("namenode_URI_Ambari"));// creating directory in hdfs
+            logger.info("Directory created in hdfs");
+            hiveHistoryQueryImpl.putFileinHdfs(ConfFileReader.getHomeDir() + "query.hql", dirNameforHiveHistroy, view.getProperties().get("namenode_URI_Ambari"));// copying the .hql file to hdfs
+            hiveHistoryQueryImpl.putFileinHdfs(ConfFileReader.getHomeDir() + "logs", dirNameforHiveHistroy, view.getProperties().get("namenode_URI_Ambari"));// copying the log file to hdfs
+          }
+
+        }
+        connectionAmbaridb.commit();
+
+      }
+    } catch (SQLException e) {
+      logger.error("Sql exception in ambari database: ", e);
+      try {
+        connectionAmbaridb.rollback();
+        logger.error("Sql statement are Rolledback");
+      } catch (SQLException e1) {
+        logger.error("Sql rollback exception in ambari database",
+          e1);
+      }
+    } catch (ClassNotFoundException e) {
+      logger.error("Class not found :- " ,e);
+    } catch (ParseException e) {
+      logger.error("Parse Exception : " ,e);
+    } catch (URISyntaxException e) {
+      logger.error("URI Syntax Exception: " ,e);
+    } catch (PropertyVetoException e) {
+      logger.error("PropertyVetoException: " ,e);
+    } finally {
+      if (connectionAmbaridb != null) try {
+        connectionAmbaridb.close();
+      } catch (SQLException e) {
+        logger.error("Exception in closing the connection :" ,e);
+      }
+    }
+    // deleting the temporary files that were created during execution
+    hiveHistoryQueryImpl.deleteFileQueryhql(ConfFileReader.getHomeDir());
+    hiveHistoryQueryImpl.deleteFileQueryLogs(ConfFileReader.getHomeDir());
+
+    session.setAttribute(ProgressBarStatus.TASK_PROGRESS_VARIABLE, 0);
+    logger.info("------------------------------");
+    logger.info("Hive History query Migration Ends");
+    logger.info("------------------------------");
+
+    /* servlet returned to client */
+    resp.setContentType("text/html");
+    PrintWriter out = resp.getWriter();
+    out.println("<br>");
+    out.println("<h4>" + i + " Query has been migrated to  " + instance + "</h4>");
+
+  }
+
+}
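
The servlet above is driven entirely by four GET parameters: username, startdate, enddate, and instance. A hedged client sketch, assuming a servlet mapping of /hivehistoryquerymigration (the real path is assigned in WEB-INF/web.xml, which is not part of this excerpt):

    import java.net.HttpURLConnection;
    import java.net.URL;
    import java.net.URLEncoder;

    public class MigrationClientSketch {
      // Fires one migration request and returns the HTTP status; the servlet
      // streams back an HTML fragment reporting how many queries were migrated.
      static int runHistoryMigration(String baseUrl, String hueUser, String startDate,
                                     String endDate, String ambariInstance) throws Exception {
        String url = baseUrl + "/hivehistoryquerymigration" // hypothetical mapping
            + "?username=" + URLEncoder.encode(hueUser, "UTF-8")
            + "&startdate=" + URLEncoder.encode(startDate, "UTF-8")
            + "&enddate=" + URLEncoder.encode(endDate, "UTF-8")
            + "&instance=" + URLEncoder.encode(ambariInstance, "UTF-8");
        HttpURLConnection conn = (HttpURLConnection) new URL(url).openConnection();
        conn.setRequestMethod("GET");
        return conn.getResponseCode();
      }
    }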

+ 231 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/controller/hive/HiveSavedQueryMigration.java

@@ -0,0 +1,231 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package org.apache.ambari.view.huetoambarimigration.controller.hive;
+
+import java.beans.PropertyVetoException;
+import java.io.IOException;
+import java.io.PrintWriter;
+import java.net.URISyntaxException;
+import java.sql.Connection;
+import java.sql.SQLException;
+import java.text.ParseException;
+import java.util.ArrayList;
+
+import javax.servlet.ServletConfig;
+import javax.servlet.ServletContext;
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import javax.servlet.http.HttpSession;
+
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.huetoambarimigration.controller.configurationcheck.ProgressBarStatus;
+import org.apache.log4j.Logger;
+
+import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceAmbariDatabase;
+import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceHueDatabase;
+import org.apache.ambari.view.huetoambarimigration.model.*;
+import org.apache.ambari.view.huetoambarimigration.service.configurationcheck.ConfFileReader;
+import org.apache.ambari.view.huetoambarimigration.service.hive.HiveSavedQueryImpl;
+
+public class HiveSavedQueryMigration extends HttpServlet {
+
+  private static final long serialVersionUID = 1031422249396784970L;
+
+  ViewContext view;
+  private String startDate;
+  private String endDate;
+  private String instance;
+  private String userName;
+
+  @Override
+  public void init(ServletConfig config) throws ServletException {
+    super.init(config);
+    ServletContext context = config.getServletContext();
+    view = (ViewContext) context.getAttribute(ViewContext.CONTEXT_ATTRIBUTE);
+  }
+
+  public void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
+
+    HttpSession session = req.getSession(true);
+    final Logger logger = Logger.getLogger(HiveSavedQueryMigration.class);
+
+    Connection connectionAmbaridb = null;
+    Connection connectionHuedb = null;
+
+    /* fetching from servlet */
+    userName = req.getParameter("username");
+    startDate = req.getParameter("startdate");
+    endDate = req.getParameter("enddate");
+    instance = req.getParameter("instance");
+
+    int i = 0;
+
+    logger.info("-------------------------------------");
+    logger.info("Hive saved query Migration started");
+    logger.info("-------------------------------------");
+    logger.info("start date: " + startDate);
+    logger.info("enddate date: " + endDate);
+    logger.info("instance is: " + instance);
+    logger.info("hue username is : " + userName);
+
+    HiveSavedQueryImpl hivesavedqueryimpl = new HiveSavedQueryImpl();/* creating Implementation object  */
+
+    int maxcountForHivehistroryAmbaridb, maxCountforSavequeryAmbaridb;
+    String time = null;
+    Long epochtime = null;
+    String dirNameforHiveSavedquery;
+    ArrayList<PojoHive> dbpojoHiveSavedQuery = new ArrayList<PojoHive>();
+
+    try {
+
+      connectionHuedb = DataSourceHueDatabase.getInstance(view.getProperties().get("huedrivername"), view.getProperties().get("huejdbcurl"), view.getProperties().get("huedbusername"), view.getProperties().get("huedbpassword")).getConnection(); /* fetching connection to hue DB */
+
+      dbpojoHiveSavedQuery = hivesavedqueryimpl.fetchFromHuedb(userName, startDate, endDate, connectionHuedb); /* fetching data from hue db and storing it into a model */
+
+      if (dbpojoHiveSavedQuery.size() == 0) /* if no data has been fetched from hue db according to search criteria */ {
+
+        logger.info("no Hive saved query has been selected from hue according to your criteria of searching");
+        resp.setContentType("text/html");
+        PrintWriter out = resp.getWriter();
+        out.println("<br>");
+        out.println("<h4>No queries selected according to your criteria</h4>");
+
+      } else {
+
+        connectionAmbaridb = DataSourceAmbariDatabase.getInstance(view.getProperties().get("ambaridrivername"), view.getProperties().get("ambarijdbcurl"), view.getProperties().get("ambaridbusername"), view.getProperties().get("ambaridbpassword")).getConnection();/* connecting to ambari DB */
+        connectionAmbaridb.setAutoCommit(false);
+
+        for (i = 0; i < dbpojoHiveSavedQuery.size(); i++) {
+
+          logger.info("_____________________");
+          logger.info("Loop No." + (i + 1));
+          logger.info("_____________________");
+
+          float calc = ((float) (i + 1)) / dbpojoHiveSavedQuery.size() * 100;
+          int progressPercentage = Math.round(calc);
+
+          session.setAttribute(ProgressBarStatus.TASK_PROGRESS_VARIABLE, progressPercentage);
+
+          logger.info("query fetched from hue:-  " + dbpojoHiveSavedQuery.get(i).getQuery());
+
+          int tableIdSavedQuery = hivesavedqueryimpl.fetchInstancetablenameForSavedqueryHive(view.getProperties().get("ambaridrivername"), connectionAmbaridb, instance); /* fetching the instance table name for hive saved query  from the given instance name */
+
+          int tableIdHistoryHive = hivesavedqueryimpl.fetchInstanceTablenameHiveHistory(view.getProperties().get("ambaridrivername"), connectionAmbaridb, instance); /* fetching the instance table name for hive history query from the given instance name */
+
+          logger.info("Table name are fetched from instance name.");
+
+          hivesavedqueryimpl.writetoFilequeryHql(dbpojoHiveSavedQuery.get(i).getQuery(), ConfFileReader.getHomeDir()); /* writing hive query to a local file*/
+
+          hivesavedqueryimpl.writetoFileLogs(ConfFileReader.getHomeDir());/* writing logs to localfile */
+
+          logger.info(".hql and logs file are saved in temporary directory");
+
+          maxcountForHivehistroryAmbaridb = (hivesavedqueryimpl.fetchMaxdsidFromHiveHistory(view.getProperties().get("ambaridrivername"), connectionAmbaridb, tableIdHistoryHive) + 1);/* fetching the maximum ds_id from hive history table*/
+
+          maxCountforSavequeryAmbaridb = (hivesavedqueryimpl.fetchMaxidforSavedQueryHive(view.getProperties().get("ambaridrivername"), connectionAmbaridb, tableIdSavedQuery) + 1);/* fetching the maximum ds_id from hive saved query table*/
+
+          time = hivesavedqueryimpl.getTime();/* getting system time */
+
+          epochtime = hivesavedqueryimpl.getEpochTime();/* getting epoch time */
+
+
+          dirNameforHiveSavedquery = "/user/admin/hive/jobs/hive-job-" + maxcountForHivehistroryAmbaridb + "-"
+            + time + "/"; // creating hdfs directory name
+
+          logger.info("Directory will be creted in HDFS" + dirNameforHiveSavedquery);
+
+
+          hivesavedqueryimpl.insertRowHiveHistory(view.getProperties().get("ambaridrivername"), dirNameforHiveSavedquery, maxcountForHivehistroryAmbaridb, epochtime, connectionAmbaridb, tableIdHistoryHive, instance, i);// inserting to hive history table
+
+          logger.info("Row inserted in Hive History table.");
+
+          if (view.getProperties().get("KerberoseEnabled").equals("y")) {
+
+            logger.info("Kerberose Enabled");
+            hivesavedqueryimpl.createDirHiveSecured(dirNameforHiveSavedquery, view.getProperties().get("namenode_URI_Ambari"));// creating directory in hdfs in kerborized cluster
+            hivesavedqueryimpl.putFileinHdfsSecured(ConfFileReader.getHomeDir() + "query.hql", dirNameforHiveSavedquery, view.getProperties().get("namenode_URI_Ambari"));// putting .hql file in hdfs in kerberoroized cluster
+            hivesavedqueryimpl.putFileinHdfsSecured(ConfFileReader.getHomeDir() + "logs", dirNameforHiveSavedquery, view.getProperties().get("namenode_URI_Ambari"));// putting logs file in hdfs in kerberoroized cluster
+
+          } else {
+
+            logger.info("Kerberose Not Enabled");
+            hivesavedqueryimpl.createDirHive(dirNameforHiveSavedquery, view.getProperties().get("namenode_URI_Ambari"));// creating directory in hdfs
+            hivesavedqueryimpl.putFileinHdfs(ConfFileReader.getHomeDir() + "query.hql", dirNameforHiveSavedquery, view.getProperties().get("namenode_URI_Ambari"));// putting .hql file in hdfs directory
+            hivesavedqueryimpl.putFileinHdfs(ConfFileReader.getHomeDir() + "logs", dirNameforHiveSavedquery, view.getProperties().get("namenode_URI_Ambari"));// putting logs file in hdfs
+          }
+
+          // inserting into hive saved query table
+          hivesavedqueryimpl.insertRowinSavedQuery(view.getProperties().get("ambaridrivername"), maxCountforSavequeryAmbaridb, dbpojoHiveSavedQuery.get(i).getDatabase(), dirNameforHiveSavedquery, dbpojoHiveSavedQuery.get(i).getQuery(), dbpojoHiveSavedQuery.get(i).getOwner(), connectionAmbaridb, tableIdSavedQuery, instance, i);
+
+        }
+        connectionAmbaridb.commit();
+
+      }
+
+
+    } catch (SQLException e) {
+
+      logger.error("SQL exception: ", e);
+      try {
+        connectionAmbaridb.rollback();
+        logger.info("roll back done");
+      } catch (SQLException e1) {
+        logger.error("Rollback error: ", e1);
+
+      }
+    } catch (ClassNotFoundException e1) {
+      logger.error("Class not found : " , e1);
+    } catch (ParseException e) {
+      logger.error("ParseException: " , e);
+    } catch (URISyntaxException e) {
+      logger.error("URISyntaxException: " , e);
+    } catch (PropertyVetoException e) {
+      logger.error("PropertyVetoException:" , e);
+    } finally {
+      if (null != connectionAmbaridb)
+        try {
+          connectionAmbaridb.close();
+        } catch (SQLException e) {
+          logger.error("Error in connection close", e);
+        }
+    }
+
+
+    hivesavedqueryimpl.deleteFileQueryhql(ConfFileReader.getHomeDir());
+    hivesavedqueryimpl.deleteFileQueryLogs(ConfFileReader.getHomeDir());
+    session.setAttribute(ProgressBarStatus.TASK_PROGRESS_VARIABLE, 0);
+
+    logger.info("-------------------------------");
+    logger.info("Hive saved query Migration end");
+    logger.info("--------------------------------");
+
+    resp.setContentType("text/html");
+    PrintWriter out = resp.getWriter();
+    out.println("<br>");
+    out.println("<h4>" + i + " Saved query has been migrated to  " + instance + "</h4>");
+  }
+}
+
+
+
+
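
The PojoHive model consumed above (model/PojoHive.java, +51 lines in this change set but not shown here) evidently exposes at least getQuery(), getDatabase(), and getOwner(). A shape inferred solely from those call sites; field names and setters are assumptions:

    // Inferred sketch of model/PojoHive.java; only the getters used in
    // HiveSavedQueryMigration above are certain.
    public class PojoHive {
      private String query;    // saved Hive query text
      private String database; // database the query targets
      private String owner;    // Hue user who owns the query

      public String getQuery() { return query; }
      public void setQuery(String query) { this.query = query; }
      public String getDatabase() { return database; }
      public void setDatabase(String database) { this.database = database; }
      public String getOwner() { return owner; }
      public void setOwner(String owner) { this.owner = owner; }
    }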

+ 201 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/controller/pig/PigJobMigration.java

@@ -0,0 +1,201 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package org.apache.ambari.view.huetoambarimigration.controller.pig;
+
+import java.beans.PropertyVetoException;
+import java.io.IOException;
+import java.io.PrintWriter;
+import java.net.URISyntaxException;
+import java.sql.Connection;
+import java.sql.SQLException;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Date;
+
+import javax.servlet.ServletConfig;
+import javax.servlet.ServletContext;
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import javax.servlet.http.HttpSession;
+
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.huetoambarimigration.controller.configurationcheck.ProgressBarStatus;
+import org.apache.log4j.Logger;
+
+import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceAmbariDatabase;
+import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceHueDatabase;
+import org.apache.ambari.view.huetoambarimigration.service.*;
+import org.apache.ambari.view.huetoambarimigration.model.*;
+import org.apache.ambari.view.huetoambarimigration.service.configurationcheck.ConfFileReader;
+import org.apache.ambari.view.huetoambarimigration.service.pig.PigJobImpl;
+
+public class PigJobMigration extends HttpServlet {
+
+  private static final long serialVersionUID = 1031422249396784970L;
+  ViewContext view;
+  int i = 0;
+  private String userName;
+  private String startDate;
+  private String endDate;
+  private String instance;
+
+  @Override
+  public void init(ServletConfig config) throws ServletException {
+
+    super.init(config);
+    ServletContext context = config.getServletContext();
+    view = (ViewContext) context.getAttribute(ViewContext.CONTEXT_ATTRIBUTE);
+
+  }
+
+  public void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
+
+    HttpSession session = req.getSession(true);
+    final Logger logger = Logger.getLogger(PigJobMigration.class);
+    Connection connectionHuedb = null;
+    Connection connectionAmbaridb = null;
+
+    // fetching data from the client
+    userName = req.getParameter("username");
+    startDate = req.getParameter("startdate");
+    endDate = req.getParameter("enddate");
+    instance = req.getParameter("instance");
+
+    logger.info("------------------------------");
+    logger.info("Pig Jobs Migration started");
+    logger.info("------------------------------");
+    logger.info("start date: " + startDate);
+    logger.info("enddate date: " + endDate);
+    logger.info("instance is: " + userName);
+    logger.info("hue username is : " + instance);
+
+    PigJobImpl pigjobimpl = new PigJobImpl();// creating the implementation object
+    int maxCountforPigScript = 0;
+
+    String time = null, timeIndorder = null;
+    Long epochtime = null;
+    String pigJobDirName;
+    ArrayList<PojoPig> pigJobDbPojo = new ArrayList<PojoPig>();
+
+    try {
+
+      connectionHuedb = DataSourceHueDatabase.getInstance(view.getProperties().get("huedrivername"), view.getProperties().get("huejdbcurl"), view.getProperties().get("huedbusername"), view.getProperties().get("huedbpassword")).getConnection();//connecting to hue database
+      pigJobDbPojo = pigjobimpl.fetchFromHueDB(userName, startDate, endDate, connectionHuedb);// fetching the PigJobs details from hue
+
+      /* No Pig Job details have been fetched according to search criteria */
+      if (pigJobDbPojo.size() == 0) {
+
+        logger.info("no Pig Job has been selected from Hue according to your search criteria");
+        resp.setContentType("text/html");
+        PrintWriter out = resp.getWriter();
+        out.println("<br>");
+        out.println("<h4>No Pig Job selected according to your criteria</h4>");
+
+      } else {
+
+        connectionAmbaridb = DataSourceAmbariDatabase.getInstance(view.getProperties().get("ambaridrivername"), view.getProperties().get("ambarijdbcurl"), view.getProperties().get("ambaridbusername"), view.getProperties().get("ambaridbpassword")).getConnection();
+        connectionAmbaridb.setAutoCommit(false);
+
+        for (i = 0; i < pigJobDbPojo.size(); i++) {
+
+          float calc = ((float) (i + 1)) / pigJobDbPojo.size() * 100;
+          int progressPercentage = Math.round(calc);
+
+          session.setAttribute(ProgressBarStatus.TASK_PROGRESS_VARIABLE, progressPercentage);
+
+          logger.info("Loop No." + (i + 1));
+          logger.info("________________");
+          logger.info("the title of script " + pigJobDbPojo.get(i).getTitle());
+
+          int fetchPigTablenameInstance = pigjobimpl.fetchInstanceTablename(view.getProperties().get("ambaridrivername"), connectionAmbaridb, instance);
+          maxCountforPigScript = (pigjobimpl.fetchMaxIdforPigJob(view.getProperties().get("ambaridrivername"), connectionAmbaridb, fetchPigTablenameInstance) + 1);
+
+          time = pigjobimpl.getTime();
+          timeIndorder = pigjobimpl.getTimeInorder();
+          epochtime = pigjobimpl.getEpochTime();
+
+          pigJobDirName = "/user/admin/pig/jobs/" + pigJobDbPojo.get(i).getTitle() + "_" + time + "/";
+
+          pigjobimpl.insertRowPigJob(view.getProperties().get("ambaridrivername"), pigJobDirName, maxCountforPigScript, time, timeIndorder, epochtime, pigJobDbPojo.get(i).getTitle(), connectionAmbaridb, fetchPigTablenameInstance, pigJobDbPojo.get(i).getStatus(), instance, i);
+
+          if (view.getProperties().get("KerberoseEnabled").equals("y")) {
+
+            pigjobimpl.createDirPigJobSecured(pigJobDirName, view.getProperties().get("namenode_URI_Ambari"));
+            pigjobimpl.copyFileBetweenHdfsSecured(pigJobDbPojo.get(i).getDir() + "/script.pig", pigJobDirName, view.getProperties().get("namenode_URI_Ambari"), view.getProperties().get("namenode_URI_Hue"));
+            pigjobimpl.copyFileBetweenHdfsSecured(pigJobDbPojo.get(i).getDir() + "/stderr", pigJobDirName, view.getProperties().get("namenode_URI_Ambari"), view.getProperties().get("namenode_URI_Hue"));
+            pigjobimpl.copyFileBetweenHdfsSecured(pigJobDbPojo.get(i).getDir() + "/stdout", pigJobDirName, view.getProperties().get("namenode_URI_Ambari"), view.getProperties().get("namenode_URI_Hue"));
+
+          } else {
+
+            pigjobimpl.createDirPigJob(pigJobDirName, view.getProperties().get("namenode_URI_Ambari"));
+            pigjobimpl.copyFileBetweenHdfs(pigJobDbPojo.get(i).getDir() + "/script.pig", pigJobDirName, view.getProperties().get("namenode_URI_Ambari"), view.getProperties().get("namenode_URI_Hue"));
+            pigjobimpl.copyFileBetweenHdfs(pigJobDbPojo.get(i).getDir() + "/stderr", pigJobDirName, view.getProperties().get("namenode_URI_Ambari"), view.getProperties().get("namenode_URI_Hue"));
+            pigjobimpl.copyFileBetweenHdfs(pigJobDbPojo.get(i).getDir() + "/stdout", pigJobDirName, view.getProperties().get("namenode_URI_Ambari"), view.getProperties().get("namenode_URI_Hue"));
+
+          }
+
+          logger.info(pigJobDbPojo.get(i).getTitle() + " has been migrated to Ambari");
+
+        }
+        connectionAmbaridb.commit();
+      }
+
+    } catch (SQLException e) {
+      logger.error("sql exception in ambari database:", e);
+      try {
+        connectionAmbaridb.rollback();
+        logger.info("roll back done");
+      } catch (SQLException e1) {
+        logger.error("roll back  exception:",e1);
+      }
+    } catch (ClassNotFoundException e2) {
+      logger.error("class not found exception:",e2);
+    } catch (ParseException e) {
+      logger.error("ParseException: " ,e);
+    } catch (URISyntaxException e) {
+      logger.error("URISyntaxException" ,e);
+    } catch (PropertyVetoException e) {
+      logger.error("PropertyVetoException" ,e);
+    } finally {
+      if (null != connectionAmbaridb)
+        try {
+          connectionAmbaridb.close();
+        } catch (SQLException e) {
+          logger.error("connection closing exception ", e);
+        }
+    }
+
+    logger.info("------------------------------");
+    logger.info("Pig Job Migration End");
+    logger.info("------------------------------");
+
+    session.setAttribute(ProgressBarStatus.TASK_PROGRESS_VARIABLE, 0);
+    resp.setContentType("text/html");
+    PrintWriter out = resp.getWriter();
+    out.println("<br>");
+    out.println("<h4>" + i + " Pig jobs has been migrated to  "
+      + instance + "</h4>");
+  }
+
+}
+
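
Likewise, PojoPig (model/PojoPig.java, +72 lines, not shown in this excerpt) is read through getTitle(), getDir(), and getStatus() above, and through getScript() and getDt() in PigScriptMigration below. An inferred sketch; field types are assumptions:

    // Inferred sketch of model/PojoPig.java from the getters used in the
    // Pig migration servlets; setters omitted for brevity.
    public class PojoPig {
      private String title;  // script title recorded in Hue
      private String dir;    // Hue HDFS directory holding script.pig/stderr/stdout
      private String status; // job status recorded by Hue
      private String script; // the Pig script body
      private String dt;     // saved-script timestamp

      public String getTitle() { return title; }
      public String getDir() { return dir; }
      public String getStatus() { return status; }
      public String getScript() { return script; }
      public String getDt() { return dt; }
    }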

+ 208 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/controller/pig/PigScriptMigration.java

@@ -0,0 +1,208 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package org.apache.ambari.view.huetoambarimigration.controller.pig;
+
+import java.beans.PropertyVetoException;
+import java.io.IOException;
+import java.io.PrintWriter;
+import java.sql.Connection;
+import java.sql.SQLException;
+import java.text.ParseException;
+import java.util.ArrayList;
+
+import javax.servlet.ServletConfig;
+import javax.servlet.ServletContext;
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import javax.servlet.http.HttpSession;
+
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.huetoambarimigration.controller.configurationcheck.ProgressBarStatus;
+import org.apache.log4j.Logger;
+
+import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceAmbariDatabase;
+import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceHueDatabase;
+import org.apache.ambari.view.huetoambarimigration.model.*;
+import org.apache.ambari.view.huetoambarimigration.service.configurationcheck.ConfFileReader;
+import org.apache.ambari.view.huetoambarimigration.service.pig.PigScriptImpl;
+
+public class PigScriptMigration extends HttpServlet {
+
+
+  private static final long serialVersionUID = 1031422249396784970L;
+  ViewContext view;
+  private String startDate;
+  private String endDate;
+  private String instance;
+  private String userName;
+
+  @Override
+  public void init(ServletConfig config) throws ServletException {
+
+    super.init(config);
+    ServletContext context = config.getServletContext();
+    view = (ViewContext) context.getAttribute(ViewContext.CONTEXT_ATTRIBUTE);
+
+  }
+
+  public void doGet(HttpServletRequest req, HttpServletResponse resp)
+    throws ServletException, IOException {
+
+    HttpSession session = req.getSession(true);
+    final Logger logger = Logger.getLogger(PigScriptMigration.class);
+    Connection connectionHuedb = null;
+    Connection connectionAmbaridb = null;
+
+    logger.info("-------------------------------------");
+    logger.info("Pig saved script Migration started");
+    logger.info("-------------------------------------");
+
+    // fetching data from client
+
+    userName = req.getParameter("username");
+    startDate = req.getParameter("startdate");
+    endDate = req.getParameter("enddate");
+    instance = req.getParameter("instance");
+    int i = 0;
+
+    logger.info("start date: " + startDate);
+    logger.info("enddate date: " + endDate);
+    logger.info("instance is: " + userName);
+    logger.info("hue username is : " + instance);
+
+    // creating the implementation object
+    PigScriptImpl pigsavedscriptmigration = new PigScriptImpl();
+
+    int maxcountforsavequery = 0, maxcountforpigsavedscript;
+    String time = null, timetobeInorder = null;
+    Long epochTime = null;
+    String dirNameForPigScript, completeDirandFilePath, pigscriptFilename="";
+    int pigInstanceTableName;
+
+    ArrayList<PojoPig> dbpojoPigSavedscript = new ArrayList<PojoPig>();
+
+    try {
+      connectionHuedb = DataSourceHueDatabase.getInstance(view.getProperties().get("huedrivername"), view.getProperties().get("huejdbcurl"), view.getProperties().get("huedbusername"), view.getProperties().get("huedbpassword")).getConnection();//connection to Hue DB
+      dbpojoPigSavedscript = pigsavedscriptmigration.fetchFromHueDatabase(userName, startDate, endDate, connectionHuedb, view.getProperties().get("huedrivername"));// Fetching Pig script details from Hue DB
+
+      /* No Pig script was fetched from the Hue DB for the given search criteria */
+      if (dbpojoPigSavedscript.size() == 0) {
+
+        logger.info("No Pig script matched the given search criteria");
+        resp.setContentType("text/html");
+        PrintWriter out = resp.getWriter();
+        out.println("<br>");
+        out.println("<h4>No Pig script matched your search criteria</h4>");
+
+      } else {
+
+        connectionAmbaridb = DataSourceAmbariDatabase.getInstance(view.getProperties().get("ambaridrivername"), view.getProperties().get("ambarijdbcurl"), view.getProperties().get("ambaridbusername"), view.getProperties().get("ambaridbpassword")).getConnection();// connecting to ambari db
+        connectionAmbaridb.setAutoCommit(false);
+        logger.info("loop will continue for " + dbpojoPigSavedscript.size() + "times");
+
+        //for each pig script found in Hue Database
+
+        for (i = 0; i < dbpojoPigSavedscript.size(); i++) {
+
+
+          float calc = ((float) (i + 1)) / dbpojoPigSavedscript.size() * 100;
+          int progressPercentage = Math.round(calc);
+
+          session.setAttribute(ProgressBarStatus.TASK_PROGRESS_VARIABLE, progressPercentage);
+
+          logger.info("Loop No." + (i + 1));
+          logger.info("________________");
+          logger.info("the title of script:  " + dbpojoPigSavedscript.get(i).getTitle());
+
+          pigInstanceTableName = pigsavedscriptmigration.fetchInstanceTablenamePigScript(view.getProperties().get("ambaridrivername"), connectionAmbaridb, instance);// finding the table name in ambari from the given instance
+
+          maxcountforpigsavedscript = (pigsavedscriptmigration.fetchmaxIdforPigSavedScript(view.getProperties().get("ambaridrivername"), connectionAmbaridb, pigInstanceTableName) + 1);// maximum count of the primary key of Pig Script table
+
+          time = pigsavedscriptmigration.getTime();
+
+          timetobeInorder = pigsavedscriptmigration.getTimeInorder();
+
+          epochTime = pigsavedscriptmigration.getEpochTime();
+
+          dirNameForPigScript = "/user/admin/pig/scripts/";
+
+          pigscriptFilename = dbpojoPigSavedscript.get(i).getTitle() + "-" + time + ".pig";
+
+          completeDirandFilePath = dirNameForPigScript + pigscriptFilename;
+
+          pigsavedscriptmigration.writetPigScripttoLocalFile(dbpojoPigSavedscript.get(i).getScript(), dbpojoPigSavedscript.get(i).getTitle(), dbpojoPigSavedscript.get(i).getDt(), ConfFileReader.getHomeDir(), pigscriptFilename);
+
+          pigsavedscriptmigration.insertRowForPigScript(view.getProperties().get("ambaridrivername"), completeDirandFilePath, maxcountforsavequery, maxcountforpigsavedscript, time, timetobeInorder, epochTime, dbpojoPigSavedscript.get(i).getTitle(), connectionAmbaridb, pigInstanceTableName, instance, i);
+
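+          // copy the generated script into HDFS, taking the Kerberos-secured
+          // path when the view's "KerberoseEnabled" property is set to "y"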
+          if (view.getProperties().get("KerberoseEnabled").equals("y")) {
+            pigsavedscriptmigration.putFileinHdfsSecured(ConfFileReader.getHomeDir() + pigscriptFilename, dirNameForPigScript, view.getProperties().get("namenode_URI_Ambari"));
+          } else {
+            pigsavedscriptmigration.putFileinHdfs(ConfFileReader.getHomeDir() + pigscriptFilename, dirNameForPigScript, view.getProperties().get("namenode_URI_Ambari"));
+          }
+
+          logger.info(dbpojoPigSavedscript.get(i).getTitle() + " migrated to Ambari");
+
+          pigsavedscriptmigration.deletePigScriptLocalFile(ConfFileReader.getHomeDir(), pigscriptFilename);
+
+        }
+        connectionAmbaridb.commit();
+
+      }
+
+
+    } catch (SQLException e) {
+      logger.error("SQL exception in Ambari database", e);
+      try {
+        connectionAmbaridb.rollback();
+        logger.info("rollback done");
+      } catch (SQLException e1) {
+        logger.error("SQL exception during rollback", e1);
+      }
+    } catch (ClassNotFoundException e2) {
+      logger.error("class not found exception", e2);
+    } catch (ParseException e) {
+      logger.error("ParseException: " , e);
+    } catch (PropertyVetoException e) {
+      logger.error("PropertyVetoException: " , e);
+    } finally {
+      if (null != connectionHuedb)
+        try {
+          connectionHuedb.close();
+        } catch (SQLException e) {
+          logger.error("Hue connection close exception: ", e);
+        }
+      if (null != connectionAmbaridb)
+        try {
+          connectionAmbaridb.close();
+        } catch (SQLException e) {
+          logger.error("connection close exception: ", e);
+        }
+    }
+
+    session.setAttribute(ProgressBarStatus.TASK_PROGRESS_VARIABLE, 0);
+
+    resp.setContentType("text/html");
+    PrintWriter out = resp.getWriter();
+    out.println("<br>");
+    out.println("<h4>" + i + " Pig Script has been migrated to " + instance + "</h4>");
+
+    logger.info("----------------------------------");
+    logger.info("Pig saved script Migration ends");
+    logger.info("----------------------------------");
+  }
+
+
+}
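
For reference, the servlet above is driven entirely by the four request parameters read in doGet(): username, startdate, enddate and instance. A minimal client-side sketch in Java (the host and servlet path are illustrative assumptions; the real mapping is declared in web.xml):

import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLEncoder;

public class PigScriptMigrationClientSketch {
  public static void main(String[] args) throws IOException {
    // hypothetical base URL; consult web.xml for the actual servlet mapping
    String base = "http://localhost:8080/hueambarimigration/PigScriptMigration";
    String query = "username=" + URLEncoder.encode("all", "UTF-8")
        + "&startdate=" + URLEncoder.encode("2016-01-01", "UTF-8")
        + "&enddate=" + URLEncoder.encode("2016-04-01", "UTF-8")
        + "&instance=" + URLEncoder.encode("pig_instance", "UTF-8");
    HttpURLConnection conn = (HttpURLConnection) new URL(base + "?" + query).openConnection();
    conn.setRequestMethod("GET");
    // the servlet answers with a small HTML fragment reporting the migrated count
    System.out.println("HTTP " + conn.getResponseCode());
    conn.disconnect();
  }
}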

+ 217 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/controller/revertchange/RevertChange.java

@@ -0,0 +1,217 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.huetoambarimigration.controller.revertchange;
+
+import java.beans.PropertyVetoException;
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileReader;
+import java.io.IOException;
+import java.io.PrintWriter;
+import java.net.URISyntaxException;
+import java.security.PrivilegedExceptionAction;
+import java.sql.Connection;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.List;
+
+import javax.servlet.ServletConfig;
+import javax.servlet.ServletContext;
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import javax.servlet.http.HttpSession;
+
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.huetoambarimigration.controller.configurationcheck.ProgressBarStatus;
+import org.apache.log4j.Logger;
+import org.jdom.Attribute;
+import org.jdom.Document;
+import org.jdom.Element;
+import org.jdom.JDOMException;
+import org.jdom.input.SAXBuilder;
+import org.jdom.output.Format;
+import org.jdom.output.XMLOutputter;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.security.UserGroupInformation;
+
+import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceAmbariDatabase;
+import org.apache.ambari.view.huetoambarimigration.service.configurationcheck.ConfFileReader;
+
+
+public class RevertChange extends HttpServlet {
+
+  private static final long serialVersionUID = 1L;
+  ViewContext view;
+
+  @Override
+  public void init(ServletConfig config) throws ServletException {
+
+    super.init(config);
+    ServletContext context = config.getServletContext();
+    view = (ViewContext) context.getAttribute(ViewContext.CONTEXT_ATTRIBUTE);
+  }
+
+  public boolean stringtoDatecompare(String datefromservlet,
+                                     String datefromfile) throws ParseException {
+
+    // the same pattern the migration services use when writing RevertChange.xml
+    SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
+    Date date1 = formatter.parse(datefromservlet);
+    Date date2 = formatter.parse(datefromfile);
+    return date1.compareTo(date2) < 0;
+
+  }
+
+  public void removedir(final String dir, final String namenodeuri)
+    throws IOException, URISyntaxException {
+
+    try {
+      UserGroupInformation ugi = UserGroupInformation
+        .createRemoteUser("hdfs");
+
+      ugi.doAs(new PrivilegedExceptionAction<Void>() {
+
+        public Void run() throws Exception {
+
+          Configuration conf = new Configuration();
+          conf.set("fs.hdfs.impl",
+            org.apache.hadoop.hdfs.DistributedFileSystem.class
+              .getName());
+          conf.set("fs.file.impl",
+            org.apache.hadoop.fs.LocalFileSystem.class
+              .getName());
+          conf.set("fs.defaultFS", namenodeuri);
+          conf.set("hadoop.job.ugi", "hdfs");
+
+          FileSystem fs = FileSystem.get(conf);
+          Path src = new Path(dir);
+          fs.delete(src, true);
+          return null;
+        }
+      });
+    } catch (Exception e) {
+      Logger.getLogger(RevertChange.class).error("Exception while deleting the HDFS directory", e);
+    }
+  }
+
+  protected void doGet(HttpServletRequest request,
+                       HttpServletResponse response) throws ServletException, IOException {
+
+    final Logger logger = Logger.getLogger(RevertChange.class);
+
+    logger.info("------------------------------");
+    logger.info("Reverting the changes Start:");
+    logger.info("------------------------------");
+
+    HttpSession session = request.getSession(true);
+    String revertDate = request.getParameter("revertdate");
+    String instance = request.getParameter("instance");
+
+    logger.info("Revert Date " + revertDate);
+    logger.info("instance name " + instance);
+
+    BufferedReader br = null;
+    Connection connectionAmbariDatabase = null;
+
+    try {
+      connectionAmbariDatabase = DataSourceAmbariDatabase.getInstance(view.getProperties().get("ambaridrivername"), view.getProperties().get("ambarijdbcurl"), view.getProperties().get("ambaridbusername"), view.getProperties().get("ambaridbpassword")).getConnection();
+      connectionAmbariDatabase.setAutoCommit(false);
+
+      Statement stmt = connectionAmbariDatabase.createStatement();
+      SAXBuilder builder = new SAXBuilder();
+      File xmlFile = new File(ConfFileReader.getHomeDir() + "RevertChange.xml");
+      try {
+
+        Document document = builder.build(xmlFile);
+        Element rootNode = document.getRootElement();
+        List list = rootNode.getChildren("RevertRecord");
+
+        for (int i = 0; i < list.size(); i++) {
+
+          float calc = ((float) (i + 1)) / list.size() * 100;
+          int progressPercentage = Math.round(calc);
+          session.setAttribute(ProgressBarStatus.TASK_PROGRESS_VARIABLE, progressPercentage);
+
+          Element node = (Element) list.get(i);
+
+          if (node.getChildText("instance").equals(instance)) {
+
+            if (stringtoDatecompare(revertDate, node.getChildText("datetime").toString())) {
+
+              String sql = node.getChildText("query");
+              logger.info(sql);
+              stmt.executeUpdate(sql);
+              removedir(node.getChildText("dirname").toString(), view.getProperties().get("namenode_URI_Ambari"));
+              logger.info(node.getChildText("dirname").toString()+" deleted");
+
+            }
+
+          }
+
+        }
+
+        connectionAmbariDatabase.commit();
+
+        response.setContentType("text/html");
+        PrintWriter out = response.getWriter();
+        out.println("<br>");
+        out.println("<h4>" + " The change has been revert back for "
+          + instance + "</h4>");
+
+        session.setAttribute(ProgressBarStatus.TASK_PROGRESS_VARIABLE, 0);
+
+        logger.info("------------------------------");
+        logger.info("Reverting the changes End");
+        logger.info("------------------------------");
+
+      } catch (IOException e) {
+        logger.error("IOException: ",e);
+      } catch (ParseException e) {
+        logger.error("ParseException: ",e);
+      } catch (JDOMException e) {
+        logger.error("JDOMException: ",e);
+      } catch (URISyntaxException e) {
+        logger.error("URISyntaxException:  ",e);
+      }
+    } catch (SQLException e1) {
+      logger.error("SqlException  ",e1);
+      try {
+        connectionAmbariDatabase.rollback();
+        logger.info("Rollback done");
+      } catch (SQLException e2) {
+        logger.error("SqlException in Rollback  ",e2);
+      }
+    } catch (PropertyVetoException e) {
+      logger.error("PropertyVetoException: ",e);
+    }
+
+  }
+
+}
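
RevertChange replays the compensating actions that the migration services record in RevertChange.xml: for every RevertRecord whose instance matches and whose datetime lies after the requested revert date, it runs the stored delete statement and removes the migrated HDFS directory. An illustrative record, with made-up values, in the shape the getChildText calls above expect:

<RevertChangePage>
  <RevertRecord id="1">
    <datetime>2016-04-01 10:15:30</datetime>
    <dirname>/user/admin/pig/jobs/demo_job-2016-4-1_10-15/</dirname>
    <instance>pig_instance</instance>
    <query>delete from ds_jobimpl_5 where ds_id='42';</query>
  </RevertRecord>
</RevertChangePage>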

+ 65 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/DataSourceAmbariDatabase.java

@@ -0,0 +1,65 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.huetoambarimigration.datasource;
+
+import org.apache.ambari.view.huetoambarimigration.controller.configurationcheck.ConfigurationCheck;
+import org.apache.ambari.view.huetoambarimigration.service.configurationcheck.ConfFileReader;
+import com.mchange.v2.c3p0.ComboPooledDataSource;
+
+import java.beans.PropertyVetoException;
+import java.io.IOException;
+import java.sql.Connection;
+import java.sql.SQLException;
+
+public class DataSourceAmbariDatabase {
+
+  private static DataSourceAmbariDatabase datasource;
+  private ComboPooledDataSource cpds;
+
+  private DataSourceAmbariDatabase(String ambaridatabasedriver, String ambarijdbcurl, String ambaridatabaseusename, String ambaridatabasepassword) throws IOException, SQLException, PropertyVetoException {
+
+    cpds = new ComboPooledDataSource();
+    cpds.setDriverClass(ambaridatabasedriver); //loads the jdbc driver
+    cpds.setJdbcUrl(ambarijdbcurl);
+    cpds.setUser(ambaridatabaseusename);
+    cpds.setPassword(ambaridatabasepassword);
+
+    // the settings below are optional -- c3p0 can work with defaults
+    cpds.setMinPoolSize(10);
+    cpds.setAcquireIncrement(10);
+    cpds.setMaxPoolSize(20);
+    cpds.setMaxStatements(180);
+
+  }
+
+  public static DataSourceAmbariDatabase getInstance(String ambariDbDriver, String ambariDBjadbcURL, String ambaridbUsername, String ambariDBPasswd) throws IOException, SQLException, PropertyVetoException {
+    if (datasource == null) {
+      datasource = new DataSourceAmbariDatabase(ambariDbDriver, ambariDBjadbcURL, ambaridbUsername, ambariDBPasswd);
+    }
+    return datasource;
+  }
+
+  public Connection getConnection() throws SQLException {
+    return this.cpds.getConnection();
+  }
+
+
+}
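
A minimal usage sketch for the pool (driver, URL and credentials below are placeholders; the servlets read the real values from the view's instance properties). Note that getInstance() is a lazy, unsynchronized singleton, so the first caller fixes the pool's settings for the lifetime of the class:

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.Statement;
import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceAmbariDatabase;

public class AmbariPoolSketch {
  public static void main(String[] args) throws Exception {
    // placeholder connection settings for illustration only
    Connection c = DataSourceAmbariDatabase.getInstance(
        "org.postgresql.Driver",
        "jdbc:postgresql://ambari-host:5432/ambari",
        "ambari", "bigdata").getConnection();
    try {
      Statement st = c.createStatement();
      ResultSet rs = st.executeQuery("select count(*) from viewentity");
      while (rs.next()) {
        System.out.println("view entities: " + rs.getInt(1));
      }
    } finally {
      c.close(); // returns the connection to the c3p0 pool
    }
  }
}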

+ 64 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/DataSourceHueDatabase.java

@@ -0,0 +1,64 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.huetoambarimigration.datasource;
+
+import org.apache.ambari.view.huetoambarimigration.controller.configurationcheck.ConfigurationCheck;
+import org.apache.ambari.view.huetoambarimigration.service.configurationcheck.ConfFileReader;
+import com.mchange.v2.c3p0.ComboPooledDataSource;
+
+import java.beans.PropertyVetoException;
+import java.io.IOException;
+import java.sql.Connection;
+import java.sql.SQLException;
+
+public class DataSourceHueDatabase {
+
+  private static DataSourceHueDatabase datasource;
+
+  private ComboPooledDataSource cpdshue;
+
+  private DataSourceHueDatabase(String huedrivername, String huejdbcurl, String huedbUsername, String huedbPassword) throws IOException, SQLException, PropertyVetoException {
+    cpdshue = new ComboPooledDataSource();
+    cpdshue.setDriverClass(huedrivername); //loads the jdbc driver
+    cpdshue.setJdbcUrl(huejdbcurl);
+    cpdshue.setUser(huedbUsername);
+    cpdshue.setPassword(huedbPassword);
+    // the settings below are optional -- c3p0 can work with defaults
+    cpdshue.setMinPoolSize(10);
+    cpdshue.setAcquireIncrement(10);
+    cpdshue.setMaxPoolSize(20);
+    cpdshue.setMaxStatements(180);
+
+  }
+
+  public static DataSourceHueDatabase getInstance(String hueDrivername, String hueJdbcurl, String huedbUsername, String huedbPassword) throws IOException, SQLException, PropertyVetoException {
+    if (datasource == null) {
+      datasource = new DataSourceHueDatabase(hueDrivername, hueJdbcurl, huedbUsername, huedbPassword);
+    }
+    return datasource;
+  }
+
+  public Connection getConnection() throws SQLException {
+    return this.cpdshue.getConnection();
+  }
+
+
+}

+ 51 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/model/PojoHive.java

@@ -0,0 +1,51 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.huetoambarimigration.model;
+
+public class PojoHive {
+
+  private String database;
+  private String owner;
+  private String query;
+
+  public String getDatabase() {
+    return database;
+  }
+
+  public void setDatabase(String database) {
+    this.database = database;
+  }
+
+  public String getOwner() {
+    return owner;
+  }
+
+  public void setOwner(String owner) {
+    this.owner = owner;
+  }
+
+  public String getQuery() {
+    return query;
+  }
+
+  public void setQuery(String query) {
+    this.query = query;
+  }
+
+}

+ 72 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/model/PojoPig.java

@@ -0,0 +1,72 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.huetoambarimigration.model;
+
+import java.util.Date;
+
+public class PojoPig {
+
+  private Date dt;
+  private String script;
+  private String status;
+  private String title;
+  private String dir;
+
+  public Date getDt() {
+    return dt;
+  }
+
+  public void setDt(Date dt) {
+    this.dt = dt;
+  }
+
+  public String getScript() {
+    return script;
+  }
+
+  public void setScript(String script) {
+    this.script = script;
+  }
+
+  public String getStatus() {
+    return status;
+  }
+
+  public void setStatus(String status) {
+    this.status = status;
+  }
+
+  public String getTitle() {
+    return title;
+  }
+
+  public void setTitle(String title) {
+    this.title = title;
+  }
+
+  public String getDir() {
+    return dir;
+  }
+
+  public void setDir(String dir) {
+    this.dir = dir;
+  }
+
+
+}

+ 199 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/service/configurationcheck/ConfFileReader.java

@@ -0,0 +1,199 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.huetoambarimigration.service.configurationcheck;
+
+import java.io.BufferedReader;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.FileReader;
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.*;
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.util.Properties;
+import javax.ws.rs.core.Context;
+
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.AmbariStreamProvider;
+import org.apache.ambari.view.URLStreamProvider;
+
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.hdfs.DistributedFileSystem;
+
+import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceAmbariDatabase;
+import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceHueDatabase;
+
+import org.apache.ambari.view.huetoambarimigration.model.*;
+import org.apache.hadoop.hdfs.web.WebHdfsFileSystem;
+import org.apache.log4j.Logger;
+
+public class ConfFileReader {
+
+  static final Logger logger = Logger.getLogger(ConfFileReader.class);
+
+  private static String homeDir = System.getProperty("java.io.tmpdir")+"/";
+
+  public static boolean checkConfigurationForHue(String hueURL) {
+
+    URL url = null;
+    int responseCode = 0;
+    try {
+      url = new URL(hueURL);
+      HttpURLConnection connection = (HttpURLConnection) url.openConnection();
+      connection.setRequestMethod("GET");
+      connection.connect();
+      responseCode = connection.getResponseCode();
+
+    } catch (MalformedURLException e) {
+      logger.error("Error in accessing the URL:", e);
+    } catch (ProtocolException e) {
+      logger.error("Error in protocol: ", e);
+    } catch (IOException e) {
+      logger.error("IO Exception while establishing connection:", e);
+    }
+
+    return responseCode == 200;
+  }
+
+  public static boolean checkConfigurationForAmbari(String ambariURL) {
+
+
+    URL url = null;
+    int responseCode = 0;
+    try {
+      url = new URL(ambariURL);
+      HttpURLConnection connection = (HttpURLConnection) url.openConnection();
+      connection.setRequestMethod("GET");  //OR  huc.setRequestMethod ("HEAD");
+      connection.connect();
+      responseCode = connection.getResponseCode();
+
+    } catch (MalformedURLException e) {
+      logger.error("Error in accessing the URL: " , e);
+
+    } catch (ProtocolException e) {
+      logger.error("Error in protocol: ", e);
+    } catch (IOException e) {
+      logger.error("IO Exception while establishing connection: ",e);
+    }
+    return responseCode == 200;
+
+
+  }
+
+  public static boolean checkHueDatabaseConnection(String hueDBDRiver, String hueJdbcUrl, String huedbUsername, String huedbPassword) throws IOException {
+
+    try {
+      Connection con = DataSourceHueDatabase.getInstance(hueDBDRiver, hueJdbcUrl, huedbUsername, huedbPassword).getConnection();
+    }
+    catch (Exception e) {
+
+      logger.error("SQL exception in accessing Hue Database: ", e);
+      return false;
+    }
+
+    return true;
+
+  }
+
+  public static boolean checkAmbariDatbaseConection(String ambariDBDriver, String ambariDBJdbcUrl, String ambariDbUsername, String ambariDbPassword) throws IOException {
+
+
+    try {
+
+      Connection con = DataSourceAmbariDatabase.getInstance(ambariDBDriver, ambariDBJdbcUrl, ambariDbUsername, ambariDbPassword).getConnection();
+
+
+    } catch (Exception e) {
+
+      logger.error("Sql exception in acessing Ambari Database: " ,e);
+
+      return false;
+    }
+
+    return true;
+
+  }
+
+  public static String getHomeDir() {
+    return homeDir;
+  }
+
+  public static void setHomeDir(String homeDir) {
+    ConfFileReader.homeDir = homeDir;
+  }
+
+  public static boolean checkNamenodeURIConnectionforambari(String ambariServerNameNode) throws IOException, URISyntaxException {
+
+
+    Configuration conf = new Configuration();
+    conf.set("fs.hdfs.impl",
+      org.apache.hadoop.hdfs.DistributedFileSystem.class.getName());
+    conf.set("fs.file.impl",
+      org.apache.hadoop.fs.LocalFileSystem.class.getName()
+    );
+
+    FileSystem fileSystem = FileSystem.get(new URI(ambariServerNameNode), conf);
+
+
+    return fileSystem instanceof WebHdfsFileSystem;
+
+
+  }
+
+  public static boolean checkNamenodeURIConnectionforHue(String hueServerNamenodeURI) throws IOException, URISyntaxException {
+
+    Configuration conf = new Configuration();
+    conf.set("fs.hdfs.impl",
+      org.apache.hadoop.hdfs.DistributedFileSystem.class.getName()
+    );
+    conf.set("fs.file.impl",
+      org.apache.hadoop.fs.LocalFileSystem.class.getName()
+    );
+
+    FileSystem fileSystem = FileSystem.get(new URI(hueServerNamenodeURI), conf);
+
+
+    return fileSystem instanceof WebHdfsFileSystem;
+
+
+  }
+
+
+}
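
Taken together, these helpers form the pre-flight checklist the view runs before any migration. A hedged sketch of chaining them (all URLs and credentials are placeholders):

import org.apache.ambari.view.huetoambarimigration.service.configurationcheck.ConfFileReader;

public class PreflightSketch {
  public static void main(String[] args) throws Exception {
    // placeholder endpoints; real values come from the view configuration
    boolean ok =
      ConfFileReader.checkConfigurationForHue("http://hue-host:8888")
        && ConfFileReader.checkConfigurationForAmbari("http://ambari-host:8080")
        && ConfFileReader.checkHueDatabaseConnection(
             "org.sqlite.JDBC", "jdbc:sqlite:/var/lib/hue/desktop.db", "", "")
        && ConfFileReader.checkAmbariDatbaseConection(
             "org.postgresql.Driver", "jdbc:postgresql://ambari-host:5432/ambari", "ambari", "bigdata")
        && ConfFileReader.checkNamenodeURIConnectionforambari("webhdfs://namenode-host:50070");
    System.out.println(ok ? "all checks passed" : "configuration incomplete");
  }
}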

+ 562 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/service/hive/HiveHistoryQueryImpl.java

@@ -0,0 +1,562 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.huetoambarimigration.service.hive;
+
+import java.net.URISyntaxException;
+import java.security.PrivilegedExceptionAction;
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.io.BufferedInputStream;
+import java.io.BufferedWriter;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.ArrayList;
+import java.util.Calendar;
+import java.util.Date;
+import java.util.GregorianCalendar;
+import java.util.Scanner;
+
+import org.apache.ambari.view.huetoambarimigration.service.configurationcheck.ConfFileReader;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
+import org.apache.log4j.Logger;
+import org.jdom.Attribute;
+import org.jdom.Document;
+import org.jdom.Element;
+import org.jdom.JDOMException;
+import org.jdom.input.SAXBuilder;
+import org.jdom.output.Format;
+import org.jdom.output.XMLOutputter;
+
+public class HiveHistoryQueryImpl {
+
+  static final Logger logger = Logger.getLogger(HiveHistoryQueryImpl.class);
+
+  public void wrtitetoalternatesqlfile(String dirname, String content, String instance, int i) throws IOException {
+
+    Date dNow = new Date();
+    SimpleDateFormat ft = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
+    String currentDate = ft.format(dNow);
+
+    XMLOutputter xmlOutput = new XMLOutputter();
+    xmlOutput.setFormat(Format.getPrettyFormat());
+
+    // check the same location the file is written to below
+    File xmlfile = new File(ConfFileReader.getHomeDir() + "RevertChange.xml");
+
+    if (xmlfile.exists()) {
+      String iteration = Integer.toString(i + 1);
+      SAXBuilder builder = new SAXBuilder();
+      Document doc;
+      try {
+        doc = (Document) builder.build(xmlfile);
+        Element rootNode = doc.getRootElement();
+        Element record = new Element("RevertRecord");
+        record.setAttribute(new Attribute("id", iteration));
+        record.addContent(new Element("datetime").setText(currentDate.toString()));
+        record.addContent(new Element("dirname").setText(dirname));
+        record.addContent(new Element("instance").setText(instance));
+        record.addContent(new Element("query").setText(content));
+        rootNode.addContent(record);
+        xmlOutput.output(doc, new FileWriter(ConfFileReader.getHomeDir() + "RevertChange.xml"));
+
+      } catch (JDOMException e) {
+        logger.error("JDOMException" ,e);
+
+      }
+
+    } else {
+
+      try {
+        String iteration = Integer.toString(i + 1);
+        Element revertrecord = new Element("RevertChangePage");
+        Document doc = new Document(revertrecord);
+        doc.setRootElement(revertrecord);
+
+        Element record = new Element("RevertRecord");
+        record.setAttribute(new Attribute("id", iteration));
+        record.addContent(new Element("datetime").setText(currentDate.toString()));
+        record.addContent(new Element("dirname").setText(dirname));
+        record.addContent(new Element("instance").setText(instance));
+        record.addContent(new Element("query").setText(content));
+        doc.getRootElement().addContent(record);
+        xmlOutput.output(doc, new FileWriter(ConfFileReader.getHomeDir() + "RevertChange.xml"));
+      } catch (IOException io) {
+        logger.error("JDOMException" , io);
+      }
+
+    }
+
+  }
+
+  public int fetchMaximumIdfromAmbaridb(String driverName, Connection c, int id) throws SQLException {
+
+    String ds_id = null;
+    Statement stmt = null;
+    stmt = c.createStatement();
+    ResultSet rs = null;
+
+    if (driverName.contains("postgresql")) {
+      rs = stmt.executeQuery("select MAX(cast(ds_id as integer)) as max from ds_jobimpl_" + id + ";");
+    } else if (driverName.contains("mysql")) {
+      rs = stmt.executeQuery("select max( cast(ds_id as unsigned) ) as max from DS_JOBIMPL_" + id + ";");
+    } else if (driverName.contains("oracle")) {
+      rs = stmt.executeQuery("select MAX(cast(ds_id as integer)) as max from ds_jobimpl_" + id);
+    }
+
+    while (rs.next()) {
+      ds_id = rs.getString("max");
+    }
+
+    int num;
+    if (ds_id == null) {
+      num = 1;
+    } else {
+      num = Integer.parseInt(ds_id);
+    }
+    return num;
+  }
+
+  public void insertRowinAmbaridb(String driverName, String dirname, int maxcount, long epochtime, Connection c, int id, String instance, int i) throws SQLException, IOException {
+
+    String maxcount1 = Integer.toString(maxcount);
+    String epochtime1 = Long.toString(epochtime);
+    String ds_id = new String();
+    Statement stmt = null;
+    String sql = "";
+    String revsql = "";
+    stmt = c.createStatement();
+
+    if (driverName.contains("mysql")) {
+      sql = "INSERT INTO DS_JOBIMPL_" + id + " values ('" + maxcount1
+        + "','','','','','default'," + epochtime1 + ",0,'','','"
+        + dirname + "logs','admin','" + dirname
+        + "query.hql','','job','','','Unknown','" + dirname
+        + "','','Worksheet');";
+      revsql = "delete from  DS_JOBIMPL_" + id + " where ds_id='" + maxcount1 + "';";
+
+    } else if (driverName.contains("postgresql")) {
+      sql = "INSERT INTO ds_jobimpl_" + id + " values ('" + maxcount1
+        + "','','','','','default'," + epochtime1 + ",0,'','','"
+        + dirname + "logs','admin','" + dirname
+        + "query.hql','','job','','','Unknown','" + dirname
+        + "','','Worksheet');";
+      revsql = "delete from  ds_jobimpl_" + id + " where ds_id='" + maxcount1 + "';";
+
+    } else if (driverName.contains("oracle")) {
+      sql = "INSERT INTO ds_jobimpl_" + id + " values ('" + maxcount1
+        + "','','','','','default'," + epochtime1 + ",0,'','','"
+        + dirname + "logs','admin','" + dirname
+        + "query.hql','','job','','','Unknown','" + dirname
+        + "','','Worksheet')";
+      revsql = "delete from  ds_jobimpl_" + id + " where ds_id='" + maxcount1 + "'";
+
+    }
+    wrtitetoalternatesqlfile(dirname, revsql, instance, i);
+
+    stmt.executeUpdate(sql);
+
+  }
+
+  public int fetchInstanceTablename(String driverName, Connection c, String instance) throws SQLException {
+
+    String ds_id = new String();
+    int id = 0;
+    Statement stmt = null;
+    stmt = c.createStatement();
+    ResultSet rs = null;
+
+    if (driverName.contains("oracle")) {
+      rs = stmt.executeQuery("select id from viewentity where class_name LIKE 'org.apache.ambari.view.hive.resources.jobs.viewJobs.JobImpl' and view_instance_name='" + instance + "'");
+    } else {
+      rs = stmt.executeQuery("select id from viewentity where class_name LIKE 'org.apache.ambari.view.hive.resources.jobs.viewJobs.JobImpl' and view_instance_name='" + instance + "';");
+    }
+
+    while (rs.next()) {
+      id = rs.getInt("id");
+    }
+    return id;
+  }
+
+  public long getEpochTime() throws ParseException {
+    long seconds = System.currentTimeMillis() / 1000L;
+    return seconds;
+
+  }
+
+  public String getTime() throws ParseException {
+    int day, month, year;
+    int minute, hour;
+    GregorianCalendar date = new GregorianCalendar();
+
+    day = date.get(Calendar.DAY_OF_MONTH);
+    month = date.get(Calendar.MONTH);
+    year = date.get(Calendar.YEAR);
+
+    minute = date.get(Calendar.MINUTE);
+    hour = date.get(Calendar.HOUR);
+
+    // Calendar.MONTH is zero-based, hence the +1
+    return year + "-" + (month + 1) + "-" + day + "_" + hour + "-" + minute;
+
+  }
+
+  public String[] fetchFromHue(String username, String startdate, String endtime, Connection connection) throws ClassNotFoundException, SQLException {
+    int id = 0;
+    // collect results in a growable list instead of a fixed String[100]
+    ArrayList<String> query = new ArrayList<String>();
+
+    try {
+      connection.setAutoCommit(false);
+      Statement statement = connection.createStatement();
+
+      ResultSet rs1 = null;
+      if (!username.equals("all")) {
+        ResultSet rs = statement.executeQuery("select id from auth_user where username='" + username + "';");
+        while (rs.next()) {
+          id = rs.getInt("id");
+        }
+      }
+      if (startdate.equals("") && endtime.equals("")) {
+        if (username.equals("all")) {
+          rs1 = statement.executeQuery("select query from beeswax_queryhistory;");
+        } else {
+          rs1 = statement.executeQuery("select query from beeswax_queryhistory where owner_id =" + id + ";");
+        }
+
+      } else if (!(startdate.equals("")) && !(endtime.equals(""))) {
+        if (username.equals("all")) {
+          rs1 = statement.executeQuery("select query from beeswax_queryhistory where submission_date >= date('" + startdate + "') AND submission_date < date('" + endtime + "');");
+        } else {
+          rs1 = statement.executeQuery("select query from beeswax_queryhistory where owner_id =" + id + " AND submission_date >= date('" + startdate + "') AND submission_date <= date('" + endtime + "');");
+        }
+      } else if (!(startdate.equals("")) && (endtime.equals(""))) {
+        if (username.equals("all")) {
+          rs1 = statement.executeQuery("select query from beeswax_queryhistory where submission_date >= date('" + startdate + "');");
+        } else {
+          rs1 = statement.executeQuery("select query from beeswax_queryhistory where owner_id =" + id + " AND submission_date >= date('" + startdate + "');");
+        }
+
+      } else if ((startdate.equals("")) && !(endtime.equals(""))) {
+        if (username.equals("all")) {
+          rs1 = statement.executeQuery("select query from beeswax_queryhistory where submission_date < date('" + endtime + "');");
+        } else {
+          rs1 = statement.executeQuery("select query from beeswax_queryhistory where owner_id =" + id + " AND submission_date < date('" + endtime + "');");
+        }
+      }
+
+
+      while (rs1.next()) {
+        query.add(rs1.getString("query"));
+      }
+
+      connection.commit();
+
+    } catch (SQLException e) {
+      connection.rollback();
+
+    } finally {
+      try {
+        if (connection != null)
+          connection.close();
+      } catch (SQLException e) {
+        logger.error("Sql exception error: " + e);
+      }
+    }
+    return query.toArray(new String[query.size()]);
+
+  }
+
+  public void writetoFileQueryhql(String content, String homedir) {
+    try {
+      File file = new File(homedir + "query.hql");
+      // if the file doesn't exist, create it
+      if (!file.exists()) {
+        file.createNewFile();
+      }
+      FileWriter fw = new FileWriter(file.getAbsoluteFile());
+      BufferedWriter bw = new BufferedWriter(fw);
+      bw.write(content);
+      bw.close();
+    } catch (IOException e) {
+      logger.error("IOException" , e);
+    }
+
+  }
+
+  public void deleteFileQueryhql(String homedir) {
+    try {
+      File file = new File(homedir + "query.hql");
+
+      if (file.delete()) {
+        logger.info("temporary hql file deleted");
+      } else {
+        logger.info("temporary hql file delete failed");
+      }
+
+    } catch (Exception e) {
+      logger.error("File Exception ", e);
+    }
+
+  }
+
+  public void deleteFileQueryLogs(String homedir) {
+    try {
+      File file = new File(homedir + "logs");
+
+      if (file.delete()) {
+        logger.info("temporary logs file deleted");
+      } else {
+        logger.info("temporary logs file delete failed");
+      }
+
+    } catch (Exception e) {
+      logger.error("File Exception ", e);
+    }
+
+  }
+
+  public void writetoFileLogs(String homedir) {
+    try {
+      String content = "";
+      File file = new File(homedir + "logs");
+      // if the file doesn't exist, create it
+      if (!file.exists()) {
+        file.createNewFile();
+      }
+      FileWriter fw = new FileWriter(file.getAbsoluteFile());
+      BufferedWriter bw = new BufferedWriter(fw);
+      bw.write(content);
+      bw.close();
+    } catch (IOException e) {
+      logger.error("IOException" , e);
+    }
+
+  }
+
+  public void createDir(final String dir, final String namenodeuri) throws IOException,
+    URISyntaxException {
+
+    try {
+      final Configuration conf = new Configuration();
+
+      conf.set("fs.hdfs.impl",
+        org.apache.hadoop.hdfs.DistributedFileSystem.class.getName()
+      );
+      conf.set("fs.file.impl",
+        org.apache.hadoop.fs.LocalFileSystem.class.getName()
+      );
+      conf.set("fs.defaultFS", namenodeuri);
+      conf.set("hadoop.job.ugi", "hdfs");
+      UserGroupInformation.setConfiguration(conf);
+
+      UserGroupInformation ugi = UserGroupInformation.createRemoteUser("hdfs");
+
+      ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
+
+        public Boolean run() throws Exception {
+
+          FileSystem fs = FileSystem.get(conf);
+          Path src = new Path(dir);
+          Boolean b = fs.mkdirs(src);
+          return b;
+        }
+      });
+    } catch (Exception e) {
+      logger.error("Exception in Webhdfs" , e);
+    }
+  }
+
+  public void createDirKerberorisedSecured(final String dir, final String namenodeuri) throws IOException,
+    URISyntaxException {
+
+    try {
+      final Configuration conf = new Configuration();
+
+      conf.set("fs.hdfs.impl",
+        org.apache.hadoop.hdfs.DistributedFileSystem.class.getName()
+      );
+      conf.set("fs.file.impl",
+        org.apache.hadoop.fs.LocalFileSystem.class.getName()
+      );
+      conf.set("fs.defaultFS", namenodeuri);
+      conf.set("hadoop.job.ugi", "hdfs");
+      conf.set("hadoop.security.authentication", "Kerberos");
+      UserGroupInformation.setConfiguration(conf);
+      UserGroupInformation ugi = UserGroupInformation.createRemoteUser("hdfs");
+      ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
+
+        public Boolean run() throws Exception {
+          FileSystem fs = FileSystem.get(conf);
+          Path src = new Path(dir);
+          Boolean b = fs.mkdirs(src);
+          return b;
+        }
+      });
+    } catch (Exception e) {
+      logger.error("Exception in Webhdfs" , e);
+    }
+  }
+
+
+  public void putFileinHdfs(final String source, final String dest, final String namenodeuri)
+    throws IOException {
+
+    try {
+      UserGroupInformation ugi = UserGroupInformation.createRemoteUser("hdfs");
+
+      ugi.doAs(new PrivilegedExceptionAction<Void>() {
+
+        public Void run() throws Exception {
+
+          Configuration conf = new Configuration();
+          conf.set("fs.defaultFS", namenodeuri);
+          conf.set("hadoop.job.ugi", "hdfs");
+          conf.set("fs.hdfs.impl",
+            org.apache.hadoop.hdfs.DistributedFileSystem.class.getName()
+          );
+          conf.set("fs.file.impl",
+            org.apache.hadoop.fs.LocalFileSystem.class.getName()
+          );
+          FileSystem fileSystem = FileSystem.get(conf);
+
+          String filename = source.substring(
+            source.lastIndexOf('/') + 1, source.length());
+          String dest1;
+          if (dest.charAt(dest.length() - 1) != '/') {
+            dest1 = dest + "/" + filename;
+          } else {
+            dest1 = dest + filename;
+          }
+
+          Path path = new Path(dest1);
+          // fileSystem.create() overwrites the destination if it already exists
+          FSDataOutputStream out = fileSystem.create(path);
+
+          InputStream in = new BufferedInputStream(
+            new FileInputStream(new File(source)));
+
+          byte[] b = new byte[1024];
+          int numBytes = 0;
+          while ((numBytes = in.read(b)) > 0) {
+            out.write(b, 0, numBytes);
+          }
+          in.close();
+          out.close();
+          fileSystem.close();
+          return null;
+        }
+      });
+    } catch (Exception e) {
+      logger.error("Webhdfs exception" , e);
+    }
+
+  }
+
+  public void putFileinHdfsKerborizedSecured(final String source, final String dest, final String namenodeuri)
+    throws IOException {
+
+    try {
+
+      final Configuration conf = new Configuration();
+
+      conf.set("fs.hdfs.impl",
+        org.apache.hadoop.hdfs.DistributedFileSystem.class.getName()
+      );
+      conf.set("fs.file.impl",
+        org.apache.hadoop.fs.LocalFileSystem.class.getName()
+      );
+      conf.set("fs.defaultFS", namenodeuri);
+      conf.set("hadoop.job.ugi", "hdfs");
+      conf.set("hadoop.security.authentication", "Kerberos");
+      UserGroupInformation.setConfiguration(conf);
+      UserGroupInformation ugi = UserGroupInformation.createRemoteUser("hdfs");
+
+      ugi.doAs(new PrivilegedExceptionAction<Void>() {
+
+        public Void run() throws Exception {
+
+          FileSystem fileSystem = FileSystem.get(conf);
+
+          String filename = source.substring(
+            source.lastIndexOf('/') + 1, source.length());
+          String dest1;
+          if (dest.charAt(dest.length() - 1) != '/') {
+            dest1 = dest + "/" + filename;
+          } else {
+            dest1 = dest + filename;
+          }
+
+          Path path = new Path(dest1);
+          // fileSystem.create() overwrites the destination if it already exists
+          FSDataOutputStream out = fileSystem.create(path);
+
+          InputStream in = new BufferedInputStream(
+            new FileInputStream(new File(source)));
+
+          byte[] b = new byte[1024];
+          int numBytes = 0;
+          while ((numBytes = in.read(b)) > 0) {
+            out.write(b, 0, numBytes);
+          }
+          in.close();
+          out.close();
+          fileSystem.close();
+          return null;
+        }
+      });
+    } catch (Exception e) {
+      logger.error("Webhdfs exception" , e);
+
+    }
+
+  }
+
+}
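
End to end, one history query migrates in five steps: fetch from Hue, write query.hql plus an empty logs file locally, create the target HDFS directory, insert the DS_JOBIMPL row, and upload the two files. A condensed sketch of that composition (connections, driver name, instance and namenode URI are assumptions supplied by the caller, and the directory pattern is illustrative; the real orchestration lives in HiveHistoryMigration). Note that fetchFromHue() closes the Hue connection it is handed, so it can be used only once per connection:

import java.sql.Connection;
import org.apache.ambari.view.huetoambarimigration.service.configurationcheck.ConfFileReader;
import org.apache.ambari.view.huetoambarimigration.service.hive.HiveHistoryQueryImpl;

public class HiveHistoryFlowSketch {
  public static void migrate(Connection hue, Connection ambari, String ambariDriver,
                             String instance, String namenodeURI) throws Exception {
    HiveHistoryQueryImpl impl = new HiveHistoryQueryImpl();
    String[] queries = impl.fetchFromHue("all", "", "", hue);              // 1. pull history from Hue
    int tableId = impl.fetchInstanceTablename(ambariDriver, ambari, instance);
    for (int i = 0; i < queries.length; i++) {
      int nextId = impl.fetchMaximumIdfromAmbaridb(ambariDriver, ambari, tableId) + 1;
      String dir = "/user/admin/hive/jobs/hive-job-" + nextId + "-" + impl.getTime() + "/"; // illustrative
      impl.writetoFileQueryhql(queries[i], ConfFileReader.getHomeDir());   // 2. query.hql on local disk
      impl.writetoFileLogs(ConfFileReader.getHomeDir());                   //    plus an empty logs file
      impl.createDir(dir, namenodeURI);                                    // 3. target directory in HDFS
      impl.insertRowinAmbaridb(ambariDriver, dir, nextId, impl.getEpochTime(),
          ambari, tableId, instance, i);                                   // 4. register the job row
      impl.putFileinHdfs(ConfFileReader.getHomeDir() + "query.hql", dir, namenodeURI); // 5. upload
      impl.putFileinHdfs(ConfFileReader.getHomeDir() + "logs", dir, namenodeURI);
    }
  }
}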

+ 778 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/service/hive/HiveSavedQueryImpl.java

@@ -0,0 +1,778 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.huetoambarimigration.service.hive;
+
+import java.nio.charset.Charset;
+import java.security.PrivilegedExceptionAction;
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.io.BufferedInputStream;
+import java.io.BufferedReader;
+import java.io.BufferedWriter;
+import java.io.ByteArrayInputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.util.ArrayList;
+import java.util.Calendar;
+import java.util.Date;
+import java.util.GregorianCalendar;
+import java.util.Scanner;
+import java.io.*;
+import java.net.URISyntaxException;
+import java.net.URL;
+
+import org.apache.ambari.view.huetoambarimigration.service.configurationcheck.ConfFileReader;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
+import org.apache.log4j.Logger;
+import org.jdom.Attribute;
+import org.jdom.Document;
+import org.jdom.Element;
+import org.jdom.JDOMException;
+import org.jdom.input.SAXBuilder;
+import org.jdom.output.Format;
+import org.jdom.output.XMLOutputter;
+import org.json.JSONArray;
+import org.json.JSONObject;
+import org.apache.hadoop.security.authentication.client.AuthenticatedURL;
+import org.apache.hadoop.security.UserGroupInformation;
+
+import org.apache.ambari.view.huetoambarimigration.model.*;
+
+public class HiveSavedQueryImpl {
+
+  static final Logger logger = Logger.getLogger(HiveSavedQueryImpl.class);
+
+  private static String readAll(Reader rd) throws IOException {
+    StringBuilder sb = new StringBuilder();
+    int cp;
+    while ((cp = rd.read()) != -1) {
+      sb.append((char) cp);
+    }
+    return sb.toString();
+  }
+
+  public void wrtitetoalternatesqlfile(String dirname, String content,
+                                       String instance, int i) throws IOException {
+
+    Date dNow = new Date();
+    SimpleDateFormat ft = new SimpleDateFormat("YYYY-MM-dd hh:mm:ss");
+    String currentDate = ft.format(dNow);
+
+    XMLOutputter xmlOutput = new XMLOutputter();
+
+    xmlOutput.setFormat(Format.getPrettyFormat());
+
+    File xmlfile = new File(ConfFileReader.getHomeDir() + "RevertChange.xml");
+
+    if (xmlfile.exists()) {
+      String iteration = Integer.toString(i + 1);
+      SAXBuilder builder = new SAXBuilder();
+      Document doc;
+      try {
+        doc = (Document) builder.build(xmlfile);
+
+        Element rootNode = doc.getRootElement();
+
+        Element record = new Element("RevertRecord");
+        record.setAttribute(new Attribute("id", iteration));
+        record.addContent(new Element("datetime").setText(currentDate
+          .toString()));
+        record.addContent(new Element("dirname").setText(dirname));
+        record.addContent(new Element("instance").setText(instance));
+        record.addContent(new Element("query").setText(content));
+
+        rootNode.addContent(record);
+        xmlOutput.output(doc, new FileWriter(ConfFileReader.getHomeDir() + "RevertChange.xml"));
+
+      } catch (JDOMException e) {
+        logger.error("JDOMException: ", e);
+      }
+
+    } else {
+
+      try {
+        String iteration = Integer.toString(i + 1);
+        Element revertrecord = new Element("RevertChangePage");
+        Document doc = new Document(revertrecord);
+        doc.setRootElement(revertrecord);
+
+        Element record = new Element("RevertRecord");
+        record.setAttribute(new Attribute("id", iteration));
+        record.addContent(new Element("datetime").setText(currentDate
+          .toString()));
+        record.addContent(new Element("dirname").setText(dirname));
+        record.addContent(new Element("instance").setText(instance));
+        record.addContent(new Element("query").setText(content));
+
+        doc.getRootElement().addContent(record);
+
+        xmlOutput.output(doc, new FileWriter(ConfFileReader.getHomeDir() + "RevertChange.xml"));
+
+      } catch (IOException io) {
+        logger.error("IOException: ", io);
+      }
+
+    }
+
+  }
+
+  public int fetchMaxidforSavedQueryHive(String driverName, Connection c, int id)
+    throws SQLException {
+
+    String ds_id = null;
+    Statement stmt = null;
+    stmt = c.createStatement();
+    ResultSet rs = null;
+
+    if (driverName.contains("postgresql")) {
+      rs = stmt.executeQuery("select MAX(cast(ds_id as integer)) as max from ds_savedquery_" + id + ";");
+    } else if (driverName.contains("mysql")) {
+      rs = stmt.executeQuery("select max(cast(ds_id as unsigned) ) as max from DS_SAVEDQUERY_" + id + ";");
+    } else if (driverName.contains("oracle")) {
+      rs = stmt.executeQuery("select MAX(cast(ds_id as integer)) as max from ds_savedquery_" + id + ";");
+    }
+
+    while (rs.next()) {
+      ds_id = rs.getString("max");
+
+    }
+
+    int num;
+    if (ds_id == null) {
+      num = 1;
+    } else {
+      num = Integer.parseInt(ds_id);
+    }
+
+    return num;
+  }
+
+  public int fetchInstancetablenameForSavedqueryHive(String driverName, Connection c,
+                                                     String instance) throws SQLException {
+
+    String ds_id = new String();
+    int id = 0;
+    Statement stmt = null;
+
+    stmt = c.createStatement();
+    ResultSet rs = null;
+
+    if (driverName.contains("oracle")) {
+      rs = stmt
+        .executeQuery("select * from viewentity where class_name LIKE 'org.apache.ambari.view.hive.resources.savedQueries.SavedQuery' and view_instance_name='"
+          + instance + "'");
+    } else {
+      rs = stmt
+        .executeQuery("select * from viewentity where class_name LIKE 'org.apache.ambari.view.hive.resources.savedQueries.SavedQuery' and view_instance_name='"
+          + instance + "';");
+    }
+
+
+    while (rs.next()) {
+      id = rs.getInt("id");
+
+    }
+
+    return id;
+  }
+
+  public int fetchInstanceTablenameHiveHistory(String driverName, Connection c,
+                                               String instance) throws SQLException {
+    String ds_id = new String();
+    int id = 0;
+    Statement stmt = null;
+
+
+    stmt = c.createStatement();
+    ResultSet rs = null;
+
+    if (driverName.contains("oracle")) {
+      rs = stmt.executeQuery("select id from viewentity where class_name LIKE 'org.apache.ambari.view.hive.resources.jobs.viewJobs.JobImpl' and view_instance_name='" + instance + "'");
+    } else {
+      rs = stmt.executeQuery("select id from viewentity where class_name LIKE 'org.apache.ambari.view.hive.resources.jobs.viewJobs.JobImpl' and view_instance_name='" + instance + "';");
+    }
+
+
+    while (rs.next()) {
+      id = rs.getInt("id");
+      System.out.println("id is " + id);
+
+    }
+
+    return id;
+
+  }
+
+  public int fetchMaxdsidFromHiveHistory(String driverName, Connection c, int id)
+    throws SQLException {
+
+    String ds_id = null;
+    Statement stmt = null;
+
+    stmt = c.createStatement();
+    ResultSet rs = null;
+
+    if (driverName.contains("postgresql")) {
+      rs = stmt.executeQuery("select MAX(cast(ds_id as integer)) as max from ds_jobimpl_" + id + ";");
+    } else if (driverName.contains("mysql")) {
+      rs = stmt.executeQuery("select max( cast(ds_id as unsigned) ) as max from DS_JOBIMPL_" + id + ";");
+    } else if (driverName.contains("oracle")) {
+      rs = stmt.executeQuery("select MAX(cast(ds_id as integer)) as max from ds_jobimpl_" + id);
+    }
+    while (rs.next()) {
+      ds_id = rs.getString("max");
+    }
+    int num;
+    if (ds_id == null) {
+      num = 1;
+    } else {
+      num = Integer.parseInt(ds_id);
+    }
+    return num;
+  }
+
+
+  public void insertRowHiveHistory(String driverName, String dirname, int maxcount,
+                                   long epochtime, Connection c, int id, String instance, int i)
+    throws SQLException, IOException {
+    String maxcount1 = Integer.toString(maxcount);
+
+    String epochtime1 = Long.toString(epochtime);
+
+    String ds_id = new String();
+    Statement stmt = null;
+
+    stmt = c.createStatement();
+    String sql = "";
+    String revsql = "";
+
+    if (driverName.contains("mysql")) {
+      sql = "INSERT INTO DS_JOBIMPL_" + id + " values ('" + maxcount1
+        + "','','','','','default'," + epochtime1 + ",0,'','','"
+        + dirname + "logs','admin','" + dirname
+        + "query.hql','','job','','','Unknown','" + dirname
+        + "','','Worksheet');";
+
+      revsql = "delete from  DS_JOBIMPL_" + id + " where ds_id='"
+        + maxcount1 + "';";
+
+    } else if (driverName.contains("postgresql")) {
+      sql = "INSERT INTO ds_jobimpl_" + id + " values ('" + maxcount1
+        + "','','','','','default'," + epochtime1 + ",0,'','','"
+        + dirname + "logs','admin','" + dirname
+        + "query.hql','','job','','','Unknown','" + dirname
+        + "','','Worksheet');";
+
+      revsql = "delete from  ds_jobimpl_" + id + " where ds_id='"
+        + maxcount1 + "';";
+
+    } else if (driverName.contains("oracle")) {
+      sql = "INSERT INTO ds_jobimpl_" + id + " values ('" + maxcount1
+        + "','','','','','default'," + epochtime1 + ",0,'','','"
+        + dirname + "logs','admin','" + dirname
+        + "query.hql','','job','','','Unknown','" + dirname
+        + "','','Worksheet')";
+      revsql = "delete from  ds_jobimpl_" + id + " where ds_id='"
+        + maxcount1 + "'";
+
+    }
+    stmt.executeUpdate(sql);
+    wrtitetoalternatesqlfile(dirname, revsql, instance, i);
+  }
+
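+  /**
+   * Inserts one migrated saved query into DS_SAVEDQUERY_&lt;id&gt; and logs the
+   * matching delete statement so the migration can be reverted.
+   */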
+  public void insertRowinSavedQuery(String driverName, int maxcount, String database,
+                                    String dirname, String query, String name, Connection c, int id,
+                                    String instance, int i) throws SQLException, IOException {
+    String maxcount1 = Integer.toString(maxcount);
+
+    Statement stmt = c.createStatement();
+    String sql = "";
+    String revsql = "";
+
+    if (driverName.contains("mysql")) {
+      sql = "INSERT INTO DS_SAVEDQUERY_" + id + " values ('"
+        + maxcount1 + "','" + database + "','" + "admin" + "','"
+        + dirname + "query.hql','" + query + "','" + name + "');";
+
+      revsql = "delete from  DS_SAVEDQUERY_" + id + " where ds_id='"
+        + maxcount1 + "';";
+
+    } else if (driverName.contains("postgresql")) {
+      sql = "INSERT INTO ds_savedquery_" + id + " values ('"
+        + maxcount1 + "','" + database + "','" + "admin" + "','"
+        + dirname + "query.hql','" + query + "','" + name + "');";
+
+      revsql = "delete from  ds_savedquery_" + id + " where ds_id='"
+        + maxcount1 + "';";
+
+    } else if (driverName.contains("oracle")) {
+      sql = "INSERT INTO ds_savedquery_" + id + " values ('"
+        + maxcount1 + "','" + database + "','" + "admin" + "','"
+        + dirname + "query.hql','" + query + "','" + name + "')";
+
+      revsql = "delete from  ds_savedquery_" + id + " where ds_id='"
+        + maxcount1 + "'";
+
+    }
+    wrtitetoalternatesqlfile(dirname, revsql, instance, i);
+    stmt.executeUpdate(sql);
+  }
+
+  public long getEpochTime() throws ParseException {
+    return System.currentTimeMillis() / 1000L;
+  }
+
+  public String getTime() throws ParseException {
+    GregorianCalendar date = new GregorianCalendar();
+
+    int day = date.get(Calendar.DAY_OF_MONTH);
+    int month = date.get(Calendar.MONTH);
+    int year = date.get(Calendar.YEAR);
+    int minute = date.get(Calendar.MINUTE);
+    int hour = date.get(Calendar.HOUR);
+
+    return year + "-" + (month + 1) + "-" + day + "_" + hour + "-" + minute;
+  }
+
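+  /**
+   * Reads saved queries from Hue's beeswax_savedquery table, optionally
+   * filtered by owner and modification-date range, and extracts the query
+   * text and database name from the JSON "data" column.
+   */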
+  public ArrayList<PojoHive> fetchFromHuedb(String username,
+                                            String startdate, String endtime, Connection connection)
+    throws ClassNotFoundException, IOException {
+    int id = 0;
+    ArrayList<PojoHive> hiveArrayList = new ArrayList<PojoHive>();
+    ResultSet rs1 = null;
+
+    try {
+      Statement statement = connection.createStatement();
+      if (!username.equals("all")) {
+        ResultSet rs = statement
+          .executeQuery("select id from auth_user where username='"
+            + username + "';");
+        while (rs.next()) {
+          id = rs.getInt("id");
+        }
+      }
+      if (startdate.equals("") && endtime.equals("")) {
+        if (username.equals("all")) {
+          rs1 = statement
+            .executeQuery("select data,name,owner_id from beeswax_savedquery;");
+
+        } else {
+          rs1 = statement
+            .executeQuery("select data,name,owner_id from beeswax_savedquery where name!='My saved query'and owner_id ="
+              + id + ";");
+        }
+
+      } else if (!(startdate.equals("")) && !(endtime.equals(""))) {
+        if (username.equals("all")) {
+          rs1 = statement
+            .executeQuery("select data,name,owner_id from beeswax_savedquery where name!='My saved query' AND mtime >= date('"
+              + startdate
+              + "') AND mtime <= date('"
+              + endtime + "');");
+        } else {
+          rs1 = statement
+            .executeQuery("select data,name,owner_id from beeswax_savedquery where name!='My saved query'and owner_id ="
+              + id
+              + " AND mtime >= date('"
+              + startdate
+              + "') AND mtime <= date('"
+              + endtime
+              + "');");
+        }
+
+      } else if (!(startdate.equals("")) && (endtime.equals(""))) {
+        if (username.equals("all")) {
+          rs1 = statement
+            .executeQuery("select data,name,owner_id from beeswax_savedquery where name!='My saved query'and  mtime >= date('"
+              + startdate + "');");
+        } else {
+          rs1 = statement
+            .executeQuery("select data,name,owner_id from beeswax_savedquery where name!='My saved query'and owner_id ="
+              + id
+              + " AND mtime >= date('"
+              + startdate
+              + "');");
+        }
+
+      } else if ((startdate.equals("")) && !(endtime.equals(""))) {
+        if (username.equals("all")) {
+          rs1 = statement
+            .executeQuery("select data,name,owner_id from beeswax_savedquery where name!='My saved query' AND mtime <= date('"
+              + endtime + "');");
+        } else {
+          rs1 = statement
+            .executeQuery("select data,name,owner_id from beeswax_savedquery where name!='My saved query'and owner_id ="
+              + id
+              + " AND mtime <= date('"
+              + endtime
+              + "');");
+        }
+
+      }
+      while (rs1.next()) {
+        PojoHive hivepojo = new PojoHive();
+        String name = rs1.getString("name");
+        String temp = rs1.getString("data");
+        InputStream is = new ByteArrayInputStream(temp.getBytes());
+        BufferedReader rd = new BufferedReader(new InputStreamReader(
+          is, Charset.forName("UTF-8")));
+        String jsonText = readAll(rd);
+        JSONObject json = new JSONObject(jsonText);
+        String resources = json.get("query").toString();
+        json = new JSONObject(resources);
+        String resarr = (json.get("query")).toString();
+        String database = (json.get("database")).toString();
+        hivepojo.setQuery(resarr);
+        hivepojo.setDatabase(database);
+        hivepojo.setOwner(name);
+        hiveArrayList.add(hivepojo);
+      }
+
+    } catch (SQLException e) {
+      logger.error("SQLException: ", e);
+    } finally {
+      try {
+        if (connection != null)
+          connection.close();
+      } catch (SQLException e) {
+        logger.error("sql connection exception" , e);
+      }
+    }
+
+    return hiveArrayList;
+
+  }
+
+
+  public void writetoFilequeryHql(String content, String homedir) {
+    try {
+      File file = new File(homedir + "query.hql");
+      if (!file.exists()) {
+        file.createNewFile();
+      }
+
+      FileWriter fw = new FileWriter(file.getAbsoluteFile());
+      BufferedWriter bw = new BufferedWriter(fw);
+      bw.write(content);
+      bw.close();
+
+    } catch (IOException e) {
+      logger.error("IOException: " , e);
+    }
+
+  }
+
+  public void deleteFileQueryhql(String homedir) {
+    try {
+      File file = new File(homedir + "query.hql");
+      if (file.delete()) {
+        logger.info("temporary hql file deleted");
+      } else {
+        logger.info("temporary hql file delete failed");
+      }
+    } catch (Exception e) {
+      logger.error("File Exception ", e);
+    }
+  }
+
+  public void deleteFileQueryLogs(String homedir) {
+    try {
+      File file = new File(homedir + "logs");
+      if (file.delete()) {
+        logger.info("temporary logs file deleted");
+      } else {
+        logger.info("temporary logs file delete failed");
+      }
+    } catch (Exception e) {
+      logger.error("File Exception ", e);
+    }
+  }
+
+
+  public void writetoFileLogs(String homedir) {
+    try {
+
+      String content = "";
+      File file = new File(homedir + "logs");
+
+      // if the file doesn't exist, create it
+      if (!file.exists()) {
+        file.createNewFile();
+      }
+
+      FileWriter fw = new FileWriter(file.getAbsoluteFile());
+      BufferedWriter bw = new BufferedWriter(fw);
+      bw.write(content);
+      bw.close();
+
+    } catch (IOException e) {
+      logger.error("IOException: " , e);
+    }
+
+  }
+
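+  /**
+   * Creates the given directory in HDFS as the "hdfs" user (unsecured
+   * cluster variant).
+   */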
+  public void createDirHive(final String dir, final String namenodeuri)
+    throws IOException, URISyntaxException {
+
+    try {
+      final Configuration conf = new Configuration();
+
+      conf.set("fs.hdfs.impl",
+        org.apache.hadoop.hdfs.DistributedFileSystem.class.getName()
+      );
+      conf.set("fs.file.impl",
+        org.apache.hadoop.fs.LocalFileSystem.class.getName()
+      );
+      conf.set("fs.defaultFS", namenodeuri);
+      conf.set("hadoop.job.ugi", "hdfs");
+      conf.set("hadoop.security.authentication", "Kerberos");
+
+      UserGroupInformation.setConfiguration(conf);
+      UserGroupInformation ugi = UserGroupInformation.createRemoteUser("hdfs");
+
+      ugi.doAs(new PrivilegedExceptionAction<Void>() {
+
+        public Void run() throws Exception {
+
+          FileSystem fs = FileSystem.get(conf);
+          Path src = new Path(dir);
+          fs.mkdirs(src);
+          return null;
+        }
+      });
+    } catch (Exception e) {
+      logger.error("Webhdfs: " , e);
+    }
+  }
+
+  public void createDirHiveSecured(final String dir, final String namenodeuri)
+    throws IOException, URISyntaxException {
+
+    try {
+      final Configuration conf = new Configuration();
+
+      conf.set("fs.hdfs.impl",
+        org.apache.hadoop.hdfs.DistributedFileSystem.class.getName()
+      );
+      conf.set("fs.file.impl",
+        org.apache.hadoop.fs.LocalFileSystem.class.getName()
+      );
+      conf.set("fs.defaultFS", namenodeuri);
+      conf.set("hadoop.job.ugi", "hdfs");
+      conf.set("hadoop.security.authentication", "Kerberos");
+
+      UserGroupInformation.setConfiguration(conf);
+      UserGroupInformation ugi = UserGroupInformation.createRemoteUser("hdfs");
+
+      ugi.doAs(new PrivilegedExceptionAction<Void>() {
+
+        public Void run() throws Exception {
+
+          FileSystem fs = FileSystem.get(conf);
+          Path src = new Path(dir);
+          fs.mkdirs(src);
+          return null;
+        }
+      });
+    } catch (Exception e) {
+      logger.error("Webhdfs: " , e);
+    }
+  }
+
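+  /**
+   * Copies a local file into HDFS under dest/&lt;filename&gt; as the "hdfs"
+   * user; any existing file at that path is overwritten.
+   */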
+  public void putFileinHdfs(final String source, final String dest,
+                            final String namenodeuri) throws IOException {
+
+    try {
+      final Configuration conf = new Configuration();
+
+      conf.set("fs.hdfs.impl",
+        org.apache.hadoop.hdfs.DistributedFileSystem.class.getName()
+      );
+      conf.set("fs.file.impl",
+        org.apache.hadoop.fs.LocalFileSystem.class.getName()
+      );
+      conf.set("fs.defaultFS", namenodeuri);
+      conf.set("hadoop.job.ugi", "hdfs");
+      conf.set("hadoop.security.authentication", "Kerberos");
+
+      UserGroupInformation.setConfiguration(conf);
+      UserGroupInformation ugi = UserGroupInformation.createRemoteUser("hdfs");
+      ugi.doAs(new PrivilegedExceptionAction<Void>() {
+
+        public Void run() throws Exception {
+
+          FileSystem fileSystem = FileSystem.get(conf);
+          String filename = source.substring(
+            source.lastIndexOf('/') + 1, source.length());
+          String dest1;
+          if (dest.charAt(dest.length() - 1) != '/') {
+            dest1 = dest + "/" + filename;
+          } else {
+            dest1 = dest + filename;
+          }
+
+          Path path = new Path(dest1);
+          // an existing file at the destination is overwritten by create()
+          FSDataOutputStream out = fileSystem.create(path);
+
+          InputStream in = new BufferedInputStream(
+            new FileInputStream(new File(source)));
+
+          byte[] b = new byte[1024];
+          int numBytes = 0;
+          while ((numBytes = in.read(b)) > 0) {
+            out.write(b, 0, numBytes);
+          }
+          in.close();
+          out.close();
+          fileSystem.close();
+          return null;
+        }
+      });
+    } catch (Exception e) {
+      logger.error("Webhdfs exception" , e);
+    }
+
+  }
+
+
+  public void putFileinHdfsSecured(final String source, final String dest,
+                                   final String namenodeuri) throws IOException {
+
+    try {
+      final Configuration conf = new Configuration();
+
+      conf.set("fs.hdfs.impl",
+        org.apache.hadoop.hdfs.DistributedFileSystem.class.getName()
+      );
+      conf.set("fs.file.impl",
+        org.apache.hadoop.fs.LocalFileSystem.class.getName()
+      );
+      conf.set("fs.defaultFS", namenodeuri);
+      conf.set("hadoop.job.ugi", "hdfs");
+      conf.set("hadoop.security.authentication", "Kerberos");
+
+      UserGroupInformation.setConfiguration(conf);
+      UserGroupInformation ugi = UserGroupInformation.createRemoteUser("hdfs");
+      ugi.doAs(new PrivilegedExceptionAction<Void>() {
+
+        public Void run() throws Exception {
+
+
+          FileSystem fileSystem = FileSystem.get(conf);
+
+          String filename = source.substring(
+            source.lastIndexOf('/') + 1, source.length());
+          String dest1;
+          if (dest.charAt(dest.length() - 1) != '/') {
+            dest1 = dest + "/" + filename;
+          } else {
+            dest1 = dest + filename;
+          }
+
+          Path path = new Path(dest1);
+          // an existing file at the destination is overwritten by create()
+          FSDataOutputStream out = fileSystem.create(path);
+
+          InputStream in = new BufferedInputStream(
+            new FileInputStream(new File(source)));
+
+          byte[] b = new byte[1024];
+          int numBytes = 0;
+          while ((numBytes = in.read(b)) > 0) {
+            out.write(b, 0, numBytes);
+          }
+          in.close();
+          out.close();
+          fileSystem.close();
+
+
+          return null;
+        }
+      });
+    } catch (Exception e) {
+      logger.error("Webhdfs exception" , e);
+    }
+
+  }
+
+}

+ 563 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/service/pig/PigJobImpl.java

@@ -0,0 +1,563 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.huetoambarimigration.service.pig;
+
+import java.nio.charset.Charset;
+import java.security.PrivilegedExceptionAction;
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.io.BufferedInputStream;
+import java.io.BufferedReader;
+import java.io.BufferedWriter;
+import java.io.ByteArrayInputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.util.ArrayList;
+import java.util.Calendar;
+import java.util.Date;
+import java.util.GregorianCalendar;
+import java.util.Scanner;
+import java.io.*;
+import java.net.URISyntaxException;
+import java.net.URL;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.log4j.Logger;
+import org.jdom.Attribute;
+import org.jdom.Document;
+import org.jdom.Element;
+import org.jdom.JDOMException;
+import org.jdom.input.SAXBuilder;
+import org.jdom.output.Format;
+import org.jdom.output.XMLOutputter;
+import org.json.JSONArray;
+import org.json.JSONObject;
+
+import org.apache.ambari.view.huetoambarimigration.model.*;
+import org.apache.ambari.view.huetoambarimigration.service.configurationcheck.ConfFileReader;
+
+public class PigJobImpl {
+
+  static final Logger logger = Logger.getLogger(PigJobImpl.class);
+
+  private static String readAll(Reader rd) throws IOException {
+    StringBuilder sb = new StringBuilder();
+    int cp;
+    while ((cp = rd.read()) != -1) {
+      sb.append((char) cp);
+    }
+    return sb.toString();
+  }
+
+  public void wrtitetoalternatesqlfile(String dirname, String content, String instance, int i) throws IOException {
+    Date dNow = new Date();
+    SimpleDateFormat ft = new SimpleDateFormat("YYYY-MM-dd hh:mm:ss");
+    String currentDate = ft.format(dNow);
+    XMLOutputter xmlOutput = new XMLOutputter();
+    xmlOutput.setFormat(Format.getPrettyFormat());
+    File xmlfile = new File(ConfFileReader.getHomeDir() + "RevertChange.xml");
+    if (xmlfile.exists()) {
+      String iteration = Integer.toString(i + 1);
+      SAXBuilder builder = new SAXBuilder();
+      Document doc;
+      try {
+        doc = (Document) builder.build(xmlfile);
+        Element rootNode = doc.getRootElement();
+        Element record = new Element("RevertRecord");
+        record.setAttribute(new Attribute("id", iteration));
+        record.addContent(new Element("datetime").setText(currentDate.toString()));
+        record.addContent(new Element("dirname").setText(dirname));
+        record.addContent(new Element("instance").setText(instance));
+        record.addContent(new Element("query").setText(content));
+        rootNode.addContent(record);
+        xmlOutput.output(doc, new FileWriter(ConfFileReader.getHomeDir() + "RevertChange.xml"));
+      } catch (JDOMException e) {
+
+        logger.error("Jdom Exception: " , e);
+      }
+
+
+    } else {
+      // create
+      try {
+        String iteration = Integer.toString(i + 1);
+        Element revertrecord = new Element("RevertChangePage");
+        Document doc = new Document(revertrecord);
+        doc.setRootElement(revertrecord);
+        Element record = new Element("RevertRecord");
+        record.setAttribute(new Attribute("id", iteration));
+        record.addContent(new Element("datetime").setText(currentDate.toString()));
+        record.addContent(new Element("dirname").setText(dirname));
+        record.addContent(new Element("instance").setText(instance));
+        record.addContent(new Element("query").setText(content));
+        doc.getRootElement().addContent(record);
+        xmlOutput.output(doc, new FileWriter(ConfFileReader.getHomeDir() + "RevertChange.xml"));
+      } catch (IOException io) {
+        logger.error("Jdom Exception: " , io);
+      }
+
+    }
+
+  }
+
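+  /**
+   * Returns the largest ds_id currently stored in DS_PIGJOB_&lt;id&gt;, or 1
+   * when the table is empty.
+   */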
+  public int fetchMaxIdforPigJob(String driverName, Connection c, int id) throws SQLException {
+
+    String ds_id = null;
+    Statement stmt = c.createStatement();
+    ResultSet rs = null;
+
+    if (driverName.contains("postgresql")) {
+      rs = stmt.executeQuery("select MAX(cast(ds_id as integer)) as max from ds_pigjob_" + id + ";");
+    } else if (driverName.contains("mysql")) {
+      rs = stmt.executeQuery("select max( cast(ds_id as unsigned) ) as max from DS_PIGJOB_" + id + ";");
+    } else if (driverName.contains("oracle")) {
+      rs = stmt.executeQuery("select MAX(cast(ds_id as integer)) as max from ds_pigjob_" + id);
+    }
+
+    while (rs.next()) {
+      ds_id = rs.getString("max");
+
+    }
+
+    int num;
+    if (ds_id == null) {
+      num = 1;
+    } else {
+      num = Integer.parseInt(ds_id);
+    }
+
+    return num;
+
+  }
+
+  public int fetchInstanceTablename(String driverName, Connection c, String instance) throws SQLException {
+
+    int id = 0;
+    Statement stmt = c.createStatement();
+    ResultSet rs = null;
+    if (driverName.contains("oracle")) {
+      rs = stmt.executeQuery("select id from viewentity where class_name LIKE 'org.apache.ambari.view.pig.resources.jobs.models.PigJob' and view_instance_name='" + instance + "'");
+    } else {
+      rs = stmt.executeQuery("select id from viewentity where class_name LIKE 'org.apache.ambari.view.pig.resources.jobs.models.PigJob' and view_instance_name='" + instance + "';");
+    }
+    while (rs.next()) {
+      id = rs.getInt("id");
+
+    }
+
+    return id;
+  }
+
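+  /**
+   * Inserts one migrated Pig job into DS_PIGJOB_&lt;id&gt; and records the
+   * matching delete statement in RevertChange.xml.
+   */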
+  public void insertRowPigJob(String driverName, String dirname, int maxcountforpigjob, String time, String time2, long epochtime, String title, Connection c, int id, String status, String instance, int i) throws SQLException, IOException {
+
+    String epochtime1 = Long.toString(epochtime);
+    Statement stmt = c.createStatement();
+    String sql = "";
+    String revsql = "";
+
+    if (driverName.contains("mysql")) {
+      sql = "INSERT INTO DS_PIGJOB_" + id + " values ('" + maxcountforpigjob + "'," + epochtime1 + ",0,'','f','','','admin',0,'" + dirname + "script.pig','','" + maxcountforpigjob + "','','','" + status + "','" + dirname + "','','" + title + "');";
+      revsql = "delete from  DS_PIGJOB_" + id + " where ds_id='" + maxcountforpigjob + "';";
+
+    } else if (driverName.contains("postgresql")) {
+      sql = "INSERT INTO ds_pigjob_" + id + " values ('" + maxcountforpigjob + "'," + epochtime1 + ",0,'','f','','','admin',0,'" + dirname + "script.pig','','" + maxcountforpigjob + "','','','" + status + "','" + dirname + "','','" + title + "');";
+      revsql = "delete from  ds_pigjob_" + id + " where ds_id='" + maxcountforpigjob + "';";
+
+    } else if (driverName.contains("oracle")) {
+      sql = "INSERT INTO ds_pigjob_" + id + " values ('" + maxcountforpigjob + "'," + epochtime1 + ",0,'','f','','','admin',0,'" + dirname + "script.pig','','" + maxcountforpigjob + "','','','" + status + "','" + dirname + "','','" + title + "')";
+      revsql = "delete from  ds_pigjob_" + id + " where ds_id='" + maxcountforpigjob + "'";
+
+    }
+
+    wrtitetoalternatesqlfile(dirname, revsql, instance, i);
+
+    stmt.executeUpdate(sql);
+
+  }
+
+  public long getEpochTime() throws ParseException {
+    // current time in milliseconds since the epoch
+    return System.currentTimeMillis();
+  }
+
+  public String getTime() throws ParseException {
+    GregorianCalendar date = new GregorianCalendar();
+
+    int day = date.get(Calendar.DAY_OF_MONTH);
+    int month = date.get(Calendar.MONTH);
+    int year = date.get(Calendar.YEAR);
+    int minute = date.get(Calendar.MINUTE);
+    int hour = date.get(Calendar.HOUR);
+
+    return year + "-" + (month + 1) + "-" + day + "_" + hour + "-" + minute;
+  }
+
+  public String getTimeInorder() throws ParseException {
+    SimpleDateFormat sdfDate = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSSSSS +00:00:00");
+    Date now = new Date();
+    return sdfDate.format(now);
+  }
+
+  public ArrayList<PojoPig> fetchFromHueDB(String username, String startdate, String endtime, Connection connection) throws ClassNotFoundException, IOException {
+    int id = 0;
+    ArrayList<PojoPig> pigjobarraylist = new ArrayList<PojoPig>();
+    try {
+      Statement statement = connection.createStatement();
+      ResultSet rs1 = null;
+      if (!username.equals("all")) {
+        ResultSet rs = statement
+          .executeQuery("select id from auth_user where username='"
+            + username + "';");
+        while (rs.next()) {
+          id = rs.getInt("id");
+        }
+      }
+
+      if (startdate.equals("") && endtime.equals("")) {
+        if (username.equals("all")) {
+
+          rs1 = statement.executeQuery("select status,start_time,statusdir,script_title,user_id from pig_job;");
+
+        } else {
+
+          rs1 = statement.executeQuery("select status,start_time,statusdir,script_title,user_id from pig_job where user_id =" + id + ";");
+        }
+
+      } else if (!(startdate.equals("")) && !(endtime.equals(""))) {
+        if (username.equals("all")) {
+
+          rs1 = statement.executeQuery("select status,start_time,statusdir,script_title,user_id from pig_job where start_time >= date('" + startdate + "') AND start_time <= date('" + endtime + "');");
+        } else {
+
+          rs1 = statement.executeQuery("select status,start_time,statusdir,script_title,user_id from pig_job where user_id =" + id + " AND start_time >= date('" + startdate + "') AND start_time <= date('" + endtime + "');");
+        }
+
+      } else if (!(startdate.equals("")) && (endtime.equals(""))) {
+        if (username.equals("all")) {
+
+          rs1 = statement.executeQuery("select status,start_time,statusdir,script_title,user_id from pig_job where start_time >= date('" + startdate + "');");
+        } else {
+
+          rs1 = statement.executeQuery("select status,start_time,statusdir,script_title,user_id from pig_job where user_id =" + id + " AND start_time >= date('" + startdate + "');");
+        }
+
+      } else if ((startdate.equals("")) && !(endtime.equals(""))) {
+        if (username.equals("all")) {
+
+          rs1 = statement.executeQuery("select status,start_time,statusdir,script_title,user_id from pig_job where start_time <= date('" + endtime + "');");
+        } else {
+
+          rs1 = statement.executeQuery("select status,start_time,statusdir,script_title,user_id from pig_job where user_id =" + id + " AND start_time <= date('" + endtime + "');");
+        }
+
+      }
+
+      while (rs1.next()) {
+        PojoPig pigjjobobject = new PojoPig();
+
+        int runstatus = rs1.getInt("status");
+
+        if (runstatus == 1) {
+          pigjjobobject.setStatus("RUNNING");
+        } else if (runstatus == 2) {
+          pigjjobobject.setStatus("SUCCEEDED");
+        } else if (runstatus == 3) {
+          pigjjobobject.setStatus("SUBMIT_FAILED");
+        } else if (runstatus == 4) {
+          pigjjobobject.setStatus("KILLED");
+        }
+        String title = rs1.getString("script_title");
+
+
+        pigjjobobject.setTitle(title);
+        String dir = rs1.getString("statusdir");
+        pigjjobobject.setDir(dir);
+        Date created_data = rs1.getDate("start_time");
+        pigjjobobject.setDt(created_data);
+
+        pigjobarraylist.add(pigjjobobject);
+
+      }
+
+
+    } catch (SQLException e) {
+      logger.error("Sqlexception: " , e);
+    } finally {
+      try {
+        if (connection != null)
+          connection.close();
+      } catch (SQLException e) {
+        logger.error("Sqlexception in closing the connection: " , e);
+
+      }
+    }
+
+    return pigjobarraylist;
+
+  }
+
+  public void createDirPigJob(final String dir, final String namenodeuri) throws IOException,
+    URISyntaxException {
+
+    try {
+      UserGroupInformation ugi = UserGroupInformation
+        .createRemoteUser("hdfs");
+
+      ugi.doAs(new PrivilegedExceptionAction<Void>() {
+
+        public Void run() throws Exception {
+
+          Configuration conf = new Configuration();
+          conf.set("fs.hdfs.impl",
+            org.apache.hadoop.hdfs.DistributedFileSystem.class.getName()
+          );
+          conf.set("fs.file.impl",
+            org.apache.hadoop.fs.LocalFileSystem.class.getName()
+          );
+          conf.set("fs.defaultFS", namenodeuri);
+          conf.set("hadoop.job.ugi", "hdfs");
+
+          FileSystem fs = FileSystem.get(conf);
+          Path src = new Path(dir);
+          fs.mkdirs(src);
+          return null;
+        }
+      });
+    } catch (Exception e) {
+      logger.error("Webhdfs exception: " , e);
+    }
+  }
+
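+  /**
+   * Creates the given directory in HDFS as the "hdfs" user on a
+   * Kerberos-secured cluster.
+   */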
+  public void createDirPigJobSecured(final String dir, final String namenodeuri) throws IOException,
+    URISyntaxException {
+
+    try {
+      final Configuration conf = new Configuration();
+
+      conf.set("fs.hdfs.impl",
+        org.apache.hadoop.hdfs.DistributedFileSystem.class.getName()
+      );
+      conf.set("fs.file.impl",
+        org.apache.hadoop.fs.LocalFileSystem.class.getName()
+      );
+      conf.set("fs.defaultFS", namenodeuri);
+      conf.set("hadoop.job.ugi", "hdfs");
+      conf.set("hadoop.security.authentication", "Kerberos");
+
+      UserGroupInformation.setConfiguration(conf);
+      UserGroupInformation ugi = UserGroupInformation.createRemoteUser("hdfs");
+
+      ugi.doAs(new PrivilegedExceptionAction<Void>() {
+
+        public Void run() throws Exception {
+
+
+          FileSystem fs = FileSystem.get(conf);
+          Path src = new Path(dir);
+          fs.mkdirs(src);
+          return null;
+        }
+      });
+    } catch (Exception e) {
+      logger.error("Webhdfs exception: " , e);
+    }
+  }
+
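+  /**
+   * Streams a file from the Hue cluster's HDFS into the Ambari cluster's
+   * HDFS under dest/&lt;filename&gt;.
+   */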
+  public void copyFileBetweenHdfs(final String source, final String dest, final String nameNodeuriAmbari, final String nameNodeuriHue)
+    throws IOException {
+
+    try {
+      UserGroupInformation ugi = UserGroupInformation
+        .createRemoteUser("hdfs");
+
+      ugi.doAs(new PrivilegedExceptionAction<Void>() {
+
+        public Void run() throws Exception {
+
+          Configuration confAmbari = new Configuration();
+          confAmbari.set("fs.defaultFS", nameNodeuriAmbari);
+          confAmbari.set("hadoop.job.ugi", "hdfs");
+          FileSystem fileSystemAmbari = FileSystem.get(confAmbari);
+
+          Configuration confHue = new Configuration();
+          confHue.set("fs.defaultFS", nameNodeuriAmbari);
+          confHue.set("hadoop.job.ugi", "hdfs");
+          FileSystem fileSystemHue = FileSystem.get(confHue);
+
+          String filename = source.substring(
+            source.lastIndexOf('/') + 1, source.length());
+          String dest1;
+          if (dest.charAt(dest.length() - 1) != '/') {
+            dest1 = dest + "/" + filename;
+          } else {
+            dest1 = dest + filename;
+          }
+
+          Path path1 = new Path(source);
+          FSDataInputStream in1 = fileSystemHue.open(path1);
+
+          Path path = new Path(dest1);
+          // an existing file at the destination is overwritten by create()
+
+          FSDataOutputStream out = fileSystemAmbari.create(path);
+
+          byte[] b = new byte[1024];
+          int numBytes = 0;
+          while ((numBytes = in1.read(b)) > 0) {
+            out.write(b, 0, numBytes);
+          }
+          in1.close();
+          out.close();
+          fileSystemAmbari.close();
+          return null;
+        }
+      });
+    } catch (Exception e) {
+      logger.error("Webhdfs exception: " , e);
+    }
+
+  }
+
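+  /**
+   * Kerberos-secured variant of copyFileBetweenHdfs.
+   */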
+  public void copyFileBetweenHdfsSecured(final String source, final String dest, final String nameNodeuriAmbari, final String nameNodeuriHue)
+    throws IOException {
+
+    try {
+
+      final Configuration confAmbari = new Configuration();
+      confAmbari.set("fs.defaultFS", nameNodeuriAmbari);
+      confAmbari.set("hadoop.job.ugi", "hdfs");
+
+      final Configuration confHue = new Configuration();
+      confHue.set("fs.defaultFS", nameNodeuriAmbari);
+      confHue.set("hadoop.job.ugi", "hdfs");
+
+      confAmbari.set("hadoop.security.authentication", "Kerberos");
+      confHue.set("hadoop.security.authentication", "Kerberos");
+
+      UserGroupInformation ugi = UserGroupInformation
+        .createRemoteUser("hdfs");
+
+      ugi.doAs(new PrivilegedExceptionAction<Void>() {
+
+        public Void run() throws Exception {
+
+
+          FileSystem fileSystemAmbari = FileSystem.get(confAmbari);
+
+          FileSystem fileSystemHue = FileSystem.get(confHue);
+
+          String filename = source.substring(
+            source.lastIndexOf('/') + 1, source.length());
+          String dest1;
+          if (dest.charAt(dest.length() - 1) != '/') {
+            dest1 = dest + "/" + filename;
+          } else {
+            dest1 = dest + filename;
+          }
+
+          Path path1 = new Path(source);
+          FSDataInputStream in1 = fileSystemHue.open(path1);
+
+          Path path = new Path(dest1);
+          // an existing file at the destination is overwritten by create()
+          FSDataOutputStream out = fileSystemAmbari.create(path);
+          byte[] b = new byte[1024];
+          int numBytes = 0;
+          while ((numBytes = in1.read(b)) > 0) {
+            out.write(b, 0, numBytes);
+          }
+          in1.close();
+          out.close();
+          fileSystemAmbari.close();
+          return null;
+        }
+      });
+    } catch (Exception e) {
+      logger.error("Webhdfs exception: " , e);
+    }
+
+  }
+
+}

+ 600 - 0
contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/service/pig/PigScriptImpl.java

@@ -0,0 +1,600 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package org.apache.ambari.view.huetoambarimigration.service.pig;
+
+import java.nio.charset.Charset;
+import java.security.PrivilegedExceptionAction;
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.io.BufferedInputStream;
+import java.io.BufferedReader;
+import java.io.BufferedWriter;
+import java.io.ByteArrayInputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.util.ArrayList;
+import java.util.Calendar;
+import java.util.Date;
+import java.util.GregorianCalendar;
+import java.util.Scanner;
+import java.io.*;
+import java.net.URISyntaxException;
+import java.net.URL;
+
+import org.apache.ambari.view.huetoambarimigration.service.configurationcheck.ConfFileReader;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.log4j.Logger;
+import org.jdom.Attribute;
+import org.jdom.Document;
+import org.jdom.Element;
+import org.jdom.JDOMException;
+import org.jdom.input.SAXBuilder;
+import org.jdom.output.Format;
+import org.jdom.output.XMLOutputter;
+import org.json.JSONArray;
+import org.json.JSONObject;
+
+import org.apache.ambari.view.huetoambarimigration.model.*;
+
+
+public class PigScriptImpl {
+
+  static final Logger logger = Logger.getLogger(PigScriptImpl.class);
+
+  private static String readAll(Reader rd) throws IOException {
+    StringBuilder sb = new StringBuilder();
+    int cp;
+    while ((cp = rd.read()) != -1) {
+      sb.append((char) cp);
+    }
+    return sb.toString();
+  }
+
+  public void wrtitetoalternatesqlfile(String dirname, String content, String instance, int i) throws IOException {
+
+    Date dNow = new Date();
+    SimpleDateFormat ft = new SimpleDateFormat("YYYY-MM-dd hh:mm:ss");
+    String currentDate = ft.format(dNow);
+
+    XMLOutputter xmlOutput = new XMLOutputter();
+
+    xmlOutput.setFormat(Format.getPrettyFormat());
+
+    File xmlfile = new File(ConfFileReader.getHomeDir() + "RevertChange.xml");
+
+    if (xmlfile.exists()) {
+      String iteration = Integer.toString(i + 1);
+      SAXBuilder builder = new SAXBuilder();
+      Document doc;
+      try {
+        doc = (Document) builder.build(xmlfile);
+
+        Element rootNode = doc.getRootElement();
+
+        Element record = new Element("RevertRecord");
+        record.setAttribute(new Attribute("id", iteration));
+        record.addContent(new Element("datetime").setText(currentDate.toString()));
+        record.addContent(new Element("dirname").setText(dirname));
+        record.addContent(new Element("instance").setText(instance));
+        record.addContent(new Element("query").setText(content));
+
+        rootNode.addContent(record);
+        xmlOutput.output(doc, new FileWriter(ConfFileReader.getHomeDir() + "RevertChange.xml"));
+
+      } catch (JDOMException e) {
+        logger.error("JDOMException: " , e);
+      }
+
+
+    } else {
+      // create
+      try {
+        String iteration = Integer.toString(i + 1);
+        Element revertrecord = new Element("RevertChangePage");
+        Document doc = new Document(revertrecord);
+        doc.setRootElement(revertrecord);
+
+        Element record = new Element("RevertRecord");
+        record.setAttribute(new Attribute("id", iteration));
+        record.addContent(new Element("datetime").setText(currentDate.toString()));
+        record.addContent(new Element("dirname").setText(dirname));
+        record.addContent(new Element("instance").setText(instance));
+        record.addContent(new Element("query").setText(content));
+
+        doc.getRootElement().addContent(record);
+
+        xmlOutput.output(doc, new FileWriter(ConfFileReader.getHomeDir() + "RevertChange.xml"));
+
+      } catch (IOException io) {
+        logger.error("IOException: " , io);
+
+      }
+
+    }
+
+
+  }
+
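+  /**
+   * Looks up the id of the PigScript view entity for the given view
+   * instance; this id is the numeric suffix of the DS_PIGSCRIPT_&lt;id&gt; table.
+   */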
+  public int fetchInstanceTablenamePigScript(String driverName, Connection c, String instance) throws SQLException {
+
+    int id = 0;
+    Statement stmt = c.createStatement();
+    ResultSet rs = null;
+
+    if (driverName.contains("oracle")) {
+      rs = stmt.executeQuery("select id from viewentity where class_name LIKE 'org.apache.ambari.view.pig.resources.scripts.models.PigScript' and view_instance_name='" + instance + "'");
+    } else {
+      rs = stmt.executeQuery("select id from viewentity where class_name LIKE 'org.apache.ambari.view.pig.resources.scripts.models.PigScript' and view_instance_name='" + instance + "';");
+    }
+
+    while (rs.next()) {
+      id = rs.getInt("id");
+
+    }
+
+    return id;
+
+  }
+
+  public int fetchmaxIdforPigSavedScript(String driverName, Connection c, int id) throws SQLException {
+
+    String ds_id = null;
+    Statement stmt = c.createStatement();
+    ResultSet rs = null;
+
+    if (driverName.contains("postgresql")) {
+      rs = stmt.executeQuery("select MAX(cast(ds_id as integer)) as max from ds_pigscript_" + id + ";");
+    } else if (driverName.contains("mysql")) {
+      rs = stmt.executeQuery("select max( cast(ds_id as unsigned) ) as max from DS_PIGSCRIPT_" + id + ";");
+    } else if (driverName.contains("oracle")) {
+      rs = stmt.executeQuery("select MAX(cast(ds_id as integer)) as max from ds_pigscript_" + id + "");
+    }
+
+    while (rs.next()) {
+      ds_id = rs.getString("max");
+    }
+
+    int num;
+    if (ds_id == null) {
+      num = 0;
+    } else {
+      num = Integer.parseInt(ds_id);
+    }
+
+    return num;
+  }
+
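+  /**
+   * Inserts one migrated Pig script into DS_PIGSCRIPT_&lt;id&gt; and records the
+   * matching delete statement in RevertChange.xml.
+   */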
+  public void insertRowForPigScript(String driverName, String dirname, int maxcountforpigjob, int maxcount, String time, String time2, long epochtime, String title, Connection c, int id, String instance, int i) throws SQLException, IOException {
+
+    String maxcount1 = Integer.toString(maxcount);
+    Statement stmt = c.createStatement();
+    String sql2 = "";
+    String revsql = "";
+
+    if (driverName.contains("mysql")) {
+      sql2 = "INSERT INTO DS_PIGSCRIPT_" + id + " values ('" + maxcount1 + "','1970-01-17 20:28:55.586000 +00:00:00',0,'admin','" + dirname + "','','','" + title + "');";
+      revsql = "delete from  DS_PIGSCRIPT_" + id + " where ds_id='" + maxcount1 + "';";
+
+    } else if (driverName.contains("postgresql")) {
+      sql2 = "INSERT INTO ds_pigscript_" + id + " values ('" + maxcount1 + "','1970-01-17 20:28:55.586000 +00:00:00','f','admin','" + dirname + "','','','" + title + "');";
+      revsql = "delete from  ds_pigscript_" + id + " where ds_id='" + maxcount1 + "';";
+
+    } else if (driverName.contains("oracle")) {
+      sql2 = "INSERT INTO ds_pigscript_" + id + " values ('" + maxcount1 + "','1970-01-17 20:28:55.586000 +00:00:00','f','admin','" + dirname + "','','','" + title + "')";
+      revsql = "delete from  ds_pigscript_" + id + " where ds_id='" + maxcount1 + "'";
+
+    }
+
+    stmt.executeUpdate(sql2);
+
+    wrtitetoalternatesqlfile(dirname, revsql, instance, i);
+
+  }
+
+
+  public long getEpochTime() throws ParseException {
+    // current time in milliseconds since the epoch
+    return System.currentTimeMillis();
+  }
+
+
+  public String getTime() throws ParseException {
+    GregorianCalendar date = new GregorianCalendar();
+
+    int day = date.get(Calendar.DAY_OF_MONTH);
+    int month = date.get(Calendar.MONTH);
+    int year = date.get(Calendar.YEAR);
+    int minute = date.get(Calendar.MINUTE);
+    int hour = date.get(Calendar.HOUR);
+
+    return year + "-" + (month + 1) + "-" + day + "_" + hour + "-" + minute;
+  }
+
+
+  public String getTimeInorder() throws ParseException {
+    SimpleDateFormat sdfDate = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSSSSS +00:00:00");
+    Date now = new Date();
+    return sdfDate.format(now);
+  }
+
+
+  public ArrayList<PojoPig> fetchFromHueDatabase(String username, String startdate, String endtime, Connection connection, String driverName) throws ClassNotFoundException, IOException {
+    int id = 0;
+    ResultSet rs1 = null;
+    ArrayList<PojoPig> pigArrayList = new ArrayList<PojoPig>();
+    try {
+      Statement statement = connection.createStatement();
+
+      if (!username.equals("all")) {
+        ResultSet rs = statement
+          .executeQuery("select id from auth_user where username='"
+            + username + "';");
+        while (rs.next()) {
+          id = rs.getInt("id");
+        }
+      }
+
+
+      if (startdate.equals("") && endtime.equals("")) {
+        if (username.equals("all")) {
+          if (driverName.contains("postgresql")) {
+
+            rs1 = statement.executeQuery("select pig_script,title,date_created,saved,arguments from pig_pigscript where saved=true;");
+
+          } else {
+
+            rs1 = statement.executeQuery("select pig_script,title,date_created,saved,arguments from pig_pigscript where saved=1;");
+          }
+
+        } else {
+
+          if (driverName.contains("postgresql")) {
+
+            rs1 = statement.executeQuery("select pig_script,title,date_created,saved,arguments from pig_pigscript where saved='true' AND user_id =" + id + ";");
+
+          } else {
+
+            rs1 = statement.executeQuery("select pig_script,title,date_created,saved,arguments from pig_pigscript where saved=1 AND user_id =" + id + ";");
+          }
+
+        }
+
+      } else if (!(startdate.equals("")) && !(endtime.equals(""))) {
+        if (username.equals("all")) {
+          if (driverName.contains("postgresql")) {
+
+            rs1 = statement.executeQuery("select pig_script,title,date_created,saved,arguments from pig_pigscript where saved='true' AND date_created >= date('" + startdate + "') AND date_created <= date('" + endtime + "');");
+
+          } else {
+
+            rs1 = statement.executeQuery("select pig_script,title,date_created,saved,arguments from pig_pigscript where saved=1 AND date_created >= date('" + startdate + "') AND date_created <= date('" + endtime + "');");
+          }
+
+        } else {
+          if (driverName.contains("postgresql")) {
+
+            rs1 = statement.executeQuery("select pig_script,title,date_created,saved,arguments from pig_pigscript where saved='true' AND user_id =" + id + " AND date_created >= date('" + startdate + "') AND date_created <= date('" + endtime + "');");
+
+          } else {
+
+            rs1 = statement.executeQuery("select pig_script,title,date_created,saved,arguments from pig_pigscript where saved=1 AND user_id =" + id + " AND date_created >= date('" + startdate + "') AND date_created <= date('" + endtime + "');");
+          }
+
+        }
+
+      } else if (!(startdate.equals("")) && (endtime.equals(""))) {
+        if (username.equals("all")) {
+          if (driverName.contains("postgresql")) {
+
+            rs1 = statement.executeQuery("select pig_script,title,date_created,saved,arguments from pig_pigscript where saved='true' AND date_created >= date('" + startdate + "');");
+
+          } else {
+
+            rs1 = statement.executeQuery("select pig_script,title,date_created,saved,arguments from pig_pigscript where saved=1 AND date_created >= date('" + startdate + "');");
+          }
+
+        } else {
+          if (driverName.contains("postgresql")) {
+
+            rs1 = statement.executeQuery("select pig_script,title,date_created,saved,arguments from pig_pigscript where saved='true' AND user_id =" + id + " AND date_created >= date('" + startdate + "');");
+
+          } else {
+
+            rs1 = statement.executeQuery("select pig_script,title,date_created,saved,arguments from pig_pigscript where saved=1 AND user_id =" + id + " AND date_created >= date('" + startdate + "');");
+          }
+
+        }
+
+      } else if ((startdate.equals("")) && !(endtime.equals(""))) {
+        if (username.equals("all")) {
+          if (driverName.contains("postgresql")) {
+
+            rs1 = statement.executeQuery("select pig_script,title,date_created,saved,arguments from pig_pigscript where saved='true' AND date_created <= date('" + endtime + "');");
+
+          } else {
+
+            rs1 = statement.executeQuery("select pig_script,title,date_created,saved,arguments from pig_pigscript where saved=1 AND date_created <= date('" + endtime + "');");
+          }
+
+        } else {
+          if (driverName.contains("postgresql")) {
+
+            rs1 = statement.executeQuery("select pig_script,title,date_created,saved,arguments from pig_pigscript where saved='true' AND user_id =" + id + " AND date_created <= date('" + endtime + "');");
+
+          } else {
+
+            rs1 = statement.executeQuery("select pig_script,title,date_created,saved,arguments from pig_pigscript where saved=1 AND user_id =" + id + " AND date_created <= date('" + endtime + "');");
+          }
+
+        }
+
+      }
+      while (rs1.next()) {
+        PojoPig pojopig = new PojoPig();
+
+        String script = rs1.getString("pig_script");
+        String title = rs1.getString("title");
+        Date created_data = rs1.getDate("date_created");
+        pojopig.setDt(created_data);
+        pojopig.setScript(script);
+        pojopig.setTitle(title);
+
+        pigArrayList.add(pojopig);
+      }
+
+
+    } catch (SQLException e) {
+      logger.error("SQLException" , e);
+    } finally {
+      try {
+        if (connection != null)
+          connection.close();
+      } catch (SQLException e) {
+        logger.error("SQLException" , e);
+      }
+    }
+
+    return pigArrayList;
+
+  }
+
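+  /**
+   * Writes the script body to a temporary local file (homedir + filename2)
+   * so it can then be copied into HDFS.
+   */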
+  public void writetPigScripttoLocalFile(String script, String title, Date createddate, String homedir, String filename2) {
+    try {
+      logger.info(homedir + filename2);
+      File file = new File(homedir + filename2);
+
+      if (!file.exists()) {
+        file.createNewFile();
+      }
+
+      FileWriter fw = new FileWriter(file.getAbsoluteFile());
+      BufferedWriter bw = new BufferedWriter(fw);
+      bw.write(script);
+      bw.close();
+
+
+    } catch (IOException e) {
+
+      logger.error("IOException" , e);
+    }
+
+  }
+
+  public void deletePigScriptLocalFile(String homedir, String filename2) {
+    try {
+      File file = new File(homedir + filename2);
+      if (file.delete()) {
+        logger.info("Temporary file deleted");
+      } else {
+        logger.info("Temporary file delete failed");
+      }
+    } catch (Exception e) {
+      logger.error("File Exception: ", e);
+    }
+  }
+
+  public void putFileinHdfs(final String source, final String dest, final String namenodeuri)
+    throws IOException {
+
+    try {
+      UserGroupInformation ugi = UserGroupInformation
+        .createRemoteUser("hdfs");
+
+      ugi.doAs(new PrivilegedExceptionAction<Void>() {
+
+        public Void run() throws Exception {
+
+          Configuration conf = new Configuration();
+          conf.set("fs.hdfs.impl",
+            org.apache.hadoop.hdfs.DistributedFileSystem.class.getName()
+          );
+          conf.set("fs.file.impl",
+            org.apache.hadoop.fs.LocalFileSystem.class.getName()
+          );
+          conf.set("fs.defaultFS", namenodeuri);
+          conf.set("hadoop.job.ugi", "hdfs");
+          FileSystem fileSystem = FileSystem.get(conf);
+
+          String filename = source.substring(
+            source.lastIndexOf('/') + 1, source.length());
+          String dest1;
+          if (dest.charAt(dest.length() - 1) != '/') {
+            dest1 = dest + "/" + filename;
+          } else {
+            dest1 = dest + filename;
+          }
+
+          Path path = new Path(dest1);
+          // an existing file at the destination is overwritten by create()
+          FSDataOutputStream out = fileSystem.create(path);
+
+          InputStream in = new BufferedInputStream(
+            new FileInputStream(new File(source)));
+
+          byte[] b = new byte[1024];
+          int numBytes = 0;
+          while ((numBytes = in.read(b)) > 0) {
+            out.write(b, 0, numBytes);
+          }
+          in.close();
+          out.close();
+          fileSystem.close();
+          return null;
+        }
+      });
+    } catch (Exception e) {
+      logger.error("Webhdfs Exception: " , e);
+    }
+
+  }
+
+  public void putFileinHdfsSecured(final String source, final String dest, final String namenodeuri)
+    throws IOException {
+
+    try {
+      final Configuration conf = new Configuration();
+
+      conf.set("fs.hdfs.impl",
+        org.apache.hadoop.hdfs.DistributedFileSystem.class.getName()
+      );
+      conf.set("fs.file.impl",
+        org.apache.hadoop.fs.LocalFileSystem.class.getName()
+      );
+      conf.set("fs.defaultFS", namenodeuri);
+      conf.set("hadoop.job.ugi", "hdfs");
+      conf.set("hadoop.security.authentication", "Kerberos");
+
+      UserGroupInformation.setConfiguration(conf);
+      UserGroupInformation ugi = UserGroupInformation.createRemoteUser("hdfs");
+
+      ugi.doAs(new PrivilegedExceptionAction<Void>() {
+
+        public Void run() throws Exception {
+
+          FileSystem fileSystem = FileSystem.get(conf);
+
+          String filename = source.substring(
+            source.lastIndexOf('/') + 1, source.length());
+          String dest1;
+          if (dest.charAt(dest.length() - 1) != '/') {
+            dest1 = dest + "/" + filename;
+          } else {
+            dest1 = dest + filename;
+          }
+
+          Path path = new Path(dest1);
+          // an existing file at the destination is overwritten by create()
+          FSDataOutputStream out = fileSystem.create(path);
+
+          InputStream in = new BufferedInputStream(
+            new FileInputStream(new File(source)));
+
+          byte[] b = new byte[1024];
+          int numBytes = 0;
+          while ((numBytes = in.read(b)) > 0) {
+            out.write(b, 0, numBytes);
+          }
+          in.close();
+          out.close();
+          fileSystem.close();
+          return null;
+        }
+      });
+    } catch (Exception e) {
+      logger.error("Webhdfs Exception: " , e);
+
+    }
+
+  }
+}

+ 123 - 0
contrib/views/hueambarimigration/src/main/resources/WEB-INF/web.xml

@@ -0,0 +1,123 @@
+<?xml version="1.0" encoding="ISO-8859-1" ?>
+
+<!--
+Licensed to the Apache Software Foundation (ASF) under one or more
+contributor license agreements. See the NOTICE file distributed with
+this work for additional information regarding copyright ownership.
+The ASF licenses this file to You under the Apache License, Version 2.0
+(the "License"); you may not use this file except in compliance with
+the License. You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+-->
+
+<web-app xmlns="http://java.sun.com/xml/ns/j2ee"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://java.sun.com/xml/ns/j2ee http://java.sun.com/xml/ns/j2ee/web-app_2_4.xsd"
+         version="2.4">
+
+  <display-name>Hue to Ambari Migration</display-name>
+  <welcome-file-list>
+    <welcome-file>index.jsp</welcome-file>
+  </welcome-file-list>
+  <description>
+    Web application for the Hue to Ambari migration view.
+  </description>
+
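+  <!-- Each migration task (Hive history, Hive saved queries, Pig scripts,
+       Pig jobs, configuration check, revert) is a dedicated servlet, mapped
+       below to a URL pattern of the same name. -->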
+  <servlet>
+    <servlet-name>HiveHistory</servlet-name>
+    <servlet-class>org.apache.ambari.view.huetoambarimigration.controller.hive.HiveHistoryMigration</servlet-class>
+    <load-on-startup>1</load-on-startup>
+  </servlet>
+
+  <servlet>
+    <description>
+    </description>
+    <display-name>
+      SavedQuery
+    </display-name>
+    <servlet-name>SavedQuery</servlet-name>
+    <servlet-class>org.apache.ambari.view.huetoambarimigration.controller.hive.HiveSavedQueryMigration</servlet-class>
+  </servlet>
+
+  <servlet>
+    <description>
+    </description>
+    <display-name>PigServlet</display-name>
+    <servlet-name>PigServlet</servlet-name>
+    <servlet-class>org.apache.ambari.view.huetoambarimigration.controller.pig.PigScriptMigration</servlet-class>
+  </servlet>
+
+  <servlet>
+    <description>
+    </description>
+    <display-name>Configuration_check</display-name>
+    <servlet-name>Configuration_check</servlet-name>
+    <servlet-class>org.apache.ambari.view.huetoambarimigration.controller.configurationcheck.ConfigurationCheck
+    </servlet-class>
+  </servlet>
+
+  <servlet>
+    <description>
+    </description>
+    <display-name>PigJobServlet</display-name>
+    <servlet-name>PigJobServlet</servlet-name>
+    <servlet-class>org.apache.ambari.view.huetoambarimigration.controller.pig.PigJobMigration</servlet-class>
+  </servlet>
+
+  <servlet-mapping>
+    <servlet-name>HiveHistory</servlet-name>
+    <url-pattern>/HiveHistory</url-pattern>
+  </servlet-mapping>
+
+  <servlet-mapping>
+    <servlet-name>SavedQuery</servlet-name>
+    <url-pattern>/SavedQuery</url-pattern>
+  </servlet-mapping>
+
+  <servlet-mapping>
+    <servlet-name>PigServlet</servlet-name>
+    <url-pattern>/PigServlet</url-pattern>
+  </servlet-mapping>
+
+  <servlet-mapping>
+    <servlet-name>Configuration_check</servlet-name>
+    <url-pattern>/Configuration_check</url-pattern>
+  </servlet-mapping>
+
+  <servlet-mapping>
+    <servlet-name>PigJobServlet</servlet-name>
+    <url-pattern>/PigJobServlet</url-pattern>
+  </servlet-mapping>
+
+  <servlet>
+    <description></description>
+    <display-name>RevertChange</display-name>
+    <servlet-name>RevertChange</servlet-name>
+    <servlet-class>org.apache.ambari.view.huetoambarimigration.controller.revertchange.RevertChange</servlet-class>
+  </servlet>
+
+  <servlet-mapping>
+    <servlet-name>RevertChange</servlet-name>
+    <url-pattern>/RevertChange</url-pattern>
+  </servlet-mapping>
+
+  <servlet>
+    <description></description>
+    <display-name>ProgressBarStatus</display-name>
+    <servlet-name>ProgressBarStatus</servlet-name>
+    <servlet-class>org.apache.ambari.view.huetoambarimigration.controller.configurationcheck.ProgressBarStatus
+    </servlet-class>
+  </servlet>
+  <servlet-mapping>
+    <servlet-name>ProgressBarStatus</servlet-name>
+    <url-pattern>/ProgressBarStatus</url-pattern>
+  </servlet-mapping>
+
+</web-app>

BIN
contrib/views/hueambarimigration/src/main/resources/image/updateimg.gif


+ 119 - 0
contrib/views/hueambarimigration/src/main/resources/index.jsp

@@ -0,0 +1,119 @@
+<!--
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+-->
+
+<html>
+<head>
+<title>Hue to Ambari Migration</title>
+<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
+<!-- Bootstrap CSS and JavaScript used across the view pages -->
+
+<link rel="stylesheet" href="css/bootstrap.css">
+
+<script src="js/jquery.js"></script>
+<script src="js/bootstrap.min.js"></script>
+
+<script type="text/javascript">
+	$(function() {
+		home();
+	});
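+	// Each handler below marks its tab active and loads the corresponding
+	// JSP fragment into the #maincenter11 container.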
+	function makeTabActive(tab) {
+		if (!tab) {
+			return;
+		}
+		$(".nav-tab").removeClass('active');
+		$(tab).parents('.nav-tab').addClass('active');
+	}
+	function loadconfiguration(tab) {
+		makeTabActive(tab);
+		$('#maincenter11').load('ui/checkconfiguration.jsp');
+	}
+	function revertchange(tab) {
+		makeTabActive(tab);
+		$('#maincenter11').load('ui/revertchange.jsp');
+	}
+	function home(tab) {
+		makeTabActive(tab);
+		$('#maincenter11').load('ui/homepage.jsp');
+	}
+	function loadhivehistory(tab) {
+		makeTabActive(tab);
+		$('#maincenter11').load('ui/hivehistoryquerymigration.jsp');
+	}
+	function loadpigscript(tab) {
+		makeTabActive(tab);
+		$('#maincenter11').load('ui/pigscriptsmigration.jsp');
+	}
+	function loadpigjobs(tab) {
+		makeTabActive(tab);
+		$('#maincenter11').load('ui/pigjobmigration.jsp');
+	}
+	function loadhivesaved(tab) {
+		makeTabActive(tab);
+		$('#maincenter11').load('ui/hivesavedquerymigration.jsp');
+	}
+</script>
+
+
+</head>
+
+<div class="container">
+	<!-- <div class="jumbotron" style="margin:10px">
+    <h1>Hue to Ambari Migration</h1>        
+  </div> -->
+
+
+
+<div class="row">
+	<nav class="navbar navbar-default">
+		<div class="container-fluid">
+			<ul class="nav navbar-nav">
+				<li class="nav-tab active"><a onclick="home(this)">Home</a></li>
+				<li class="nav-tab"><a onclick="loadconfiguration(this)">Check
+						configuration</a></li>
+				<li class="dropdown nav-tab"><a class="dropdown-toggle"
+					data-toggle="dropdown" href="#">Hive <span class="caret"></span></a>
+					<ul class="dropdown-menu">
+						<li><span onclick="loadhivesaved(this)">HiveSaved Query</span></li>
+						<li><span onclick="loadhivehistory(this)">HiveHistory</span></li>
+					</ul></li>
+				<li class="dropdown nav-tab"><a class="dropdown-toggle"
+					data-toggle="dropdown" href="#">Pig <span class="caret"></span></a>
+					<ul class="dropdown-menu">
+						<li><span onclick="loadpigscript(this)">Pigsavedscript</span></li>
+						<li><span onclick="loadpigjobs(this)">Pigjobs</span></li>
+					</ul></li>
+				<li class="nav-tab"><a onclick="revertchange(this)">Revert
+						the changes Page</a></li>
+			</ul>
+		</div>
+	</nav>
+</div>
+<div>
+	<div class="col-lg-2 main"></div>
+	<div class="col-lg-8 main">
+		<div id="maincenter11"></div>
+	</div>
+</div>
+</div>
+</body>
+</html>

+ 33 - 0
contrib/views/hueambarimigration/src/main/resources/ui/.gitignore

@@ -0,0 +1,33 @@
+# See http://help.github.com/ignore-files/ for more about ignoring files.
+
+# dependencies
+/node_modules
+/bower_components
+node/
+
+# misc
+
+/.idea
+
+# Numerous always-ignore extensions
+*.diff
+*.err
+*.orig
+*.log
+*.rej
+*.swo
+*.swp
+*.vi
+*~
+*.sass-cache
+
+# OS or Editor folders
+.DS_Store
+.cache
+.project
+.settings
+.tmproj
+dist
+nbproject
+Thumbs.db
+

+ 15 - 0
contrib/views/hueambarimigration/src/main/resources/ui/bower.json

@@ -0,0 +1,15 @@
+{
+  "name": "huetoambari",
+  "homepage": "https://github.com/apache/ambari",
+  "authors": [
+    "pradarttana"
+  ],
+  "description": "",
+  "main": "",
+  "license": "MIT",
+  "private": true,
+  "dependencies": {
+    "bootstrap": "^3.3.6",
+    "eonasdan-bootstrap-datetimepicker": "^4.17.37"
+  }
+}

+ 57 - 0
contrib/views/hueambarimigration/src/main/resources/ui/checkconfiguration.jsp

@@ -0,0 +1,57 @@
+<!--
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+-->
+
+
+
+<!DOCTYPE html>
+<html>
+<head>
+<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
+
+<script type="text/javascript">
+	$(document).ready(function() {
+		conf_check();
+	});
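+	// Calls the Configuration_check servlet asynchronously and replaces the
+	// loading image with the returned HTML report.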
+	function conf_check() {
+		var url = "Configuration_check";
+
+		$.ajax({
+			url : url,
+			success : function(result) {
+				console.log("Got Result");
+				document.getElementById("areacenter").innerHTML = result;
+
+			}
+		});
+  }
+</script>
+
+</head>
+<div class="panel panel-default">
+	<div class="panel-heading">
+		<h3>Checking configuration</h3>
+	</div>
+	<div class="panel-body">
+		<div id="areacenter">
+			<center>
+				<img src="image/updateimg.gif" alt="Smiley face">
+			</center>
+		</div>
+	</div>
+</div>

+ 229 - 0
contrib/views/hueambarimigration/src/main/resources/ui/hivehistoryquerymigration.jsp

@@ -0,0 +1,229 @@
+<!--
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+-->
+
+<!DOCTYPE html>
+<html>
+<head>
+<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
+<script type="text/javascript">
+
+	function validateAndSearch() {
+
+		var uname = document.getElementById("username");
+		uname = uname.options[uname.selectedIndex].value;
+		var startdate = document.getElementById('startdate').value;
+		var enddate = document.getElementById('enddate').value;
+		var instance = document.getElementById("instance");
+		instance = instance.options[instance.selectedIndex].value;
+
+		if (uname == "default") {
+			alert("Please select an username");
+		} else if (instance == "default") {
+			alert("Please select an instance name");
+		} else {
+			$('#progressbar').show();
+			$('#lines').hide();
+
+			historyquery(uname, startdate, enddate, instance);
+			interval = setInterval(loadpercentage, 1000 );
+		}
+	}
+
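+	// Polls the ProgressBarStatus servlet once per second and widens the
+	// progress bar to the returned percentage.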
+	function loadpercentage() {
+		$.ajax({
+      url : "ProgressBarStatus",
+      success : function(result) {
+        $('#progressbarhivesavedquery').css('width', result);
+        console.log("Got the precentage completion "+ result);
+      },
+    });
+
+  }
+
+	function historyquery(uname, startdate, enddate, instance) {
+
+		var url = "HiveHistory?username=" + uname + "&startdate=" + startdate
+        				+ "&enddate=" + enddate + "&instance=" + instance;
+		$.ajax({
+			url : url,
+			success : function(result) {
+				console.log("Got Result");
+				document.getElementById("lines").innerHTML = result;
+				$('#progressbar').hide();
+				$('#lines').show();
+				clearInterval(interval);
+
+			}
+		});
+
+
+
+	}
+</script>
+<%@ page import="java.sql.*"%>
+<%@ page import="org.sqlite.*"%>
+<%@ page import="java.util.ArrayList"%>
+<%@ page import="org.apache.ambari.view.huetoambarimigration.datasource.DataSourceAmbariDatabase"%>
+<%@ page import="org.apache.ambari.view.huetoambarimigration.datasource.DataSourceHueDatabase"%>
+<%@ page import="javax.servlet.ServletConfig"%>
+<%@ page import="javax.servlet.ServletContext"%>
+<%@ page import="org.apache.ambari.view.ViewContext"%>
+</head>
+<div class="row">
+	<%
+		ArrayList<String> username = new ArrayList<String>();
+		ArrayList<String> instancename = new ArrayList<String>();
+		int i;
+
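+		// The Hue database supplies the list of users to migrate; the Ambari
+		// database supplies the Hive view instances to migrate into.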
+		Connection conn = null;
+
+		ServletContext context = request.getServletContext();
+        ViewContext view=(ViewContext) context.getAttribute(ViewContext.CONTEXT_ATTRIBUTE);
+
+		conn = DataSourceHueDatabase.getInstance(view.getProperties().get("huedrivername"),view.getProperties().get("huejdbcurl"),view.getProperties().get("huedbusername"),view.getProperties().get("huedbpassword")).getConnection();
+
+		Statement stat = conn.createStatement();
+
+		ResultSet rs = stat.executeQuery("select * from auth_user;");
+
+		while (rs.next()) {
+			username.add(rs.getString(2));
+		}
+
+		rs.close();
+		stat.close();
+
+		Connection c = null;
+		Statement stmt = null;
+
+		c = DataSourceAmbariDatabase.getInstance(view.getProperties().get("ambaridrivername"),view.getProperties().get("ambarijdbcurl"),view.getProperties().get("ambaridbusername"),view.getProperties().get("ambaridbpassword")).getConnection();
+
+		c.setAutoCommit(false);
+		stmt = c.createStatement();
+
+		ResultSet rs1=null;
+
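+		// Oracle's JDBC driver rejects a trailing semicolon, so the query text
+		// differs slightly per driver.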
+		if(view.getProperties().get("ambaridrivername").contains("oracle")){
+      rs1 = stmt.executeQuery("select distinct(view_instance_name) as instancename from viewentity where view_name='HIVE{1.0.0}'");
+    } else {
+      rs1 = stmt.executeQuery("select distinct(view_instance_name) as instancename from viewentity where view_name='HIVE{1.0.0}';");
+    }
+
+		while (rs1.next()) {
+			instancename.add(rs1.getString(1));
+
+		}
+		rs1.close();
+		stmt.close();
+
+	%>
+	<div class="col-sm-12">
+		<form method="GET" onSubmit="validateAndSearch()">
+			<div class="panel panel-default">
+				<div class="panel-heading">
+					<h3>Hive History Query Migration</h3>
+				</div>
+				<div class="panel-body">
+
+					<div class="row">
+						<div class="col-sm-3">
+							UserName <font size="3" color="red"> *</font>
+						</div>
+						<div class="col-sm-3">
+							<!-- <input type="text" placeholder="Enter username1234(*)" name="username1" id="username1"> -->
+							<select class="form-control" name="username"
+								placeholder="User name" id="username" required>
+								<option value="default" selected>Select below</option>
+								<option value="all">ALL User</option>
+
+								<%
+									for (i = 0; i < username.size(); i++) {
+								%><option value="<%=username.get(i)%>"><%=username.get(i)%></option>
+								<%
+									}
+								%>
+								<%
+									username.clear();
+								%>
+							</select>
+
+						</div>
+					</div>
+					<p></p>
+					<p></p>
+					<div class="row">
+						<div class="col-sm-3">
+							Instance name <font size="3" color="red"> *</font>
+						</div>
+						<div class="col-sm-3">
+							<!-- <input type="text" placeholder="Enter Instance Name(*)" name="instance" id="instance"> -->
+							<select class="form-control" name="instance"
+								placeholder="Instance name" id="instance" required>
+								<option value="default" selected>Select below</option>
+
+								<%
+									for (i = 0; i < instancename.size(); i++) {
+								%><option value="<%=instancename.get(i)%>"><%=instancename.get(i)%></option>
+								<%
+									}
+								%>
+								<%
+									instancename.clear();
+								%>
+							</select>
+						</div>
+					</div>
+					<p></p>
+					<p></p>
+					<div class="row">
+						<div class="col-sm-3">Start Date</div>
+						<div class="col-sm-3">
+							<input type="date" placeholder="Enter date" name="startdate"
+								id="startdate">
+						</div>
+					</div>
+					<p></p>
+					<p></p>
+					<div class="row">
+						<div class="col-sm-3">End Date</div>
+						<div class="col-sm-3">
+							<input type="date" placeholder="Enter date" name="enddate"
+								id="enddate">
+						</div>
+					</div>
+
+					<div class="row">
+
+						<div class="col-sm-3">
+							<input type="button" id="submit" class="btn btn-success"
+								value="submit" onclick="validateAndSearch()">
+						</div>
+					</div>
+
+					<div id="lines" style="display: none;"></div>
+					<br>
+					<br>
+					   <div class="progress" id="progressbar" style="display: none;">
+              <div id="progressbarhivesavedquery" class="progress-bar" role="progressbar" aria-valuenow="70" aria-valuemin="0" aria-valuemax="100"  style="width:0%">
+              </div>
+              </div>
+				</div>
+			</div>
+		</form>
+
+	</div>
+</div>

+ 240 - 0
contrib/views/hueambarimigration/src/main/resources/ui/hivesavedquerymigration.jsp

@@ -0,0 +1,240 @@
+<!--
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+-->
+
+<!DOCTYPE html>
+<html>
+<head>
+<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
+
+ <script type="text/javascript">
+
+	function validateAndSearch() {
+
+		var uname = document.getElementById("username");
+		uname = uname.options[uname.selectedIndex].value;
+		var startdate = document.getElementById('startdate').value;
+		var enddate = document.getElementById('enddate').value;
+		var instance = document.getElementById("instance");
+		instance = instance.options[instance.selectedIndex].value;
+
+		if (uname == "default") {
+			alert("Please select an username");
+		} else if (instance == "default") {
+			alert("Please select an instance name");
+		} else {
+			$('#progressbar').show();
+			$('#lines').hide();
+
+			historyquery(uname, startdate, enddate, instance);
+			interval = setInterval(loadpercentage, 1000 );
+
+		}
+
+	}
+
+	function loadpercentage() {
+      $.ajax({
+         url : "ProgressBarStatus",
+         success : function(result) {
+         $('#progressbarhivesavedquery').css('width', result);
+         console.log("Got the precentage completion "+ result);
+        },
+
+      });
+
+  }
+
+
+
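+	// Invokes the SavedQuery servlet; the poller above reports progress until
+	// the result HTML arrives.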
+	function historyquery(uname, startdate, enddate, instance) {
+
+		var url = "SavedQuery?username=" + uname + "&startdate=" + startdate
+        				+ "&enddate=" + enddate + "&instance=" + instance;
+		$.ajax({
+			url : url,
+			success : function(result) {
+				console.log("Got Result");
+				document.getElementById("lines").innerHTML = result;
+				$('#progressbar').hide();
+				$('#lines').show();
+				clearInterval(interval);
+
+			}
+		});
+
+
+
+	}
+</script>
+<%@ page import="java.sql.*"%>
+<%@ page import="org.sqlite.*"%>
+<%@ page import="java.util.ArrayList"%>
+<%@ page import="org.apache.ambari.view.huetoambarimigration.datasource.DataSourceAmbariDatabase"%>
+<%@ page import="org.apache.ambari.view.huetoambarimigration.datasource.DataSourceHueDatabase"%>
+<%@ page import="javax.servlet.ServletConfig"%>
+<%@ page import="javax.servlet.ServletContext"%>
+<%@ page import="org.apache.ambari.view.ViewContext"%>
+</head>
+<div class="row">
+	<%
+		ArrayList<String> username = new ArrayList<String>();
+		ArrayList<String> instancename = new ArrayList<String>();
+		int i;
+
+		Connection conn = null;
+
+		ServletContext context = request.getServletContext();
+        ViewContext view=(ViewContext) context.getAttribute(ViewContext.CONTEXT_ATTRIBUTE);
+
+		conn = DataSourceHueDatabase.getInstance(view.getProperties().get("huedrivername"),view.getProperties().get("huejdbcurl"),view.getProperties().get("huedbusername"),view.getProperties().get("huedbpassword")).getConnection();
+
+		Statement stat = conn.createStatement();
+
+		ResultSet rs = stat.executeQuery("select * from auth_user;");
+
+		while (rs.next()) {
+			username.add(rs.getString(2));
+		}
+
+		rs.close();
+		stat.close();
+
+		Connection c = null;
+		Statement stmt = null;
+
+		c = DataSourceAmbariDatabase.getInstance(view.getProperties().get("ambaridrivername"),view.getProperties().get("ambarijdbcurl"),view.getProperties().get("ambaridbusername"),view.getProperties().get("ambaridbpassword")).getConnection();
+
+		c.setAutoCommit(false);
+		stmt = c.createStatement();
+
+		ResultSet rs1=null;
+
+		if(view.getProperties().get("ambaridrivername").contains("oracle")){
+		  rs1 = stmt.executeQuery("select distinct(view_instance_name) as instancename from viewentity where view_name='HIVE{1.0.0}'");
+		} else {
+		  rs1 = stmt.executeQuery("select distinct(view_instance_name) as instancename from viewentity where view_name='HIVE{1.0.0}';");
+		}
+		while (rs1.next()) {
+			instancename.add(rs1.getString(1));
+
+		}
+		rs1.close();
+		stmt.close();
+
+	%>
+	<div class="col-sm-12">
+		<form method="GET" onSubmit="validateAndSearch()">
+			<div class="panel panel-default">
+				<div class="panel-heading">
+					<h3>Hive Saved Query Migration</h3>
+				</div>
+				<div class="panel-body">
+
+					<div class="row">
+						<div class="col-sm-3">
+							UserName <font size="3" color="red"> *</font>
+						</div>
+						<div class="col-sm-3">
+							<!-- <input type="text" placeholder="Enter username1234(*)" name="username1" id="username1"> -->
+							<select class="form-control" name="username"
+								placeholder="User name" id="username" required>
+								<option value="default" selected>Select below</option>
+								<option value="all">ALL User</option>
+
+								<%
+									for (i = 0; i < username.size(); i++) {
+								%><option value="<%=username.get(i)%>"><%=username.get(i)%></option>
+								<%
+									}
+								%>
+								<%
+									username.clear();
+								%>
+							</select>
+
+						</div>
+					</div>
+					<p></p>
+					<p></p>
+					<div class="row">
+						<div class="col-sm-3">
+							Instance name <font size="3" color="red"> *</font>
+						</div>
+						<div class="col-sm-3">
+							<!-- <input type="text" placeholder="Enter Instance Name(*)" name="instance" id="instance"> -->
+							<select class="form-control" name="instance"
+								placeholder="Instance name" id="instance" required>
+								<option value="default" selected>Select below</option>
+
+								<%
+									for (i = 0; i < instancename.size(); i++) {
+								%><option value="<%=instancename.get(i)%>"><%=instancename.get(i)%></option>
+								<%
+									}
+								%>
+								<%
+									instancename.clear();
+								%>
+							</select>
+						</div>
+					</div>
+					<p></p>
+					<p></p>
+					<div class="row">
+						<div class="col-sm-3">Start Date</div>
+						<div class="col-sm-3">
+							<input type="date" placeholder="Enter date" name="startdate"
+								id="startdate">
+						</div>
+					</div>
+					<p></p>
+					<p></p>
+					<div class="row">
+						<div class="col-sm-3">End Date</div>
+						<div class="col-sm-3">
+							<input type="date" placeholder="Enter date" name="enddate"
+								id="enddate">
+						</div>
+					</div>
+
+					<div class="row">
+
+						<div class="col-sm-3">
+							<input type="button" id="submit" class="btn btn-success"
+								value="submit" onclick="validateAndSearch()">
+						</div>
+					</div>
+
+					<div id="lines" style="display: none;"></div>
+
+					<br>
+					<br>
+					   <div class="progress" id="progressbar" style="display: none;">
+              <div id="progressbarhivesavedquery" class="progress-bar" role="progressbar" aria-valuenow="70" aria-valuemin="0" aria-valuemax="100"  style="width:0%">
+          </div>
+        </div>
+
+				</div>
+			</div>
+		</form>
+
+	</div>
+</div>

+ 31 - 0
contrib/views/hueambarimigration/src/main/resources/ui/homepage.jsp

@@ -0,0 +1,31 @@
+<!--
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+-->
+
+<%@ page language="java" contentType="text/html; charset=US-ASCII"
+	pageEncoding="US-ASCII"%>
+<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
+<html>
+<head>
+<meta http-equiv="Content-Type" content="text/html; charset=US-ASCII">
+
+</head>
+<body>
+
+
+</body>
+</html>

+ 27 - 0
contrib/views/hueambarimigration/src/main/resources/ui/package.json

@@ -0,0 +1,27 @@
+{
+  "name": "huetoambari",
+  "version": "0.0.0",
+  "private": true,
+  "directories": {
+    "doc": "doc",
+    "test": "tests"
+  },
+  "scripts": {
+    "start": "ember server",
+    "build": "ember build",
+    "test": "ember test",
+    "preinstall": "chmod +x node/npm/bin/node-gyp-bin/node-gyp",
+    "postinstall": "bash node/with_new_path.sh node node_modules/.bin/bower --allow-root install"
+  },
+
+  "engines": {
+    "node": ">= 0.10.32"
+  },
+  "author": "",
+  "license": "MIT",
+  "devDependencies": {
+
+    "bower": ">= 1.3.12"
+
+  }
+}

+ 233 - 0
contrib/views/hueambarimigration/src/main/resources/ui/pigjobmigration.jsp

@@ -0,0 +1,233 @@
+<!--
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+-->
+
+<!DOCTYPE html>
+<html>
+<head>
+<title>Pig Job Migration</title>
+<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
+
+<script type="text/javascript">
+	function pigjobquery() {
+
+		var startdate = document.getElementById('startdate4').value;
+		var enddate = document.getElementById('enddate4').value;
+
+		var uname = document.getElementById("username4");
+		uname = uname.options[uname.selectedIndex].value;
+		var instance = document.getElementById("instance4");
+		instance = instance.options[instance.selectedIndex].value;
+
+		if (uname == "default") {
+			alert("Please select an username");
+		} else if (instance == "default") {
+			alert("Please select an instance name");
+		} else {
+			$('#progressbar').show();
+			$('#lines').hide();
+			pigjob(uname, startdate, enddate, instance);
+			interval = setInterval(loadpercentage, 1000 );
+		}
+
+	}
+
+	function loadpercentage() {
+     	$.ajax({
+        url : "ProgressBarStatus",
+        success : function(result) {
+         $('#progressbarhivesavedquery').css('width', result);
+          console.log("Got the precentage completion "+ result);
+   			},
+
+       });
+  }
+
+	function pigjob(uname, startdate, enddate, instance) {
+
+		var url = "Pigjobsevlet?username=" + uname + "&startdate="
+				+ startdate + "&enddate=" + enddate + "&instance=" + instance;
+
+		$.ajax({
+			url : url,
+			success : function(result) {
+				console.log("Got Result");
+				document.getElementById("lines").innerHTML = result;
+				clearInterval(interval);
+				$('#progressbar').hide();
+				$('#lines').show();
+			}
+		});
+
+	}
+</script>
+<%@ page import="java.sql.*"%>
+<%@ page import="org.sqlite.*"%>
+<%@ page import="java.util.ArrayList"%>
+<%@ page import="org.apache.ambari.view.huetoambarimigration.datasource.DataSourceAmbariDatabase"%>
+<%@ page import="org.apache.ambari.view.huetoambarimigration.datasource.DataSourceHueDatabase"%>
+<%@ page import="javax.servlet.ServletConfig"%>
+<%@ page import="javax.servlet.ServletContext"%>
+<%@ page import="org.apache.ambari.view.ViewContext"%>
+
+</head>
+<%
+	ArrayList<String> username = new ArrayList<String>();
+	ArrayList<String> instancename = new ArrayList<String>();
+	int i;
+	
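+	// Same pattern as the Hive pages: usernames come from the Hue database,
+	// instance names from the Ambari viewentity table.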
+	Connection conn = null;
+
+	 ServletContext context = request.getServletContext();
+     ViewContext view=(ViewContext) context.getAttribute(ViewContext.CONTEXT_ATTRIBUTE);
+
+	conn = DataSourceHueDatabase.getInstance(view.getProperties().get("huedrivername"),view.getProperties().get("huejdbcurl"),view.getProperties().get("huedbusername"),view.getProperties().get("huedbpassword")).getConnection();
+	Statement stat = conn.createStatement();
+
+	ResultSet rs = stat.executeQuery("select * from auth_user;");
+
+	while (rs.next()) {
+		username.add(rs.getString(2));
+	}
+
+	rs.close();
+	stat.close();
+
+	Connection c = null;
+	Statement stmt = null;
+	
+
+	c =  DataSourceAmbariDatabase.getInstance(view.getProperties().get("ambaridrivername"),view.getProperties().get("ambarijdbcurl"),view.getProperties().get("ambaridbusername"),view.getProperties().get("ambaridbpassword")).getConnection();
+	c.setAutoCommit(false);
+	stmt = c.createStatement();
+
+	ResultSet rs1=null;
+
+	if (view.getProperties().get("ambaridrivername").contains("oracle")) {
+		rs1 = stmt.executeQuery("select distinct(view_instance_name) as instancename from viewentity where view_name='PIG{1.0.0}'");
+	} else {
+		rs1 = stmt.executeQuery("select distinct(view_instance_name) as instancename from viewentity where view_name='PIG{1.0.0}';");
+	}
+
+
+	while (rs1.next()) {
+		instancename.add(rs1.getString(1));
+
+	}
+	rs1.close();
+	stmt.close();
+	
+%>
+<div class="row">
+	<div class="col-sm-12">
+		<form method="GET" onSubmit="pigjobquery()">
+			<div class="panel panel-default">
+				<div class="panel-heading">
+					<h3>Pig Job Migration</h3>
+				</div>
+				<div class="panel-body">
+					<div class="row">
+						<div class="col-sm-3">
+							UserName<font size="3" color="red"> *</font>
+						</div>
+						<div class="col-sm-3">
+							<!-- <input type="text" placeholder="Enter username(*)" name="username4" id="username4"> -->
+							<select class="form-control" name="username4"
+								placeholder="User name" id="username4" required>
+								<option value="default" selected>Select below</option>
+								<option value="all">ALL User</option>
+
+								<%
+									for (i = 0; i < username.size(); i++) {
+								%><option value="<%=username.get(i)%>"><%=username.get(i)%></option>
+								<%
+									}
+								%>
+								<%
+									username.clear();
+								%>
+							</select>
+						</div>
+					</div>
+					<p></p>
+					<p></p>
+					<div class="row">
+						<div class="col-sm-3">
+							Instance name<font size="3" color="red"> *</font>
+						</div>
+						<div class="col-sm-3">
+							<!-- <input type="text" placeholder="Enter Instance Name(*)" name="instance4" id="instance4"> -->
+							<select class="form-control" name="instance4"
+								placeholder="Instance name" id="instance4" required>
+								<option value="default" selected>Select below</option>
+
+								<%
+									for (i = 0; i < instancename.size(); i++) {
+								%><option value="<%=instancename.get(i)%>"><%=instancename.get(i)%></option>
+								<%
+									}
+								%>
+								<%
+									instancename.clear();
+								%>
+							</select>
+						</div>
+					</div>
+					<p></p>
+					<p></p>
+					<div class="row">
+						<div class="col-sm-3">Start Date</div>
+						<div class="col-sm-3">
+							<input type="date" placeholder="Enter date" name="startdate4"
+								id="startdate4">
+						</div>
+					</div>
+					<p></p>
+					<p></p>
+					<div class="row">
+						<div class="col-sm-3">End Date</div>
+						<div class="col-sm-3">
+							<input type="date" placeholder="Enter date" name="enddate4"
+								id="enddate4">
+						</div>
+					</div>
+
+					<div class="row">
+
+						<div class="col-sm-3">
+							<input type="button" id="submit" class="btn btn-success"
+								value="submit" onclick="pigjobquery1()">
+						</div>
+					</div>
+
+					<div id="lines" style="display: none;"></div>
+
+					<br>
+           <br>
+
+           <div class="progress" id="progressbar" style="display: none;">
+           <div id="progressbarhivesavedquery" class="progress-bar" role="progressbar" aria-valuenow="70" aria-valuemin="0" aria-valuemax="100"  style="width:0%">
+           </div>
+				</div>
+				</div>
+			</div>
+		</form>
+	</div>
+</div>

+ 227 - 0
contrib/views/hueambarimigration/src/main/resources/ui/pigscriptsmigration.jsp

@@ -0,0 +1,227 @@
+<!--
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+-->
+
+<!DOCTYPE html>
+<html>
+<head>
+<title>Pig Saved Script Migration</title>
+<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
+
+<script type="text/javascript">
+	function pigsavedquery() {
+		var startdate = document.getElementById('startdate3').value;
+		var enddate = document.getElementById('enddate3').value;
+
+		var uname = document.getElementById("username3");
+		uname = uname.options[uname.selectedIndex].value;
+		var instance = document.getElementById("instance3");
+		instance = instance.options[instance.selectedIndex].value;
+		if (uname == "default") {
+			alert("Please select an username");
+		} else if (instance == "default") {
+			alert("Please select an instance name");
+		} else {
+			console.log("Showing loading");
+			$('#progressbar').show();
+			$('#lines').hide();
+			pigsaved(uname, startdate, enddate, instance);
+			interval = setInterval(loadpercentage, 1000 );
+
+		}
+
+	}
+
+	function loadpercentage() {
+  		$.ajax({
+        url : "ProgressBarStatus",
+         success : function(result) {
+           $('#progressbarhivesavedquery').css('width', result);
+           console.log("Got the precentage completion "+ result);
+         },
+
+      });
+
+    }
+
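+	// Invokes the Pig script migration servlet and renders its HTML result.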
+	function pigsaved(uname, startdate, enddate, instance) {
+		//alert("savedquery");
+		var url = "PigServlet?username=" + uname + "&startdate=" + startdate
+				+ "&enddate=" + enddate + "&instance=" + instance;
+
+		$.ajax({
+			url : url,
+			success : function(result) {
+				console.log("Got Result");
+				document.getElementById("lines").innerHTML = result;
+				clearInterval(interval);
+				$('#progressbar').hide();
+				$('#lines').show();
+			}
+		});
+
+	}
+</script>
+
+<%@ page import="java.sql.*"%>
+<%@ page import="org.sqlite.*"%>
+<%@ page import="java.util.ArrayList"%>
+<%@ page import="org.apache.ambari.view.huetoambarimigration.datasource.DataSourceAmbariDatabase"%>
+<%@ page import="org.apache.ambari.view.huetoambarimigration.datasource.DataSourceHueDatabase"%>
+<%@ page import="javax.servlet.ServletConfig"%>
+<%@ page import="javax.servlet.ServletContext"%>
+<%@ page import="org.apache.ambari.view.ViewContext"%>
+
+</head>
+<%
+	ArrayList<String> username = new ArrayList<String>();
+	ArrayList<String> instancename = new ArrayList<String>();
+	int i;
+	
+	Connection conn = null;
+	 ServletContext context = request.getServletContext();
+     ViewContext view=(ViewContext) context.getAttribute(ViewContext.CONTEXT_ATTRIBUTE);
+
+	conn = DataSourceHueDatabase.getInstance(view.getProperties().get("huedrivername"),view.getProperties().get("huejdbcurl"),view.getProperties().get("huedbusername"),view.getProperties().get("huedbpassword")).getConnection();
+	Statement stat = conn.createStatement();
+
+	ResultSet rs = stat.executeQuery("select * from auth_user;");
+
+	while (rs.next()) {
+		username.add(rs.getString(2));
+	}
+
+	rs.close();
+	stat.close();
+
+	Connection c = null;
+	Statement stmt = null;
+
+	c =  DataSourceAmbariDatabase.getInstance(view.getProperties().get("ambaridrivername"),view.getProperties().get("ambarijdbcurl"),view.getProperties().get("ambaridbusername"),view.getProperties().get("ambaridbpassword")).getConnection();
+
+	stmt = c.createStatement();
+	ResultSet rs1=null;
+	if (view.getProperties().get("ambaridrivername").contains("oracle")) {
+		rs1 = stmt.executeQuery("select distinct(view_instance_name) as instancename from viewentity where view_name='PIG{1.0.0}'");
+	} else {
+		rs1 = stmt.executeQuery("select distinct(view_instance_name) as instancename from viewentity where view_name='PIG{1.0.0}';");
+	}
+	while (rs1.next()) {
+		instancename.add(rs1.getString(1));
+
+	}
+	rs1.close();
+	stmt.close();
+	
+%>
+<div class="row">
+	<div class="col-sm-12">
+		<form method="GET" onSubmit="pigsavedquery()">
+			<div class="panel panel-default">
+				<div class="panel-heading">
+					<h3>Pig Saved Script Migration</h3>
+				</div>
+				<div class="panel-body">
+					<div class="row">
+						<div class="col-sm-3">
+							UserName<font size="3" color="red"> *</font>
+						</div>
+						<div class="col-sm-3">
+							<!-- <input type="text" placeholder="Enter username(*)" name="username3" id="username3"> -->
+							<select class="form-control" name="username3"
+								placeholder="User name" id="username3" required>
+								<option value="default" selected>Select below</option>
+								<option value="all">ALL User</option>
+
+								<%
+									for (i = 0; i < username.size(); i++) {
+								%><option value="<%=username.get(i)%>"><%=username.get(i)%></option>
+								<%
+									}
+								%>
+								<%
+									username.clear();
+								%>
+							</select>
+						</div>
+					</div>
+					<p></p>
+					<p></p>
+					<div class="row">
+						<div class="col-sm-3">
+							Instance name<font size="3" color="red"> *</font>
+						</div>
+						<div class="col-sm-3">
+							<!-- <input type="text" placeholder="Enter Instance Name(*)" name="instance3" id="instance3"> -->
+							<select class="form-control" name="instance3"
+								placeholder="Instance name" id="instance3" required>
+								<option value="default" selected>Select below</option>
+
+								<%
+									for (i = 0; i < instancename.size(); i++) {
+								%><option value="<%=instancename.get(i)%>"><%=instancename.get(i)%></option>
+								<%
+									}
+								%>
+								<%
+									instancename.clear();
+								%>
+							</select>
+						</div>
+					</div>
+					<p></p>
+					<p></p>
+					<div class="row">
+						<div class="col-sm-3">Start Date</div>
+						<div class="col-sm-3">
+							<input type="date" placeholder="Enter date" name="startdate3"
+								id="startdate3">
+						</div>
+					</div>
+					<p></p>
+					<p></p>
+					<div class="row">
+						<div class="col-sm-3">End Date</div>
+						<div class="col-sm-3">
+							<input type="date" placeholder="Enter date" name="enddate3"
+								id="enddate3">
+						</div>
+					</div>
+
+					<div class="row">
+
+						<div class="col-sm-3">
+							<input type="button" id="submit" class="btn btn-success"
+								value="submit" onclick="pigsavedquery()">
+						</div>
+					</div>
+
+					<div id="lines" style="display: none;"></div>
+
+					 <br>
+           <br>
+            <div class="progress" id="progressbar" style="display: none;">
+            <div id="progressbarhivesavedquery" class="progress-bar" role="progressbar" aria-valuenow="70" aria-valuemin="0" aria-valuemax="100"  style="width:0%">
+            </div>
+				</div>
+				</div>
+			</div>
+		</form>
+	</div>
+</div>

+ 203 - 0
contrib/views/hueambarimigration/src/main/resources/ui/revertchange.jsp

@@ -0,0 +1,203 @@
+<!--
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+-->
+
+<html>
+<head>
+<title>Revert Change</title>
+<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
+
+  <script type="text/javascript" src="js/jquery.min.js"></script>
+  <script type="text/javascript" src="js/moment.min.js"></script>
+  <script type="text/javascript" src="js/bootstrap.min.js"></script>
+  <script type="text/javascript" src="js/bootstrap-datetimepicker.min.js"></script>
+
+  <link rel="stylesheet" href="css/bootstrap.min.css" />
+  <link rel="stylesheet" href="css/bootstrap-datetimepicker.min.css" />
+
+<%@ page import="java.sql.*"%>
+<%@ page import="org.sqlite.*"%>
+<%@ page import="java.util.ArrayList"%>
+<%@ page import="org.apache.ambari.view.huetoambarimigration.datasource.DataSourceAmbariDatabase"%>
+<%@ page import="org.apache.ambari.view.huetoambarimigration.datasource.DataSourceHueDatabase"%>
+<%@ page import="javax.servlet.ServletConfig"%>
+<%@ page import="javax.servlet.ServletContext"%>
+<%@ page import="org.apache.ambari.view.ViewContext"%>
+</head>
+<%
+	int i;
+	ArrayList<String> instancename = new ArrayList<String>();
+	Connection c = null;
+	Statement stmt = null;
+	ServletContext context = request.getServletContext();
+	ViewContext view = (ViewContext) context.getAttribute(ViewContext.CONTEXT_ATTRIBUTE);
+
+	c = DataSourceAmbariDatabase.getInstance(view.getProperties().get("ambaridrivername"),view.getProperties().get("ambarijdbcurl"),view.getProperties().get("ambaridbusername"),view.getProperties().get("ambaridbpassword")).getConnection();
+
+	stmt = c.createStatement();
+	ResultSet rs1=null;
+	if (view.getProperties().get("ambaridrivername").contains("oracle")) {
+		rs1 = stmt.executeQuery("select distinct(view_instance_name) as instancename from viewentity");
+	} else {
+		rs1 = stmt.executeQuery("select distinct(view_instance_name) as instancename from viewentity;");
+	}
+
+	while (rs1.next()) {
+		instancename.add(rs1.getString(1));
+ 
+	}
+	rs1.close();
+	stmt.close();
+	c.close();
+%>
+<div class="row">
+
+	<div class="col-sm-12">
+		<form method="GET" onSubmit="validateAndSearch()">
+			<div class="panel panel-default">
+				<div class="panel-heading">
+					<h3>Revert Change</h3>
+				</div>
+				<div class="panel-body">
+					<p></p>
+					<p></p>
+					<p></p>
+					<p></p>
+					<div class="row">
+						<div class="col-sm-6">
+							  &nbsp; &nbsp; Instance name<font size="3" color="red"> *</font>
+						</div>
+						<div class="col-sm-3">
+							<!-- <input type="text" placeholder="Enter Instance Name(*)" name="instance4" id="instance4"> -->
+							<select class="form-control" name="instance"
+								placeholder="Instance name" id="instance" required>
+								<option value="default" selected>Select below</option>
+
+								<%
+									for(i=0;i<instancename.size();i++)
+																	{
+								%><option value="<%=instancename.get(i)%>"><%=instancename.get(i)%></option>
+								<%
+									}
+								%>
+								<%
+									instancename.clear();
+								%>
+							</select>
+						</div>
+					</div>
+					<p></p>
+					<p></p>
+					<p></p>
+					<p></p>
+
+					<div class="row">
+						<div class="col-sm-6"> &nbsp; &nbsp; Enter the Time Upto which you want to
+							Revert</div>
+
+
+						<div class="container">
+                <div class="row">
+                    <div class='col-sm-3'>
+                        <div class="form-group">
+                            <div class='input-group date' id='datetimepicker1'>
+                                <input type='text' class="form-control"  id="startdate" name="startdate" />
+                                <span class="input-group-addon">
+                                    <span class="glyphicon glyphicon-calendar"></span>
+                                </span>
+                            </div>
+                        </div>
+                    </div>
+                    <script type="text/javascript">
+                        $(function () {
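+                            // moment.js tokens: mm = minutes, ss = seconds
+                            // (MM and SS would mean month and fractional seconds).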
+                            $('#datetimepicker1').datetimepicker(
+                            {format : "YYYY-MM-DD HH:MM:SS"}
+                            );
+                        });
+                    </script>
+                </div>
+            </div>
+
+
+
+					</div>
+					<p></p>
+					<p></p>
+					<p></p>
+					<p></p>
+
+
+					<div class="row">
+
+						<div class="col-sm-3">
+							&nbsp; &nbsp;<input type="button" id="submit" class="btn btn-success"
+								value="submit" onclick="submittime()">
+						</div>
+					</div>
+					<div id="lines" style="display: none;">
+
+					 <div class="progress" id="progressbar" >
+                                            <div id="progressbarhivesavedquery" class="progress-bar" role="progressbar" aria-valuenow="70" aria-valuemin="0" aria-valuemax="100"  style="width:0%">
+
+                                            </div>
+
+				</div>
+					</div>
+				</div>
+			</div>
+		</form>
+
+	</div>
+</div>
+
+<script type="text/javascript">
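+	// Reads the cutoff timestamp and instance name, then asks the
+	// RevertChange servlet to roll back migrations based on that cutoff.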
+	function submittime() {
+	var strDatetime = $("#startdate").val();
+	var instance = document.getElementById("instance");
+	instance = instance.options[instance.selectedIndex].value;
+		
+		$('#progressbar').show();
+	    $('#lines').hide();
+		revertingchange(strDatetime,instance);
+		interval = setInterval(loadpercentage, 1000 );
+
+	}
+
+	function revertingchange(revertdate,instance) {
+		//alert("hello");
+		
+		var url = "RevertChange?revertdate="+revertdate+"&instance="+instance;
+		
+		$.ajax({url: url, success: function(result){
+			console.log("Got Result");
+			document.getElementById("lines").innerHTML = result;
+			$('#progressbar').hide()
+			$('#lines').show()
+			clearInterval(interval);
+   		 }});
+	}
+	function loadpercentage() {
+    	$.ajax({
+        url : "ProgressBarStatus",
+        success : function(result) {
+        $('#progressbarhivesavedquery').css('width', result);
+        console.log("Got the precentage completion "+ result);
+        },
+      });
+  }
+</script>

+ 31 - 0
contrib/views/hueambarimigration/src/main/resources/view.log4j.properties

@@ -0,0 +1,31 @@
+# Copyright 2011 The Apache Software Foundation
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+log4j.logger.org.apache.ambari.view.huetoambarimigration=DEBUG, hueambarimigration
+log4j.additivity.org.apache.ambari.view.huetoambarimigration=false
+
+
+
+# Redirect log messages to a log file, support file rolling.
+log4j.appender.hueambarimigration=org.apache.log4j.RollingFileAppender
+log4j.appender.hueambarimigration.File=/var/log/ambari-server/huetoambarimigration-view/huetoambarimigration.log
+log4j.appender.hueambarimigration.MaxFileSize=5MB
+log4j.appender.hueambarimigration.MaxBackupIndex=10
+log4j.appender.hueambarimigration.layout=org.apache.log4j.PatternLayout
+log4j.appender.hueambarimigration.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n
+

+ 129 - 0
contrib/views/hueambarimigration/src/main/resources/view.xml

@@ -0,0 +1,129 @@
+<!--
+Licensed to the Apache Software Foundation (ASF) under one or more
+contributor license agreements. See the NOTICE file distributed with
+this work for additional information regarding copyright ownership.
+The ASF licenses this file to You under the Apache License, Version 2.0
+(the "License"); you may not use this file except in compliance with
+the License. You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+-->
+<view>
+  <name>HUETOAMBARI_MIGRATION</name>
+  <label>Hue to Ambari Migration</label>
+  <version>1.0.0</version>
+
+
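+  <!-- These instance parameters are read by the JSP pages and servlets
+       through ViewContext.getProperties(). -->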
+  <parameter>
+    <name>Hue_URL</name>
+    <description>The HTTP URL on which the Hue server runs</description>
+    <label>Hue Http URL</label>
+    <placeholder>Enter Hue Server http URL</placeholder>
+  </parameter>
+
+  <parameter>
+    <name>Hue_Server_hostname</name>
+    <description>Hostname of the server where Hue runs</description>
+    <label>Hue Server hostname</label>
+    <placeholder>Enter Hue Server Hostname</placeholder>
+  </parameter>
+
+  <parameter>
+    <name>Ambari_URL</name>
+    <description>The HTTP URL on which the Ambari server runs</description>
+    <label>Ambari http URL</label>
+    <placeholder>Enter Ambari Server http URL</placeholder>
+  </parameter>
+
+  <parameter>
+    <name>Ambari_Server_hostname</name>
+    <description>Hostname of the server where Ambari runs</description>
+    <label>Ambari Server hostname</label>
+    <placeholder>Enter Ambari Server Hostname</placeholder>
+  </parameter>
+
+  <parameter>
+    <name>namenode_URI_Hue</name>
+    <description>WebHDFS URI of the Hue cluster namenode</description>
+    <label>Webhdfs URI(Hue)</label>
+    <placeholder>Enter Webhdfs URI of Hue</placeholder>
+  </parameter>
+
+  <parameter>
+    <name>namenode_URI_Ambari</name>
+    <description>WebHDFS URI of the Ambari cluster namenode</description>
+    <label>Webhdfs URI(Ambari)</label>
+    <placeholder>Enter Webhdfs URI of Ambari</placeholder>
+  </parameter>
+
+  <parameter>
+    <name>huedrivername</name>
+    <description>JDBC driver class for the Hue database</description>
+    <label>Hue Database Driver</label>
+    <placeholder>Enter Hue Database Driver</placeholder>
+  </parameter>
+
+  <parameter>
+    <name>huejdbcurl</name>
+    <description>JDBC URL of the Hue database</description>
+    <label>Hue JDBC URL</label>
+    <placeholder>Enter Hue JDBC Url</placeholder>
+  </parameter>
+
+  <parameter>
+    <name>huedbusername</name>
+    <description>Username for the Hue database</description>
+    <label>Hue Database Username</label>
+    <placeholder>Enter Username for Hue DB</placeholder>
+  </parameter>
+
+  <parameter>
+    <name>huedbpassword</name>
+    <description>Password for the Hue database</description>
+    <label>Hue Database Password</label>
+    <placeholder>Enter Password for Hue DB</placeholder>
+  </parameter>
+
+
+  <parameter>
+    <name>ambaridrivername</name>
+    <description>JDBC driver class for the Ambari database</description>
+    <label>Ambari Database Driver</label>
+    <placeholder>Enter Ambari Database Driver</placeholder>
+  </parameter>
+
+  <parameter>
+    <name>ambarijdbcurl</name>
+    <description>JDBC URL of the Ambari database</description>
+    <label>Ambari JDBC URL</label>
+    <placeholder>Enter Ambari JDBC Url</placeholder>
+  </parameter>
+
+  <parameter>
+    <name>ambaridbusername</name>
+    <description>Username for the Ambari database</description>
+    <label>Ambari Database Username</label>
+    <placeholder>Enter Ambari DB Username</placeholder>
+  </parameter>
+
+  <parameter>
+    <name>ambaridbpassword</name>
+    <description>Password for the Ambari database</description>
+    <label>Ambari Database Password</label>
+    <placeholder>Enter Ambari DB Password</placeholder>
+  </parameter>
+
+  <parameter>
+    <name>KerberoseEnabled</name>
+    <description>Whether Kerberos is enabled on the Ambari cluster</description>
+    <label>Kerberos enabled on Ambari cluster? (y/n)</label>
+    <placeholder>y/n</placeholder>
+  </parameter>
+
+</view>

+ 1 - 0
contrib/views/pom.xml

@@ -44,6 +44,7 @@
     <module>tez</module>
     <module>storm</module>
     <module>zeppelin</module>
+    <module>hueambarimigration</module>
   </modules>
   <build>
     <pluginManagement>