浏览代码

AMBARI-10035. Hive View: Retrieve history from ATS (alexantonenko)

Alex Antonenko 10 年之前
父节点
当前提交
db97312728
共有 100 个文件被更改,包括 2232 次插入和 742 次删除
  1. 0 0
      contrib/views/files/src/main/resources/ui/app/assets/javascripts/modernizr-2.6.2.min.js
  2. 10 3
      contrib/views/files/src/main/resources/view.xml
  3. 1 1
      contrib/views/hive/pom.xml
  4. 8 12
      contrib/views/hive/src/main/java/org/apache/ambari/view/hive/BaseService.java
  5. 1 1
      contrib/views/hive/src/main/java/org/apache/ambari/view/hive/HelpService.java
  6. 3 3
      contrib/views/hive/src/main/java/org/apache/ambari/view/hive/TestBean.java
  7. 21 3
      contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/Connection.java
  8. 12 21
      contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/ConnectionFactory.java
  9. 24 0
      contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/IConnectionFactory.java
  10. 15 17
      contrib/views/hive/src/main/java/org/apache/ambari/view/hive/persistence/DataStoreStorage.java
  11. 23 0
      contrib/views/hive/src/main/java/org/apache/ambari/view/hive/persistence/IStorageFactory.java
  12. 8 7
      contrib/views/hive/src/main/java/org/apache/ambari/view/hive/persistence/KeyValueStorage.java
  13. 3 3
      contrib/views/hive/src/main/java/org/apache/ambari/view/hive/persistence/Storage.java
  14. 1 0
      contrib/views/hive/src/main/java/org/apache/ambari/view/hive/persistence/utils/FilteringStrategy.java
  15. 2 2
      contrib/views/hive/src/main/java/org/apache/ambari/view/hive/persistence/utils/Indexed.java
  16. 5 0
      contrib/views/hive/src/main/java/org/apache/ambari/view/hive/persistence/utils/OnlyOwnersFilteringStrategy.java
  17. 18 43
      contrib/views/hive/src/main/java/org/apache/ambari/view/hive/persistence/utils/StorageFactory.java
  18. 20 22
      contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/CRUDResourceManager.java
  19. 37 0
      contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/IResourceManager.java
  20. 8 12
      contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/PersonalCRUDResourceManager.java
  21. 3 9
      contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/SharedCRUDResourceManager.java
  22. 16 8
      contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/browser/HiveBrowserService.java
  23. 37 10
      contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/files/FileService.java
  24. 210 0
      contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/Aggregator.java
  25. 4 17
      contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/ConnectionController.java
  26. 36 0
      contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/IOperationHandleResourceManager.java
  27. 7 5
      contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/JobResourceProvider.java
  28. 26 11
      contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/JobService.java
  29. 52 27
      contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/LogParser.java
  30. 10 12
      contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/OperationHandleController.java
  31. 8 17
      contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/OperationHandleControllerFactory.java
  32. 22 19
      contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/OperationHandleResourceManager.java
  33. 6 6
      contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/StoredOperationHandle.java
  34. 139 0
      contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/atsJobs/ATSParser.java
  35. 9 10
      contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/atsJobs/ATSParserFactory.java
  36. 29 0
      contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/atsJobs/ATSRequestsDelegate.java
  37. 86 0
      contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/atsJobs/ATSRequestsDelegateImpl.java
  38. 37 0
      contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/atsJobs/HiveQueryId.java
  39. 29 0
      contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/atsJobs/IATSParser.java
  40. 26 0
      contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/atsJobs/TezDagId.java
  41. 23 0
      contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/viewJobs/IJobControllerFactory.java
  42. 15 6
      contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/viewJobs/Job.java
  43. 2 2
      contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/viewJobs/JobController.java
  44. 42 0
      contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/viewJobs/JobControllerFactory.java
  45. 36 19
      contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/viewJobs/JobControllerImpl.java
  46. 30 7
      contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/viewJobs/JobImpl.java
  47. 10 10
      contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/viewJobs/JobResourceManager.java
  48. 3 3
      contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/resources/FileResourceItem.java
  49. 5 4
      contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/resources/FileResourceResourceManager.java
  50. 5 5
      contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/resources/FileResourceResourceProvider.java
  51. 5 6
      contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/resources/FileResourceService.java
  52. 3 3
      contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/savedQueries/SavedQuery.java
  53. 21 21
      contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/savedQueries/SavedQueryResourceManager.java
  54. 12 6
      contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/savedQueries/SavedQueryResourceProvider.java
  55. 4 6
      contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/savedQueries/SavedQueryService.java
  56. 3 3
      contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/udfs/UDF.java
  57. 5 4
      contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/udfs/UDFResourceManager.java
  58. 6 5
      contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/udfs/UDFResourceProvider.java
  59. 6 9
      contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/udfs/UDFService.java
  60. 3 3
      contrib/views/hive/src/main/java/org/apache/ambari/view/hive/utils/FilePaginator.java
  61. 12 33
      contrib/views/hive/src/main/java/org/apache/ambari/view/hive/utils/HdfsApi.java
  62. 7 10
      contrib/views/hive/src/main/java/org/apache/ambari/view/hive/utils/HdfsUtil.java
  63. 163 0
      contrib/views/hive/src/main/java/org/apache/ambari/view/hive/utils/SharedObjectsFactory.java
  64. 1 1
      contrib/views/hive/src/main/resources/ui/hive-web/app/components/typeahead-widget.js
  65. 4 5
      contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/history.js
  66. 44 12
      contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/index.js
  67. 6 6
      contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/index/history-query/logs.js
  68. 3 2
      contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/index/history-query/results.js
  69. 2 3
      contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/job.js
  70. 6 7
      contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/open-queries.js
  71. 1 4
      contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/queries.js
  72. 81 48
      contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/settings.js
  73. 22 0
      contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/tez-ui.js
  74. 22 0
      contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/visual-explain.js
  75. 25 0
      contrib/views/hive/src/main/resources/ui/hive-web/app/helpers/all-uppercase.js
  76. 5 2
      contrib/views/hive/src/main/resources/ui/hive-web/app/initializers/i18n.js
  77. 13 1
      contrib/views/hive/src/main/resources/ui/hive-web/app/models/job.js
  78. 2 2
      contrib/views/hive/src/main/resources/ui/hive-web/app/routes/application.js
  79. 10 3
      contrib/views/hive/src/main/resources/ui/hive-web/app/routes/index/history-query/index.js
  80. 73 27
      contrib/views/hive/src/main/resources/ui/hive-web/app/styles/app.scss
  81. 17 0
      contrib/views/hive/src/main/resources/ui/hive-web/app/templates/components/_typeahead-widget.hbs
  82. 1 1
      contrib/views/hive/src/main/resources/ui/hive-web/app/templates/components/popover-widget.hbs
  83. 1 1
      contrib/views/hive/src/main/resources/ui/hive-web/app/templates/databases-search-results.hbs
  84. 2 2
      contrib/views/hive/src/main/resources/ui/hive-web/app/templates/databases.hbs
  85. 2 2
      contrib/views/hive/src/main/resources/ui/hive-web/app/templates/history.hbs
  86. 72 52
      contrib/views/hive/src/main/resources/ui/hive-web/app/templates/index.hbs
  87. 30 28
      contrib/views/hive/src/main/resources/ui/hive-web/app/templates/queries.hbs
  88. 35 28
      contrib/views/hive/src/main/resources/ui/hive-web/app/templates/settings.hbs
  89. 22 0
      contrib/views/hive/src/main/resources/ui/hive-web/app/templates/tez-ui.hbs
  90. 22 0
      contrib/views/hive/src/main/resources/ui/hive-web/app/templates/visual-explain.hbs
  91. 79 8
      contrib/views/hive/src/main/resources/ui/hive-web/app/utils/constants.js
  92. 16 3
      contrib/views/hive/src/main/resources/ui/hive-web/app/utils/functions.js
  93. 35 0
      contrib/views/hive/src/main/resources/ui/hive-web/app/views/tez-ui.js
  94. 35 0
      contrib/views/hive/src/main/resources/ui/hive-web/app/views/visual-explain.js
  95. 4 4
      contrib/views/hive/src/main/resources/ui/hive-web/bower.json
  96. 6 6
      contrib/views/hive/src/main/resources/ui/hive-web/package.json
  97. 1 1
      contrib/views/hive/src/main/resources/ui/hive-web/tests/integration/query-editor-test.js
  98. 2 2
      contrib/views/hive/src/main/resources/ui/hive-web/tests/unit/controllers/history-test.js
  99. 0 15
      contrib/views/hive/src/main/resources/ui/hive-web/tests/unit/controllers/queries-test.js
  100. 97 0
      contrib/views/hive/src/main/resources/ui/hive-web/tests/unit/controllers/settings-test.js

+ 0 - 0
contrib/views/files/src/main/resources/ui/app/assets/javascripts/modernizr-2.6.2.min.js


+ 10 - 3
contrib/views/files/src/main/resources/view.xml

@@ -19,19 +19,26 @@
     <label>Files</label>
     <version>0.1.0</version>
 
+    <min-ambari-version>1.7.*</min-ambari-version>
+
     <parameter>
         <name>webhdfs.url</name>
-        <description>WebHDFS FileSystem URI (example: webhdfs://namenode:50070)</description>
+        <description>Enter the WebHDFS FileSystem URI. Typically this is the dfs.namenode.http-address property in the hdfs-site.xml configuration. URL must be accessible from Ambari Server.</description>
+        <label>WebHDFS FileSystem URI</label>
+        <placeholder>webhdfs://namenode:50070</placeholder>
         <required>true</required>
     </parameter>
     <parameter>
         <name>webhdfs.username</name>
-        <description>doAs for proxy user for HDFS</description>
+        <description>doAs for proxy user for HDFS. By default, uses the currently logged-in Ambari user.</description>
+        <label>WebHDFS Username</label>
         <required>false</required>
     </parameter>
     <parameter>
         <name>webhdfs.auth</name>
-        <description>Semicolon-separated authentication configs. Default: auth=SIMPLE</description>
+        <description>Semicolon-separated authentication configs.</description>
+        <placeholder>auth=SIMPLE</placeholder>
+        <default-value>auth=SIMPLE</default-value>
         <required>false</required>
     </parameter>
 

+ 1 - 1
contrib/views/hive/pom.xml

@@ -195,7 +195,7 @@
         <artifactId>frontend-maven-plugin</artifactId>
         <version>0.0.14</version>
         <configuration>
-          <nodeVersion>v0.10.26</nodeVersion>
+          <nodeVersion>v0.10.32</nodeVersion>
           <npmVersion>1.4.3</npmVersion>
           <workingDirectory>src/main/resources/ui/hive-web/</workingDirectory>
         </configuration>

+ 8 - 12
contrib/views/hive/src/main/java/org/apache/ambari/view/hive/BaseService.java

@@ -20,9 +20,8 @@ package org.apache.ambari.view.hive;
 
 import com.google.inject.Inject;
 import org.apache.ambari.view.ViewContext;
-import org.apache.ambari.view.hive.persistence.Storage;
-import org.apache.ambari.view.hive.persistence.utils.StorageUtil;
 import org.apache.ambari.view.hive.utils.HdfsApi;
+import org.apache.ambari.view.hive.utils.SharedObjectsFactory;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -37,19 +36,16 @@ public class BaseService {
   protected final static Logger LOG =
       LoggerFactory.getLogger(BaseService.class);
 
-  private Storage storage;
-  protected Storage getStorage() {
-    if (storage == null) {
-      storage = StorageUtil.getInstance(context).getStorage();
+  private SharedObjectsFactory sharedObjectsFactory;
+  public SharedObjectsFactory getSharedObjectsFactory() {
+    if (sharedObjectsFactory == null) {
+      sharedObjectsFactory = new SharedObjectsFactory(context);
     }
-    return storage;
+    return sharedObjectsFactory;
   }
 
-  private HdfsApi hdfsApi = null;
-  protected HdfsApi getHdfsApi()  {
-    if (hdfsApi == null)
-      hdfsApi = HdfsApi.getInstance(context);
-    return hdfsApi;
+  public void setSharedObjectsFactory(SharedObjectsFactory sharedObjectsFactory) {
+    this.sharedObjectsFactory = sharedObjectsFactory;
   }
 
   public BaseService() {

+ 1 - 1
contrib/views/hive/src/main/java/org/apache/ambari/view/hive/HelpService.java

@@ -66,7 +66,7 @@ public class HelpService extends BaseService {
   public Response testStorage(){
     TestBean test = new TestBean();
     test.someData = "hello world";
-    getStorage().store(TestBean.class, test);
+    getSharedObjectsFactory().getStorage().store(TestBean.class, test);
     return Response.ok("OK").build();
   }
 }

+ 3 - 3
contrib/views/hive/src/main/java/org/apache/ambari/view/hive/TestBean.java

@@ -22,15 +22,15 @@ import org.apache.ambari.view.hive.persistence.utils.Indexed;
 
 public class TestBean implements Indexed {
   public String someData;
-  public Integer id;
+  public String id;
 
   @Override
-  public Integer getId() {
+  public String getId() {
     return id;
   }
 
   @Override
-  public void setId(Integer id) {
+  public void setId(String id) {
     this.id = id;
   }
 }

+ 21 - 3
contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/Connection.java

@@ -18,7 +18,9 @@
 
 package org.apache.ambari.view.hive.client;
 
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hive.service.auth.HiveAuthFactory;
 import org.apache.hive.service.auth.KerberosSaslHelper;
 import org.apache.hive.service.auth.PlainSaslHelper;
@@ -54,11 +56,13 @@ public class Connection {
   private TTransport transport;
 
   private DDLDelegator ddl;
+  private String username;
 
-  public Connection(String host, int port, Map<String, String> authParams) throws HiveClientException {
+  public Connection(String host, int port, Map<String, String> authParams, String username) throws HiveClientException {
     this.host = host;
     this.port = port;
     this.authParams = authParams;
+    this.username = username;
 
     openConnection();
     ddl = new DDLDelegator(this);
@@ -78,11 +82,11 @@ public class Connection {
           + host + ":" + port + ": " + e.toString(), e);
     }
     LOG.info("Hive connection opened");
-    openSession();
   }
 
   /**
    * Based on JDBC implementation of HiveConnection.createBinaryTransport
+   *
    * @return transport
    * @throws HiveClientException
    */
@@ -107,6 +111,11 @@ public class Connection {
           }
           saslProps.put(Sasl.QOP, saslQOP.toString());
           saslProps.put(Sasl.SERVER_AUTH, "true");
+
+          Configuration conf = new Configuration();
+          conf.set("hadoop.security.authentication", "kerberos");
+          UserGroupInformation.setConfiguration(conf);
+
           transport = KerberosSaslHelper.getKerberosTransport(
               authParams.get(Utils.HiveAuthenticationParams.AUTH_PRINCIPAL), host,
               HiveAuthFactory.getSocketTransport(host, port, 10000), saslProps,
@@ -119,7 +128,7 @@ public class Connection {
                 host, HiveAuthFactory.getSocketTransport(host, port, 10000), saslProps);
           } else {
             // we are using PLAIN Sasl connection with user/password
-            String userName = getAuthParamDefault(Utils.HiveAuthenticationParams.AUTH_USER, Utils.HiveAuthenticationParams.ANONYMOUS_USER);
+            String userName = getAuthParamDefault(Utils.HiveAuthenticationParams.AUTH_USER, getUsername());
             String passwd = getAuthParamDefault(Utils.HiveAuthenticationParams.AUTH_PASSWD, Utils.HiveAuthenticationParams.ANONYMOUS_USER);
             // Note: Thrift returns an SSL socket that is already bound to the specified host:port
             // Therefore an open called on this would be a no-op later
@@ -250,6 +259,7 @@ public class Connection {
         public TExecuteStatementResp body() throws HiveClientException {
 
           TExecuteStatementReq execReq = null;
+          openSession();
           execReq = new TExecuteStatementReq(getSessHandle(), oneCmd);
           execReq.setRunAsync(async);
           execReq.setConfOverlay(new HashMap<String, String>()); //maybe it's hive configuration? use it, Luke!
@@ -398,4 +408,12 @@ public class Connection {
   public void setAuthParams(Map<String, String> authParams) {
     this.authParams = authParams;
   }
+
+  public String getUsername() {
+    return username;
+  }
+
+  public void setUsername(String username) {
+    this.username = username;
+  }
 }

+ 12 - 21
contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/ConnectionPool.java → contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/ConnectionFactory.java

@@ -26,43 +26,34 @@ import org.slf4j.LoggerFactory;
 import java.util.HashMap;
 import java.util.Map;
 
-public class ConnectionPool {
+public class ConnectionFactory implements IConnectionFactory {
   private final static Logger LOG =
-      LoggerFactory.getLogger(ConnectionPool.class);
+      LoggerFactory.getLogger(ConnectionFactory.class);
+  private ViewContext context;
 
-  private static Map<String, Connection> viewSingletonObjects = new HashMap<String, Connection>();
-  /**
-   * Returns HdfsApi object specific to instance
-   * @param context View Context instance
-   * @return Hdfs business delegate object
-   */
-  public static Connection getConnection(ViewContext context) {
-    if (!viewSingletonObjects.containsKey(context.getInstanceName()))
-      viewSingletonObjects.put(context.getInstanceName(), connectToHive(context));
-    return viewSingletonObjects.get(context.getInstanceName());
+  public ConnectionFactory(ViewContext context) {
+    this.context = context;
   }
 
-  private static Connection connectToHive(ViewContext context) {
+  @Override
+  public Connection getHiveConnection() {
     try {
-      return new Connection(getHiveHost(context), Integer.valueOf(getHivePort(context)), getHiveAuthParams(context));
+      return new Connection(getHiveHost(), Integer.valueOf(getHivePort()),
+          getHiveAuthParams(), context.getUsername());
     } catch (HiveClientException e) {
       throw new ServiceFormattedException("Couldn't open connection to Hive: " + e.toString(), e);
     }
   }
 
-  public static void setInstance(ViewContext context, Connection api) {
-    viewSingletonObjects.put(context.getInstanceName(), api);
-  }
-
-  private static String getHiveHost(ViewContext context) {
+  private String getHiveHost() {
     return context.getProperties().get("hive.host");
   }
 
-  private static String getHivePort(ViewContext context) {
+  private String getHivePort() {
     return context.getProperties().get("hive.port");
   }
 
-  private static Map<String, String> getHiveAuthParams(ViewContext context) {
+  private Map<String, String> getHiveAuthParams() {
     String auth = context.getProperties().get("hive.auth");
     Map<String, String> params = new HashMap<String, String>();
     if (auth == null || auth.isEmpty()) {

+ 24 - 0
contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/IConnectionFactory.java

@@ -0,0 +1,24 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.client;
+
+
+public interface IConnectionFactory {
+  Connection getHiveConnection();
+}

+ 15 - 17
contrib/views/hive/src/main/java/org/apache/ambari/view/hive/persistence/DataStoreStorage.java

@@ -30,7 +30,6 @@ import org.slf4j.LoggerFactory;
 
 import javax.ws.rs.WebApplicationException;
 import java.util.ArrayList;
-import java.util.Collections;
 import java.util.LinkedList;
 import java.util.List;
 
@@ -55,7 +54,7 @@ public class DataStoreStorage implements Storage {
   public synchronized void store(Class model, Indexed obj) {
     try {
       if (obj.getId() == null) {
-        int id = nextIdForEntity(context, model);
+        String id = nextIdForEntity(context, model);
         obj.setId(id);
       }
       context.getDataStore().store(obj);
@@ -64,7 +63,7 @@ public class DataStoreStorage implements Storage {
     }
   }
 
-  private static synchronized int nextIdForEntity(ViewContext context, Class aClass) {
+  private static synchronized String nextIdForEntity(ViewContext context, Class aClass) {
     // auto increment id implementation
     String lastId = context.getInstanceData(aClass.getName());
     int newId;
@@ -74,12 +73,12 @@ public class DataStoreStorage implements Storage {
       newId = Integer.parseInt(lastId) + 1;
     }
     context.putInstanceData(aClass.getName(), String.valueOf(newId));
-    return newId;
+    return String.valueOf(newId);
   }
 
   @Override
-  public synchronized <T extends Indexed> T load(Class<T> model, Integer id) throws ItemNotFound {
-    LOG.debug(String.format("Loading %s #%d", model.getName(), id));
+  public synchronized <T extends Indexed> T load(Class<T> model, Object id) throws ItemNotFound {
+    LOG.debug(String.format("Loading %s #%s", model.getName(), id));
     try {
       T obj = context.getDataStore().find(model, id);
       if (obj != null) {
@@ -97,10 +96,9 @@ public class DataStoreStorage implements Storage {
     LinkedList<T> list = new LinkedList<T>();
     LOG.debug(String.format("Loading all %s-s", model.getName()));
     try {
-      for(T item: context.getDataStore().findAll(model, null)) {
-        if ((filter == null) || filter.isConform(item)) {
-          list.add(item);
-        }
+      //TODO: use WHERE statement instead of this ugly filter
+      for(T item: context.getDataStore().findAll(model, filter.whereStatement())) {
+        list.add(item);
       }
     } catch (PersistenceException e) {
       throw new ServiceFormattedException("Error while finding all objects in DataStorage", e);
@@ -124,8 +122,8 @@ public class DataStoreStorage implements Storage {
   }
 
   @Override
-  public synchronized void delete(Class model, int id) throws ItemNotFound {
-    LOG.debug(String.format("Deleting %s:%d", model.getName(), id));
+  public synchronized void delete(Class model, Object id) throws ItemNotFound {
+    LOG.debug(String.format("Deleting %s:%s", model.getName(), id));
     Object obj = load(model, id);
     try {
       context.getDataStore().remove(obj);
@@ -135,7 +133,7 @@ public class DataStoreStorage implements Storage {
   }
 
   @Override
-  public boolean exists(Class model, Integer id) {
+  public boolean exists(Class model, Object id) {
     try {
       return context.getDataStore().find(model, id) != null;
     } catch (PersistenceException e) {
@@ -151,7 +149,7 @@ public class DataStoreStorage implements Storage {
       storage.store(SmokeTestEntity.class, entity);
 
       if (entity.getId() == null) throw new ServiceFormattedException("Ambari Views instance data DB doesn't work properly (auto increment id doesn't work)", null);
-      Integer id = entity.getId();
+      Object id = entity.getId();
       SmokeTestEntity entity2 = storage.load(SmokeTestEntity.class, id);
       boolean status = entity2.getData().compareTo("42") == 0;
       storage.delete(SmokeTestEntity.class, id);
@@ -164,14 +162,14 @@ public class DataStoreStorage implements Storage {
   }
 
   public static class SmokeTestEntity implements Indexed {
-    private Integer id = null;
+    private String id = null;
     private String data = null;
 
-    public Integer getId() {
+    public String getId() {
       return id;
     }
 
-    public void setId(Integer id) {
+    public void setId(String id) {
       this.id = id;
     }
 

+ 23 - 0
contrib/views/hive/src/main/java/org/apache/ambari/view/hive/persistence/IStorageFactory.java

@@ -0,0 +1,23 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.persistence;
+
+public interface IStorageFactory {
+  Storage getStorage();
+}

+ 8 - 7
contrib/views/hive/src/main/java/org/apache/ambari/view/hive/persistence/KeyValueStorage.java

@@ -62,7 +62,7 @@ public abstract class KeyValueStorage implements Storage {
       int lastIndex = getConfig().getInt(modelIndexingPropName, 0);
       lastIndex ++;
       getConfig().setProperty(modelIndexingPropName, lastIndex);
-      obj.setId(lastIndex);
+      obj.setId(String.valueOf(lastIndex));
     }
 
     String modelPropName = getItemPropertyName(model, obj.getId());
@@ -71,12 +71,13 @@ public abstract class KeyValueStorage implements Storage {
   }
 
   @Override
-  public <T extends Indexed> T load(Class<T> model, Integer id) throws ItemNotFound {
+  public <T extends Indexed> T load(Class<T> model, Object id) throws ItemNotFound {
     String modelPropName = getItemPropertyName(model, id);
     LOG.debug(String.format("Loading %s", modelPropName));
     if (getConfig().containsKey(modelPropName)) {
       String json = read(modelPropName);
       LOG.debug(String.format("json: %s", json));
+
       return deserialize(model, json);
     } else {
       throw new ItemNotFound();
@@ -141,14 +142,14 @@ public abstract class KeyValueStorage implements Storage {
   }
 
   @Override
-  public synchronized void delete(Class model, int id) {
-    LOG.debug(String.format("Deleting %s:%d", model.getName(), id));
+  public synchronized void delete(Class model, Object id) {
+    LOG.debug(String.format("Deleting %s:%s", model.getName(), id));
     String modelPropName = getItemPropertyName(model, id);
     clear(modelPropName);
   }
 
   @Override
-  public boolean exists(Class model, Integer id) {
+  public boolean exists(Class model, Object id) {
     return getConfig().containsKey(getItemPropertyName(model, id));
   }
 
@@ -156,7 +157,7 @@ public abstract class KeyValueStorage implements Storage {
     return String.format("%s:index", model.getName());
   }
 
-  private String getItemPropertyName(Class model, int id) {
-    return String.format("%s.%d", model.getName(), id);
+  private String getItemPropertyName(Class model, Object id) {
+    return String.format("%s.%s", model.getName(), id);
   }
 }

+ 3 - 3
contrib/views/hive/src/main/java/org/apache/ambari/view/hive/persistence/Storage.java

@@ -41,7 +41,7 @@ public interface Storage {
    * @return bean instance
    * @throws ItemNotFound thrown if item with id was not found in DB
    */
-  <T extends Indexed> T load(Class<T> model, Integer id) throws ItemNotFound;
+  <T extends Indexed> T load(Class<T> model, Object id) throws ItemNotFound;
 
   /**
    * Load all objects of given bean class
@@ -74,7 +74,7 @@ public interface Storage {
    * @param model bean class
    * @param id identifier
    */
-  void delete(Class model, int id) throws ItemNotFound;
+  void delete(Class model, Object id) throws ItemNotFound;
 
   /**
    * Check is object exists
@@ -82,5 +82,5 @@ public interface Storage {
    * @param id identifier
    * @return true if exists
    */
-  boolean exists(Class model, Integer id);
+  boolean exists(Class model, Object id);
 }

+ 1 - 0
contrib/views/hive/src/main/java/org/apache/ambari/view/hive/persistence/utils/FilteringStrategy.java

@@ -28,4 +28,5 @@ public interface FilteringStrategy {
    * @return true if item conforms this filter
    */
   boolean isConform(Indexed item);
+  String whereStatement();
 }

+ 2 - 2
contrib/views/hive/src/main/java/org/apache/ambari/view/hive/persistence/utils/Indexed.java

@@ -26,11 +26,11 @@ public interface Indexed {
    * Get the ID
    * @return ID
    */
-  Integer getId();
+  String getId();
 
   /**
    * Set ID
    * @param id ID
    */
-  void setId(Integer id);
+  void setId(String id);
 }

+ 5 - 0
contrib/views/hive/src/main/java/org/apache/ambari/view/hive/persistence/utils/OnlyOwnersFilteringStrategy.java

@@ -30,4 +30,9 @@ public class OnlyOwnersFilteringStrategy implements FilteringStrategy {
     Owned object = (Owned) item;
     return object.getOwner().compareTo(username) == 0;
   }
+
+  @Override
+  public String whereStatement() {
+    return "owner = '" + username + "'";
+  }
 }

+ 18 - 43
contrib/views/hive/src/main/java/org/apache/ambari/view/hive/persistence/utils/StorageUtil.java → contrib/views/hive/src/main/java/org/apache/ambari/view/hive/persistence/utils/StorageFactory.java

@@ -20,14 +20,12 @@ package org.apache.ambari.view.hive.persistence.utils;
 
 import org.apache.ambari.view.ViewContext;
 import org.apache.ambari.view.hive.persistence.DataStoreStorage;
+import org.apache.ambari.view.hive.persistence.IStorageFactory;
 import org.apache.ambari.view.hive.persistence.LocalKeyValueStorage;
 import org.apache.ambari.view.hive.persistence.Storage;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.util.HashMap;
-import java.util.Map;
-
 /**
  * Storage factory, creates storage of Local or Persistence API type.
  * Type depends on context configuration: if "dataworker.storagePath" is set,
@@ -35,60 +33,37 @@ import java.util.Map;
  *
  * Storage is singleton.
  */
-public class StorageUtil {
-  private Storage storageInstance = null;
-
+public class StorageFactory implements IStorageFactory {
   protected final static Logger LOG =
-      LoggerFactory.getLogger(StorageUtil.class);
-
-
-  private static Map<String, StorageUtil> viewSingletonObjects = new HashMap<String, StorageUtil>();
-  public static StorageUtil getInstance(ViewContext context) {
-    if (!viewSingletonObjects.containsKey(context.getInstanceName()))
-      viewSingletonObjects.put(context.getInstanceName(), new StorageUtil(context));
-    return viewSingletonObjects.get(context.getInstanceName());
-  }
-
-  public static void dropAllConnections() {
-    viewSingletonObjects.clear();
-  }
+      LoggerFactory.getLogger(StorageFactory.class);
 
   private ViewContext context;
 
   /**
-   * Constructor of storage util
+   * Constructor of storage factory
    * @param context View Context instance
    */
-  public StorageUtil(ViewContext context) {
+  public StorageFactory(ViewContext context) {
     this.context = context;
   }
 
   /**
-   * Get storage instance. If one is not created, creates instance.
+   * Creates storage instance
    * @return storage instance
    */
-  public synchronized Storage getStorage() {
-    if (storageInstance == null) {
-      String fileName = context.getProperties().get("dataworker.storagePath");
-      if (fileName != null) {
-        LOG.debug("Using local storage in " + fileName + " to store data");
-        // If specifed, use LocalKeyValueStorage - key-value file based storage
-        storageInstance = new LocalKeyValueStorage(context);
-      } else {
-        LOG.debug("Using Persistence API to store data");
-        // If not specifed, use ambari-views Persistence API
-        storageInstance = new DataStoreStorage(context);
-      }
+  public Storage getStorage() {
+    String fileName = context.getProperties().get("dataworker.storagePath");
+
+    Storage storageInstance;
+    if (fileName != null) {
+      LOG.debug("Using local storage in " + fileName + " to store data");
+      // If specified, use LocalKeyValueStorage - key-value file based storage
+      storageInstance = new LocalKeyValueStorage(context);
+    } else {
+      LOG.debug("Using Persistence API to store data");
+      // If not specified, use ambari-views Persistence API
+      storageInstance = new DataStoreStorage(context);
     }
     return storageInstance;
   }
-
-  /**
-   * Set storage to use across all application.
-   * Used in unit tests.
-   * @param storage storage instance
-   */
-  public void setStorage(Storage storage) {
-    storageInstance = storage;
-  }
 }

+ 20 - 22
contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/CRUDResourceManager.java

@@ -18,12 +18,11 @@
 
 package org.apache.ambari.view.hive.resources;
 
-import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.hive.persistence.IStorageFactory;
 import org.apache.ambari.view.hive.persistence.Storage;
 import org.apache.ambari.view.hive.persistence.utils.FilteringStrategy;
 import org.apache.ambari.view.hive.persistence.utils.Indexed;
 import org.apache.ambari.view.hive.persistence.utils.ItemNotFound;
-import org.apache.ambari.view.hive.persistence.utils.StorageUtil;
 import org.apache.ambari.view.hive.utils.ServiceFormattedException;
 
 import java.util.List;
@@ -32,18 +31,20 @@ import java.util.List;
  * CRUD resource manager
  * @param <T> Data type with ID
  */
-abstract public class CRUDResourceManager<T extends Indexed> {
+abstract public class CRUDResourceManager<T extends Indexed> implements IResourceManager<T> {
   //TODO: refactor: generic parameter gets Fabric for Indexed objects, not objects itself
   private Storage storage = null;
 
   protected final Class<? extends T> resourceClass;
+  protected IStorageFactory storageFabric;
 
   /**
    * Constructor
    * @param resourceClass model class
    */
-  public CRUDResourceManager(Class<? extends T> resourceClass) {
+  public CRUDResourceManager(Class<? extends T> resourceClass, IStorageFactory storageFabric) {
     this.resourceClass = resourceClass;
+    this.storageFabric = storageFabric;
   }
   // CRUD operations
 
@@ -52,7 +53,8 @@ abstract public class CRUDResourceManager<T extends Indexed> {
    * @param object object
    * @return model object
    */
-  protected T create(T object) {
+  @Override
+  public T create(T object) {
     object.setId(null);
     return this.save(object);
   }
@@ -63,9 +65,10 @@ abstract public class CRUDResourceManager<T extends Indexed> {
    * @return model object
    * @throws org.apache.ambari.view.hive.persistence.utils.ItemNotFound
    */
-  protected T read(Integer id) throws ItemNotFound {
+  @Override
+  public T read(Object id) throws ItemNotFound {
     T object = null;
-    object = getStorage().load(this.resourceClass, id);
+    object = storageFabric.getStorage().load(this.resourceClass, id);
     if (!checkPermissions(object))
       throw new ItemNotFound();
     return object;
@@ -76,8 +79,9 @@ abstract public class CRUDResourceManager<T extends Indexed> {
    * @param filteringStrategy filtering strategy
    * @return list of filtered objects
    */
-  protected List<T> readAll(FilteringStrategy filteringStrategy) {
-    return getStorage().loadAll(this.resourceClass, filteringStrategy);
+  @Override
+  public List<T> readAll(FilteringStrategy filteringStrategy) {
+    return storageFabric.getStorage().loadAll(this.resourceClass, filteringStrategy);
   }
 
   /**
@@ -87,7 +91,8 @@ abstract public class CRUDResourceManager<T extends Indexed> {
    * @return model object
    * @throws org.apache.ambari.view.hive.persistence.utils.ItemNotFound
    */
-  protected T update(T newObject, Integer id) throws ItemNotFound {
+  @Override
+  public T update(T newObject, String id) throws ItemNotFound {
     newObject.setId(id);
     this.save(newObject);
     return newObject;
@@ -98,29 +103,22 @@ abstract public class CRUDResourceManager<T extends Indexed> {
    * @param resourceId object identifier
    * @throws org.apache.ambari.view.hive.persistence.utils.ItemNotFound
    */
-  protected void delete(Integer resourceId) throws ItemNotFound {
-    if (!getStorage().exists(this.resourceClass, resourceId)) {
+  @Override
+  public void delete(Object resourceId) throws ItemNotFound {
+    if (!storageFabric.getStorage().exists(this.resourceClass, resourceId)) {
       throw new ItemNotFound();
     }
-    getStorage().delete(this.resourceClass, resourceId);
+    storageFabric.getStorage().delete(this.resourceClass, resourceId);
   }
 
   // UTILS
 
   protected T save(T object) {
-    getStorage().store(resourceClass, object);
+    storageFabric.getStorage().store(resourceClass, object);
     return object;
   }
 
-  protected Storage getStorage() {
-    if (storage == null) {
-      storage = StorageUtil.getInstance(getContext()).getStorage();
-    }
-    return storage;
-  }
-
   protected abstract boolean checkPermissions(T object);
-  protected abstract ViewContext getContext();
 
   protected void cleanupAfterErrorAndThrowAgain(Indexed object, ServiceFormattedException e) {
     try {

+ 37 - 0
contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/IResourceManager.java

@@ -0,0 +1,37 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources;
+
+import org.apache.ambari.view.hive.persistence.utils.FilteringStrategy;
+import org.apache.ambari.view.hive.persistence.utils.Indexed;
+import org.apache.ambari.view.hive.persistence.utils.ItemNotFound;
+
+import java.util.List;
+
+/**
+ * Generic CRUD contract for view resources that carry an identifier.
+ * @param <T> resource type with an ID (see {@link Indexed})
+ */
+public interface IResourceManager<T extends Indexed> {
+  /** Persist a new object; its ID is cleared and assigned by storage. */
+  T create(T object);
+
+  /**
+   * Load an object by ID.
+   * @throws ItemNotFound if no object with the given ID exists
+   */
+  T read(Object id) throws ItemNotFound;
+
+  /** Load all objects that conform to the given filtering strategy. */
+  List<T> readAll(FilteringStrategy filteringStrategy);
+
+  /**
+   * Overwrite the object stored under the given ID.
+   * @throws ItemNotFound if no object with the given ID exists
+   */
+  T update(T newObject, String id) throws ItemNotFound;
+
+  /**
+   * Remove an object by ID.
+   * @throws ItemNotFound if no object with the given ID exists
+   */
+  void delete(Object resourceId) throws ItemNotFound;
+}

+ 8 - 12
contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/PersonalCRUDResourceManager.java

@@ -19,6 +19,7 @@
 package org.apache.ambari.view.hive.resources;
 
 import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.hive.persistence.IStorageFactory;
 import org.apache.ambari.view.hive.persistence.utils.ItemNotFound;
 import org.apache.ambari.view.hive.persistence.utils.PersonalResource;
 import org.slf4j.Logger;
@@ -31,24 +32,24 @@ import java.util.concurrent.Callable;
  * @param <T> Data type with ID and Owner
  */
 public class PersonalCRUDResourceManager<T extends PersonalResource> extends CRUDResourceManager<T> {
-  protected ViewContext context;
   protected boolean ignorePermissions = false;
 
   private final static Logger LOG =
       LoggerFactory.getLogger(PersonalCRUDResourceManager.class);
+  protected ViewContext context;
+
   /**
    * Constructor
    * @param resourceClass model class
-   * @param context View Context instance
    */
-  public PersonalCRUDResourceManager(Class<? extends T> resourceClass, ViewContext context) {
-    super(resourceClass);
+  public PersonalCRUDResourceManager(Class<? extends T> resourceClass, IStorageFactory storageFabric, ViewContext context) {
+    super(resourceClass, storageFabric);
     this.context = context;
   }
 
   @Override
-  public T update(T newObject, Integer id) throws ItemNotFound {
-    T object = getStorage().load(this.resourceClass, id);
+  public T update(T newObject, String id) throws ItemNotFound {
+    T object = storageFabric.getStorage().load(this.resourceClass, id);
     if (object.getOwner().compareTo(this.context.getUsername()) != 0) {
       throw new ItemNotFound();
     }
@@ -74,18 +75,13 @@ public class PersonalCRUDResourceManager<T extends PersonalResource> extends CRU
     return object.getOwner().compareTo(this.context.getUsername()) == 0;
   }
 
-  @Override
-  public ViewContext getContext() {
-    return context;
-  }
-
   /**
    * Execute action ignoring objects owner
    * @param actions callable to execute
    * @return value returned from actions
    * @throws Exception
    */
-  public <T> T ignorePermissions(Callable<T> actions) throws Exception {
+  public T ignorePermissions(Callable<T> actions) throws Exception {
     ignorePermissions = true;
     T result;
     try {

+ 3 - 9
contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/SharedCRUDResourceManager.java

@@ -19,6 +19,7 @@
 package org.apache.ambari.view.hive.resources;
 
 import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.hive.persistence.IStorageFactory;
 import org.apache.ambari.view.hive.persistence.utils.Indexed;
 
 /**
@@ -31,20 +32,13 @@ public class SharedCRUDResourceManager<T extends Indexed> extends CRUDResourceMa
   /**
    * Constructor
    * @param responseClass model class
-   * @param context View Context instance
    */
-  public SharedCRUDResourceManager(Class<T> responseClass, ViewContext context) {
-    super(responseClass);
-    this.context = context;
+  public SharedCRUDResourceManager(Class<T> responseClass, IStorageFactory storageFabric) {
+    super(responseClass, storageFabric);
   }
 
   @Override
   protected boolean checkPermissions(T object) {
     return true; //everyone has permission
   }
-
-  @Override
-  protected ViewContext getContext() {
-    return context;
-  }
 }

+ 16 - 8
contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/browser/HiveBrowserService.java

@@ -21,13 +21,13 @@ package org.apache.ambari.view.hive.resources.browser;
 import com.google.inject.Inject;
 import org.apache.ambari.view.ViewContext;
 import org.apache.ambari.view.ViewResourceHandler;
-import org.apache.ambari.view.hive.BaseService;
 import org.apache.ambari.view.hive.client.ColumnDescription;
-import org.apache.ambari.view.hive.client.ConnectionPool;
 import org.apache.ambari.view.hive.client.Cursor;
+import org.apache.ambari.view.hive.client.IConnectionFactory;
 import org.apache.ambari.view.hive.resources.jobs.ResultsPaginationController;
 import org.apache.ambari.view.hive.utils.BadRequestFormattedException;
 import org.apache.ambari.view.hive.utils.ServiceFormattedException;
+import org.apache.ambari.view.hive.utils.SharedObjectsFactory;
 import org.apache.commons.collections4.map.PassiveExpiringMap;
 import org.json.simple.JSONObject;
 import org.slf4j.Logger;
@@ -55,6 +55,8 @@ public class HiveBrowserService {
 
   private static final long EXPIRING_TIME = 10*60*1000;  // 10 minutes
   private static Map<String, Cursor> resultsCache;
+  private IConnectionFactory connectionFactory;
+
   public static Map<String, Cursor> getResultsCache() {
     if (resultsCache == null) {
       PassiveExpiringMap<String, Cursor> resultsCacheExpiringMap =
@@ -64,6 +66,12 @@ public class HiveBrowserService {
     return resultsCache;
   }
 
+  /**
+   * Lazily creates and returns the shared connection factory.
+   * BUG FIX: the original cached the factory in {@code connectionFactory}
+   * but then returned a brand-new SharedObjectsFactory on every call,
+   * defeating the cache; return the cached instance instead.
+   */
+  private IConnectionFactory getConnectionFactory() {
+    if (connectionFactory == null)
+      connectionFactory = new SharedObjectsFactory(context);
+    return connectionFactory;
+  }
+
   /**
    * Returns list of databases
    */
@@ -81,7 +89,7 @@ public class HiveBrowserService {
     String curl = null;
     try {
       JSONObject response = new JSONObject();
-      List<String> tables = ConnectionPool.getConnection(context).ddl().getDBList(like);
+      List<String> tables = getConnectionFactory().getHiveConnection().ddl().getDBList(like);
       response.put("databases", tables);
       return Response.ok(response).build();
     } catch (WebApplicationException ex) {
@@ -116,7 +124,7 @@ public class HiveBrowserService {
               new Callable<Cursor>() {
                 @Override
                 public Cursor call() throws Exception {
-                  return ConnectionPool.getConnection(context).ddl().getDBListCursor(finalLike);
+                  return getConnectionFactory().getHiveConnection().ddl().getDBListCursor(finalLike);
                 }
               }).build();
     } catch (WebApplicationException ex) {
@@ -146,7 +154,7 @@ public class HiveBrowserService {
     String curl = null;
     try {
       JSONObject response = new JSONObject();
-      List<String> tables = ConnectionPool.getConnection(context).ddl().getTableList(db, like);
+      List<String> tables = getConnectionFactory().getHiveConnection().ddl().getTableList(db, like);
       response.put("tables", tables);
       response.put("database", db);
       return Response.ok(response).build();
@@ -183,7 +191,7 @@ public class HiveBrowserService {
               new Callable<Cursor>() {
                 @Override
                 public Cursor call() throws Exception {
-                  Cursor cursor = ConnectionPool.getConnection(context).ddl().getTableListCursor(db, finalLike);
+                  Cursor cursor = getConnectionFactory().getHiveConnection().ddl().getTableListCursor(db, finalLike);
                   cursor.selectColumns(requestedColumns);
                   return cursor;
                 }
@@ -212,7 +220,7 @@ public class HiveBrowserService {
     String curl = null;
     try {
       JSONObject response = new JSONObject();
-      List<ColumnDescription> columnDescriptions = ConnectionPool.getConnection(context).ddl()
+      List<ColumnDescription> columnDescriptions = getConnectionFactory().getHiveConnection().ddl()
           .getTableDescription(db, table, like, extendedTableDescription);
       response.put("columns", columnDescriptions);
       response.put("database", db);
@@ -247,7 +255,7 @@ public class HiveBrowserService {
               new Callable<Cursor>() {
                 @Override
                 public Cursor call() throws Exception {
-                  Cursor cursor = ConnectionPool.getConnection(context).ddl().getTableDescriptionCursor(db, table, like);
+                  Cursor cursor = getConnectionFactory().getHiveConnection().ddl().getTableDescriptionCursor(db, table, like);
                   cursor.selectColumns(requestedColumns);
                   return cursor;
                 }

+ 37 - 10
contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/files/FileService.java

@@ -23,6 +23,7 @@ import org.apache.ambari.view.ViewContext;
 import org.apache.ambari.view.ViewResourceHandler;
 import org.apache.ambari.view.hive.BaseService;
 import org.apache.ambari.view.hive.utils.*;
+import org.apache.commons.codec.binary.Base64;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileAlreadyExistsException;
 import org.json.simple.JSONObject;
@@ -51,6 +52,8 @@ import java.io.IOException;
  *      update file content
  */
 public class FileService extends BaseService {
+  public static final String FAKE_FILE = "fakefile://";
+
   @Inject
   ViewResourceHandler handler;
 
@@ -66,17 +69,23 @@ public class FileService extends BaseService {
   public Response getFilePage(@PathParam("filePath") String filePath, @QueryParam("page") Long page) throws IOException, InterruptedException {
     LOG.debug("Reading file " + filePath);
     try {
-      FilePaginator paginator = new FilePaginator(filePath, context);
+      FileResource file = new FileResource();
 
       if (page == null)
         page = 0L;
 
-      FileResource file = new FileResource();
-      file.setFilePath(filePath);
-      file.setFileContent(paginator.readPage(page));
-      file.setHasNext(paginator.pageCount() > page + 1);
-      file.setPage(page);
-      file.setPageCount(paginator.pageCount());
+      if (filePath.startsWith(FAKE_FILE)) {
+        if (page > 1)
+          throw new IllegalArgumentException("There's only one page in fake files");
+
+        String content = filePath.substring(FAKE_FILE.length());
+
+        fillFakeFileObject(filePath, file, content);
+      } else {
+        FilePaginator paginator = new FilePaginator(filePath, getSharedObjectsFactory().getHdfsApi());
+
+        fillRealFileObject(filePath, page, file, paginator);
+      }
 
       JSONObject object = new JSONObject();
       object.put("file", file);
@@ -92,6 +101,24 @@ public class FileService extends BaseService {
     }
   }
 
+  /**
+   * Populates a FileResource from an HDFS-backed file using the paginator:
+   * one page of content plus paging metadata (current page, page count,
+   * whether more pages follow).
+   */
+  public void fillRealFileObject(String filePath, Long page, FileResource file, FilePaginator paginator) throws IOException, InterruptedException {
+    file.setFilePath(filePath);
+    file.setFileContent(paginator.readPage(page));
+    file.setHasNext(paginator.pageCount() > page + 1);
+    file.setPage(page);
+    file.setPageCount(paginator.pageCount());
+  }
+
+  /**
+   * Populates a FileResource from an in-URL "fake file": the content is
+   * carried Base64-encoded in the path itself, so the resource always has
+   * exactly one page and no next page.
+   */
+  public void fillFakeFileObject(String filePath, FileResource file, String encodedContent) {
+    // NOTE(review): new String(byte[]) uses the platform default charset;
+    // consider specifying UTF-8 explicitly — confirm expected encoding.
+    String content = new String(Base64.decodeBase64(encodedContent));
+
+    file.setFilePath(filePath);
+    file.setFileContent(content);
+    file.setHasNext(false);
+    file.setPage(0);
+    file.setPageCount(1);
+  }
+
   /**
    * Delete single item
    */
@@ -100,7 +127,7 @@ public class FileService extends BaseService {
   public Response deleteFile(@PathParam("filePath") String filePath) throws IOException, InterruptedException {
     try {
       LOG.debug("Deleting file " + filePath);
-      if (getHdfsApi().delete(filePath, false)) {
+      if (getSharedObjectsFactory().getHdfsApi().delete(filePath, false)) {
         return Response.status(204).build();
       }
       throw new NotFoundFormattedException("FileSystem.delete returned false", null);
@@ -121,7 +148,7 @@ public class FileService extends BaseService {
                              @PathParam("filePath") String filePath) throws IOException, InterruptedException {
     try {
       LOG.debug("Rewriting file " + filePath);
-      FSDataOutputStream output = getHdfsApi().create(filePath, true);
+      FSDataOutputStream output = getSharedObjectsFactory().getHdfsApi().create(filePath, true);
       output.writeBytes(request.file.getFileContent());
       output.close();
       return Response.status(204).build();
@@ -143,7 +170,7 @@ public class FileService extends BaseService {
     try {
       LOG.debug("Creating file " + request.file.getFilePath());
       try {
-        FSDataOutputStream output = getHdfsApi().create(request.file.getFilePath(), false);
+        FSDataOutputStream output = getSharedObjectsFactory().getHdfsApi().create(request.file.getFilePath(), false);
         if (request.file.getFileContent() != null) {
           output.writeBytes(request.file.getFileContent());
         }

+ 210 - 0
contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/Aggregator.java

@@ -0,0 +1,210 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.jobs;
+
+import org.apache.ambari.view.hive.persistence.utils.FilteringStrategy;
+import org.apache.ambari.view.hive.persistence.utils.Indexed;
+import org.apache.ambari.view.hive.persistence.utils.ItemNotFound;
+import org.apache.ambari.view.hive.persistence.utils.OnlyOwnersFilteringStrategy;
+import org.apache.ambari.view.hive.resources.IResourceManager;
+import org.apache.ambari.view.hive.resources.jobs.atsJobs.HiveQueryId;
+import org.apache.ambari.view.hive.resources.jobs.atsJobs.IATSParser;
+import org.apache.ambari.view.hive.resources.jobs.atsJobs.TezDagId;
+import org.apache.ambari.view.hive.resources.jobs.viewJobs.Job;
+import org.apache.ambari.view.hive.resources.jobs.viewJobs.JobImpl;
+import org.apache.commons.beanutils.PropertyUtils;
+import org.apache.commons.codec.binary.Base64;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.lang.reflect.InvocationTargetException;
+import java.util.HashSet;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Set;
+
+/**
+ * Merges jobs started from the Hive View with jobs reported by the YARN
+ * Application Timeline Server (ATS).
+ *
+ * Not every view job produces an ATS entry (e.g. queries Hive answers
+ * without launching a DAG), and some ATS entries have no matching view
+ * job (queries executed through HiveServer2 outside the view), so both
+ * sources are aggregated here, linked by the HS2 operation id.
+ */
+public class Aggregator {
+  protected final static Logger LOG =
+      LoggerFactory.getLogger(Aggregator.class);
+
+  private final IATSParser ats;
+  private final IOperationHandleResourceManager operationHandleResourceManager;
+  private IResourceManager<Job> viewJobResourceManager;
+
+  public Aggregator(IResourceManager<Job> jobResourceManager,
+                    IOperationHandleResourceManager operationHandleResourceManager,
+                    IATSParser ats) {
+    this.viewJobResourceManager = jobResourceManager;
+    this.operationHandleResourceManager = operationHandleResourceManager;
+    this.ats = ats;
+  }
+
+  /**
+   * Reads all jobs of a user: ATS queries merged with their view jobs
+   * where an operation id links them, followed by view jobs whose
+   * operation never reached ATS.
+   * @param username job owner
+   * @return aggregated list of jobs
+   */
+  public List<Job> readAll(String username) {
+    Set<String> addedOperationIds = new HashSet<String>();
+
+    List<Job> allJobs = new LinkedList<Job>();
+    // (sic) "Quiery" typo is part of the IATSParser API name
+    for (HiveQueryId atsHiveQuery : ats.getHiveQuieryIdsList(username)) {
+
+      TezDagId atsTezDag;
+      if (atsHiveQuery.dagNames != null && atsHiveQuery.dagNames.size() > 0) {
+        String dagName = atsHiveQuery.dagNames.get(0);
+
+        atsTezDag = ats.getTezDAGByName(dagName);
+      } else {
+        // Query produced no DAG; empty placeholder carries STATUS_UNKNOWN
+        atsTezDag = new TezDagId();
+      }
+
+      JobImpl atsJob;
+      if (hasOperationId(atsHiveQuery)) {
+        try {
+          Job viewJob = getJobByOperationId(urlSafeBase64ToHexString(atsHiveQuery.operationId));
+          saveJobInfoIfNeeded(atsHiveQuery, atsTezDag, viewJob);
+
+          atsJob = mergeAtsJobWithViewJob(atsHiveQuery, atsTezDag, viewJob);
+        } catch (ItemNotFound itemNotFound) {
+          // Executed from HS2, but outside of Hive View
+          atsJob = atsOnlyJob(atsHiveQuery, atsTezDag);
+        }
+      } else {
+        atsJob = atsOnlyJob(atsHiveQuery, atsTezDag);
+      }
+      allJobs.add(atsJob);
+
+      addedOperationIds.add(atsHiveQuery.operationId);
+    }
+
+    //cover case when operationId is present, but not exists in ATS
+    //e.g. optimized queries without executing jobs, like "SELECT * FROM TABLE"
+    for (Job job : viewJobResourceManager.readAll(new OnlyOwnersFilteringStrategy(username))) {
+      List<StoredOperationHandle> operationHandles = operationHandleResourceManager.readJobRelatedHandles(job);
+      assert operationHandles.size() <= 1;
+
+      if (operationHandles.size() > 0) {
+        StoredOperationHandle operationHandle = operationHandles.get(0);
+
+        if (!addedOperationIds.contains(hexStringToUrlSafeBase64(operationHandle.getGuid()))) {
+          //e.g. query without hadoop job: select * from table
+          allJobs.add(job);
+        }
+      }
+    }
+
+    return allJobs;
+  }
+
+  protected boolean hasOperationId(HiveQueryId atsHiveQuery) {
+    return atsHiveQuery.operationId != null;
+  }
+
+  /**
+   * Copies a view job and overlays the ATS-known fields on the copy.
+   * @return merged job, or null if the view job could not be cloned
+   */
+  protected JobImpl mergeAtsJobWithViewJob(HiveQueryId atsHiveQuery, TezDagId atsTezDag, Job viewJob) {
+    JobImpl atsJob;
+    try {
+      atsJob = new JobImpl(PropertyUtils.describe(viewJob));
+    } catch (IllegalAccessException e) {
+      LOG.error("Can't instantiate JobImpl", e);
+      return null;
+    } catch (InvocationTargetException e) {
+      LOG.error("Can't instantiate JobImpl", e);
+      return null;
+    } catch (NoSuchMethodException e) {
+      LOG.error("Can't instantiate JobImpl", e);
+      return null;
+    }
+    fillAtsJobFields(atsJob, atsHiveQuery, atsTezDag);
+    return atsJob;
+  }
+
+  /**
+   * Persists ATS-provided DAG name and status on the view job when stale.
+   */
+  protected void saveJobInfoIfNeeded(HiveQueryId hiveQueryId, TezDagId tezDagId, Job viewJob) throws ItemNotFound {
+    if (viewJob.getDagName() == null) {
+      viewJob.setDagName(tezDagId.dagName);
+      viewJobResourceManager.update(viewJob, viewJob.getId());
+    }
+    // BUG FIX: the original condition updated the job when statuses were
+    // EQUAL — a pure no-op write that also missed every real status
+    // transition.  Update only on change, and never overwrite a known
+    // status with the UNKNOWN placeholder (same guard as fillAtsJobFields).
+    if (!viewJob.getStatus().equals(tezDagId.status) && !tezDagId.status.equals(TezDagId.STATUS_UNKNOWN)) {
+      viewJob.setStatus(tezDagId.status);
+      viewJobResourceManager.update(viewJob, viewJob.getId());
+    }
+  }
+
+  /**
+   * Builds a job representation for an ATS query with no view counterpart.
+   */
+  protected JobImpl atsOnlyJob(HiveQueryId atsHiveQuery, TezDagId atsTezDag) {
+    JobImpl atsJob = new JobImpl();
+    atsJob.setId(atsHiveQuery.entity);
+    fillAtsJobFields(atsJob, atsHiveQuery, atsTezDag);
+
+    String query = atsHiveQuery.query;
+    // Title is the first 42 characters of the query text
+    atsJob.setTitle(query.substring(0, Math.min(42, query.length())));
+
+    // No real query file exists; embed the query itself into a fake URL
+    atsJob.setQueryFile("fakefile://" + Base64.encodeBase64URLSafeString(query.getBytes()));  // fake queryFile
+    return atsJob;
+  }
+
+  /** Copies the ATS-side fields (app id, DAG, status, timing) onto the job. */
+  protected JobImpl fillAtsJobFields(JobImpl atsJob, HiveQueryId atsHiveQuery, TezDagId atsTezDag) {
+    atsJob.setApplicationId(atsTezDag.applicationId);
+
+    atsJob.setDagName(atsTezDag.dagName);
+    if (!atsTezDag.status.equals(TezDagId.STATUS_UNKNOWN))
+      atsJob.setStatus(atsTezDag.status);
+    if (atsHiveQuery.starttime != 0)
+      atsJob.setDateSubmitted(atsHiveQuery.starttime);
+    atsJob.setDuration(atsHiveQuery.duration);
+    return atsJob;
+  }
+
+  /**
+   * Finds the single view job associated with a HiveServer2 operation id.
+   * @param opId operation id as a hex string
+   * @throws ItemNotFound when zero or several matching handles exist
+   */
+  protected Job getJobByOperationId(final String opId) throws ItemNotFound {
+    List<StoredOperationHandle> operationHandles = operationHandleResourceManager.readAll(new FilteringStrategy() {
+      @Override
+      public boolean isConform(Indexed item) {
+        StoredOperationHandle opHandle = (StoredOperationHandle) item;
+        return opHandle.getGuid().equals(opId);
+      }
+
+      @Override
+      public String whereStatement() {
+        return "guid='" + opId + "'";
+      }
+    });
+
+    if (operationHandles.size() != 1)
+      throw new ItemNotFound();
+
+    return viewJobResourceManager.read(operationHandles.get(0).getJobId());
+  }
+
+  /** Converts a URL-safe Base64 string to its lower-case hex representation. */
+  protected static String urlSafeBase64ToHexString(String urlsafeBase64) {
+    byte[] decoded = Base64.decodeBase64(urlsafeBase64);
+
+    StringBuilder sb = new StringBuilder();
+    for (byte b : decoded) {
+      sb.append(String.format("%02x", b));
+    }
+    return sb.toString();
+  }
+
+  /** Converts a hex string back to URL-safe Base64 (inverse of the above). */
+  protected static String hexStringToUrlSafeBase64(String hexString) {
+    byte[] decoded = new byte[hexString.length() / 2];
+
+    for (int i = 0; i < hexString.length(); i += 2) {
+      decoded[i / 2] = (byte) Integer.parseInt(String.format("%c%c", hexString.charAt(i), hexString.charAt(i + 1)), 16);
+    }
+    return Base64.encodeBase64URLSafeString(decoded);
+  }
+}

+ 4 - 17
contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/ConnectionController.java

@@ -18,32 +18,19 @@
 
 package org.apache.ambari.view.hive.resources.jobs;
 
-import org.apache.ambari.view.ViewContext;
 import org.apache.ambari.view.hive.client.Connection;
-import org.apache.ambari.view.hive.client.ConnectionPool;
 import org.apache.ambari.view.hive.client.HiveClientException;
 import org.apache.ambari.view.hive.utils.ServiceFormattedException;
 import org.apache.hive.service.cli.thrift.TOperationHandle;
 
-import java.util.HashMap;
-import java.util.Map;
 
 public class ConnectionController {
-  private ViewContext context;
-  private Connection connection;
   private OperationHandleControllerFactory operationHandleControllerFactory;
+  private Connection connection;
 
-  private ConnectionController(ViewContext context) {
-    this.context = context;
-    connection = ConnectionPool.getConnection(context);
-    operationHandleControllerFactory = OperationHandleControllerFactory.getInstance(context);
-  }
-
-  private static Map<String, ConnectionController> viewSingletonObjects = new HashMap<String, ConnectionController>();
-  public static ConnectionController getInstance(ViewContext context) {
-    if (!viewSingletonObjects.containsKey(context.getInstanceName()))
-      viewSingletonObjects.put(context.getInstanceName(), new ConnectionController(context));
-    return viewSingletonObjects.get(context.getInstanceName());
+  public ConnectionController(OperationHandleControllerFactory operationHandleControllerFactory, Connection connection) {
+    this.connection = connection;
+    this.operationHandleControllerFactory = operationHandleControllerFactory;
   }
 
   public void selectDatabase(String database) {

+ 36 - 0
contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/IOperationHandleResourceManager.java

@@ -0,0 +1,36 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.jobs;
+
+import org.apache.ambari.view.hive.persistence.utils.ItemNotFound;
+import org.apache.ambari.view.hive.resources.IResourceManager;
+import org.apache.ambari.view.hive.resources.jobs.viewJobs.Job;
+import org.apache.hive.service.cli.thrift.TOperationHandle;
+
+import java.util.List;
+
+public interface IOperationHandleResourceManager extends IResourceManager<StoredOperationHandle> {
+  List<StoredOperationHandle> readJobRelatedHandles(Job job);
+
+  void putHandleForJob(TOperationHandle h, Job job);
+
+  boolean containsHandleForJob(Job job);
+
+  TOperationHandle getHandleForJob(Job job) throws ItemNotFound;
+}

+ 7 - 5
contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/JobResourceProvider.java

@@ -22,6 +22,8 @@ import com.google.inject.Inject;
 import org.apache.ambari.view.*;
 import org.apache.ambari.view.hive.persistence.utils.ItemNotFound;
 import org.apache.ambari.view.hive.persistence.utils.OnlyOwnersFilteringStrategy;
+import org.apache.ambari.view.hive.resources.jobs.viewJobs.*;
+import org.apache.ambari.view.hive.utils.SharedObjectsFactory;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -43,7 +45,7 @@ public class JobResourceProvider implements ResourceProvider<Job> {
 
   protected synchronized JobResourceManager getResourceManager() {
     if (resourceManager == null) {
-      resourceManager = new JobResourceManager(context);
+      resourceManager = new JobResourceManager(new SharedObjectsFactory(context), context);
     }
     return resourceManager;
   }
@@ -51,7 +53,7 @@ public class JobResourceProvider implements ResourceProvider<Job> {
   @Override
   public Job getResource(String resourceId, Set<String> properties) throws SystemException, NoSuchResourceException, UnsupportedPropertyException {
     try {
-      return getResourceManager().read(Integer.valueOf(resourceId));
+      return getResourceManager().read(resourceId);
     } catch (ItemNotFound itemNotFound) {
       throw new NoSuchResourceException(resourceId);
     }
@@ -74,7 +76,7 @@ public class JobResourceProvider implements ResourceProvider<Job> {
       throw new SystemException("error on creating resource", e);
     }
     getResourceManager().create(item);
-    JobController jobController = JobControllerFactory.getInstance(context).createControllerForJob(item);
+    JobController jobController = new SharedObjectsFactory(context).getJobControllerFactory().createControllerForJob(item);
     jobController.submit();
   }
 
@@ -89,7 +91,7 @@ public class JobResourceProvider implements ResourceProvider<Job> {
       throw new SystemException("error on updating resource", e);
     }
     try {
-      getResourceManager().update(item, Integer.valueOf(resourceId));
+      getResourceManager().update(item, resourceId);
     } catch (ItemNotFound itemNotFound) {
       throw new NoSuchResourceException(resourceId);
     }
@@ -99,7 +101,7 @@ public class JobResourceProvider implements ResourceProvider<Job> {
   @Override
   public boolean deleteResource(String resourceId) throws SystemException, NoSuchResourceException, UnsupportedPropertyException {
     try {
-      getResourceManager().delete(Integer.valueOf(resourceId));
+      getResourceManager().delete(resourceId);
     } catch (ItemNotFound itemNotFound) {
       throw new NoSuchResourceException(resourceId);
     }

+ 26 - 11
contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/JobService.java

@@ -24,7 +24,11 @@ import org.apache.ambari.view.hive.BaseService;
 import org.apache.ambari.view.hive.backgroundjobs.BackgroundJobController;
 import org.apache.ambari.view.hive.client.Cursor;
 import org.apache.ambari.view.hive.persistence.utils.ItemNotFound;
-import org.apache.ambari.view.hive.persistence.utils.OnlyOwnersFilteringStrategy;
+import org.apache.ambari.view.hive.resources.jobs.atsJobs.ATSRequestsDelegate;
+import org.apache.ambari.view.hive.resources.jobs.atsJobs.ATSRequestsDelegateImpl;
+import org.apache.ambari.view.hive.resources.jobs.atsJobs.ATSParser;
+import org.apache.ambari.view.hive.resources.jobs.atsJobs.IATSParser;
+import org.apache.ambari.view.hive.resources.jobs.viewJobs.*;
 import org.apache.ambari.view.hive.utils.*;
 import org.apache.ambari.view.hive.utils.HdfsApi;
 import org.apache.commons.beanutils.PropertyUtils;
@@ -59,16 +63,25 @@ public class JobService extends BaseService {
   ViewResourceHandler handler;
 
   protected JobResourceManager resourceManager;
+  private IOperationHandleResourceManager opHandleResourceManager;
   protected final static Logger LOG =
       LoggerFactory.getLogger(JobService.class);
 
   protected synchronized JobResourceManager getResourceManager() {
     if (resourceManager == null) {
-      resourceManager = new JobResourceManager(context);
+      SharedObjectsFactory connectionsFactory = getSharedObjectsFactory();
+      resourceManager = new JobResourceManager(connectionsFactory, context);
     }
     return resourceManager;
   }
 
+  private IOperationHandleResourceManager getOperationHandleResourceManager() {
+    if (opHandleResourceManager == null) {
+      opHandleResourceManager = new OperationHandleResourceManager(getSharedObjectsFactory());
+    }
+    return opHandleResourceManager;
+  }
+
   /**
    * Get single item
    */
@@ -77,7 +90,7 @@ public class JobService extends BaseService {
   @Produces(MediaType.APPLICATION_JSON)
   public Response getOne(@PathParam("jobId") String jobId) {
     try {
-      JobController jobController = getResourceManager().readController(Integer.valueOf(jobId));
+      JobController jobController = getResourceManager().readController(jobId);
 
       JSONObject jsonJob = jsonObjectFromJob(jobController);
 
@@ -110,7 +123,7 @@ public class JobService extends BaseService {
                                 @Context HttpServletResponse response,
                                 @QueryParam("columns") final String requestedColumns) {
     try {
-      JobController jobController = getResourceManager().readController(Integer.valueOf(jobId));
+      JobController jobController = getResourceManager().readController(jobId);
       final Cursor resultSet = jobController.getResults();
       resultSet.selectColumns(requestedColumns);
 
@@ -153,7 +166,7 @@ public class JobService extends BaseService {
                                    @QueryParam("columns") final String requestedColumns,
                                    @Context HttpServletResponse response) {
     try {
-      final JobController jobController = getResourceManager().readController(Integer.valueOf(jobId));
+      final JobController jobController = getResourceManager().readController(jobId);
 
       String backgroundJobId = "csv" + String.valueOf(jobController.getJob().getId());
       if (commence != null && commence.equals("true")) {
@@ -167,7 +180,7 @@ public class JobService extends BaseService {
               Cursor resultSet = jobController.getResults();
               resultSet.selectColumns(requestedColumns);
 
-              FSDataOutputStream stream = HdfsApi.getInstance(context).create(targetFile, true);
+              FSDataOutputStream stream = getSharedObjectsFactory().getHdfsApi().create(targetFile, true);
               Writer writer = new BufferedWriter(new OutputStreamWriter(stream));
               CSVPrinter csvPrinter = new CSVPrinter(writer, CSVFormat.DEFAULT);
               try {
@@ -225,7 +238,7 @@ public class JobService extends BaseService {
                              @QueryParam("searchId") String searchId,
                              @QueryParam("columns") final String requestedColumns) {
     try {
-      final JobController jobController = getResourceManager().readController(Integer.valueOf(jobId));
+      final JobController jobController = getResourceManager().readController(jobId);
 
       return ResultsPaginationController.getInstance(context)
            .request(jobId, searchId, true, fromBeginning, count,
@@ -276,13 +289,13 @@ public class JobService extends BaseService {
     try {
       JobController jobController;
       try {
-        jobController = getResourceManager().readController(Integer.valueOf(id));
+        jobController = getResourceManager().readController(id);
       } catch (ItemNotFound itemNotFound) {
         throw new NotFoundFormattedException(itemNotFound.getMessage(), itemNotFound);
       }
       jobController.cancel();
       if (remove != null && remove.compareTo("true") == 0) {
-        getResourceManager().delete(Integer.valueOf(id));
+        getResourceManager().delete(id);
       }
 //      getResourceManager().delete(Integer.valueOf(queryId));
       return Response.status(204).build();
@@ -303,8 +316,10 @@ public class JobService extends BaseService {
   public Response getList() {
     try {
       LOG.debug("Getting all job");
-      List allJobs = getResourceManager().readAll(
-          new OnlyOwnersFilteringStrategy(this.context.getUsername()));  //TODO: move strategy to PersonalCRUDRM
+      ATSRequestsDelegate transport = new ATSRequestsDelegateImpl(context, "http://127.0.0.1:8188");
+      IATSParser atsParser = new ATSParser(transport);
+      Aggregator aggregator = new Aggregator(getResourceManager(), getOperationHandleResourceManager(), atsParser);
+      List allJobs = aggregator.readAll(context.getUsername());
 
       JSONObject object = new JSONObject();
       object.put("jobs", allJobs);

+ 52 - 27
contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/LogParser.java

@@ -23,29 +23,35 @@ import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
 public class LogParser {
-  public static final Pattern HADOOP_MR_JOBS_RE = Pattern.compile("(http[^\\s]*/proxy/([a-z0-9_]+?)/)");
-  public static final Pattern HADOOP_TEZ_JOBS_RE = Pattern.compile("\\(Executing on YARN cluster with App id ([a-z0-9_]+?)\\)");
-  private LinkedHashSet<JobId> jobsList;
+  public static final Pattern HADOOP_MR_APPS_RE = Pattern.compile("(http[^\\s]*/proxy/([a-z0-9_]+?)/)");
+  public static final Pattern HADOOP_TEZ_APPS_RE = Pattern.compile("\\(Executing on YARN cluster with App id ([a-z0-9_]+?)\\)");
+  private LinkedHashSet<AppId> appsList;
+
+  private LogParser() {}
 
   public static LogParser parseLog(String logs) {
     LogParser parser = new LogParser();
 
-    LinkedHashSet<JobId> mrJobIds = getMRJobIds(logs);
-    LinkedHashSet<JobId> tezJobIds = getTezJobIds(logs);
+    parser.setAppsList(parseApps(logs, parser));
+    return parser;
+  }
 
-    LinkedHashSet<JobId> jobIds = new LinkedHashSet<JobId>();
-    jobIds.addAll(mrJobIds);
-    jobIds.addAll(tezJobIds);
+  public static LinkedHashSet<AppId> parseApps(String logs, LogParser parser) {
+    LinkedHashSet<AppId> mrAppIds = getMRAppIds(logs);
+    LinkedHashSet<AppId> tezAppIds = getTezAppIds(logs);
 
-    parser.setJobsList(jobIds);
-    return parser;
+    LinkedHashSet<AppId> appIds = new LinkedHashSet<AppId>();
+    appIds.addAll(mrAppIds);
+    appIds.addAll(tezAppIds);
+
+    return appIds;
   }
 
-  private static LinkedHashSet<JobId> getMRJobIds(String logs) {
-    Matcher m = HADOOP_MR_JOBS_RE.matcher(logs);
-    LinkedHashSet<JobId> list = new LinkedHashSet<JobId>();
+  private static LinkedHashSet<AppId> getMRAppIds(String logs) {
+    Matcher m = HADOOP_MR_APPS_RE.matcher(logs);
+    LinkedHashSet<AppId> list = new LinkedHashSet<AppId>();
     while (m.find()) {
-      JobId applicationInfo = new JobId();
+      AppId applicationInfo = new AppId();
       applicationInfo.setTrackingUrl(m.group(1));
       applicationInfo.setIdentifier(m.group(2));
       list.add(applicationInfo);
@@ -53,27 +59,34 @@ public class LogParser {
     return list;
   }
 
-  private static LinkedHashSet<JobId> getTezJobIds(String logs) {
-    Matcher m = HADOOP_TEZ_JOBS_RE.matcher(logs);
-    LinkedHashSet<JobId> list = new LinkedHashSet<JobId>();
+  private static LinkedHashSet<AppId> getTezAppIds(String logs) {
+    Matcher m = HADOOP_TEZ_APPS_RE.matcher(logs);
+    LinkedHashSet<AppId> list = new LinkedHashSet<AppId>();
     while (m.find()) {
-      JobId applicationInfo = new JobId();
-      applicationInfo.setTrackingUrl(null);
+      AppId applicationInfo = new AppId();
+      applicationInfo.setTrackingUrl("");
       applicationInfo.setIdentifier(m.group(1));
       list.add(applicationInfo);
     }
     return list;
   }
 
-  public void setJobsList(LinkedHashSet<JobId> jobsList) {
-    this.jobsList = jobsList;
+  public void setAppsList(LinkedHashSet<AppId> appsList) {
+    this.appsList = appsList;
+  }
+
+  public LinkedHashSet<AppId> getAppsList() {
+    return appsList;
   }
 
-  public LinkedHashSet<JobId> getJobsList() {
-    return jobsList;
+  public AppId getLastAppInList() {
+    Object[] appIds = appsList.toArray();
+    if (appIds.length == 0)
+      return null;
+    return (AppId) appIds[appsList.size()-1];
   }
 
-  public static class JobId {
+  public static class AppId {
     private String trackingUrl;
     private String identifier;
 
@@ -96,11 +109,11 @@ public class LogParser {
     @Override
     public boolean equals(Object o) {
       if (this == o) return true;
-      if (!(o instanceof JobId)) return false;
+      if (!(o instanceof AppId)) return false;
 
-      JobId jobId = (JobId) o;
+      AppId appId = (AppId) o;
 
-      if (!identifier.equals(jobId.identifier)) return false;
+      if (!identifier.equals(appId.identifier)) return false;
 
       return true;
     }
@@ -110,4 +123,16 @@ public class LogParser {
       return identifier.hashCode();
     }
   }
+
+  public static class EmptyAppId extends AppId {
+    @Override
+    public String getTrackingUrl() {
+      return "";
+    }
+
+    @Override
+    public String getIdentifier() {
+      return "";
+    }
+  }
 }

+ 10 - 12
contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/OperationHandleController.java

@@ -19,10 +19,10 @@
 package org.apache.ambari.view.hive.resources.jobs;
 
 
-import org.apache.ambari.view.ViewContext;
-import org.apache.ambari.view.hive.client.ConnectionPool;
 import org.apache.ambari.view.hive.client.Cursor;
 import org.apache.ambari.view.hive.client.HiveClientException;
+import org.apache.ambari.view.hive.client.IConnectionFactory;
+import org.apache.ambari.view.hive.resources.jobs.viewJobs.Job;
 import org.apache.ambari.view.hive.utils.ServiceFormattedException;
 import org.apache.hive.service.cli.thrift.TGetOperationStatusResp;
 import org.apache.hive.service.cli.thrift.TOperationHandle;
@@ -33,18 +33,16 @@ public class OperationHandleController {
   private final static Logger LOG =
       LoggerFactory.getLogger(OperationHandleController.class);
 
-  private ViewContext context;
+  private IConnectionFactory connectionsFabric;
   private TOperationHandle operationHandle;
-  private OperationHandleResourceManager operationHandlesStorage;
+  private IOperationHandleResourceManager operationHandlesStorage;
 
-  public OperationHandleController(ViewContext context, TOperationHandle storedOperationHandle, OperationHandleResourceManager operationHandlesStorage) {
-    this.context = context;
+  public OperationHandleController(IConnectionFactory connectionsFabric, TOperationHandle storedOperationHandle, IOperationHandleResourceManager operationHandlesStorage) {
+    this.connectionsFabric = connectionsFabric;
     this.operationHandle = storedOperationHandle;
     this.operationHandlesStorage = operationHandlesStorage;
   }
 
-
-
   public TOperationHandle getStoredOperationHandle() {
     return operationHandle;
   }
@@ -54,7 +52,7 @@ public class OperationHandleController {
   }
 
   public String getOperationStatus() throws NoOperationStatusSetException, HiveClientException {
-    TGetOperationStatusResp statusResp = ConnectionPool.getConnection(context).getOperationStatus(operationHandle);
+    TGetOperationStatusResp statusResp = connectionsFabric.getHiveConnection().getOperationStatus(operationHandle);
     if (!statusResp.isSetOperationState()) {
       throw new NoOperationStatusSetException("Operation state is not set");
     }
@@ -93,7 +91,7 @@ public class OperationHandleController {
 
   public void cancel() {
     try {
-      ConnectionPool.getConnection(context).cancelOperation(operationHandle);
+      connectionsFabric.getHiveConnection().cancelOperation(operationHandle);
     } catch (HiveClientException e) {
       throw new ServiceFormattedException("Cancel failed: " + e.toString(), e);
     }
@@ -104,10 +102,10 @@ public class OperationHandleController {
   }
 
   public String getLogs() {
-    return ConnectionPool.getConnection(context).getLogs(operationHandle);
+    return connectionsFabric.getHiveConnection().getLogs(operationHandle);
   }
 
   public Cursor getResults() {
-    return ConnectionPool.getConnection(context).getResults(operationHandle);
+    return connectionsFabric.getHiveConnection().getResults(operationHandle);
   }
 }

+ 8 - 17
contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/OperationHandleControllerFactory.java

@@ -18,31 +18,22 @@
 
 package org.apache.ambari.view.hive.resources.jobs;
 
-import org.apache.ambari.view.ViewContext;
 import org.apache.ambari.view.hive.persistence.utils.ItemNotFound;
+import org.apache.ambari.view.hive.resources.jobs.viewJobs.Job;
+import org.apache.ambari.view.hive.utils.SharedObjectsFactory;
 import org.apache.hive.service.cli.thrift.TOperationHandle;
 
-import java.util.HashMap;
-import java.util.Map;
-
 public class OperationHandleControllerFactory {
-  private ViewContext context;
-  private OperationHandleResourceManager operationHandlesStorage;
-
-  private OperationHandleControllerFactory(ViewContext context) {
-    this.context = context;
-    operationHandlesStorage = new OperationHandleResourceManager(context);
-  }
+  private SharedObjectsFactory connectionsFabric;
+  private IOperationHandleResourceManager operationHandlesStorage;
 
-  private static Map<String, OperationHandleControllerFactory> viewSingletonObjects = new HashMap<String, OperationHandleControllerFactory>();
-  public static OperationHandleControllerFactory getInstance(ViewContext context) {
-    if (!viewSingletonObjects.containsKey(context.getInstanceName()))
-      viewSingletonObjects.put(context.getInstanceName(), new OperationHandleControllerFactory(context));
-    return viewSingletonObjects.get(context.getInstanceName());
+  public OperationHandleControllerFactory(SharedObjectsFactory connectionsFabric) {
+    this.connectionsFabric = connectionsFabric;
+    operationHandlesStorage = new OperationHandleResourceManager(connectionsFabric);
   }
 
   public OperationHandleController createControllerForHandle(TOperationHandle storedOperationHandle) {
-    return new OperationHandleController(context, storedOperationHandle, operationHandlesStorage);
+    return new OperationHandleController(connectionsFabric, storedOperationHandle, operationHandlesStorage);
   }
 
   public OperationHandleController getHandleForJob(Job job) throws ItemNotFound {

+ 22 - 19
contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/OperationHandleResourceManager.java

@@ -18,42 +18,43 @@
 
 package org.apache.ambari.view.hive.resources.jobs;
 
-import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.hive.persistence.IStorageFactory;
 import org.apache.ambari.view.hive.persistence.utils.FilteringStrategy;
 import org.apache.ambari.view.hive.persistence.utils.Indexed;
 import org.apache.ambari.view.hive.persistence.utils.ItemNotFound;
 import org.apache.ambari.view.hive.resources.SharedCRUDResourceManager;
+import org.apache.ambari.view.hive.resources.jobs.viewJobs.Job;
 import org.apache.ambari.view.hive.utils.ServiceFormattedException;
 import org.apache.hive.service.cli.thrift.TOperationHandle;
-import sun.reflect.generics.reflectiveObjects.NotImplementedException;
 
 import java.util.List;
 
-public class OperationHandleResourceManager extends SharedCRUDResourceManager<StoredOperationHandle> {
+public class OperationHandleResourceManager extends SharedCRUDResourceManager<StoredOperationHandle>
+    implements IOperationHandleResourceManager {
   /**
    * Constructor
-   *
-   * @param context       View Context instance
    */
-  public OperationHandleResourceManager(ViewContext context) {
-    super(StoredOperationHandle.class, context);
+  public OperationHandleResourceManager(IStorageFactory storageFabric) {
+    super(StoredOperationHandle.class, storageFabric);
   }
 
+  @Override
   public List<StoredOperationHandle> readJobRelatedHandles(final Job job) {
-    try {
-      return getStorage().loadWhere(StoredOperationHandle.class, "jobId = " + job.getId());
-    } catch (NotImplementedException e) {
-      // fallback to filtering strategy
-      return getStorage().loadAll(StoredOperationHandle.class, new FilteringStrategy() {
-        @Override
-        public boolean isConform(Indexed item) {
-          StoredOperationHandle handle = (StoredOperationHandle) item;
-          return (handle.getJobId() != null && handle.getJobId().equals(job.getId()));
-        }
-      });
-    }
+    return storageFabric.getStorage().loadAll(StoredOperationHandle.class, new FilteringStrategy() {
+      @Override
+      public boolean isConform(Indexed item) {
+        StoredOperationHandle handle = (StoredOperationHandle) item;
+        return (handle.getJobId() != null && handle.getJobId().equals(job.getId()));
+      }
+
+      @Override
+      public String whereStatement() {
+        return "jobId = '" + job.getId() + "'";
+      }
+    });
   }
 
+  @Override
   public void putHandleForJob(TOperationHandle h, Job job) {
     StoredOperationHandle handle = StoredOperationHandle.buildFromTOperationHandle(h);
     handle.setJobId(job.getId());
@@ -71,11 +72,13 @@ public class OperationHandleResourceManager extends SharedCRUDResourceManager<St
     }
   }
 
+  @Override
   public boolean containsHandleForJob(Job job) {
     List<StoredOperationHandle> jobRelatedHandles = readJobRelatedHandles(job);
     return jobRelatedHandles.size() > 0;
   }
 
+  @Override
   public TOperationHandle getHandleForJob(Job job) throws ItemNotFound {
     List<StoredOperationHandle> jobRelatedHandles = readJobRelatedHandles(job);
     if (jobRelatedHandles.size() == 0)

+ 6 - 6
contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/StoredOperationHandle.java

@@ -40,9 +40,9 @@ public class StoredOperationHandle implements Indexed {
   private String guid;
   private String secret;
 
-  private Integer jobId;
+  private String jobId;
 
-  private Integer id;
+  private String id;
 
   public StoredOperationHandle() {}
   public StoredOperationHandle(Map<String, Object> stringObjectMap) throws InvocationTargetException, IllegalAccessException {
@@ -126,21 +126,21 @@ public class StoredOperationHandle implements Indexed {
     this.secret = secret;
   }
 
-  public Integer getJobId() {
+  public String getJobId() {
     return jobId;
   }
 
-  public void setJobId(Integer jobId) {
+  public void setJobId(String jobId) {
     this.jobId = jobId;
   }
 
   @Override
-  public Integer getId() {
+  public String getId() {
     return id;
   }
 
   @Override
-  public void setId(Integer id) {
+  public void setId(String id) {
     this.id = id;
   }
 }

+ 139 - 0
contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/atsJobs/ATSParser.java

@@ -0,0 +1,139 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.jobs.atsJobs;
+
+import org.apache.ambari.view.hive.utils.ServiceFormattedException;
+import org.json.simple.JSONArray;
+import org.json.simple.JSONObject;
+import org.json.simple.JSONValue;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.LinkedList;
+import java.util.List;
+
+public class ATSParser implements IATSParser {
+  protected final static Logger LOG =
+      LoggerFactory.getLogger(ATSParser.class);
+
+  private ATSRequestsDelegate delegate;
+
+  private static final long MillisInSecond = 1000L;
+
+  public ATSParser(ATSRequestsDelegate delegate) {
+    this.delegate = delegate;
+  }
+
+  @Override
+  public List<HiveQueryId> getHiveQuieryIdsList(String username) {
+    JSONObject entities = delegate.hiveQueryIdList(username);
+    JSONArray jobs = (JSONArray) entities.get("entities");
+
+    List<HiveQueryId> parsedJobs = new LinkedList<HiveQueryId>();
+    for(Object job : jobs) {
+      try {
+        HiveQueryId parsedJob = parseAtsHiveJob((JSONObject) job);
+        parsedJobs.add(parsedJob);
+      } catch (Exception ex) {
+        LOG.error("Error while parsing ATS job", ex);
+      }
+    }
+
+    return parsedJobs;
+  }
+
+  @Override
+  public HiveQueryId getHiveQuieryIdByOperationId(byte[] guid) {
+    String guidString = new String(guid);
+    JSONObject entities = delegate.hiveQueryIdByOperationId(guidString);
+    JSONArray jobs = (JSONArray) entities.get("entities");
+
+    assert jobs.size() <= 1;
+    if (jobs.size() == 0) {
+      //TODO: throw appropriate exception
+      throw new ServiceFormattedException("HIVE_QUERY_ID with operationid=" + guidString + " not found");
+    }
+
+    return parseAtsHiveJob((JSONObject) jobs.get(0));
+  }
+
+  @Override
+  public TezDagId getTezDAGByName(String name) {
+    JSONArray tezDagEntities = (JSONArray) delegate.tezDagByName(name).get("entities");
+    assert tezDagEntities.size() <= 1;
+    if (tezDagEntities.size() == 0) {
+      return new TezDagId();
+    }
+    JSONObject tezDagEntity = (JSONObject) tezDagEntities.get(0);
+
+    TezDagId parsedDag = new TezDagId();
+    JSONArray applicationIds = (JSONArray) ((JSONObject) tezDagEntity.get("primaryfilters")).get("applicationId");
+    parsedDag.applicationId = (String) applicationIds.get(0);
+    parsedDag.status = (String) ((JSONObject) tezDagEntity.get("otherinfo")).get("status");
+    return parsedDag;
+  }
+
+  private HiveQueryId parseAtsHiveJob(JSONObject job) {
+    HiveQueryId parsedJob = new HiveQueryId();
+
+    parsedJob.entity = (String) job.get("entity");
+    parsedJob.starttime = ((Long) job.get("starttime")) / MillisInSecond;
+
+    JSONObject primaryfilters = (JSONObject) job.get("primaryfilters");
+    JSONArray operationIds = (JSONArray) primaryfilters.get("operationid");
+    if (operationIds != null) {
+      parsedJob.operationId = (String) (operationIds).get(0);
+    }
+    JSONArray users = (JSONArray) primaryfilters.get("user");
+    if (users != null) {
+      parsedJob.user = (String) (users).get(0);
+    }
+
+    JSONObject lastEvent = getLastEvent(job);
+    long lastEventTimestamp = ((Long) lastEvent.get("timestamp")) / MillisInSecond;
+
+    parsedJob.duration = lastEventTimestamp - parsedJob.starttime;
+
+    JSONObject otherinfo = (JSONObject) job.get("otherinfo");
+    JSONObject query = (JSONObject) JSONValue.parse((String) otherinfo.get("QUERY"));
+
+    parsedJob.query = (String) query.get("queryText");
+    JSONObject stages = (JSONObject) ((JSONObject) query.get("queryPlan")).get("STAGE PLANS");
+
+    List<String> dagIds = new LinkedList<String>();
+    List<JSONObject> stagesList = new LinkedList<JSONObject>();
+
+    for (Object key : stages.keySet()) {
+      JSONObject stage = (JSONObject) stages.get(key);
+      if (stage.get("Tez") != null) {
+        String dagId = (String) ((JSONObject) stage.get("Tez")).get("DagName:");
+        dagIds.add(dagId);
+      }
+      stagesList.add(stage);
+    }
+    parsedJob.dagNames = dagIds;
+    parsedJob.stages = stagesList;
+    return parsedJob;
+  }
+
+  private JSONObject getLastEvent(JSONObject atsEntity) {
+    JSONArray events = (JSONArray) atsEntity.get("events");
+    return (JSONObject) events.get(0);
+  }
+}

+ 9 - 10
contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/JobControllerFactory.java → contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/atsJobs/ATSParserFactory.java

@@ -16,28 +16,27 @@
  * limitations under the License.
  */
 
-package org.apache.ambari.view.hive.resources.jobs;
+package org.apache.ambari.view.hive.resources.jobs.atsJobs;
 
 import org.apache.ambari.view.ViewContext;
 
 import java.util.HashMap;
 import java.util.Map;
 
-public class JobControllerFactory {
+public class ATSParserFactory {
+
   private ViewContext context;
 
-  private JobControllerFactory(ViewContext context) {
+  public ATSParserFactory(ViewContext context) {
     this.context = context;
   }
 
-  private static Map<String, JobControllerFactory> viewSingletonObjects = new HashMap<String, JobControllerFactory>();
-  public static JobControllerFactory getInstance(ViewContext context) {
-    if (!viewSingletonObjects.containsKey(context.getInstanceName()))
-      viewSingletonObjects.put(context.getInstanceName(), new JobControllerFactory(context));
-    return viewSingletonObjects.get(context.getInstanceName());
+  public ATSParser getATSParser() {
+    ATSRequestsDelegateImpl delegate = new ATSRequestsDelegateImpl(context, getATSUrl(context));
+    return new ATSParser(delegate);
   }
 
-  public JobController createControllerForJob(Job job) {
-    return new JobControllerImpl(context, job);
+  public static String getATSUrl(ViewContext context) {
+    return context.getProperties().get("yarn.ats.url");
   }
 }

+ 29 - 0
contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/atsJobs/ATSRequestsDelegate.java

@@ -0,0 +1,29 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.jobs.atsJobs;
+
+import org.json.simple.JSONObject;
+
+public interface ATSRequestsDelegate {
+  JSONObject hiveQueryIdList(String username);
+
+  JSONObject hiveQueryIdByOperationId(String operationId);
+
+  JSONObject tezDagByName(String name);
+}

+ 86 - 0
contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/atsJobs/ATSRequestsDelegateImpl.java

@@ -0,0 +1,86 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.jobs.atsJobs;
+
+import org.apache.ambari.view.ViewContext;
+import org.apache.commons.io.IOUtils;
+import org.json.simple.JSONObject;
+import org.json.simple.JSONValue;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.HashMap;
+
+public class ATSRequestsDelegateImpl implements ATSRequestsDelegate {
+  protected final static Logger LOG =
+      LoggerFactory.getLogger(ATSRequestsDelegateImpl.class);
+  public static final String EMPTY_ENTITIES_JSON = "{ \"entities\" : [  ] }";
+
+  private ViewContext context;
+  private String atsUrl;
+
+  public ATSRequestsDelegateImpl(ViewContext context, String atsUrl) {
+    this.context = context;
+    this.atsUrl = atsUrl;
+  }
+
+  @Override
+  public JSONObject hiveQueryIdList(String username) {
+    String hiveQueriesListUrl = atsUrl + "/ws/v1/timeline/HIVE_QUERY_ID?primaryFilter=requestuser:" + username;
+    String response = readFromWithDefault(hiveQueriesListUrl, "{ \"entities\" : [  ] }");
+    return (JSONObject) JSONValue.parse(response);
+  }
+
+  @Override
+  public JSONObject hiveQueryIdByOperationId(String operationId) {
+    String hiveQueriesListUrl = atsUrl + "/ws/v1/timeline/HIVE_QUERY_ID?primaryFilter=operationid:" + operationId;
+    String response = readFromWithDefault(hiveQueriesListUrl, "{ \"entities\" : [  ] }");
+    return (JSONObject) JSONValue.parse(response);
+  }
+
+  @Override
+  public JSONObject tezDagByName(String name) {
+    String tezDagUrl = atsUrl + "/ws/v1/timeline/TEZ_DAG_ID?primaryFilter=dagName:" + name;
+    String response = readFromWithDefault(tezDagUrl, EMPTY_ENTITIES_JSON);
+    return (JSONObject) JSONValue.parse(response);
+  }
+
+  protected String readFromWithDefault(String hiveQueriesListUrl, String defaultResponse) {
+    String response;
+    try {
+      InputStream responseInputStream = context.getURLStreamProvider().readFrom(hiveQueriesListUrl, "GET",
+          null, new HashMap<String, String>());
+      response = IOUtils.toString(responseInputStream);
+    } catch (IOException e) {
+      LOG.error("Error while reading from ATS", e);
+      response = defaultResponse;
+    }
+    return response;
+  }
+
+  public String getAtsUrl() {
+    return atsUrl;
+  }
+
+  public void setAtsUrl(String atsUrl) {
+    this.atsUrl = atsUrl;
+  }
+}

+ 37 - 0
contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/atsJobs/HiveQueryId.java

@@ -0,0 +1,37 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.jobs.atsJobs;
+
+import org.json.simple.JSONObject;
+
+import java.util.List;
+
+public class HiveQueryId {
+  public String entity;
+  public String query;
+
+  public List<String> dagNames;
+
+  public List<JSONObject> stages;
+
+  public long starttime;
+  public long duration;
+  public String operationId;
+  public String user;
+}

+ 29 - 0
contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/atsJobs/IATSParser.java

@@ -0,0 +1,29 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.jobs.atsJobs;
+
+import java.util.List;
+
+public interface IATSParser {
+  List<HiveQueryId> getHiveQuieryIdsList(String username);
+
+  HiveQueryId getHiveQuieryIdByOperationId(byte[] guid);
+
+  TezDagId getTezDAGByName(String name);
+}

+ 26 - 0
contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/atsJobs/TezDagId.java

@@ -0,0 +1,26 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.jobs.atsJobs;
+
+public class TezDagId {
+  public static final String STATUS_UNKNOWN = "UNKNOWN";
+  public String applicationId = "";
+  public String dagName = "";
+  public String status = STATUS_UNKNOWN;
+}

+ 23 - 0
contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/viewJobs/IJobControllerFactory.java

@@ -0,0 +1,23 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.jobs.viewJobs;
+
+public interface IJobControllerFactory {
+  JobController createControllerForJob(Job job);
+}

+ 15 - 6
contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/Job.java → contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/viewJobs/Job.java

@@ -16,7 +16,8 @@
  * limitations under the License.
  */
 
-package org.apache.ambari.view.hive.resources.jobs;
+package org.apache.ambari.view.hive.resources.jobs.viewJobs;
+
 
 import org.apache.ambari.view.hive.persistence.utils.Indexed;
 import org.apache.ambari.view.hive.persistence.utils.PersonalResource;
@@ -30,15 +31,15 @@ public interface Job extends Serializable,Indexed,PersonalResource {
   public static final String JOB_STATE_UNKNOWN = "Unknown";
   public static final String JOB_STATE_INITIALIZED = "Initialized";
   public static final String JOB_STATE_RUNNING = "Running";
-  public static final String JOB_STATE_FINISHED = "Finished";
+  public static final String JOB_STATE_FINISHED = "Succeeded";
   public static final String JOB_STATE_CANCELED = "Canceled";
   public static final String JOB_STATE_CLOSED = "Closed";
   public static final String JOB_STATE_ERROR = "Error";
   public static final String JOB_STATE_PENDING = "Pending";
 
-  Integer getId();
+  String getId();
 
-  void setId(Integer id);
+  void setId(String id);
 
   String getOwner();
 
@@ -68,9 +69,9 @@ public interface Job extends Serializable,Indexed,PersonalResource {
 
   void setForcedContent(String forcedContent);
 
-  Integer getQueryId();
+  String getQueryId();
 
-  void setQueryId(Integer queryId);
+  void setQueryId(String queryId);
 
   String getStatusDir();
 
@@ -87,4 +88,12 @@ public interface Job extends Serializable,Indexed,PersonalResource {
   String getConfFile();
 
   void setConfFile(String confFile);
+
+  String getApplicationId();
+
+  void setApplicationId(String applicationId);
+
+  String getDagName();
+
+  void setDagName(String DagName);
 }

+ 2 - 2
contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/JobController.java → contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/viewJobs/JobController.java

@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.ambari.view.hive.resources.jobs;
+package org.apache.ambari.view.hive.resources.jobs.viewJobs;
 
 import org.apache.ambari.view.hive.client.Cursor;
 import org.apache.ambari.view.hive.persistence.utils.ItemNotFound;
@@ -38,7 +38,7 @@ public interface JobController {
 
   void afterCreation();
 
-  void onRead();
+  void update();
 
   boolean isModified();
 

+ 42 - 0
contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/viewJobs/JobControllerFactory.java

@@ -0,0 +1,42 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.jobs.viewJobs;
+
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.hive.utils.SharedObjectsFactory;
+
+public class JobControllerFactory implements IJobControllerFactory {
+  private SharedObjectsFactory sharedObjectsFactory;
+  private ViewContext context;
+
+  public JobControllerFactory(ViewContext context, SharedObjectsFactory sharedObjectsFactory) {
+    this.sharedObjectsFactory = sharedObjectsFactory;
+    this.context = context;
+  }
+
+  @Override
+  public JobController createControllerForJob(Job job) {
+    return new JobControllerImpl(context, job,
+        sharedObjectsFactory.getHiveConnectionController(),
+        sharedObjectsFactory.getOperationHandleControllerFactory(),
+        sharedObjectsFactory.getSavedQueryResourceManager(),
+        sharedObjectsFactory.getATSParser(),
+        sharedObjectsFactory.getHdfsApi());
+  }
+}

+ 36 - 19
contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/JobControllerImpl.java → contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/viewJobs/JobControllerImpl.java

@@ -16,11 +16,13 @@
  * limitations under the License.
  */
 
-package org.apache.ambari.view.hive.resources.jobs;
+package org.apache.ambari.view.hive.resources.jobs.viewJobs;
 
 import org.apache.ambari.view.ViewContext;
 import org.apache.ambari.view.hive.client.*;
 import org.apache.ambari.view.hive.persistence.utils.ItemNotFound;
+import org.apache.ambari.view.hive.resources.jobs.*;
+import org.apache.ambari.view.hive.resources.jobs.atsJobs.IATSParser;
 import org.apache.ambari.view.hive.resources.savedQueries.SavedQuery;
 import org.apache.ambari.view.hive.resources.savedQueries.SavedQueryResourceManager;
 import org.apache.ambari.view.hive.utils.*;
@@ -39,28 +41,37 @@ public class JobControllerImpl implements JobController, ModifyNotificationDeleg
       LoggerFactory.getLogger(JobControllerImpl.class);
 
   private ViewContext context;
+  private HdfsApi hdfsApi;
   private Job jobUnproxied;
   private Job job;
   private boolean modified;
 
-  private OperationHandleControllerFactory operationHandleControllerFactory;
+  private OperationHandleControllerFactory opHandleControllerFactory;
   private ConnectionController hiveSession;
   private SavedQueryResourceManager savedQueryResourceManager;
+  private IATSParser atsParser;
 
   /**
    * JobController constructor
    * Warning: Create JobControllers ONLY using JobControllerFactory!
    */
-  public JobControllerImpl(ViewContext context, Job job) {
+  public JobControllerImpl(ViewContext context, Job job,
+                           ConnectionController hiveSession,
+                           OperationHandleControllerFactory opHandleControllerFactory,
+                           SavedQueryResourceManager savedQueryResourceManager,
+                           IATSParser atsParser,
+                           HdfsApi hdfsApi) {
     this.context = context;
     setJobPOJO(job);
-    operationHandleControllerFactory = OperationHandleControllerFactory.getInstance(context);
-    hiveSession = ConnectionController.getInstance(context);
-    savedQueryResourceManager = SavedQueryResourceManager.getInstance(context);
+    this.opHandleControllerFactory = opHandleControllerFactory;
+    this.hiveSession = hiveSession;
+    this.savedQueryResourceManager = savedQueryResourceManager;
+    this.atsParser = atsParser;
+    this.hdfsApi = hdfsApi;
   }
 
   public String getQueryForJob() {
-    FilePaginator paginator = new FilePaginator(job.getQueryFile(), context);
+    FilePaginator paginator = new FilePaginator(job.getQueryFile(), hdfsApi);
     String query;
     try {
       query = paginator.readPage(0);  //warning - reading only 0 page restricts size of query to 1MB
@@ -89,6 +100,8 @@ public class JobControllerImpl implements JobController, ModifyNotificationDeleg
     OperationHandleController handleController = hiveSession.executeQuery(query);
 
     handleController.persistHandleForJob(job);
+
+//    atsParser.getHiveQuieryIdsList()
   }
 
   private void setupHiveBeforeQueryExecute() {
@@ -98,12 +111,12 @@ public class JobControllerImpl implements JobController, ModifyNotificationDeleg
 
   @Override
   public void cancel() throws ItemNotFound {
-    OperationHandleController handle = operationHandleControllerFactory.getHandleForJob(job);
+    OperationHandleController handle = opHandleControllerFactory.getHandleForJob(job);
     handle.cancel();
   }
 
   @Override
-  public void onRead() {
+  public void update() {
     updateOperationStatus();
     updateOperationLogs();
 
@@ -113,7 +126,7 @@ public class JobControllerImpl implements JobController, ModifyNotificationDeleg
   public void updateOperationStatus() {
     try {
 
-      OperationHandleController handle = operationHandleControllerFactory.getHandleForJob(job);
+      OperationHandleController handle = opHandleControllerFactory.getHandleForJob(job);
       String status = handle.getOperationStatus();
       job.setStatus(status);
       LOG.debug("Status of job#" + job.getId() + " is " + job.getStatus());
@@ -135,13 +148,17 @@ public class JobControllerImpl implements JobController, ModifyNotificationDeleg
 
   public void updateOperationLogs() {
     try {
-      OperationHandleController handle = operationHandleControllerFactory.getHandleForJob(job);
+      OperationHandleController handle = opHandleControllerFactory.getHandleForJob(job);
       String logs = handle.getLogs();
 
-//      LogParser info = LogParser.parseLog(logs);
+      LogParser info = LogParser.parseLog(logs);
+      LogParser.AppId app = info.getLastAppInList();
+      if (app != null) {
+        job.setApplicationId(app.getIdentifier());
+      }
 
       String logFilePath = job.getLogFile();
-      HdfsUtil.putStringToFile(context, logFilePath, logs);
+      HdfsUtil.putStringToFile(hdfsApi, logFilePath, logs);
 
     } catch (HiveClientRuntimeException ex) {
       LOG.error("Error while fetching logs: " + ex.getMessage());
@@ -182,7 +199,7 @@ public class JobControllerImpl implements JobController, ModifyNotificationDeleg
 
   @Override
   public Cursor getResults() throws ItemNotFound {
-    OperationHandleController handle = operationHandleControllerFactory.getHandleForJob(job);
+    OperationHandleController handle = opHandleControllerFactory.getHandleForJob(job);
     return handle.getResults();
   }
 
@@ -229,7 +246,7 @@ public class JobControllerImpl implements JobController, ModifyNotificationDeleg
     LOG.debug("Creating log file for job#" + job.getId());
 
     String logFile = job.getStatusDir() + "/" + "logs";
-    HdfsUtil.putStringToFile(context, logFile, "");
+    HdfsUtil.putStringToFile(hdfsApi, logFile, "");
 
     job.setLogFile(logFile);
     LOG.debug("Log file for job#" + job.getId() + ": " + logFile);
@@ -237,7 +254,7 @@ public class JobControllerImpl implements JobController, ModifyNotificationDeleg
 
   private void setupStatusDir() {
     String newDirPrefix = makeStatusDirectoryPrefix();
-    String newDir = HdfsUtil.findUnallocatedFileName(context, newDirPrefix, "");
+    String newDir = HdfsUtil.findUnallocatedFileName(hdfsApi, newDirPrefix, "");
 
     job.setStatusDir(newDir);
     LOG.debug("Status dir for job#" + job.getId() + ": " + newDir);
@@ -252,7 +269,7 @@ public class JobControllerImpl implements JobController, ModifyNotificationDeleg
       throw new MisconfigurationFormattedException("jobs.dir");
     }
 
-    String normalizedName = String.format("hive-job-%d", job.getId());
+    String normalizedName = String.format("hive-job-%s", job.getId());
     String timestamp = new SimpleDateFormat("yyyy-MM-dd_hh-mm").format(new Date());
     return String.format(userScriptsPath +
         "/%s-%s", normalizedName, timestamp);
@@ -268,14 +285,14 @@ public class JobControllerImpl implements JobController, ModifyNotificationDeleg
 
       if (job.getForcedContent() != null) {
 
-        HdfsUtil.putStringToFile(context, jobQueryFilePath, job.getForcedContent());
+        HdfsUtil.putStringToFile(hdfsApi, jobQueryFilePath, job.getForcedContent());
         job.setForcedContent("");  // prevent forcedContent to be written to DB
 
       }
       else if (job.getQueryId() != null) {
 
         String savedQueryFile = getRelatedSavedQueryFile();
-        HdfsApi.getInstance(context).copy(savedQueryFile, jobQueryFilePath);
+        hdfsApi.copy(savedQueryFile, jobQueryFilePath);
         job.setQueryFile(jobQueryFilePath);
 
       } else {

+ 30 - 7
contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/JobImpl.java → contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/viewJobs/JobImpl.java

@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.ambari.view.hive.resources.jobs;
+package org.apache.ambari.view.hive.resources.jobs.viewJobs;
 
 import org.apache.commons.beanutils.PropertyUtils;
 
@@ -35,9 +35,12 @@ public class JobImpl implements Job {
   private String status = JOB_STATE_UNKNOWN;
   private String forcedContent = null;
   private String dataBase = null;
-  private Integer queryId = null;
+  private String queryId = null;
 
-  private Integer id = null;
+  private String applicationId;
+  private String dagName;
+
+  private String id = null;
   private String owner = null;
 
   private String logFile;
@@ -72,12 +75,12 @@ public class JobImpl implements Job {
   }
 
   @Override
-  public Integer getId() {
+  public String getId() {
     return id;
   }
 
   @Override
-  public void setId(Integer id) {
+  public void setId(String id) {
     this.id = id;
   }
 
@@ -152,12 +155,12 @@ public class JobImpl implements Job {
   }
 
   @Override
-  public Integer getQueryId() {
+  public String getQueryId() {
     return queryId;
   }
 
   @Override
-  public void setQueryId(Integer queryId) {
+  public void setQueryId(String queryId) {
     this.queryId = queryId;
   }
 
@@ -200,4 +203,24 @@ public class JobImpl implements Job {
   public void setConfFile(String confFile) {
     this.confFile = confFile;
   }
+
+  @Override
+  public String getApplicationId() {
+    return applicationId;
+  }
+
+  @Override
+  public void setApplicationId(String applicationId) {
+    this.applicationId = applicationId;
+  }
+
+  @Override
+  public String getDagName() {
+    return dagName;
+  }
+
+  @Override
+  public void setDagName(String DagName) {
+    this.dagName = DagName;
+  }
 }

+ 10 - 10
contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/JobResourceManager.java → contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/viewJobs/JobResourceManager.java

@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.ambari.view.hive.resources.jobs;
+package org.apache.ambari.view.hive.resources.jobs.viewJobs;
 
 import org.apache.ambari.view.ViewContext;
 import org.apache.ambari.view.hive.client.*;
@@ -36,15 +36,15 @@ public class JobResourceManager extends PersonalCRUDResourceManager<Job> {
   private final static Logger LOG =
       LoggerFactory.getLogger(JobResourceManager.class);
 
-  private JobControllerFactory jobControllerFactory;
+  private IJobControllerFactory jobControllerFactory;
 
   /**
    * Constructor
    * @param context View Context instance
    */
-  public JobResourceManager(ViewContext context) {
-    super(JobImpl.class, context);
-    jobControllerFactory = JobControllerFactory.getInstance(context);
+  public JobResourceManager(SharedObjectsFactory sharedObjectsFactory, ViewContext context) {
+    super(JobImpl.class, sharedObjectsFactory, context);
+    jobControllerFactory = sharedObjectsFactory.getJobControllerFactory();
   }
 
   @Override
@@ -64,7 +64,7 @@ public class JobResourceManager extends PersonalCRUDResourceManager<Job> {
     return object;
   }
 
-  private void saveIfModified(JobController jobController) {
+  public void saveIfModified(JobController jobController) {
     if (jobController.isModified()) {
       save(jobController.getJobPOJO());
       jobController.clearModified();
@@ -73,10 +73,10 @@ public class JobResourceManager extends PersonalCRUDResourceManager<Job> {
 
 
   @Override
-  public Job read(Integer id) throws ItemNotFound {
+  public Job read(Object id) throws ItemNotFound {
     Job job = super.read(id);
     JobController jobController =  jobControllerFactory.createControllerForJob(job);
-    jobController.onRead();
+    jobController.update();
     saveIfModified(jobController);
     return job;
   }
@@ -87,11 +87,11 @@ public class JobResourceManager extends PersonalCRUDResourceManager<Job> {
   }
 
   @Override
-  public void delete(Integer resourceId) throws ItemNotFound {
+  public void delete(Object resourceId) throws ItemNotFound {
     super.delete(resourceId);
   }
 
-  public JobController readController(Integer id) throws ItemNotFound {
+  public JobController readController(Object id) throws ItemNotFound {
     Job job = read(id);
     return jobControllerFactory.createControllerForJob(job);
   }

+ 3 - 3
contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/resources/FileResourceItem.java

@@ -32,7 +32,7 @@ public class FileResourceItem implements Serializable, PersonalResource {
   private String name;
   private String path;
 
-  private Integer id;
+  private String id;
   private String owner;
 
   public FileResourceItem() {}
@@ -41,12 +41,12 @@ public class FileResourceItem implements Serializable, PersonalResource {
   }
 
   @Override
-  public Integer getId() {
+  public String getId() {
     return id;
   }
 
   @Override
-  public void setId(Integer id) {
+  public void setId(String id) {
     this.id = id;
   }
 

+ 5 - 4
contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/resources/FileResourceResourceManager.java

@@ -19,6 +19,7 @@
 package org.apache.ambari.view.hive.resources.resources;
 
 import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.hive.persistence.IStorageFactory;
 import org.apache.ambari.view.hive.persistence.utils.FilteringStrategy;
 import org.apache.ambari.view.hive.persistence.utils.ItemNotFound;
 import org.apache.ambari.view.hive.resources.PersonalCRUDResourceManager;
@@ -38,8 +39,8 @@ public class FileResourceResourceManager extends PersonalCRUDResourceManager<Fil
    * Constructor
    * @param context View Context instance
    */
-  public FileResourceResourceManager(ViewContext context) {
-    super(FileResourceItem.class, context);
+  public FileResourceResourceManager(IStorageFactory storageFactory, ViewContext context) {
+    super(FileResourceItem.class, storageFactory, context);
   }
 
   @Override
@@ -48,12 +49,12 @@ public class FileResourceResourceManager extends PersonalCRUDResourceManager<Fil
   }
 
   @Override
-  public FileResourceItem read(Integer id) throws ItemNotFound {
+  public FileResourceItem read(Object id) throws ItemNotFound {
     return super.read(id);
   }
 
   @Override
-  public void delete(Integer resourceId) throws ItemNotFound {
+  public void delete(Object resourceId) throws ItemNotFound {
     super.delete(resourceId);
   }
 

+ 5 - 5
contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/resources/FileResourceResourceProvider.java

@@ -22,7 +22,7 @@ import com.google.inject.Inject;
 import org.apache.ambari.view.*;
 import org.apache.ambari.view.hive.persistence.utils.ItemNotFound;
 import org.apache.ambari.view.hive.persistence.utils.OnlyOwnersFilteringStrategy;
-import org.apache.ambari.view.hive.resources.PersonalCRUDResourceManager;
+import org.apache.ambari.view.hive.utils.SharedObjectsFactory;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -44,7 +44,7 @@ public class FileResourceResourceProvider implements ResourceProvider<FileResour
 
   protected synchronized FileResourceResourceManager getResourceManager() {
     if (resourceManager == null) {
-      resourceManager = new FileResourceResourceManager(context);
+      resourceManager = new FileResourceResourceManager(new SharedObjectsFactory(context), context);
     }
     return resourceManager;
   }
@@ -52,7 +52,7 @@ public class FileResourceResourceProvider implements ResourceProvider<FileResour
   @Override
   public FileResourceItem getResource(String resourceId, Set<String> properties) throws SystemException, NoSuchResourceException, UnsupportedPropertyException {
     try {
-      return getResourceManager().read(Integer.valueOf(resourceId));
+      return getResourceManager().read(resourceId);
     } catch (ItemNotFound itemNotFound) {
       throw new NoSuchResourceException(resourceId);
     }
@@ -88,7 +88,7 @@ public class FileResourceResourceProvider implements ResourceProvider<FileResour
       throw new SystemException("error on updating resource", e);
     }
     try {
-      getResourceManager().update(item, Integer.valueOf(resourceId));
+      getResourceManager().update(item, resourceId);
     } catch (ItemNotFound itemNotFound) {
       throw new NoSuchResourceException(resourceId);
     }
@@ -98,7 +98,7 @@ public class FileResourceResourceProvider implements ResourceProvider<FileResour
   @Override
   public boolean deleteResource(String resourceId) throws SystemException, NoSuchResourceException, UnsupportedPropertyException {
     try {
-      getResourceManager().delete(Integer.valueOf(resourceId));
+      getResourceManager().delete(resourceId);
     } catch (ItemNotFound itemNotFound) {
       throw new NoSuchResourceException(resourceId);
     }

+ 5 - 6
contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/resources/FileResourceService.java

@@ -57,7 +57,7 @@ public class FileResourceService extends BaseService {
 
   protected synchronized FileResourceResourceManager getResourceManager() {
     if (resourceManager == null) {
-      resourceManager = new FileResourceResourceManager(context);
+      resourceManager = new FileResourceResourceManager(getSharedObjectsFactory(), context);
     }
     return resourceManager;
   }
@@ -70,10 +70,9 @@ public class FileResourceService extends BaseService {
   @Produces(MediaType.APPLICATION_JSON)
   public Response getOne(@PathParam("id") String id) {
     try {
-      FileResourceItem FileResourceItem = null;
-      FileResourceItem = getResourceManager().read(Integer.valueOf(id));
+      FileResourceItem fileResourceItem = getResourceManager().read(id);
       JSONObject object = new JSONObject();
-      object.put("fileResource", FileResourceItem);
+      object.put("fileResource", fileResourceItem);
       return Response.ok(object).build();
     } catch (WebApplicationException ex) {
       throw ex;
@@ -91,7 +90,7 @@ public class FileResourceService extends BaseService {
   @Path("{id}")
   public Response delete(@PathParam("id") String id) {
     try {
-      getResourceManager().delete(Integer.valueOf(id));
+      getResourceManager().delete(id);
       return Response.status(204).build();
     } catch (WebApplicationException ex) {
       throw ex;
@@ -132,7 +131,7 @@ public class FileResourceService extends BaseService {
   public Response update(ResourceRequest request,
                          @PathParam("id") String id) {
     try {
-      getResourceManager().update(request.fileResource, Integer.valueOf(id));
+      getResourceManager().update(request.fileResource, id);
       return Response.status(204).build();
     } catch (WebApplicationException ex) {
       throw ex;

+ 3 - 3
contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/savedQueries/SavedQuery.java

@@ -34,7 +34,7 @@ public class SavedQuery implements Serializable, PersonalResource {
   private String title;
   private String shortQuery;
 
-  private Integer id;
+  private String id;
   private String owner;
 
   public SavedQuery() {}
@@ -43,12 +43,12 @@ public class SavedQuery implements Serializable, PersonalResource {
   }
 
   @Override
-  public Integer getId() {
+  public String getId() {
     return id;
   }
 
   @Override
-  public void setId(Integer id) {
+  public void setId(String id) {
     this.id = id;
   }
 

+ 21 - 21
contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/savedQueries/SavedQueryResourceManager.java

@@ -19,6 +19,7 @@
 package org.apache.ambari.view.hive.resources.savedQueries;
 
 import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.hive.persistence.IStorageFactory;
 import org.apache.ambari.view.hive.persistence.utils.FilteringStrategy;
 import org.apache.ambari.view.hive.persistence.utils.ItemNotFound;
 import org.apache.ambari.view.hive.resources.PersonalCRUDResourceManager;
@@ -41,23 +42,15 @@ public class SavedQueryResourceManager extends PersonalCRUDResourceManager<Saved
   private final static Logger LOG =
       LoggerFactory.getLogger(SavedQueryResourceManager.class);
 
+  private SharedObjectsFactory sharedObjectsFactory;
+
   /**
    * Constructor
    * @param context View Context instance
    */
-  private SavedQueryResourceManager(ViewContext context) {
-    super(SavedQuery.class, context);
-  }
-
-  //TODO: move all context-singletones to ContextController or smth like that
-  private static Map<String, SavedQueryResourceManager> viewSingletonObjects = new HashMap<String, SavedQueryResourceManager>();
-  public static SavedQueryResourceManager getInstance(ViewContext context) {
-    if (!viewSingletonObjects.containsKey(context.getInstanceName()))
-      viewSingletonObjects.put(context.getInstanceName(), new SavedQueryResourceManager(context));
-    return viewSingletonObjects.get(context.getInstanceName());
-  }
-  static Map<String, SavedQueryResourceManager> getViewSingletonObjects() {
-    return viewSingletonObjects;
+  public SavedQueryResourceManager(ViewContext context, SharedObjectsFactory sharedObjectsFactory) {
+    super(SavedQuery.class, sharedObjectsFactory, context);
+    this.sharedObjectsFactory = sharedObjectsFactory;
   }
 
   @Override
@@ -83,20 +76,20 @@ public class SavedQueryResourceManager extends PersonalCRUDResourceManager<Saved
       throw new MisconfigurationFormattedException("scripts.dir");
     }
 
-    String normalizedName = String.format("hive-query-%d", object.getId());
+    String normalizedName = String.format("hive-query-%s", object.getId());
     String timestamp = new SimpleDateFormat("yyyy-MM-dd_hh-mm").format(new Date());
     String baseFileName = String.format(userScriptsPath +
         "/%s-%s", normalizedName, timestamp);
 
-    String newFilePath = HdfsUtil.findUnallocatedFileName(context, baseFileName, ".hql");
-    HdfsUtil.putStringToFile(context, newFilePath, "");
+    String newFilePath = HdfsUtil.findUnallocatedFileName(sharedObjectsFactory.getHdfsApi(), baseFileName, ".hql");
+    HdfsUtil.putStringToFile(sharedObjectsFactory.getHdfsApi(), newFilePath, "");
 
     object.setQueryFile(newFilePath);
-    getStorage().store(SavedQuery.class, object);
+    storageFabric.getStorage().store(SavedQuery.class, object);
   }
 
   @Override
-  public SavedQuery read(Integer id) throws ItemNotFound {
+  public SavedQuery read(Object id) throws ItemNotFound {
     SavedQuery savedQuery = super.read(id);
     fillShortQueryField(savedQuery);
     return savedQuery;
@@ -104,7 +97,7 @@ public class SavedQueryResourceManager extends PersonalCRUDResourceManager<Saved
 
   private void fillShortQueryField(SavedQuery savedQuery) {
     if (savedQuery.getQueryFile() != null) {
-      FilePaginator paginator = new FilePaginator(savedQuery.getQueryFile(), context);
+      FilePaginator paginator = new FilePaginator(savedQuery.getQueryFile(), sharedObjectsFactory.getHdfsApi());
       String query = null;
       try {
         query = paginator.readPage(0);
@@ -117,7 +110,14 @@ public class SavedQueryResourceManager extends PersonalCRUDResourceManager<Saved
       }
       savedQuery.setShortQuery(query.substring(0, (query.length() > 42)?42:query.length()));
     }
-    getStorage().store(SavedQuery.class, savedQuery);
+    storageFabric.getStorage().store(SavedQuery.class, savedQuery);
+  }
+
+  @Override
+  public SavedQuery update(SavedQuery newObject, String id) throws ItemNotFound {
+    SavedQuery savedQuery = super.update(newObject, id);
+    fillShortQueryField(savedQuery);
+    return savedQuery;
   }
 
   @Override
@@ -126,7 +126,7 @@ public class SavedQueryResourceManager extends PersonalCRUDResourceManager<Saved
   }
 
   @Override
-  public void delete(Integer resourceId) throws ItemNotFound {
+  public void delete(Object resourceId) throws ItemNotFound {
     super.delete(resourceId);
   }
 }

+ 12 - 6
contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/savedQueries/SavedQueryResourceProvider.java

@@ -22,7 +22,7 @@ import com.google.inject.Inject;
 import org.apache.ambari.view.*;
 import org.apache.ambari.view.hive.persistence.utils.ItemNotFound;
 import org.apache.ambari.view.hive.persistence.utils.OnlyOwnersFilteringStrategy;
-import org.apache.ambari.view.hive.resources.PersonalCRUDResourceManager;
+import org.apache.ambari.view.hive.utils.SharedObjectsFactory;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -38,18 +38,24 @@ public class SavedQueryResourceProvider implements ResourceProvider<SavedQuery>
   @Inject
   ViewContext context;
 
-  protected SavedQueryResourceManager resourceManager = null;
   protected final static Logger LOG =
       LoggerFactory.getLogger(SavedQueryResourceProvider.class);
+  private SharedObjectsFactory sharedObjectsFactory;
+
+  public SharedObjectsFactory getSharedObjectsFactory() {
+    if (sharedObjectsFactory == null)
+      sharedObjectsFactory = new SharedObjectsFactory(context);
+    return sharedObjectsFactory;
+  }
 
   protected synchronized SavedQueryResourceManager getResourceManager() {
-    return SavedQueryResourceManager.getInstance(context);
+    return getSharedObjectsFactory().getSavedQueryResourceManager();
   }
 
   @Override
   public SavedQuery getResource(String resourceId, Set<String> properties) throws SystemException, NoSuchResourceException, UnsupportedPropertyException {
     try {
-      return getResourceManager().read(Integer.valueOf(resourceId));
+      return getResourceManager().read(resourceId);
     } catch (ItemNotFound itemNotFound) {
       throw new NoSuchResourceException(resourceId);
     }
@@ -85,7 +91,7 @@ public class SavedQueryResourceProvider implements ResourceProvider<SavedQuery>
       throw new SystemException("error on updating resource", e);
     }
     try {
-      getResourceManager().update(item, Integer.valueOf(resourceId));
+      getResourceManager().update(item, resourceId);
     } catch (ItemNotFound itemNotFound) {
       throw new NoSuchResourceException(resourceId);
     }
@@ -95,7 +101,7 @@ public class SavedQueryResourceProvider implements ResourceProvider<SavedQuery>
   @Override
   public boolean deleteResource(String resourceId) throws SystemException, NoSuchResourceException, UnsupportedPropertyException {
     try {
-      getResourceManager().delete(Integer.valueOf(resourceId));
+      getResourceManager().delete(resourceId);
     } catch (ItemNotFound itemNotFound) {
       throw new NoSuchResourceException(resourceId);
     }

+ 4 - 6
contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/savedQueries/SavedQueryService.java

@@ -23,7 +23,6 @@ import org.apache.ambari.view.ViewResourceHandler;
 import org.apache.ambari.view.hive.BaseService;
 import org.apache.ambari.view.hive.persistence.utils.ItemNotFound;
 import org.apache.ambari.view.hive.persistence.utils.OnlyOwnersFilteringStrategy;
-import org.apache.ambari.view.hive.resources.PersonalCRUDResourceManager;
 import org.apache.ambari.view.hive.utils.NotFoundFormattedException;
 import org.apache.ambari.view.hive.utils.ServiceFormattedException;
 import org.json.simple.JSONObject;
@@ -58,7 +57,7 @@ public class SavedQueryService extends BaseService {
       LoggerFactory.getLogger(SavedQueryService.class);
 
   protected synchronized SavedQueryResourceManager getResourceManager() {
-    return SavedQueryResourceManager.getInstance(context);
+    return getSharedObjectsFactory().getSavedQueryResourceManager();
   }
 
   protected void setResourceManager(SavedQueryResourceManager resourceManager) {
@@ -73,8 +72,7 @@ public class SavedQueryService extends BaseService {
   @Produces(MediaType.APPLICATION_JSON)
   public Response getOne(@PathParam("queryId") String queryId) {
     try {
-      SavedQuery savedQuery = null;
-      savedQuery = getResourceManager().read(Integer.valueOf(queryId));
+      SavedQuery savedQuery = getResourceManager().read(queryId);
       JSONObject object = new JSONObject();
       object.put("savedQuery", savedQuery);
       return Response.ok(object).build();
@@ -94,7 +92,7 @@ public class SavedQueryService extends BaseService {
   @Path("{queryId}")
   public Response delete(@PathParam("queryId") String queryId) {
     try {
-      getResourceManager().delete(Integer.valueOf(queryId));
+      getResourceManager().delete(queryId);
       return Response.status(204).build();
     } catch (WebApplicationException ex) {
       throw ex;
@@ -135,7 +133,7 @@ public class SavedQueryService extends BaseService {
   public Response update(SavedQueryRequest request,
                          @PathParam("queryId") String queryId) {
     try {
-      getResourceManager().update(request.savedQuery, Integer.valueOf(queryId));
+      getResourceManager().update(request.savedQuery, queryId);
       return Response.status(204).build();
     } catch (WebApplicationException ex) {
       throw ex;

+ 3 - 3
contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/udfs/UDF.java

@@ -33,7 +33,7 @@ public class UDF implements Serializable, PersonalResource {
   private String classname;
   private Integer fileResource;
 
-  private Integer id;
+  private String id;
   private String owner;
 
   public UDF() {}
@@ -42,12 +42,12 @@ public class UDF implements Serializable, PersonalResource {
   }
 
   @Override
-  public Integer getId() {
+  public String getId() {
     return id;
   }
 
   @Override
-  public void setId(Integer id) {
+  public void setId(String id) {
     this.id = id;
   }
 

+ 5 - 4
contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/udfs/UDFResourceManager.java

@@ -19,6 +19,7 @@
 package org.apache.ambari.view.hive.resources.udfs;
 
 import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.hive.persistence.IStorageFactory;
 import org.apache.ambari.view.hive.persistence.utils.FilteringStrategy;
 import org.apache.ambari.view.hive.persistence.utils.ItemNotFound;
 import org.apache.ambari.view.hive.resources.PersonalCRUDResourceManager;
@@ -38,12 +39,12 @@ public class UDFResourceManager extends PersonalCRUDResourceManager<UDF> {
    * Constructor
    * @param context View Context instance
    */
-  public UDFResourceManager(ViewContext context) {
-    super(UDF.class, context);
+  public UDFResourceManager(IStorageFactory storageFactory, ViewContext context) {
+    super(UDF.class, storageFactory, context);
   }
 
   @Override
-  public UDF read(Integer id) throws ItemNotFound {
+  public UDF read(Object id) throws ItemNotFound {
     return super.read(id);
   }
 
@@ -58,7 +59,7 @@ public class UDFResourceManager extends PersonalCRUDResourceManager<UDF> {
   }
 
   @Override
-  public void delete(Integer resourceId) throws ItemNotFound {
+  public void delete(Object resourceId) throws ItemNotFound {
     super.delete(resourceId);
   }
 }

+ 6 - 5
contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/udfs/UDFResourceProvider.java

@@ -22,7 +22,7 @@ import com.google.inject.Inject;
 import org.apache.ambari.view.*;
 import org.apache.ambari.view.hive.persistence.utils.ItemNotFound;
 import org.apache.ambari.view.hive.persistence.utils.OnlyOwnersFilteringStrategy;
-import org.apache.ambari.view.hive.resources.PersonalCRUDResourceManager;
+import org.apache.ambari.view.hive.utils.SharedObjectsFactory;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -42,9 +42,10 @@ public class UDFResourceProvider implements ResourceProvider<UDF> {
   protected final static Logger LOG =
       LoggerFactory.getLogger(UDFResourceProvider.class);
 
+
   protected synchronized UDFResourceManager getResourceManager() {
     if (resourceManager == null) {
-      resourceManager = new UDFResourceManager(context);
+      resourceManager = new UDFResourceManager(new SharedObjectsFactory(context), context);
     }
     return resourceManager;
   }
@@ -52,7 +53,7 @@ public class UDFResourceProvider implements ResourceProvider<UDF> {
   @Override
   public UDF getResource(String resourceId, Set<String> properties) throws SystemException, NoSuchResourceException, UnsupportedPropertyException {
     try {
-      return getResourceManager().read(Integer.valueOf(resourceId));
+      return getResourceManager().read(resourceId);
     } catch (ItemNotFound itemNotFound) {
       throw new NoSuchResourceException(resourceId);
     }
@@ -88,7 +89,7 @@ public class UDFResourceProvider implements ResourceProvider<UDF> {
       throw new SystemException("error on updating resource", e);
     }
     try {
-      getResourceManager().update(item, Integer.valueOf(resourceId));
+      getResourceManager().update(item, resourceId);
     } catch (ItemNotFound itemNotFound) {
       throw new NoSuchResourceException(resourceId);
     }
@@ -98,7 +99,7 @@ public class UDFResourceProvider implements ResourceProvider<UDF> {
   @Override
   public boolean deleteResource(String resourceId) throws SystemException, NoSuchResourceException, UnsupportedPropertyException {
     try {
-      getResourceManager().delete(Integer.valueOf(resourceId));
+      getResourceManager().delete(resourceId);
     } catch (ItemNotFound itemNotFound) {
       throw new NoSuchResourceException(resourceId);
     }

+ 6 - 9
contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/udfs/UDFService.java

@@ -23,8 +23,6 @@ import org.apache.ambari.view.ViewResourceHandler;
 import org.apache.ambari.view.hive.BaseService;
 import org.apache.ambari.view.hive.persistence.utils.ItemNotFound;
 import org.apache.ambari.view.hive.persistence.utils.OnlyOwnersFilteringStrategy;
-import org.apache.ambari.view.hive.resources.PersonalCRUDResourceManager;
-import org.apache.ambari.view.hive.resources.resources.FileResourceItem;
 import org.apache.ambari.view.hive.resources.resources.FileResourceResourceManager;
 import org.apache.ambari.view.hive.utils.NotFoundFormattedException;
 import org.apache.ambari.view.hive.utils.ServiceFormattedException;
@@ -61,14 +59,14 @@ public class UDFService extends BaseService {
 
   protected synchronized UDFResourceManager getResourceManager() {
     if (resourceManager == null) {
-      resourceManager = new UDFResourceManager(context);
+      resourceManager = new UDFResourceManager(getSharedObjectsFactory(), context);
     }
     return resourceManager;
   }
 
   protected synchronized FileResourceResourceManager getFileResourceResourceManager() {
     if (fileResourceResourceManager == null) {
-      fileResourceResourceManager = new FileResourceResourceManager(context);
+      fileResourceResourceManager = new FileResourceResourceManager(getSharedObjectsFactory(), context);
     }
     return fileResourceResourceManager;
   }
@@ -81,10 +79,9 @@ public class UDFService extends BaseService {
   @Produces(MediaType.APPLICATION_JSON)
   public Response getOne(@PathParam("id") String id) {
     try {
-      UDF UDF = null;
-      UDF = getResourceManager().read(Integer.valueOf(id));
+      UDF udf = getResourceManager().read(id);
       JSONObject object = new JSONObject();
-      object.put("udf", UDF);
+      object.put("udf", udf);
       return Response.ok(object).build();
     } catch (WebApplicationException ex) {
       throw ex;
@@ -102,7 +99,7 @@ public class UDFService extends BaseService {
   @Path("{id}")
   public Response delete(@PathParam("id") String id) {
     try {
-      getResourceManager().delete(Integer.valueOf(id));
+      getResourceManager().delete(id);
       return Response.status(204).build();
     } catch (WebApplicationException ex) {
       throw ex;
@@ -145,7 +142,7 @@ public class UDFService extends BaseService {
     try {
       if (request.udf.getFileResource() != null)
         getFileResourceResourceManager().read(request.udf.getFileResource());
-      getResourceManager().update(request.udf, Integer.valueOf(id));
+      getResourceManager().update(request.udf, id);
       return Response.status(204).build();
     } catch (WebApplicationException ex) {
       throw ex;

+ 3 - 3
contrib/views/hive/src/main/java/org/apache/ambari/view/hive/utils/FilePaginator.java

@@ -40,11 +40,11 @@ public class FilePaginator {
   /**
    * Constructor
    * @param filePath Path to file on HDFS
-   * @param context View Context instance
+   * @param hdfsApi hdfs api
    */
-  public FilePaginator(String filePath, ViewContext context) {
+  public FilePaginator(String filePath, HdfsApi hdfsApi) {
     this.filePath = filePath;
-    hdfsApi = HdfsApi.getInstance(context);
+    this.hdfsApi = hdfsApi;
   }
 
   /**

+ 12 - 33
contrib/views/hive/src/main/java/org/apache/ambari/view/hive/utils/HdfsApi.java

@@ -102,7 +102,7 @@ public class HdfsApi {
    * @throws java.io.IOException
    * @throws InterruptedException
    */
-  public FileStatus[] listdir(final String path) throws FileNotFoundException,
+  public synchronized FileStatus[] listdir(final String path) throws FileNotFoundException,
       IOException, InterruptedException {
     return ugi.doAs(new PrivilegedExceptionAction<FileStatus[]>() {
       public FileStatus[] run() throws FileNotFoundException, Exception {
@@ -119,7 +119,7 @@ public class HdfsApi {
    * @throws java.io.FileNotFoundException
    * @throws InterruptedException
    */
-  public FileStatus getFileStatus(final String path) throws IOException,
+  public synchronized FileStatus getFileStatus(final String path) throws IOException,
       FileNotFoundException, InterruptedException {
     return ugi.doAs(new PrivilegedExceptionAction<FileStatus>() {
       public FileStatus run() throws FileNotFoundException, IOException {
@@ -135,7 +135,7 @@ public class HdfsApi {
    * @throws java.io.IOException
    * @throws InterruptedException
    */
-  public boolean mkdir(final String path) throws IOException,
+  public synchronized boolean mkdir(final String path) throws IOException,
       InterruptedException {
     return ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
       public Boolean run() throws Exception {
@@ -152,7 +152,7 @@ public class HdfsApi {
    * @throws java.io.IOException
    * @throws InterruptedException
    */
-  public boolean rename(final String src, final String dst) throws IOException,
+  public synchronized boolean rename(final String src, final String dst) throws IOException,
       InterruptedException {
     return ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
       public Boolean run() throws Exception {
@@ -169,7 +169,7 @@ public class HdfsApi {
    * @throws java.io.IOException
    * @throws InterruptedException
    */
-  public boolean delete(final String path, final boolean recursive)
+  public synchronized boolean delete(final String path, final boolean recursive)
       throws IOException, InterruptedException {
     return ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
       public Boolean run() throws Exception {
@@ -183,7 +183,7 @@ public class HdfsApi {
    * @return home directory
    * @throws Exception
    */
-  public Path getHomeDir() throws Exception {
+  public synchronized Path getHomeDir() throws Exception {
     return ugi.doAs(new PrivilegedExceptionAction<Path>() {
       public Path run() throws IOException {
         return fs.getHomeDirectory();
@@ -196,7 +196,7 @@ public class HdfsApi {
    * @return home directory
    * @throws Exception
    */
-  public FsStatus getStatus() throws Exception {
+  public synchronized FsStatus getStatus() throws Exception {
     return ugi.doAs(new PrivilegedExceptionAction<FsStatus>() {
       public FsStatus run() throws IOException {
         return fs.getStatus();
@@ -212,7 +212,7 @@ public class HdfsApi {
    * @throws java.io.IOException
    * @throws InterruptedException
    */
-  public FSDataOutputStream create(final String path, final boolean overwrite)
+  public synchronized FSDataOutputStream create(final String path, final boolean overwrite)
       throws IOException, InterruptedException {
     return ugi.doAs(new PrivilegedExceptionAction<FSDataOutputStream>() {
       public FSDataOutputStream run() throws Exception {
@@ -228,7 +228,7 @@ public class HdfsApi {
    * @throws java.io.IOException
    * @throws InterruptedException
    */
-  public FSDataInputStream open(final String path) throws IOException,
+  public synchronized FSDataInputStream open(final String path) throws IOException,
       InterruptedException {
     return ugi.doAs(new PrivilegedExceptionAction<FSDataInputStream>() {
       public FSDataInputStream run() throws Exception {
@@ -245,7 +245,7 @@ public class HdfsApi {
    * @throws java.io.IOException
    * @throws InterruptedException
    */
-  public void copy(final String src, final String dest) throws IOException,
+  public synchronized void copy(final String src, final String dest) throws IOException,
       InterruptedException {
     boolean result = ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
       public Boolean run() throws Exception {
@@ -257,7 +257,7 @@ public class HdfsApi {
     }
   }
 
-  public boolean exists(final String newFilePath) throws IOException, InterruptedException {
+  public synchronized boolean exists(final String newFilePath) throws IOException, InterruptedException {
     return ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
       public Boolean run() throws Exception {
         return fs.exists(new Path(newFilePath));
@@ -326,24 +326,7 @@ public class HdfsApi {
     return json;
   }
 
-
-  private static Map<String, HdfsApi> viewSingletonObjects = new HashMap<String, HdfsApi>();
-  /**
-   * Returns HdfsApi object specific to instance
-   * @param context View Context instance
-   * @return Hdfs business delegate object
-   */
-  public static HdfsApi getInstance(ViewContext context) {
-    if (!viewSingletonObjects.containsKey(context.getInstanceName()))
-      viewSingletonObjects.put(context.getInstanceName(), connectToHDFSApi(context));
-    return viewSingletonObjects.get(context.getInstanceName());
-  }
-
-  public static void setInstance(ViewContext context, HdfsApi api) {
-    viewSingletonObjects.put(context.getInstanceName(), api);
-  }
-
-  public static HdfsApi connectToHDFSApi(ViewContext context) {
+  public static synchronized HdfsApi connectToHDFSApi(ViewContext context) {
     HdfsApi api = null;
     Thread.currentThread().setContextClassLoader(null);
 
@@ -392,8 +375,4 @@ public class HdfsApi {
       userName = context.getUsername();
     return userName;
   }
-
-  public static void dropAllConnections() {
-    viewSingletonObjects.clear();
-  }
 }

+ 7 - 10
contrib/views/hive/src/main/java/org/apache/ambari/view/hive/utils/HdfsUtil.java

@@ -19,7 +19,6 @@
 package org.apache.ambari.view.hive.utils;
 
 
-import org.apache.ambari.view.ViewContext;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -35,14 +34,14 @@ public class HdfsUtil {
    * @param filePath path to file
    * @param content new content of file
    */
-  public static void putStringToFile(ViewContext context, String filePath, String content) {
-    HdfsApi hdfs = HdfsApi.getInstance(context);
-
+  public static void putStringToFile(HdfsApi hdfs, String filePath, String content) {
     FSDataOutputStream stream;
     try {
-      stream = hdfs.create(filePath, true);
-      stream.writeBytes(content);
-      stream.close();
+      synchronized (hdfs) {
+        stream = hdfs.create(filePath, true);
+        stream.writeBytes(content);
+        stream.close();
+      }
     } catch (IOException e) {
       throw new ServiceFormattedException("Could not write file " + filePath, e);
     } catch (InterruptedException e) {
@@ -57,9 +56,7 @@ public class HdfsUtil {
    * @param extension file extension
    * @return if fullPathAndFilename="/tmp/file",extension=".txt" then filename will be like "/tmp/file_42.txt"
    */
-  public static String findUnallocatedFileName(ViewContext context, String fullPathAndFilename, String extension) {
-    HdfsApi hdfs = HdfsApi.getInstance(context);
-
+  public static String findUnallocatedFileName(HdfsApi hdfs, String fullPathAndFilename, String extension) {
     int triesCount = 0;
     String newFilePath;
     boolean isUnallocatedFilenameFound;

+ 163 - 0
contrib/views/hive/src/main/java/org/apache/ambari/view/hive/utils/SharedObjectsFactory.java

@@ -0,0 +1,163 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.utils;
+
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.hive.client.Connection;
+import org.apache.ambari.view.hive.client.ConnectionFactory;
+import org.apache.ambari.view.hive.client.IConnectionFactory;
+import org.apache.ambari.view.hive.persistence.IStorageFactory;
+import org.apache.ambari.view.hive.persistence.Storage;
+import org.apache.ambari.view.hive.persistence.utils.StorageFactory;
+import org.apache.ambari.view.hive.resources.jobs.ConnectionController;
+import org.apache.ambari.view.hive.resources.jobs.OperationHandleControllerFactory;
+import org.apache.ambari.view.hive.resources.jobs.atsJobs.ATSParser;
+import org.apache.ambari.view.hive.resources.jobs.atsJobs.ATSParserFactory;
+import org.apache.ambari.view.hive.resources.jobs.viewJobs.IJobControllerFactory;
+import org.apache.ambari.view.hive.resources.jobs.viewJobs.JobControllerFactory;
+import org.apache.ambari.view.hive.resources.savedQueries.SavedQueryResourceManager;
+
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * Generates shared connections. Clients with same tag will get the same connection.
+ * e.g. user 'admin' using view instance 'HIVE1' will use one connection, another user
+ * will use different connection.
+ */
+public class SharedObjectsFactory implements IStorageFactory, IConnectionFactory {
+  private ViewContext context;
+  private IConnectionFactory hiveConnectionFactory;
+  private IStorageFactory storageFactory;
+  private ATSParserFactory atsParserFactory;
+
+  private static final Map<Class, Map<String, Object>> localObjects = new HashMap<Class, Map<String, Object>>();
+
+  public SharedObjectsFactory(ViewContext context) {
+    this.context = context;
+    this.hiveConnectionFactory = new ConnectionFactory(context);
+    this.storageFactory = new StorageFactory(context);
+    this.atsParserFactory = new ATSParserFactory(context);
+
+    synchronized (localObjects) {
+      if (localObjects.size() == 0) {
+        localObjects.put(Connection.class, new HashMap<String, Object>());
+        localObjects.put(OperationHandleControllerFactory.class, new HashMap<String, Object>());
+        localObjects.put(Storage.class, new HashMap<String, Object>());
+        localObjects.put(IJobControllerFactory.class, new HashMap<String, Object>());
+        localObjects.put(ATSParser.class, new HashMap<String, Object>());
+        localObjects.put(SavedQueryResourceManager.class, new HashMap<String, Object>());
+        localObjects.put(HdfsApi.class, new HashMap<String, Object>());
+      }
+    }
+  }
+
+  /**
+   * Returns Connection object specific to unique tag
+   * @return Hive connection shared by clients with the same tag
+   */
+  @Override
+  public Connection getHiveConnection() {
+    if (!localObjects.get(Connection.class).containsKey(getTagName())) {
+      Connection newConnection = hiveConnectionFactory.getHiveConnection();
+      localObjects.get(Connection.class).put(getTagName(), newConnection);
+    }
+    return (Connection) localObjects.get(Connection.class).get(getTagName());
+  }
+
+  public ConnectionController getHiveConnectionController() {
+    return new ConnectionController(getOperationHandleControllerFactory(), getHiveConnection());
+  }
+
+  // =============================
+
+  public OperationHandleControllerFactory getOperationHandleControllerFactory() {
+    if (!localObjects.get(OperationHandleControllerFactory.class).containsKey(getTagName()))
+      localObjects.get(OperationHandleControllerFactory.class).put(getTagName(), new OperationHandleControllerFactory(this));
+    return (OperationHandleControllerFactory) localObjects.get(OperationHandleControllerFactory.class).get(getTagName());
+  }
+
+  // =============================
+  @Override
+  public Storage getStorage() {
+    if (!localObjects.get(Storage.class).containsKey(getTagName()))
+      localObjects.get(Storage.class).put(getTagName(), storageFactory.getStorage());
+    return (Storage) localObjects.get(Storage.class).get(getTagName());
+  }
+
+  // =============================
+  public IJobControllerFactory getJobControllerFactory() {
+    if (!localObjects.get(IJobControllerFactory.class).containsKey(getTagName()))
+      localObjects.get(IJobControllerFactory.class).put(getTagName(), new JobControllerFactory(context, this));
+    return (IJobControllerFactory) localObjects.get(IJobControllerFactory.class).get(getTagName());
+  }
+
+  // =============================
+
+  public SavedQueryResourceManager getSavedQueryResourceManager() {
+    if (!localObjects.get(SavedQueryResourceManager.class).containsKey(getTagName()))
+      localObjects.get(SavedQueryResourceManager.class).put(getTagName(), new SavedQueryResourceManager(context, this));
+    return (SavedQueryResourceManager) localObjects.get(SavedQueryResourceManager.class).get(getTagName());
+  }
+
+  // =============================
+  public ATSParser getATSParser() {
+    if (!localObjects.get(ATSParser.class).containsKey(getTagName()))
+      localObjects.get(ATSParser.class).put(getTagName(), atsParserFactory.getATSParser());
+    return (ATSParser) localObjects.get(ATSParser.class).get(getTagName());
+  }
+
+  // =============================
+  public HdfsApi getHdfsApi() {
+    if (!localObjects.get(HdfsApi.class).containsKey(getTagName()))
+      localObjects.get(HdfsApi.class).put(getTagName(), HdfsApi.connectToHDFSApi(context));
+    return (HdfsApi) localObjects.get(HdfsApi.class).get(getTagName());
+  }
+
+  /**
+   * Generates tag name. Clients with same tag will share one connection.
+   * @return tag name
+   */
+  public String getTagName() {
+    return String.format("%s:%s", context.getInstanceName(), context.getUsername());
+  }
+
+  /**
+   * For testing purposes, ability to substitute some local object
+   */
+  public void setInstance(Class clazz, Object object) {
+    localObjects.get(clazz).put(getTagName(), object);
+  }
+
+  /**
+   * For testing purposes, ability to clear all local objects of particular class
+   */
+  public void clear(Class clazz) {
+    localObjects.get(clazz).clear();
+  }
+
+  /**
+   * For testing purposes, ability to clear all connections
+   */
+  public void clear() {
+    for(Map<String, Object> map : localObjects.values()) {
+      map.clear();
+    }
+  }
+}

+ 1 - 1
contrib/views/hive/src/main/resources/ui/hive-web/app/components/typeahead-widget.js

@@ -19,7 +19,7 @@
 import Typeahead from 'ember-cli-selectize/components/ember-selectize';
 import Ember from 'ember';
 
-export default Typeahead.extend({
+export default Typeahead.extend(Ember.I18n.TranslateableProperties, {
   didInsertElement: function() {
     this._super();
 

+ 4 - 5
contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/history.js

@@ -23,8 +23,8 @@ import constants from 'hive/utils/constants';
 export default Ember.ArrayController.extend(FilterableMixin, {
   itemController: constants.namingConventions.job,
 
-  sortAscending: true,
-  sortProperties: ['dateSubmitted'],
+  sortAscending: false,
+  sortProperties: ['dateSubmittedTimestamp'],
 
   init: function () {
     var oneMonthAgo = new Date();
@@ -40,12 +40,11 @@ export default Ember.ArrayController.extend(FilterableMixin, {
       }),
       Ember.Object.create({
         caption: 'columns.status',
-        property: 'status',
-        classBinding: 'status'
+        property: 'status'
       }),
       Ember.Object.create({
         caption: 'columns.date',
-        property: 'dateSubmitted',
+        property: 'dateSubmittedTimestamp',
         dateRange: Ember.Object.create({
           min: oneMonthAgo,
           max: new Date()

+ 44 - 12
contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/index.js

@@ -18,6 +18,7 @@
 
 import Ember from 'ember';
 import constants from 'hive/utils/constants';
+import utils from 'hive/utils/functions';
 
 export default Ember.Controller.extend({
   needs: [ constants.namingConventions.openQueries,
@@ -26,7 +27,9 @@ export default Ember.Controller.extend({
            constants.namingConventions.jobLogs,
            constants.namingConventions.jobResults,
            constants.namingConventions.jobExplain,
-           constants.namingConventions.settings
+           constants.namingConventions.settings,
+           constants.namingConventions.visualExplain,
+           constants.namingConventions.tezUI
   ],
 
   openQueries: Ember.computed.alias('controllers.' + constants.namingConventions.openQueries),
@@ -36,6 +39,8 @@ export default Ember.Controller.extend({
   results: Ember.computed.alias('controllers.' + constants.namingConventions.jobResults),
   explain: Ember.computed.alias('controllers.' + constants.namingConventions.jobExplain),
   settings: Ember.computed.alias('controllers.' + constants.namingConventions.settings),
+  visualExplain: Ember.computed.alias('controllers.' + constants.namingConventions.visualExplain),
+  tezUI: Ember.computed.alias('controllers.' + constants.namingConventions.tezUI),
 
   canExecute: function () {
     var isModelRunning = this.get('model.isRunning');
@@ -77,7 +82,6 @@ export default Ember.Controller.extend({
 
   _executeQuery: function (shouldExplain) {
     var queryId,
-        self = this,
         query,
         finalQuery,
         job,
@@ -166,8 +170,6 @@ export default Ember.Controller.extend({
     }
 
     queries = queries.map(function (query) {
-      var explainIndex = query.indexOf(constants.namingConventions.explainPrefix);
-
       if (shouldExplain) {
         if (query.indexOf(constants.namingConventions.explainPrefix) === -1) {
           return constants.namingConventions.explainPrefix + query;
@@ -213,7 +215,7 @@ export default Ember.Controller.extend({
     this._super();
 
     // initialize queryParams with an empty array
-    this.set('queryParams', Ember.ArrayProxy.create({ content: Ember.A([]) }))
+    this.set('queryParams', Ember.ArrayProxy.create({ content: Ember.A([]) }));
 
     this.set('queryProcessTabs', Ember.ArrayProxy.create({ content: Ember.A([
       Ember.Object.create({
@@ -232,20 +234,27 @@ export default Ember.Controller.extend({
   },
 
   displayJobTabs: function () {
-    return this.get('content.constructor.typeKey') === constants.namingConventions.job;
+    return this.get('content.constructor.typeKey') === constants.namingConventions.job &&
+           utils.isInteger(this.get('content.id'));
   }.property('content'),
 
   modelChanged: function () {
     var self = this;
     var content = this.get('content');
     var openQueries = this.get('openQueries');
+    var database = this.get('databases').findBy('name', this.get('content.dataBase'));
+
+    if (database) {
+      this.set('databases.selectedDatabase', database);
+    }
 
     //update open queries list when current query model changes
     openQueries.update(content).then(function (isExplainedQuery) {
       var newId = content.get('id');
       var tab = openQueries.getTabForModel(content);
 
-      if (content.get('constructor.typeKey') === constants.namingConventions.job) {
+      //if not an ATS job
+      if (content.get('constructor.typeKey') === constants.namingConventions.job && utils.isInteger(newId)) {
         self.get('queryProcessTabs').forEach(function (queryTab) {
           queryTab.set('id', newId);
         });
@@ -269,7 +278,7 @@ export default Ember.Controller.extend({
       return;
     }
 
-    if (this.get('content.status') !== constants.statuses.finished) {
+    if (!utils.insensitiveCompare(this.get('content.status'), constants.statuses.succeeded)) {
       return;
     }
 
@@ -285,7 +294,7 @@ export default Ember.Controller.extend({
     var tabs = this.get('queryProcessTabs');
     var isResultsTabVisible = tabs.findBy('path', constants.namingConventions.subroutes.jobResults).get('visible');
 
-    if (this.get('content.status') === constants.statuses.finished && isResultsTabVisible) {
+    if (utils.insensitiveCompare(this.get('content.status'), constants.statuses.succeeded) && isResultsTabVisible) {
       items.push({
         title: Ember.I18n.t('buttons.saveHdfs'),
         action: 'saveToHDFS'
@@ -320,7 +329,7 @@ export default Ember.Controller.extend({
   saveToHDFS: function () {
     var job = this.get('content');
 
-    if (job.get('status') !== constants.statuses.finished) {
+    if (!utils.insensitiveCompare(job.get('status'), constants.statuses.succeeded)) {
       return;
     }
 
@@ -347,7 +356,7 @@ export default Ember.Controller.extend({
 
     Ember.run.later(function () {
       Ember.$.getJSON(url).then(function (response) {
-        if (response.status !== constants.results.statuses.terminated) {
+        if (!utils.insensitiveCompare(response.status, constants.results.statuses.terminated)) {
           self.pollSaveToHDFS(response);
         } else {
           self.set('content.isRunning', false);
@@ -413,6 +422,10 @@ export default Ember.Controller.extend({
           id: 'fixture_' + idCounter
         });
 
+        if (idCounter) {
+          model.set('title', model.get('title') + ' (' + idCounter + ')');
+        }
+
         idCounter++;
 
         this.transitionToRoute(constants.namingConventions.subroutes.savedQuery, model);
@@ -424,6 +437,8 @@ export default Ember.Controller.extend({
           wasNew = this.get('model.isNew'),
           defer = Ember.RSVP.defer();
 
+      this.set('model.dataBase', this.get('databases.selectedDatabase.name'));
+
       this.send('openModal', 'modal-save', {
         heading: "modals.save.heading",
         text: this.get('content.title'),
@@ -446,7 +461,7 @@ export default Ember.Controller.extend({
       var subroute;
 
       this._executeQuery().then(function (job) {
-        if (job.get('status') !== constants.statuses.finished) {
+        if (!utils.insensitiveCompare(job.get('status'), constants.statuses.succeeded)) {
           subroute = constants.namingConventions.subroutes.jobLogs;
         } else {
           subroute = constants.namingConventions.subroutes.jobResults;
@@ -470,6 +485,23 @@ export default Ember.Controller.extend({
       }, function (err) {
         self.send('addAlert', constants.alerts.error, err.responseText, "alerts.errors.save.query");
       });
+    },
+
+    toggleOverlay: function (targetController) {
+      if (this.get('visualExplain.showOverlay') && targetController !== 'visualExplain') {
+        this.set('visualExplain.showOverlay', false);
+      } else if (this.get('tezUI.showOverlay') && targetController !== 'tezUI') {
+        this.set('tezUI.showOverlay', false);
+      } else if (this.get('settings.showOverlay') && targetController !== 'settings') {
+        this.set('settings.showOverlay', false);
+      }
+
+      if (targetController !== 'settings') {
+        //set content for visual explain and tez ui.
+        this.set(targetController + '.content', this.get('content'));
+      }
+
+      this.toggleProperty(targetController + '.showOverlay');
     }
   }
 });

+ 6 - 6
contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/index/history-query/logs.js

@@ -18,6 +18,7 @@
 
 import Ember from 'ember';
 import constants from 'hive/utils/constants';
+import utils from 'hive/utils/functions';
 
 export default Ember.ObjectController.extend({
   needs: [ constants.namingConventions.loadedFiles ],
@@ -73,12 +74,11 @@ export default Ember.ObjectController.extend({
   },
 
   isJobRunning: function (job) {
-    var status = job.get('status');
-
-    return status !== constants.statuses.finished &&
-           status !== constants.statuses.canceled &&
-           status !== constants.statuses.closed &&
-           status !== constants.statuses.error;
+    return utils.insensitiveCompare(job.get('status'),
+                                    constants.statuses.unknown,
+                                    constants.statuses.initialized,
+                                    constants.statuses.running,
+                                    constants.statuses.pending);
   },
 
   getLogs: function () {

+ 3 - 2
contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/index/history-query/results.js

@@ -18,6 +18,7 @@
 
 import Ember from 'ember';
 import constants from 'hive/utils/constants';
+import utils from 'hive/utils/functions';
 
 export default Ember.ObjectController.extend({
   cachedResults: [],
@@ -51,7 +52,7 @@ export default Ember.ObjectController.extend({
   initResults: function () {
     var existingJob;
 
-    if (this.get('content.status') !== constants.statuses.finished) {
+    if (!utils.insensitiveCompare(this.get('content.status'), constants.statuses.succeeded)) {
       return;
     }
 
@@ -131,4 +132,4 @@ export default Ember.ObjectController.extend({
       }
     }
   }
-});
+});

+ 2 - 3
contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/job.js

@@ -18,6 +18,7 @@
 
 import Ember from 'ember';
 import constants from 'hive/utils/constants';
+import utils from 'hive/utils/functions';
 
 export default Ember.ObjectController.extend({
   needs: [ constants.namingConventions.history, constants.namingConventions.loadedFiles ],
@@ -25,9 +26,7 @@ export default Ember.ObjectController.extend({
   files: Ember.computed.alias('controllers.' + constants.namingConventions.loadedFiles),
 
   canStop: function () {
-    return this.get('status') === constants.statuses.running ||
-           this.get('status') === constants.statuses.initialized ||
-           this.get('status') === constants.statuses.pending;
+    return utils.insensitiveCompare(this.get('status'), constants.statuses.running, constants.statuses.initialized, constants.statuses.pending);
   }.property('status'),
 
   actions: {

+ 6 - 7
contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/open-queries.js

@@ -18,6 +18,7 @@
 
 import Ember from 'ember';
 import constants from 'hive/utils/constants';
+import utils from 'hive/utils/functions';
 
 export default Ember.ArrayController.extend({
   needs: [ constants.namingConventions.databases,
@@ -91,7 +92,9 @@ export default Ember.ArrayController.extend({
           var isExplainedQuery,
               subroute;
 
-          if (model.get('constructor.typeKey') === constants.namingConventions.job) {
+          //jobs that were run from hive ui (exclude ats jobs)
+          if (model.get('constructor.typeKey') === constants.namingConventions.job &&
+              utils.isInteger(model.get('id'))) {
             isExplainedQuery = self.get('currentQuery.fileContent').indexOf(constants.namingConventions.explainPrefix) > -1;
 
             if (isExplainedQuery) {
@@ -251,13 +254,9 @@ export default Ember.ArrayController.extend({
     var hasQueryParams = this.get('index.queryParams.length');
     var hasSettings = this.get('settings').hasSettings(jobId);
 
-    if ( selected && selected[0] !== "" ||
+    return selected && selected[0] !== "" ||
          hasQueryParams ||
-         hasSettings ) {
-      return true;
-    }
-
-    return false;
+         hasSettings;
   },
 
   actions: {

+ 1 - 4
contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/queries.js

@@ -63,10 +63,7 @@ export default Ember.ArrayController.extend(FilterableMixin, {
   ],
 
   model: function () {
-    var queries = this.get('queries');
-    queries = queries ? queries.filterBy('isNew', false) : queries;
-
-    return this.filter(queries);
+    return this.filter(this.get('queries'));
   }.property('queries', 'filters.@each'),
 
   actions: {

+ 81 - 48
contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/settings.js

@@ -28,19 +28,28 @@ export default Ember.ArrayController.extend({
   index: Ember.computed.alias('controllers.' + constants.namingConventions.index),
   openQueries: Ember.computed.alias('controllers.' + constants.namingConventions.openQueries),
 
-  showSettingsOverlay: false,
+  predefinedSettings: constants.hiveParameters,
 
-  querySettings: function () {
+  currentSettings: function () {
     var currentId = this.get('index.model.id');
-    return this.findBy('id', currentId);
-  }.property('model.[]', 'index.model.id'),
+    var targetSettings = this.findBy('id', currentId);
+
+    if (!targetSettings) {
+      targetSettings = this.pushObject(Ember.Object.create({
+        id: currentId,
+        settings: []
+      }));
+    }
+
+    return targetSettings;
+  }.property('index.model.id'),
 
   updateSettingsId: function (oldId, newId) {
     this.filterBy('id', oldId).setEach('id', newId);
   },
 
-  getSettingsString: function (id) {
-    var currentId = id ? id : this.get('index.model.id');
+  getSettingsString: function () {
+    var currentId = this.get('index.model.id');
 
     var querySettings = this.findBy('id', currentId);
 
@@ -49,13 +58,9 @@ export default Ember.ArrayController.extend({
     }
 
     var settings = querySettings.get('settings').map(function (setting) {
-      return 'set %@ = %@;'.fmt(setting.key, setting.value);
+      return 'set %@ = %@;'.fmt(setting.get('key.name'), setting.get('value'));
     });
 
-    if (querySettings.get('runOnTez')) {
-      settings.push('set %@ = tez;'.fmt(constants.settings.executionEngine));
-    }
-
     return settings.join("\n");
   },
 
@@ -70,8 +75,7 @@ export default Ember.ArrayController.extend({
     var id = this.get('index.model.id');
     var query = this.get('openQueries.currentQuery');
     var content = query.get('fileContent');
-    var runOnTez = false;
-
+    var self = this;
 
     var regex = new RegExp(/^set\s+[\w-.]+(\s+|\s?)=(\s+|\s?)[\w-.]+(\s+|\s?);/gim);
     var settings = content.match(regex);
@@ -83,68 +87,97 @@ export default Ember.ArrayController.extend({
     query.set('fileContent', content.replace(regex, '').trim());
     settings = settings.map(function (setting) {
       var KV = setting.split('=');
-
-      return {
-        key: KV[0].replace('set', '').trim(),
+      var obj = {
+        key: {
+          name: KV[0].replace('set', '').trim()
+        },
         value: KV[1].replace(';', '').trim()
       };
-    });
 
-    // remove runOnTez from settings
-    settings = settings.findBy('key', constants.settings.executionEngine).without(false);
+      if (!self.get('predefinedSettings').findBy('name', obj.key.name)) {
+        self.get('predefinedSettings').pushObject({
+          name: obj.key.name
+        });
+      }
 
-    this.setSettingForQuery(id, settings, !!runOnTez);
+      return obj;
+    });
+
+    this.setSettingForQuery(id, settings);
   }.observes('openQueries.currentQuery', 'openQueries.tabUpdated'),
 
-  setSettingForQuery: function (id, settings, runOnTez) {
+  setSettingForQuery: function (id, settings) {
     var querySettings = this.findBy('id', id);
 
     if (!querySettings) {
       this.pushObject(Ember.Object.create({
         id: id,
-        settings: settings,
-        runOnTez: runOnTez
+        settings: settings
       }));
     } else {
       querySettings.setProperties({
-        'settings': settings,
-        'runOnTez': runOnTez
+        'settings': settings
       });
     }
   },
 
-  createSettingsForQuery: function () {
-    var currentId = this.get('index.model.id');
+  validate: function() {
+    var settings = this.get('currentSettings.settings') || [];
+    var predefinedSettings = this.get('predefinedSettings');
+
+    settings.forEach(function(setting) {
+      var predefined = predefinedSettings.filterProperty('name', setting.get('key.name'));
+      if (!predefined.length) {
+        return;
+      } else {
+        predefined = predefined[0];
+      }
+
+      if (predefined.values && predefined.values.contains(setting.get('value'))) {
+        setting.set('valid', true);
+        return;
+      }
+
+      if (predefined.validate && predefined.validate.test(setting.get('value'))) {
+        setting.set('valid', true);
+        return;
+      }
+
+      setting.set('valid', false);
+    });
+  }.observes('currentSettings.[]', 'currentSettings.settings.@each.value', 'currentSettings.settings.@each.key'),
 
-    if (!this.findBy('id', currentId)) {
-      this.pushObject(Ember.Object.create({
-        id: currentId,
-        settings: [],
-        runOnTez: false
-      }));
-    }
-  },
+  currentSettingsAreValid: function() {
+    var currentSettings = this.get('currentSettings.settings');
+    var invalid = currentSettings.filterProperty('valid', false);
 
-  actions: {
-    toggleOverlay: function () {
-      // create a setting object if its not already there
-      this.createSettingsForQuery();
-      this.toggleProperty('showSettingsOverlay');
-    },
+    return invalid.length ? false : true;
+  }.property('currentSettings.settings.@each.value', 'currentSettings.settings.@each.key'),
 
+  actions: {
     add: function () {
       var currentId = this.get('index.model.id'),
-       querySettings = this.findBy('id', currentId);
+          querySettings = this.findBy('id', currentId);
 
-      querySettings.settings.pushObject(Ember.Object.create({
-        key: '',
-        value: ''
-      }));
+      var Setting = Ember.Object.extend({
+        valid: true,
+        selection: Ember.Object.create(),
+        value: Ember.computed.alias('selection.value')
+      });
+
+      querySettings.get('settings').pushObject(Setting.create({}));
     },
 
     remove: function (setting) {
-      var currentId = this.get('index.model.id');
-      this.findBy('id', currentId).settings.removeObject(setting);
+      this.findBy('id', this.get('index.model.id')).settings.removeObject(setting);
+    },
+
+    addKey: function (param) {
+      var newKey = this.get('predefinedSettings').pushObject({
+        name: param
+      });
+
+      this.get('currentSettings.settings').findBy('key', null).set('key', newKey);
     }
   }
 });

+ 22 - 0
contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/tez-ui.js

@@ -0,0 +1,22 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+
+export default Ember.Controller.extend({
+});

+ 22 - 0
contrib/views/hive/src/main/resources/ui/hive-web/app/controllers/visual-explain.js

@@ -0,0 +1,22 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+
+export default Ember.ObjectController.extend({
+});

+ 25 - 0
contrib/views/hive/src/main/resources/ui/hive-web/app/helpers/all-uppercase.js

@@ -0,0 +1,25 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+
+export function allUppercase(input) {
+  return input.toUpperCase();
+}
+
+export default Ember.Handlebars.makeBoundHelper(allUppercase);

+ 5 - 2
contrib/views/hive/src/main/resources/ui/hive-web/app/initializers/i18n.js

@@ -64,7 +64,9 @@ TRANSLATIONS = {
     query: {
       editor: 'Query Editor',
       process: 'Query Process Results',
-      parameters: 'Parameters'
+      parameters: 'Parameters',
+      visualExplain: 'Visual Explain',
+      tez: 'TEZ'
     },
     download: 'Save results...'
   },
@@ -77,7 +79,8 @@ TRANSLATIONS = {
       database: 'Select Database...',
       udfs: 'Insert udfs',
       file: 'Select File Resource...',
-      noFileResource: '(no file)'
+      noFileResource: '(no file)',
+      value: "Select value..."
     },
     fileResource: {
       name: "resource name",

+ 13 - 1
contrib/views/hive/src/main/resources/ui/hive-web/app/models/job.js

@@ -28,5 +28,17 @@ export default DS.Model.extend({
   status: DS.attr('string'),
   dateSubmitted: DS.attr('date'),
   forcedContent: DS.attr('string'),
-  logFile: DS.attr('string')
+  logFile: DS.attr('string'),
+
+  dateSubmittedTimestamp: function () {
+    var date = this.get('dateSubmitted');
+
+    return date ? date * 1000 : date;
+  }.property('dateSubmitted'),
+
+  uppercaseStatus: function () {
+    var status = this.get('status');
+
+    return status ? status.toUpperCase() : status;
+  }.property('status')
 });

+ 2 - 2
contrib/views/hive/src/main/resources/ui/hive-web/app/routes/application.js

@@ -20,7 +20,7 @@ import Ember from 'ember';
 import constants from 'hive/utils/constants';
 
 export default Ember.Route.extend({
-  setupController: function() {
+  setupController: function () {
     var self = this;
 
     this.controllerFor(constants.namingConventions.databases).set('model', this.store.find(constants.namingConventions.database));
@@ -31,7 +31,7 @@ export default Ember.Route.extend({
   },
 
   actions: {
-    openModal: function(modalTemplate, options) {
+    openModal: function (modalTemplate, options) {
       this.controllerFor(modalTemplate).setProperties({
         heading: options.heading,
         text: options.text,

+ 10 - 3
contrib/views/hive/src/main/resources/ui/hive-web/app/routes/index/history-query/index.js

@@ -18,6 +18,7 @@
 
 import Ember from 'ember';
 import constants from 'hive/utils/constants';
+import utils from 'hive/utils/functions';
 
 export default Ember.Route.extend({
   setupController: function (controller, model) {
@@ -28,10 +29,16 @@ export default Ember.Route.extend({
       subroute = existingTab.get('subroute');
     }
 
-    if (subroute) {
-      this.transitionTo(subroute, model);
+    // filter out hdfs jobs
+    if (utils.isInteger(model.get('id'))) {
+      if (subroute) {
+        this.transitionTo(subroute, model);
+      } else {
+        this.transitionTo(constants.namingConventions.subroutes.jobLogs, model);
+      }
     } else {
-      this.transitionTo(constants.namingConventions.subroutes.jobLogs, model);
+      this.transitionTo(constants.namingConventions.subroutes.historyQuery, model);
+      this.controllerFor(constants.namingConventions.routes.index).set('model', model);
     }
   }
 });

+ 73 - 27
contrib/views/hive/src/main/resources/ui/hive-web/app/styles/app.scss

@@ -20,6 +20,7 @@
 
 $panel-background: #f5f5f5;
 $placeholder-color: #aaa;
+$border-color: #ddd;
 
 @-webkit-keyframes fadeIn {
   0% {opacity: 0;}
@@ -50,10 +51,23 @@ $placeholder-color: #aaa;
   -webkit-animation-name: fadeOut;
           animation-name: fadeOut;
 }
+
 #content {
   padding: 20px 0;
 }
 
+#index-content {
+  display: flex;
+}
+
+#visual-explain, #tez-ui {
+  position: absolute;
+  left: 0;
+  width: 0;
+  z-index: 99;
+  background: white;
+}
+
 #alerts-container {
   position: absolute;
   left: 15px;
@@ -78,7 +92,7 @@ aside  hr {
 }
 
 .toolbox {
-  margin-top: 15px;
+  margin: 15px 15px 0 0;
 
   insert-udfs {
     display: inline-block;
@@ -97,6 +111,10 @@ aside  hr {
   color: $placeholder-color;
 }
 
+.form-group {
+  margin-bottom: 0;
+}
+
 .secondary-row {
   background: $panel-background;
 }
@@ -109,12 +127,12 @@ aside  hr {
 }
 
 .CodeMirror {
-  border: 0 1px solid #ddd;
+  border: 0 1px solid $border-color;
 }
 
 .grip {
   height: 20px;
-  border: 0 1px 1px solid #ddd;
+  border: 0 1px 1px solid $border-color;
   background-color: $panel-background;
   color: #bbb;
   text-align: center;
@@ -133,19 +151,19 @@ aside  hr {
   background-color: white;
 }
 
-.Unknown {
+.UNKNOWN {
   color: gray;
 }
 
-.Running, .Pending, .Initialized, .fa-edit {
+.RUNNING, .PENDING, .INITIALIZED, .fa-edit {
   color: orange;
 }
 
-.Finished {
+.SUCCEEDED {
   color: green;
 }
 
-.Canceled, .Error {
+.CANCELED, .ERROR {
   color: red;
 }
 
@@ -153,7 +171,7 @@ dropdown .fa-remove {
   color: red;
 }
 
-.Closed {
+.CLOSED {
   color: blue;
 }
 
@@ -162,6 +180,35 @@ dropdown .fa-remove {
   padding-right: 0;
 }
 
+.main-content {
+  flex-grow: 1;
+}
+
+.query-menu {
+  margin-top: 57px;
+
+  span, popover {
+    cursor: pointer;
+    overflow: hidden;
+    display: block;
+    border-bottom: 1px solid $border-color;
+    padding: 10px;
+  }
+}
+
+.queries-icon {
+  font-size: 20px;
+
+  &.active {
+    color: #428bca;
+  }
+
+  &.text-icon {
+    font-size: 12px;
+    font-weight: 800;
+  }
+}
+
 .alert {
   margin-bottom: 5px;
   padding-bottom: 10px;
@@ -231,7 +278,7 @@ body {
   height: 36px;
   background: url("/img/spinner.gif");
   background-repeat: no-repeat;
-  margin: 0px auto;
+  margin: 0 auto;
 
   &.small {
     background-size: 20px;
@@ -257,25 +304,18 @@ body {
   padding-right: 0 !important;
 }
 
-.popover-right {
-  z-index: 92;
-  float: right;
-  position: relative;
-}
-
 .query-editor-panel .panel-body {
   position: relative;
+  padding-right: 0;
 }
-.settings-toggle {
+
+.settings-containers-toggle {
   position: absolute;
   top: 0;
   right: 25px;
   cursor: pointer;
 }
 
-.settings-toggle.active {
-  color: #428bca;
-}
 .settings-container {
   width: 100%;
   overflow-y: scroll;
@@ -286,8 +326,8 @@ body {
   position: absolute;
   padding: 0 15px;
   z-index: 1000;
-  border: 1px solid #ddd;
 
+  border: 1px solid $border-color;
   -webkit-animation-duration: .5s;
           animation-duration: .5s;
   -webkit-animation-fill-mode: both;
@@ -302,14 +342,25 @@ body {
 
 .setting {
   float: left;
-  margin: 0 10px 10px 0;
+  padding-right: 10px;
+  padding-top: 10px;
+
+  .input-group {
+    width: 100%;
+  }
+  .input-group-addon {
+    text-align: justify;
+    width: 50%;
+  }
 }
 
 .setting .remove {
   line-height: 30px;
-  margin-left: 10px;
   font-size: 18px;
   cursor: pointer;
+  position: absolute;
+  right: -5px;
+  top: -10px;
 }
 
 tabs {
@@ -320,8 +371,3 @@ tabs {
 tree-view ul li {
   padding-left: 10px;
 }
-
-.runOnTez {
-  float: right;
-  margin: 0
-}

+ 17 - 0
contrib/views/hive/src/main/resources/ui/hive-web/app/templates/components/_typeahead-widget.hbs

@@ -0,0 +1,17 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}

+ 1 - 1
contrib/views/hive/src/main/resources/ui/hive-web/app/templates/components/popover-widget.hbs

@@ -16,4 +16,4 @@
 * limitations under the License.
 }}
 
-<div class="hide"> {{yield}} </div>
+<span class="hide"> {{yield}} </span>

+ 1 - 1
contrib/views/hive/src/main/resources/ui/hive-web/app/templates/databases-search-results.hbs

@@ -45,4 +45,4 @@
   </div>
 {{else}}
   <h4>{{t "labels.noTablesMatches"}} "{{tablesSearchTerm}}"</h4>
-{{/if}}
+{{/if}}

+ 2 - 2
contrib/views/hive/src/main/resources/ui/hive-web/app/templates/databases.hbs

@@ -21,8 +21,8 @@
 
     {{typeahead-widget
         content=model
-        optionValuePath="content.id"
-        optionLabelPath="content.name"
+        optionValuePath="id"
+        optionLabelPath="name"
         selection=selectedDatabase
     }}
 

+ 2 - 2
contrib/views/hive/src/main/resources/ui/hive-web/app/templates/history.hbs

@@ -47,8 +47,8 @@
           {{item.title}}
         {{/link-to}}
         </td>
-        <td {{bind-attr class=item.status}}>{{item.status}}</td>
-        <td>{{date-binding item "dateSubmitted"}}</td>
+        <td {{bind-attr class=item.uppercaseStatus}}>{{all-uppercase item.status}}</td>
+        <td>{{date-binding item "dateSubmittedTimestamp"}}</td>
         <td>{{item.duration}}</td>
         <td>
           <a class="fa fa-expand pull-right"></a>

+ 72 - 52
contrib/views/hive/src/main/resources/ui/hive-web/app/templates/index.hbs

@@ -16,67 +16,87 @@
 * limitations under the License.
 }}
 
-<aside class="col-md-3 col-xs-12 pull-left no-padding">
-  {{render 'databases'}}
-</aside>
+<div id="index-content">
+  <div class="main-content">
+    <aside class="col-md-3 col-xs-12 no-padding">
+      {{render 'databases'}}
+    </aside>
 
-<div class="col-md-9 col-xs 12 pull-left query-container">
+    <div class="col-md-9 col-xs-12 query-container">
+      {{render 'alerts'}}
 
-  {{render 'alerts'}}
-  {{#panel-widget headingTranslation="titles.query.editor" classNames="query-editor-panel"}}
-    {{#popover-widget classNames="fa fa-info-circle popover-right" titleTranslation="popover.queryEditorHelp.title" }}
-      <ul>
-        <li>{{t 'popover.queryEditorHelp.content.line1'}}</li>
-        <li>{{t 'popover.queryEditorHelp.content.line2'}}</li>
-        <li>{{t 'popover.queryEditorHelp.content.line3'}}</li>
-      </ul>
-    {{/popover-widget}}
+      {{#panel-widget headingTranslation="titles.query.editor" classNames="query-editor-panel"}}
+        {{render 'open-queries'}}
 
-    {{render 'open-queries'}}
+        <div class="toolbox">
+          <button type="button" class="btn btn-sm btn-success execute-query"
+                  {{bind-attr class="canExecute::disabled"}}
+                  {{action "executeQuery"}}>
+            {{t "buttons.execute"}}
+          </button>
+          <button type="button" class="btn btn-sm btn-default"
+                  {{bind-attr class="canExecute::disabled"}}
+                  {{action "explainQuery"}}>
+            {{t "buttons.explain"}}
+          </button>
+          <button type="button" class="btn btn-sm btn-default save-query-as" {{action "saveQuery"}}>{{t "buttons.saveAs"}}</button>
 
-    <div class="toolbox">
-      <button type="button" class="btn btn-sm btn-success execute-query"
-              {{bind-attr class="canExecute::disabled"}}
-              {{action "executeQuery"}}>
-        {{t "buttons.execute"}}
-      </button>
-      <button type="button" class="btn btn-sm btn-default"
-              {{bind-attr class="canExecute::disabled"}}
-              {{action "explainQuery"}}>
-        {{t "buttons.explain"}}
-      </button>
-      <button type="button" class="btn btn-sm btn-default save-query-as" {{action "saveQuery"}}>{{t "buttons.saveAs"}}</button>
+          {{render 'insert-udfs'}}
 
-      {{render 'insert-udfs'}}
+          <button type="button" class="btn btn-sm btn-primary  pull-right" {{action "addQuery"}}>{{t "buttons.newQuery"}}</button>
+        </div>
+      {{/panel-widget}}
 
-      <button type="button" class="btn btn-sm btn-primary  pull-right" {{action "addQuery"}}>{{t "buttons.newQuery"}}</button>
-    </div>
-  {{/panel-widget}}
-
-  {{#if queryParams}}
-    {{#panel-widget headingTranslation="titles.query.parameters"}}
-      <div class="form-horizontal">
-        {{#each param in queryParams}}
-          <div {{bind-attr class=":form-group param.value:has-success:has-error"}}>
-            <label class="col-sm-3 control-label">{{param.name}}</label>
-              <div class="col-sm-9">
-                {{input value=param.value placeholder="value" class="form-control"}}
+      {{#if queryParams}}
+        {{#panel-widget headingTranslation="titles.query.parameters"}}
+          <div class="form-horizontal">
+            {{#each param in queryParams}}
+              <div {{bind-attr class=":form-group param.value:has-success:has-error"}}>
+                <label class="col-sm-3 control-label">{{param.name}}</label>
+                  <div class="col-sm-9">
+                    {{input value=param.value placeholder="value" class="form-control"}}
+                  </div>
               </div>
+            {{/each}}
           </div>
-        {{/each}}
-      </div>
-    {{/panel-widget}}
+        {{/panel-widget}}
+      {{/if}}
+
+      {{#if displayJobTabs}}
+        {{#panel-widget headingTranslation="titles.query.process"
+                        isLoading=model.isRunning
+                        menuItems=downloadMenu
+                        menuHeadingTranslation="titles.download"
+                        classNames="query-process-results-panel"}}
+          {{#tabs-widget tabs=queryProcessTabs selectedTab=selectedQueryProcessTab}}
+            {{outlet}}
+          {{/tabs-widget}}
+        {{/panel-widget}}
+      {{/if}}
+    </div>
+  </div>
+
+  {{#if tezUI.showOverlay}}
+    {{render 'tez-ui'}}
   {{/if}}
 
-  {{#if displayJobTabs}}
-    {{#panel-widget headingTranslation="titles.query.process"
-                    isLoading=model.isRunning
-                    menuItems=downloadMenu
-                    menuHeadingTranslation="titles.download"
-                    classNames="query-process-results-panel"}}
-      {{#tabs-widget tabs=queryProcessTabs selectedTab=selectedQueryProcessTab}}
-        {{outlet}}
-      {{/tabs-widget}}
-    {{/panel-widget}}
+  {{#if visualExplain.showOverlay}}
+    {{render 'visual-explain'}}
   {{/if}}
+
+  <div class="query-menu">
+    {{#popover-widget classNames="fa fa-info-circle queries-icon" titleTranslation="popover.queryEditorHelp.title" }}
+      <ul>
+        <li>{{t 'popover.queryEditorHelp.content.line1'}}</li>
+        <li>{{t 'popover.queryEditorHelp.content.line2'}}</li>
+        <li>{{t 'popover.queryEditorHelp.content.line3'}}</li>
+      </ul>
+    {{/popover-widget}}
+
+    <span {{bind-attr class="settings.showOverlay:active :fa :fa-gear :queries-icon"}} {{action 'toggleOverlay' 'settings'}}></span>
+
+    <span {{bind-attr class="visualExplain.showOverlay:active :fa :fa-bar-chart :queries-icon"}} {{action 'toggleOverlay' 'visualExplain'}}></span>
+
+    <span {{bind-attr class="tezUI.showOverlay:active :queries-icon :text-icon"}} {{action 'toggleOverlay' 'tezUI'}}>TEZ</span>
+  </div>
 </div>

+ 30 - 28
contrib/views/hive/src/main/resources/ui/hive-web/app/templates/queries.hbs

@@ -41,38 +41,40 @@
   </thead>
   <tbody>
     {{#each query in this}}
-      <tr>
-        <td>
-          {{#link-to "index.savedQuery" query}}
-            {{query.shortQuery}}
-          {{/link-to}}
-        </td>
+      {{#unless query.isNew}}
+        <tr>
+          <td>
+            {{#link-to "index.savedQuery" query}}
+              {{query.shortQuery}}
+            {{/link-to}}
+          </td>
 
-        <td>
-          {{#link-to "index.savedQuery" query}}
-            {{query.title}}
-          {{/link-to}}
-        </td>
+          <td>
+            {{#link-to "index.savedQuery" query}}
+              {{query.title}}
+            {{/link-to}}
+          </td>
 
-        <td>{{query.dataBase}}</td>
+          <td>{{query.dataBase}}</td>
 
-        <td>{{query.owner}}</td>
+          <td>{{query.owner}}</td>
 
-        <td>
-          {{#unless query.isNew}}
-            <div class="btn-group pull-right">
-              <span data-toggle="dropdown">
-                <a class="fa fa-gear"></a>
-              </span>
-              <ul class="dropdown-menu" role="menu">
-                {{#each link in controller.links}}
-                  <li {{action 'executeAction' link query}}><a>{{tb-helper link}}</a></li>
-                {{/each}}
-              </ul>
-            </div>
-          {{/unless}}
-        </td>
-      </tr>
+          <td>
+            {{#unless query.isNew}}
+              <div class="btn-group pull-right">
+                <span data-toggle="dropdown">
+                  <a class="fa fa-gear"></a>
+                </span>
+                <ul class="dropdown-menu" role="menu">
+                  {{#each link in controller.links}}
+                    <li {{action 'executeAction' link query}}><a>{{tb-helper link}}</a></li>
+                  {{/each}}
+                </ul>
+              </div>
+            {{/unless}}
+          </td>
+        </tr>
+      {{/unless}}
     {{/each}}
   </tbody>
 </table>

+ 35 - 28
contrib/views/hive/src/main/resources/ui/hive-web/app/templates/settings.hbs

@@ -16,37 +16,44 @@
 * limitations under the License.
 }}
 
-<span {{bind-attr class="showSettingsOverlay:active :fa :fa-gear :settings-toggle"}} {{action 'toggleOverlay'}}></span>
+{{#if showOverlay}}
+  <div class="settings-container fadeIn">
+    <h3> Settings
+      <button class="btn btn-success btn-xs" {{action 'add'}}><i class="fa fa-plus"></i> Add</button>
+    </h3>
 
-{{#if showSettingsOverlay}}
-<div class="settings-container fadeIn">
-  <h3>Settings
-    <button class="btn btn-success btn-xs" {{action 'add'}}><i class="fa fa-plus"></i> Add</button>
+    {{#each setting in currentSettings.settings}}
+      <div class="setting col-md-6 col-sm-12">
+        <form>
+          <div class="form-group">
+            <div class="input-group">
+              <div class="input-group-addon">
+                {{typeahead-widget
+                    content=predefinedSettings
+                    optionLabelPath="name"
+                    optionValuePath="name"
+                    selection=setting.key
+                    create="addKey"
+                }}
+              </div>
+              <div {{bind-attr class=":input-group-addon setting.valid::has-error"}}>
 
-    <div class="checkbox btn btn-primary btn-xs runOnTez">
-        <label for="runOnTez">
-          {{input type="checkbox" checked=querySettings.runOnTez id="runOnTez"}} {{t "buttons.runOnTez"}}
-        </label>
-    </div>
-  </h3>
+                {{#if setting.key.values}}
+                  {{select-widget items=setting.key.values
+                                  labelPath="value"
+                                  selectedValue=setting.selection
+                                  defaultLabelTranslation="placeholders.select.value"
+                  }}
+                {{else}}
+                  {{input class="input-sm form-control" placeholderTranslation="placeholders.select.value" value=setting.selection.value}}
+                {{/if}}
 
-  {{#each setting in querySettings.settings}}
-    <div class="setting">
-      <form class="form-inline">
-        <div class="form-group">
-          <div class="input-group">
-            <div class="input-group-addon">
-              {{input class="input-sm form-control" placeholder="Key" value=setting.key}}
-            </div>
-            <div class="input-group-addon">
-              {{input class="input-sm form-control" placeholder="Value" value=setting.value}}
-              <span class="fa fa-times-circle remove" {{action 'remove' setting}}></span>
+                <span class="fa fa-times-circle remove pull-right" {{action 'remove' setting}}></span>
+              </div>
             </div>
           </div>
-        </div>
-      </form>
-    </div>
-  {{/each}}
-
-</div>
+        </form>
+      </div>
+    {{/each}}
+  </div>
 {{/if}}

+ 22 - 0
contrib/views/hive/src/main/resources/ui/hive-web/app/templates/tez-ui.hbs

@@ -0,0 +1,22 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
+
+<div id="tez-ui">
+  {{#panel-widget headingTranslation="titles.query.tez"}}
+  {{/panel-widget}}
+</div>

+ 22 - 0
contrib/views/hive/src/main/resources/ui/hive-web/app/templates/visual-explain.hbs

@@ -0,0 +1,22 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
+
+<div id="visual-explain">
+  {{#panel-widget headingTranslation="titles.query.visualExplain"}}
+  {{/panel-widget}}
+</div>

+ 79 - 8
contrib/views/hive/src/main/resources/ui/hive-web/app/utils/constants.js

@@ -17,6 +17,7 @@
  */
 
 import Ember from 'ember';
+import helpers from 'hive/utils/functions';
 
 export default Ember.Object.create({
   appTitle: 'Hive',
@@ -62,6 +63,8 @@ export default Ember.Object.create({
     database: 'database',
     databases: 'databases',
     openQueries: 'open-queries',
+    visualExplain: 'visual-explain',
+    tezUI: 'tez-ui',
     file: 'file',
     fileResource: 'file-resource',
     fileResources: 'file-resources',
@@ -79,15 +82,83 @@ export default Ember.Object.create({
     settings: 'settings'
   },
 
+  hiveParameters: [
+    {
+      name: 'hive.tez.container.size',
+      values: [
+        Ember.Object.create({ value: 'true' }),
+        Ember.Object.create({ value: 'false' })
+      ]
+    },
+    {
+      name: 'hive.prewarm.enabled',
+      validate: helpers.regexes.digits
+    },
+    {
+      name: 'hive.prewarm.numcontainers',
+      values: [
+        Ember.Object.create({ value: 'one' }),
+        Ember.Object.create({ value: 'two' }),
+        Ember.Object.create({ value: 'three' })
+      ]
+    },
+    {
+      name: 'hive.tez.auto.reducer.parallelism',
+      value: 'test'
+    },
+    {
+      name: 'hive.execution.engine'
+    },
+    {
+      name: 'hive.vectorized.execution.enabled'
+    },
+    {
+      name: 'tez.am.resource.memory.mb'
+    },
+    {
+      name: 'tez.am.container.idle.release-timeout-min.millis'
+    },
+    {
+      name: 'tez.am.container.idle.release-timeout-max.millis'
+    },
+    {
+      name: 'tez.queue.name'
+    },
+    {
+      name: 'tez.runtime.io.sort.mb'
+    },
+    {
+      name: 'tez.runtime.sort.threads'
+    },
+    {
+      name: 'tez.runtime.optimize.shared.fetch'
+    },
+    {
+      name: 'tez.runtime.compress.codec'
+    },
+    {
+      name: 'tez.runtime.shuffle.keep-alive.enabled'
+    },
+    {
+      name: 'tez.grouping.min-size'
+    },
+    {
+      name: 'tez.grouping.max-size'
+    },
+    {
+      name: 'tez.generate.debug.artifacts'
+    }
+  ],
+
   statuses: {
-    unknown: "Unknown",
-    initialized: "Initialized",
-    running: "Running",
-    finished: "Finished",
-    canceled: "Canceled",
-    closed: "Closed",
-    error: "Error",
-    pending: "Pending"
+    unknown: "UNKNOWN",
+    initialized: "INITIALIZED",
+    running: "RUNNING",
+    succeeded: "SUCCEEDED",
+    canceled: "CANCELED",
+    closed: "CLOSED",
+    error: "ERROR",
+    pending: "PENDING"
   },
 
   alerts: {

+ 16 - 3
contrib/views/hive/src/main/resources/ui/hive-web/app/utils/functions.js

@@ -22,7 +22,7 @@ import Ember from 'ember';
 
 export default Ember.Object.create({
   isInteger: function (x) {
-    return (x^0) === x;
+    return !isNaN(x);
   },
 
   isDate: function(date) {
@@ -31,6 +31,19 @@ export default Ember.Object.create({
 
   regexes: {
     allUppercase: /^[^a-z]*$/,
-    whitespaces: /^(\s*).*$/
+    whitespaces: /^(\s*).*$/,
+    digits: /^\d+$/
+  },
+
+  insensitiveCompare: function (sourceString) {
+    var args = Array.prototype.slice.call(arguments, 1);
+
+    if (!sourceString) {
+      return;
+    }
+
+    return args.find(function (arg) {
+      return sourceString.match(new RegExp('^' + arg + '$', 'i'));
+    });
   }
-});
+});

+ 35 - 0
contrib/views/hive/src/main/resources/ui/hive-web/app/views/tez-ui.js

@@ -0,0 +1,35 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+
+export default Ember.View.extend({
+  didInsertElement: function () {
+    var target = this.$('#tez-ui');
+
+    target.css('min-height', $('.main-content').height());
+    target.animate({ width: $('.main-content').width() }, 'fast');
+  },
+
+  willDestroyElement: function () {
+    var target = this.$('#tez-ui');
+
+    target.css('min-height', 0);
+    target.css('width', 0);
+  }
+});

+ 35 - 0
contrib/views/hive/src/main/resources/ui/hive-web/app/views/visual-explain.js

@@ -0,0 +1,35 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+
+export default Ember.View.extend({
+  didInsertElement: function () {
+    var target = this.$('#visual-explain');
+
+    target.css('min-height', $('.main-content').height());
+    target.animate({ width: $('.main-content').width() }, 'fast');
+  },
+
+  willDestroyElement: function () {
+    var target = this.$('#visual-explain');
+
+    target.css('min-height', 0);
+    target.css('width', 0);
+  }
+});

+ 4 - 4
contrib/views/hive/src/main/resources/ui/hive-web/bower.json

@@ -4,20 +4,20 @@
     "handlebars": "2.0.0",
     "jquery": "^1.11.1",
     "ember": "1.9.0",
-    "ember-data": "1.0.0-beta.11",
+    "ember-data": "1.0.0-beta.14.1",
     "ember-resolver": "~0.1.7",
     "loader.js": "stefanpenner/loader.js#1.0.1",
     "ember-cli-shims": "stefanpenner/ember-cli-shims#0.0.3",
     "ember-cli-test-loader": "rwjblue/ember-cli-test-loader#0.0.4",
     "ember-load-initializers": "stefanpenner/ember-load-initializers#0.0.2",
-    "ember-qunit": "0.1.8",
-    "ember-qunit-notifications": "0.0.4",
+    "ember-qunit": "0.2.8",
+    "ember-qunit-notifications": "0.0.7",
     "qunit": "~1.15.0",
     "bootstrap": "~3.2.0",
     "ember-i18n": "~2.9.0",
     "blanket": "~1.1.5",
     "jquery-ui": "~1.11.2",
-    "selectize": "~0.11.2",
+    "selectize": "~0.12.0",
     "pretender": "0.1.0"
   },
   "resolutions": {

+ 6 - 6
contrib/views/hive/src/main/resources/ui/hive-web/package.json

@@ -15,7 +15,7 @@
   },
   "repository": "https://github.com/stefanpenner/ember-cli",
   "engines": {
-    "node": ">= 0.10.0"
+    "node": ">= 0.10.32"
   },
   "author": "",
   "license": "MIT",
@@ -24,7 +24,7 @@
     "bower": ">= 1.3.12",
     "broccoli-asset-rev": "0.3.1",
     "broccoli-sass": "^0.3.2",
-    "ember-cli": "0.1.2",
+    "ember-cli": "0.1.15",
     "ember-cli-blanket": "^0.2.2",
     "ember-cli-content-security-policy": "0.3.0",
     "ember-cli-font-awesome": "0.0.4",
@@ -34,12 +34,12 @@
     "ember-cli-jquery-ui": "0.0.12",
     "ember-cli-moment": "0.0.1",
     "ember-cli-pretender": "^0.3.1",
-    "ember-cli-qunit": "0.1.0",
-    "ember-cli-selectize": "0.0.7",
-    "ember-data": "1.0.0-beta.10",
+    "ember-cli-qunit": "0.3.7",
+    "ember-cli-selectize": "0.0.19",
+    "ember-data": "1.0.0-beta.14.1",
     "ember-dynamic-component": "0.0.1",
     "ember-export-application-global": "^1.0.0",
     "express": "^4.8.5",
-    "glob": "^4.0.5"
+    "glob": "4.4.0"
   }
 }

+ 1 - 1
contrib/views/hive/src/main/resources/ui/hive-web/tests/integration/query-editor-test.js

@@ -59,7 +59,7 @@ test('Can execute query', function() {
   click('.execute-query');
 
   andThen(function() {
-    ok(find('.query-process-results-panel .nav-tabs li:nth-child(2)').hasClass('active'), 'Results tab is visible');
+    equal(find('.query-process-results-panel').length, 1, 'Job tabs are visible.');
   });
 });
 

+ 2 - 2
contrib/views/hive/src/main/resources/ui/hive-web/tests/unit/controllers/history-test.js

@@ -40,10 +40,10 @@ test('date range is set correctly', function () {
 
   var history = Ember.ArrayProxy.create({ content: [
     Ember.Object.create({
-      dateSubmitted: min
+      dateSubmittedTimestamp: min
     }),
     Ember.Object.create({
-      dateSubmitted: max
+      dateSubmittedTimestamp: max
     })
   ]});
 

+ 0 - 15
contrib/views/hive/src/main/resources/ui/hive-web/tests/unit/controllers/queries-test.js

@@ -30,18 +30,3 @@ test('controller is initialized', function() {
 
   equal(component.get('columns.length'), 4, 'Columns are initialized correctly');
 });
-
-test('Should hide new queries', function() {
-  expect(1);
-
-  var queries = [
-    { isNew: true },
-    { isNew: false}
-  ];
-
-  var controller = this.subject({
-    queries: queries
-  });
-
-  equal(controller.get('model.length'), 1, 'Hide new queries from the list');
-});

+ 97 - 0
contrib/views/hive/src/main/resources/ui/hive-web/tests/unit/controllers/settings-test.js

@@ -0,0 +1,97 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+import { moduleFor, test } from 'ember-qunit';
+
+moduleFor('controller:settings', 'SettingsController', {
+  needs: [
+    'controller:databases',
+    'controller:index',
+    'controller:open-queries',
+    'controller:loaded-files',
+    'controller:index/history-query/results',
+    'controller:index/history-query/explain',
+    'controller:columns',
+    'controller:udfs',
+    'controller:index/history-query/logs'
+  ]
+});
+
+test('can add a setting', function() {
+  var controller = this.subject();
+
+  ok(!controller.get('currentSettings.settings.length'), 'No initial settings');
+
+  Ember.run(function() {
+    controller.send('add');
+  });
+
+  equal(controller.get('currentSettings.settings.length'), 1, 'Can add settings');
+});
+
+test('hasSettings return true if there are settings', function() {
+  var controller = this.subject();
+
+  ok(!controller.hasSettings(null), 'No settings => return false');
+
+  Ember.run(function() {
+    controller.send('add');
+  });
+
+  ok(controller.hasSettings(null), '1 setting => returns true');
+});
+
+test('setSettingForQuery', function() {
+  var controller = this.subject();
+
+  var settings = [ Ember.Object.create({key: 'key', value: 'value'}) ];
+
+  Ember.run(function() {
+    controller.setSettingForQuery(1, settings);
+  });
+
+  equal(controller.get('currentSettings.settings.firstObject.key'), settings.get('key'), 'It sets the settings for specified query');
+});
+
+test('validate', function() {
+  var predefinedSettings = [
+    {
+      name: 'some.key',
+      validate: new RegExp(/^\d+$/) // digits
+    }
+  ];
+
+  var controller = this.subject({
+    predefinedSettings: predefinedSettings
+  });
+
+  var settings = [
+    Ember.Object.create({key: { name: 'some.key' }, value: 'value'}),
+    Ember.Object.create({key: { name: 'some.key' }, value: '123'})
+  ];
+
+  Ember.run(function() {
+    controller.setSettingForQuery(1, settings);
+  });
+
+  var currentSettings = controller.get('model.firstObject.settings');
+  console.log(currentSettings);
+  ok(!currentSettings.get('firstObject.valid'), "First setting doesn't pass validation");
+  ok(currentSettings.get('lastObject.valid'), 'Second setting passes validation');
+});

部分文件因为文件数量过多而无法显示