
AMBARI-17480. Hive2 view : port changes of following 10 bugs in hive2 view (Nitiraj Rathore via pallavkul)

Pallav Kulshreshtha · 9 years ago · parent commit c02c565979

32 changed files with 2068 additions and 327 deletions
  1. +4 -36    contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/persistence/DataStoreStorage.java
  2. +3 -3     contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/jobs/Aggregator.java
  3. +3 -0     contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/jobs/JobService.java
  4. +8 -8     contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/UploadFromHdfsInput.java
  5. +101 -65  contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/UploadService.java
  6. +2 -3     contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/DataParser.java
  7. +1 -5     contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/IParser.java
  8. +66 -10   contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/ParseUtils.java
  9. +46 -32   contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/Parser.java
  10. +4 -3    contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/csv/CSVParser.java
  11. +1 -1    contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/json/JSONParser.java
  12. +1 -1    contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/xml/XMLParser.java
  13. +0 -1    contrib/views/hive-next/src/main/resources/ui/hive-web/app/adapters/file-upload.js
  14. +1 -5    contrib/views/hive-next/src/main/resources/ui/hive-web/app/adapters/upload-table.js
  15. +10 -1   contrib/views/hive-next/src/main/resources/ui/hive-web/app/components/file-upload.js
  16. +60 -0   contrib/views/hive-next/src/main/resources/ui/hive-web/app/components/validated-text-field.js
  17. +9 -1    contrib/views/hive-next/src/main/resources/ui/hive-web/app/controllers/index.js
  18. +138 -115 contrib/views/hive-next/src/main/resources/ui/hive-web/app/controllers/upload-table.js
  19. +59 -3   contrib/views/hive-next/src/main/resources/ui/hive-web/app/initializers/i18n.js
  20. +1 -8    contrib/views/hive-next/src/main/resources/ui/hive-web/app/routes/history.js
  21. +4 -4    contrib/views/hive-next/src/main/resources/ui/hive-web/app/services/job.js
  22. +4 -0    contrib/views/hive-next/src/main/resources/ui/hive-web/app/styles/app.scss
  23. +23 -0   contrib/views/hive-next/src/main/resources/ui/hive-web/app/templates/components/validated-text-field.hbs
  24. +34 -22  contrib/views/hive-next/src/main/resources/ui/hive-web/app/templates/upload-table.hbs
  25. +109 -0  contrib/views/hive-next/src/test/java/org/apache/ambari/view/hive2/resources/upload/CSVParserTest.java
  26. +326 -0  contrib/views/hive-next/src/test/java/org/apache/ambari/view/hive2/resources/upload/DataParserCSVTest.java
  27. +263 -0  contrib/views/hive-next/src/test/java/org/apache/ambari/view/hive2/resources/upload/DataParserJSONTest.java
  28. +295 -0  contrib/views/hive-next/src/test/java/org/apache/ambari/view/hive2/resources/upload/DataParserXMLTest.java
  29. +146 -0  contrib/views/hive-next/src/test/java/org/apache/ambari/view/hive2/resources/upload/JsonParserTest.java
  30. +84 -0   contrib/views/hive-next/src/test/java/org/apache/ambari/view/hive2/resources/upload/QueryGeneratorTest.java
  31. +127 -0  contrib/views/hive-next/src/test/java/org/apache/ambari/view/hive2/resources/upload/TableDataReaderTest.java
  32. +135 -0  contrib/views/hive-next/src/test/java/org/apache/ambari/view/hive2/resources/upload/XMLParserTest.java

+ 4 - 36
contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/persistence/DataStoreStorage.java

@@ -54,36 +54,17 @@ public class DataStoreStorage implements Storage {
 
   @Override
   public synchronized void store(Class model, Indexed obj) {
-    assignId(model, obj);
-
-    Indexed newBean;
-    try {
-      newBean = (Indexed) BeanUtils.cloneBean(obj);
-    } catch (IllegalAccessException e) {
-      throw new ServiceFormattedException("S010 Data storage error", e);
-    } catch (InstantiationException e) {
-      throw new ServiceFormattedException("S010 Data storage error", e);
-    } catch (InvocationTargetException e) {
-      throw new ServiceFormattedException("S010 Data storage error", e);
-    } catch (NoSuchMethodException e) {
-      throw new ServiceFormattedException("S010 Data storage error", e);
-    }
-    preprocessEntity(newBean);
 
     try {
+      Indexed newBean = (Indexed) BeanUtils.cloneBean(obj);
+      preprocessEntity(newBean);
       context.getDataStore().store(newBean);
-    } catch (PersistenceException e) {
+      obj.setId(newBean.getId());
+    } catch (Exception e) {
       throw new ServiceFormattedException("S020 Data storage error", e);
     }
   }
 
-  public void assignId(Class model, Indexed obj) {
-    if (obj.getId() == null) {
-      String id = nextIdForEntity(context, model);
-      obj.setId(id);
-    }
-  }
-
   private void preprocessEntity(Indexed obj) {
     cleanTransientFields(obj);
   }
@@ -103,19 +84,6 @@ public class DataStoreStorage implements Storage {
     }
   }
 
-  private static synchronized String nextIdForEntity(ViewContext context, Class aClass) {
-    // auto increment id implementation
-    String lastId = context.getInstanceData(aClass.getName());
-    int newId;
-    if (lastId == null) {
-      newId = 1;
-    } else {
-      newId = Integer.parseInt(lastId) + 1;
-    }
-    context.putInstanceData(aClass.getName(), String.valueOf(newId));
-    return String.valueOf(newId);
-  }
-
   @Override
   public synchronized <T extends Indexed> T load(Class<T> model, Object id) throws ItemNotFound {
     LOG.debug(String.format("Loading %s #%s", model.getName(), id));
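
Note on this hunk: DataStoreStorage stops assigning ids itself (the nextIdForEntity counter kept in view instance data is gone), reads back whatever id the DataStore assigned via obj.setId(newBean.getId()), and collapses four identical reflective catch blocks into a single catch (Exception e). A hedged sketch of the Java 7 multi-catch alternative the patch could have used instead of widening to Exception, assuming BeanUtils.cloneBean's declared exceptions:

    // Sketch only: multi-catch keeps the original exception types visible
    // while still removing the duplication; the patch opts for the broader
    // catch (Exception e) instead.
    try {
      Indexed newBean = (Indexed) BeanUtils.cloneBean(obj);
      preprocessEntity(newBean);
      context.getDataStore().store(newBean);
      obj.setId(newBean.getId());
    } catch (IllegalAccessException | InstantiationException
        | InvocationTargetException | NoSuchMethodException e) {
      throw new ServiceFormattedException("S010 Data storage error", e);
    } catch (PersistenceException e) {
      throw new ServiceFormattedException("S020 Data storage error", e);
    }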

+ 3 - 3
contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/jobs/Aggregator.java

@@ -73,9 +73,9 @@ public class Aggregator {
   public List<Job> readAll(String username) {
     Set<String> addedOperationIds = new HashSet<>();
 
-    List<Job> allJobs = new LinkedList<>();
-    for (HiveQueryId atsHiveQuery : ats.getHiveQueryIdsList(username)) {
-
+    List<Job> allJobs = new LinkedList<Job>();
+    List<HiveQueryId> queries = ats.getHiveQueryIdsList(username);
+    for (HiveQueryId atsHiveQuery : queries) {
       TezDagId atsTezDag = getTezDagFromHiveQueryId(atsHiveQuery);
 
       JobImpl atsJob;

+ 3 - 0
contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/jobs/JobService.java

@@ -500,10 +500,13 @@ public class JobService extends BaseService {
 
       return Response.ok(jobObject).status(201).build();
     } catch (WebApplicationException ex) {
+      LOG.error("Error occurred while creating job : ",ex);
       throw ex;
     } catch (ItemNotFound itemNotFound) {
+      LOG.error("Error occurred while creating job : ",itemNotFound);
       throw new NotFoundFormattedException(itemNotFound.getMessage(), itemNotFound);
     } catch (Throwable ex) {
+      LOG.error("Error occurred while creating job : ",ex);
       throw new ServiceFormattedException(ex.getMessage(), ex);
     }
   }
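
Note on this hunk: each catch now logs before rethrowing. With SLF4J, passing the exception as the final argument prints its full stack trace, which is why the message in the patch ends at " : " with nothing concatenated. A minimal sketch (jobId is a hypothetical variable, shown only to illustrate the parameterized form):

    LOG.error("Error occurred while creating job : ", ex);        // as in the patch
    LOG.error("Error occurred while creating job {}", jobId, ex); // {} filled from jobId,
                                                                  // ex still logged as the throwable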

+ 8 - 8
contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/UploadFromHdfsInput.java

@@ -21,7 +21,7 @@ package org.apache.ambari.view.hive2.resources.uploads;
 import java.io.Serializable;
 
 public class UploadFromHdfsInput implements Serializable{
-  private Boolean isFirstRowHeader;
+  private Boolean isFirstRowHeader = Boolean.FALSE;
   private String inputFileType;
   private String hdfsPath;
   private String tableName;
@@ -80,12 +80,12 @@ public class UploadFromHdfsInput implements Serializable{
 
   @Override
   public String toString() {
-    return "UploadFromHdfsInput{" +
-            "isFirstRowHeader=" + isFirstRowHeader +
-            ", inputFileType='" + inputFileType + '\'' +
-            ", hdfsPath='" + hdfsPath + '\'' +
-            ", tableName='" + tableName + '\'' +
-            ", databaseName='" + databaseName + '\'' +
-            '}';
+    return new StringBuilder("UploadFromHdfsInput{" )
+            .append("isFirstRowHeader=").append( isFirstRowHeader )
+            .append(", inputFileType='" ).append(inputFileType)
+            .append(", hdfsPath='").append(hdfsPath)
+            .append(", tableName='").append( tableName )
+            .append(", databaseName='").append(databaseName )
+            .append('}').toString();
   }
 }
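
Note on this hunk: defaulting isFirstRowHeader to Boolean.FALSE matters beyond style. UploadService.uploadFileFromHdfs (below) now calls input.getIsFirstRowHeader().equals(Boolean.FALSE), which would throw a NullPointerException for any JSON payload that omits the field. A hedged illustration, assuming the bean's default constructor:

    // Sketch: the payload omits isFirstRowHeader, so the field keeps its default.
    UploadFromHdfsInput input = new UploadFromHdfsInput();
    boolean loadDirectly = input.getIsFirstRowHeader().equals(Boolean.FALSE); // true, no NPE
    // Before this patch the field defaulted to null and the same call would crash.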

+ 101 - 65
contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/UploadService.java

@@ -21,7 +21,6 @@ package org.apache.ambari.view.hive2.resources.uploads;
 import com.sun.jersey.core.header.FormDataContentDisposition;
 import com.sun.jersey.multipart.FormDataParam;
 import org.apache.ambari.view.hive2.BaseService;
-import org.apache.ambari.view.hive2.persistence.utils.ItemNotFound;
 import org.apache.ambari.view.hive2.resources.jobs.viewJobs.Job;
 import org.apache.ambari.view.hive2.resources.jobs.viewJobs.JobController;
 import org.apache.ambari.view.hive2.resources.jobs.viewJobs.JobImpl;
@@ -29,7 +28,11 @@ import org.apache.ambari.view.hive2.resources.jobs.viewJobs.JobResourceManager;
 import org.apache.ambari.view.hive2.resources.uploads.parsers.DataParser;
 import org.apache.ambari.view.hive2.resources.uploads.parsers.ParseOptions;
 import org.apache.ambari.view.hive2.resources.uploads.parsers.PreviewData;
-import org.apache.ambari.view.hive2.resources.uploads.query.*;
+import org.apache.ambari.view.hive2.resources.uploads.query.DeleteQueryInput;
+import org.apache.ambari.view.hive2.resources.uploads.query.InsertFromQueryInput;
+import org.apache.ambari.view.hive2.resources.uploads.query.LoadQueryInput;
+import org.apache.ambari.view.hive2.resources.uploads.query.QueryGenerator;
+import org.apache.ambari.view.hive2.resources.uploads.query.TableInfo;
 import org.apache.ambari.view.hive2.utils.ServiceFormattedException;
 import org.apache.ambari.view.hive2.utils.SharedObjectsFactory;
 import org.apache.ambari.view.utils.ambari.AmbariApi;
@@ -37,13 +40,24 @@ import org.apache.commons.io.input.ReaderInputStream;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.json.simple.JSONObject;
-
-import javax.ws.rs.*;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.ws.rs.Consumes;
+import javax.ws.rs.POST;
+import javax.ws.rs.Path;
+import javax.ws.rs.Produces;
+import javax.ws.rs.WebApplicationException;
 import javax.ws.rs.core.MediaType;
 import javax.ws.rs.core.Response;
-import java.io.*;
-import java.lang.reflect.InvocationTargetException;
-import java.util.*;
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.io.Reader;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
 
 /**
  * UI driven end points for creation of new hive table and inserting data into it.
@@ -61,13 +75,16 @@ import java.util.*;
  */
 public class UploadService extends BaseService {
 
+  private final static Logger LOG =
+    LoggerFactory.getLogger(UploadService.class);
+
   private AmbariApi ambariApi;
   protected JobResourceManager resourceManager;
 
   final private static String HIVE_METASTORE_LOCATION_KEY = "hive.metastore.warehouse.dir";
   final private static String HIVE_SITE = "hive-site";
   final private static String HIVE_METASTORE_LOCATION_KEY_VIEW_PROPERTY = HIVE_METASTORE_LOCATION_KEY;
-  private static final String HIVE_DEFAULT_METASTORE_LOCATION = "/apps/hive/warehouse" ;
+  private static final String HIVE_DEFAULT_METASTORE_LOCATION = "/apps/hive/warehouse";
   final private static String HIVE_DEFAULT_DB = "default";
 
   public void validateForUploadFile(UploadFromHdfsInput input){
@@ -107,17 +124,19 @@ public class UploadService extends BaseService {
   @Consumes(MediaType.APPLICATION_JSON)
   @Produces(MediaType.APPLICATION_JSON)
   public Response uploadForPreviewFromHDFS(UploadFromHdfsInput input) {
-
     InputStream uploadedInputStream = null;
     try {
       uploadedInputStream = getHDFSFileStream(input.getHdfsPath());
       this.validateForPreview(input);
       PreviewData pd = generatePreview(input.getIsFirstRowHeader(), input.getInputFileType(), uploadedInputStream);
       String tableName = getBasenameFromPath(input.getHdfsPath());
-      return createPreviewResponse(pd, input.getIsFirstRowHeader(),tableName);
+      return createPreviewResponse(pd, input.getIsFirstRowHeader(), tableName);
+    } catch (WebApplicationException e) {
+      LOG.error(getErrorMessage(e), e);
+      throw e;
     } catch (Exception e) {
-      LOG.error("Exception occurred while generating preview for hdfs file : " + input.getHdfsPath(), e);
-      throw new ServiceFormattedException(e.getMessage(), e);
+      LOG.error(e.getMessage(), e);
+      throw new ServiceFormattedException(e);
     } finally {
       if (null != uploadedInputStream) {
         try {
@@ -146,10 +165,13 @@ public class UploadService extends BaseService {
         isFirstRowHeader = false;
 
       PreviewData pd = generatePreview(isFirstRowHeader, inputFileType, uploadedInputStream);
-      return createPreviewResponse(pd, isFirstRowHeader,getBasename(fileDetail.getFileName()));
+      return createPreviewResponse(pd, isFirstRowHeader, getBasename(fileDetail.getFileName()));
+    } catch (WebApplicationException e) {
+      LOG.error(getErrorMessage(e), e);
+      throw e;
     } catch (Exception e) {
-      LOG.error("Exception occurred while generating preview for local file", e);
-      throw new ServiceFormattedException(e.getMessage(), e);
+      LOG.error(e.getMessage(), e);
+      throw new ServiceFormattedException(e);
     }
   }
 
@@ -158,7 +180,7 @@ public class UploadService extends BaseService {
   @POST
   @Consumes(MediaType.APPLICATION_JSON)
   @Produces(MediaType.APPLICATION_JSON)
-  public Response createTable(TableInput tableInput) {
+  public Job createTable(TableInput tableInput) {
     try {
       tableInput.validate();
       List<ColumnDescriptionImpl> header = tableInput.getHeader();
@@ -173,17 +195,15 @@ public class UploadService extends BaseService {
 
       LOG.info("tableCreationQuery : {}", tableCreationQuery);
 
-      Job actualTableJob = createJob(tableCreationQuery, databaseName);
-      String actualTableJobId = actualTableJob.getId();
-
-      JSONObject jobObject = new JSONObject();
-      jobObject.put("jobId", actualTableJobId);
-
-      LOG.info("table creation jobId {}", actualTableJobId);
-      return Response.ok(jobObject).status(201).build();
+      Job job = createJob(tableCreationQuery, databaseName);
+      LOG.info("job created for table creation {}", job);
+      return job;
+    } catch (WebApplicationException e) {
+      LOG.error(getErrorMessage(e), e);
+      throw e;
     } catch (Throwable e) {
-      LOG.error("Exception occurred while creating table with input : " + tableInput, e);
-      throw new ServiceFormattedException(e.getMessage(), e);
+      LOG.error(e.getMessage(), e);
+      throw new ServiceFormattedException(e);
     }
   }
 
@@ -191,40 +211,41 @@ public class UploadService extends BaseService {
   @POST
   @Consumes(MediaType.APPLICATION_JSON)
   @Produces(MediaType.APPLICATION_JSON)
-  public Response uploadFileFromHdfs(UploadFromHdfsInput input ) {
-    this.validateForUploadFile(input);
-
-    if (ParseOptions.InputFileType.CSV.toString().equals(input.getInputFileType()) && Boolean.FALSE.equals(input.getIsFirstRowHeader())) {
-      // upload using the LOAD query
-      LoadQueryInput loadQueryInput = new LoadQueryInput(input.getHdfsPath(), input.getDatabaseName(), input.getTableName());
-      String loadQuery = new QueryGenerator().generateLoadQuery(loadQueryInput);
-
+  public Response uploadFileFromHdfs(UploadFromHdfsInput input) {
+    if (ParseOptions.InputFileType.CSV.toString().equals(input.getInputFileType()) && input.getIsFirstRowHeader().equals(Boolean.FALSE)) {
       try {
-        Job job = createJob(loadQuery,  input.getDatabaseName());
+        // upload using the LOAD query
+        LoadQueryInput loadQueryInput = new LoadQueryInput(input.getHdfsPath(), input.getDatabaseName(), input.getTableName());
+        String loadQuery = new QueryGenerator().generateLoadQuery(loadQueryInput);
+        Job job = createJob(loadQuery, input.getDatabaseName());
 
         JSONObject jo = new JSONObject();
         jo.put("jobId", job.getId());
-
         return Response.ok(jo).build();
+      } catch (WebApplicationException e) {
+        LOG.error(getErrorMessage(e), e);
+        throw e;
       } catch (Throwable e) {
-        LOG.error("Exception occurred while creating job for Load From HDFS query : " + loadQuery, e);
-        throw new ServiceFormattedException(e.getMessage(), e);
+        LOG.error(e.getMessage(), e);
+        throw new ServiceFormattedException(e);
       }
-
     } else {
       // create stream and upload
       InputStream hdfsStream = null;
       try {
         hdfsStream = getHDFSFileStream(input.getHdfsPath());
-        String path = uploadFileFromStream(hdfsStream, input.getIsFirstRowHeader(),input.getInputFileType(),input.getTableName(), input.getDatabaseName());
+        String path = uploadFileFromStream(hdfsStream, input.getIsFirstRowHeader(), input.getInputFileType(), input.getTableName(), input.getDatabaseName());
 
         JSONObject jo = new JSONObject();
         jo.put("uploadedPath", path);
 
         return Response.ok(jo).build();
+      } catch (WebApplicationException e) {
+        LOG.error(getErrorMessage(e), e);
+        throw e;
       } catch (Exception e) {
-        LOG.error("Exception occurred while uploading the file from HDFS with path : " + input.getHdfsPath(), e);
-        throw new ServiceFormattedException(e.getMessage(), e);
+        LOG.error(e.getMessage(), e);
+        throw new ServiceFormattedException(e);
       } finally {
         if (null != hdfsStream)
           try {
@@ -249,14 +270,17 @@ public class UploadService extends BaseService {
     @FormDataParam("databaseName") String databaseName
   ) {
     try {
-
-      String path = uploadFileFromStream(uploadedInputStream,isFirstRowHeader,inputFileType,tableName,databaseName);
+      String path = uploadFileFromStream(uploadedInputStream, isFirstRowHeader, inputFileType, tableName, databaseName);
 
       JSONObject jo = new JSONObject();
       jo.put("uploadedPath", path);
       return Response.ok(jo).build();
+    } catch (WebApplicationException e) {
+      LOG.error(getErrorMessage(e), e);
+      throw e;
     } catch (Exception e) {
-      throw new ServiceFormattedException(e.getMessage(), e);
+      LOG.error(e.getMessage(), e);
+      throw new ServiceFormattedException(e);
     }
   }
 
@@ -264,19 +288,20 @@ public class UploadService extends BaseService {
   @POST
   @Consumes(MediaType.APPLICATION_JSON)
   @Produces(MediaType.APPLICATION_JSON)
-  public Response insertFromTempTable(InsertFromQueryInput input) {
+  public Job insertFromTempTable(InsertFromQueryInput input) {
     try {
       String insertQuery = generateInsertFromQuery(input);
       LOG.info("insertQuery : {}", insertQuery);
 
       Job job = createJob(insertQuery, "default");
-
-      JSONObject jo = new JSONObject();
-      jo.put("jobId", job.getId());
-
-      return Response.ok(jo).build();
+      LOG.info("Job created for insert from temp table : {}", job);
+      return job;
+    } catch (WebApplicationException e) {
+      LOG.error(getErrorMessage(e), e);
+      throw e;
     } catch (Throwable e) {
-      throw new ServiceFormattedException(e.getMessage(), e);
+      LOG.error(e.getMessage(), e);
+      throw new ServiceFormattedException(e);
     }
   }
 
@@ -284,19 +309,20 @@ public class UploadService extends BaseService {
   @POST
   @Consumes(MediaType.APPLICATION_JSON)
   @Produces(MediaType.APPLICATION_JSON)
-  public Response deleteTable(DeleteQueryInput input) {
+  public Job deleteTable(DeleteQueryInput input) {
     try {
       String deleteQuery = generateDeleteQuery(input);
       LOG.info("deleteQuery : {}", deleteQuery);
 
       Job job = createJob(deleteQuery, "default");
-
-      JSONObject jo = new JSONObject();
-      jo.put("jobId", job.getId());
-
-      return Response.ok(jo).build();
+      LOG.info("Job created for delete temp table : {} ", job);
+      return job;
+    } catch (WebApplicationException e) {
+      LOG.error(getErrorMessage(e), e);
+      throw e;
     } catch (Throwable e) {
-      throw new ServiceFormattedException(e.getMessage(), e);
+      LOG.error(e.getMessage(), e);
+      throw new ServiceFormattedException(e);
     }
   }
 
@@ -319,8 +345,12 @@ public class UploadService extends BaseService {
       uploadFile(fullPath, new ReaderInputStream(reader));
 
       return fullPath;
+    } catch (WebApplicationException e) {
+      LOG.error(getErrorMessage(e), e);
+      throw e;
     } catch (Exception e) {
-      throw new ServiceFormattedException(e.getMessage(), e);
+      LOG.error(e.getMessage(), e);
+      throw new ServiceFormattedException(e);
     }
   }
 
@@ -371,9 +401,9 @@ public class UploadService extends BaseService {
 
   private String getHiveMetaStoreLocation() {
     String dir = context.getProperties().get(HIVE_METASTORE_LOCATION_KEY_VIEW_PROPERTY);
-    if(dir != null && !dir.trim().isEmpty()){
+    if (dir != null && !dir.trim().isEmpty()) {
       return dir;
-    }else{
+    } else {
       LOG.debug("Neither found associated cluster nor found the view property {}. Returning default location : {}", HIVE_METASTORE_LOCATION_KEY_VIEW_PROPERTY, HIVE_DEFAULT_METASTORE_LOCATION);
       return HIVE_DEFAULT_METASTORE_LOCATION;
     }
@@ -390,6 +420,12 @@ public class UploadService extends BaseService {
     out.close();
   }
 
+  private static String getErrorMessage(WebApplicationException e) {
+    if (null != e.getResponse() && null != e.getResponse().getEntity())
+      return e.getResponse().getEntity().toString();
+    else return e.getMessage();
+  }
+
   private PreviewData generatePreview(Boolean isFirstRowHeader, String inputFileType, InputStream uploadedInputStream) throws Exception {
     ParseOptions parseOptions = new ParseOptions();
     parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, inputFileType);
@@ -452,10 +488,10 @@ public class UploadService extends BaseService {
     return getBasename(fileName);
   }
 
-  private String getBasename(String fileName){
+  private String getBasename(String fileName) {
     int index = fileName.indexOf(".");
-    if(index != -1){
-      return fileName.substring(0,index);
+    if (index != -1) {
+      return fileName.substring(0, index);
     }
 
     return fileName;
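
Note on this file: every endpoint now follows the same catch pattern — log and rethrow WebApplicationException (getErrorMessage prefers the response entity over getMessage), wrap anything else in ServiceFormattedException. A condensed sketch of the template, endpoint body elided (some endpoints catch Throwable rather than Exception):

    try {
      // ... endpoint work ...
    } catch (WebApplicationException e) {
      LOG.error(getErrorMessage(e), e); // response entity used as the log message
      throw e;                          // already carries a client-facing response
    } catch (Exception e) {
      LOG.error(e.getMessage(), e);
      throw new ServiceFormattedException(e);
    }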

+ 2 - 3
contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/DataParser.java

@@ -23,7 +23,6 @@ import org.apache.ambari.view.hive2.resources.uploads.parsers.csv.CSVParser;
 import org.apache.ambari.view.hive2.resources.uploads.parsers.json.JSONParser;
 import org.apache.ambari.view.hive2.resources.uploads.parsers.xml.XMLParser;
 
-import java.io.IOException;
 import java.io.Reader;
 import java.util.Iterator;
 
@@ -35,7 +34,7 @@ public class DataParser implements IParser {
 
   private IParser parser;
 
-  public DataParser(Reader reader, ParseOptions parseOptions) throws IOException {
+  public DataParser(Reader reader, ParseOptions parseOptions) throws Exception {
     if (parseOptions.getOption(ParseOptions.OPTIONS_FILE_TYPE).equals(ParseOptions.InputFileType.CSV.toString())) {
       parser = new CSVParser(reader, parseOptions);
     } else if (parseOptions.getOption(ParseOptions.OPTIONS_FILE_TYPE).equals(ParseOptions.InputFileType.JSON.toString())) {
@@ -61,7 +60,7 @@ public class DataParser implements IParser {
   }
 
   @Override
-  public void close() throws IOException {
+  public void close() throws Exception {
     parser.close();
   }
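
Note on this file: DataParser is a thin factory-plus-delegate — ParseOptions.OPTIONS_FILE_TYPE selects the concrete parser (CSV, JSON, or XML) and every IParser method forwards to it. A hedged construction sketch with inline sample data:

    // Sketch: the option picks the CSV backend; JSON and XML work the same way.
    ParseOptions opts = new ParseOptions();
    opts.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.CSV.toString());
    IParser parser = new DataParser(new java.io.StringReader("a,b\n1,2"), opts);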
 

+ 1 - 5
contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/IParser.java

@@ -20,14 +20,13 @@ package org.apache.ambari.view.hive2.resources.uploads.parsers;
 
 import org.apache.ambari.view.hive2.client.Row;
 
-import java.io.IOException;
 import java.io.Reader;
 
 /**
  * Interface defining methods for Parsers that can used for generating preview
  * and uploading table into hive.
  */
-public interface IParser extends Iterable<Row> {
+public interface IParser extends Iterable<Row>, AutoCloseable{
 
   /**
    * @return returns the Reader that can be read to get the table data as CSV Text Data that can be uploaded directly
@@ -38,7 +37,4 @@ public interface IParser extends Iterable<Row> {
   PreviewData parsePreview();
 
   Row extractHeader();
-
-  void close() throws IOException;
-
 }
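
Note on this hunk: extending AutoCloseable is what lets close() disappear from the interface body (AutoCloseable already declares close() throws Exception) and, more usefully, lets callers manage parsers with try-with-resources. A hedged usage sketch:

    try (IParser parser = new DataParser(reader, parseOptions)) {
      PreviewData preview = parser.parsePreview();
      // ... use preview ...
    } // parser.close() runs automatically, even if parsePreview() throws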

+ 66 - 10
contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/ParseUtils.java

@@ -18,15 +18,24 @@
 
 package org.apache.ambari.view.hive2.resources.uploads.parsers;
 
-import org.apache.ambari.view.hive2.client.ColumnDescription;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.text.SimpleDateFormat;
 import java.util.Date;
+import java.util.List;
+
+import static org.apache.ambari.view.hive2.client.ColumnDescription.DataTypes;
 
 public class ParseUtils {
 
+  protected final static Logger LOG =
+    LoggerFactory.getLogger(ParseUtils.class);
+
   final public static String[] DATE_FORMATS = {"mm/dd/yyyy", "dd/mm/yyyy", "mm-dd-yyyy" /*add more formatss*/};
 
+  final public static DataTypes [] dataTypeList = {DataTypes.BOOLEAN,DataTypes.INT,DataTypes.BIGINT,DataTypes.DOUBLE,DataTypes.CHAR,DataTypes.DATE,DataTypes.STRING};
+
   public static boolean isInteger(Object object) {
     if (object == null)
       return false;
@@ -56,6 +65,12 @@ public class ParseUtils {
       return false;
   }
 
+  public static boolean isString(Object object) {
+    if (object == null)
+      return false;
+    else return true; // any non null can always be interpreted as a string
+  }
+
   public static boolean isLong(Object object) {
     if (object == null)
       return false;
@@ -119,15 +134,56 @@ public class ParseUtils {
     return false;
   }
 
-  public static ColumnDescription.DataTypes detectHiveDataType(Object object) {
+  public static DataTypes detectHiveDataType(Object object) {
     // detect Integer
-    if (isInteger(object)) return ColumnDescription.DataTypes.INT;
-    if (isLong(object)) return ColumnDescription.DataTypes.BIGINT;
-    if (isBoolean(object)) return ColumnDescription.DataTypes.BOOLEAN;
-    if (isDouble(object)) return ColumnDescription.DataTypes.DOUBLE;
-    if (isDate(object)) return ColumnDescription.DataTypes.DATE;
-    if (isChar(object)) return ColumnDescription.DataTypes.CHAR;
-
-    return ColumnDescription.DataTypes.STRING;
+    if (isBoolean(object)) return DataTypes.BOOLEAN;
+    if (isInteger(object)) return DataTypes.INT;
+    if (isLong(object)) return DataTypes.BIGINT;
+    if (isDouble(object)) return DataTypes.DOUBLE;
+    if (isChar(object)) return DataTypes.CHAR;
+    if (isDate(object)) return DataTypes.DATE;
+
+    return DataTypes.STRING;
+  }
+
+  public static boolean checkDatatype( Object object, DataTypes datatype){
+    switch(datatype){
+
+      case BOOLEAN :
+        return isBoolean(object);
+      case INT :
+        return isInteger(object);
+      case BIGINT :
+        return isLong(object);
+      case DOUBLE:
+        return isDouble(object);
+      case CHAR:
+        return isChar(object);
+      case DATE:
+        return isDate(object);
+      case STRING:
+        return isString(object);
+
+      default:
+        LOG.error("this datatype detection is not supported : {}", datatype);
+        return false;
+    }
+  }
+
+  public static DataTypes detectHiveColumnDataType(List<Object> colValues) {
+    boolean found = true;
+    for(DataTypes datatype : dataTypeList){
+      found = true;
+      for(Object object : colValues){
+        if(!checkDatatype(object,datatype)){
+          found = false;
+          break;
+        }
+      }
+
+      if(found) return datatype;
+    }
+
+    return DataTypes.STRING; //default
   }
 }
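
Note on this hunk: detectHiveColumnDataType walks dataTypeList in its declared order (BOOLEAN, INT, BIGINT, DOUBLE, CHAR, DATE, STRING) and returns the first type that every value in the column satisfies, so a single outlier widens the whole column. A hedged worked example with hypothetical values:

    List<Object> ints  = Arrays.asList("1", "2", "3");
    ParseUtils.detectHiveColumnDataType(ints);   // INT: all values pass isInteger
    List<Object> mixed = Arrays.asList("1", "2.5", "3");
    ParseUtils.detectHiveColumnDataType(mixed);  // DOUBLE: "2.5" fails INT and BIGINT,
                                                 // but all three pass isDouble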

+ 46 - 32
contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/Parser.java

@@ -22,6 +22,8 @@ import org.apache.ambari.view.hive2.client.ColumnDescription;
 import org.apache.ambari.view.hive2.client.Row;
 import org.apache.ambari.view.hive2.resources.uploads.ColumnDescriptionImpl;
 import org.apache.ambari.view.hive2.resources.uploads.TableDataReader;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.Reader;
 import java.util.ArrayList;
@@ -35,6 +37,9 @@ import java.util.NoSuchElementException;
  */
 public abstract class Parser implements IParser {
 
+  protected final static Logger LOG =
+    LoggerFactory.getLogger(Parser.class);
+
   protected Reader reader; // same as CSV reader in this case
   protected ParseOptions parseOptions;
   private int numberOfPreviewRows = 10;
@@ -45,22 +50,23 @@ public abstract class Parser implements IParser {
   }
 
   /**
-   * returns which datatype was detected for the maximum number of times in the given column
+   * returns which datatype is valid for all the values
+   */
+
+  /**
    *
-   * @param typeCounts
-   * @param colNum
-   * @return
+   * @param rows : non empty list of rows
+   * @param colNum : to detect datatype for this column number.
+   * @return data type for that column
    */
-  private int getLikelyDataType(int[][] typeCounts, int colNum) {
-    int[] colArray = typeCounts[colNum];
-    int maxIndex = 0;
-    int i = 1;
-    for (; i < colArray.length; i++) {
-      if (colArray[i] > colArray[maxIndex])
-        maxIndex = i;
+  private ColumnDescription.DataTypes getLikelyDataType(List<Row> rows, int colNum) {
+    // order of detection BOOLEAN,INT,BIGINT,DOUBLE,DATE,CHAR,STRING
+    List<Object> colValues = new ArrayList<>(rows.size());
+    for( Row row : rows ){
+      colValues.add(row.getRow()[colNum]);
     }
 
-    return maxIndex;
+    return ParseUtils.detectHiveColumnDataType(colValues);
   }
 
   @Override
@@ -70,12 +76,15 @@ public abstract class Parser implements IParser {
 
   @Override
   public PreviewData parsePreview() {
-    List<Row> previewRows;
+    LOG.info("generating preview for : {}", this.parseOptions );
+
+    ArrayList<Row> previewRows;
     List<ColumnDescription> header;
 
     try {
       numberOfPreviewRows = (Integer) parseOptions.getOption(ParseOptions.OPTIONS_NUMBER_OF_PREVIEW_ROWS);
     } catch (Exception e) {
+      LOG.debug("Illegal number of preview columns supplied {}",parseOptions.getOption(ParseOptions.OPTIONS_NUMBER_OF_PREVIEW_ROWS) );
     }
 
     int numberOfRows = numberOfPreviewRows;
@@ -83,42 +92,35 @@ public abstract class Parser implements IParser {
 
     Row headerRow = null;
     Integer numOfCols = null;
-    int[][] typeCounts = null;
 
-    if (parseOptions.getOption(ParseOptions.OPTIONS_HEADER) != null && parseOptions.getOption(ParseOptions.OPTIONS_HEADER).equals(ParseOptions.HEADER.FIRST_RECORD.toString())) {
-      if (!this.iterator().hasNext()) {
-        throw new NoSuchElementException("Cannot parse Header");
-      }
+    if (parseOptions.getOption(ParseOptions.OPTIONS_HEADER) != null &&
+      ( parseOptions.getOption(ParseOptions.OPTIONS_HEADER).equals(ParseOptions.HEADER.FIRST_RECORD.toString()) ||
+        parseOptions.getOption(ParseOptions.OPTIONS_HEADER).equals(ParseOptions.HEADER.EMBEDDED.toString())
+      )) {
       headerRow = extractHeader();
       numOfCols = headerRow.getRow().length;
-      typeCounts = new int[numOfCols][ColumnDescription.DataTypes.values().length];
-      previewRows.add(headerRow);
     }
 
-    // find data types.
-
     Row r;
     if (iterator().hasNext()) {
       r = iterator().next();
       if( null == numOfCols ) {
         numOfCols = r.getRow().length;
-        typeCounts = new int[numOfCols][ColumnDescription.DataTypes.values().length];
       }
     } else {
-        throw new NoSuchElementException("No rows in the file.");
+      LOG.error("No rows found in the file. returning error.");
+      throw new NoSuchElementException("No rows in the file.");
     }
 
     while (true) {
       // create Header definition from row
       Object[] values = r.getRow();
-
       Object[] newValues= new Object[numOfCols]; // adds null if less columns detected and removes extra columns if any
 
       for (int colNum = 0; colNum < numOfCols; colNum++) {
         if(colNum < values.length) {
           // detect type
           ColumnDescription.DataTypes type = ParseUtils.detectHiveDataType(values[colNum]);
-          typeCounts[colNum][type.ordinal()]++;
           newValues[colNum] = values[colNum];
         }else{
           newValues[colNum] = null;
@@ -134,14 +136,25 @@ public abstract class Parser implements IParser {
       r = iterator().next();
     }
 
-    if (previewRows.size() <= 0)
+    if (previewRows.size() <= 0) {
+      LOG.error("No rows found in the file. returning error.");
       throw new NoSuchElementException("Does not contain any rows.");
+    }
+
+    // find data types.
+    header = generateHeader(headerRow,previewRows,numOfCols);
+
+    return new PreviewData(header,previewRows);
+  }
+
+  private List<ColumnDescription> generateHeader(Row headerRow,List<Row> previewRows, int numOfCols) {
+    List<ColumnDescription> header = new ArrayList<>();
 
-    header = new ArrayList<>(numOfCols);
     for (int colNum = 0; colNum < numOfCols; colNum++) {
-      int dataTypeId = getLikelyDataType(typeCounts, colNum);
-      ColumnDescription.DataTypes type = ColumnDescription.DataTypes.values()[dataTypeId];
-      String colName = "Column" + colNum;
+      ColumnDescription.DataTypes type = getLikelyDataType(previewRows,colNum);
+      LOG.info("datatype detected for column {} : {}", colNum, type);
+
+      String colName = "Column" + (colNum + 1);
       if (null != headerRow)
         colName = (String) headerRow.getRow()[colNum];
 
@@ -149,6 +162,7 @@ public abstract class Parser implements IParser {
       header.add(cd);
     }
 
-    return new PreviewData(header,previewRows);
+    LOG.debug("return headers : {} ", header);
+    return header;
   }
 }
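
Note on this file: this is the behavioral core of the patch. Type detection moves from a per-cell majority vote (the removed typeCounts matrix) to ParseUtils.detectHiveColumnDataType, which demands a type valid for every preview value; default column names also shift from zero-based "Column0" to one-based "Column1". A hedged comparison on a hypothetical column:

    // column values: "1", "2", "3", "x"
    // old majority vote:  INT counted 3 times, STRING once   -> INT
    //                     (the later INSERT would then choke on "x")
    // new valid-for-all:  "x" fails INT, column falls through -> STRING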

+ 4 - 3
contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/csv/CSVParser.java

@@ -22,8 +22,9 @@ import org.apache.ambari.view.hive2.resources.uploads.parsers.ParseOptions;
 import org.apache.ambari.view.hive2.resources.uploads.parsers.Parser;
 import org.apache.commons.csv.CSVFormat;
 
-import java.io.*;
-import java.util.*;
+import java.io.IOException;
+import java.io.Reader;
+import java.util.Iterator;
 
 /**
  * Parses the given Reader which contains CSV stream and extracts headers and rows, and detect datatypes of columns
@@ -45,7 +46,7 @@ public class CSVParser extends Parser {
   }
 
   @Override
-  public void close() throws IOException {
+  public void close() throws Exception {
     this.parser.close();
   }
 

+ 1 - 1
contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/json/JSONParser.java

@@ -74,7 +74,7 @@ public class JSONParser extends Parser {
   }
 
   @Override
-  public void close() throws IOException {
+  public void close() throws Exception {
     this.jsonReader.close();
   }
 

+ 1 - 1
contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/uploads/parsers/xml/XMLParser.java

@@ -85,7 +85,7 @@ public class XMLParser extends Parser {
   }
 
   @Override
-  public void close() throws IOException {
+  public void close() throws Exception {
     try {
       this.xmlReader.close();
     } catch (XMLStreamException e) {

+ 0 - 1
contrib/views/hive-next/src/main/resources/ui/hive-web/app/adapters/file-upload.js

@@ -25,7 +25,6 @@ export default EmberUploader.Uploader.extend({
   // Override
   _ajax: function(settings) {
     settings = Ember.merge(settings, this.getProperties('headers'));
-    console.log("_ajax : settings: " + JSON.stringify(settings));
     return this._super(settings);
   }
 });

+ 1 - 5
contrib/views/hive-next/src/main/resources/ui/hive-web/app/adapters/upload-table.js

@@ -30,9 +30,7 @@ export default application.extend({
   uploadFiles: function (path, files, extras) {
     var uploadUrl = this.buildUploadURL(path);
 
-    console.log("uplaoder : uploadURL : ", uploadUrl);
-    console.log("uploader : extras : ", extras);
-    console.log("uploader : files : ", files);
+    console.log("uplaoder : uploadURL : ", uploadUrl, " extras : ", extras , "files : ", files);
 
     var hdrs = Ember.$.extend(true, {},this.get('headers'));
     delete hdrs['Content-Type'];
@@ -72,10 +70,8 @@ export default application.extend({
                      headers: self.get('headers'),
                      dataType : 'json'
                  }).done(function(data) {
-                     console.log( "inside done : data : ", data );
                      resolve(data);
                  }).fail(function(error) {
-                     console.log( "inside fail error :  ", error );
                      reject(error);
                  });
               });

+ 10 - 1
contrib/views/hive-next/src/main/resources/ui/hive-web/app/components/file-upload.js

@@ -19,7 +19,16 @@
 import EmberUploader from 'ember-uploader';
 
 export default EmberUploader.FileField.extend({
+  onChangeUploadFiles : function(){
+    if(!this.get("uploadFiles")){
+      // files were cleared by the controller so clear here as well.
+      this.set("files");
+      this.set("value");
+    }
+  }.observes("uploadFiles"),
   filesDidChange: function(files) {
-    this.sendAction('filesUploaded',files); // sends this action to controller.
+    if( files ) {
+      this.sendAction('filesUploaded', files); // sends this action to controller.
+    }
   }
 });

+ 60 - 0
contrib/views/hive-next/src/main/resources/ui/hive-web/app/components/validated-text-field.js

@@ -0,0 +1,60 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+import Ember from 'ember';
+
+/** Example :
+ * {{#validated-text-field
+ * inputValue=bindedTextValue invalidClass='form-control red-border' validClass='form-control' regex="^[a-z]+$"
+ * allowEmpty=false tooltip="Enter valid word" errorMessage="Please enter valid word" placeholder="Enter Word"}}
+ * {{/validated-text-field}}
+ */
+export default Ember.Component.extend({
+  allowEmpty: true,
+  valid: true,
+  setValid: function () {
+    this.set("valid", true);
+    this.set("inputClass", this.get("validClass"));
+    this.set("message", this.get("tooltip"));
+  },
+  setInvalid: function () {
+    this.set("valid", false);
+    this.set("inputClass", this.get("invalidClass"));
+    this.set("message", this.get("errorMessage"));
+  },
+  onChangeInputValue: function () {
+    var regStr = this.get("regex");
+    var regExp = new RegExp(regStr, "g");
+    if (this.get("inputValue")) {
+      var arr = this.get("inputValue").match(regExp);
+      if (arr != null && arr.length == 1) {
+        this.setValid();
+      }
+      else {
+        this.setInvalid();
+      }
+    } else {
+      if (this.get("allowEmpty")) {
+        this.setValid();
+      } else {
+        this.setInvalid();
+      }
+    }
+  }.observes("inputValue").on('init')
+});

+ 9 - 1
contrib/views/hive-next/src/main/resources/ui/hive-web/app/controllers/index.js

@@ -141,8 +141,16 @@ export default Ember.Controller.extend({
         defer = Ember.RSVP.defer(),
         originalModel = this.get('model');
 
+    var title = "";
+    if(shouldGetVisualExplain){
+      title += "Visual Explain "
+    }else if(shouldExplain){
+      title += "Explain "
+    }
+
+    title += originalModel.get('title');
     job = this.store.createRecord(constants.namingConventions.job, {
-      title: originalModel.get('title'),
+      title: title,
       sessionTag: originalModel.get('sessionTag'),
       dataBase: this.get('selectedDatabase.name'),
       referrer: referrer

+ 138 - 115
contrib/views/hive-next/src/main/resources/ui/hive-web/app/controllers/upload-table.js

@@ -22,16 +22,20 @@ import constants from 'hive/utils/constants';
 
 
 export default Ember.Controller.extend({
-  isLocalUpload : Ember.computed.equal("uploadSource","local"),
-  uploadSource : "local",
-  hdfsPath : "",
+  COLUMN_NAME_REGEX: "^[a-zA-Z]{1}[a-zA-Z0-9_]*$",
+  TABLE_NAME_REGEX: "^[a-zA-Z]{1}[a-zA-Z0-9_]*$",
+  isLocalUpload: Ember.computed.equal("uploadSource", "local"),
+  uploadSource: "local",
+  COLUMN_NAME_PREFIX : "column",
+  hdfsPath: "",
   jobService: Ember.inject.service(constants.namingConventions.job),
   notifyService: Ember.inject.service(constants.namingConventions.notify),
-  needs: ['databases'],
+  databaseService : Ember.inject.service(constants.namingConventions.database),
+  databases : Ember.computed.alias("databaseService.databases"),
   showErrors: false,
   uploader: Uploader.create(),
   baseUrl: "/resources/upload",
-  isFirstRowHeader: true, // is first row  header
+  isFirstRowHeader: false, // is first row  header
   header: null,  // header received from server
   files: null, // files that need to be uploaded only file[0] is relevant
   firstRow: [], // the actual first row of the table.
@@ -41,9 +45,26 @@ export default Ember.Controller.extend({
   filePath: null,
   tableName: null,
   uploadProgressInfos : [],
+  DEFAULT_DB_NAME : 'default',
+  showPreview : false,
   onChangeUploadSource : function(){
     this.clearFields();
   }.observes("uploadSource"),
+  setDefaultDB : function(){
+    var self = this;
+    var defaultDatabase = this.get('databases').find(
+      function(item,index){
+        if(item.id == self.DEFAULT_DB_NAME )
+          return true;
+      }
+    );
+
+    console.log("setting the initial database to : " + defaultDatabase);
+    self.set("selectedDatabase",defaultDatabase);
+  },
+  init: function() {
+    this.setDefaultDB();
+  },
   uploadProgressInfo : Ember.computed("uploadProgressInfos.[]",function(){
     var info = "";
     for( var i = 0 ; i < this.get('uploadProgressInfos').length ; i++)
@@ -64,8 +85,9 @@ export default Ember.Controller.extend({
     "RCFILE"      ,
     "ORC"         ,
     "PARQUET"     ,
-    "AVRO"        ,
-    "INPUTFORMAT"
+    "AVRO"
+    //,
+    //"INPUTFORMAT"  -- not supported as of now.
   ],
   selectedFileType: "ORC",
   dataTypes: [
@@ -84,21 +106,21 @@ export default Ember.Controller.extend({
     "VARCHAR", // -- (Note: Available in Hive 0.12.0 and later)
     "CHAR" // -- (Note: Available in Hive 0.13.0 and later)
   ],
+  _setHeaderElements : function(header,valueArray){
+    header.forEach(function (item, index) {
+      Ember.set(item, 'name',  valueArray.objectAt(index));
+    }, this);
+  },
   isFirstRowHeaderDidChange: function () {
-    console.log("inside onFirstRowHeader : isFirstRowHeader : " + this.get('isFirstRowHeader'));
     if (this.get('isFirstRowHeader') != null && typeof this.get('isFirstRowHeader') !== 'undefined') {
       if (this.get('isFirstRowHeader') == false) {
         if (this.get('rows')) {
           this.get('rows').unshiftObject({row: this.get('firstRow')});
+          this._setHeaderElements(this.get('header'),this.get('defaultColumnNames'));
         }
       } else if( this.get('header') ) { // headers are available
         // take first row of
-        this.get('header').forEach(function (item, index) {
-          console.log("item : ", item);
-          console.log("this.get('firstRow').objectAt(index)  : ", this.get('firstRow').objectAt(index));
-          Ember.set(item, 'name', this.get('firstRow')[index]);
-        }, this);
-
+        this._setHeaderElements(this.get('header'),this.get('firstRow'));
         this.get('rows').removeAt(0);
       }
 
@@ -108,16 +130,13 @@ export default Ember.Controller.extend({
 
   popUploadProgressInfos : function(){
     var msg = this.get('uploadProgressInfos').popObject();
-   // console.log("popedup message : " + msg);
   },
 
   pushUploadProgressInfos : function(info){
     this.get('uploadProgressInfos').pushObject(info);
-   // console.log("pushed message : " + info);
   },
 
   clearUploadProgressModal : function(){
-  //  console.log("inside clearUploadProgressModal this.get('uploadProgressInfos') : " + this.get('uploadProgressInfos'));
     var len = this.get('uploadProgressInfos').length;
     for( var i = 0 ; i < len ; i++){
       this.popUploadProgressInfos();
@@ -125,7 +144,6 @@ export default Ember.Controller.extend({
   },
 
   hideUploadModal : function(){
-    console.log("hiding the modal ....");
     this.clearUploadProgressModal();
     Ember.$("#uploadProgressModal").modal("hide");
   },
@@ -135,11 +153,12 @@ export default Ember.Controller.extend({
   },
 
   clearFields: function () {
+    this.set("showPreview",false);
     this.set("hdfsPath");
     this.set("header");
     this.set("rows");
     this.set("error");
-    this.set('isFirstRowHeader',true);
+    this.set('isFirstRowHeader',false);
     this.set('files');
     this.set("firstRow");
     this.set("selectedDatabase",null);
@@ -147,17 +166,15 @@ export default Ember.Controller.extend({
     this.set("filePath");
     this.set('tableName');
     this.clearUploadProgressModal();
+    this.setDefaultDB();
     this.printValues();
   },
 
   printValues: function () {
-    console.log("printing all values : ");
-    console.log("header : ", this.get('header'));
-    console.log("rows : ", this.get('rows'));
-    console.log("error : ", this.get('error'));
-    console.log("isFirstRowHeader : ", this.get('isFirstRowHeader'));
-    console.log("files : ", this.get('files'));
-    console.log("firstRow : ", this.get('firstRow'));
+    console.log("header : ", this.get('header'),
+      ". rows : ",this.get('rows'),". error : ", this.get('error'),
+      " isFirstRowHeader : ", this.get('isFirstRowHeader'),
+      "firstRow : ", this.get('firstRow'));
   },
 
   generateTempTableName : function(){
@@ -173,21 +190,20 @@ export default Ember.Controller.extend({
   waitForJobStatus: function (jobId, resolve, reject) {
     console.log("finding status of job: ", jobId);
     var self = this;
-    var fetchJobPromise = this.get('jobService').fetchJobStatus(jobId);
+    var fetchJobPromise = this.get('jobService').fetchJob(jobId);
       fetchJobPromise.then(function (data) {
         console.log("waitForJobStatus : data : ", data);
-        var status = data.jobStatus;
-        if (status == "SUCCEEDED") {
+        var job = data.job;
+        var status = job.status.toUpperCase();
+        if (status == constants.statuses.succeeded ) {
           console.log("resolving waitForJobStatus with : " , status);
-          resolve(status);
-        } else if (status == "CANCELED" || status == "CLOSED" || status == "ERROR") {
+          resolve(job);
+        } else if (status == constants.statuses.canceled || status == constants.statuses.closed || status == constants.statuses.error) {
           console.log("rejecting waitForJobStatus with : " + status);
-          reject(new Error(status));
+          reject(new Error(job.statusMessage));
         } else {
-          console.log("retrying waitForJobStatus : ");
-          Ember.run.later(self, function() {
-            this.waitForJobStatus(jobId, resolve, reject);
-          }, 1000);
+          console.log("retrying waitForJobStatus : ", jobId);
+          self.waitForJobStatus(jobId, resolve, reject);
         }
       }, function (error) {
         console.log("rejecting waitForJobStatus with : " + error);
@@ -226,14 +242,18 @@ export default Ember.Controller.extend({
   waitForGeneratingPreview: function () {
     console.log("waitForGeneratingPreview");
     this.showUploadModal();
-    this.pushUploadProgressInfos("<li> Generating Preview .... </li>")
+    this.pushUploadProgressInfos(this.formatMessage('hive.messages.generatingPreview'))
   },
 
   previewTable: function (data) {
     console.log('inside previewTable');
+    var self = this;
+    var defaultColumnNames = data.header.map(function(item,index){
+      return self.COLUMN_NAME_PREFIX + index;
+    });
+    this.set("defaultColumnNames",defaultColumnNames);
     this.set("header", data.header);
     this.set("firstRow", data.rows[0].row);
-    console.log("firstRow : ", this.get('firstRow'));
     this.set('isFirstRowHeader', data.isFirstRowHeader);
     this.set('tableName',data.tableName);
     if(data.isFirstRowHeader == true){
@@ -244,23 +264,25 @@ export default Ember.Controller.extend({
 
   onGeneratePreviewSuccess: function (data) {
     console.log("onGeneratePreviewSuccess");
+    this.set("showPreview",true);
     this.hideUploadModal();
     this.previewTable(data);
   },
 
   onGeneratePreviewFailure: function (error) {
     console.log("onGeneratePreviewFailure");
+    this.set("showPreview",false);
     this.hideUploadModal();
     this.setError(error);
   },
 
-  createTable: function () {
-    console.log("table headers : ", this.get('header'));
+  createActualTable : function(){
+    console.log("createActualTable");
+    this.pushUploadProgressInfos(this.formatMessage('hive.messages.startingToCreateActualTable'));
     var headers = this.get('header');
-
     var selectedDatabase = this.get('selectedDatabase');
-    if (null == selectedDatabase || typeof selectedDatabase === 'undefined') {
-      throw new Error(Ember.I18n.t('hive.errors.emptyDatabase'));
+    if (!selectedDatabase) {
+      throw new Error(this.translate('hive.errors.emptyDatabase', {database : this.translate("hive.words.database")}));
     }
 
     this.set('databaseName', this.get('selectedDatabase').get('name'));
@@ -269,17 +291,8 @@ export default Ember.Controller.extend({
     var isFirstRowHeader = this.get('isFirstRowHeader');
     var filetype = this.get("selectedFileType");
 
-    if (null == databaseName || typeof databaseName === 'undefined' || databaseName == '') {
-      throw new Error(Ember.I18n.t('hive.errors.emptyDatabase'));
-    }
-    if (null == tableName || typeof tableName === 'undefined' || tableName == '') {
-      throw new Error(Ember.I18n.t('hive.errors.emptyTableName'));
-    }
-    if (null == isFirstRowHeader || typeof isFirstRowHeader === 'undefined') {
-      throw new Error(Ember.I18n.t('hive.errors.emptyIsFirstRow'));
-    }
-
-    this.validateColumns();
+    this.validateInput(headers,tableName,databaseName,isFirstRowHeader);
+    this.showUploadModal();
 
     return this.get('uploader').createTable({
       "isFirstRowHeader": isFirstRowHeader,
@@ -290,16 +303,10 @@ export default Ember.Controller.extend({
     });
   },
 
-  createActualTable : function(){
-    console.log("createActualTable");
-    this.pushUploadProgressInfos("<li> Starting to create Actual table.... </li>");
-    return this.createTable();
-  },
-
   waitForCreateActualTable: function (jobId) {
     console.log("waitForCreateActualTable");
     this.popUploadProgressInfos();
-    this.pushUploadProgressInfos("<li> Waiting for creation of Actual table.... </li>");
+    this.pushUploadProgressInfos(this.formatMessage('hive.messages.waitingToCreateActualTable'));
     var self = this;
     var p = new Ember.RSVP.Promise(function (resolve, reject) {
       self.waitForJobStatus(jobId, resolve, reject);
@@ -311,19 +318,19 @@ export default Ember.Controller.extend({
   onCreateActualTableSuccess : function(){
     console.log("onCreateTableSuccess");
     this.popUploadProgressInfos();
-    this.pushUploadProgressInfos("<li> Successfully created Actual table. </li>");
+    this.pushUploadProgressInfos(this.formatMessage('hive.messages.successfullyCreatedActualTable'));
   },
 
   onCreateActualTableFailure : function(error){
     console.log("onCreateActualTableFailure");
     this.popUploadProgressInfos();
-    this.pushUploadProgressInfos("<li> Failed to create Actual table. </li>");
+    this.pushUploadProgressInfos(this.formatMessage('hive.messages.failedToCreateActualTable'));
     this.setError(error);
   },
 
   createTempTable : function(){
     console.log("createTempTable");
-    this.pushUploadProgressInfos("<li> Starting to create Temporary table.... </li>");
+    this.pushUploadProgressInfos(this.formatMessage('hive.messages.startingToCreateTemporaryTable'));
     var tempTableName = this.generateTempTableName();
     this.set('tempTableName',tempTableName);
     return this.get('uploader').createTable({
@@ -338,7 +345,7 @@ export default Ember.Controller.extend({
   waitForCreateTempTable: function (jobId) {
     console.log("waitForCreateTempTable");
     this.popUploadProgressInfos();
-    this.pushUploadProgressInfos("<li> Waiting for creation of Temporary table.... </li>");
+    this.pushUploadProgressInfos(this.formatMessage('hive.messages.waitingToCreateTemporaryTable'));
     var self = this;
     var p = new Ember.RSVP.Promise(function (resolve, reject) {
       self.waitForJobStatus(jobId, resolve, reject);
@@ -350,11 +357,11 @@ export default Ember.Controller.extend({
   onCreateTempTableSuccess : function(){
     console.log("onCreateTempTableSuccess");
     this.popUploadProgressInfos();
-    this.pushUploadProgressInfos("<li> Successfully created Temporary table. </li>");
+    this.pushUploadProgressInfos(this.formatMessage('hive.messages.successfullyCreatedTemporaryTable'));
   },
 
   deleteTable : function(databaseName, tableName){
-    console.log("deleting table " + databaseName + "." + tableName);
+    console.log("deleting table ", databaseName , "." , tableName);
 
     return this.get('uploader').deleteTable({
       "database":  databaseName,
@@ -365,7 +372,7 @@ export default Ember.Controller.extend({
   deleteTableOnError : function(databaseName,tableName, tableLabel){
       //delete table and wait for delete job
     var self = this;
-    this.pushUploadProgressInfos("<li> Deleting " + tableLabel + " table...  </li>");
+    this.pushUploadProgressInfos(this.formatMessage('hive.messages.deletingTable',{table:tableLabel}));
 
     return this.deleteTable(databaseName,tableName).then(function(data){
       return new Ember.RSVP.Promise(function(resolve,reject){
@@ -373,26 +380,31 @@ export default Ember.Controller.extend({
       });
     }).then(function(){
       self.popUploadProgressInfos();
-      self.pushUploadProgressInfos("<li> Successfully deleted " + tableLabel + " table. </li>");
+      self.pushUploadProgressInfos(this.formatMessage('hive.messages.succesfullyDeletedTable',{table:tableLabel}));
       return Ember.RSVP.Promise.resolve();
     },function(err){
       self.popUploadProgressInfos();
-      self.pushUploadProgressInfos("<li> Failed to delete " + tableLabel + " table. </li>");
+      self.pushUploadProgressInfos(this.formatMessage('hive.messages.failedToDeleteTable',{table:tableLabel}));
       self.setError(err);
       return Ember.RSVP.Promise.reject();
     });
   },
 
   rollBackActualTableCreation : function(){
-    return this.deleteTableOnError(this.get("databaseName"),this.get("tableName"),"Actual");
+    return this.deleteTableOnError(this.get("databaseName"),this.get("tableName"),this.translate('hive.words.actual'));
   },
 
-
+  translate : function(str,vars){
+    return Ember.I18n.t(str,vars);
+  },
+  formatMessage : function(messageId, vars){
+    return "<li>" + this.translate(messageId,vars) + "</li>";
+  },
   onCreateTempTableFailure : function(error){
     console.log("onCreateTempTableFailure");
     this.setError(error);
     this.popUploadProgressInfos();
-    this.pushUploadProgressInfos("<li> Failed to create temporary table. </li>");
+    this.pushUploadProgressInfos();
     return this.rollBackActualTableCreation().then(function(data){
       return Ember.RSVP.Promise.reject(error); // always reject for the flow to stop
     },function(err){
@@ -402,7 +414,7 @@ export default Ember.Controller.extend({
 
   uploadFile : function(){
     console.log("uploadFile");
-    this.pushUploadProgressInfos("<li> Starting to upload the file .... </li>");
+    this.pushUploadProgressInfos(this.formatMessage('hive.messages.startingToUploadFile'));
     if( this.get("isLocalUpload")){
       return this.uploadTable();
     }else{
@@ -413,7 +425,7 @@ export default Ember.Controller.extend({
   waitForUploadingFile: function (data) {
     console.log("waitForUploadingFile");
     this.popUploadProgressInfos();
-    this.pushUploadProgressInfos("<li> Waiting for uploading file .... </li>");
+    this.pushUploadProgressInfos(this.formatMessage('hive.messages.waitingToUploadFile'));
     if( data.jobId ){
       var self = this;
           var p = new Ember.RSVP.Promise(function (resolve, reject) {
@@ -428,12 +440,12 @@ export default Ember.Controller.extend({
   onUploadingFileSuccess: function () {
     console.log("onUploadingFileSuccess");
     this.popUploadProgressInfos();
-    this.pushUploadProgressInfos("<li> Successfully uploaded file. </li>");
+    this.pushUploadProgressInfos(this.formatMessage('hive.messages.successfullyUploadedFile') );
   },
 
   rollBackTempTableCreation : function(){
     var self = this;
-    return this.deleteTableOnError(this.get("databaseName"),this.get("tempTableName"),"Temporary").then(function(data){
+    return this.deleteTableOnError(this.get("databaseName"),this.get("tempTableName"),this.translate('hive.words.temporary')).then(function(data){
       return self.rollBackActualTableCreation();
     },function(err){
       return self.rollBackActualTableCreation();
@@ -444,7 +456,7 @@ export default Ember.Controller.extend({
     console.log("onUploadingFileFailure");
     this.setError(error);
     this.popUploadProgressInfos();
-    this.pushUploadProgressInfos("<li> Failed to upload file. </li>");
+    this.pushUploadProgressInfos(this.formatMessage('hive.messages.failedToUploadFile'));
     return this.rollBackTempTableCreation().then(function(data){
       return Ember.RSVP.Promise.reject(error); // always reject for the flow to stop
     },function(err){
@@ -458,7 +470,7 @@ export default Ember.Controller.extend({
 
   insertIntoTable : function(){
     console.log("insertIntoTable");
-    this.pushUploadProgressInfos("<li> Starting to Insert rows from temporary table to actual table .... </li>");
+    this.pushUploadProgressInfos(this.formatMessage('hive.messages.startingToInsertRows'));
 
     return this.get('uploader').insertIntoTable({
       "fromDatabase":  this.get("databaseName"),
@@ -471,7 +483,7 @@ export default Ember.Controller.extend({
   waitForInsertIntoTable: function (jobId) {
     console.log("waitForInsertIntoTable");
     this.popUploadProgressInfos();
-    this.pushUploadProgressInfos("<li> Waiting for Insertion of rows from temporary table to actual table .... </li>");
+    this.pushUploadProgressInfos(this.formatMessage('hive.messages.waitingToInsertRows'));
     var self = this;
     var p = new Ember.RSVP.Promise(function (resolve, reject) {
       self.waitForJobStatus(jobId, resolve, reject);
@@ -483,14 +495,14 @@ export default Ember.Controller.extend({
   onInsertIntoTableSuccess : function(){
     console.log("onInsertIntoTableSuccess");
     this.popUploadProgressInfos();
-    this.pushUploadProgressInfos("<li> Successfully inserted rows from temporary table to actual table. </li>");
+    this.pushUploadProgressInfos(this.formatMessage('hive.messages.successfullyInsertedRows'));
   },
 
   onInsertIntoTableFailure : function(error){
     console.log("onInsertIntoTableFailure");
     this.setError(error);
     this.popUploadProgressInfos();
-    this.pushUploadProgressInfos("<li> Failed to insert rows from temporary table to actual table. </li>");
+    this.pushUploadProgressInfos(this.formatMessage('hive.messages.failedToInsertRows'));
     return this.rollBackUploadFile().then(function(data){
       return Ember.RSVP.Promise.reject(error); // always reject for the flow to stop
     },function(err){
@@ -500,7 +512,7 @@ export default Ember.Controller.extend({
 
   deleteTempTable : function(){
     console.log("deleteTempTable");
-    this.pushUploadProgressInfos("<li> Starting to delete temporary table .... </li>");
+    this.pushUploadProgressInfos(this.formatMessage('hive.messages.startingToDeleteTemporaryTable'));
 
     return this.deleteTable(
       this.get("databaseName"),
@@ -511,7 +523,7 @@ export default Ember.Controller.extend({
   waitForDeleteTempTable: function (jobId) {
     console.log("waitForDeleteTempTable");
     this.popUploadProgressInfos();
-    this.pushUploadProgressInfos("<li> Waiting for deletion of temporary table .... </li>");
+    this.pushUploadProgressInfos(this.formatMessage('hive.messages.waitingToDeleteTemporaryTable'));
     var self = this;
     var p = new Ember.RSVP.Promise(function (resolve, reject) {
       self.waitForJobStatus(jobId, resolve, reject);
@@ -523,43 +535,40 @@ export default Ember.Controller.extend({
   onDeleteTempTableSuccess : function(){
     console.log("onDeleteTempTableSuccess");
     this.popUploadProgressInfos();
-    this.pushUploadProgressInfos("<li>Successfully inserted row. </li>");
+    this.pushUploadProgressInfos(this.formatMessage('hive.messages.successfullyDeletedTemporaryTable'));
     this.onUploadSuccessfull();
   },
 
   onDeleteTempTableFailure : function(error){
     console.log("onDeleteTempTableFailure");
     this.setError(error);
-    this.setError("You will have to manually delete the table " + this.get("databaseName") + "." + this.get("tempTableName"));
+    this.setError(this.translate('hive.messages.manuallyDeleteTable',{databaseName:this.get('databaseName'), tableName: this.get("tempTableName")}));
   },
 
   createTableAndUploadFile : function(){
     var self = this;
     self.setError();
-    self.showUploadModal();
     self.createActualTable()
-      .then(function(data){
-        console.log("1. received data : ", data);
-        return self.waitForCreateActualTable(data.jobId);
+      .then(function(job){
+        console.log("1. received job : ", job);
+        return self.waitForCreateActualTable(job.id);
       },function(error){
-        self.onCreateActualTableFailure(error);
         console.log("Error occurred: ", error);
+        self.onCreateActualTableFailure(error);
         throw error;
       })
       .then(function(data){
-        console.log("2. received data : ", data);
         self.onCreateActualTableSuccess(data);
         return self.createTempTable(data);
       },function(error){
         if(!self.get('error')){
           console.log("Error occurred: ", error);
-          self.onCreateActualTableFailure(new Error("Server job for creation of actual table failed."));
+          self.onCreateActualTableFailure(error);
         }
         throw error;
       })
-      .then(function(data){
-        console.log("3. received data : ", data);
-        return self.waitForCreateTempTable(data.jobId);
+      .then(function(job){
+        return self.waitForCreateTempTable(job.id);
       },function(error){
         if(!self.get('error')){
           console.log("Error occurred: ", error);
@@ -568,17 +577,15 @@ export default Ember.Controller.extend({
         throw error;
       })
       .then(function(data){
-        console.log("4. received data : ", data);
         self.onCreateTempTableSuccess(data);
         return self.uploadFile(data);
       },function(error){
         if(!self.get('error')){
           console.log("Error occurred: ", error);
-          return self.onCreateTempTableFailure(new Error("Server job for creation of temporary table failed."));
+          return self.onCreateTempTableFailure(error);
         }
         throw error;
       }).then(function(data){
-        console.log("4.5 received data : ", data);
         return self.waitForUploadingFile(data);
       },function(error){
         if(!self.get('error')){
@@ -588,19 +595,17 @@ export default Ember.Controller.extend({
         throw error;
       })
       .then(function(data){
-        console.log("5. received data : ", data);
         self.onUploadingFileSuccess(data);
         return self.insertIntoTable(data);
       },function(error){
         if(!self.get('error')){
           console.log("Error occurred: ", error);
-          return self.onUploadingFileFailure(new Error("Server job for upload of file failed."));
+          return self.onUploadingFileFailure(error);
         }
         throw error;
       })
-      .then(function(data){
-        console.log("6. received data : ", data);
-        return self.waitForInsertIntoTable(data.jobId);
+      .then(function(job){
+        return self.waitForInsertIntoTable(job.id);
       },function(error){
         if(!self.get('error')){
           console.log("Error occurred: ", error);
@@ -609,19 +614,17 @@ export default Ember.Controller.extend({
         throw error;
       })
       .then(function(data){
-        console.log("7. received data : ", data);
         self.onInsertIntoTableSuccess(data);
         return self.deleteTempTable(data);
       },function(error){
         if(!self.get('error')){
           console.log("Error occurred: ", error);
-          return self.onInsertIntoTableFailure(new Error("Server job for insert from temporary to actual table failed."));
+          return self.onInsertIntoTableFailure(error);
         }
         throw error;
       })
-      .then(function(data){
-        console.log("8. received data : ", data);
-        return self.waitForDeleteTempTable(data.jobId);
+      .then(function(job){
+        return self.waitForDeleteTempTable(job.id);
       },function(error){
         if(!self.get('error')){
           console.log("Error occurred: ", error);
@@ -630,12 +633,11 @@ export default Ember.Controller.extend({
         throw error;
       })
       .then(function(data){
-        console.log("9. received data : ", data);
         self.onDeleteTempTableSuccess(data);
       },function(error){
         if(!self.get('error')){
           console.log("Error occurred: ", error);
-          self.onDeleteTempTableFailure(new Error("Server job for deleting temporary table failed."));
+          self.onDeleteTempTableFailure(error);
         }
         throw error;
       }).catch(function(error){
@@ -646,9 +648,30 @@ export default Ember.Controller.extend({
       });
   },
 
-  validateColumns: function () {
+  validateInput: function (headers,tableName,databaseName,isFirstRowHeader) {
     // throw exception if invalid.
+    if(!headers || headers.length == 0) throw new Error(this.translate('hive.errors.emptyHeaders'));
+
+    var regex = new RegExp(this.get("COLUMN_NAME_REGEX"),"g");
+
+    headers.forEach(function(column,index){
+      if( !column || !column.name ) throw new Error(this.translate('hive.errors.emptyColumnName'));
+      var matchArr = column.name.match(regex);
+      if(matchArr == null || matchArr.length != 1 ) throw new Error(this.translate('hive.errors.illegalColumnName',{ columnName : column.name, index : (index + 1)}));
+    },this);
+
+    if(!tableName) throw new Error(this.translate('hive.errors.emptyTableName', {tableNameField : this.translate('hive.ui.tableName')}));
+    var tableRegex = new RegExp(this.get("TABLE_NAME_REGEX"),"g");
+    var mArr = tableName.match(tableRegex);
+    if(mArr == null || mArr.length != 1 ) throw new Error(this.translate('hive.errors.illegalTableName', {tableNameField:this.translate('hive.ui.tableName'),tableName:tableName}) );
+
+    if(!databaseName) throw new Error(this.translate('hive.errors.emptyDatabase', {database:this.translate('hive.words.database')}));
+
+    if (null == isFirstRowHeader || typeof isFirstRowHeader === 'undefined') { //this can be true or false. so explicitly checking for null/ undefined.
+      throw new Error(this.translate('hive.errors.emptyIsFirstRow', {isFirstRowHeaderField:this.translate('hive.ui.isFirstRowHeader')}));
+    }
   },
+
   setError: function (error) {
     if(error){
       console.log("upload table error : ", error);
@@ -666,7 +689,7 @@ export default Ember.Controller.extend({
   uploadTableFromHdfs : function(){
     console.log("uploadTableFromHdfs called.");
     if(!(this.get("inputFileTypeCSV") == true && this.get("isFirstRowHeader") == false) ){
-      this.pushUploadProgressInfos("<li>Uploading file .... </li>");
+      this.pushUploadProgressInfos(this.formatMessage('hive.messages.uploadingFromHdfs'));
     }
     return  this.get('uploader').uploadFromHDFS({
         "isFirstRowHeader": this.get("isFirstRowHeader"),
@@ -688,7 +711,8 @@ export default Ember.Controller.extend({
 
   onUploadSuccessfull: function (data) {
     console.log("onUploadSuccessfull : ", data);
-    this.get('notifyService').success("Uploaded Successfully", "Table " + this.get('tableName') + " created in database " + this.get("databaseName"));
+    this.get('notifyService').success(this.translate('hive.messages.successfullyUploadedTableHeader'),
+      this.translate('hive.messages.successfullyUploadedTableMessage' ,{tableName:this.get('tableName') ,databaseName:this.get("databaseName")}));
     this.clearFields();
   },
 
@@ -712,7 +736,6 @@ export default Ember.Controller.extend({
     },
     filesUploaded: function (files) {
       console.log("upload-table.js : uploaded new files : ", files);
-
       this.clearFields();
 
       this.set('files', files);

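The new validateInput signals problems purely by throwing errors whose messages are already translated. A plausible call site, presumably inside the uploadTable action the template invokes (the committed action wiring is outside this hunk, so treat this as a sketch; 'header' is the controller property bound to the preview columns):

    // Hypothetical action wiring -- not the committed implementation.
    uploadTable: function () {
      try {
        // throws a translated Error if headers, table name, database or
        // the is-first-row-header flag are missing or illegal
        this.validateInput(this.get('header'), this.get('tableName'),
                           this.get('databaseName'), this.get('isFirstRowHeader'));
      } catch (e) {
        this.setError(e);   // surface the translated message to the user
        return;             // abort before any server job is created
      }
      this.createTableAndUploadFile();
    }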
+ 59 - 3
contrib/views/hive-next/src/main/resources/ui/hive-web/app/initializers/i18n.js

@@ -244,9 +244,65 @@ TRANSLATIONS = {
   hive: {
     errors: {
       'no.query': "No query to process.",
-      'emptyDatabase' : "Please select Database.",
-      'emptyTableName' : "Please enter tableName.",
-      'emptyIsFirstRow' : "Please select is First Row Header?"
+      'emptyDatabase' : "Please select {{ database }}.",
+      'emptyTableName' : "Please enter {{ tableNameField }}.",
+      'illegalTableName':"Illegal {{ tableNameField }} : '{{ tableName }}'",
+      'emptyIsFirstRow' : "{{isFirstRowHeaderField}} cannot be null.",
+      'emptyHeaders':"Headers (containing column names) cannot be null.",
+      'emptyColumnName':"Column name cannot be null.",
+      'illegalColumnName':"Illegal column name : '{{columnName}}' in column number {{index}}",
+    },
+    messages : {
+      'generatingPreview':"Generating Preview.",
+      'startingToCreateActualTable' : "Starting to create Actual table",
+      'waitingToCreateActualTable' : "Waiting for creation of Actual table",
+      'successfullyCreatedActualTable' : "Successfully created Actual table.",
+      'failedToCreateActualTable' : "Failed to create Actual table.",
+      'startingToCreateTemporaryTable' : "Starting to create Temporary table.",
+      'waitingToCreateTemporaryTable' : "Waiting for creation of Temporary table.",
+      'successfullyCreatedTemporaryTable' : "Successfully created Temporary table.",
+      'failedToCreateTemporaryTable' : " Failed to create temporary table.",
+      'deletingTable' :  "Deleting {{table}} table.",
+      'succesfullyDeletedTable' :  "Successfully deleted {{ table}} table.",
+      'failedToDeleteTable' :  "Failed to delete {{table}} table.",
+      'startingToUploadFile' :  "Starting to upload the file.",
+      'waitingToUploadFile' :  "Waiting for uploading file.",
+      'successfullyUploadedFile' :  "Successfully uploaded file.",
+      'failedToUploadFile' :  "Failed to upload file.",
+      'startingToInsertRows' :  "Starting to insert rows from temporary table to actual table.",
+      'waitingToInsertRows' :  "Waiting for insertion of rows from temporary table to actual table.",
+      'successfullyInsertedRows' :  "Successfully inserted rows from temporary table to actual table.",
+      'failedToInsertRows' :  "Failed to insert rows from temporary table to actual table.",
+      'startingToDeleteTemporaryTable' :  "Starting to delete temporary table.",
+      'waitingToDeleteTemporaryTable' :  "Waiting for deletion of temporary table.",
+      'successfullyDeletedTemporaryTable' :  "Successfully deleted temporary table",
+      'manuallyDeleteTable' :  "You will have to manually delete the table {{databaseName}}.{{tableName}}",
+      'uploadingFromHdfs' :  "Uploading file from HDFS ",
+      'successfullyUploadedTableMessage' : "Table {{tableName}} created in database {{databaseName}}",
+      'successfullyUploadedTableHeader' : "Uploaded Successfully"
+    },
+    words :{
+      temporary : "Temporary",
+      actual : "Actual",
+      database : "Database",
+    },
+    ui : {
+      'uploadProgress' : "Upload Progress",
+      'uploadFromLocal':"Upload from Local",
+      'uploadFromHdfs':"Upload from HDFS",
+      'selectFileType':"Select File Type",
+      'fileType':"File type",
+      'selectFromLocal':"Select from local",
+      'hdfsPath':"HDFS Path",
+      'selectDatabase':"Select a Database",
+      'tableName':"Table name",
+      'tableNameErrorMessage':"Only alphanumeric and underscore characters are allowed in table name.",
+      'tableNameTooltip':"Enter valid (alphanumeric + underscore) table name.",
+      'storedAs':"Stored as",
+      'isFirstRowHeader':"Is first row header ?",
+      'columnNameTooltip':"Enter valid (alphanumeric + underscore) column name.",
+      'columnNameErrorMessage':"Only alphanumeric and underscore characters are allowed in column names.",
+
     }
   },
 

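The new keys rely on Ember.I18n's {{placeholder}} interpolation, which is what lets one message serve both the temporary and the actual table. For example, assuming the default Ember.I18n compiler:

    Ember.I18n.t('hive.messages.deletingTable', { table: 'Temporary' });
    // => "Deleting Temporary table."

    Ember.I18n.t('hive.errors.illegalColumnName', { columnName: 'first name', index: 2 });
    // => "Illegal column name : 'first name' in column number 2"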
+ 1 - 8
contrib/views/hive-next/src/main/resources/ui/hive-web/app/routes/history.js

@@ -34,13 +34,6 @@ export default Ember.Route.extend({
     if (!model) {
       return;
     }
-
-    var filteredModel = model.filter(function (job) {
-       //filter out jobs with referrer type of sample, explain and visual explain
-       return (!job.get('referrer') || job.get('referrer') === constants.jobReferrer.job) &&
-              !!job.get('id');
-    });
-
-    controller.set('history', filteredModel);
+    controller.set('history', model);
   }
 });

+ 4 - 4
contrib/views/hive-next/src/main/resources/ui/hive-web/app/services/job.js

@@ -31,7 +31,7 @@ export default Ember.Service.extend({
        url: url,
        type: 'DELETE',
        headers: {
-        'X-Requested-By': 'ambari',
+        'X-Requested-By': 'ambari'
        },
        success: function () {
          job.reload();
@@ -39,11 +39,11 @@ export default Ember.Service.extend({
     });
   },
 
-  fetchJobStatus: function (jobId) {
-    console.log("finding status of job : ", jobId);
+  fetchJob : function (jobId){
+    console.log("fetching job : ", jobId);
     var self = this;
     var url = this.container.lookup('adapter:application').buildURL();
-    url +=  "/jobs/" + jobId + "/status";
+    url +=  "/jobs/" + jobId ;
 
     return Ember.$.ajax({
       url: url,

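fetchJob now requests the whole job resource at /jobs/{id} instead of only its status, so a caller can read both the status and any error details from one response. A minimal polling sketch, assuming the payload carries a job.status field with terminal values such as SUCCEEDED or ERROR (the controller's waitForJobStatus is not part of this hunk):

    import Ember from 'ember';

    // Hypothetical polling loop built on the new fetchJob.
    function pollJob(jobService, jobId, resolve, reject) {
      jobService.fetchJob(jobId).then(function (data) {
        var status = data.job.status;
        if (status === 'SUCCEEDED') {
          resolve(data.job);
        } else if (status === 'ERROR' || status === 'CANCELED') {
          reject(new Error('Job ' + jobId + ' ended with status ' + status));
        } else {
          // still running; check again in a couple of seconds
          Ember.run.later(null, pollJob, jobService, jobId, resolve, reject, 2000);
        }
      }, reject);
    }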
+ 4 - 0
contrib/views/hive-next/src/main/resources/ui/hive-web/app/styles/app.scss

@@ -678,3 +678,7 @@ td.data-upload-form-field {
 table.no-border, table.no-border tr, table.no-border tr td {
   border: none;
 }
+
+.red-border {
+  border-color :red;
+}

+ 23 - 0
contrib/views/hive-next/src/main/resources/ui/hive-web/app/templates/components/validated-text-field.hbs

@@ -0,0 +1,23 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
+
+{{!
+* see example in validated-text-field.js component file
+}}
+
+{{input class=inputClass value=inputValue title=message placeholder=placeholder}}

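The component's JS file is referenced above but not shown in this excerpt. From this template and the parameters passed in upload-table.hbs (inputValue, allowEmpty, regex, invalidClass, validClass, tooltip, errorMessage), a minimal sketch of the logic it needs could look like the following; this is an illustration, not the committed implementation:

    import Ember from 'ember';

    export default Ember.Component.extend({
      // valid when the whole value matches the supplied regex exactly once
      isValid: Ember.computed('inputValue', function () {
        var value = this.get('inputValue');
        if (!value) { return !!this.get('allowEmpty'); }
        var matches = value.match(new RegExp(this.get('regex'), 'g'));
        return matches != null && matches.length === 1;
      }),
      // the template binds this to the input's class attribute
      inputClass: Ember.computed('isValid', function () {
        return this.get(this.get('isValid') ? 'validClass' : 'invalidClass');
      }),
      // the template binds this to the input's title (tooltip)
      message: Ember.computed('isValid', function () {
        return this.get(this.get('isValid') ? 'tooltip' : 'errorMessage');
      })
    });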
+ 34 - 22
contrib/views/hive-next/src/main/resources/ui/hive-web/app/templates/upload-table.hbs

@@ -30,7 +30,7 @@
     <!-- Modal content-->
     <div class="modal-content">
       <div class="modal-header">
-        <h4 class="modal-title">Upload Progress</h4>
+        <h4 class="modal-title">{{t "hive.ui.uploadProgress"}}</h4>
       </div>
       <div class="modal-body">
         <p>
@@ -51,60 +51,67 @@
   <div>
     <table class="table data-upload-form pull-left">
       <tr>
-        <td class="data-upload-form-label"><label>Upload from Local</label></td>
+        <td class="data-upload-form-label"><label>{{t "hive.ui.uploadFromLocal"}}</label></td>
         <td  class="data-upload-form-field"> {{radio-button value='local' checked=uploadSource}}</td>
 
-        <td class="data-upload-form-label"><label>Upload from HDFS</label></td>
+        <td class="data-upload-form-label"><label>{{t "hive.ui.uploadFromHdfs"}}</label></td>
         <td  class="data-upload-form-field">{{radio-button value='hdfs' checked=uploadSource}}</td>
       </tr>
       <tr>
-        <td class="data-upload-form-label"><label>File type</label></td>
+        <td class="data-upload-form-label"><label>{{t "hive.ui.fileType"}}</label></td>
         <td class="data-upload-form-field">
           {{typeahead-widget
           content=inputFileTypes
           optionValuePath="id"
           optionLabelPath="name"
           selection=inputFileType
-          placeholder="Select File Type"}}
+          placeholder=(t "hive.ui.uploadFromHdfs")
+          }}
         </td>
 
 
         {{#if isLocalUpload }}
-          <td class="data-upload-form-label"><label>Select from local</label></td>
-          <td class="data-upload-form-field">{{file-upload  filesUploaded="filesUploaded"}}</td>
+          <td class="data-upload-form-label"><label>{{t "hive.ui.selectFromLocal"}}</label></td>
+          <td class="data-upload-form-field">{{file-upload  filesUploaded="filesUploaded"  uploadFiles=files}}</td>
         {{else}}
-          <td class="data-upload-form-label"><label>HDFS Path</label></td>
+          <td class="data-upload-form-label"><label>{{t "hive.ui.hdfsPath"}}</label></td>
           <td class="data-upload-form-field" id="hdfs-param">{{input type="text" class="form-control" placeholder="Enter full HDFS path" value=hdfsPath }}
             <button style="margin-left: 5px; padding-top: 6px;padding-bottom: 6px; padding-right: 10px; padding-left: 10px;" type="button" {{action "previewFromHdfs"}}
             {{bind-attr class=":btn :btn-sm :btn-default"}}>{{t "buttons.showPreview"}}</button></td>
         {{/if}}
       </tr>
-      {{#if rows}}
+      {{#if showPreview}}
         <tr>
-          <td class="data-upload-form-label"><label>Database</label></td>
+          <td class="data-upload-form-label"><label>{{t "hive.words.database"}}</label></td>
           <td class="data-upload-form-field">
             {{typeahead-widget
-            content=controllers.databases.databases
+            content=databases
             optionValuePath="id"
             optionLabelPath="name"
             selection=selectedDatabase
-            placeholder="Select a Database"
+            placeholder=(t "hive.ui.selectDatabase")
             }}
           </td>
 
-          <td class="data-upload-form-label"><label>Table name</label></td>
+          <td class="data-upload-form-label"><label>{{t "hive.ui.tableName"}}</label></td>
           <td
-            class="data-upload-form-field">{{input type="text" class="form-control" placeholder="Table Name" value=tableName }}</td>
+            class="data-upload-form-field">
+            {{#validated-text-field inputValue=tableName allowEmpty=false
+            tooltip=(t "hive.ui.tableNameTooltip")
+            invalidClass='form-control red-border' validClass='form-control' regex=TABLE_NAME_REGEX
+            errorMessage=(t "hive.ui.tableNameErrorMessage") }}
+            {{/validated-text-field}}
+          </td>
         </tr>
         <tr>
-          <td class="data-upload-form-label"><label>Stored as</label></td>
+          <td class="data-upload-form-label"><label>{{t "hive.ui.storedAs"}}</label></td>
           <td class="data-upload-form-field">
             {{typeahead-widget
             content=fileTypes
             selection=selectedFileType}}
           </td>
           {{#if inputFileTypeCSV }}
-            <td class="data-upload-form-label"><label>Is first row header ?</label></td>
+            <td class="data-upload-form-label"><label>{{t "hive.ui.isFirstRowHeader"}}</label></td>
             <td class="data-upload-form-field">
               {{input id="isFirstRowHeader" type="checkbox" checked=isFirstRowHeader }}
             </td>
@@ -114,27 +121,32 @@
       {{/if}}
     </table>
 
+    {{#if showPreview}}
     <table class="pull-right">
       <tr>
         <td>
-          {{#if rows}}
             <button type="button" {{action "uploadTable"}}
               {{bind-attr class=":btn :btn-sm :btn-default"}}>{{t "buttons.uploadTable"}}</button>
-          {{/if}}
         </td>
       </tr>
     </table>
-
+    {{/if}}
   </div>
 
   <div>
-    {{#if rows}}
+    {{#if showPreview}}
       <div id="upload-table">
         <table class="table table-expandable no-border">
           <thead>
           <tr>
             {{#each column in header}}
-              <th>{{input placeholder="column-name" type="text" class="form-control" value=column.name}}</th>
+              <th>
+                {{#validated-text-field inputValue=column.name allowEmpty=false
+                tooltip=(t "hive.ui.columnNameTooltip")
+                invalidClass='form-control red-border' validClass='form-control' regex=COLUMN_NAME_REGEX
+                errorMessage=(t "hive.ui.columnNameErrorMessage")}}
+                {{/validated-text-field}}
+              </th>
             {{/each}}
           </tr>
           <tr id="upload-controls">
@@ -165,4 +177,4 @@
       </div>
     {{/if}}
   </div>
-</div>
+</div>

+ 109 - 0
contrib/views/hive-next/src/test/java/org/apache/ambari/view/hive2/resources/upload/CSVParserTest.java

@@ -0,0 +1,109 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive2.resources.upload;
+
+import org.apache.ambari.view.hive2.client.Row;
+import org.apache.ambari.view.hive2.resources.uploads.parsers.csv.CSVParser;
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.io.StringReader;
+import java.util.Iterator;
+
+public class CSVParserTest {
+
+  /**
+   * creating a CSVParser from an empty stream should not throw an exception
+   * @throws IOException
+   */
+  @Test
+  public void testEmptyStream() throws Exception {
+    String csv = "";
+
+    try(
+      StringReader sr = new StringReader(csv);
+      CSVParser jp = new CSVParser(sr, null);
+      ) {
+      Assert.assertEquals("There should not be any rows.",false, jp.iterator().hasNext());
+    }
+  }
+
+  /**
+   * in CSV, an empty line is still considered a row
+   * @throws IOException
+   */
+  @Test
+  public void testEmptyRow() throws Exception {
+    String csv = "       ";
+
+    try(
+      StringReader sr = new StringReader(csv);
+      CSVParser jp = new CSVParser(sr, null);
+      ) {
+      Iterator<Row> iterator = jp.iterator();
+
+      Assert.assertEquals("Iterator should be Empty", true, iterator.hasNext());
+      Assert.assertArrayEquals("Row should not be empty",new Object[]{"       "},iterator.next().getRow());
+    }
+  }
+
+  @Test
+  public void testParse1Row() throws Exception {
+    String csv = "value1,c,10,10.1";
+
+    try(
+      StringReader sr = new StringReader(csv);
+      CSVParser jp = new CSVParser(sr, null);
+      ) {
+      Iterator<Row> iterator = jp.iterator();
+
+      Assert.assertEquals("Iterator Empty!", true, iterator.hasNext());
+      Row row = iterator.next();
+      Row expected = new Row(new Object[]{"value1", "c", "10", "10.1"});
+      Assert.assertEquals("Row not equal!", expected, row);
+
+      Assert.assertEquals("Should report no more rows!", false, iterator.hasNext());
+    }
+  }
+
+  @Test
+  public void testParseMultipleRow() throws Exception {
+
+    String csv = "value1,c,10,10.1\n" +
+            "value2,c2,102,true";
+
+    try(
+      StringReader sr = new StringReader(csv);
+      CSVParser jp = new CSVParser(sr, null);
+    ) {
+
+      Iterator<Row> iterator = jp.iterator();
+
+      Assert.assertEquals("Failed to detect 1st row!", true, iterator.hasNext());
+      Assert.assertEquals("Failed to match 1st row!", new Row(new Object[]{"value1", "c", "10", "10.1"}), iterator.next());
+
+      Assert.assertEquals("Failed to detect 2nd row!", true, iterator.hasNext());
+      Assert.assertEquals("Failed to match 2nd row!", new Row(new Object[]{"value2", "c2", "102", Boolean.TRUE.toString()}), iterator.next());
+
+      Assert.assertEquals("Failed to detect end of rows!", false, iterator.hasNext());
+      Assert.assertEquals("Failed to detect end of rows 2nd time!", false, iterator.hasNext());
+    }
+  }
+}

+ 326 - 0
contrib/views/hive-next/src/test/java/org/apache/ambari/view/hive2/resources/upload/DataParserCSVTest.java

@@ -0,0 +1,326 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive2.resources.upload;
+
+import org.apache.ambari.view.hive2.client.ColumnDescription;
+import org.apache.ambari.view.hive2.client.ColumnDescriptionShort;
+import org.apache.ambari.view.hive2.client.Row;
+import org.apache.ambari.view.hive2.resources.uploads.ColumnDescriptionImpl;
+import org.apache.ambari.view.hive2.resources.uploads.parsers.DataParser;
+import org.apache.ambari.view.hive2.resources.uploads.parsers.ParseOptions;
+import org.apache.ambari.view.hive2.resources.uploads.parsers.PreviewData;
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.io.StringReader;
+
+public class DataParserCSVTest {
+  @Test
+  public void testParsePreviewCSV() throws Exception {
+    String str = "1,a\n" +
+            "2,b\n" +
+            "3,c\n";
+
+
+    ParseOptions parseOptions = new ParseOptions();
+    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.CSV.toString());
+    parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.FIRST_RECORD.toString());
+
+
+    try (
+      StringReader sr = new StringReader(str);
+      DataParser dp = new DataParser(sr, parseOptions);
+    ){
+
+      PreviewData pd = dp.parsePreview();
+      Assert.assertNotNull(pd.getPreviewRows());
+      Assert.assertNotNull(pd.getHeader());
+      Assert.assertEquals(2, pd.getPreviewRows().size()); // now it will not return the first row which is header
+      Assert.assertEquals(2, pd.getHeader().size());
+      ColumnDescription[] cd = {new ColumnDescriptionImpl("1", ColumnDescriptionShort.DataTypes.INT.toString(), 0),
+              new ColumnDescriptionImpl("a", ColumnDescriptionShort.DataTypes.CHAR.toString(), 1)};
+
+      Object cols2[] = new Object[2];
+      cols2[0] = "2";
+      cols2[1] = "b";
+      Row row2 = new Row(cols2);
+
+      Object cols3[] = new Object[2];
+      cols3[0] = "3";
+      cols3[1] = "c";
+      Row row3 = new Row(cols3);
+
+      Row[] rows = { row2, row3};
+
+      Assert.assertArrayEquals("Header Not Correct.", cd, pd.getHeader().toArray());
+      Assert.assertArrayEquals("Rows Not Correct.", rows, pd.getPreviewRows().toArray());
+    }
+  }
+
+  /**
+   * if even one of the preview rows does not fit the narrower datatype, the column should be assigned the broader datatype,
+   * but if the first row is the header then it should not be counted while detecting the datatype.
+   * @throws IOException
+   */
+  @Test
+  public void testParsePreviewDataTypeDetectionCSV() throws Exception {
+    String str = "1,a,10,k\n" +
+      "2,b,6,8\n" +
+      "2.2,b,7,9\n" +
+      "2,b,abc,1\n" +
+      "2,b,9,3\n" +
+      "2,b,8,5\n" +
+      "2,b,7,3\n" +
+      "2,b,6,3\n" +
+      "3,c,c,3\n";
+
+
+    ParseOptions parseOptions = new ParseOptions();
+    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.CSV.toString());
+    parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.FIRST_RECORD.toString());
+
+    try(StringReader sr = new StringReader(str);
+      DataParser dp= new DataParser(sr, parseOptions)) {
+
+      PreviewData pd = dp.parsePreview();
+      Assert.assertNotNull(pd.getHeader());
+      Assert.assertEquals(4, pd.getHeader().size());
+      ColumnDescription[] cd = {
+        // as row 3 contains 2.2
+        new ColumnDescriptionImpl("1", ColumnDescriptionShort.DataTypes.DOUBLE.toString(), 0),
+        // as all are chars
+        new ColumnDescriptionImpl("a", ColumnDescriptionShort.DataTypes.CHAR.toString(), 1),
+        // as row 4 contains abc
+        new ColumnDescriptionImpl("10", ColumnDescriptionShort.DataTypes.STRING.toString(), 2),
+        // row 1 contains 'k' but it is the header and is not counted in detecting the datatype
+        new ColumnDescriptionImpl("k", ColumnDescriptionShort.DataTypes.INT.toString(), 3)};
+
+      Assert.assertArrayEquals("Header Not Correct.", cd, pd.getHeader().toArray());
+    }
+  }
+
+  /**
+   * if even one of the preview rows does not fit the narrower datatype, the column should be assigned the broader datatype,
+   * but if the first row is the header then it should not be counted while detecting the datatype.
+   * @throws IOException
+   */
+  @Test
+  public void testParsePreviewDataTypeDetection2CSV() throws Exception {
+    String str = "1,a,10,k\n" +
+      "2,b,6,p\n" +
+      "2.2,b,7,9\n" +
+      "2,b,2.2,1\n" +
+      "2,b,9,3\n" +
+      "2,b,8,5\n" +
+      "2,b,7,3\n" +
+      "2,b,6,3\n" +
+      "3,c,c,3\n";
+
+
+    ParseOptions parseOptions = new ParseOptions();
+    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.CSV.toString());
+    parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.FIRST_RECORD.toString());
+
+
+    try(StringReader sr = new StringReader(str);
+        DataParser dp = new DataParser(sr, parseOptions)) {
+
+
+      PreviewData pd = dp.parsePreview();
+      Assert.assertNotNull(pd.getHeader());
+      Assert.assertEquals(4, pd.getHeader().size());
+      ColumnDescription[] cd = {
+        // as row 3 contains 2.2
+        new ColumnDescriptionImpl("1", ColumnDescriptionShort.DataTypes.DOUBLE.toString(), 0),
+        // as all are chars
+        new ColumnDescriptionImpl("a", ColumnDescriptionShort.DataTypes.CHAR.toString(), 1),
+        // some are int, char and some double .. nothing other than 'string' satisfies all the rows
+        new ColumnDescriptionImpl("10", ColumnDescriptionShort.DataTypes.STRING.toString(), 2),
+        // row 1 contains 'k' but it is the header and is not counted in detecting the datatype;
+        // row 2 also has the char 'p', which is accounted for, so the column is detected as CHAR
+        new ColumnDescriptionImpl("k", ColumnDescriptionShort.DataTypes.CHAR.toString(), 3)};
+
+      Assert.assertArrayEquals("Header Not Correct.", cd, pd.getHeader().toArray());
+    }
+  }
+
+  /**
+   * a one-row CSV gives default column names and its single row in the preview if HEADER.PROVIDED_BY_USER is selected
+   * @throws IOException
+   */
+  @Test
+  public void testParsePreview1RowCSV() throws Exception {
+    String str = "1,a\n" ;
+
+    ParseOptions parseOptions = new ParseOptions();
+    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.CSV.toString());
+    parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.PROVIDED_BY_USER.toString());
+
+    try(
+      StringReader sr = new StringReader(str);
+      DataParser dp = new DataParser(sr, parseOptions)
+    ) {
+
+
+      PreviewData pd = dp.parsePreview();
+      Assert.assertNotNull(pd.getPreviewRows());
+      Assert.assertNotNull(pd.getHeader());
+      Assert.assertEquals(1, pd.getPreviewRows().size());
+      Assert.assertEquals(2, pd.getHeader().size());
+      ColumnDescription[] cd = {new ColumnDescriptionImpl("Column1", ColumnDescriptionShort.DataTypes.INT.toString(), 0),
+        new ColumnDescriptionImpl("Column2", ColumnDescriptionShort.DataTypes.CHAR.toString(), 1)};
+
+      Object cols1[] = new Object[2];
+      cols1[0] = "1";
+      cols1[1] = "a";
+      Row row1 = new Row(cols1);
+
+      Row[] rows = {row1};
+
+      Assert.assertArrayEquals("Header Not Correct.", cd, pd.getHeader().toArray());
+      Assert.assertArrayEquals("Rows Not Correct.", rows, pd.getPreviewRows().toArray());
+    }
+  }
+
+  /**
+   * a one-row CSV throws an exception during preview if HEADER.FIRST_RECORD is selected, because the only row is consumed as the header.
+   * @throws IOException
+   */
+  @Test(expected = java.util.NoSuchElementException.class)
+  public void testParsePreview1RowCSVFirstRowHeader() throws Exception {
+    String str = "col1,col2\n" ;
+
+
+    ParseOptions parseOptions = new ParseOptions();
+    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.CSV.toString());
+    parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.FIRST_RECORD.toString());
+
+
+    try(
+      StringReader sr = new StringReader(str);
+      DataParser dp = new DataParser(sr, parseOptions)
+    ) {
+
+
+      PreviewData pd = dp.parsePreview();
+    }
+  }
+
+  /**
+   * more number of columns in a row => igore the extra columns. Number of columns is decided by the first row.
+   * If other row contains more columns then those columns will be ignored
+   * Here first row has 2 columns and second row has 3 columns so the value 'x' is ignored
+   * @throws IOException
+   */
+  @Test
+  public void testParsePreviewCSVMoreColumns() throws Exception {
+    String str = "1,a\n" +
+            "2,b,x\n" +  // contains 3 cols, more number of columns
+            "3,c\n";
+
+    ParseOptions parseOptions = new ParseOptions();
+    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.CSV.toString());
+    parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.FIRST_RECORD.toString());
+
+
+    try(
+        StringReader sr = new StringReader(str);
+        DataParser dp = new DataParser(sr, parseOptions)
+    ) {
+
+      PreviewData pd = dp.parsePreview();
+      Row row = new Row(new Object[]{"2","b"});
+
+      Assert.assertArrayEquals("Additional columns not properly handled.", row.getRow(),pd.getPreviewRows().get(0).getRow());
+    }
+  }
+
+  /**
+   * less number of columns => treat missing values as null. Number of columns is decided by the first row of the table
+   * if other rows has less number of columns then it treats other columns as null
+   * @throws IOException
+   */
+  @Test
+  public void testParsePreviewCSVLessColumns() throws Exception {
+    String str = "1,a\n" +
+            "2\n" +  // contains 1 col, less number of columns
+            "3,c\n";
+
+    ParseOptions parseOptions = new ParseOptions();
+    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.CSV.toString());
+
+    try(
+      StringReader sr = new StringReader(str);
+      DataParser dp =  new DataParser(sr, parseOptions)
+      ) {
+
+      PreviewData pd = dp.parsePreview();
+      Assert.assertEquals("Missing value not detected as null.",pd.getPreviewRows().get(1).getRow()[1],null);
+    }
+  }
+
+  /**
+   * empty values are treated as empty string
+   * @throws IOException
+   */
+  @Test
+  public void testEmptyColumn() throws Exception {
+    String str = "1,a,x\n" +
+            "2,,y\n" +  // contains 1 col, less number of columns
+            "3,c,z\n";
+    ParseOptions parseOptions = new ParseOptions();
+    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.CSV.toString());
+    parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.FIRST_RECORD.toString());
+
+    try(
+      StringReader sr = new StringReader(str);
+      DataParser dp = new DataParser(sr, parseOptions)
+    ) {
+
+      PreviewData pd = dp.parsePreview();
+      Assert.assertEquals("Empty column not detected properly.",pd.getPreviewRows().get(0).getRow()[1],"");
+    }
+  }
+
+  /**
+   * empty values are treated as empty string
+   * @throws IOException
+   */
+  @Test
+  public void testLastEmptyColumn() throws Exception {
+    String str = "1,a,x\n" +
+            "2,,\n" +  // contains 1 col, less number of columns
+            "3,c,z\n";
+
+    ParseOptions parseOptions = new ParseOptions();
+    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.CSV.toString());
+    parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.FIRST_RECORD.toString());
+
+    try(
+      StringReader sr = new StringReader(str);
+      DataParser dp = new DataParser(sr, parseOptions)
+    ) {
+
+      PreviewData pd = dp.parsePreview();
+      Assert.assertEquals("Empty column not detected properly.",pd.getPreviewRows().get(0).getRow()[1],"");
+      Assert.assertEquals("Empty column not detected properly.",pd.getPreviewRows().get(0).getRow()[2],"");
+    }
+  }
+}

+ 263 - 0
contrib/views/hive-next/src/test/java/org/apache/ambari/view/hive2/resources/upload/DataParserJSONTest.java

@@ -0,0 +1,263 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive2.resources.upload;
+
+import org.apache.ambari.view.hive2.client.ColumnDescription;
+import org.apache.ambari.view.hive2.client.ColumnDescriptionShort;
+import org.apache.ambari.view.hive2.client.Row;
+import org.apache.ambari.view.hive2.resources.uploads.ColumnDescriptionImpl;
+import org.apache.ambari.view.hive2.resources.uploads.parsers.DataParser;
+import org.apache.ambari.view.hive2.resources.uploads.parsers.ParseOptions;
+import org.apache.ambari.view.hive2.resources.uploads.parsers.PreviewData;
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.io.StringReader;
+
+public class DataParserJSONTest {
+
+  @Test
+  public void testParsePreviewJSON() throws Exception {
+    String str = "[ {\"col1\" : \"a\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"4.4\" },"
+            + "{\"col1\": \"b\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"5.4\" },"
+            + "{\"col1\": \"c\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"6.4\" },"
+            + "{\"col1\": \"d\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"7.4\" },"
+            + "{\"col1\": \"e\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"8.4\" },"
+            + "{\"col1\": \"f\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"9.4\" },"
+            + "{\"col1\": \"g\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"10.4\" },"
+            + "{\"col1\": \"h\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"11.4\" },"
+            + "{\"col1\": \"i\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"4\" },"
+            + "{\"col1\": \"j\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"5\" },"
+            + "{\"col1\": \"k\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"6\" },"
+            + "{\"col1\": \"l\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"7\" },"
+            + "{\"col1\": \"m\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"24.4\" },"
+            + "{\"col1\": \"n\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"14.4\" },"
+            + "{\"col1\": \"o\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"34.4\" },"
+            + "{\"col1\": \"p\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"44.4\" },"
+            + "{\"col1\": \"q\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"54.4\" },"
+            + "{\"col1\": \"r\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"64.4\" }"
+            + "]";
+
+
+    ParseOptions parseOptions = new ParseOptions();
+    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.JSON.toString());
+    parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.EMBEDDED.toString());
+    parseOptions.setOption(ParseOptions.OPTIONS_NUMBER_OF_PREVIEW_ROWS, 7);
+
+    try(
+      StringReader sr = new StringReader(str);
+      DataParser dp =  new DataParser(sr, parseOptions)
+    ) {
+
+      PreviewData pd = dp.parsePreview();
+      Assert.assertNotNull(pd.getPreviewRows());
+      Assert.assertNotNull(pd.getHeader());
+      Assert.assertEquals(7, pd.getPreviewRows().size()); // as many rows as requested via OPTIONS_NUMBER_OF_PREVIEW_ROWS
+      Assert.assertEquals(14, pd.getHeader().size());
+      ColumnDescription[] cd = {new ColumnDescriptionImpl("col1", ColumnDescriptionShort.DataTypes.CHAR.toString(), 0),
+              new ColumnDescriptionImpl("col2", ColumnDescriptionShort.DataTypes.STRING.toString(), 1),
+              new ColumnDescriptionImpl("col3", ColumnDescriptionShort.DataTypes.STRING.toString(), 2),
+              new ColumnDescriptionImpl("col4", ColumnDescriptionShort.DataTypes.STRING.toString(), 3),
+              new ColumnDescriptionImpl("col5", ColumnDescriptionShort.DataTypes.STRING.toString(), 4),
+              new ColumnDescriptionImpl("col6", ColumnDescriptionShort.DataTypes.STRING.toString(), 5),
+              new ColumnDescriptionImpl("col7", ColumnDescriptionShort.DataTypes.STRING.toString(), 6),
+              new ColumnDescriptionImpl("col8", ColumnDescriptionShort.DataTypes.STRING.toString(), 7),
+              new ColumnDescriptionImpl("col9", ColumnDescriptionShort.DataTypes.STRING.toString(), 8),
+              new ColumnDescriptionImpl("col10", ColumnDescriptionShort.DataTypes.STRING.toString(), 9),
+              new ColumnDescriptionImpl("col11", ColumnDescriptionShort.DataTypes.STRING.toString(), 10),
+              new ColumnDescriptionImpl("col12", ColumnDescriptionShort.DataTypes.STRING.toString(), 11),
+              new ColumnDescriptionImpl("col13", ColumnDescriptionShort.DataTypes.STRING.toString(), 12),
+              new ColumnDescriptionImpl("col14", ColumnDescriptionShort.DataTypes.DOUBLE.toString(), 13)};
+
+      Row row2 = new Row(new Object[]{"a", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "4.4"});
+      Row row3 = new Row(new Object[]{"b", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "5.4"});
+      Row row4 = new Row(new Object[]{"c", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "6.4"});
+      Row row5 = new Row(new Object[]{"d", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "7.4"});
+      Row row6 = new Row(new Object[]{"e", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "8.4"});
+      Row row7 = new Row(new Object[]{"f", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "9.4"});
+      Row row8 = new Row(new Object[]{"g", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "10.4"});
+
+      Row[] rows = { row2, row3, row4, row5, row6, row7, row8};
+
+      Assert.assertArrayEquals("Header Not Correct.", cd, pd.getHeader().toArray());
+      Assert.assertArrayEquals("Rows Not Correct.", rows, pd.getPreviewRows().toArray());
+    }
+  }
+
+  /**
+   * additional columns in rows of JSON are ignored.
+   *
+   * @throws IOException
+   */
+  @Test
+  public void testParsePreviewJSONMoreColumns() throws Exception {
+    String str = "[ {\"col1\" : \"a\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"4.4\" },"
+            + "{\"col1\": \"b\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" , \"col14\" : \"43.4\" ,\"col15\" : \"asafsfa\" },"
+            + "{\"col1\": \"c\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"6.4\" },"
+            + "{\"col1\": \"d\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"7.4\" }"
+            + "]";
+
+
+    ParseOptions parseOptions = new ParseOptions();
+    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.JSON.toString());
+
+    try(
+      StringReader sr = new StringReader(str);
+      DataParser dp =  new DataParser(sr, parseOptions)
+    ) {
+
+      PreviewData pd = dp.parsePreview();
+
+      Row row2 = new Row(new Object[]{"b", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "43.4"});
+      Assert.assertArrayEquals("More number of columns do not give correct result.", row2.getRow(), pd.getPreviewRows().get(1).getRow());
+    }
+  }
+
+  /**
+   * missing columns in JSON are read as null.
+   *
+   * @throws IOException
+   */
+  @Test
+  public void testParsePreviewJSONLessColumns() throws Exception {
+    String str = "[ " +
+            "{\"col1\" : \"a\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"4.4\" },"
+            + "{\"col1\": \"b\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\"  },"
+            + "{\"col1\": \"c\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"6.4\" },"
+            + "{\"col1\": \"d\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"7.4\" }"
+            + "]";
+
+    ParseOptions parseOptions = new ParseOptions();
+    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.JSON.toString());
+    parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.EMBEDDED.toString());
+
+    try(
+      StringReader sr = new StringReader(str);
+      DataParser dp =  new DataParser(sr, parseOptions)
+    ) {
+      PreviewData pd = dp.parsePreview();
+
+      Assert.assertNull(pd.getPreviewRows().get(1).getRow()[13]);
+    }
+  }
+
+  /**
+   * Illegal JSON format raises an error.
+   *
+   * @throws IOException
+   */
+  @Test(expected = IllegalArgumentException.class)
+  public void testWrongJsonFormat() throws Exception {
+    String str = "[ " +
+            "{\"col1\" : \"a\", \n\"col2\": \"abcd\" },"
+            + "{\"col1\": \"b\", \n\"col2\": \"abcd\" },"
+            + "{\"col1\": \"c\", \n\"col2\": \"abcd\"  },"
+            + "{\"col1\": \"d\",, \n\"col2\": \"abcd\"  }"       // extra comma in this line
+            + "]";
+    ParseOptions parseOptions = new ParseOptions();
+    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.JSON.toString());
+    parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.EMBEDDED.toString());
+
+    try(
+      StringReader sr = new StringReader(str);
+      DataParser dp = new DataParser(sr, parseOptions);
+    ) {
+      PreviewData pd = dp.parsePreview();
+    }
+  }
+
+  /**
+   * A one-row JSON input yields the embedded column names and the first row in the preview when HEADER.EMBEDDED is selected.
+   * @throws IOException
+   */
+  @Test
+  public void testParsePreview1RowJSON() throws Exception {
+    String str = "[ "
+      + "{\"col1\": \"d\", \n\"col2\": \"abcd\"  }"       // extra comma in this line
+      + "]";
+
+    ParseOptions parseOptions = new ParseOptions();
+    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.JSON.toString());
+    parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.EMBEDDED.toString());
+
+    try(
+      StringReader sr = new StringReader(str);
+      DataParser dp = new DataParser(sr, parseOptions);
+    ) {
+
+      PreviewData pd = dp.parsePreview();
+      Assert.assertNotNull(pd.getPreviewRows());
+      Assert.assertNotNull(pd.getHeader());
+      Assert.assertEquals(1, pd.getPreviewRows().size());
+      Assert.assertEquals(2, pd.getHeader().size());
+      ColumnDescription[] cd = {new ColumnDescriptionImpl("col1", ColumnDescriptionShort.DataTypes.CHAR.toString(), 0),
+        new ColumnDescriptionImpl("col2", ColumnDescriptionShort.DataTypes.STRING.toString(), 1)};
+
+      Object[] cols1 = new Object[2];
+      cols1[0] = "d";
+      cols1[1] = "abcd";
+      Row row1 = new Row(cols1);
+
+      Row[] rows = {row1};
+
+      Assert.assertArrayEquals("Header Not Correct.", cd, pd.getHeader().toArray());
+      Assert.assertArrayEquals("Rows Not Correct.", rows, pd.getPreviewRows().toArray());
+    }
+  }
+
+  /**
+   * A one-row JSON input yields default column names and the first row in the preview when HEADER.PROVIDED_BY_USER is selected.
+   * @throws IOException
+   */
+  @Test
+  public void testParsePreview1RowJSONHeaderProvided() throws Exception {
+    String str = "[ "
+      + "{\"col1\": \"d\", \n\"col2\": \"abcd\"  }"       // extra comma in this line
+      + "]";
+
+    ParseOptions parseOptions = new ParseOptions();
+    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.JSON.toString());
+    parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.PROVIDED_BY_USER.toString());
+
+    try(
+      StringReader sr = new StringReader(str);
+      DataParser dp = new DataParser(sr, parseOptions);
+    ) {
+
+      PreviewData pd = dp.parsePreview();
+      Assert.assertNotNull(pd.getPreviewRows());
+      Assert.assertNotNull(pd.getHeader());
+      Assert.assertEquals(1, pd.getPreviewRows().size());
+      Assert.assertEquals(2, pd.getHeader().size());
+      ColumnDescription[] cd = {new ColumnDescriptionImpl("Column1", ColumnDescriptionShort.DataTypes.CHAR.toString(), 0),
+        new ColumnDescriptionImpl("Column2", ColumnDescriptionShort.DataTypes.STRING.toString(), 1)};
+
+      Object[] cols1 = new Object[2];
+      cols1[0] = "d";
+      cols1[1] = "abcd";
+      Row row1 = new Row(cols1);
+
+      Row[] rows = {row1};
+
+      Assert.assertArrayEquals("Header Not Correct.", cd, pd.getHeader().toArray());
+      Assert.assertArrayEquals("Rows Not Correct.", rows, pd.getPreviewRows().toArray());
+    }
+  }
+}
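For reference, a minimal sketch of the preview flow these tests exercise, assuming only the DataParser, ParseOptions, PreviewData, ColumnDescription and Row types imported above; the input string `json` and the consuming comments are illustrative, not part of this patch:

// Hedged usage sketch: preview an uploaded JSON document.
ParseOptions opts = new ParseOptions();
opts.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.JSON.toString());
opts.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.EMBEDDED.toString());

try (DataParser parser = new DataParser(new StringReader(json), opts)) {
  PreviewData preview = parser.parsePreview();
  for (ColumnDescription column : preview.getHeader()) {
    // each header entry carries a column name, a guessed data type and a position
  }
  for (Row row : preview.getPreviewRows()) {
    Object[] values = row.getRow(); // one value per detected column, null where absent
  }
}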

+ 295 - 0
contrib/views/hive-next/src/test/java/org/apache/ambari/view/hive2/resources/upload/DataParserXMLTest.java

@@ -0,0 +1,295 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive2.resources.upload;
+
+import org.apache.ambari.view.hive2.client.ColumnDescription;
+import org.apache.ambari.view.hive2.client.ColumnDescriptionShort;
+import org.apache.ambari.view.hive2.client.Row;
+import org.apache.ambari.view.hive2.resources.uploads.ColumnDescriptionImpl;
+import org.apache.ambari.view.hive2.resources.uploads.parsers.DataParser;
+import org.apache.ambari.view.hive2.resources.uploads.parsers.ParseOptions;
+import org.apache.ambari.view.hive2.resources.uploads.parsers.PreviewData;
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.io.StringReader;
+
+public class DataParserXMLTest {
+
+  @Test
+  public void testParsePreviewXML() throws Exception {
+    String str = "<table>" +
+            "<row>" +
+            "<col name=\"col1\">row1-col1-Value</col>" +
+            "<col name=\"col2\">row1-col2-Value</col>" +
+            "<col name=\"col3\">row1-col3-Value</col>" +
+            "<col name=\"col4\">10</col>" +
+            "<col name=\"col5\">11</col>" +
+            "</row>" +
+            "<row>" +
+            "<col name=\"col1\">row2-col1-Value</col>" +
+            "<col name=\"col2\">row2-col2-Value</col>" +
+            "<col name=\"col3\">row2-col3-Value</col>" +
+            "<col name=\"col4\">20</col>" +
+            "<col name=\"col5\">21</col>" +
+            "</row>" +
+            "</table>";
+
+
+    ParseOptions parseOptions = new ParseOptions();
+    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.XML.toString());
+    parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.FIRST_RECORD.toString());
+
+
+    try(
+      StringReader sr = new StringReader(str);
+      DataParser dp = new DataParser(sr, parseOptions);
+      ) {
+
+      PreviewData pd = dp.parsePreview();
+      Assert.assertNotNull(pd.getPreviewRows());
+      Assert.assertNotNull(pd.getHeader());
+      Assert.assertEquals(2, pd.getPreviewRows().size()); // both data rows appear in the preview
+      Assert.assertEquals(5, pd.getHeader().size());
+      ColumnDescription[] cd = {new ColumnDescriptionImpl("col1", ColumnDescriptionShort.DataTypes.STRING.toString(), 0),
+              new ColumnDescriptionImpl("col2", ColumnDescriptionShort.DataTypes.STRING.toString(), 1),
+              new ColumnDescriptionImpl("col3", ColumnDescriptionShort.DataTypes.STRING.toString(), 2),
+              new ColumnDescriptionImpl("col4", ColumnDescriptionShort.DataTypes.INT.toString(), 3),
+              new ColumnDescriptionImpl("col5", ColumnDescriptionShort.DataTypes.INT.toString(), 4)
+      };
+
+      Row row2 = new Row(new Object[]{"row1-col1-Value", "row1-col2-Value", "row1-col3-Value", "10", "11"});
+      Row row3 = new Row(new Object[]{"row2-col1-Value", "row2-col2-Value", "row2-col3-Value", "20", "21"});
+
+      Row[] rows = {row2, row3};
+
+      Assert.assertArrayEquals("Header Not Correct.", cd, pd.getHeader().toArray());
+      Assert.assertArrayEquals("Rows Not Correct.", rows, pd.getPreviewRows().toArray());
+    }
+  }
+
+
+  /**
+   * Additional columns in XML rows are ignored.
+   * The number of columns is determined by the first row of the table; the second row here contains extra columns, which are ignored.
+   * @throws IOException
+   */
+  @Test
+  public void testParsePreviewXMLMoreColumns() throws Exception {
+    String str ="<table>" +
+            "<row>" +
+            "<col name=\"col1\">row1-col1-Value</col>" +
+            "<col name=\"col2\">row1-col2-Value</col>" +
+            "<col name=\"col3\">row1-col3-Value</col>" +
+            "<col name=\"col4\">10</col>" +
+            "<col name=\"col5\">11</col>" +
+            "</row>" +
+            "<row>" +
+            "<col name=\"col1\">row2-col1-Value</col>" +
+            "<col name=\"col2\">row2-col2-Value</col>" +
+            "<col name=\"col3\">row2-col3-Value</col>" +
+            "<col name=\"col99\">row2-col99-Value</col>" +  // extra colummn
+            "<col name=\"col100\">row2-col100-Value</col>" +  // extra column
+            "<col name=\"col4\">20</col>" +
+            "<col name=\"col5\">21</col>" +
+            "</row>" +
+            "</table>";
+
+    ParseOptions parseOptions = new ParseOptions();
+    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.XML.toString());
+
+
+    try(
+      StringReader sr = new StringReader(str);
+      DataParser dp = new DataParser(sr, parseOptions);
+    ) {
+
+
+      PreviewData pd = dp.parsePreview();
+
+      Row row2 = new Row(new Object[]{"row2-col1-Value","row2-col2-Value","row2-col3-Value","20","21"});
+      Assert.assertArrayEquals("More number of columns do not give correct result.", row2.getRow(), pd.getPreviewRows().get(1).getRow());
+    }
+  }
+
+  /**
+   * Missing columns in XML rows are filled with null.
+   * The number of columns is determined by the first row of the table; the second row here lacks col99 and col100,
+   * so those values are set to null.
+   * @throws IOException
+   */
+  @Test
+  public void testParsePreviewXMLLessColumns() throws Exception {
+    String str = "<table>" +
+            "<row>" +
+            "<col name=\"col1\">row1-col1-Value</col>" +
+            "<col name=\"col2\">row1-col2-Value</col>" +
+            "<col name=\"col3\">row1-col3-Value</col>" +
+            "<col name=\"col99\">row2-col99-Value</col>" +  // extra colummn
+            "<col name=\"col100\">row2-col100-Value</col>" +  // extra column
+            "<col name=\"col4\">10</col>" +
+            "<col name=\"col5\">11</col>" +
+            "</row>" +
+            "<row>" +
+            "<col name=\"col1\">row2-col1-Value</col>" +
+            "<col name=\"col2\">row2-col2-Value</col>" +
+            "<col name=\"col3\">row2-col3-Value</col>" +
+            "<col name=\"col4\">20</col>" +
+            "<col name=\"col5\">21</col>" +
+            "</row>" +
+            "</table>";
+
+    ParseOptions parseOptions = new ParseOptions();
+    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.XML.toString());
+    parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.FIRST_RECORD.toString());
+
+    try(
+      StringReader sr = new StringReader(str);
+      DataParser dp = new DataParser(sr, parseOptions);
+      ) {
+      PreviewData pd = dp.parsePreview();
+
+      Row row2 = new Row(new Object[]{"row2-col1-Value","row2-col2-Value","row2-col3-Value",null,null,"20","21"});
+      Assert.assertArrayEquals("Less number of columns do not give correct result.", row2.getRow(), pd.getPreviewRows().get(1).getRow());
+    }
+  }
+
+  /**
+   * Illegal XML format raises an error; here an unexpected tag triggers it.
+   *
+   * @throws IOException
+   */
+  @Test(expected = IllegalArgumentException.class)
+  public void testWrongXMLFormat() throws Exception {
+    String str = "<table>" +
+            "<row>" +
+            "<ccc></ccc>" +   // illegal tag.
+            "<col name=\"col1\">row1-col1-Value</col>" +
+            "<col name=\"col2\">row1-col2-Value</col>" +
+            "<col name=\"col3\">row1-col3-Value</col>" +
+            "<col name=\"col99\">row2-col99-Value</col>" +  // extra colummn
+            "<col name=\"col100\">row2-col100-Value</col>" +  // extra column
+            "<col name=\"col4\">10</col>" +
+            "<col name=\"col5\">11</col>" +
+            "</row>" +
+            "<row>" +
+            "<col name=\"col1\">row2-col1-Value</col>" +
+            "<col name=\"col2\">row2-col2-Value</col>" +
+            "<col name=\"col3\">row2-col3-Value</col>" +
+            "<col name=\"col4\">20</col>" +
+            "<col name=\"col5\">21</col>" +
+            "</row>" +
+            "</table>";
+
+    ParseOptions parseOptions = new ParseOptions();
+    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.XML.toString());
+    parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.FIRST_RECORD.toString());
+    try(
+      StringReader sr = new StringReader(str);
+      DataParser  dp = new DataParser(sr, parseOptions);
+      ) {
+      PreviewData pd = dp.parsePreview();
+    }
+  }
+
+  /**
+   * A one-row XML input yields the embedded column names and the first row in the preview when HEADER.EMBEDDED is selected.
+   * @throws IOException
+   */
+  @Test
+  public void testParsePreview1RowXML() throws Exception {
+    String str = "<table>" +
+                      "<row>" +
+                      "<col name=\"col1\">row1-col1-Value</col>" +
+                      "<col name=\"col2\">11</col>" +
+                      "</row>" +
+                 "</table>";
+
+
+    ParseOptions parseOptions = new ParseOptions();
+    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.XML.toString());
+    parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.EMBEDDED.toString());
+
+    try(
+      StringReader sr = new StringReader(str);
+      DataParser dp = new DataParser(sr, parseOptions);
+      ) {
+
+      PreviewData pd = dp.parsePreview();
+      Assert.assertNotNull(pd.getPreviewRows());
+      Assert.assertNotNull(pd.getHeader());
+      Assert.assertEquals(1, pd.getPreviewRows().size());
+      Assert.assertEquals(2, pd.getHeader().size());
+      ColumnDescription[] cd = {new ColumnDescriptionImpl("col1", ColumnDescriptionShort.DataTypes.STRING.toString(), 0),
+        new ColumnDescriptionImpl("col2", ColumnDescriptionShort.DataTypes.INT.toString(), 1)};
+
+      Object[] cols1 = new Object[2];
+      cols1[0] = "row1-col1-Value";
+      cols1[1] = "11";
+      Row row1 = new Row(cols1);
+
+      Row[] rows = {row1};
+
+      Assert.assertArrayEquals("Header Not Correct.", cd, pd.getHeader().toArray());
+      Assert.assertArrayEquals("Rows Not Correct.", rows, pd.getPreviewRows().toArray());
+    }
+  }
+
+  /**
+   * A one-row XML input yields default column names and the first row in the preview when HEADER.PROVIDED_BY_USER is selected.
+   * @throws IOException
+   */
+  @Test
+  public void testParsePreview1RowXMLHeaderProvided() throws Exception {
+    String str = "<table>" +
+                    "<row>" +
+                    "<col name=\"col1\">row1-col1-Value</col>" +
+                    "<col name=\"col2\">11</col>" +
+                    "</row>" +
+                 "</table>";
+
+    ParseOptions parseOptions = new ParseOptions();
+    parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.XML.toString());
+    parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.PROVIDED_BY_USER.toString());
+
+    try(
+      StringReader sr = new StringReader(str);
+      DataParser dp = new DataParser(sr, parseOptions)
+      ) {
+
+      PreviewData pd = dp.parsePreview();
+      Assert.assertNotNull(pd.getPreviewRows());
+      Assert.assertNotNull(pd.getHeader());
+      Assert.assertEquals(1, pd.getPreviewRows().size());
+      Assert.assertEquals(2, pd.getHeader().size());
+      ColumnDescription[] cd = {new ColumnDescriptionImpl("Column1", ColumnDescriptionShort.DataTypes.STRING.toString(), 0),
+        new ColumnDescriptionImpl("Column2", ColumnDescriptionShort.DataTypes.INT.toString(), 1)};
+
+      Object[] cols1 = new Object[2];
+      cols1[0] = "row1-col1-Value";
+      cols1[1] = "11";
+      Row row1 = new Row(cols1);
+
+      Row[] rows = {row1};
+
+      Assert.assertArrayEquals("Header Not Correct.", cd, pd.getHeader().toArray());
+      Assert.assertArrayEquals("Rows Not Correct.", rows, pd.getPreviewRows().toArray());
+    }
+  }
+}
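The header modes exercised across these XML and JSON tests differ only in where the column names come from; a hedged summary in code form, using the option constants from the tests (the exact semantics are inferred from the assertions above):

// Hedged sketch of the three ParseOptions.HEADER modes, as observed in these tests.
ParseOptions opts = new ParseOptions();
opts.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.XML.toString());

// EMBEDDED: names are taken from the data itself (the XML "name" attributes here),
// and the record still shows up as a preview row.
opts.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.EMBEDDED.toString());

// FIRST_RECORD: the first record supplies the header; for XML the names still come
// from the attributes, and both rows above remain in the preview.
// PROVIDED_BY_USER: placeholder names Column1, Column2, ... are generated and the
// real names are supplied later by the user.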

+ 146 - 0
contrib/views/hive-next/src/test/java/org/apache/ambari/view/hive2/resources/upload/JsonParserTest.java

@@ -0,0 +1,146 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive2.resources.upload;
+
+import com.google.gson.JsonArray;
+import com.google.gson.JsonObject;
+import org.apache.ambari.view.hive2.client.Row;
+import org.apache.ambari.view.hive2.resources.uploads.parsers.json.JSONParser;
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.io.StringReader;
+import java.util.Iterator;
+
+public class JsonParserTest {
+
+  @Test(expected = IOException.class)
+  public void testEmptyStream() throws Exception {
+    String json = "";
+
+    try(
+      StringReader sr = new StringReader(json);
+      JSONParser jp =  new JSONParser(sr, null);
+    ) {
+      // creation of JSONParser will throw IOException for an empty stream.
+    }
+  }
+
+  @Test
+  public void testEmptyRow() throws Exception {
+    JsonObject jo = new JsonObject();
+    JsonArray ja = new JsonArray();
+    ja.add(jo);
+    String json = ja.toString();
+
+    try(
+      StringReader sr = new StringReader(json);
+      JSONParser jp = new JSONParser(sr, null)
+      ) {
+
+      Iterator<Row> iterator = jp.iterator();
+
+      Assert.assertEquals("Iterator should not be Empty", true, iterator.hasNext());
+      Assert.assertArrayEquals("Row should be empty",new Object[]{},iterator.next().getRow());
+    }
+  }
+
+
+  @Test
+  public void testEmptyTable() throws Exception {
+    JsonArray ja = new JsonArray();
+    String json = ja.toString();
+
+    try(
+      StringReader sr = new StringReader(json);
+      JSONParser jp =  new JSONParser(sr, null);
+    ) {
+      Iterator<Row> iterator = jp.iterator();
+      Assert.assertEquals("Iterator Empty!", false, iterator.hasNext());
+    }
+  }
+
+  @Test
+  public void testParse1Row() throws Exception {
+    JsonObject jo = new JsonObject();
+    jo.addProperty("key1","value1");
+    jo.addProperty("key2",'c');
+    jo.addProperty("key3",10);
+    jo.addProperty("key4",10.1);
+
+    JsonArray ja = new JsonArray();
+    ja.add(jo);
+    String json = ja.toString();
+
+    try(
+      StringReader sr = new StringReader(json);
+      JSONParser jp = new JSONParser(sr, null)
+    ) {
+      Iterator<Row> iterator = jp.iterator();
+
+      Assert.assertEquals("Iterator Empty!", true, iterator.hasNext());
+      Row row = iterator.next();
+      Row expected = new Row(new Object[]{"value1", "c", "10", "10.1"});
+      Assert.assertEquals("Row not equal!", expected, row);
+
+      Assert.assertEquals("Should report no more rows!", false, iterator.hasNext());
+    }
+  }
+
+  @Test
+  public void testParseMultipleRow() throws Exception {
+    JsonObject jo1 = new JsonObject();
+    jo1.addProperty("key1","value1");
+    jo1.addProperty("key2","c");
+    jo1.addProperty("key3","10");
+    jo1.addProperty("key4","10.1");
+
+    JsonObject jo2 = new JsonObject();
+    jo2.addProperty("key1","value2");
+    jo2.addProperty("key2","c2");
+    jo2.addProperty("key3","102");
+    jo2.addProperty("key4",true);
+
+
+    JsonArray ja = new JsonArray();
+    ja.add(jo1);
+    ja.add(jo2);
+
+    String json = ja.toString();
+
+    try(
+      StringReader sr = new StringReader(json);
+      JSONParser jp = new JSONParser(sr, null)
+    ) {
+      Iterator<Row> iterator = jp.iterator();
+
+      Assert.assertEquals("Failed to detect 1st row!", true, iterator.hasNext());
+      Assert.assertEquals("Failed to match 1st row!", new Row(new Object[]{"value1", "c", "10", "10.1"}), iterator.next());
+
+      Assert.assertEquals("Failed to detect 2nd row!", true, iterator.hasNext());
+      Assert.assertEquals("Failed to match 2nd row!", new Row(new Object[]{"value2", "c2", "102", Boolean.TRUE.toString()}), iterator.next());
+
+      Assert.assertEquals("Failed to detect end of rows!", false, iterator.hasNext());
+      Assert.assertEquals("Failed to detect end of rows 2nd time!", false, iterator.hasNext());
+    }
+  }
+}
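Beyond one-shot previews, JSONParser is iterable over Rows; a minimal streaming sketch under the same assumptions (the constructor can throw IOException, `json` and the consuming comment are illustrative):

// Hedged sketch: stream every row out of a JSONParser.
try (JSONParser parser = new JSONParser(new StringReader(json), null)) {
  Iterator<Row> rows = parser.iterator();
  while (rows.hasNext()) {
    Object[] values = rows.next().getRow();
    // ... consume values; every value arrives as a String, per the assertions above ...
  }
}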

+ 84 - 0
contrib/views/hive-next/src/test/java/org/apache/ambari/view/hive2/resources/upload/QueryGeneratorTest.java

@@ -0,0 +1,84 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive2.resources.upload;
+
+import org.apache.ambari.view.hive2.client.ColumnDescription;
+import org.apache.ambari.view.hive2.resources.uploads.ColumnDescriptionImpl;
+import org.apache.ambari.view.hive2.resources.uploads.HiveFileType;
+import org.apache.ambari.view.hive2.resources.uploads.query.DeleteQueryInput;
+import org.apache.ambari.view.hive2.resources.uploads.query.InsertFromQueryInput;
+import org.apache.ambari.view.hive2.resources.uploads.query.QueryGenerator;
+import org.apache.ambari.view.hive2.resources.uploads.query.TableInfo;
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.util.ArrayList;
+import java.util.List;
+
+public class QueryGeneratorTest {
+  @Test
+  public void testCreateTextFile() {
+
+    List<ColumnDescriptionImpl> cdl = new ArrayList<>(4);
+    cdl.add(new ColumnDescriptionImpl("col1", ColumnDescription.DataTypes.CHAR.toString(), 0, 10));
+    cdl.add(new ColumnDescriptionImpl("col2", ColumnDescription.DataTypes.STRING.toString(), 1));
+    cdl.add(new ColumnDescriptionImpl("col3", ColumnDescription.DataTypes.DECIMAL.toString(), 2, 10, 5));
+    cdl.add(new ColumnDescriptionImpl("col4", ColumnDescription.DataTypes.VARCHAR.toString(), 3, 40));
+    cdl.add(new ColumnDescriptionImpl("col5", ColumnDescription.DataTypes.INT.toString(), 4));
+
+    TableInfo ti = new TableInfo("databaseName", "tableName", cdl, HiveFileType.TEXTFILE);
+
+    QueryGenerator qg = new QueryGenerator();
+    Assert.assertEquals("Create query for text file not correct ","create table tableName (col1 CHAR(10), col2 STRING, col3 DECIMAL(10,5), col4 VARCHAR(40), col5 INT) ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' STORED AS TEXTFILE;",qg.generateCreateQuery(ti));
+  }
+
+  @Test
+  public void testCreateORC() {
+
+    List<ColumnDescriptionImpl> cdl = new ArrayList<>(4);
+    cdl.add(new ColumnDescriptionImpl("col1", ColumnDescription.DataTypes.CHAR.toString(), 0, 10));
+    cdl.add(new ColumnDescriptionImpl("col2", ColumnDescription.DataTypes.STRING.toString(), 1));
+    cdl.add(new ColumnDescriptionImpl("col3", ColumnDescription.DataTypes.DECIMAL.toString(), 2, 10, 5));
+    cdl.add(new ColumnDescriptionImpl("col4", ColumnDescription.DataTypes.VARCHAR.toString(), 3, 40));
+    cdl.add(new ColumnDescriptionImpl("col5", ColumnDescription.DataTypes.INT.toString(), 4));
+
+    TableInfo ti = new TableInfo("databaseName", "tableName", cdl, HiveFileType.ORC);
+
+    QueryGenerator qg = new QueryGenerator();
+    Assert.assertEquals("Create query for text file not correct ","create table tableName (col1 CHAR(10), col2 STRING, col3 DECIMAL(10,5), col4 VARCHAR(40), col5 INT) STORED AS ORC;",qg.generateCreateQuery(ti));
+  }
+
+  @Test
+  public void testInsertFromQuery() {
+
+    InsertFromQueryInput ifqi = new InsertFromQueryInput("fromDB","fromTable","toDB","toTable");
+
+    QueryGenerator qg = new QueryGenerator();
+    Assert.assertEquals("insert from one table to another not correct ","insert into table toDB.toTable select * from fromDB.fromTable",qg.generateInsertFromQuery(ifqi));
+  }
+
+  @Test
+  public void testDropTableQuery() {
+
+    DeleteQueryInput deleteQueryInput = new DeleteQueryInput("dbName","tableName");
+
+    QueryGenerator qg = new QueryGenerator();
+    Assert.assertEquals("drop table query not correct ","drop table dbName.tableName",qg.generateDropTableQuery(deleteQueryInput ));
+  }
+}
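Condensed from the assertions above, a hedged sketch of what QueryGenerator emits; `cols` stands for the ColumnDescriptionImpl list built in these tests, the database/table names are placeholders, and the column lists are abbreviated:

// Hedged sketch: the strings QueryGenerator produces, per the assertions in this test.
QueryGenerator qg = new QueryGenerator();

String text = qg.generateCreateQuery(new TableInfo("db", "t", cols, HiveFileType.TEXTFILE));
// -> create table t (...) ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' STORED AS TEXTFILE;

String orc = qg.generateCreateQuery(new TableInfo("db", "t", cols, HiveFileType.ORC));
// -> create table t (...) STORED AS ORC;
// note: the create statement omits the database name, per the expected strings above

String insert = qg.generateInsertFromQuery(new InsertFromQueryInput("stagingDb", "stagingTable", "db", "t"));
// -> insert into table db.t select * from stagingDb.stagingTable

String drop = qg.generateDropTableQuery(new DeleteQueryInput("db", "t"));
// -> drop table db.t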

+ 127 - 0
contrib/views/hive-next/src/test/java/org/apache/ambari/view/hive2/resources/upload/TableDataReaderTest.java

@@ -0,0 +1,127 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive2.resources.upload;
+
+import org.apache.ambari.view.hive2.client.Row;
+import org.apache.ambari.view.hive2.resources.uploads.TableDataReader;
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Iterator;
+
+public class TableDataReaderTest {
+
+  private class RowIter implements Iterator<Row> {
+    int numberOfRows;
+    int numberOfCols;
+    int index = 0;
+    ArrayList<Row> rows = new ArrayList<Row>();
+    public RowIter(int numberOfRows, int numberOfCols){
+      this.numberOfRows = numberOfRows;
+      this.numberOfCols = numberOfCols;
+      int x = 0;
+      for(int i = 0; i < this.numberOfRows; i++ ){
+        Object[] objArray = new Object[this.numberOfCols]; // size by column count, not a hardcoded 10
+        for(int j = 0; j < this.numberOfCols; j++ ){
+          objArray[j] = x++ + "";
+        }
+        Row row = new Row(objArray);
+        rows.add(row);
+      }
+    }
+    @Override
+    public boolean hasNext() {
+      return index < numberOfRows;
+    }
+
+    @Override
+    public Row next() {
+      return rows.get(index++);
+    }
+
+    @Override
+    public void remove() {
+      throw new UnsupportedOperationException("Operation not supported.");
+    }
+
+    @Override
+    public String toString() {
+      return "RowIter{" +
+              "index=" + index +
+              ", rows=" + rows +
+              '}';
+    }
+  }
+
+  @Test
+  public void testCSVReader() throws IOException {
+    RowIter rowIter = new RowIter(10,10);
+
+    TableDataReader tableDataReader = new TableDataReader(rowIter);
+
+    char[] first10 = "0,1,2,3,4,".toCharArray();
+    char [] buf = new char[10];
+    tableDataReader.read(buf,0,10);
+
+//    System.out.println("first10 : " + Arrays.toString(first10));
+//    System.out.println("buf : " + Arrays.toString(buf));
+    Assert.assertArrayEquals(first10,buf);
+
+
+    char[] next11 = "5,6,7,8,9\n1".toCharArray();
+    char [] buf1 = new char[11];
+    tableDataReader.read(buf1,0,11);
+
+//    System.out.println("next11 : " + Arrays.toString(next11));
+//    System.out.println("buf1 : " + Arrays.toString(buf1));
+    Assert.assertArrayEquals(next11,buf1);
+
+    // read it fully
+    while( tableDataReader.read(buf,0,10) != -1 );
+
+    char [] last10 = "97,98,99\n,".toCharArray(); // last comma is the left over of previous read.
+//    System.out.println("last10 : " + Arrays.toString(last10));
+//    System.out.println("buf : " + Arrays.toString(buf));
+
+    Assert.assertArrayEquals(last10,buf);
+  }
+
+  @Test
+  public void testEmptyCSVReader() throws IOException {
+    RowIter rowIter = new RowIter(0,0);
+
+    TableDataReader tableDataReader = new TableDataReader(rowIter);
+
+    char[] first10 = new char [10];
+    char [] buf = new char[10];
+    for( int i = 0 ; i < 10 ; i++ ){
+      first10[i] = '\0';
+      buf[i] = '\0';
+    }
+
+    tableDataReader.read(buf,0,10);
+
+//    System.out.println("first10 : " + Arrays.toString(first10));
+//    System.out.println("buf : " + Arrays.toString(buf));
+    Assert.assertArrayEquals(first10,buf);
+  }
+}
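TableDataReader adapts an Iterator&lt;Row&gt; into a java.io.Reader that produces CSV text; a minimal draining sketch under the same assumptions (`rowIterator` is any Iterator&lt;Row&gt;, the buffer size is arbitrary):

// Hedged sketch: drain a TableDataReader into a CSV string.
TableDataReader reader = new TableDataReader(rowIterator);
StringBuilder csv = new StringBuilder();
char[] buf = new char[1024];
int read;
while ((read = reader.read(buf, 0, buf.length)) != -1) {
  csv.append(buf, 0, read);
}
// csv now holds one comma-separated line per Row, e.g. "0,1,2,...,9\n"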

+ 135 - 0
contrib/views/hive-next/src/test/java/org/apache/ambari/view/hive2/resources/upload/XMLParserTest.java

@@ -0,0 +1,135 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive2.resources.upload;
+
+import org.apache.ambari.view.hive2.client.Row;
+import org.apache.ambari.view.hive2.resources.uploads.parsers.xml.XMLParser;
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.io.StringReader;
+import java.util.Iterator;
+
+public class XMLParserTest {
+
+  @Test(expected = IOException.class)
+  public void testEmptyStream() throws Exception {
+    String xml = "";
+
+    try(
+      StringReader sr = new StringReader(xml);
+      XMLParser jp = new XMLParser(sr, null);
+      ) {
+        // creation of XMLParser will throw exception.
+    }
+  }
+
+  @Test
+  public void testEmptyRow() throws Exception {
+    String xml = "<table><row></row></table>";
+    try(
+      StringReader sr = new StringReader(xml);
+      XMLParser jp = new XMLParser(sr, null);
+      ) {
+      Iterator<Row> iterator = jp.iterator();
+
+      Assert.assertEquals("Iterator should not be Empty", true, iterator.hasNext());
+      Assert.assertArrayEquals("Row should be empty",new Object[]{},iterator.next().getRow());
+    }
+  }
+
+
+  @Test
+  public void testEmptyTable() throws Exception {
+    String xml = "<table></table>";
+
+    try(
+      StringReader sr = new StringReader(xml);
+      XMLParser jp = new XMLParser(sr, null);
+      ) {
+      Iterator<Row> iterator = jp.iterator();
+
+      Assert.assertEquals("Iterator Empty!", false, iterator.hasNext());
+    }
+  }
+
+  @Test
+  public void testParse1Row() throws Exception {
+
+    String xml =
+    "<table>"
+    + "<row>"
+    + "<col name=\"key1\">value1</col>"
+    + "<col name=\"key2\">c</col>"
+    + "<col name=\"key3\">10</col>"
+    + "<col name=\"key4\">10.1</col>"
+    + "</row>"
+    + "</table>"  ;
+
+    try(
+      StringReader sr = new StringReader(xml);
+      XMLParser jp = new XMLParser(sr, null)
+    ) {
+      Iterator<Row> iterator = jp.iterator();
+
+      Assert.assertEquals("Iterator Empty!", true, iterator.hasNext());
+      Row row = iterator.next();
+      Row expected = new Row(new Object[]{"value1", "c", "10", "10.1"});
+      Assert.assertEquals("Row not equal!", expected, row);
+
+      Assert.assertEquals("Should report no more rows!", false, iterator.hasNext());
+    }
+  }
+
+  @Test
+  public void testParseMultipleRow() throws Exception {
+    String xml =
+    "<table>"
+    + "<row>"
+    + "<col name=\"key1\">value1</col>"
+    + "<col name=\"key2\">c</col>"
+    + "<col name=\"key3\">10</col>"
+    + "<col name=\"key4\">10.1</col>"
+    + "</row>"
+    + "<row>"
+    + "<col name=\"key1\">value2</col>"
+    + "<col name=\"key2\">c2</col>"
+    + "<col name=\"key3\">102</col>"
+    + "<col name=\"key4\">true</col>"
+    + "</row>"
+    + "</table>"  ;
+
+    try(
+      StringReader sr = new StringReader(xml);
+      XMLParser jp = new XMLParser(sr, null)
+    ) {
+      Iterator<Row> iterator = jp.iterator();
+
+      Assert.assertEquals("Failed to detect 1st row!", true, iterator.hasNext());
+      Assert.assertEquals("Failed to match 1st row!", new Row(new Object[]{"value1", "c", "10", "10.1"}), iterator.next());
+
+      Assert.assertEquals("Failed to detect 2nd row!", true, iterator.hasNext());
+      Assert.assertEquals("Failed to match 2nd row!", new Row(new Object[]{"value2", "c2", "102", Boolean.TRUE.toString()}), iterator.next());
+
+      Assert.assertEquals("Failed to detect end of rows!", false, iterator.hasNext());
+      Assert.assertEquals("Failed to detect end of rows 2nd time!", false, iterator.hasNext());
+    }
+  }
+}