
AMBARI-18246. Clean up Log Feeder (Miklos Gergely via oleewere)

Miklos Gergely 9 years ago
parent
commit
1b07a6dde2
81 changed files with 3034 additions and 3651 deletions
  1. + 225 - 369
      ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/LogFeeder.java
  2. + 27 - 44
      ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/common/ConfigBlock.java
  3. + 6 - 1
      ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/common/LogFeederConstants.java
  4. + 19 - 36
      ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/Filter.java
  5. + 32 - 62
      ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/FilterGrok.java
  6. + 4 - 4
      ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/FilterJSON.java
  7. + 15 - 29
      ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/FilterKeyValue.java
  8. + 319 - 0
      ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/AbstractInputFile.java
  9. + 142 - 171
      ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/Input.java
  10. + 37 - 466
      ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputFile.java
  11. + 119 - 191
      ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputManager.java
  12. + 11 - 6
      ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputMarker.java
  13. + 27 - 397
      ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputS3File.java
  14. + 24 - 16
      ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputSimulate.java
  15. + 5 - 18
      ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/reader/GZIPReader.java
  16. + 3 - 5
      ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/reader/LogsearchReaderFactory.java
  17. + 0 - 194
      ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/FetchConfigFromSolr.java
  18. + 83 - 0
      ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/FilterLogData.java
  19. + 59 - 77
      ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/LogConfigFetcher.java
  20. + 189 - 0
      ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/LogConfigHandler.java
  21. + 3 - 3
      ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/LogFeederFilter.java
  22. + 5 - 5
      ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/LogFeederFilterWrapper.java
  23. + 0 - 59
      ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/LogfeederScheduler.java
  24. + 0 - 62
      ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/filter/ApplyLogFilter.java
  25. + 0 - 49
      ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/filter/DefaultDataFilter.java
  26. + 0 - 53
      ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/filter/FilterLogData.java
  27. + 5 - 9
      ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/mapper/Mapper.java
  28. + 13 - 19
      ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/mapper/MapperDate.java
  29. + 8 - 12
      ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/mapper/MapperFieldName.java
  30. + 13 - 18
      ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/mapper/MapperFieldValue.java
  31. + 5 - 5
      ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/metrics/LogFeederAMSClient.java
  32. + 23 - 8
      ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/metrics/MetricData.java
  33. + 64 - 64
      ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/metrics/MetricsManager.java
  34. + 8 - 5
      ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/Output.java
  35. + 3 - 5
      ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputData.java
  36. + 3 - 4
      ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputDevNull.java
  37. + 18 - 24
      ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputFile.java
  38. + 28 - 42
      ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputHDFSFile.java
  39. + 30 - 28
      ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputKafka.java
  40. + 96 - 109
      ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputManager.java
  41. + 18 - 23
      ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputS3File.java
  42. + 32 - 30
      ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputSolr.java
  43. + 3 - 3
      ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/S3LogPathResolver.java
  44. + 2 - 3
      ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/S3OutputConfiguration.java
  45. + 38 - 26
      ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/S3Uploader.java
  46. + 12 - 11
      ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/spool/LogSpooler.java
  47. + 1 - 1
      ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/spool/LogSpoolerContext.java
  48. + 1 - 1
      ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/spool/LogSpoolerException.java
  49. + 1 - 1
      ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/spool/RolloverCondition.java
  50. + 1 - 1
      ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/spool/RolloverHandler.java
  51. + 5 - 47
      ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/AWSUtil.java
  52. + 61 - 42
      ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/AliasUtil.java
  53. + 5 - 10
      ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/CompressionUtil.java
  54. + 36 - 3
      ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/DateUtil.java
  55. + 57 - 9
      ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/FileUtil.java
  56. + 142 - 369
      ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/LogFeederUtil.java
  57. + 20 - 38
      ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/LogfeederHDFSUtil.java
  58. + 14 - 18
      ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/PlaceholderUtil.java
  59. + 25 - 56
      ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/S3Util.java
  60. + 0 - 116
      ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/AppTest.java
  61. + 26 - 29
      ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/filter/FilterGrokTest.java
  62. + 20 - 21
      ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/filter/FilterJSONTest.java
  63. + 17 - 17
      ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/filter/FilterKeyValueTest.java
  64. + 12 - 12
      ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/input/InputFileTest.java
  65. + 241 - 0
      ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/input/InputManagerTest.java
  66. + 117 - 0
      ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/logconfig/LogConfigHandlerTest.java
  67. + 8 - 9
      ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/mapper/MapperDateTest.java
  68. + 1 - 1
      ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/mapper/MapperFieldNameTest.java
  69. + 1 - 1
      ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/mapper/MapperFieldValueTest.java
  70. + 128 - 0
      ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/metrics/MetrcisManagerTest.java
  71. + 2 - 3
      ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/OutputKafkaTest.java
  72. + 256 - 0
      ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/OutputManagerTest.java
  73. + 7 - 10
      ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/OutputS3FileTest.java
  74. + 2 - 3
      ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/OutputSolrTest.java
  75. + 1 - 1
      ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/S3LogPathResolverTest.java
  76. + 17 - 25
      ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/S3UploaderTest.java
  77. + 1 - 1
      ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/spool/LogSpoolerTest.java
  78. + 0 - 29
      ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/util/AWSUtilTest.java
  79. + 10 - 10
      ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/util/PlaceholderUtilTest.java
  80. + 2 - 2
      ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/util/S3UtilTest.java
  81. + 20 - 0
      ambari-logsearch/ambari-logsearch-logfeeder/src/test/resources/logfeeder.properties

+ 225 - 369
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/LogFeeder.java

@@ -20,14 +20,10 @@
 package org.apache.ambari.logfeeder;
 
 import java.io.BufferedInputStream;
-import java.io.BufferedReader;
 import java.io.File;
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.io.InputStreamReader;
 import java.lang.reflect.Type;
 import java.util.ArrayList;
-import java.util.Collection;
+import java.util.Arrays;
 import java.util.Collections;
 import java.util.Comparator;
 import java.util.Date;
@@ -39,18 +35,21 @@ import java.util.Set;
 
 import org.apache.ambari.logfeeder.filter.Filter;
 import org.apache.ambari.logfeeder.input.Input;
-import org.apache.ambari.logfeeder.input.InputMgr;
+import org.apache.ambari.logfeeder.input.InputManager;
 import org.apache.ambari.logfeeder.input.InputSimulate;
-import org.apache.ambari.logfeeder.logconfig.LogfeederScheduler;
-import org.apache.ambari.logfeeder.metrics.MetricCount;
-import org.apache.ambari.logfeeder.metrics.MetricsMgr;
+import org.apache.ambari.logfeeder.logconfig.LogConfigHandler;
+import org.apache.ambari.logfeeder.metrics.MetricData;
+import org.apache.ambari.logfeeder.metrics.MetricsManager;
 import org.apache.ambari.logfeeder.output.Output;
-import org.apache.ambari.logfeeder.output.OutputMgr;
+import org.apache.ambari.logfeeder.output.OutputManager;
 import org.apache.ambari.logfeeder.util.AliasUtil;
 import org.apache.ambari.logfeeder.util.FileUtil;
 import org.apache.ambari.logfeeder.util.LogFeederUtil;
-import org.apache.ambari.logfeeder.util.AliasUtil.ALIAS_PARAM;
-import org.apache.ambari.logfeeder.util.AliasUtil.ALIAS_TYPE;
+import org.apache.commons.collections.CollectionUtils;
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.io.IOUtils;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.ambari.logfeeder.util.AliasUtil.AliasType;
 import org.apache.hadoop.util.ShutdownHookManager;
 import org.apache.log4j.Level;
 import org.apache.log4j.Logger;
@@ -58,171 +57,142 @@ import org.apache.log4j.Logger;
 import com.google.gson.reflect.TypeToken;
 
 public class LogFeeder {
-  private static final Logger logger = Logger.getLogger(LogFeeder.class);
+  private static final Logger LOG = Logger.getLogger(LogFeeder.class);
 
   private static final int LOGFEEDER_SHUTDOWN_HOOK_PRIORITY = 30;
+  private static final int CHECKPOINT_CLEAN_INTERVAL_MS = 24 * 60 * 60 * 60 * 1000; // 24 hours
 
-  private Collection<Output> outputList = new ArrayList<Output>();
+  private OutputManager outputManager = new OutputManager();
+  private InputManager inputManager = new InputManager();
+  private MetricsManager metricsManager = new MetricsManager();
 
-  private OutputMgr outMgr = new OutputMgr();
-  private InputMgr inputMgr = new InputMgr();
-  private MetricsMgr metricsMgr = new MetricsMgr();
+  public static Map<String, Object> globalConfigs = new HashMap<>();
 
-  public static Map<String, Object> globalMap = null;
-  private String[] inputParams;
-
-  private List<Map<String, Object>> globalConfigList = new ArrayList<Map<String, Object>>();
-  private List<Map<String, Object>> inputConfigList = new ArrayList<Map<String, Object>>();
-  private List<Map<String, Object>> filterConfigList = new ArrayList<Map<String, Object>>();
-  private List<Map<String, Object>> outputConfigList = new ArrayList<Map<String, Object>>();
+  private List<Map<String, Object>> inputConfigList = new ArrayList<>();
+  private List<Map<String, Object>> filterConfigList = new ArrayList<>();
+  private List<Map<String, Object>> outputConfigList = new ArrayList<>();
   
-  private int checkPointCleanIntervalMS = 24 * 60 * 60 * 60 * 1000; // 24 hours
   private long lastCheckPointCleanedMS = 0;
-  
-  private static boolean isLogfeederCompleted = false;
-  
+  private boolean isLogfeederCompleted = false;
   private Thread statLoggerThread = null;
 
-  private LogFeeder(String[] args) {
-    inputParams = args;
+  private LogFeeder() {}
+
+  public void run() {
+    try {
+      init();
+      monitor();
+      waitOnAllDaemonThreads();
+    } catch (Throwable t) {
+      LOG.fatal("Caught exception in main.", t);
+      System.exit(1);
+    }
   }
 
   private void init() throws Throwable {
+    Date startTime = new Date();
 
-    LogFeederUtil.loadProperties("logfeeder.properties", inputParams);
-
-    String configFiles = LogFeederUtil.getStringProperty("logfeeder.config.files");
-    logger.info("logfeeder.config.files=" + configFiles);
+    loadConfigFiles();
+    addSimulatedInputs();
+    mergeAllConfigs();
     
-    String[] configFileList = null;
-    if (configFiles != null) {
-      configFileList = configFiles.split(",");
-    }
-    //list of config those are there in cmd line config dir , end with .json
-    String[] cmdLineConfigs = getConfigFromCmdLine();
-    //merge both config
-    String mergedConfigList[] = LogFeederUtil.mergeArray(configFileList,
-        cmdLineConfigs);
-    //mergedConfigList is null then set default conifg 
-    if (mergedConfigList == null || mergedConfigList.length == 0) {
-      mergedConfigList = LogFeederUtil.getStringProperty("config.file",
-          "config.json").split(",");
-    }
-    for (String configFileName : mergedConfigList) {
-      logger.info("Going to load config file:" + configFileName);
-      //escape space from config file path
-      configFileName= configFileName.replace("\\ ", "%20");
+    LogConfigHandler.handleConfig();
+    
+    outputManager.init();
+    inputManager.init();
+    metricsManager.init();
+    
+    LOG.debug("==============");
+    
+    Date endTime = new Date();
+    LOG.info("Took " + (endTime.getTime() - startTime.getTime()) + " ms to initialize");
+  }
+
+  private void loadConfigFiles() throws Exception {
+    List<String> configFiles = getConfigFiles();
+    for (String configFileName : configFiles) {
+      LOG.info("Going to load config file:" + configFileName);
+      configFileName = configFileName.replace("\\ ", "%20");
       File configFile = new File(configFileName);
       if (configFile.exists() && configFile.isFile()) {
-        logger.info("Config file exists in path."
-          + configFile.getAbsolutePath());
+        LOG.info("Config file exists in path." + configFile.getAbsolutePath());
         loadConfigsUsingFile(configFile);
       } else {
-        // Let's try to load it from class loader
-        logger.info("Trying to load config file from classloader: "
-          + configFileName);
+        LOG.info("Trying to load config file from classloader: " + configFileName);
         loadConfigsUsingClassLoader(configFileName);
-        logger.info("Loaded config file from classloader: "
-          + configFileName);
+        LOG.info("Loaded config file from classloader: " + configFileName);
       }
     }
-    
-    addSimulatedInputs();
-    
-    mergeAllConfigs();
-    
-    LogfeederScheduler.INSTANCE.start();
-    
-    outMgr.setOutputList(outputList);
-    for (Output output : outputList) {
-      output.init();
-    }
-    inputMgr.init();
-    metricsMgr.init();
-    logger.debug("==============");
   }
 
-  private void loadConfigsUsingClassLoader(String configFileName) throws Exception {
-    BufferedInputStream fileInputStream = null;
-    BufferedReader br = null;
-    try {
-      fileInputStream = (BufferedInputStream) this
-        .getClass().getClassLoader()
-        .getResourceAsStream(configFileName);
-      if (fileInputStream != null) {
-        br = new BufferedReader(new InputStreamReader(
-          fileInputStream));
-        String configData = readFile(br);
-        loadConfigs(configData);
-      } else {
-        throw new Exception("Can't find configFile=" + configFileName);
-      }
-    } finally {
-      if (br != null) {
-        try {
-          br.close();
-        } catch (IOException e) {
-        }
-      }
+  private List<String> getConfigFiles() {
+    List<String> configFiles = new ArrayList<>();
+    
+    String logfeederConfigFilesProperty = LogFeederUtil.getStringProperty("logfeeder.config.files");
+    LOG.info("logfeeder.config.files=" + logfeederConfigFilesProperty);
+    if (logfeederConfigFilesProperty != null) {
+      configFiles.addAll(Arrays.asList(logfeederConfigFilesProperty.split(",")));
+    }
 
-      if (fileInputStream != null) {
-        try {
-          fileInputStream.close();
-        } catch (IOException e) {
-        }
+    String inputConfigDir = LogFeederUtil.getStringProperty("input_config_dir");
+    if (StringUtils.isNotEmpty(inputConfigDir)) {
+      File configDirFile = new File(inputConfigDir);
+      List<File> inputConfigFiles = FileUtil.getAllFileFromDir(configDirFile, "json", false);
+      for (File inputConfigFile : inputConfigFiles) {
+        configFiles.add(inputConfigFile.getAbsolutePath());
       }
     }
+    
+    if (CollectionUtils.isEmpty(configFiles)) {
+      String configFileProperty = LogFeederUtil.getStringProperty("config.file", "config.json");
+      configFiles.addAll(Arrays.asList(configFileProperty.split(",")));
+    }
+    
+    return configFiles;
   }
 
-  /**
-   * This method loads the configurations from the given file.
-   */
   private void loadConfigsUsingFile(File configFile) throws Exception {
-    FileInputStream fileInputStream = null;
     try {
-      fileInputStream = new FileInputStream(configFile);
-      BufferedReader br = new BufferedReader(new InputStreamReader(
-        fileInputStream));
-      String configData = readFile(br);
+      String configData = FileUtils.readFileToString(configFile);
       loadConfigs(configData);
     } catch (Exception t) {
-      logger.error("Error opening config file. configFilePath="
-        + configFile.getAbsolutePath());
+      LOG.error("Error opening config file. configFilePath=" + configFile.getAbsolutePath());
       throw t;
-    } finally {
-      if (fileInputStream != null) {
-        try {
-          fileInputStream.close();
-        } catch (Throwable t) {
-          // ignore
-        }
-      }
+    }
+  }
+
+  private void loadConfigsUsingClassLoader(String configFileName) throws Exception {
+    try (BufferedInputStream fis = (BufferedInputStream) this.getClass().getClassLoader().getResourceAsStream(configFileName)) {
+      String configData = IOUtils.toString(fis);
+      loadConfigs(configData);
     }
   }
 
   @SuppressWarnings("unchecked")
   private void loadConfigs(String configData) throws Exception {
-    Type type = new TypeToken<Map<String, Object>>() {
-    }.getType();
-    Map<String, Object> configMap = LogFeederUtil.getGson().fromJson(
-      configData, type);
+    Type type = new TypeToken<Map<String, Object>>() {}.getType();
+    Map<String, Object> configMap = LogFeederUtil.getGson().fromJson(configData, type);
 
     // Get the globals
     for (String key : configMap.keySet()) {
-      if (key.equalsIgnoreCase("global")) {
-        globalConfigList.add((Map<String, Object>) configMap.get(key));
-      } else if (key.equalsIgnoreCase("input")) {
-        List<Map<String, Object>> mapList = (List<Map<String, Object>>) configMap
-          .get(key);
-        inputConfigList.addAll(mapList);
-      } else if (key.equalsIgnoreCase("filter")) {
-        List<Map<String, Object>> mapList = (List<Map<String, Object>>) configMap
-          .get(key);
-        filterConfigList.addAll(mapList);
-      } else if (key.equalsIgnoreCase("output")) {
-        List<Map<String, Object>> mapList = (List<Map<String, Object>>) configMap
-          .get(key);
-        outputConfigList.addAll(mapList);
+      switch (key) {
+        case "global" :
+          globalConfigs.putAll((Map<String, Object>) configMap.get(key));
+          break;
+        case "input" :
+          List<Map<String, Object>> inputConfig = (List<Map<String, Object>>) configMap.get(key);
+          inputConfigList.addAll(inputConfig);
+          break;
+        case "filter" :
+          List<Map<String, Object>> filterConfig = (List<Map<String, Object>>) configMap.get(key);
+          filterConfigList.addAll(filterConfig);
+          break;
+        case "output" :
+          List<Map<String, Object>> outputConfig = (List<Map<String, Object>>) configMap.get(key);
+          outputConfigList.addAll(outputConfig);
+          break;
+        default :
+          LOG.warn("Unknown config key: " + key);
       }
     }
   }
@@ -244,231 +214,175 @@ public class LogFeeder {
   }
 
   private void mergeAllConfigs() {
-    globalMap = mergeConfigs(globalConfigList);
+    loadOutputs();
+    loadInputs();
+    loadFilters();
+    
+    assignOutputsToInputs();
+  }
 
-    sortBlocks(filterConfigList);
-    // First loop for output
+  private void loadOutputs() {
     for (Map<String, Object> map : outputConfigList) {
       if (map == null) {
         continue;
       }
-      mergeBlocks(globalMap, map);
+      mergeBlocks(globalConfigs, map);
 
       String value = (String) map.get("destination");
-      Output output;
-      if (value == null || value.isEmpty()) {
-        logger.error("Output block doesn't have destination element");
-        continue;
-      }
-      String classFullName = AliasUtil.getInstance().readAlias(value, ALIAS_TYPE.OUTPUT, ALIAS_PARAM.KLASS);
-      if (classFullName == null || classFullName.isEmpty()) {
-        logger.error("Destination block doesn't have output element");
+      if (StringUtils.isEmpty(value)) {
+        LOG.error("Output block doesn't have destination element");
         continue;
       }
-      output = (Output) LogFeederUtil.getClassInstance(classFullName, ALIAS_TYPE.OUTPUT);
-
+      Output output = (Output) AliasUtil.getClassInstance(value, AliasType.OUTPUT);
       if (output == null) {
-        logger.error("Destination Object is null");
+        LOG.error("Output object could not be found");
         continue;
       }
-
       output.setDestination(value);
       output.loadConfig(map);
 
-      // We will only check for is_enabled out here. Down below we will
-      // check whether this output is enabled for the input
-      boolean isEnabled = output.getBooleanValue("is_enabled", true);
-      if (isEnabled) {
-        outputList.add(output);
+      // We will only check for is_enabled out here. Down below we will check whether this output is enabled for the input
+      if (output.getBooleanValue("is_enabled", true)) {
         output.logConfgs(Level.INFO);
+        outputManager.add(output);
       } else {
-        logger.info("Output is disabled. So ignoring it. "
-          + output.getShortDescription());
+        LOG.info("Output is disabled. So ignoring it. " + output.getShortDescription());
       }
     }
+  }
 
-    // Second loop for input
+  private void loadInputs() {
     for (Map<String, Object> map : inputConfigList) {
       if (map == null) {
         continue;
       }
-      mergeBlocks(globalMap, map);
+      mergeBlocks(globalConfigs, map);
 
       String value = (String) map.get("source");
-      Input input;
-      if (value == null || value.isEmpty()) {
-        logger.error("Input block doesn't have source element");
-        continue;
-      }
-      String classFullName = AliasUtil.getInstance().readAlias(value, ALIAS_TYPE.INPUT, ALIAS_PARAM.KLASS);
-      if (classFullName == null || classFullName.isEmpty()) {
-        logger.error("Source block doesn't have source element");
+      if (StringUtils.isEmpty(value)) {
+        LOG.error("Input block doesn't have source element");
         continue;
       }
-      input = (Input) LogFeederUtil.getClassInstance(classFullName, ALIAS_TYPE.INPUT);
-
+      Input input = (Input) AliasUtil.getClassInstance(value, AliasType.INPUT);
       if (input == null) {
-        logger.error("Source Object is null");
+        LOG.error("Input object could not be found");
         continue;
       }
-
       input.setType(value);
       input.loadConfig(map);
 
       if (input.isEnabled()) {
-        input.setOutputMgr(outMgr);
-        input.setInputMgr(inputMgr);
-        inputMgr.add(input);
+        input.setOutputManager(outputManager);
+        input.setInputManager(inputManager);
+        inputManager.add(input);
         input.logConfgs(Level.INFO);
       } else {
-        logger.info("Input is disabled. So ignoring it. "
-          + input.getShortDescription());
+        LOG.info("Input is disabled. So ignoring it. " + input.getShortDescription());
       }
     }
+  }
+
+  private void loadFilters() {
+    sortFilters();
 
-    // Third loop is for filter, but we will have to create a filter
-    // instance for each input, so it can maintain the state per input
     List<Input> toRemoveInputList = new ArrayList<Input>();
-    for (Input input : inputMgr.getInputList()) {
-      Filter prevFilter = null;
+    for (Input input : inputManager.getInputList()) {
       for (Map<String, Object> map : filterConfigList) {
         if (map == null) {
           continue;
         }
-        mergeBlocks(globalMap, map);
+        mergeBlocks(globalConfigs, map);
 
         String value = (String) map.get("filter");
-        Filter filter;
-        if (value == null || value.isEmpty()) {
-          logger.error("Filter block doesn't have filter element");
-          continue;
-        }
-
-        String classFullName = AliasUtil.getInstance().readAlias(value, ALIAS_TYPE.FILTER, ALIAS_PARAM.KLASS);
-        if (classFullName == null || classFullName.isEmpty()) {
-          logger.error("Filter block doesn't have filter element");
+        if (StringUtils.isEmpty(value)) {
+          LOG.error("Filter block doesn't have filter element");
           continue;
         }
-        filter = (Filter) LogFeederUtil.getClassInstance(classFullName, ALIAS_TYPE.FILTER);
-
+        Filter filter = (Filter) AliasUtil.getClassInstance(value, AliasType.FILTER);
         if (filter == null) {
-          logger.error("Filter Object is null");
+          LOG.error("Filter object could not be found");
           continue;
         }
         filter.loadConfig(map);
         filter.setInput(input);
 
         if (filter.isEnabled()) {
-          filter.setOutputMgr(outMgr);
-          if (prevFilter == null) {
-            input.setFirstFilter(filter);
-          } else {
-            prevFilter.setNextFilter(filter);
-          }
-          prevFilter = filter;
+          filter.setOutputManager(outputManager);
+          input.addFilter(filter);
           filter.logConfgs(Level.INFO);
         } else {
-          logger.debug("Ignoring filter "
-            + filter.getShortDescription() + " for input "
-            + input.getShortDescription());
+          LOG.debug("Ignoring filter " + filter.getShortDescription() + " for input " + input.getShortDescription());
         }
       }
+      
       if (input.getFirstFilter() == null) {
         toRemoveInputList.add(input);
       }
     }
 
-    // Fourth loop is for associating valid outputs to input
-    Set<Output> usedOutputSet = new HashSet<Output>();
-    for (Input input : inputMgr.getInputList()) {
-      for (Output output : outputList) {
-        boolean ret = LogFeederUtil.isEnabled(output.getConfigs(),
-          input.getConfigs());
-        if (ret) {
-          usedOutputSet.add(output);
-          input.addOutput(output);
-        }
-      }
-    }
-    outputList = usedOutputSet;
-
     for (Input toRemoveInput : toRemoveInputList) {
-      logger.warn("There are no filters, we will ignore this input. "
-        + toRemoveInput.getShortDescription());
-      inputMgr.removeInput(toRemoveInput);
+      LOG.warn("There are no filters, we will ignore this input. " + toRemoveInput.getShortDescription());
+      inputManager.removeInput(toRemoveInput);
     }
   }
 
-  private void sortBlocks(List<Map<String, Object>> blockList) {
-
-    Collections.sort(blockList, new Comparator<Map<String, Object>>() {
+  private void sortFilters() {
+    Collections.sort(filterConfigList, new Comparator<Map<String, Object>>() {
 
       @Override
       public int compare(Map<String, Object> o1, Map<String, Object> o2) {
         Object o1Sort = o1.get("sort_order");
         Object o2Sort = o2.get("sort_order");
-        if (o1Sort == null) {
-          return 0;
-        }
-        if (o2Sort == null) {
+        if (o1Sort == null || o2Sort == null) {
           return 0;
         }
-        int o1Value = 0;
-        if (!(o1Sort instanceof Number)) {
-          try {
-            o1Value = (new Double(Double.parseDouble(o1Sort
-              .toString()))).intValue();
-          } catch (Throwable t) {
-            logger.error("Value is not of type Number. class="
-              + o1Sort.getClass().getName() + ", value="
-              + o1Sort.toString() + ", map=" + o1.toString());
-          }
-        } else {
-          o1Value = ((Number) o1Sort).intValue();
-        }
-        int o2Value = 0;
-        if (!(o2Sort instanceof Integer)) {
+        
+        int o1Value = parseSort(o1, o1Sort);
+        int o2Value = parseSort(o2, o2Sort);
+        
+        return o1Value - o2Value;
+      }
+
+      private int parseSort(Map<String, Object> map, Object o) {
+        if (!(o instanceof Number)) {
           try {
-            o2Value = (new Double(Double.parseDouble(o2Sort
-              .toString()))).intValue();
+            return (new Double(Double.parseDouble(o.toString()))).intValue();
           } catch (Throwable t) {
-            logger.error("Value is not of type Number. class="
-              + o2Sort.getClass().getName() + ", value="
-              + o2Sort.toString() + ", map=" + o2.toString());
+            LOG.error("Value is not of type Number. class=" + o.getClass().getName() + ", value=" + o.toString()
+              + ", map=" + map.toString());
+            return 0;
           }
         } else {
-
+          return ((Number) o).intValue();
         }
-        return o1Value - o2Value;
       }
     });
   }
 
-  private Map<String, Object> mergeConfigs(
-    List<Map<String, Object>> configList) {
-    Map<String, Object> mergedConfig = new HashMap<String, Object>();
-    for (Map<String, Object> config : configList) {
-      mergeBlocks(config, mergedConfig);
+  private void assignOutputsToInputs() {
+    Set<Output> usedOutputSet = new HashSet<Output>();
+    for (Input input : inputManager.getInputList()) {
+      for (Output output : outputManager.getOutputs()) {
+        if (LogFeederUtil.isEnabled(output.getConfigs(), input.getConfigs())) {
+          usedOutputSet.add(output);
+          input.addOutput(output);
+        }
+      }
     }
-    return mergedConfig;
+    outputManager.retainUsedOutputs(usedOutputSet);
   }
 
-  private void mergeBlocks(Map<String, Object> fromMap,
-                           Map<String, Object> toMap) {
-    // Merge the non-string
+  @SuppressWarnings("unchecked")
+  private void mergeBlocks(Map<String, Object> fromMap, Map<String, Object> toMap) {
     for (String key : fromMap.keySet()) {
       Object objValue = fromMap.get(key);
       if (objValue == null) {
         continue;
       }
       if (objValue instanceof Map) {
-        @SuppressWarnings("unchecked")
-        Map<String, Object> globalFields = LogFeederUtil
-          .cloneObject((Map<String, Object>) fromMap.get(key));
+        Map<String, Object> globalFields = LogFeederUtil.cloneObject((Map<String, Object>) objValue);
 
-        @SuppressWarnings("unchecked")
-        Map<String, Object> localFields = (Map<String, Object>) toMap
-          .get(key);
+        Map<String, Object> localFields = (Map<String, Object>) toMap.get(key);
         if (localFields == null) {
           localFields = new HashMap<String, Object>();
           toMap.put(key, localFields);
@@ -477,8 +391,7 @@ public class LogFeeder {
         if (globalFields != null) {
           for (String fieldKey : globalFields.keySet()) {
             if (!localFields.containsKey(fieldKey)) {
-              localFields.put(fieldKey,
-                globalFields.get(fieldKey));
+              localFields.put(fieldKey, globalFields.get(fieldKey));
             }
           }
         }
@@ -493,11 +406,29 @@ public class LogFeeder {
     }
   }
 
+  private class JVMShutdownHook extends Thread {
+
+    public void run() {
+      try {
+        LOG.info("Processing is shutting down.");
+
+        inputManager.close();
+        outputManager.close();
+        inputManager.checkInAll();
+
+        logStats();
+
+        LOG.info("LogSearch is exiting.");
+      } catch (Throwable t) {
+        // Ignore
+      }
+    }
+  }
+
   private void monitor() throws Exception {
-    inputMgr.monitor();
+    inputManager.monitor();
     JVMShutdownHook logfeederJVMHook = new JVMShutdownHook();
-    ShutdownHookManager.get().addShutdownHook(logfeederJVMHook,
-        LOGFEEDER_SHUTDOWN_HOOK_PRIORITY);
+    ShutdownHookManager.get().addShutdownHook(logfeederJVMHook, LOGFEEDER_SHUTDOWN_HOOK_PRIORITY);
     
     statLoggerThread = new Thread("statLogger") {
 
@@ -512,17 +443,14 @@ public class LogFeeder {
           try {
             logStats();
           } catch (Throwable t) {
-            logger.error(
-              "LogStats: Caught exception while logging stats.",
-              t);
+            LOG.error("LogStats: Caught exception while logging stats.", t);
           }
 
-          if (System.currentTimeMillis() > (lastCheckPointCleanedMS + checkPointCleanIntervalMS)) {
+          if (System.currentTimeMillis() > (lastCheckPointCleanedMS + CHECKPOINT_CLEAN_INTERVAL_MS)) {
             lastCheckPointCleanedMS = System.currentTimeMillis();
-            inputMgr.cleanCheckPointFiles();
+            inputManager.cleanCheckPointFiles();
           }
 
-          // logfeeder is stopped then break the loop
           if (isLogfeederCompleted) {
             break;
           }
@@ -536,84 +464,20 @@ public class LogFeeder {
   }
 
   private void logStats() {
-    inputMgr.logStats();
-    outMgr.logStats();
-
-    if (metricsMgr.isMetricsEnabled()) {
-      List<MetricCount> metricsList = new ArrayList<MetricCount>();
-      inputMgr.addMetricsContainers(metricsList);
-      outMgr.addMetricsContainers(metricsList);
-      metricsMgr.useMetrics(metricsList);
-    }
-  }
-
-  private String readFile(BufferedReader br) throws Exception {
-    try {
-      StringBuilder sb = new StringBuilder();
-      String line = br.readLine();
-      while (line != null) {
-        sb.append(line);
-        line = br.readLine();
-      }
-      return sb.toString();
-    } catch (Exception t) {
-      logger.error("Error loading properties file.", t);
-      throw t;
-    }
-  }
-
-  public Collection<Output> getOutputList() {
-    return outputList;
-  }
-
-  public OutputMgr getOutMgr() {
-    return outMgr;
-  }
-
-  public static void main(String[] args) {
-    LogFeeder logFeeder = new LogFeeder(args);
-    logFeeder.run();
-  }
-
-  public void run() {
-    try {
-      Date startTime = new Date();
-      this.init();
-      Date endTime = new Date();
-      logger.info("Took " + (endTime.getTime() - startTime.getTime())
-        + " ms to initialize");
-      this.monitor();
-      //wait for all background thread before stop main thread
-      this.waitOnAllDaemonThreads();
-    } catch (Throwable t) {
-      logger.fatal("Caught exception in main.", t);
-      System.exit(1);
+    inputManager.logStats();
+    outputManager.logStats();
+
+    if (metricsManager.isMetricsEnabled()) {
+      List<MetricData> metricsList = new ArrayList<MetricData>();
+      inputManager.addMetricsContainers(metricsList);
+      outputManager.addMetricsContainers(metricsList);
+      metricsManager.useMetrics(metricsList);
     }
   }
 
-  private class JVMShutdownHook extends Thread {
-
-    public void run() {
-      try {
-        logger.info("Processing is shutting down.");
-
-        inputMgr.close();
-        outMgr.close();
-        inputMgr.checkInAll();
-
-        logStats();
-
-        logger.info("LogSearch is exiting.");
-      } catch (Throwable t) {
-        // Ignore
-      }
-    }
-  }
-  
   private void waitOnAllDaemonThreads() {
-    String foreground = LogFeederUtil.getStringProperty("foreground");
-    if (foreground != null && foreground.equalsIgnoreCase("true")) {
-      inputMgr.waitOnAllInputs();
+    if ("true".equals(LogFeederUtil.getStringProperty("foreground"))) {
+      inputManager.waitOnAllInputs();
       isLogfeederCompleted = true;
       if (statLoggerThread != null) {
         try {
@@ -624,24 +488,16 @@ public class LogFeeder {
       }
     }
   }
-  
-  private String[] getConfigFromCmdLine() {
-    String inputConfigDir = LogFeederUtil.getStringProperty("input_config_dir");
-    if (inputConfigDir != null && !inputConfigDir.isEmpty()) {
-      String[] searchFileWithExtensions = new String[] { "json" };
-      File configDirFile = new File(inputConfigDir);
-      List<File> configFiles = FileUtil.getAllFileFromDir(configDirFile,
-          searchFileWithExtensions, false);
-      if (configFiles != null && configFiles.size() > 0) {
-        String configPaths[] = new String[configFiles.size()];
-        for (int index = 0; index < configFiles.size(); index++) {
-          File configFile = configFiles.get(index);
-          String configFilePath = configFile.getAbsolutePath();
-          configPaths[index] = configFilePath;
-        }
-        return configPaths;
-      }
+
+  public static void main(String[] args) {
+    try {
+      LogFeederUtil.loadProperties("logfeeder.properties", args);
+    } catch (Throwable t) {
+      LOG.warn("Could not load logfeeder properites");
+      System.exit(1);
     }
-    return new String[0];
+
+    LogFeeder logFeeder = new LogFeeder();
+    logFeeder.run();
   }
 }
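
As context for the loadConfigs() rewrite above, here is a minimal, self-contained sketch of the Gson TypeToken pattern it uses to split a Log Feeder style JSON config into its top-level sections. The sample JSON and the class name are illustrative, not taken from the commit.

import java.lang.reflect.Type;
import java.util.List;
import java.util.Map;

import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;

public class ConfigParseSketch {
  @SuppressWarnings("unchecked")
  public static void main(String[] args) {
    // Illustrative config with the same top-level keys loadConfigs() dispatches on.
    String configData = "{\"global\":{\"add_fields\":{\"cluster\":\"cl1\"}},"
        + "\"input\":[{\"source\":\"file\",\"path\":\"/var/log/example.log\"}]}";

    // Gson needs an explicit Type to deserialize into a generic Map<String, Object>.
    Type type = new TypeToken<Map<String, Object>>() {}.getType();
    Map<String, Object> configMap = new Gson().fromJson(configData, type);

    // JSON objects come back as Maps, JSON arrays as Lists, so the casts below hold.
    Map<String, Object> global = (Map<String, Object>) configMap.get("global");
    List<Map<String, Object>> inputs = (List<Map<String, Object>>) configMap.get("input");
    System.out.println("global=" + global + ", inputs=" + inputs);
  }
}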

+ 27 - 44
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/common/ConfigBlock.java

@@ -23,27 +23,27 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
-import org.apache.ambari.logfeeder.metrics.MetricCount;
+import org.apache.ambari.logfeeder.metrics.MetricData;
 import org.apache.ambari.logfeeder.util.LogFeederUtil;
+import org.apache.commons.collections.MapUtils;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.log4j.Logger;
 import org.apache.log4j.Priority;
 
 
 public abstract class ConfigBlock {
-  static private Logger logger = Logger.getLogger(ConfigBlock.class);
+  private static final Logger LOG = Logger.getLogger(ConfigBlock.class);
 
   private boolean drain = false;
 
   protected Map<String, Object> configs;
   protected Map<String, String> contextFields = new HashMap<String, String>();
-  public MetricCount statMetric = new MetricCount();
-
-  /**
-   *
-   */
+  public MetricData statMetric = new MetricData(getStatMetricName(), false);
+  protected String getStatMetricName() {
+    return null;
+  }
+  
   public ConfigBlock() {
-    super();
   }
 
   /**
@@ -58,10 +58,7 @@ public abstract class ConfigBlock {
     return this.getClass().getSimpleName();
   }
 
-  /**
-   * @param metricsList
-   */
-  public void addMetricsContainers(List<MetricCount> metricsList) {
+  public void addMetricsContainers(List<MetricData> metricsList) {
     metricsList.add(statMetric);
   }
 
@@ -89,25 +86,21 @@ public abstract class ConfigBlock {
     boolean isEnabled = getBooleanValue("is_enabled", true);
     if (isEnabled) {
       // Let's check for static conditions
-      Map<String, Object> conditions = (Map<String, Object>) configs
-        .get("conditions");
+      Map<String, Object> conditions = (Map<String, Object>) configs.get("conditions");
       boolean allow = true;
-      if (conditions != null && conditions.size() > 0) {
+      if (MapUtils.isNotEmpty(conditions)) {
         allow = false;
         for (String conditionType : conditions.keySet()) {
           if (conditionType.equalsIgnoreCase("fields")) {
-            Map<String, Object> fields = (Map<String, Object>) conditions
-              .get("fields");
+            Map<String, Object> fields = (Map<String, Object>) conditions.get("fields");
             for (String fieldName : fields.keySet()) {
               Object values = fields.get(fieldName);
               if (values instanceof String) {
-                allow = isFieldConditionMatch(fieldName,
-                  (String) values);
+                allow = isFieldConditionMatch(fieldName, (String) values);
               } else {
                 List<String> listValues = (List<String>) values;
                 for (String stringValue : listValues) {
-                  allow = isFieldConditionMatch(fieldName,
-                    stringValue);
+                  allow = isFieldConditionMatch(fieldName, stringValue);
                   if (allow) {
                     break;
                   }
@@ -135,8 +128,7 @@ public abstract class ConfigBlock {
       allow = true;
     } else {
       @SuppressWarnings("unchecked")
-      Map<String, Object> addFields = (Map<String, Object>) configs
-        .get("add_fields");
+      Map<String, Object> addFields = (Map<String, Object>) configs.get("add_fields");
       if (addFields != null && addFields.get(fieldName) != null) {
         String addFieldValue = (String) addFields.get(fieldName);
         if (stringValue.equalsIgnoreCase(addFieldValue)) {
@@ -184,12 +176,7 @@ public abstract class ConfigBlock {
     String strValue = getStringValue(key);
     boolean retValue = defaultValue;
     if (!StringUtils.isEmpty(strValue)) {
-      if (strValue.equalsIgnoreCase("true")
-        || strValue.equalsIgnoreCase("yes")) {
-        retValue = true;
-      } else {
-        retValue = false;
-      }
+      retValue = (strValue.equalsIgnoreCase("true") || strValue.equalsIgnoreCase("yes"));
     }
     return retValue;
   }
@@ -201,8 +188,7 @@ public abstract class ConfigBlock {
       try {
         retValue = Integer.parseInt(strValue);
       } catch (Throwable t) {
-        logger.error("Error parsing integer value. key=" + key
-          + ", value=" + strValue);
+        LOG.error("Error parsing integer value. key=" + key + ", value=" + strValue);
       }
     }
     return retValue;
@@ -215,8 +201,7 @@ public abstract class ConfigBlock {
       try {
         retValue = Long.parseLong(strValue);
       } catch (Throwable t) {
-        logger.error("Error parsing long value. key=" + key + ", value="
-            + strValue);
+        LOG.error("Error parsing long value. key=" + key + ", value=" + strValue);
       }
     }
     return retValue;
@@ -227,29 +212,27 @@ public abstract class ConfigBlock {
   }
 
   public void incrementStat(int count) {
-    statMetric.count += count;
+    statMetric.value += count;
   }
 
-  public void logStatForMetric(MetricCount metric, String prefixStr) {
-    LogFeederUtil.logStatForMetric(metric, prefixStr, ", key="
-      + getShortDescription());
+  public void logStatForMetric(MetricData metric, String prefixStr) {
+    LogFeederUtil.logStatForMetric(metric, prefixStr, ", key=" + getShortDescription());
   }
 
-  synchronized public void logStat() {
+  public synchronized void logStat() {
     logStatForMetric(statMetric, "Stat");
   }
 
   public boolean logConfgs(Priority level) {
-    if (level.toInt() == Priority.INFO_INT && !logger.isInfoEnabled()) {
+    if (level.toInt() == Priority.INFO_INT && !LOG.isInfoEnabled()) {
       return false;
     }
-    if (level.toInt() == Priority.DEBUG_INT && !logger.isDebugEnabled()) {
+    if (level.toInt() == Priority.DEBUG_INT && !LOG.isDebugEnabled()) {
       return false;
     }
-    logger.log(level, "Printing configuration Block="
-      + getShortDescription());
-    logger.log(level, "configs=" + configs);
-    logger.log(level, "contextFields=" + contextFields);
+    LOG.log(level, "Printing configuration Block=" + getShortDescription());
+    LOG.log(level, "configs=" + configs);
+    LOG.log(level, "contextFields=" + contextFields);
     return true;
   }
 
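The getBooleanValue() simplification above is easy to verify in isolation. A small sketch of the same coercion rule follows, assuming commons-lang3 on the classpath; the class name and standalone helper are mine, not the commit's.

import org.apache.commons.lang3.StringUtils;

public class BooleanValueSketch {
  // Mirrors the simplified rule: "true" or "yes" (any case) => true,
  // any other non-empty value => false, null/empty => the supplied default.
  static boolean getBooleanValue(String strValue, boolean defaultValue) {
    if (StringUtils.isEmpty(strValue)) {
      return defaultValue;
    }
    return strValue.equalsIgnoreCase("true") || strValue.equalsIgnoreCase("yes");
  }

  public static void main(String[] args) {
    System.out.println(getBooleanValue("Yes", false));  // true
    System.out.println(getBooleanValue("off", true));   // false
    System.out.println(getBooleanValue(null, true));    // true (falls back to default)
  }
}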

+ 6 - 1
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/LogFeederConstants.java → ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/common/LogFeederConstants.java

@@ -16,13 +16,14 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.apache.ambari.logfeeder.logconfig;
+package org.apache.ambari.logfeeder.common;
 
 public class LogFeederConstants {
 
   public static final String ALL = "all";
   public static final String LOGFEEDER_FILTER_NAME = "log_feeder_config";
   public static final String LOG_LEVEL_UNKNOWN = "UNKNOWN";
+  
   // solr fields
   public static final String SOLR_LEVEL = "level";
   public static final String SOLR_COMPONENT = "type";
@@ -31,4 +32,8 @@ public class LogFeederConstants {
   // UserConfig Constants History
   public static final String VALUES = "jsons";
   public static final String ROW_TYPE = "rowtype";
+  
+  // S3 Constants
+  public static final String S3_PATH_START_WITH = "s3://";
+  public static final String S3_PATH_SEPARATOR = "/";
 }
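
The two new S3 constants are the kind of values that get concatenated when composing object paths. A hypothetical usage sketch is below; the buildS3Path helper is not part of the commit.

public class S3PathSketch {
  // Same values as the constants added to LogFeederConstants.
  static final String S3_PATH_START_WITH = "s3://";
  static final String S3_PATH_SEPARATOR = "/";

  // Builds e.g. s3://my-bucket/cluster1/hdfs/namenode.log
  static String buildS3Path(String bucket, String... segments) {
    StringBuilder sb = new StringBuilder(S3_PATH_START_WITH).append(bucket);
    for (String segment : segments) {
      sb.append(S3_PATH_SEPARATOR).append(segment);
    }
    return sb.toString();
  }

  public static void main(String[] args) {
    System.out.println(buildS3Path("my-bucket", "cluster1", "hdfs", "namenode.log"));
  }
}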

+ 19 - 36
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/Filter.java

@@ -29,21 +29,19 @@ import org.apache.ambari.logfeeder.common.LogfeederException;
 import org.apache.ambari.logfeeder.input.Input;
 import org.apache.ambari.logfeeder.input.InputMarker;
 import org.apache.ambari.logfeeder.mapper.Mapper;
-import org.apache.ambari.logfeeder.metrics.MetricCount;
-import org.apache.ambari.logfeeder.output.OutputMgr;
+import org.apache.ambari.logfeeder.metrics.MetricData;
+import org.apache.ambari.logfeeder.output.OutputManager;
 import org.apache.ambari.logfeeder.util.AliasUtil;
-import org.apache.ambari.logfeeder.util.LogFeederUtil;
-import org.apache.ambari.logfeeder.util.AliasUtil.ALIAS_PARAM;
-import org.apache.ambari.logfeeder.util.AliasUtil.ALIAS_TYPE;
+import org.apache.ambari.logfeeder.util.AliasUtil.AliasType;
 import org.apache.log4j.Logger;
 import org.apache.log4j.Priority;
 
 public abstract class Filter extends ConfigBlock {
-  private static final Logger logger = Logger.getLogger(Filter.class);
+  private static final Logger LOG = Logger.getLogger(Filter.class);
 
   protected Input input;
   private Filter nextFilter = null;
-  private OutputMgr outputMgr;
+  private OutputManager outputManager;
 
   private Map<String, List<Mapper>> postFieldValueMappers = new HashMap<String, List<Mapper>>();
 
@@ -74,15 +72,12 @@ public abstract class Filter extends ConfigBlock {
       }
       for (Map<String, Object> mapObject : mapList) {
         for (String mapClassCode : mapObject.keySet()) {
-          Mapper mapper = getMapper(mapClassCode);
+          Mapper mapper = (Mapper) AliasUtil.getClassInstance(mapClassCode, AliasType.MAPPER);
           if (mapper == null) {
             break;
           }
-          if (mapper.init(getInput().getShortDescription(),
-            fieldName, mapClassCode,
-            mapObject.get(mapClassCode))) {
-            List<Mapper> fieldMapList = postFieldValueMappers
-              .get(fieldName);
+          if (mapper.init(getInput().getShortDescription(), fieldName, mapClassCode, mapObject.get(mapClassCode))) {
+            List<Mapper> fieldMapList = postFieldValueMappers.get(fieldName);
             if (fieldMapList == null) {
               fieldMapList = new ArrayList<Mapper>();
               postFieldValueMappers.put(fieldName, fieldMapList);
@@ -94,17 +89,8 @@ public abstract class Filter extends ConfigBlock {
     }
   }
 
-  private Mapper getMapper(String mapClassCode) {
-    String classFullName = AliasUtil.getInstance().readAlias(mapClassCode, ALIAS_TYPE.MAPPER, ALIAS_PARAM.KLASS);
-    if (classFullName != null && !classFullName.isEmpty()) {
-      Mapper mapper = (Mapper) LogFeederUtil.getClassInstance(classFullName, ALIAS_TYPE.MAPPER);
-      return mapper;
-    }
-    return null;
-  }
-
-  public void setOutputMgr(OutputMgr outputMgr) {
-    this.outputMgr = outputMgr;
+  public void setOutputManager(OutputManager outputManager) {
+    this.outputManager = outputManager;
   }
 
   public Filter getNextFilter() {
@@ -131,25 +117,23 @@ public abstract class Filter extends ConfigBlock {
     if (nextFilter != null) {
       nextFilter.apply(inputStr, inputMarker);
     } else {
-      outputMgr.write(inputStr, inputMarker);
+      outputManager.write(inputStr, inputMarker);
     }
   }
 
   public void apply(Map<String, Object> jsonObj, InputMarker inputMarker) throws LogfeederException {
-    if (postFieldValueMappers.size() > 0) {
-      for (String fieldName : postFieldValueMappers.keySet()) {
-        Object value = jsonObj.get(fieldName);
-        if (value != null) {
-          for (Mapper mapper : postFieldValueMappers.get(fieldName)) {
-            value = mapper.apply(jsonObj, value);
-          }
+    for (String fieldName : postFieldValueMappers.keySet()) {
+      Object value = jsonObj.get(fieldName);
+      if (value != null) {
+        for (Mapper mapper : postFieldValueMappers.get(fieldName)) {
+          value = mapper.apply(jsonObj, value);
         }
       }
     }
     if (nextFilter != null) {
       nextFilter.apply(jsonObj, inputMarker);
     } else {
-      outputMgr.write(jsonObj, inputMarker);
+      outputManager.write(jsonObj, inputMarker);
     }
   }
 
@@ -193,16 +177,15 @@ public abstract class Filter extends ConfigBlock {
     if (!super.logConfgs(level)) {
       return false;
     }
-    logger.log(level, "input=" + input.getShortDescription());
+    LOG.log(level, "input=" + input.getShortDescription());
     return true;
   }
 
   @Override
-  public void addMetricsContainers(List<MetricCount> metricsList) {
+  public void addMetricsContainers(List<MetricData> metricsList) {
     super.addMetricsContainers(metricsList);
     if (nextFilter != null) {
       nextFilter.addMetricsContainers(metricsList);
     }
   }
-
 }
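
The apply() methods above implement a chain of responsibility: each filter transforms the record, then either forwards it to nextFilter or, at the end of the chain, hands it to the OutputManager. A stripped-down sketch of that control flow follows; Stage and its fields are illustrative stand-ins, not the commit's classes.

import java.util.HashMap;
import java.util.Map;

public class FilterChainSketch {
  // Minimal stand-in for Filter: mutate the record, then forward or emit it.
  static class Stage {
    private final String field;
    private final String value;
    private Stage next;

    Stage(String field, String value) {
      this.field = field;
      this.value = value;
    }

    void setNext(Stage next) {
      this.next = next;
    }

    void apply(Map<String, Object> record) {
      record.put(field, value);                  // the "filtering" step
      if (next != null) {
        next.apply(record);                      // forward along the chain
      } else {
        System.out.println("write: " + record);  // end of chain: hand off to output
      }
    }
  }

  public static void main(String[] args) {
    Stage grokLike = new Stage("level", "INFO");
    Stage keyValueLike = new Stage("component", "namenode");
    grokLike.setNext(keyValueLike);

    Map<String, Object> record = new HashMap<>();
    record.put("log_message", "example line");
    grokLike.apply(record);
  }
}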

+ 32 - 62
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/FilterGrok.java

@@ -36,7 +36,7 @@ import oi.thekraken.grok.api.exception.GrokException;
 
 import org.apache.ambari.logfeeder.common.LogfeederException;
 import org.apache.ambari.logfeeder.input.InputMarker;
-import org.apache.ambari.logfeeder.metrics.MetricCount;
+import org.apache.ambari.logfeeder.metrics.MetricData;
 import org.apache.ambari.logfeeder.util.LogFeederUtil;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.log4j.Level;
@@ -45,7 +45,7 @@ import org.apache.log4j.Logger;
 import com.google.gson.reflect.TypeToken;
 
 public class FilterGrok extends Filter {
-  static private Logger logger = Logger.getLogger(FilterGrok.class);
+  private static final Logger LOG = Logger.getLogger(FilterGrok.class);
 
   private static final String GROK_PATTERN_FILE = "grok-patterns";
 
@@ -68,25 +68,23 @@ public class FilterGrok extends Filter {
 
   private Type jsonType = new TypeToken<Map<String, String>>() {}.getType();
 
-  private MetricCount grokErrorMetric = new MetricCount();
+  private MetricData grokErrorMetric = new MetricData("filter.error.grok", false);
 
   @Override
   public void init() throws Exception {
     super.init();
 
     try {
-      grokErrorMetric.metricsName = "filter.error.grok";
       messagePattern = escapePattern(getStringValue("message_pattern"));
       multilinePattern = escapePattern(getStringValue("multiline_pattern"));
       sourceField = getStringValue("source_field");
       removeSourceField = getBooleanValue("remove_source_field",
         removeSourceField);
 
-      logger.info("init() done. grokPattern=" + messagePattern
-        + ", multilinePattern=" + multilinePattern + ", "
-        + getShortDescription());
+      LOG.info("init() done. grokPattern=" + messagePattern + ", multilinePattern=" + multilinePattern + ", " +
+      getShortDescription());
       if (StringUtils.isEmpty(messagePattern)) {
-        logger.error("message_pattern is not set for filter.");
+        LOG.error("message_pattern is not set for filter.");
         return;
       }
       extractNamedParams(messagePattern, namedParamList);
@@ -102,9 +100,7 @@ public class FilterGrok extends Filter {
         grokMultiline.compile(multilinePattern);
       }
     } catch (Throwable t) {
-      logger.fatal(
-        "Caught exception while initializing Grok. multilinePattern="
-          + multilinePattern + ", messagePattern="
+      LOG.fatal("Caught exception while initializing Grok. multilinePattern=" + multilinePattern + ", messagePattern="
           + messagePattern, t);
       grokMessage = null;
       grokMultiline = null;
@@ -123,9 +119,10 @@ public class FilterGrok extends Filter {
   }
 
   private void extractNamedParams(String patternStr, Set<String> paramList) {
-    String grokRegEx = "%\\{" + "(?<name>" + "(?<pattern>[A-z0-9]+)"
-      + "(?::(?<subname>[A-z0-9_:]+))?" + ")" + "(?:=(?<definition>"
-      + "(?:" + "(?:[^{}]+|\\.+)+" + ")+" + ")" + ")?" + "\\}";
+    String grokRegEx = "%\\{" +
+        "(?<name>" + "(?<pattern>[A-z0-9]+)" + "(?::(?<subname>[A-z0-9_:]+))?" + ")" +
+        "(?:=(?<definition>" + "(?:" + "(?:[^{}]+|\\.+)+" + ")+" + ")" + ")?" +
+        "\\}";
 
     Pattern pattern = Pattern.compile(grokRegEx);
     java.util.regex.Matcher matcher = pattern.matcher(patternStr);
@@ -139,28 +136,23 @@ public class FilterGrok extends Filter {
 
   private boolean loadPatterns(Grok grok) {
     InputStreamReader grokPatternsReader = null;
-    logger.info("Loading pattern file " + GROK_PATTERN_FILE);
+    LOG.info("Loading pattern file " + GROK_PATTERN_FILE);
     try {
-      BufferedInputStream fileInputStream = (BufferedInputStream) this
-        .getClass().getClassLoader()
-        .getResourceAsStream(GROK_PATTERN_FILE);
+      BufferedInputStream fileInputStream =
+          (BufferedInputStream) this.getClass().getClassLoader().getResourceAsStream(GROK_PATTERN_FILE);
       if (fileInputStream == null) {
-        logger.fatal("Couldn't load grok-patterns file "
-          + GROK_PATTERN_FILE + ". Things will not work");
+        LOG.fatal("Couldn't load grok-patterns file " + GROK_PATTERN_FILE + ". Things will not work");
         return false;
       }
       grokPatternsReader = new InputStreamReader(fileInputStream);
     } catch (Throwable t) {
-      logger.fatal("Error reading grok-patterns file " + GROK_PATTERN_FILE
-        + " from classpath. Grok filtering will not work.", t);
+      LOG.fatal("Error reading grok-patterns file " + GROK_PATTERN_FILE + " from classpath. Grok filtering will not work.", t);
       return false;
     }
     try {
       grok.addPatternFromReader(grokPatternsReader);
     } catch (GrokException e) {
-      logger.fatal(
-        "Error loading patterns from grok-patterns reader for file "
-          + GROK_PATTERN_FILE, e);
+      LOG.fatal("Error loading patterns from grok-patterns reader for file " + GROK_PATTERN_FILE, e);
       return false;
     }
 
@@ -177,8 +169,7 @@ public class FilterGrok extends Filter {
       String jsonStr = grokMultiline.capture(inputStr);
       if (!"{}".equals(jsonStr)) {
         if (strBuff != null) {
-          Map<String, Object> jsonObj = Collections
-            .synchronizedMap(new HashMap<String, Object>());
+          Map<String, Object> jsonObj = Collections.synchronizedMap(new HashMap<String, Object>());
           try {
             applyMessage(strBuff.toString(), jsonObj, currMultilineJsonStr);
           } finally {
@@ -192,15 +183,13 @@ public class FilterGrok extends Filter {
       if (strBuff == null) {
         strBuff = new StringBuilder();
       } else {
-        strBuff.append('\r');
-        strBuff.append('\n');
+        strBuff.append("\r\n");
       }
       strBuff.append(inputStr);
       savedInputMarker = inputMarker;
     } else {
       savedInputMarker = inputMarker;
-      Map<String, Object> jsonObj = Collections
-        .synchronizedMap(new HashMap<String, Object>());
+      Map<String, Object> jsonObj = Collections.synchronizedMap(new HashMap<String, Object>());
       applyMessage(inputStr, jsonObj, null);
     }
   }
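
The apply() logic above buffers continuation lines until the multiline pattern marks the start of the next event, then flushes the accumulated buffer through applyMessage(). A minimal standalone sketch of that accumulation, assuming a hypothetical start-of-event regex in place of the compiled multiline Grok pattern:

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;
    import java.util.regex.Pattern;

    public class MultilineBufferSketch {
      // Hypothetical stand-in for the configured multiline_pattern
      private static final Pattern EVENT_START = Pattern.compile("^\\d{4}-\\d{2}-\\d{2}");

      static List<String> assemble(List<String> rawLines) {
        List<String> events = new ArrayList<>();
        StringBuilder strBuff = null;
        for (String line : rawLines) {
          if (EVENT_START.matcher(line).find() && strBuff != null) {
            events.add(strBuff.toString());          // flush the previous event
            strBuff = null;
          }
          if (strBuff == null) {
            strBuff = new StringBuilder(line);
          } else {
            strBuff.append("\r\n").append(line);     // same separator the filter uses
          }
        }
        if (strBuff != null) {
          events.add(strBuff.toString());            // final flush, analogous to flush()
        }
        return events;
      }

      public static void main(String[] args) {
        List<String> lines = Arrays.asList(
            "2016-08-22 10:15:00 ERROR something failed",
            "java.lang.RuntimeException: boom",
            "2016-08-22 10:15:01 INFO recovered");
        assemble(lines).forEach(e -> System.out.println("EVENT >>> " + e));
      }
    }
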
@@ -216,14 +205,8 @@ public class FilterGrok extends Filter {
     }
   }
 
-  /**
-   * @param inputStr
-   * @param jsonObj
-   * @throws LogfeederException 
-   */
-  private void applyMessage(String inputStr, Map<String, Object> jsonObj,
-                            String multilineJsonStr) throws LogfeederException {
-    String jsonStr = grokParse(inputStr);
+  private void applyMessage(String inputStr, Map<String, Object> jsonObj, String multilineJsonStr) throws LogfeederException {
+    String jsonStr = grokMessage.capture(inputStr);
 
     boolean parseError = false;
     if ("{}".equals(jsonStr)) {
@@ -239,8 +222,7 @@ public class FilterGrok extends Filter {
     if (parseError) {
       jsonStr = multilineJsonStr;
     }
-    Map<String, String> jsonSrc = LogFeederUtil.getGson().fromJson(jsonStr,
-      jsonType);
+    Map<String, String> jsonSrc = LogFeederUtil.getGson().fromJson(jsonStr, jsonType);
     for (String namedParam : namedParamList) {
       if (jsonSrc.get(namedParam) != null) {
         jsonObj.put(namedParam, jsonSrc.get(namedParam));
@@ -260,37 +242,26 @@ public class FilterGrok extends Filter {
       }
     }
     super.apply(jsonObj, savedInputMarker);
-    statMetric.count++;
-  }
-
-  public String grokParse(String inputStr) {
-    String jsonStr = grokMessage.capture(inputStr);
-    return jsonStr;
+    statMetric.value++;
   }
 
   private void logParseError(String inputStr) {
-    grokErrorMetric.count++;
-    final String LOG_MESSAGE_KEY = this.getClass().getSimpleName()
-      + "_PARSEERROR";
+    grokErrorMetric.value++;
+    String logMessageKey = this.getClass().getSimpleName() + "_PARSEERROR";
     int inputStrLength = inputStr != null ? inputStr.length() : 0;
-    LogFeederUtil.logErrorMessageByInterval(
-      LOG_MESSAGE_KEY,
-      "Error parsing string. length=" + inputStrLength
-        + ", input=" + input.getShortDescription()
-        + ". First upto 100 characters="
-        + LogFeederUtil.subString(inputStr, 100), null, logger,
-      Level.WARN);
+    LogFeederUtil.logErrorMessageByInterval(logMessageKey, "Error parsing string. length=" + inputStrLength + ", input=" +
+        input.getShortDescription() + ". First up to 100 characters=" + StringUtils.abbreviate(inputStr, 100), null, LOG,
+        Level.WARN);
   }
 
   @Override
   public void flush() {
     if (strBuff != null) {
-      Map<String, Object> jsonObj = Collections
-        .synchronizedMap(new HashMap<String, Object>());
+      Map<String, Object> jsonObj = Collections.synchronizedMap(new HashMap<String, Object>());
       try {
         applyMessage(strBuff.toString(), jsonObj, currMultilineJsonStr);
       } catch (LogfeederException e) {
-        logger.error(e.getLocalizedMessage(), e.getCause());
+        LOG.error(e.getLocalizedMessage(), e.getCause());
       }
       strBuff = null;
       savedInputMarker = null;
@@ -304,7 +275,7 @@ public class FilterGrok extends Filter {
   }
 
   @Override
-  public void addMetricsContainers(List<MetricCount> metricsList) {
+  public void addMetricsContainers(List<MetricData> metricsList) {
     super.addMetricsContainers(metricsList);
     metricsList.add(grokErrorMetric);
   }
@@ -314,5 +285,4 @@ public class FilterGrok extends Filter {
     super.logStat();
     logStatForMetric(grokErrorMetric, "Stat: Grok Errors");
   }
-
 }
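
For reference, the grokRegEx used by extractNamedParams() can be exercised on its own. The snippet below runs the exact expression from the hunk above against a made-up message_pattern and prints the field names (the subname groups) that the filter later copies out of the Grok capture. Note that the [A-z0-9] range is wider than [A-Za-z0-9]; among other things it admits the underscore that pattern names like TIMESTAMP_ISO8601 rely on.

    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public class GrokParamDemo {
      public static void main(String[] args) {
        String grokRegEx = "%\\{" +
            "(?<name>" + "(?<pattern>[A-z0-9]+)" + "(?::(?<subname>[A-z0-9_:]+))?" + ")" +
            "(?:=(?<definition>" + "(?:" + "(?:[^{}]+|\\.+)+" + ")+" + ")" + ")?" +
            "\\}";
        String messagePattern = "%{TIMESTAMP_ISO8601:logtime} %{LOGLEVEL:level} %{GREEDYDATA:log_message}";
        Matcher matcher = Pattern.compile(grokRegEx).matcher(messagePattern);
        while (matcher.find()) {
          // subname is the key under which the captured value lands in the output map
          System.out.println("pattern=" + matcher.group("pattern") + ", field=" + matcher.group("subname"));
        }
      }
    }
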

+ 4 - 4
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/FilterJSON.java

@@ -22,12 +22,13 @@ import java.util.Map;
 
 import org.apache.ambari.logfeeder.common.LogfeederException;
 import org.apache.ambari.logfeeder.input.InputMarker;
+import org.apache.ambari.logfeeder.util.DateUtil;
 import org.apache.ambari.logfeeder.util.LogFeederUtil;
 import org.apache.log4j.Logger;
 
 public class FilterJSON extends Filter {
   
-  private static final Logger logger  = Logger.getLogger(FilterJSON.class);
+  private static final Logger LOG  = Logger.getLogger(FilterJSON.class);
 
   @Override
   public void apply(String inputStr, InputMarker inputMarker) throws LogfeederException {
@@ -35,7 +36,7 @@ public class FilterJSON extends Filter {
     try {
       jsonMap = LogFeederUtil.toJSONObject(inputStr);
     } catch (Exception e) {
-      logger.error(e.getLocalizedMessage());
+      LOG.error(e.getLocalizedMessage());
       throw new LogfeederException("Json parsing failed for inputstr = " + inputStr ,e.getCause());
     }
     Double lineNumberD = (Double) jsonMap.get("line_number");
@@ -45,10 +46,9 @@ public class FilterJSON extends Filter {
     }
     String timeStampStr = (String) jsonMap.get("logtime");
     if (timeStampStr != null && !timeStampStr.isEmpty()) {
-      String logtime = LogFeederUtil.getDate(timeStampStr);
+      String logtime = DateUtil.getDate(timeStampStr);
       jsonMap.put("logtime", logtime);
     }
     super.apply(jsonMap, inputMarker);
   }
-
 }
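
Gson deserializes every JSON number into a Double, which is why apply() narrows line_number back to an int before re-storing it. A self-contained sketch of that step with a made-up input line, using plain Gson in place of LogFeederUtil.toJSONObject() (an assumption about what that helper wraps); the logtime reformatting via DateUtil.getDate() is left out here:

    import java.lang.reflect.Type;
    import java.util.Map;

    import com.google.gson.Gson;
    import com.google.gson.reflect.TypeToken;

    public class JsonLineDemo {
      public static void main(String[] args) {
        String inputStr = "{\"line_number\": 42.0, \"logtime\": \"2016-08-22 10:15:00,123\", \"log_message\": \"test\"}";
        Type type = new TypeToken<Map<String, Object>>() {}.getType();
        Map<String, Object> jsonMap = new Gson().fromJson(inputStr, type);

        // Gson gives us a Double; the filter stores the intValue() back under the same key
        Double lineNumberD = (Double) jsonMap.get("line_number");
        if (lineNumberD != null) {
          jsonMap.put("line_number", lineNumberD.intValue());
        }
        System.out.println(jsonMap);   // line_number is printed as 42, not 42.0
      }
    }
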

+ 15 - 29
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/FilterKeyValue.java

@@ -25,38 +25,35 @@ import java.util.StringTokenizer;
 
 import org.apache.ambari.logfeeder.common.LogfeederException;
 import org.apache.ambari.logfeeder.input.InputMarker;
-import org.apache.ambari.logfeeder.metrics.MetricCount;
+import org.apache.ambari.logfeeder.metrics.MetricData;
 import org.apache.ambari.logfeeder.util.LogFeederUtil;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.log4j.Level;
 import org.apache.log4j.Logger;
 
 public class FilterKeyValue extends Filter {
-  private static final Logger logger = Logger.getLogger(FilterKeyValue.class);
+  private static final Logger LOG = Logger.getLogger(FilterKeyValue.class);
 
   private String sourceField = null;
   private String valueSplit = "=";
   private String fieldSplit = "\t";
 
-  private MetricCount errorMetric = new MetricCount();
+  private MetricData errorMetric = new MetricData("filter.error.keyvalue", false);
 
   @Override
   public void init() throws Exception {
     super.init();
-    errorMetric.metricsName = "filter.error.keyvalue";
 
     sourceField = getStringValue("source_field");
     valueSplit = getStringValue("value_split", valueSplit);
     fieldSplit = getStringValue("field_split", fieldSplit);
 
-    logger.info("init() done. source_field=" + sourceField
-      + ", value_split=" + valueSplit + ", " + ", field_split="
-      + fieldSplit + ", " + getShortDescription());
+    LOG.info("init() done. source_field=" + sourceField + ", value_split=" + valueSplit + ", " + ", field_split=" +
+        fieldSplit + ", " + getShortDescription());
     if (StringUtils.isEmpty(sourceField)) {
-      logger.fatal("source_field is not set for filter. This filter will not be applied");
+      LOG.fatal("source_field is not set for filter. This filter will not be applied");
       return;
     }
-
   }
 
   @Override
@@ -71,40 +68,30 @@ public class FilterKeyValue extends Filter {
     }
     Object valueObj = jsonObj.get(sourceField);
     if (valueObj != null) {
-      StringTokenizer fieldTokenizer = new StringTokenizer(
-        valueObj.toString(), fieldSplit);
+      StringTokenizer fieldTokenizer = new StringTokenizer(valueObj.toString(), fieldSplit);
       while (fieldTokenizer.hasMoreTokens()) {
         String nv = fieldTokenizer.nextToken();
-        StringTokenizer nvTokenizer = new StringTokenizer(nv,
-          valueSplit);
+        StringTokenizer nvTokenizer = new StringTokenizer(nv, valueSplit);
         while (nvTokenizer.hasMoreTokens()) {
           String name = nvTokenizer.nextToken();
           if (nvTokenizer.hasMoreTokens()) {
             String value = nvTokenizer.nextToken();
             jsonObj.put(name, value);
           } else {
-            logParseError("name=" + name + ", pair=" + nv
-              + ", field=" + sourceField + ", field_value="
-              + valueObj);
+            logParseError("name=" + name + ", pair=" + nv + ", field=" + sourceField + ", field_value=" + valueObj);
           }
         }
       }
     }
     super.apply(jsonObj, inputMarker);
-    statMetric.count++;
+    statMetric.value++;
   }
 
   private void logParseError(String inputStr) {
-    errorMetric.count++;
-    final String LOG_MESSAGE_KEY = this.getClass().getSimpleName()
-      + "_PARSEERROR";
-    LogFeederUtil
-      .logErrorMessageByInterval(
-        LOG_MESSAGE_KEY,
-        "Error parsing string. length=" + inputStr.length()
-          + ", input=" + input.getShortDescription()
-          + ". First upto 100 characters="
-          + LogFeederUtil.subString(inputStr, 100), null, logger,
+    errorMetric.value++;
+    String logMessageKey = this.getClass().getSimpleName() + "_PARSEERROR";
+    LogFeederUtil.logErrorMessageByInterval(logMessageKey, "Error parsing string. length=" + inputStr.length() + ", input=" +
+        input.getShortDescription() + ". First up to 100 characters=" + StringUtils.abbreviate(inputStr, 100), null, LOG,
         Level.ERROR);
   }
 
@@ -114,9 +101,8 @@ public class FilterKeyValue extends Filter {
   }
 
   @Override
-  public void addMetricsContainers(List<MetricCount> metricsList) {
+  public void addMetricsContainers(List<MetricData> metricsList) {
     super.addMetricsContainers(metricsList);
     metricsList.add(errorMetric);
   }
-
 }
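
The nested StringTokenizer pass in apply() is clearest on a concrete line. Below is a standalone run with the filter's default separators (field_split is a tab, value_split is "="); the sample input is invented, and the dangling token shows the branch that increments errorMetric:

    import java.util.HashMap;
    import java.util.Map;
    import java.util.StringTokenizer;

    public class KeyValueDemo {
      public static void main(String[] args) {
        String fieldValue = "user=admin\tresult=SUCCESS\tbroken";
        Map<String, Object> jsonObj = new HashMap<>();

        StringTokenizer fieldTokenizer = new StringTokenizer(fieldValue, "\t");
        while (fieldTokenizer.hasMoreTokens()) {
          String nv = fieldTokenizer.nextToken();              // one "name=value" pair
          StringTokenizer nvTokenizer = new StringTokenizer(nv, "=");
          while (nvTokenizer.hasMoreTokens()) {
            String name = nvTokenizer.nextToken();
            if (nvTokenizer.hasMoreTokens()) {
              jsonObj.put(name, nvTokenizer.nextToken());      // value becomes a new field
            } else {
              System.err.println("parse error, dangling token: " + name);
            }
          }
        }
        System.out.println(jsonObj);                           // {result=SUCCESS, user=admin}
      }
    }
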

+ 319 - 0
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/AbstractInputFile.java

@@ -0,0 +1,319 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logfeeder.input;
+
+import java.io.BufferedReader;
+import java.io.EOFException;
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.RandomAccessFile;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.ambari.logfeeder.util.LogFeederUtil;
+import org.apache.commons.lang3.ArrayUtils;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.log4j.Level;
+import org.apache.log4j.Logger;
+
+public abstract class AbstractInputFile extends Input {
+  protected static final Logger LOG = Logger.getLogger(AbstractInputFile.class);
+
+  private static final int DEFAULT_CHECKPOINT_INTERVAL_MS = 5 * 1000;
+
+  protected File[] logFiles;
+  protected String logPath;
+  protected Object fileKey;
+  protected String base64FileKey;
+
+  protected boolean isReady;
+  private boolean isStartFromBegining = true;
+
+  private String checkPointExtension;
+  private File checkPointFile;
+  private RandomAccessFile checkPointWriter;
+  private long lastCheckPointTimeMS;
+  private int checkPointIntervalMS;
+  private Map<String, Object> jsonCheckPoint;
+  private InputMarker lastCheckPointInputMarker;
+
+  @Override
+  protected String getStatMetricName() {
+    return "input.files.read_lines";
+  }
+  
+  @Override
+  protected String getReadBytesMetricName() {
+    return "input.files.read_bytes";
+  }
+  
+  @Override
+  public void init() throws Exception {
+    LOG.info("init() called");
+    
+    checkPointExtension = LogFeederUtil.getStringProperty("logfeeder.checkpoint.extension", InputManager.DEFAULT_CHECKPOINT_EXTENSION);
+
+    // Let's close the file and set it to true after we start monitoring it
+    setClosed(true);
+    logPath = getStringValue("path");
+    tail = getBooleanValue("tail", tail);
+    checkPointIntervalMS = getIntValue("checkpoint.interval.ms", DEFAULT_CHECKPOINT_INTERVAL_MS);
+
+    if (StringUtils.isEmpty(logPath)) {
+      LOG.error("path is empty for file input. " + getShortDescription());
+      return;
+    }
+
+    String startPosition = getStringValue("start_position");
+    if (StringUtils.isEmpty(startPosition) || startPosition.equalsIgnoreCase("beginning") ||
+        startPosition.equalsIgnoreCase("begining") || !tail) {
+      isStartFromBegining = true;
+    }
+
+    setFilePath(logPath);
+    boolean isFileReady = isReady();
+
+    LOG.info("File to monitor " + logPath + ", tail=" + tail + ", isReady=" + isFileReady);
+
+    super.init();
+  }
+
+  protected void processFile(File logPathFile) throws FileNotFoundException, IOException {
+    LOG.info("Monitoring logPath=" + logPath + ", logPathFile=" + logPathFile);
+    BufferedReader br = null;
+    checkPointFile = null;
+    checkPointWriter = null;
+    jsonCheckPoint = null;
+
+    int lineCount = 0;
+    try {
+      setFilePath(logPathFile.getAbsolutePath());
+      
+      br = openLogFile(logPathFile);
+
+      boolean resume = isStartFromBegining;
+      int resumeFromLineNumber = getResumeFromLineNumber();
+      if (resumeFromLineNumber > 0) {
+        resume = false;
+      }
+      
+      setClosed(false);
+      int sleepStep = 2;
+      int sleepIteration = 0;
+      while (true) {
+        try {
+          if (isDrain()) {
+            break;
+          }
+
+          String line = br.readLine();
+          if (line == null) {
+            if (!resume) {
+              resume = true;
+            }
+            sleepIteration++;
+            if (sleepIteration == 2) {
+              flush();
+              if (!tail) {
+                LOG.info("End of file. Done with filePath=" + logPathFile.getAbsolutePath() + ", lineCount=" + lineCount);
+                break;
+              }
+            } else if (sleepIteration > 4) {
+              Object newFileKey = getFileKey(logPathFile);
+              if (newFileKey != null && (fileKey == null || !newFileKey.equals(fileKey))) {
+                LOG.info("File key is different. Marking this input file for rollover. oldKey=" + fileKey + ", newKey=" +
+                    newFileKey + ". " + getShortDescription());
+                
+                try {
+                  LOG.info("File is rolled over. Closing current open file." + getShortDescription() + ", lineCount=" +
+                      lineCount);
+                  br.close();
+                } catch (Exception ex) {
+                  LOG.error("Error closing file" + getShortDescription(), ex);
+                  break;
+                }
+                
+                try {
+                  LOG.info("Opening new rolled over file." + getShortDescription());
+                  br = openLogFile(logPathFile);
+                  lineCount = 0;
+                } catch (Exception ex) {
+                  LOG.error("Error opening rolled over file. " + getShortDescription(), ex);
+                  LOG.info("Added input to not ready list." + getShortDescription());
+                  isReady = false;
+                  inputManager.addToNotReady(this);
+                  break;
+                }
+                LOG.info("File is successfully rolled over. " + getShortDescription());
+                continue;
+              }
+            }
+            try {
+              Thread.sleep(sleepStep * 1000);
+              sleepStep = Math.min(sleepStep * 2, 10);
+            } catch (InterruptedException e) {
+              LOG.info("Thread interrupted." + getShortDescription());
+            }
+          } else {
+            lineCount++;
+            sleepStep = 1;
+            sleepIteration = 0;
+
+            if (!resume && lineCount > resumeFromLineNumber) {
+              LOG.info("Resuming to read from last line. lineCount=" + lineCount + ", input=" + getShortDescription());
+              resume = true;
+            }
+            if (resume) {
+              InputMarker marker = new InputMarker(this, base64FileKey, lineCount);
+              outputLine(line, marker);
+            }
+          }
+        } catch (Throwable t) {
+          String logMessageKey = this.getClass().getSimpleName() + "_READ_LOOP_EXCEPTION";
+          LogFeederUtil.logErrorMessageByInterval(logMessageKey, "Caught exception in read loop. lineNumber=" + lineCount +
+              ", input=" + getShortDescription(), t, LOG, Level.ERROR);
+        }
+      }
+    } finally {
+      if (br != null) {
+        LOG.info("Closing reader." + getShortDescription() + ", lineCount=" + lineCount);
+        try {
+          br.close();
+        } catch (Throwable t) {
+          // ignore
+        }
+      }
+    }
+  }
+
+  protected abstract BufferedReader openLogFile(File logFile) throws IOException;
+
+  protected abstract Object getFileKey(File logFile);
+  
+  private int getResumeFromLineNumber() {
+    int resumeFromLineNumber = 0;
+    
+    if (tail) {
+      try {
+        LOG.info("Checking existing checkpoint file. " + getShortDescription());
+
+        String checkPointFileName = base64FileKey + checkPointExtension;
+        File checkPointFolder = inputManager.getCheckPointFolderFile();
+        checkPointFile = new File(checkPointFolder, checkPointFileName);
+        checkPointWriter = new RandomAccessFile(checkPointFile, "rw");
+
+        try {
+          int contentSize = checkPointWriter.readInt();
+          byte b[] = new byte[contentSize];
+          int readSize = checkPointWriter.read(b, 0, contentSize);
+          if (readSize != contentSize) {
+            LOG.error("Couldn't read expected number of bytes from checkpoint file. expected=" + contentSize + ", read=" +
+                readSize + ", checkPointFile=" + checkPointFile + ", input=" + getShortDescription());
+          } else {
+            String jsonCheckPointStr = new String(b, 0, readSize);
+            jsonCheckPoint = LogFeederUtil.toJSONObject(jsonCheckPointStr);
+
+            resumeFromLineNumber = LogFeederUtil.objectToInt(jsonCheckPoint.get("line_number"), 0, "line_number");
+
+            LOG.info("CheckPoint. checkPointFile=" + checkPointFile + ", json=" + jsonCheckPointStr +
+                ", resumeFromLineNumber=" + resumeFromLineNumber);
+          }
+        } catch (EOFException eofEx) {
+          LOG.info("EOFException. Will reset checkpoint file " + checkPointFile.getAbsolutePath() + " for " +
+              getShortDescription());
+        }
+        if (jsonCheckPoint == null) {
+          // This seems to be first time, so creating the initial checkPoint object
+          jsonCheckPoint = new HashMap<String, Object>();
+          jsonCheckPoint.put("file_path", filePath);
+          jsonCheckPoint.put("file_key", base64FileKey);
+        }
+
+      } catch (Throwable t) {
+        LOG.error("Error while configuring checkpoint file. Will reset file. checkPointFile=" + checkPointFile, t);
+      }
+    }
+    
+    return resumeFromLineNumber;
+  }
+
+  @Override
+  public synchronized void checkIn(InputMarker inputMarker) {
+    if (checkPointWriter != null) {
+      try {
+        int lineNumber = LogFeederUtil.objectToInt(jsonCheckPoint.get("line_number"), 0, "line_number");
+        if (lineNumber > inputMarker.lineNumber) {
+          // Already wrote higher line number for this input
+          return;
+        }
+        // If interval is greater than last checkPoint time, then write
+        long currMS = System.currentTimeMillis();
+        if (!isClosed() && (currMS - lastCheckPointTimeMS) < checkPointIntervalMS) {
+          // Let's save this one so we can update the check point file on flush
+          lastCheckPointInputMarker = inputMarker;
+          return;
+        }
+        lastCheckPointTimeMS = currMS;
+
+        jsonCheckPoint.put("line_number", "" + new Integer(inputMarker.lineNumber));
+        jsonCheckPoint.put("last_write_time_ms", "" + new Long(currMS));
+        jsonCheckPoint.put("last_write_time_date", new Date());
+
+        String jsonStr = LogFeederUtil.getGson().toJson(jsonCheckPoint);
+
+        // Let's rewind
+        checkPointWriter.seek(0);
+        checkPointWriter.writeInt(jsonStr.length());
+        checkPointWriter.write(jsonStr.getBytes());
+
+        if (isClosed()) {
+          String logMessageKey = this.getClass().getSimpleName() + "_FINAL_CHECKIN";
+          LogFeederUtil.logErrorMessageByInterval(logMessageKey, "Wrote final checkPoint, input=" + getShortDescription() +
+              ", checkPointFile=" + checkPointFile.getAbsolutePath() + ", checkPoint=" + jsonStr, null, LOG, Level.INFO);
+        }
+      } catch (Throwable t) {
+        String logMessageKey = this.getClass().getSimpleName() + "_CHECKIN_EXCEPTION";
+        LogFeederUtil.logErrorMessageByInterval(logMessageKey, "Caught exception checkIn, input=" + getShortDescription(), t,
+            LOG, Level.ERROR);
+      }
+    }
+  }
+
+  @Override
+  public void lastCheckIn() {
+    if (lastCheckPointInputMarker != null) {
+      checkIn(lastCheckPointInputMarker);
+    }
+  }
+
+  @Override
+  public void close() {
+    super.close();
+    LOG.info("close() calling checkPoint checkIn(). " + getShortDescription());
+    lastCheckIn();
+  }
+
+  @Override
+  public String getShortDescription() {
+    return "input:source=" + getStringValue("source") + ", path=" +
+        (!ArrayUtils.isEmpty(logFiles) ? logFiles[0].getAbsolutePath() : logPath);
+  }
+}
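
The checkpoint record that checkIn() writes and getResumeFromLineNumber() reads back has a fixed shape: a 4-byte length written with writeInt(), followed by that many bytes of JSON, always rewritten from offset 0. A minimal round trip under the same assumptions (the file name and JSON payload here are illustrative):

    import java.io.File;
    import java.io.RandomAccessFile;

    public class CheckpointDemo {
      public static void main(String[] args) throws Exception {
        File checkPointFile = File.createTempFile("demo", ".cp");
        String jsonStr = "{\"file_path\":\"/var/log/demo.log\",\"line_number\":\"123\"}";

        try (RandomAccessFile rw = new RandomAccessFile(checkPointFile, "rw")) {
          rw.seek(0);                      // rewind; the record is overwritten in place
          rw.writeInt(jsonStr.length());   // length prefix (chars == bytes only for ASCII,
          rw.write(jsonStr.getBytes());    //   the same assumption the original code makes)
        }

        try (RandomAccessFile rd = new RandomAccessFile(checkPointFile, "rw")) {
          int contentSize = rd.readInt();
          byte[] b = new byte[contentSize];
          int readSize = rd.read(b, 0, contentSize);
          System.out.println("read " + readSize + " bytes: " + new String(b, 0, readSize));
        }
      }
    }
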

+ 142 - 171
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/Input.java

@@ -21,7 +21,6 @@ package org.apache.ambari.logfeeder.input;
 
 import java.io.File;
 import java.util.ArrayList;
-import java.util.Date;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -29,88 +28,138 @@ import java.util.Map;
 import org.apache.ambari.logfeeder.common.ConfigBlock;
 import org.apache.ambari.logfeeder.common.LogfeederException;
 import org.apache.ambari.logfeeder.filter.Filter;
-import org.apache.ambari.logfeeder.metrics.MetricCount;
+import org.apache.ambari.logfeeder.metrics.MetricData;
 import org.apache.ambari.logfeeder.output.Output;
-import org.apache.ambari.logfeeder.output.OutputMgr;
+import org.apache.ambari.logfeeder.output.OutputManager;
 import org.apache.log4j.Logger;
 
 public abstract class Input extends ConfigBlock implements Runnable {
-  static private Logger logger = Logger.getLogger(Input.class);
-
-  protected OutputMgr outputMgr;
-  protected InputMgr inputMgr;
+  private static final Logger LOG = Logger.getLogger(Input.class);
 
+  private static final boolean DEFAULT_TAIL = true;
+  private static final boolean DEFAULT_USE_EVENT_MD5 = false;
+  private static final boolean DEFAULT_GEN_EVENT_MD5 = true;
+  
+  protected InputManager inputManager;
+  protected OutputManager outputManager;
   private List<Output> outputList = new ArrayList<Output>();
 
-  private Filter firstFilter = null;
   private Thread thread;
-  private boolean isClosed = false;
-  protected String filePath = null;
-  private String type = null;
+  private String type;
+  protected String filePath;
+  private Filter firstFilter;
+  private boolean isClosed;
 
-  protected boolean tail = true;
-  private boolean useEventMD5 = false;
-  private boolean genEventMD5 = true;
+  protected boolean tail;
+  private boolean useEventMD5;
+  private boolean genEventMD5;
 
-  protected MetricCount readBytesMetric = new MetricCount();
+  protected MetricData readBytesMetric = new MetricData(getReadBytesMetricName(), false);
+  protected String getReadBytesMetricName() {
+    return null;
+  }
+  
+  @Override
+  public void loadConfig(Map<String, Object> map) {
+    super.loadConfig(map);
+    String typeValue = getStringValue("type");
+    if (typeValue != null) {
+      // Explicitly add type and value to field list
+      contextFields.put("type", typeValue);
+      @SuppressWarnings("unchecked")
+      Map<String, Object> addFields = (Map<String, Object>) map.get("add_fields");
+      if (addFields == null) {
+        addFields = new HashMap<String, Object>();
+        map.put("add_fields", addFields);
+      }
+      addFields.put("type", typeValue);
+    }
+  }
 
-  /**
-   * This method will be called from the thread spawned for the output. This
-   * method should only exit after all data are read from the source or the
-   * process is exiting
-   */
-  abstract void start() throws Exception;
+  public void setType(String type) {
+    this.type = type;
+  }
+
+  public void setInputManager(InputManager inputManager) {
+    this.inputManager = inputManager;
+  }
+
+  public void setOutputManager(OutputManager outputManager) {
+    this.outputManager = outputManager;
+  }
+
+  public void addFilter(Filter filter) {
+    if (firstFilter == null) {
+      firstFilter = filter;
+    } else {
+      Filter f = firstFilter;
+      while (f.getNextFilter() != null) {
+        f = f.getNextFilter();
+      }
+      f.setNextFilter(filter);
+    }
+  }
+
+  public void addOutput(Output output) {
+    outputList.add(output);
+  }
 
   @Override
   public void init() throws Exception {
     super.init();
-    tail = getBooleanValue("tail", tail);
-    useEventMD5 = getBooleanValue("use_event_md5_as_id", useEventMD5);
-    genEventMD5 = getBooleanValue("gen_event_md5", genEventMD5);
+    tail = getBooleanValue("tail", DEFAULT_TAIL);
+    useEventMD5 = getBooleanValue("use_event_md5_as_id", DEFAULT_USE_EVENT_MD5);
+    genEventMD5 = getBooleanValue("gen_event_md5", DEFAULT_GEN_EVENT_MD5);
 
     if (firstFilter != null) {
       firstFilter.init();
     }
   }
 
-  @Override
-  public String getNameForThread() {
-    if (filePath != null) {
-      try {
-        return (type + "=" + (new File(filePath)).getName());
-      } catch (Throwable ex) {
-        logger.warn("Couldn't get basename for filePath=" + filePath,
-          ex);
-      }
+  boolean monitor() {
+    if (isReady()) {
+      LOG.info("Starting thread. " + getShortDescription());
+      thread = new Thread(this, getNameForThread());
+      thread.start();
+      return true;
+    } else {
+      return false;
     }
-    return super.getNameForThread() + ":" + type;
   }
 
+  public abstract boolean isReady();
+
   @Override
   public void run() {
     try {
-      logger.info("Started to monitor. " + getShortDescription());
+      LOG.info("Started to monitor. " + getShortDescription());
       start();
     } catch (Exception e) {
-      logger.error("Error writing to output.", e);
+      LOG.error("Error writing to output.", e);
     }
-    logger.info("Exiting thread. " + getShortDescription());
+    LOG.info("Exiting thread. " + getShortDescription());
   }
 
+  /**
+   * This method will be called from the thread spawned for the output. This
+   * method should only exit after all data are read from the source or the
+   * process is exiting
+   */
+  abstract void start() throws Exception;
+
   protected void outputLine(String line, InputMarker marker) {
-    statMetric.count++;
-    readBytesMetric.count += (line.length());
+    statMetric.value++;
+    readBytesMetric.value += (line.length());
 
     if (firstFilter != null) {
       try {
         firstFilter.apply(line, marker);
       } catch (LogfeederException e) {
-        logger.error(e.getLocalizedMessage(),e);
+        LOG.error(e.getLocalizedMessage(), e);
       }
     } else {
-      // TODO: For now, let's make filter mandatory, so that no one
-      // accidently forgets to write filter
-      // outputMgr.write(line, this);
+      // TODO: For now, let's make filter mandatory, so that no one accidentally forgets to write filter
+      // outputManager.write(line, this);
     }
   }
 
@@ -120,60 +169,10 @@ public abstract class Input extends ConfigBlock implements Runnable {
     }
   }
 
-  public boolean monitor() {
-    if (isReady()) {
-      logger.info("Starting thread. " + getShortDescription());
-      thread = new Thread(this, getNameForThread());
-      thread.start();
-      return true;
-    } else {
-      return false;
-    }
-  }
-
-  public void checkIn(InputMarker inputMarker) {
-    // Default implementation is to ignore.
-  }
-
-  /**
-   * This is generally used by final checkin
-   */
-  public void checkIn() {
-  }
-
-  public boolean isReady() {
-    return true;
-  }
-
-  public boolean isTail() {
-    return tail;
-  }
-
-  public void setTail(boolean tail) {
-    this.tail = tail;
-  }
-
-  public boolean isUseEventMD5() {
-    return useEventMD5;
-  }
-
-  public void setUseEventMD5(boolean useEventMD5) {
-    this.useEventMD5 = useEventMD5;
-  }
-
-  public boolean isGenEventMD5() {
-    return genEventMD5;
-  }
-
-  public void setGenEventMD5(boolean genEventMD5) {
-    this.genEventMD5 = genEventMD5;
-  }
-
   @Override
   public void setDrain(boolean drain) {
-    logger.info("Request to drain. " + getShortDescription());
+    LOG.info("Request to drain. " + getShortDescription());
     super.setDrain(drain);
-    ;
     try {
       thread.interrupt();
     } catch (Throwable t) {
@@ -181,38 +180,36 @@ public abstract class Input extends ConfigBlock implements Runnable {
     }
   }
 
-  public Filter getFirstFilter() {
-    return firstFilter;
-  }
-
-  public void setFirstFilter(Filter filter) {
-    firstFilter = filter;
+  public void addMetricsContainers(List<MetricData> metricsList) {
+    super.addMetricsContainers(metricsList);
+    if (firstFilter != null) {
+      firstFilter.addMetricsContainers(metricsList);
+    }
+    metricsList.add(readBytesMetric);
   }
 
-  public void setInputMgr(InputMgr inputMgr) {
-    this.inputMgr = inputMgr;
-  }
+  @Override
+  public void logStat() {
+    super.logStat();
+    logStatForMetric(readBytesMetric, "Stat: Bytes Read");
 
-  public void setOutputMgr(OutputMgr outputMgr) {
-    this.outputMgr = outputMgr;
+    if (firstFilter != null) {
+      firstFilter.logStat();
+    }
   }
 
-  public String getFilePath() {
-    return filePath;
-  }
+  public abstract void checkIn(InputMarker inputMarker);
 
-  public void setFilePath(String filePath) {
-    this.filePath = filePath;
-  }
+  public abstract void lastCheckIn();
 
   public void close() {
-    logger.info("Close called. " + getShortDescription());
+    LOG.info("Close called. " + getShortDescription());
 
     try {
       if (firstFilter != null) {
         firstFilter.close();
       } else {
-        outputMgr.close();
+        outputManager.close();
       }
     } catch (Throwable t) {
       // Ignore
@@ -220,86 +217,60 @@ public abstract class Input extends ConfigBlock implements Runnable {
     isClosed = true;
   }
 
-  public void setClosed(boolean isClosed) {
-    this.isClosed = isClosed;
-  }
-
-  public boolean isClosed() {
-    return isClosed;
-  }
-
-  @Override
-  public void loadConfig(Map<String, Object> map) {
-    super.loadConfig(map);
-    String typeValue = getStringValue("type");
-    if (typeValue != null) {
-      // Explicitly add type and value to field list
-      contextFields.put("type", typeValue);
-      @SuppressWarnings("unchecked")
-      Map<String, Object> addFields = (Map<String, Object>) map
-        .get("add_fields");
-      if (addFields == null) {
-        addFields = new HashMap<String, Object>();
-        map.put("add_fields", addFields);
-      }
-      addFields.put("type", typeValue);
-    }
+  public boolean isTail() {
+    return tail;
   }
 
-  @Override
-  public String getShortDescription() {
-    return null;
+  public boolean isUseEventMD5() {
+    return useEventMD5;
   }
 
-  @Override
-  public void logStat() {
-    super.logStat();
-    logStatForMetric(readBytesMetric, "Stat: Bytes Read");
-
-    if (firstFilter != null) {
-      firstFilter.logStat();
-    }
+  public boolean isGenEventMD5() {
+    return genEventMD5;
   }
 
-  @Override
-  public String toString() {
-    return getShortDescription();
+  public Filter getFirstFilter() {
+    return firstFilter;
   }
 
-  public void rollOver() {
-    // Only some inputs support it. E.g. InputFile
+  public String getFilePath() {
+    return filePath;
   }
 
-  public String getType() {
-    return type;
+  public void setFilePath(String filePath) {
+    this.filePath = filePath;
   }
 
-  public void setType(String type) {
-    this.type = type;
+  public void setClosed(boolean isClosed) {
+    this.isClosed = isClosed;
   }
 
-  public Date getEventTime() {
-    return null;
+  public boolean isClosed() {
+    return isClosed;
   }
 
   public List<Output> getOutputList() {
     return outputList;
   }
-
-  public void addOutput(Output output) {
-    outputList.add(output);
-  }
-
-  public void addMetricsContainers(List<MetricCount> metricsList) {
-    super.addMetricsContainers(metricsList);
-    if (firstFilter != null) {
-      firstFilter.addMetricsContainers(metricsList);
-    }
-    metricsList.add(readBytesMetric);
-  }
   
   public Thread getThread(){
     return thread;
   }
 
+  @Override
+  public String getNameForThread() {
+    if (filePath != null) {
+      try {
+        return (type + "=" + (new File(filePath)).getName());
+      } catch (Throwable ex) {
+        LOG.warn("Couldn't get basename for filePath=" + filePath, ex);
+      }
+    }
+    return super.getNameForThread() + ":" + type;
+  }
+
+  @Override
+  public String toString() {
+    return getShortDescription();
+  }
 }
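
addFilter() above replaces the old setFirstFilter() by appending to a singly linked chain hanging off firstFilter, so filters run in configuration order. The walk-to-the-tail logic in isolation, with Node as a hypothetical stand-in for Filter and its getNextFilter()/setNextFilter() pair:

    public class FilterChainDemo {
      static class Node {
        final String name;
        Node next;
        Node(String name) { this.name = name; }
      }

      private Node firstFilter;

      void addFilter(Node filter) {
        if (firstFilter == null) {
          firstFilter = filter;          // first one becomes the head
        } else {
          Node f = firstFilter;
          while (f.next != null) {       // walk to the current tail
            f = f.next;
          }
          f.next = filter;               // append, preserving configuration order
        }
      }

      public static void main(String[] args) {
        FilterChainDemo input = new FilterChainDemo();
        input.addFilter(new Node("grok"));
        input.addFilter(new Node("keyvalue"));
        for (Node f = input.firstFilter; f != null; f = f.next) {
          System.out.println(f.name);    // prints grok, then keyvalue
        }
      }
    }
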

+ 37 - 466
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputFile.java

@@ -19,528 +19,99 @@
 package org.apache.ambari.logfeeder.input;
 
 import java.io.BufferedReader;
-import java.io.EOFException;
 import java.io.File;
 import java.io.FileFilter;
 import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.io.RandomAccessFile;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.nio.file.Paths;
-import java.nio.file.attribute.BasicFileAttributes;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.Map;
 
 import org.apache.ambari.logfeeder.input.reader.LogsearchReaderFactory;
-import org.apache.ambari.logfeeder.util.LogFeederUtil;
+import org.apache.ambari.logfeeder.util.FileUtil;
 import org.apache.commons.io.filefilter.WildcardFileFilter;
-import org.apache.commons.lang3.StringUtils;
-import org.apache.log4j.Level;
-import org.apache.log4j.Logger;
+import org.apache.commons.lang3.ArrayUtils;
 import org.apache.solr.common.util.Base64;
 
-public class InputFile extends Input {
-  private static final Logger logger = Logger.getLogger(InputFile.class);
-
-  private String logPath = null;
-  private boolean isStartFromBegining = true;
-
-  private boolean isReady = false;
-  private File[] logPathFiles = null;
-  private Object fileKey = null;
-  private String base64FileKey = null;
-
-  private boolean isRolledOver = false;
-  private boolean addWildCard = false;
-
-  private long lastCheckPointTimeMS = 0;
-  private int checkPointIntervalMS = 5 * 1000; // 5 seconds
-  private RandomAccessFile checkPointWriter = null;
-  private Map<String, Object> jsonCheckPoint = null;
-
-  private File checkPointFile = null;
-
-  private InputMarker lastCheckPointInputMarker = null;
-
-  private String checkPointExtension = ".cp";
-
-  @Override
-  public void init() throws Exception {
-    logger.info("init() called");
-    statMetric.metricsName = "input.files.read_lines";
-    readBytesMetric.metricsName = "input.files.read_bytes";
-    checkPointExtension = LogFeederUtil.getStringProperty(
-      "logfeeder.checkpoint.extension", checkPointExtension);
-
-    // Let's close the file and set it to true after we start monitoring it
-    setClosed(true);
-    logPath = getStringValue("path");
-    tail = getBooleanValue("tail", tail);
-    addWildCard = getBooleanValue("add_wild_card", addWildCard);
-    checkPointIntervalMS = getIntValue("checkpoint.interval.ms",
-      checkPointIntervalMS);
-
-    if (logPath == null || logPath.isEmpty()) {
-      logger.error("path is empty for file input. "
-        + getShortDescription());
-      return;
-    }
-
-    String startPosition = getStringValue("start_position");
-    if (StringUtils.isEmpty(startPosition)
-      || startPosition.equalsIgnoreCase("beginning")
-      || startPosition.equalsIgnoreCase("begining")) {
-      isStartFromBegining = true;
-    }
-
-    if (!tail) {
-      // start position end doesn't apply if we are not tailing
-      isStartFromBegining = true;
-    }
-
-    setFilePath(logPath);
-    boolean isFileReady = isReady();
-
-    logger.info("File to monitor " + logPath + ", tail=" + tail
-      + ", addWildCard=" + addWildCard + ", isReady=" + isFileReady);
-
-    super.init();
-  }
+public class InputFile extends AbstractInputFile {
 
   @Override
   public boolean isReady() {
     if (!isReady) {
       // Let's try to check whether the file is available
-      logPathFiles = getActualFiles(logPath);
-      if (logPathFiles != null && logPathFiles.length > 0
-        && logPathFiles[0].isFile()) {
-
-        if (isTail() && logPathFiles.length > 1) {
-          logger.warn("Found multiple files (" + logPathFiles.length
-            + ") for the file filter " + filePath
-            + ". Will use only the first one. Using "
-            + logPathFiles[0].getAbsolutePath());
+      logFiles = getActualFiles(logPath);
+      if (!ArrayUtils.isEmpty(logFiles) && logFiles[0].isFile()) {
+        if (tail && logFiles.length > 1) {
+          LOG.warn("Found multiple files (" + logFiles.length + ") for the file filter " + filePath +
+              ". Will use only the first one. Using " + logFiles[0].getAbsolutePath());
         }
-        logger.info("File filter " + filePath + " expanded to "
-          + logPathFiles[0].getAbsolutePath());
+        LOG.info("File filter " + filePath + " expanded to " + logFiles[0].getAbsolutePath());
         isReady = true;
       } else {
-        logger.debug(logPath + " file doesn't exist. Ignoring for now");
+        LOG.debug(logPath + " file doesn't exist. Ignoring for now");
       }
     }
     return isReady;
   }
 
   private File[] getActualFiles(String searchPath) {
-    if (addWildCard) {
-      if (!searchPath.endsWith("*")) {
-        searchPath = searchPath + "*";
-      }
-    }
-    File checkFile = new File(searchPath);
-    if (checkFile.isFile()) {
-      return new File[]{checkFile};
+    File searchFile = new File(searchPath);
+    if (searchFile.isFile()) {
+      return new File[]{searchFile};
+    } else {
+      FileFilter fileFilter = new WildcardFileFilter(searchFile.getName());
+      return searchFile.getParentFile().listFiles(fileFilter);
     }
-    // Let's do wild card search
-    // First check current folder
-    File checkFiles[] = findFileForWildCard(searchPath, new File("."));
-    if (checkFiles == null || checkFiles.length == 0) {
-      // Let's check from the parent folder
-      File parentDir = (new File(searchPath)).getParentFile();
-      if (parentDir != null) {
-        String wildCard = (new File(searchPath)).getName();
-        checkFiles = findFileForWildCard(wildCard, parentDir);
-      }
-    }
-    return checkFiles;
-  }
-
-  private File[] findFileForWildCard(String searchPath, File dir) {
-    logger.debug("findFileForWildCard(). filePath=" + searchPath + ", dir="
-      + dir + ", dir.fullpath=" + dir.getAbsolutePath());
-    FileFilter fileFilter = new WildcardFileFilter(searchPath);
-    return dir.listFiles(fileFilter);
-  }
-
-  @Override
-  synchronized public void checkIn(InputMarker inputMarker) {
-    super.checkIn(inputMarker);
-    if (checkPointWriter != null) {
-      try {
-        int lineNumber = LogFeederUtil.objectToInt(
-          jsonCheckPoint.get("line_number"), 0, "line_number");
-        if (lineNumber > inputMarker.lineNumber) {
-          // Already wrote higher line number for this input
-          return;
-        }
-        // If interval is greater than last checkPoint time, then write
-        long currMS = System.currentTimeMillis();
-        if (!isClosed()
-          && (currMS - lastCheckPointTimeMS) < checkPointIntervalMS) {
-          // Let's save this one so we can update the check point file
-          // on flush
-          lastCheckPointInputMarker = inputMarker;
-          return;
-        }
-        lastCheckPointTimeMS = currMS;
-
-        jsonCheckPoint.put("line_number", ""
-          + new Integer(inputMarker.lineNumber));
-        jsonCheckPoint.put("last_write_time_ms", "" + new Long(currMS));
-        jsonCheckPoint.put("last_write_time_date", new Date());
-
-        String jsonStr = LogFeederUtil.getGson().toJson(jsonCheckPoint);
-
-        // Let's rewind
-        checkPointWriter.seek(0);
-        checkPointWriter.writeInt(jsonStr.length());
-        checkPointWriter.write(jsonStr.getBytes());
-
-        if (isClosed()) {
-          final String LOG_MESSAGE_KEY = this.getClass()
-            .getSimpleName() + "_FINAL_CHECKIN";
-          LogFeederUtil.logErrorMessageByInterval(
-            LOG_MESSAGE_KEY,
-            "Wrote final checkPoint, input="
-              + getShortDescription()
-              + ", checkPointFile="
-              + checkPointFile.getAbsolutePath()
-              + ", checkPoint=" + jsonStr, null, logger,
-            Level.INFO);
-        }
-      } catch (Throwable t) {
-        final String LOG_MESSAGE_KEY = this.getClass().getSimpleName()
-          + "_CHECKIN_EXCEPTION";
-        LogFeederUtil
-          .logErrorMessageByInterval(LOG_MESSAGE_KEY,
-            "Caught exception checkIn. , input="
-              + getShortDescription(), t, logger,
-            Level.ERROR);
-      }
-    }
-
-  }
-
-  @Override
-  public void checkIn() {
-    super.checkIn();
-    if (lastCheckPointInputMarker != null) {
-      checkIn(lastCheckPointInputMarker);
-    }
-  }
-
-  @Override
-  public void rollOver() {
-    logger.info("Marking this input file for rollover. "
-      + getShortDescription());
-    isRolledOver = true;
   }
 
   @Override
   void start() throws Exception {
-
-    if (logPathFiles == null || logPathFiles.length == 0) {
-      return;
-    }
     boolean isProcessFile = getBooleanValue("process_file", true);
     if (isProcessFile) {
-      if (isTail()) {
-        processFile(logPathFiles[0]);
+      if (tail) {
+        processFile(logFiles[0]);
       } else {
-        for (File file : logPathFiles) {
+        for (File file : logFiles) {
           try {
             processFile(file);
             if (isClosed() || isDrain()) {
-              logger.info("isClosed or isDrain. Now breaking loop.");
+              LOG.info("isClosed or isDrain. Now breaking loop.");
               break;
             }
           } catch (Throwable t) {
-            logger.error("Error processing file=" + file.getAbsolutePath(), t);
+            LOG.error("Error processing file=" + file.getAbsolutePath(), t);
           }
         }
       }
       close();
-    }else{
-      copyFiles(logPathFiles);
+    } else {
+      copyFiles(logFiles);
     }
-    
   }
 
   @Override
-  public void close() {
-    super.close();
-    logger.info("close() calling checkPoint checkIn(). "
-      + getShortDescription());
-    checkIn();
-  }
-
-  private void processFile(File logPathFile) throws FileNotFoundException,
-    IOException {
-    logger.info("Monitoring logPath=" + logPath + ", logPathFile="
-      + logPathFile);
-    BufferedReader br = null;
-    checkPointFile = null;
-    checkPointWriter = null;
-    jsonCheckPoint = null;
-    int resumeFromLineNumber = 0;
-
-    int lineCount = 0;
-    try {
-      setFilePath(logPathFile.getAbsolutePath());
-      br = new BufferedReader(LogsearchReaderFactory.INSTANCE.getReader(logPathFile));
-
-      // Whether to send to output from the beginning.
-      boolean resume = isStartFromBegining;
-
-      // Seems FileWatch is not reliable, so let's only use file key comparison
-      fileKey = getFileKey(logPathFile);
-      base64FileKey = Base64.byteArrayToBase64(fileKey.toString()
-        .getBytes());
-      logger.info("fileKey=" + fileKey + ", base64=" + base64FileKey
-        + ". " + getShortDescription());
-
-      if (isTail()) {
-        try {
-          logger.info("Checking existing checkpoint file. "
-            + getShortDescription());
-
-          String fileBase64 = Base64.byteArrayToBase64(fileKey
-            .toString().getBytes());
-          String checkPointFileName = fileBase64
-            + checkPointExtension;
-          File checkPointFolder = inputMgr.getCheckPointFolderFile();
-          checkPointFile = new File(checkPointFolder,
-            checkPointFileName);
-          checkPointWriter = new RandomAccessFile(checkPointFile,
-            "rw");
-
-          try {
-            int contentSize = checkPointWriter.readInt();
-            byte b[] = new byte[contentSize];
-            int readSize = checkPointWriter.read(b, 0, contentSize);
-            if (readSize != contentSize) {
-              logger.error("Couldn't read expected number of bytes from checkpoint file. expected="
-                + contentSize
-                + ", read="
-                + readSize
-                + ", checkPointFile="
-                + checkPointFile
-                + ", input=" + getShortDescription());
-            } else {
-              String jsonCheckPointStr = new String(b, 0, readSize);
-              jsonCheckPoint = LogFeederUtil
-                .toJSONObject(jsonCheckPointStr);
-
-              resumeFromLineNumber = LogFeederUtil.objectToInt(
-                jsonCheckPoint.get("line_number"), 0,
-                "line_number");
-
-              if (resumeFromLineNumber > 0) {
-                // Let's read from last line read
-                resume = false;
-              }
-              logger.info("CheckPoint. checkPointFile="
-                + checkPointFile + ", json="
-                + jsonCheckPointStr
-                + ", resumeFromLineNumber="
-                + resumeFromLineNumber + ", resume="
-                + resume);
-            }
-          } catch (EOFException eofEx) {
-            logger.info("EOFException. Will reset checkpoint file "
-              + checkPointFile.getAbsolutePath() + " for "
-              + getShortDescription());
-          }
-          if (jsonCheckPoint == null) {
-            // This seems to be first time, so creating the initial
-            // checkPoint object
-            jsonCheckPoint = new HashMap<String, Object>();
-            jsonCheckPoint.put("file_path", filePath);
-            jsonCheckPoint.put("file_key", fileBase64);
-          }
-
-        } catch (Throwable t) {
-          logger.error(
-            "Error while configuring checkpoint file. Will reset file. checkPointFile="
-              + checkPointFile, t);
-        }
-      }
-
-      setClosed(false);
-      int sleepStep = 2;
-      int sleepIteration = 0;
-      while (true) {
-        try {
-          if (isDrain()) {
-            break;
-          }
-
-          String line = br.readLine();
-          if (line == null) {
-            if (!resume) {
-              resume = true;
-            }
-            sleepIteration++;
-            try {
-              // Since FileWatch service is not reliable, we will check
-              // file inode every n seconds after no write
-              if (sleepIteration > 4) {
-                Object newFileKey = getFileKey(logPathFile);
-                if (newFileKey != null) {
-                  if (fileKey == null
-                    || !newFileKey.equals(fileKey)) {
-                    logger.info("File key is different. Calling rollover. oldKey="
-                      + fileKey
-                      + ", newKey="
-                      + newFileKey
-                      + ". "
-                      + getShortDescription());
-                    // File has rotated.
-                    rollOver();
-                  }
-                }
-              }
-              // Flush on the second iteration
-              if (!tail && sleepIteration >= 2) {
-                logger.info("End of file. Done with filePath="
-                  + logPathFile.getAbsolutePath()
-                  + ", lineCount=" + lineCount);
-                flush();
-                break;
-              } else if (sleepIteration == 2) {
-                flush();
-              } else if (sleepIteration >= 2) {
-                if (isRolledOver) {
-                  isRolledOver = false;
-                  // Close existing file
-                  try {
-                    logger.info("File is rolled over. Closing current open file."
-                      + getShortDescription()
-                      + ", lineCount=" + lineCount);
-                    br.close();
-                  } catch (Exception ex) {
-                    logger.error("Error closing file"
-                      + getShortDescription());
-                    break;
-                  }
-                  try {
-                    logger.info("Opening new rolled over file."
-                      + getShortDescription());
-                    br = new BufferedReader(LogsearchReaderFactory.
-                      INSTANCE.getReader(logPathFile));
-                    lineCount = 0;
-                    fileKey = getFileKey(logPathFile);
-                    base64FileKey = Base64
-                      .byteArrayToBase64(fileKey
-                        .toString().getBytes());
-                    logger.info("fileKey=" + fileKey
-                      + ", base64=" + base64FileKey
-                      + ", " + getShortDescription());
-                  } catch (Exception ex) {
-                    logger.error("Error opening rolled over file. "
-                      + getShortDescription());
-                    // Let's add this to monitoring and exit this thread
-                    logger.info("Added input to not ready list."
-                      + getShortDescription());
-                    isReady = false;
-                    inputMgr.addToNotReady(this);
-                    break;
-                  }
-                  logger.info("File is successfully rolled over. "
-                    + getShortDescription());
-                  continue;
-                }
-              }
-              Thread.sleep(sleepStep * 1000);
-              sleepStep = (sleepStep * 2);
-              sleepStep = sleepStep > 10 ? 10 : sleepStep;
-            } catch (InterruptedException e) {
-              logger.info("Thread interrupted."
-                + getShortDescription());
-            }
-          } else {
-            lineCount++;
-            sleepStep = 1;
-            sleepIteration = 0;
-
-            if (!resume && lineCount > resumeFromLineNumber) {
-              logger.info("Resuming to read from last line. lineCount="
-                + lineCount
-                + ", input="
-                + getShortDescription());
-              resume = true;
-            }
-            if (resume) {
-              InputMarker marker = new InputMarker();
-              marker.base64FileKey = base64FileKey;
-              marker.input = this;
-              marker.lineNumber = lineCount;
-              outputLine(line, marker);
-            }
-          }
-        } catch (Throwable t) {
-          final String LOG_MESSAGE_KEY = this.getClass()
-            .getSimpleName() + "_READ_LOOP_EXCEPTION";
-          LogFeederUtil.logErrorMessageByInterval(LOG_MESSAGE_KEY,
-            "Caught exception in read loop. lineNumber="
-              + lineCount + ", input="
-              + getShortDescription(), t, logger,
-            Level.ERROR);
-
-        }
-      }
-    } finally {
-      if (br != null) {
-        logger.info("Closing reader." + getShortDescription()
-          + ", lineCount=" + lineCount);
-        try {
-          br.close();
-        } catch (Throwable t) {
-          // ignore
-        }
-      }
-    }
-  }
-
-  static public Object getFileKey(File file) {
-    try {
-      Path fileFullPath = Paths.get(file.getAbsolutePath());
-      if (fileFullPath != null) {
-        BasicFileAttributes basicAttr = Files.readAttributes(
-          fileFullPath, BasicFileAttributes.class);
-        return basicAttr.fileKey();
-      }
-    } catch (Throwable ex) {
-      logger.error("Error getting file attributes for file=" + file, ex);
-    }
-    return file.toString();
+  protected BufferedReader openLogFile(File logFile) throws FileNotFoundException {
+    BufferedReader br = new BufferedReader(LogsearchReaderFactory.INSTANCE.getReader(logFile));
+    fileKey = getFileKey(logFile);
+    base64FileKey = Base64.byteArrayToBase64(fileKey.toString().getBytes());
+    LOG.info("fileKey=" + fileKey + ", base64=" + base64FileKey + ". " + getShortDescription());
+    return br;
   }
 
   @Override
-  public String getShortDescription() {
-    return "input:source="
-      + getStringValue("source")
-      + ", path="
-      + (logPathFiles != null && logPathFiles.length > 0 ? logPathFiles[0]
-      .getAbsolutePath() : getStringValue("path"));
+  protected Object getFileKey(File logFile) {
+    return FileUtil.getFileKey(logFile);
   }
-  
-  public void copyFiles(File[] files) {
+
+  private void copyFiles(File[] files) {
     boolean isCopyFile = getBooleanValue("copy_file", false);
     if (isCopyFile && files != null) {
       for (File file : files) {
         try {
-          InputMarker marker = new InputMarker();
-          marker.input = this;
-          outputMgr.copyFile(file, marker);
+          InputMarker marker = new InputMarker(this, null, 0);
+          outputManager.copyFile(file, marker);
           if (isClosed() || isDrain()) {
-            logger.info("isClosed or isDrain. Now breaking loop.");
+            LOG.info("isClosed or isDrain. Now breaking loop.");
             break;
           }
         } catch (Throwable t) {
-          logger.error("Error processing file=" + file.getAbsolutePath(), t);
+          LOG.error("Error processing file=" + file.getAbsolutePath(), t);
         }
       }
     }
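
Rollover detection in the shared read loop rests on getFileKey(), which InputFile now delegates to FileUtil.getFileKey(). Judging by the code it replaces, the key is the device/inode pair from BasicFileAttributes, so a rotated file is noticed even though the path is unchanged; the sketch below reconstructs that behavior (FileUtil itself is not shown in this diff, so this is an assumption):

    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    import java.nio.file.attribute.BasicFileAttributes;
    import java.util.Objects;

    public class FileKeyDemo {
      static Object getFileKey(File file) {
        try {
          BasicFileAttributes attrs =
              Files.readAttributes(Paths.get(file.getAbsolutePath()), BasicFileAttributes.class);
          return attrs.fileKey();        // "(dev=...,ino=...)" on POSIX; may be null elsewhere
        } catch (Exception ex) {
          return file.toString();        // fall back to the path, as the old InputFile did
        }
      }

      public static void main(String[] args) throws Exception {
        File log = File.createTempFile("app", ".log");
        Object keyBefore = getFileKey(log);
        // Simulate rotation: same path, new inode underneath
        log.delete();
        log.createNewFile();
        Object keyAfter = getFileKey(log);
        System.out.println("rolled over: " + !Objects.equals(keyBefore, keyAfter));
      }
    }
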

+ 119 - 191
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputMgr.java → ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputManager.java

@@ -32,27 +32,31 @@ import java.util.Map;
 import java.util.Set;
 import java.util.UUID;
 
-import org.apache.ambari.logfeeder.metrics.MetricCount;
+import org.apache.ambari.logfeeder.metrics.MetricData;
+import org.apache.ambari.logfeeder.util.FileUtil;
 import org.apache.ambari.logfeeder.util.LogFeederUtil;
 import org.apache.commons.io.filefilter.WildcardFileFilter;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.log4j.Logger;
 import org.apache.solr.common.util.Base64;
 
-public class InputMgr {
-  private static final Logger logger = Logger.getLogger(InputMgr.class);
+public class InputManager {
+  private static final Logger LOG = Logger.getLogger(InputManager.class);
 
+  private static final String CHECKPOINT_SUBFOLDER_NAME = "logfeeder_checkpoints";
+  public static final String DEFAULT_CHECKPOINT_EXTENSION = ".cp";
+  
   private List<Input> inputList = new ArrayList<Input>();
   private Set<Input> notReadyList = new HashSet<Input>();
 
   private boolean isDrain = false;
   private boolean isAnyInputTail = false;
 
-  private String checkPointSubFolderName = "logfeeder_checkpoints";
   private File checkPointFolderFile = null;
 
-  private MetricCount filesCountMetric = new MetricCount();
+  private MetricData filesCountMetric = new MetricData("input.files.count", true);
 
-  private String checkPointExtension = ".cp";
+  private String checkPointExtension;
   
   private Thread inputIsReadyMonitor = null;
 
@@ -65,20 +69,18 @@ public class InputMgr {
   }
 
   public void removeInput(Input input) {
-    logger.info("Trying to remove from inputList. "
-      + input.getShortDescription());
+    LOG.info("Trying to remove from inputList. " + input.getShortDescription());
     Iterator<Input> iter = inputList.iterator();
     while (iter.hasNext()) {
       Input iterInput = iter.next();
       if (iterInput.equals(input)) {
-        logger.info("Removing Input from inputList. "
-          + input.getShortDescription());
+        LOG.info("Removing Input from inputList. " + input.getShortDescription());
         iter.remove();
       }
     }
   }
 
-  public int getActiveFilesCount() {
+  private int getActiveFilesCount() {
     int count = 0;
     for (Input input : inputList) {
       if (input.isReady()) {
@@ -89,11 +91,7 @@ public class InputMgr {
   }
 
   public void init() {
-    filesCountMetric.metricsName = "input.files.count";
-    filesCountMetric.isPointInTime = true;
-
-    checkPointExtension = LogFeederUtil.getStringProperty(
-      "logfeeder.checkpoint.extension", checkPointExtension);
+    checkPointExtension = LogFeederUtil.getStringProperty("logfeeder.checkpoint.extension", DEFAULT_CHECKPOINT_EXTENSION);
     for (Input input : inputList) {
       try {
         input.init();
@@ -101,19 +99,16 @@ public class InputMgr {
           isAnyInputTail = true;
         }
       } catch (Exception e) {
-        logger.error(
-          "Error initializing input. "
-            + input.getShortDescription(), e);
+        LOG.error("Error initializing input. " + input.getShortDescription(), e);
       }
     }
 
     if (isAnyInputTail) {
-      logger.info("Determining valid checkpoint folder");
+      LOG.info("Determining valid checkpoint folder");
       boolean isCheckPointFolderValid = false;
       // We need to keep track of the files we are reading.
-      String checkPointFolder = LogFeederUtil
-        .getStringProperty("logfeeder.checkpoint.folder");
-      if (checkPointFolder != null && !checkPointFolder.isEmpty()) {
+      String checkPointFolder = LogFeederUtil.getStringProperty("logfeeder.checkpoint.folder");
+      if (!StringUtils.isEmpty(checkPointFolder)) {
         checkPointFolderFile = new File(checkPointFolder);
         isCheckPointFolderValid = verifyCheckPointFolder(checkPointFolderFile);
       }
@@ -121,88 +116,72 @@ public class InputMgr {
         // Let's try home folder
         String userHome = LogFeederUtil.getStringProperty("user.home");
         if (userHome != null) {
-          checkPointFolderFile = new File(userHome,
-            checkPointSubFolderName);
-          logger.info("Checking if home folder can be used for checkpoints. Folder="
-            + checkPointFolderFile);
+          checkPointFolderFile = new File(userHome, CHECKPOINT_SUBFOLDER_NAME);
+          LOG.info("Checking if home folder can be used for checkpoints. Folder=" + checkPointFolderFile);
           isCheckPointFolderValid = verifyCheckPointFolder(checkPointFolderFile);
         }
       }
       if (!isCheckPointFolderValid) {
         // Let's use tmp folder
-        String tmpFolder = LogFeederUtil
-          .getStringProperty("java.io.tmpdir");
+        String tmpFolder = LogFeederUtil.getStringProperty("java.io.tmpdir");
         if (tmpFolder == null) {
           tmpFolder = "/tmp";
         }
-        checkPointFolderFile = new File(tmpFolder,
-          checkPointSubFolderName);
-        logger.info("Checking if tmps folder can be used for checkpoints. Folder="
-          + checkPointFolderFile);
+        checkPointFolderFile = new File(tmpFolder, CHECKPOINT_SUBFOLDER_NAME);
+        LOG.info("Checking if tmp folder can be used for checkpoints. Folder=" + checkPointFolderFile);
         isCheckPointFolderValid = verifyCheckPointFolder(checkPointFolderFile);
         if (isCheckPointFolderValid) {
-          logger.warn("Using tmp folder "
-            + checkPointFolderFile
-            + " to store check points. This is not recommended."
-            + "Please set logfeeder.checkpoint.folder property");
+          LOG.warn("Using tmp folder " + checkPointFolderFile + " to store check points. This is not recommended. " +
+              "Please set the logfeeder.checkpoint.folder property");
         }
       }
 
       if (isCheckPointFolderValid) {
-        logger.info("Using folder " + checkPointFolderFile
-          + " for storing checkpoints");
+        LOG.info("Using folder " + checkPointFolderFile + " for storing checkpoints");
       }
     }
 
   }
 
-  public File getCheckPointFolderFile() {
-    return checkPointFolderFile;
-  }
-
   private boolean verifyCheckPointFolder(File folderPathFile) {
     if (!folderPathFile.exists()) {
-      // Create the folder
       try {
         if (!folderPathFile.mkdir()) {
-          logger.warn("Error creating folder for check point. folder="
-            + folderPathFile);
+          LOG.warn("Error creating folder for check point. folder=" + folderPathFile);
         }
       } catch (Throwable t) {
-        logger.warn("Error creating folder for check point. folder="
-          + folderPathFile, t);
+        LOG.warn("Error creating folder for check point. folder=" + folderPathFile, t);
       }
     }
 
     if (folderPathFile.exists() && folderPathFile.isDirectory()) {
       // Let's check whether we can create a file
-      File testFile = new File(folderPathFile, UUID.randomUUID()
-        .toString());
+      File testFile = new File(folderPathFile, UUID.randomUUID().toString());
       try {
         testFile.createNewFile();
         return testFile.delete();
       } catch (IOException e) {
-        logger.warn(
-          "Couldn't create test file in "
-            + folderPathFile.getAbsolutePath()
-            + " for checkPoint", e);
+        LOG.warn("Couldn't create test file in " + folderPathFile.getAbsolutePath() + " for checkPoint", e);
       }
     }
     return false;
   }
 
+  public File getCheckPointFolderFile() {
+    return checkPointFolderFile;
+  }
+
   public void monitor() {
     for (Input input : inputList) {
       if (input.isReady()) {
         input.monitor();
       } else {
         if (input.isTail()) {
-          logger.info("Adding input to not ready list. Note, it is possible this component is not run on this host. So it might not be an issue. "
-            + input.getShortDescription());
+          LOG.info("Adding input to not ready list. Note, it is possible this component is not run on this host. " +
+              "So it might not be an issue. " + input.getShortDescription());
           notReadyList.add(input);
         } else {
-          logger.info("Input is not ready, so going to ignore it "
-            + input.getShortDescription());
+          LOG.info("Input is not ready, so going to ignore it " + input.getShortDescription());
         }
       }
     }
@@ -211,11 +190,10 @@ public class InputMgr {
        inputIsReadyMonitor = new Thread("InputIsReadyMonitor") {
         @Override
         public void run() {
-          logger.info("Going to monitor for these missing files: "
-            + notReadyList.toString());
+          LOG.info("Going to monitor for these missing files: " + notReadyList.toString());
           while (true) {
             if (isDrain) {
-              logger.info("Exiting missing file monitor.");
+              LOG.info("Exiting missing file monitor.");
               break;
             }
             try {
@@ -228,8 +206,7 @@ public class InputMgr {
                     iter.remove();
                   }
                 } catch (Throwable t) {
-                  logger.error("Error while enabling monitoring for input. "
-                    + input.getShortDescription());
+                  LOG.error("Error while enabling monitoring for input. " + input.getShortDescription(), t);
                 }
               }
               Thread.sleep(30 * 1000);
@@ -243,15 +220,15 @@ public class InputMgr {
     }
   }
 
-  public void addToNotReady(Input notReadyInput) {
+  void addToNotReady(Input notReadyInput) {
     notReadyList.add(notReadyInput);
   }
 
-  public void addMetricsContainers(List<MetricCount> metricsList) {
+  public void addMetricsContainers(List<MetricData> metricsList) {
     for (Input input : inputList) {
       input.addMetricsContainers(metricsList);
     }
-    filesCountMetric.count = getActiveFilesCount();
+    filesCountMetric.value = getActiveFilesCount();
     metricsList.add(filesCountMetric);
   }
 
@@ -260,76 +237,18 @@ public class InputMgr {
       input.logStat();
     }
 
-    filesCountMetric.count = getActiveFilesCount();
-    LogFeederUtil.logStatForMetric(filesCountMetric,
-      "Stat: Files Monitored Count", null);
-  }
-
-  public void close() {
-    for (Input input : inputList) {
-      try {
-        input.setDrain(true);
-      } catch (Throwable t) {
-        logger.error(
-          "Error while draining. input="
-            + input.getShortDescription(), t);
-      }
-    }
-    isDrain = true;
-
-    // Need to get this value from property
-    int iterations = 30;
-    int waitTimeMS = 1000;
-    int i = 0;
-    boolean allClosed = true;
-    for (i = 0; i < iterations; i++) {
-      allClosed = true;
-      for (Input input : inputList) {
-        if (!input.isClosed()) {
-          try {
-            allClosed = false;
-            logger.warn("Waiting for input to close. "
-              + input.getShortDescription() + ", "
-              + (iterations - i) + " more seconds");
-            Thread.sleep(waitTimeMS);
-          } catch (Throwable t) {
-            // Ignore
-          }
-        }
-      }
-      if (allClosed) {
-        break;
-      }
-    }
-    if (!allClosed) {
-      logger.warn("Some inputs were not closed. Iterations=" + i);
-      for (Input input : inputList) {
-        if (!input.isClosed()) {
-          logger.warn("Input not closed. Will ignore it."
-            + input.getShortDescription());
-        }
-      }
-    } else {
-      logger.info("All inputs are closed. Iterations=" + i);
-    }
-
+    filesCountMetric.value = getActiveFilesCount();
+    LogFeederUtil.logStatForMetric(filesCountMetric, "Stat: Files Monitored Count", "");
   }
 
-  public void checkInAll() {
-    for (Input input : inputList) {
-      input.checkIn();
-    }
-  }
 
   public void cleanCheckPointFiles() {
 
     if (checkPointFolderFile == null) {
-      logger.info("Will not clean checkPoint files. checkPointFolderFile="
-        + checkPointFolderFile);
+      LOG.info("Will not clean checkPoint files. checkPointFolderFile=" + checkPointFolderFile);
       return;
     }
-    logger.info("Cleaning checkPoint files. checkPointFolderFile="
-      + checkPointFolderFile.getAbsolutePath());
+    LOG.info("Cleaning checkPoint files. checkPointFolderFile=" + checkPointFolderFile.getAbsolutePath());
     try {
       // Loop over the check point files and if filePath is not present, then move to closed
       String searchPath = "*" + checkPointExtension;
@@ -337,103 +256,66 @@ public class InputMgr {
       File[] checkPointFiles = checkPointFolderFile.listFiles(fileFilter);
       int totalCheckFilesDeleted = 0;
       for (File checkPointFile : checkPointFiles) {
-        RandomAccessFile checkPointReader = null;
-        try {
-          checkPointReader = new RandomAccessFile(checkPointFile, "r");
-
+        try (RandomAccessFile checkPointReader = new RandomAccessFile(checkPointFile, "r")) {
           int contentSize = checkPointReader.readInt();
           byte b[] = new byte[contentSize];
           int readSize = checkPointReader.read(b, 0, contentSize);
           if (readSize != contentSize) {
-            logger.error("Couldn't read expected number of bytes from checkpoint file. expected="
-              + contentSize
-              + ", read="
-              + readSize
-              + ", checkPointFile=" + checkPointFile);
+            LOG.error("Couldn't read expected number of bytes from checkpoint file. expected=" + contentSize + ", read="
+              + readSize + ", checkPointFile=" + checkPointFile);
           } else {
-            // Create JSON string
             String jsonCheckPointStr = new String(b, 0, readSize);
-            Map<String, Object> jsonCheckPoint = LogFeederUtil
-              .toJSONObject(jsonCheckPointStr);
+            Map<String, Object> jsonCheckPoint = LogFeederUtil.toJSONObject(jsonCheckPointStr);
 
-            String logFilePath = (String) jsonCheckPoint
-              .get("file_path");
-            String logFileKey = (String) jsonCheckPoint
-              .get("file_key");
+            String logFilePath = (String) jsonCheckPoint.get("file_path");
+            String logFileKey = (String) jsonCheckPoint.get("file_key");
             if (logFilePath != null && logFileKey != null) {
               boolean deleteCheckPointFile = false;
               File logFile = new File(logFilePath);
               if (logFile.exists()) {
-                Object fileKeyObj = InputFile
-                  .getFileKey(logFile);
-                String fileBase64 = Base64
-                  .byteArrayToBase64(fileKeyObj
-                    .toString().getBytes());
+                Object fileKeyObj = FileUtil.getFileKey(logFile);
+                String fileBase64 = Base64.byteArrayToBase64(fileKeyObj.toString().getBytes());
                 if (!logFileKey.equals(fileBase64)) {
                   deleteCheckPointFile = true;
-                  logger.info("CheckPoint clean: File key has changed. old="
-                    + logFileKey
-                    + ", new="
-                    + fileBase64
-                    + ", filePath="
-                    + logFilePath
-                    + ", checkPointFile="
-                    + checkPointFile.getAbsolutePath());
+                  LOG.info("CheckPoint clean: File key has changed. old=" + logFileKey + ", new=" + fileBase64 + ", filePath=" +
+                      logFilePath + ", checkPointFile=" + checkPointFile.getAbsolutePath());
                 }
               } else {
-                logger.info("CheckPoint clean: Log file doesn't exist. filePath="
-                  + logFilePath
-                  + ", checkPointFile="
-                  + checkPointFile.getAbsolutePath());
+                LOG.info("CheckPoint clean: Log file doesn't exist. filePath=" + logFilePath + ", checkPointFile=" +
+                    checkPointFile.getAbsolutePath());
                 deleteCheckPointFile = true;
               }
               if (deleteCheckPointFile) {
-                logger.info("Deleting CheckPoint file="
-                  + checkPointFile.getAbsolutePath()
-                  + ", logFile=" + logFilePath);
+                LOG.info("Deleting CheckPoint file=" + checkPointFile.getAbsolutePath() + ", logFile=" + logFilePath);
                 checkPointFile.delete();
                 totalCheckFilesDeleted++;
               }
             }
           }
         } catch (EOFException eof) {
-          logger.warn("Caught EOFException. Ignoring reading existing checkPoint file. "
-            + checkPointFile);
+          LOG.warn("Caught EOFException. Ignoring reading existing checkPoint file. " + checkPointFile);
         } catch (Throwable t) {
-          logger.error("Error while checking checkPoint file. "
-            + checkPointFile, t);
-        } finally {
-          if (checkPointReader != null) {
-            try {
-              checkPointReader.close();
-            } catch (Throwable t) {
-              logger.error("Error closing checkPoint file. "
-                + checkPointFile, t);
-            }
-          }
+          LOG.error("Error while checking checkPoint file. " + checkPointFile, t);
         }
       }
-      logger.info("Deleted " + totalCheckFilesDeleted
-        + " checkPoint file(s). checkPointFolderFile="
-        + checkPointFolderFile.getAbsolutePath());
+      LOG.info("Deleted " + totalCheckFilesDeleted + " checkPoint file(s). checkPointFolderFile=" +
+          checkPointFolderFile.getAbsolutePath());
 
     } catch (Throwable t) {
-      logger.error("Error while cleaning checkPointFiles", t);
+      LOG.error("Error while cleaning checkPointFiles", t);
     }
   }
 
   public void waitOnAllInputs() {
     //wait on inputs
-    if (inputList != null) {
-      for (Input input : inputList) {
-        if (input != null) {
-          Thread inputThread = input.getThread();
-          if (inputThread != null) {
-            try {
-              inputThread.join();
-            } catch (InterruptedException e) {
-              // ignore
-            }
+    for (Input input : inputList) {
+      if (input != null) {
+        Thread inputThread = input.getThread();
+        if (inputThread != null) {
+          try {
+            inputThread.join();
+          } catch (InterruptedException e) {
+            // ignore
           }
         }
       }
@@ -448,4 +330,50 @@ public class InputMgr {
       }
     }
   }
+
+  public void checkInAll() {
+    for (Input input : inputList) {
+      input.lastCheckIn();
+    }
+  }
+
+  public void close() {
+    for (Input input : inputList) {
+      try {
+        input.setDrain(true);
+      } catch (Throwable t) {
+        LOG.error("Error while draining. input=" + input.getShortDescription(), t);
+      }
+    }
+    isDrain = true;
+
+    // Need to get this value from property
+    int iterations = 30;
+    int waitTimeMS = 1000;
+    for (int i = 0; i < iterations; i++) {
+      boolean allClosed = true;
+      for (Input input : inputList) {
+        if (!input.isClosed()) {
+          try {
+            allClosed = false;
+            LOG.warn("Waiting for input to close. " + input.getShortDescription() + ", " + (iterations - i) + " more seconds");
+            Thread.sleep(waitTimeMS);
+          } catch (Throwable t) {
+            // Ignore
+          }
+        }
+      }
+      if (allClosed) {
+        LOG.info("All inputs are closed. Iterations=" + i);
+        return;
+      }
+    }
+    
+    LOG.warn("Some inputs were not closed after " + iterations + " iterations");
+    for (Input input : inputList) {
+      if (!input.isClosed()) {
+        LOG.warn("Input not closed. Will ignore it. " + input.getShortDescription());
+      }
+    }
+  }
 }

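cleanCheckPointFiles() above relies on the on-disk checkpoint layout: a four-byte length written by RandomAccessFile.writeInt(), followed by that many bytes of JSON carrying file_path, file_key and line_number. A sketch of both sides of that contract, with class and method names invented for illustration:

import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;

public class CheckpointFileSketch {
  // Writer side, as in the input classes: rewind, length prefix, payload.
  // Using String.length() as the byte count matches the original code and
  // is only safe while the JSON stays single-byte-encoded.
  static void write(File cpFile, String json) throws IOException {
    try (RandomAccessFile raf = new RandomAccessFile(cpFile, "rw")) {
      raf.seek(0);
      raf.writeInt(json.length());
      raf.write(json.getBytes());
    }
  }

  // Reader side, as in cleanCheckPointFiles(): length prefix, then exactly
  // that many bytes of JSON.
  static String read(File cpFile) throws IOException {
    try (RandomAccessFile raf = new RandomAccessFile(cpFile, "r")) {
      int contentSize = raf.readInt();
      byte[] b = new byte[contentSize];
      int readSize = raf.read(b, 0, contentSize);
      if (readSize != contentSize) {
        throw new IOException("Truncated checkpoint. expected=" + contentSize + ", read=" + readSize);
      }
      return new String(b, 0, readSize);
    }
  }
}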
+ 11 - 6
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputMarker.java

@@ -23,13 +23,18 @@ package org.apache.ambari.logfeeder.input;
  * This file contains the file inode and the line number of the log currently being read
  */
 public class InputMarker {
-  public int lineNumber = 0;
-  public Input input;
-  public String base64FileKey = null;
-
+  public final Input input;
+  public final String base64FileKey;
+  public final int lineNumber;
+  
+  public InputMarker(Input input, String base64FileKey, int lineNumber) {
+    this.input = input;
+    this.base64FileKey = base64FileKey;
+    this.lineNumber = lineNumber;
+  }
+  
   @Override
   public String toString() {
-    return "InputMarker [lineNumber=" + lineNumber + ", input="
-      + input.getShortDescription() + "]";
+    return "InputMarker [lineNumber=" + lineNumber + ", input=" + input.getShortDescription() + "]";
   }
 }

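With its fields now final, InputMarker is populated once at construction instead of by scattered field writes, so every call site supplies the full state up front. Shapes taken straight from the hunks above:

// In the file read loop:
InputMarker marker = new InputMarker(this, base64FileKey, lineCount);
outputLine(line, marker);

// In copyFiles(), where there is no line position, the absent state is
// now explicit rather than left at mutable defaults:
InputMarker marker = new InputMarker(this, null, 0);
outputManager.copyFile(file, marker);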
+ 27 - 397
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputS3File.java

@@ -19,201 +19,57 @@
 package org.apache.ambari.logfeeder.input;
 
 import java.io.BufferedReader;
-import java.io.EOFException;
 import java.io.File;
-import java.io.FileNotFoundException;
 import java.io.IOException;
-import java.io.RandomAccessFile;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.Map;
 
-import org.apache.ambari.logfeeder.util.LogFeederUtil;
 import org.apache.ambari.logfeeder.util.S3Util;
-import org.apache.commons.lang3.StringUtils;
-import org.apache.log4j.Level;
-import org.apache.log4j.Logger;
+import org.apache.commons.lang.ArrayUtils;
 import org.apache.solr.common.util.Base64;
 
-public class InputS3File extends Input {
-  private static final Logger logger = Logger.getLogger(InputS3File.class);
-
-  private String logPath = null;
-  private boolean isStartFromBegining = true;
-
-  private boolean isReady = false;
-  private String[] s3LogPathFiles = null;
-  private Object fileKey = null;
-  private String base64FileKey = null;
-
-  private boolean isRolledOver = false;
-  private boolean addWildCard = false;
-
-  private long lastCheckPointTimeMS = 0;
-  private int checkPointIntervalMS = 5 * 1000; // 5 seconds
-  private RandomAccessFile checkPointWriter = null;
-  private Map<String, Object> jsonCheckPoint = null;
-
-  private File checkPointFile = null;
-
-  private InputMarker lastCheckPointInputMarker = null;
-
-  private String checkPointExtension = ".cp";
-
-
-  @Override
-  public void init() throws Exception {
-    logger.info("init() called");
-    statMetric.metricsName = "input.files.read_lines";
-    readBytesMetric.metricsName = "input.files.read_bytes";
-    checkPointExtension = LogFeederUtil.getStringProperty(
-        "logfeeder.checkpoint.extension", checkPointExtension);
-
-    // Let's close the file and set it to true after we start monitoring it
-    setClosed(true);
-    logPath = getStringValue("path");
-    tail = getBooleanValue("tail", tail);
-    addWildCard = getBooleanValue("add_wild_card", addWildCard);
-    checkPointIntervalMS = getIntValue("checkpoint.interval.ms",
-        checkPointIntervalMS);
-    if (logPath == null || logPath.isEmpty()) {
-      logger.error("path is empty for file input. " + getShortDescription());
-      return;
-    }
-
-    String startPosition = getStringValue("start_position");
-    if (StringUtils.isEmpty(startPosition)
-        || startPosition.equalsIgnoreCase("beginning")
-        || startPosition.equalsIgnoreCase("begining")) {
-      isStartFromBegining = true;
-    }
-
-    if (!tail) {
-      // start position end doesn't apply if we are not tailing
-      isStartFromBegining = true;
-    }
-
-    setFilePath(logPath);
-    boolean isFileReady = isReady();
-
-    logger.info("File to monitor " + logPath + ", tail=" + tail
-        + ", addWildCard=" + addWildCard + ", isReady=" + isFileReady);
-
-    super.init();
-  }
+public class InputS3File extends AbstractInputFile {
 
   @Override
   public boolean isReady() {
     if (!isReady) {
       // Let's try to check whether the file is available
-      s3LogPathFiles = getActualFiles(logPath);
-      if (s3LogPathFiles != null && s3LogPathFiles.length > 0) {
-        if (isTail() && s3LogPathFiles.length > 1) {
-          logger.warn("Found multiple files (" + s3LogPathFiles.length
-              + ") for the file filter " + filePath
-              + ". Will use only the first one. Using " + s3LogPathFiles[0]);
+      logFiles = getActualFiles(logPath);
+      if (!ArrayUtils.isEmpty(logFiles)) {
+        if (tail && logFiles.length > 1) {
+          LOG.warn("Found multiple files (" + logFiles.length + ") for the file filter " + filePath +
+              ". Will use only the first one. Using " + logFiles[0].getAbsolutePath());
         }
-        logger.info("File filter " + filePath + " expanded to "
-            + s3LogPathFiles[0]);
+        LOG.info("File filter " + filePath + " expanded to " + logFiles[0].getAbsolutePath());
         isReady = true;
       } else {
-        logger.debug(logPath + " file doesn't exist. Ignoring for now");
+        LOG.debug(logPath + " file doesn't exist. Ignoring for now");
       }
     }
     return isReady;
   }
 
-  private String[] getActualFiles(String searchPath) {
+  private File[] getActualFiles(String searchPath) {
     // TODO search file on s3
-    return new String[] { searchPath };
-  }
-
-  @Override
-  synchronized public void checkIn(InputMarker inputMarker) {
-    super.checkIn(inputMarker);
-    if (checkPointWriter != null) {
-      try {
-        int lineNumber = LogFeederUtil.objectToInt(
-            jsonCheckPoint.get("line_number"), 0, "line_number");
-        if (lineNumber > inputMarker.lineNumber) {
-          // Already wrote higher line number for this input
-          return;
-        }
-        // If interval is greater than last checkPoint time, then write
-        long currMS = System.currentTimeMillis();
-        if (!isClosed()
-            && (currMS - lastCheckPointTimeMS) < checkPointIntervalMS) {
-          // Let's save this one so we can update the check point file
-          // on flush
-          lastCheckPointInputMarker = inputMarker;
-          return;
-        }
-        lastCheckPointTimeMS = currMS;
-
-        jsonCheckPoint.put("line_number", ""
-            + new Integer(inputMarker.lineNumber));
-        jsonCheckPoint.put("last_write_time_ms", "" + new Long(currMS));
-        jsonCheckPoint.put("last_write_time_date", new Date());
-
-        String jsonStr = LogFeederUtil.getGson().toJson(jsonCheckPoint);
-
-        // Let's rewind
-        checkPointWriter.seek(0);
-        checkPointWriter.writeInt(jsonStr.length());
-        checkPointWriter.write(jsonStr.getBytes());
-
-        if (isClosed()) {
-          final String LOG_MESSAGE_KEY = this.getClass().getSimpleName()
-              + "_FINAL_CHECKIN";
-          LogFeederUtil.logErrorMessageByInterval(LOG_MESSAGE_KEY,
-              "Wrote final checkPoint, input=" + getShortDescription()
-                  + ", checkPointFile=" + checkPointFile.getAbsolutePath()
-                  + ", checkPoint=" + jsonStr, null, logger, Level.INFO);
-        }
-      } catch (Throwable t) {
-        final String LOG_MESSAGE_KEY = this.getClass().getSimpleName()
-            + "_CHECKIN_EXCEPTION";
-        LogFeederUtil.logErrorMessageByInterval(LOG_MESSAGE_KEY,
-            "Caught exception checkIn. , input=" + getShortDescription(), t,
-            logger, Level.ERROR);
-      }
-    }
-
-  }
-
-  @Override
-  public void checkIn() {
-    super.checkIn();
-    if (lastCheckPointInputMarker != null) {
-      checkIn(lastCheckPointInputMarker);
-    }
-  }
-
-  @Override
-  public void rollOver() {
-    logger.info("Marking this input file for rollover. "
-        + getShortDescription());
-    isRolledOver = true;
+    return new File[] { new File(searchPath) };
   }
 
   @Override
   void start() throws Exception {
-    if (s3LogPathFiles == null || s3LogPathFiles.length == 0) {
+    if (ArrayUtils.isEmpty(logFiles)) {
       return;
     }
 
-    if (isTail()) {
-      processFile(s3LogPathFiles[0]);
+    if (tail) {
+      processFile(logFiles[0]);
     } else {
-      for (String s3FilePath : s3LogPathFiles) {
+      for (File s3FilePath : logFiles) {
         try {
           processFile(s3FilePath);
           if (isClosed() || isDrain()) {
-            logger.info("isClosed or isDrain. Now breaking loop.");
+            LOG.info("isClosed or isDrain. Now breaking loop.");
             break;
           }
         } catch (Throwable t) {
-          logger.error("Error processing file=" + s3FilePath, t);
+          LOG.error("Error processing file=" + s3FilePath, t);
         }
       }
     }
@@ -221,244 +77,18 @@ public class InputS3File extends Input {
   }
 
   @Override
-  public void close() {
-    super.close();
-    logger.info("close() calling checkPoint checkIn(). "
-        + getShortDescription());
-    checkIn();
-  }
-
-  private void processFile(String logPathFile) throws FileNotFoundException,
-      IOException {
-    logger.info("Monitoring logPath=" + logPath + ", logPathFile="
-        + logPathFile);
-    BufferedReader br = null;
-    checkPointFile = null;
-    checkPointWriter = null;
-    jsonCheckPoint = null;
-    int resumeFromLineNumber = 0;
-
-    int lineCount = 0;
-    try {
-      setFilePath(logPathFile);
-      String s3AccessKey = getStringValue("s3_access_key");
-      String s3SecretKey = getStringValue("s3_secret_key");
-      br = S3Util.INSTANCE.getReader(logPathFile,s3AccessKey,s3SecretKey);
-      if(br==null){
-        //log err
-        return;
-      }
-      
-      // Whether to send to output from the beginning.
-      boolean resume = isStartFromBegining;
-
-      // Seems FileWatch is not reliable, so let's only use file key comparison
-      fileKey = getFileKey(logPathFile);
-      base64FileKey = Base64.byteArrayToBase64(fileKey.toString().getBytes());
-      logger.info("fileKey=" + fileKey + ", base64=" + base64FileKey + ". "
-          + getShortDescription());
-
-      if (isTail()) {
-        try {
-          // Let's see if there is a checkpoint for this file
-          logger.info("Checking existing checkpoint file. "
-              + getShortDescription());
-
-          String fileBase64 = Base64.byteArrayToBase64(fileKey.toString()
-              .getBytes());
-          String checkPointFileName = fileBase64 + checkPointExtension;
-          File checkPointFolder = inputMgr.getCheckPointFolderFile();
-          checkPointFile = new File(checkPointFolder, checkPointFileName);
-          checkPointWriter = new RandomAccessFile(checkPointFile, "rw");
-
-          try {
-            int contentSize = checkPointWriter.readInt();
-            byte b[] = new byte[contentSize];
-            int readSize = checkPointWriter.read(b, 0, contentSize);
-            if (readSize != contentSize) {
-              logger
-                  .error("Couldn't read expected number of bytes from checkpoint file. expected="
-                      + contentSize
-                      + ", read="
-                      + readSize
-                      + ", checkPointFile="
-                      + checkPointFile
-                      + ", input="
-                      + getShortDescription());
-            } else {
-              String jsonCheckPointStr = new String(b, 0, readSize);
-              jsonCheckPoint = LogFeederUtil.toJSONObject(jsonCheckPointStr);
-
-              resumeFromLineNumber = LogFeederUtil.objectToInt(
-                  jsonCheckPoint.get("line_number"), 0, "line_number");
-
-              if (resumeFromLineNumber > 0) {
-                // Let's read from last line read
-                resume = false;
-              }
-              logger.info("CheckPoint. checkPointFile=" + checkPointFile
-                  + ", json=" + jsonCheckPointStr + ", resumeFromLineNumber="
-                  + resumeFromLineNumber + ", resume=" + resume);
-            }
-          } catch (EOFException eofEx) {
-            logger.info("EOFException. Will reset checkpoint file "
-                + checkPointFile.getAbsolutePath() + " for "
-                + getShortDescription());
-          }
-          if (jsonCheckPoint == null) {
-            // This seems to be first time, so creating the initial
-            // checkPoint object
-            jsonCheckPoint = new HashMap<String, Object>();
-            jsonCheckPoint.put("file_path", filePath);
-            jsonCheckPoint.put("file_key", fileBase64);
-          }
-
-        } catch (Throwable t) {
-          logger.error(
-              "Error while configuring checkpoint file. Will reset file. checkPointFile="
-                  + checkPointFile, t);
-        }
-      }
-
-      setClosed(false);
-      int sleepStep = 2;
-      int sleepIteration = 0;
-      while (true) {
-        try {
-          if (isDrain()) {
-            break;
-          }
-
-          String line = br.readLine();
-          if (line == null) {
-            if (!resume) {
-              resume = true;
-            }
-            sleepIteration++;
-            try {
-              // Since FileWatch service is not reliable, we will check
-              // file inode every n seconds after no write
-              if (sleepIteration > 4) {
-                Object newFileKey = getFileKey(logPathFile);
-                if (newFileKey != null) {
-                  if (fileKey == null || !newFileKey.equals(fileKey)) {
-                    logger
-                        .info("File key is different. Calling rollover. oldKey="
-                            + fileKey
-                            + ", newKey="
-                            + newFileKey
-                            + ". "
-                            + getShortDescription());
-                    // File has rotated.
-                    rollOver();
-                  }
-                }
-              }
-              // Flush on the second iteration
-              if (!tail && sleepIteration >= 2) {
-                logger.info("End of file. Done with filePath=" + logPathFile
-                    + ", lineCount=" + lineCount);
-                flush();
-                break;
-              } else if (sleepIteration == 2) {
-                flush();
-              } else if (sleepIteration >= 2) {
-                if (isRolledOver) {
-                  isRolledOver = false;
-                  // Close existing file
-                  try {
-                    logger
-                        .info("File is rolled over. Closing current open file."
-                            + getShortDescription() + ", lineCount="
-                            + lineCount);
-                    br.close();
-                  } catch (Exception ex) {
-                    logger.error("Error closing file" + getShortDescription());
-                    break;
-                  }
-                  try {
-                    // Open new file
-                    logger.info("Opening new rolled over file."
-                        + getShortDescription());
-                    br = S3Util.INSTANCE.getReader(logPathFile,s3AccessKey,s3SecretKey);
-                    lineCount = 0;
-                    fileKey = getFileKey(logPathFile);
-                    base64FileKey = Base64.byteArrayToBase64(fileKey.toString()
-                        .getBytes());
-                    logger.info("fileKey=" + fileKey + ", base64="
-                        + base64FileKey + ", " + getShortDescription());
-                  } catch (Exception ex) {
-                    logger.error("Error opening rolled over file. "
-                        + getShortDescription());
-                    // Let's add this to monitoring and exit this thread
-                    logger.info("Added input to not ready list."
-                        + getShortDescription());
-                    isReady = false;
-                    inputMgr.addToNotReady(this);
-                    break;
-                  }
-                  logger.info("File is successfully rolled over. "
-                      + getShortDescription());
-                  continue;
-                }
-              }
-              Thread.sleep(sleepStep * 1000);
-              sleepStep = (sleepStep * 2);
-              sleepStep = sleepStep > 10 ? 10 : sleepStep;
-            } catch (InterruptedException e) {
-              logger.info("Thread interrupted." + getShortDescription());
-            }
-          } else {
-            lineCount++;
-            sleepStep = 1;
-            sleepIteration = 0;
-
-            if (!resume && lineCount > resumeFromLineNumber) {
-              logger.info("Resuming to read from last line. lineCount="
-                  + lineCount + ", input=" + getShortDescription());
-              resume = true;
-            }
-            if (resume) {
-              InputMarker marker = new InputMarker();
-              marker.base64FileKey = base64FileKey;
-              marker.input = this;
-              marker.lineNumber = lineCount;
-              outputLine(line, marker);
-            }
-          }
-        } catch (Throwable t) {
-          final String LOG_MESSAGE_KEY = this.getClass().getSimpleName()
-              + "_READ_LOOP_EXCEPTION";
-          LogFeederUtil.logErrorMessageByInterval(LOG_MESSAGE_KEY,
-              "Caught exception in read loop. lineNumber=" + lineCount
-                  + ", input=" + getShortDescription(), t, logger, Level.ERROR);
-
-        }
-      }
-    } finally {
-      if (br != null) {
-        logger.info("Closing reader." + getShortDescription() + ", lineCount="
-            + lineCount);
-        try {
-          br.close();
-        } catch (Throwable t) {
-          // ignore
-        }
-      }
-    }
-  }
-
-  static public Object getFileKey(String s3FilePath) {
-    return s3FilePath.toString();
+  protected BufferedReader openLogFile(File logPathFile) throws IOException {
+    String s3AccessKey = getStringValue("s3_access_key");
+    String s3SecretKey = getStringValue("s3_secret_key");
+    BufferedReader br = S3Util.getReader(logPathFile.getPath(), s3AccessKey, s3SecretKey);
+    fileKey = getFileKey(logPathFile);
+    base64FileKey = Base64.byteArrayToBase64(fileKey.toString().getBytes());
+    LOG.info("fileKey=" + fileKey + ", base64=" + base64FileKey + ". " + getShortDescription());
+    return br;
   }
 
   @Override
-  public String getShortDescription() {
-    return "input:source="
-        + getStringValue("source")
-        + ", path="
-        + (s3LogPathFiles != null && s3LogPathFiles.length > 0 ? s3LogPathFiles[0]
-            : getStringValue("path"));
+  protected Object getFileKey(File logFile) {
+    return logFile.getPath();
   }
-
 }

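After this rewrite only openLogFile() and getFileKey() stay S3-specific; checkpointing, the read loop and rollover handling move into the new AbstractInputFile shared with InputFile. That class's 319-line body is not part of this excerpt, so the outline below is inferred from the overrides and field references visible here, not copied from the commit:

import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;

// Inferred shape of the template: subclasses supply the byte source and
// the file identity, the base class runs the common tail/checkpoint loop.
public abstract class AbstractInputFile extends Input {
  protected File[] logFiles;
  protected String logPath;
  protected Object fileKey;
  protected String base64FileKey;
  protected boolean isReady;

  // Local reader (possibly gzipped) for InputFile, S3 stream for InputS3File.
  protected abstract BufferedReader openLogFile(File logFile) throws IOException;

  // Inode-based key for local files, plain path for S3 objects.
  protected abstract Object getFileKey(File logFile);
}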
+ 24 - 16
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputSimulate.java

@@ -18,7 +18,7 @@
  */
 package org.apache.ambari.logfeeder.input;
 
-import java.net.Inet4Address;
+import java.net.InetAddress;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Date;
@@ -66,7 +66,7 @@ public class InputSimulate extends Input {
     
     Filter filter = new FilterJSON();
     filter.setInput(this);
-    setFirstFilter(filter);
+    addFilter(filter);
   }
   
   private List<String> getSimulatedLogTypes() {
@@ -88,23 +88,18 @@ public class InputSimulate extends Input {
     
     return LOG_TEXT_PATTERN.replaceAll("<LOG_MESSAGE_PATTERN>", logMessagePattern);
   }
-  
-  @Override
-  public String getNameForThread() {
-    return "Simulated input";
-  }
 
   @Override
-  public String getShortDescription() {
-    return "Simulated input";
+  public boolean isReady() {
+    return true;
   }
-  
+
   @Override
   void start() throws Exception {
     if (types.isEmpty())
       return;
     
-    getFirstFilter().setOutputMgr(outputMgr);
+    getFirstFilter().setOutputManager(outputManager);
     while (true) {
       String type = imitateRandomLogFile();
       
@@ -129,10 +124,7 @@ public class InputSimulate extends Input {
   }
 
   private InputMarker getInputMarker(String type) throws Exception {
-    InputMarker marker = new InputMarker();
-    marker.input = this;
-    marker.lineNumber = getLineNumber(type);
-    marker.base64FileKey = getBase64FileKey();
+    InputMarker marker = new InputMarker(this, getBase64FileKey(), getLineNumber(type));
     return marker;
   }
 
@@ -147,7 +139,7 @@ public class InputSimulate extends Input {
   }
 
   private String getBase64FileKey() throws Exception {
-    String fileKey = Inet4Address.getLocalHost().getHostAddress() + "|" + filePath;
+    String fileKey = InetAddress.getLocalHost().getHostAddress() + "|" + filePath;
     return Base64.byteArrayToBase64(fileKey.getBytes());
   }
 
@@ -155,4 +147,20 @@ public class InputSimulate extends Input {
     Date d = new Date();
     return String.format(logText, d.getTime(), level, marker.lineNumber);
   }
+
+  @Override
+  public void checkIn(InputMarker inputMarker) {}
+
+  @Override
+  public void lastCheckIn() {}
+  
+  @Override
+  public String getNameForThread() {
+    return "Simulated input";
+  }
+
+  @Override
+  public String getShortDescription() {
+    return "Simulated input";
+  }
 }

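A simulated input has no backing file, so its marker key is synthesized from the local host address and the configured path; swapping Inet4Address for InetAddress merely drops an unnecessary IPv4 restriction. A self-contained sketch of the derivation, with the class name invented for illustration:

import java.net.InetAddress;

import org.apache.solr.common.util.Base64;

public class SimulatedKeySketch {
  public static String base64FileKey(String filePath) throws Exception {
    // e.g. "192.168.1.10|/var/log/simulated.log", then Base64-encoded
    String fileKey = InetAddress.getLocalHost().getHostAddress() + "|" + filePath;
    return Base64.byteArrayToBase64(fileKey.getBytes());
  }
}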
+ 5 - 18
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/reader/GZIPReader.java

@@ -18,7 +18,6 @@
  */
 package org.apache.ambari.logfeeder.input.reader;
 
-import java.io.File;
 import java.io.FileInputStream;
 import java.io.FileNotFoundException;
 import java.io.IOException;
@@ -30,15 +29,11 @@ import org.apache.log4j.Logger;
 
 class GZIPReader extends InputStreamReader {
 
-  private static Logger logger = Logger.getLogger(GZIPReader.class);
+  private static final Logger LOG = Logger.getLogger(GZIPReader.class);
 
   GZIPReader(String fileName) throws FileNotFoundException {
     super(getStream(fileName));
-    logger.info("Created GZIPReader for file : " + fileName);
-  }
-
-  GZIPReader(File file) throws FileNotFoundException {
-    super(getStream(file.getName()));
+    LOG.info("Created GZIPReader for file: " + fileName);
   }
 
   private static InputStream getStream(String fileName) {
@@ -48,7 +43,7 @@ class GZIPReader extends InputStreamReader {
       fileStream = new FileInputStream(fileName);
       gzipStream = new GZIPInputStream(fileStream);
     } catch (Exception e) {
-      logger.error(e, e.getCause());
+      LOG.error(e, e.getCause());
     }
     return gzipStream;
   }
@@ -58,21 +53,13 @@ class GZIPReader extends InputStreamReader {
    */
   static boolean isValidFile(String fileName) {
     // TODO make it generic and put in factory itself
-    InputStream is = null;
-    try {
-      is = new FileInputStream(fileName);
+    
+    try (InputStream is = new FileInputStream(fileName)) {
       byte[] signature = new byte[2];
       int nread = is.read(signature); // read the gzip signature
       return nread == 2 && signature[0] == (byte) 0x1f && signature[1] == (byte) 0x8b;
     } catch (IOException e) {
       return false;
-    } finally {
-      if (is != null) {
-        try {
-          is.close();
-        } catch (IOException e) {
-        }
-      }
     }
   }
 }

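isValidFile() now leans on try-with-resources instead of a manual finally block; the behavior is unchanged. The two-byte probe works because every gzip stream starts with the fixed magic bytes 0x1f 0x8b (RFC 1952), so a quick way to exercise it is against a freshly written gzip file (path invented for illustration):

import java.io.FileOutputStream;
import java.util.zip.GZIPOutputStream;

public class GzipProbeDemo {
  public static void main(String[] args) throws Exception {
    String path = "/tmp/probe.gz"; // hypothetical scratch file
    try (GZIPOutputStream out = new GZIPOutputStream(new FileOutputStream(path))) {
      out.write("hello".getBytes());
    }
    // The first two bytes on disk are now 0x1f 0x8b, which is exactly
    // what GZIPReader.isValidFile() checks for.
  }
}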
+ 3 - 5
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/reader/LogsearchReaderFactory.java

@@ -27,17 +27,15 @@ import org.apache.log4j.Logger;
 
 public enum LogsearchReaderFactory {
   INSTANCE;
-  private static Logger logger = Logger
-    .getLogger(LogsearchReaderFactory.class);
+  private static final Logger LOG = Logger.getLogger(LogsearchReaderFactory.class);
 
   public Reader getReader(File file) throws FileNotFoundException {
-    logger.debug("Inside reader factory for file:" + file);
+    LOG.debug("Inside reader factory for file: " + file);
     if (GZIPReader.isValidFile(file.getAbsolutePath())) {
-      logger.info("Reading file " + file + " as gzip file");
+      LOG.info("Reading file " + file + " as gzip file");
       return new GZIPReader(file.getAbsolutePath());
     } else {
       return new FileReader(file);
     }
   }
-
 }

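Callers never see the gzip branch; the factory decides per file and hands back a plain Reader either way. A usage sketch (path invented for illustration):

import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;

import org.apache.ambari.logfeeder.input.reader.LogsearchReaderFactory;

public class ReaderFactoryUsage {
  public static void main(String[] args) throws IOException {
    File logFile = new File("/var/log/app.log.gz"); // hypothetical path
    // The same call works for plain text; gzip is detected by signature.
    try (BufferedReader br = new BufferedReader(LogsearchReaderFactory.INSTANCE.getReader(logFile))) {
      String line;
      while ((line = br.readLine()) != null) {
        System.out.println(line); // a real input would feed its filter chain
      }
    }
  }
}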
+ 0 - 194
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/FetchConfigFromSolr.java

@@ -1,194 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.ambari.logfeeder.logconfig;
-
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.List;
-import java.util.TimeZone;
-
-import org.apache.ambari.logfeeder.util.LogFeederUtil;
-import org.apache.ambari.logfeeder.util.SolrUtil;
-import org.apache.ambari.logfeeder.view.VLogfeederFilter;
-import org.apache.ambari.logfeeder.view.VLogfeederFilterWrapper;
-import org.apache.log4j.Logger;
-
-public class FetchConfigFromSolr extends Thread {
-  private static Logger logger = Logger.getLogger(FetchConfigFromSolr.class);
-  private static VLogfeederFilterWrapper logfeederFilterWrapper = null;
-  private static int solrConfigInterval = 5;// 5 sec;
-  private static long delay;
-  private static String endTimeDateFormat = "yyyy-MM-dd'T'HH:mm:ss.SSS";//2016-04-05T04:30:00.000Z
-  private static String sysTimeZone = "GMT";
-
-  FetchConfigFromSolr(boolean isDaemon) {
-    this.setName(this.getClass().getSimpleName());
-    this.setDaemon(isDaemon);
-  }
-
-  @Override
-  public void run() {
-    String zkConnectString = LogFeederUtil.getStringProperty("logfeeder.solr.zk_connect_string");
-    String solrUrl = LogFeederUtil.getStringProperty("logfeeder.solr.url");
-    if ((zkConnectString == null || zkConnectString.trim().length() == 0 )
-        && (solrUrl == null || solrUrl.trim().length() == 0)) {
-      logger.warn("Neither Solr ZK Connect String nor solr Uril for UserConfig/History is set." +
-          "Won't look for level configuration from Solr.");
-      return;
-    }
-    solrConfigInterval = LogFeederUtil.getIntProperty("logfeeder.solr.config.interval", solrConfigInterval);
-    delay = 1000 * solrConfigInterval;
-    do {
-      logger.debug("Updating config from solr after every " + solrConfigInterval + " sec.");
-      pullConfigFromSolr();
-      try {
-        Thread.sleep(delay);
-      } catch (InterruptedException e) {
-        logger.error(e.getLocalizedMessage(), e.getCause());
-      }
-    } while (true);
-  }
-
-  private synchronized void pullConfigFromSolr() {
-    SolrUtil solrUtil = SolrUtil.getInstance();
-    if(solrUtil!=null){
-      HashMap<String, Object> configDocMap = solrUtil.getConfigDoc();
-      if (configDocMap != null) {
-        String configJson = (String) configDocMap.get(LogFeederConstants.VALUES);
-        if (configJson != null) {
-          logfeederFilterWrapper = LogFeederUtil.getGson().fromJson(configJson, VLogfeederFilterWrapper.class);
-        }
-      }
-    }
-  }
-
-  private static boolean isFilterExpired(VLogfeederFilter logfeederFilter) {
-    boolean isFilterExpired = false;// default is false
-    if (logfeederFilter != null) {
-      Date filterEndDate = parseFilterExpireDate(logfeederFilter);
-      if (filterEndDate != null) {
-        Date currentDate = getCurrentDate();
-        if (currentDate.compareTo(filterEndDate) >= 0) {
-          logger.debug("Filter for  Component :" + logfeederFilter.getLabel() + " and Hosts :"
-            + listToStr(logfeederFilter.getHosts()) + "Filter is expired because of filter endTime : "
-            + dateToStr(filterEndDate) + " is older than currentTime :" + dateToStr(currentDate));
-          isFilterExpired = true;
-        }
-      }
-    }
-    return isFilterExpired;
-  }
-
-  private static String dateToStr(Date date) {
-    if (date == null) {
-      return "";
-    }
-    SimpleDateFormat formatter = new SimpleDateFormat(endTimeDateFormat);
-    TimeZone timeZone = TimeZone.getTimeZone(sysTimeZone);
-    formatter.setTimeZone(timeZone);
-    return formatter.format(date);
-  }
-
-  private static Date parseFilterExpireDate(VLogfeederFilter vLogfeederFilter) {
-    String expiryTime = vLogfeederFilter.getExpiryTime();
-    if (expiryTime != null && !expiryTime.isEmpty()) {
-      SimpleDateFormat formatter = new SimpleDateFormat(endTimeDateFormat);
-      TimeZone timeZone = TimeZone.getTimeZone(sysTimeZone);
-      formatter.setTimeZone(timeZone);
-      try {
-        return formatter.parse(expiryTime);
-      } catch (ParseException e) {
-        logger.error("Filter have invalid ExpiryTime : " + expiryTime + " for component :" + vLogfeederFilter.getLabel()
-          + " and hosts :" + listToStr(vLogfeederFilter.getHosts()));
-      }
-    }
-    return null;
-  }
-
-  public static List<String> getAllowedLevels(String hostName, VLogfeederFilter componentFilter) {
-    String componentName = componentFilter.getLabel();
-    List<String> hosts = componentFilter.getHosts();
-    List<String> defaultLevels = componentFilter.getDefaultLevels();
-    List<String> overrideLevels = componentFilter.getOverrideLevels();
-    String expiryTime=componentFilter.getExpiryTime();
-    //check is user override or not
-    if ((expiryTime != null && !expiryTime.isEmpty())
-        || (overrideLevels != null && !overrideLevels.isEmpty())
-        || (hosts != null && !hosts.isEmpty())) {
-      if (hosts == null || hosts.isEmpty()) {
-        // hosts list is empty or null consider it apply on all hosts
-        hosts.add(LogFeederConstants.ALL);
-      }
-      if (LogFeederUtil.isListContains(hosts, hostName, false)) {
-        if (isFilterExpired(componentFilter)) {
-          logger.debug("Filter for component " + componentName + " and host :"
-              + hostName + " is expired at " + componentFilter.getExpiryTime());
-          return defaultLevels;
-        } else {
-          return overrideLevels;
-        }
-      }
-    }
-    return defaultLevels;
-  }
-
-  public static boolean isFilterAvailable() {
-    return logfeederFilterWrapper != null;
-  }
-  
-  public static VLogfeederFilter findComponentFilter(String componentName) {
-    if (logfeederFilterWrapper != null) {
-      HashMap<String, VLogfeederFilter> filter = logfeederFilterWrapper.getFilter();
-      if (filter != null) {
-        VLogfeederFilter componentFilter = filter.get(componentName);
-        if (componentFilter != null) {
-          return componentFilter;
-        }
-      }
-    }
-    logger.trace("Filter is not there for component :" + componentName);
-    return null;
-  }
-
-
-  public static Date getCurrentDate() {
-    TimeZone.setDefault(TimeZone.getTimeZone(sysTimeZone));
-    Date date = new Date();
-    return date;
-  }
-
-  public static String listToStr(List<String> strList) {
-    StringBuilder out = new StringBuilder("[");
-    if (strList != null) {
-      int counter = 0;
-      for (Object o : strList) {
-        if (counter > 0) {
-          out.append(",");
-        }
-        out.append(o.toString());
-        counter++;
-      }
-    }
-    out.append("]");
-    return out.toString();
-  }
-}

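The polling loop and the expiry logic deleted here resurface in the new LogConfigHandler, which this commit adds but which is not shown in this excerpt. The expiry test itself boils down to a GMT timestamp comparison; a standalone equivalent of the removed check:

import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.TimeZone;

public class FilterExpirySketch {
  // A filter expires once the current GMT time reaches its expiry
  // timestamp, using the format from the deleted endTimeDateFormat field.
  static boolean isExpired(String expiryTime) throws Exception {
    SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS");
    formatter.setTimeZone(TimeZone.getTimeZone("GMT"));
    Date filterEndDate = formatter.parse(expiryTime);
    return new Date().compareTo(filterEndDate) >= 0;
  }
}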
+ 83 - 0
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/FilterLogData.java

@@ -0,0 +1,83 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logfeeder.logconfig;
+
+import java.util.List;
+import java.util.Map;
+
+import org.apache.ambari.logfeeder.common.LogFeederConstants;
+import org.apache.ambari.logfeeder.util.LogFeederUtil;
+import org.apache.commons.collections.CollectionUtils;
+import org.apache.commons.collections.MapUtils;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.log4j.Logger;
+
+/**
+ * Read configuration from solr and filter the log
+ */
+public enum FilterLogData {
+  INSTANCE;
+  
+  private static final Logger LOG = Logger.getLogger(FilterLogData.class);
+  
+  private static final boolean DEFAULT_VALUE = true;
+
+  public boolean isAllowed(String jsonBlock) {
+    if (StringUtils.isEmpty(jsonBlock)) {
+      return DEFAULT_VALUE;
+    }
+    Map<String, Object> jsonObj = LogFeederUtil.toJSONObject(jsonBlock);
+    return isAllowed(jsonObj);
+  }
+
+  public boolean isAllowed(Map<String, Object> jsonObj) {
+    boolean isAllowed = applyFilter(jsonObj);
+    if (!isAllowed) {
+      LOG.trace("Filter blocked the content: " + LogFeederUtil.getGson().toJson(jsonObj));
+    }
+    return isAllowed;
+  }
+  
+
+  private boolean applyFilter(Map<String, Object> jsonObj) {
+    if (MapUtils.isEmpty(jsonObj)) {
+      LOG.warn("Output jsonobj is empty");
+      return DEFAULT_VALUE;
+    }
+    
+    String hostName = (String) jsonObj.get(LogFeederConstants.SOLR_HOST);
+    String componentName = (String) jsonObj.get(LogFeederConstants.SOLR_COMPONENT);
+    String level = (String) jsonObj.get(LogFeederConstants.SOLR_LEVEL);
+    if (StringUtils.isNotBlank(hostName) && StringUtils.isNotBlank(componentName) && StringUtils.isNotBlank(level)) {
+      LogFeederFilter componentFilter = LogConfigHandler.findComponentFilter(componentName);
+      if (componentFilter == null) {
+        return DEFAULT_VALUE;
+      }
+      List<String> allowedLevels = LogConfigHandler.getAllowedLevels(hostName, componentFilter);
+      if (CollectionUtils.isEmpty(allowedLevels)) {
+        allowedLevels.add(LogFeederConstants.ALL);
+      }
+      return LogFeederUtil.isListContains(allowedLevels, level, false);
+    } else {
+      return DEFAULT_VALUE;
+    }
+  }
+}

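Outputs consult this filter before shipping an event to Solr. A usage sketch; the literal keys below ("host", "type", "level") are guesses at the values of LogFeederConstants.SOLR_HOST, SOLR_COMPONENT and SOLR_LEVEL, which are not expanded in this excerpt:

import java.util.HashMap;
import java.util.Map;

import org.apache.ambari.logfeeder.logconfig.FilterLogData;

public class FilterUsageSketch {
  public static void main(String[] args) {
    Map<String, Object> event = new HashMap<String, Object>();
    event.put("host", "c6401.ambari.apache.org"); // assumed key for SOLR_HOST
    event.put("type", "hdfs_namenode");           // assumed key for SOLR_COMPONENT
    event.put("level", "DEBUG");                  // assumed key for SOLR_LEVEL
    if (FilterLogData.INSTANCE.isAllowed(event)) {
      System.out.println("allowed: would be forwarded to the output");
    }
  }
}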
+ 59 - 77
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/SolrUtil.java → ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/LogConfigFetcher.java

@@ -16,12 +16,16 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.apache.ambari.logfeeder.util;
+package org.apache.ambari.logfeeder.logconfig;
 
 import java.io.IOException;
 import java.util.HashMap;
+import java.util.Map;
 
-import org.apache.ambari.logfeeder.logconfig.LogFeederConstants;
+import org.apache.ambari.logfeeder.common.LogFeederConstants;
+import org.apache.ambari.logfeeder.util.LogFeederUtil;
+import org.apache.commons.collections.CollectionUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.log4j.Level;
 import org.apache.log4j.Logger;
 import org.apache.solr.client.solrj.SolrClient;
@@ -37,73 +41,57 @@ import org.apache.solr.common.SolrDocument;
 import org.apache.solr.common.SolrDocumentList;
 import org.apache.solr.common.SolrException;
 
-public class SolrUtil {
-
-  private static final Logger logger = Logger.getLogger(SolrUtil.class);
-
-  private static SolrUtil instance = null;
+public class LogConfigFetcher {
+  private static final Logger LOG = Logger.getLogger(LogConfigFetcher.class);
   
-  private SolrClient solrClient = null;
-  private CloudSolrClient solrClouldClient = null;
+  private static LogConfigFetcher instance;
+  public synchronized static LogConfigFetcher getInstance() {
+    if (instance == null) {
+      try {
+        instance = new LogConfigFetcher();
+      } catch (Exception e) {
+        String logMessageKey = LogConfigFetcher.class.getSimpleName() + "_SOLR_UTIL";
+        LogFeederUtil.logErrorMessageByInterval(logMessageKey, "Error constructing solrUtil", e, LOG, Level.WARN);
+      }
+    }
+    return instance;
+  }
+
+  private SolrClient solrClient;
 
   private String solrDetail = "";
 
-  private SolrUtil() throws Exception {
+  public LogConfigFetcher() throws Exception {
     String url = LogFeederUtil.getStringProperty("logfeeder.solr.url");
     String zkConnectString = LogFeederUtil.getStringProperty("logfeeder.solr.zk_connect_string");
     String collection = LogFeederUtil.getStringProperty("logfeeder.solr.core.config.name", "history");
     connectToSolr(url, zkConnectString, collection);
   }
 
-  public static SolrUtil getInstance() {
-    if (instance == null) {
-      synchronized (SolrUtil.class) {
-        if (instance == null) {
-          try {
-            instance = new SolrUtil();
-          } catch (Exception e) {
-            final String LOG_MESSAGE_KEY = SolrUtil.class
-                .getSimpleName() + "_SOLR_UTIL";
-              LogFeederUtil.logErrorMessageByInterval(
-                LOG_MESSAGE_KEY,
-                "Error constructing solrUtil", e, logger,
-                Level.WARN);
-          }
-        }
-      }
-    }
-    return instance;
-  }
-
-  private SolrClient connectToSolr(String url, String zkConnectString,
-                                  String collection) throws Exception {
-    solrDetail = "zkConnectString=" + zkConnectString + ", collection=" + collection
-      + ", url=" + url;
+  private SolrClient connectToSolr(String url, String zkConnectString, String collection) throws Exception {
+    solrDetail = "zkConnectString=" + zkConnectString + ", collection=" + collection + ", url=" + url;
 
-    logger.info("connectToSolr() " + solrDetail);
-    if (collection == null || collection.isEmpty()) {
-      throw new Exception("For solr, collection name is mandatory. "
-        + solrDetail);
+    LOG.info("connectToSolr() " + solrDetail);
+    if (StringUtils.isEmpty(collection)) {
+      throw new Exception("For solr, collection name is mandatory. " + solrDetail);
     }
-    if (zkConnectString != null && !zkConnectString.isEmpty()) {
+    
+    if (StringUtils.isBlank(zkConnectString) && StringUtils.isBlank(url)) {
+      throw new Exception("Both zkConnectString and URL are empty. zkConnectString=" + zkConnectString + ", collection=" +
+          collection + ", url=" + url);
+    }
+    
+    if (StringUtils.isNotEmpty(zkConnectString)) {
       solrDetail = "zkConnectString=" + zkConnectString + ", collection=" + collection;
-      logger.info("Using zookeepr. " + solrDetail);
-      solrClouldClient = new CloudSolrClient(zkConnectString);
+      LOG.info("Using zookeepr. " + solrDetail);
+      CloudSolrClient solrClouldClient = new CloudSolrClient(zkConnectString);
       solrClouldClient.setDefaultCollection(collection);
       solrClient = solrClouldClient;
-      int waitDurationMS = 3 * 60 * 1000;
-      checkSolrStatus(waitDurationMS);
+      checkSolrStatus(3 * 60 * 1000);
     } else {
-      if (url == null || url.trim().isEmpty()) {
-        throw new Exception("Both zkConnectString and URL are empty. zkConnectString="
-          + zkConnectString + ", collection=" + collection + ", url="
-          + url);
-      }
       solrDetail = "collection=" + collection + ", url=" + url;
       String collectionURL = url + "/" + collection;
-      logger.info("Connecting to  solr : " + collectionURL);
+      LOG.info("Connecting to  solr : " + collectionURL);
       solrClient = new HttpSolrClient(collectionURL);
-
     }
     return solrClient;
   }
@@ -121,44 +109,31 @@ public class SolrUtil {
           CollectionAdminRequest.List colListReq = new CollectionAdminRequest.List();
           response = colListReq.process(solrClient);
         } catch (Exception ex) {
-          logger.error("Con't connect to Solr. solrDetail=" + solrDetail, ex);
+          LOG.error("Con't connect to Solr. solrDetail=" + solrDetail, ex);
         }
         if (response != null && response.getStatus() == 0) {
-          logger.info("Solr getCollections() is success. solr=" + solrDetail);
+          LOG.info("Solr getCollections() is success. solr=" + solrDetail);
           status = true;
           break;
         }
         if (System.currentTimeMillis() - beginTimeMS > waitDurationMS) {
-          logger.error("Solr is not reachable even after "
-            + (System.currentTimeMillis() - beginTimeMS)
+          LOG.error("Solr is not reachable even after " + (System.currentTimeMillis() - beginTimeMS)
             + " ms. If you are using alias, then you might have to restart LogSearch after Solr is up and running. solr="
             + solrDetail + ", response=" + response);
           break;
         } else {
-          logger.warn("Solr is not reachable yet. getCollections() attempt count=" + pingCount
-            + ". Will sleep for " + waitIntervalMS + " ms and try again." + " solr=" + solrDetail
-            + ", response=" + response);
-
+          LOG.warn("Solr is not reachable yet. getCollections() attempt count=" + pingCount + ". Will sleep for " +
+              waitIntervalMS + " ms and try again." + " solr=" + solrDetail + ", response=" + response);
         }
         Thread.sleep(waitIntervalMS);
       }
     } catch (Throwable t) {
-      logger.error("Seems Solr is not up. solrDetail=" + solrDetail);
+      LOG.error("Seems Solr is not up. solrDetail=" + solrDetail, t);
     }
     return status;
   }
 
-  private QueryResponse process(SolrQuery solrQuery) throws SolrServerException, IOException, SolrException {
-    if (solrClient != null) {
-      QueryResponse queryResponse = solrClient.query(solrQuery, METHOD.POST);
-      return queryResponse;
-    } else {
-      logger.error("solrClient can't be null");
-      return null;
-    }
-  }
-
-  public HashMap<String, Object> getConfigDoc() {
+  public Map<String, Object> getConfigDoc() {
     HashMap<String, Object> configMap = new HashMap<String, Object>();
     SolrQuery solrQuery = new SolrQuery();
     solrQuery.setQuery("*:*");
@@ -168,19 +143,26 @@ public class SolrUtil {
       QueryResponse response = process(solrQuery);
       if (response != null) {
         SolrDocumentList documentList = response.getResults();
-        if (documentList != null && documentList.size() > 0) {
+        if (CollectionUtils.isNotEmpty(documentList)) {
           SolrDocument configDoc = documentList.get(0);
           String configJson = LogFeederUtil.getGson().toJson(configDoc);
-          configMap = (HashMap<String, Object>) LogFeederUtil
-              .toJSONObject(configJson);
+          configMap = (HashMap<String, Object>) LogFeederUtil.toJSONObject(configJson);
         }
       }
     } catch (Exception e) {
-      final String logMessageKey = this.getClass().getSimpleName()
-          + "_FETCH_FILTER_CONFIG_ERROR";
-      LogFeederUtil.logErrorMessageByInterval(logMessageKey,
-          "Error getting filter config from solr", e, logger, Level.ERROR);
+      String logMessageKey = this.getClass().getSimpleName() + "_FETCH_FILTER_CONFIG_ERROR";
+      LogFeederUtil.logErrorMessageByInterval(logMessageKey, "Error getting filter config from solr", e, LOG, Level.ERROR);
     }
     return configMap;
   }
+
+  private QueryResponse process(SolrQuery solrQuery) throws SolrServerException, IOException, SolrException {
+    if (solrClient != null) {
+      QueryResponse queryResponse = solrClient.query(solrQuery, METHOD.POST);
+      return queryResponse;
+    } else {
+      LOG.error("solrClient can't be null");
+      return null;
+    }
+  }
 }

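A short usage sketch for the relocated fetcher, assuming the logfeeder.solr.* properties point at a running Solr; note that getInstance() swallows construction errors and returns null, so callers must check for that.

import java.util.Map;

import org.apache.ambari.logfeeder.logconfig.LogConfigFetcher;

public class LogConfigFetcherSketch {
  public static void main(String[] args) {
    LogConfigFetcher fetcher = LogConfigFetcher.getInstance();
    if (fetcher == null) {
      // Construction failed (e.g. Solr unreachable); the error was already logged.
      return;
    }
    // Returns the first document of the config collection as a generic map,
    // or an empty map when nothing is found.
    Map<String, Object> configDoc = fetcher.getConfigDoc();
    System.out.println("config doc keys: " + configDoc.keySet());
  }
}
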
+ 189 - 0
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/LogConfigHandler.java

@@ -0,0 +1,189 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logfeeder.logconfig;
+
+import java.text.DateFormat;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.TimeZone;
+
+import org.apache.ambari.logfeeder.common.LogFeederConstants;
+import org.apache.ambari.logfeeder.util.LogFeederUtil;
+import org.apache.commons.collections.CollectionUtils;
+import org.apache.commons.lang.StringUtils;
+import org.apache.log4j.Logger;
+
+public class LogConfigHandler extends Thread {
+  private static final Logger LOG = Logger.getLogger(LogConfigHandler.class);
+  
+  private static final int DEFAULT_SOLR_CONFIG_INTERVAL = 5;
+  private static final String DATE_FORMAT = "yyyy-MM-dd'T'HH:mm:ss.SSS";
+  private static final String TIMEZONE = "GMT";
+  
+  static {
+    TimeZone.setDefault(TimeZone.getTimeZone(TIMEZONE));
+  }
+  
+  private static ThreadLocal<DateFormat> formatter = new ThreadLocal<DateFormat>() {
+    protected DateFormat initialValue() {
+      SimpleDateFormat dateFormat = new SimpleDateFormat(DATE_FORMAT);
+      dateFormat.setTimeZone(TimeZone.getTimeZone(TIMEZONE));
+      return dateFormat;
+    }
+  };
+  
+  private static LogFeederFilterWrapper logFeederFilterWrapper;
+
+  private static boolean running = false;
+
+  public static void handleConfig() {
+    boolean filterEnable = LogFeederUtil.getBooleanProperty("logfeeder.log.filter.enable", false);
+    if (!filterEnable) {
+      LOG.info("Logfeeder filter Scheduler is disabled.");
+      return;
+    }
+    if (!running) {
+      new LogConfigHandler().start();
+      running = true;
+      LOG.info("Logfeeder Filter Thread started!");
+    } else {
+      LOG.warn("Logfeeder Filter Thread is already running.");
+    }
+  }
+  
+  private LogConfigHandler() {
+    setName(getClass().getSimpleName());
+    setDaemon(true);
+  }
+
+  @Override
+  public void run() {
+    String zkConnectString = LogFeederUtil.getStringProperty("logfeeder.solr.zk_connect_string");
+    String solrUrl = LogFeederUtil.getStringProperty("logfeeder.solr.url");
+    if (StringUtils.isBlank(zkConnectString) && StringUtils.isBlank(solrUrl)) {
+      LOG.warn("Neither Solr ZK Connect String nor solr Url for UserConfig/History is set." +
+          "Won't look for level configuration from Solr.");
+      return;
+    }
+    
+    int solrConfigInterval = LogFeederUtil.getIntProperty("logfeeder.solr.config.interval", DEFAULT_SOLR_CONFIG_INTERVAL);
+    do {
+      LOG.debug("Updating config from solr after every " + solrConfigInterval + " sec.");
+      fetchConfig();
+      try {
+        Thread.sleep(1000 * solrConfigInterval);
+      } catch (InterruptedException e) {
+        LOG.error(e.getLocalizedMessage(), e);
+      }
+    } while (true);
+  }
+
+  private synchronized void fetchConfig() {
+    LogConfigFetcher fetcher = LogConfigFetcher.getInstance();
+    if (fetcher != null) {
+      Map<String, Object> configDocMap = fetcher.getConfigDoc();
+      String configJson = (String) configDocMap.get(LogFeederConstants.VALUES);
+      if (configJson != null) {
+        logFeederFilterWrapper = LogFeederUtil.getGson().fromJson(configJson, LogFeederFilterWrapper.class);
+      }
+    }
+  }
+
+  public static boolean isFilterAvailable() {
+    return logFeederFilterWrapper != null;
+  }
+
+  public static List<String> getAllowedLevels(String hostName, LogFeederFilter componentFilter) {
+    String componentName = componentFilter.getLabel();
+    List<String> hosts = componentFilter.getHosts();
+    List<String> defaultLevels = componentFilter.getDefaultLevels();
+    List<String> overrideLevels = componentFilter.getOverrideLevels();
+    String expiryTime = componentFilter.getExpiryTime();
+    
+    // check whether the user has overridden the filter
+    if (StringUtils.isNotEmpty(expiryTime) || CollectionUtils.isNotEmpty(overrideLevels) || CollectionUtils.isNotEmpty(hosts)) {
+      if (CollectionUtils.isEmpty(hosts)) { // an empty or null hosts list means the filter applies to all hosts
+        hosts.add(LogFeederConstants.ALL);
+      }
+      
+      if (LogFeederUtil.isListContains(hosts, hostName, false)) {
+        if (isFilterExpired(componentFilter)) {
+          LOG.debug("Filter for component " + componentName + " and host :" + hostName + " is expired at " +
+              componentFilter.getExpiryTime());
+          return defaultLevels;
+        } else {
+          return overrideLevels;
+        }
+      }
+    }
+    return defaultLevels;
+  }
+
+  private static boolean isFilterExpired(LogFeederFilter logfeederFilter) {
+    if (logfeederFilter == null) {
+      return false;
+    }
+    
+    Date filterEndDate = parseFilterExpireDate(logfeederFilter);
+    if (filterEndDate == null) {
+      return false;
+    }
+    
+    Date currentDate = new Date();
+    if (!currentDate.before(filterEndDate)) {
+      LOG.debug("Filter for  Component :" + logfeederFilter.getLabel() + " and Hosts : [" +
+          StringUtils.join(logfeederFilter.getHosts(), ',') + "] is expired because of filter endTime : " +
+          formatter.get().format(filterEndDate) + " is older than currentTime :" + formatter.get().format(currentDate));
+      return true;
+    } else {
+      return false;
+    }
+  }
+
+  private static Date parseFilterExpireDate(LogFeederFilter vLogfeederFilter) {
+    String expiryTime = vLogfeederFilter.getExpiryTime();
+    if (StringUtils.isNotEmpty(expiryTime)) {
+      try {
+        return formatter.get().parse(expiryTime);
+      } catch (ParseException e) {
+        LOG.error("Filter have invalid ExpiryTime : " + expiryTime + " for component :" + vLogfeederFilter.getLabel()
+          + " and hosts : [" + StringUtils.join(vLogfeederFilter.getHosts(), ',') + "]");
+      }
+    }
+    return null;
+  }
+  
+  public static LogFeederFilter findComponentFilter(String componentName) {
+    if (logFeederFilterWrapper != null) {
+      HashMap<String, LogFeederFilter> filter = logFeederFilterWrapper.getFilter();
+      if (filter != null) {
+        LogFeederFilter componentFilter = filter.get(componentName);
+        if (componentFilter != null) {
+          return componentFilter;
+        }
+      }
+    }
+    LOG.trace("Filter is not there for component :" + componentName);
+    return null;
+  }
+}

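A sketch of the handler's intended lifecycle, assuming logfeeder.log.filter.enable=true and a reachable Solr; the component and host names are invented.

import java.util.List;

import org.apache.ambari.logfeeder.logconfig.LogConfigHandler;
import org.apache.ambari.logfeeder.logconfig.LogFeederFilter;

public class LogConfigHandlerSketch {
  public static void main(String[] args) throws InterruptedException {
    // Starts the polling daemon thread; a no-op when filtering is disabled.
    LogConfigHandler.handleConfig();

    // Give the first fetch a chance to complete (demo only).
    Thread.sleep(10 * 1000);

    if (LogConfigHandler.isFilterAvailable()) {
      LogFeederFilter componentFilter = LogConfigHandler.findComponentFilter("hdfs_namenode");
      if (componentFilter != null) {
        List<String> levels = LogConfigHandler.getAllowedLevels("c6401.ambari.apache.org", componentFilter);
        System.out.println("allowed levels: " + levels);
      }
    }
  }
}
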
+ 3 - 3
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/view/VLogfeederFilter.java → ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/LogFeederFilter.java

@@ -16,7 +16,7 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.apache.ambari.logfeeder.view;
+package org.apache.ambari.logfeeder.logconfig;
 
 import java.util.ArrayList;
 import java.util.List;
@@ -33,7 +33,7 @@ import org.codehaus.jackson.map.annotate.JsonSerialize;
 @JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL)
 @XmlRootElement
 @XmlAccessorType(XmlAccessType.FIELD)
-public class VLogfeederFilter {
+public class LogFeederFilter {
 
   private String label;
   private List<String> hosts;
@@ -41,7 +41,7 @@ public class VLogfeederFilter {
   private List<String> overrideLevels;
   private String expiryTime;
 
-  public VLogfeederFilter() {
+  public LogFeederFilter() {
     hosts = new ArrayList<String>();
     defaultLevels = new ArrayList<String>();
     overrideLevels = new ArrayList<String>();

+ 5 - 5
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/view/VLogfeederFilterWrapper.java → ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/LogFeederFilterWrapper.java

@@ -16,7 +16,7 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.apache.ambari.logfeeder.view;
+package org.apache.ambari.logfeeder.logconfig;
 
 import java.util.HashMap;
 
@@ -32,16 +32,16 @@ import org.codehaus.jackson.map.annotate.JsonSerialize;
 @JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL)
 @XmlRootElement
 @XmlAccessorType(XmlAccessType.FIELD)
-public class VLogfeederFilterWrapper {
+public class LogFeederFilterWrapper {
 
-  private HashMap<String, VLogfeederFilter> filter;
+  private HashMap<String, LogFeederFilter> filter;
   private String id;
 
-  public HashMap<String, VLogfeederFilter> getFilter() {
+  public HashMap<String, LogFeederFilter> getFilter() {
     return filter;
   }
 
-  public void setFilter(HashMap<String, VLogfeederFilter> filter) {
+  public void setFilter(HashMap<String, LogFeederFilter> filter) {
     this.filter = filter;
   }
 

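To illustrate the shape the two renamed beans deserialize from, a sketch of the config JSON round-trip; the field names follow the classes above, the values are invented.

import org.apache.ambari.logfeeder.logconfig.LogFeederFilterWrapper;
import org.apache.ambari.logfeeder.util.LogFeederUtil;

public class FilterConfigJsonSketch {
  public static void main(String[] args) {
    // Same deserialization path as LogConfigHandler.fetchConfig().
    String configJson =
        "{\"id\": \"filter_config\"," +
        " \"filter\": {" +
        "   \"hdfs_namenode\": {" +
        "     \"label\": \"hdfs_namenode\"," +
        "     \"hosts\": []," +
        "     \"defaultLevels\": [\"FATAL\", \"ERROR\", \"WARN\", \"INFO\"]," +
        "     \"overrideLevels\": []" +
        "   }" +
        " }}";

    LogFeederFilterWrapper wrapper = LogFeederUtil.getGson().fromJson(configJson, LogFeederFilterWrapper.class);
    System.out.println(wrapper.getFilter().get("hdfs_namenode").getDefaultLevels());
  }
}
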
+ 0 - 59
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/LogfeederScheduler.java

@@ -1,59 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.ambari.logfeeder.logconfig;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.ambari.logfeeder.util.LogFeederUtil;
-import org.apache.log4j.Logger;
-
-public enum LogfeederScheduler {
-
-  INSTANCE;
-
-  private Logger logger = Logger.getLogger(LogfeederScheduler.class);
-
-  private static boolean running = false;
-
-  public synchronized void start() {
-    boolean filterEnable = LogFeederUtil.getBooleanProperty("logfeeder.log.filter.enable", false);
-    if (!filterEnable) {
-      logger.info("Logfeeder  filter Scheduler is disabled.");
-      return;
-    }
-    if (!running) {
-      for (Thread thread : getThreadList()) {
-        thread.start();
-      }
-      running = true;
-      logger.info("Logfeeder Scheduler started!");
-    } else {
-      logger.warn("Logfeeder Scheduler is already running.");
-    }
-  }
-
-  private List<Thread> getThreadList() {
-    List<Thread> tasks = new ArrayList<Thread>();
-    Thread configMonitor = new FetchConfigFromSolr(true);
-    tasks.add(configMonitor);
-    return tasks;
-  }
-}

+ 0 - 62
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/filter/ApplyLogFilter.java

@@ -1,62 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.ambari.logfeeder.logconfig.filter;
-
-import java.util.List;
-import java.util.Map;
-
-import org.apache.ambari.logfeeder.logconfig.FetchConfigFromSolr;
-import org.apache.ambari.logfeeder.logconfig.LogFeederConstants;
-import org.apache.ambari.logfeeder.util.LogFeederUtil;
-import org.apache.ambari.logfeeder.view.VLogfeederFilter;
-import org.apache.log4j.Logger;
-
-class ApplyLogFilter extends DefaultDataFilter {
-
-  private static Logger logger = Logger.getLogger(ApplyLogFilter.class);
-
-  @Override
-  public boolean applyFilter(Map<String, Object> jsonObj, boolean defaultValue) {
-    if (isEmpty(jsonObj)) {
-      logger.warn("Output jsonobj is empty");
-      return defaultValue;
-    }
-    String hostName = (String) jsonObj.get(LogFeederConstants.SOLR_HOST);
-    if (isNotEmpty(hostName)) {
-      String componentName = (String) jsonObj.get(LogFeederConstants.SOLR_COMPONENT);
-      if (isNotEmpty(componentName)) {
-        String level = (String) jsonObj.get(LogFeederConstants.SOLR_LEVEL);
-        if (isNotEmpty(level)) {
-          VLogfeederFilter componentFilter = FetchConfigFromSolr.findComponentFilter(componentName);
-          if (componentFilter == null) {
-            return defaultValue;
-          }
-          List<String> allowedLevels = FetchConfigFromSolr.getAllowedLevels(
-              hostName, componentFilter);
-          if (allowedLevels == null || allowedLevels.isEmpty()) {
-            allowedLevels.add(LogFeederConstants.ALL);
-          }
-          return LogFeederUtil.isListContains(allowedLevels, level, false);
-        }
-      }
-    }
-    return defaultValue;
-  }
-}

+ 0 - 49
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/filter/DefaultDataFilter.java

@@ -1,49 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.logfeeder.logconfig.filter;
-
-import java.util.Map;
-
-/**
- * Default filter to allow everything
- */
-class DefaultDataFilter {
-  public boolean applyFilter(Map<String, Object> outputJsonObj, boolean defaultValue) {
-    return defaultValue;
-  }
-
-  protected boolean isEmpty(Map<String, Object> map) {
-    if (map == null || map.isEmpty()) {
-      return true;
-    }
-    return false;
-  }
-
-  protected boolean isEmpty(String str) {
-    if (str == null || str.trim().isEmpty()) {
-      return true;
-    }
-    return false;
-  }
-
-  protected boolean isNotEmpty(String str) {
-    return !isEmpty(str);
-  }
-
-}

+ 0 - 53
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/filter/FilterLogData.java

@@ -1,53 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.ambari.logfeeder.logconfig.filter;
-
-import java.util.Map;
-
-import org.apache.ambari.logfeeder.logconfig.filter.ApplyLogFilter;
-import org.apache.ambari.logfeeder.util.LogFeederUtil;
-import org.apache.log4j.Logger;
-
-/**
- * Read configuration from solr and filter the log
- */
-public enum FilterLogData {
-  INSTANCE;
-  private ApplyLogFilter applyLogFilter = new ApplyLogFilter();
-  private static Logger logger = Logger.getLogger(FilterLogData.class);
-  // by default allow every log
-  boolean defaultValue = true;
-
-  public boolean isAllowed(String jsonBlock) {
-    if (jsonBlock == null || jsonBlock.isEmpty()) {
-      return defaultValue;
-    }
-    Map<String, Object> jsonObj = LogFeederUtil.toJSONObject(jsonBlock);
-    return isAllowed(jsonObj);
-  }
-
-  public boolean isAllowed(Map<String, Object> jsonObj) {
-    boolean isAllowed = applyLogFilter.applyFilter(jsonObj, defaultValue);
-    if (!isAllowed) {
-      logger.trace("Filter block the content :" + LogFeederUtil.getGson().toJson(jsonObj));
-    }
-    return isAllowed;
-  }
-}

+ 5 - 9
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/mapper/Mapper.java

@@ -26,22 +26,18 @@ public abstract class Mapper {
   protected String fieldName;
   private String mapClassCode;
 
-  public boolean init(String inputDesc, String fieldName,
-                      String mapClassCode, Object mapConfigs) {
+  public abstract boolean init(String inputDesc, String fieldName, String mapClassCode, Object mapConfigs);
+
+  protected void init(String inputDesc, String fieldName, String mapClassCode) {
     this.inputDesc = inputDesc;
     this.fieldName = fieldName;
     this.mapClassCode = mapClassCode;
-    return true;
   }
 
-  public Object apply(Map<String, Object> jsonObj, Object value) {
-    return value;
-  }
+  public abstract Object apply(Map<String, Object> jsonObj, Object value);
 
   @Override
   public String toString() {
-    return "mapClass=" + mapClassCode + ", input=" + inputDesc
-      + ", fieldName=" + fieldName;
+    return "mapClass=" + mapClassCode + ", input=" + inputDesc + ", fieldName=" + fieldName;
   }
-
 }

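Since Mapper is now abstract, every concrete mapper must implement init() and apply(). A hypothetical subclass, only to illustrate the new contract:

import java.util.Map;

import org.apache.ambari.logfeeder.mapper.Mapper;

// Hypothetical mapper that upper-cases a field's value; not part of Log Feeder.
public class MapperUpperCase extends Mapper {
  @Override
  public boolean init(String inputDesc, String fieldName, String mapClassCode, Object mapConfigs) {
    // The protected init() overload stores the common fields.
    init(inputDesc, fieldName, mapClassCode);
    return true; // no mapper-specific config to validate
  }

  @Override
  public Object apply(Map<String, Object> jsonObj, Object value) {
    Object mapped = (value == null) ? null : value.toString().toUpperCase();
    jsonObj.put(fieldName, mapped); // fieldName is protected in Mapper
    return mapped;
  }
}
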
+ 13 - 19
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/mapper/MapperDate.java

@@ -31,31 +31,29 @@ import org.apache.log4j.Level;
 import org.apache.log4j.Logger;
 
 public class MapperDate extends Mapper {
-  private static final Logger logger = Logger.getLogger(MapperDate.class);
+  private static final Logger LOG = Logger.getLogger(MapperDate.class);
 
   private SimpleDateFormat targetDateFormatter = null;
   private boolean isEpoch = false;
   private SimpleDateFormat srcDateFormatter=null;
 
   @Override
-  public boolean init(String inputDesc, String fieldName,
-                      String mapClassCode, Object mapConfigs) {
-    super.init(inputDesc, fieldName, mapClassCode, mapConfigs);
+  public boolean init(String inputDesc, String fieldName, String mapClassCode, Object mapConfigs) {
+    init(inputDesc, fieldName, mapClassCode);
     if (!(mapConfigs instanceof Map)) {
-      logger.fatal("Can't initialize object. mapConfigs class is not of type Map. "
-        + mapConfigs.getClass().getName()
-        + ", map="
-        + this.toString());
+      LOG.fatal("Can't initialize object. mapConfigs class is not of type Map. " + mapConfigs.getClass().getName() +
+        ", map=" + this);
       return false;
     }
+    
     @SuppressWarnings("unchecked")
     Map<String, Object> mapObjects = (Map<String, Object>) mapConfigs;
     String targetDateFormat = (String) mapObjects.get("target_date_pattern");
     String srcDateFormat = (String) mapObjects.get("src_date_pattern");
     if (StringUtils.isEmpty(targetDateFormat)) {
-      logger.fatal("Date format for map is empty. " + this.toString());
+      LOG.fatal("Date format for map is empty. " + this);
     } else {
-      logger.info("Date mapper format is " + targetDateFormat);
+      LOG.info("Date mapper format is " + targetDateFormat);
 
       if (targetDateFormat.equalsIgnoreCase("epoch")) {
         isEpoch = true;
@@ -68,8 +66,7 @@ public class MapperDate extends Mapper {
           }
           return true;
         } catch (Throwable ex) {
-          logger.fatal("Error creating date format. format="
-            + targetDateFormat + ". " + this.toString());
+          LOG.fatal("Error creating date format. format=" + targetDateFormat + ". " + this.toString());
         }
       } 
     }
@@ -84,7 +81,7 @@ public class MapperDate extends Mapper {
           long ms = Long.parseLong(value.toString()) * 1000;
           value = new Date(ms);
         } else if (targetDateFormatter != null) {
-          if(srcDateFormatter!=null){
+          if (srcDateFormatter != null) {
             Date srcDate = srcDateFormatter.parse(value.toString());
             //set year in src_date when src_date does not have year component
             if (!srcDateFormatter.toPattern().contains("yy")) {
@@ -108,12 +105,9 @@ public class MapperDate extends Mapper {
         }
         jsonObj.put(fieldName, value);
       } catch (Throwable t) {
-        LogFeederUtil.logErrorMessageByInterval(this.getClass()
-            .getSimpleName() + ":apply",
-          "Error applying date transformation. isEpoch="
-            + isEpoch + ", targetateFormat=" + (targetDateFormatter!=null ?targetDateFormatter.toPattern():"")
-            + ", value=" + value + ". " + this.toString(),
-          t, logger, Level.ERROR);
+        LogFeederUtil.logErrorMessageByInterval(this.getClass().getSimpleName() + ":apply", "Error applying date transformation." +
+            " isEpoch=" + isEpoch + ", targetateFormat=" + (targetDateFormatter!=null ?targetDateFormatter.toPattern():"")
+            + ", value=" + value + ". " + this.toString(), t, LOG, Level.ERROR);
       }
     }
     return value;

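A usage sketch for MapperDate; the config keys (src_date_pattern, target_date_pattern) are the ones read in init() above, while the map class code, field name, and patterns are illustrative assumptions.

import java.util.HashMap;
import java.util.Map;

import org.apache.ambari.logfeeder.mapper.MapperDate;

public class MapperDateSketch {
  public static void main(String[] args) {
    Map<String, Object> mapConfigs = new HashMap<String, Object>();
    mapConfigs.put("src_date_pattern", "MMM dd HH:mm:ss");
    mapConfigs.put("target_date_pattern", "yyyy-MM-dd HH:mm:ss,SSS");

    MapperDate mapper = new MapperDate();
    if (mapper.init("sketch-input", "logtime", "map_date", mapConfigs)) {
      Map<String, Object> event = new HashMap<String, Object>();
      event.put("logtime", "Aug 22 10:15:30");
      // Parses the value with src_date_pattern (filling in the missing year)
      // and rewrites the field using target_date_pattern.
      System.out.println(mapper.apply(event, event.get("logtime")));
    }
  }
}
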
+ 8 - 12
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/mapper/MapperFieldName.java

@@ -30,24 +30,23 @@ import org.apache.log4j.Logger;
  * Overrides the value for the field
  */
 public class MapperFieldName extends Mapper {
-  private static final Logger logger = Logger.getLogger(MapperFieldName.class);
+  private static final Logger LOG = Logger.getLogger(MapperFieldName.class);
 
   private String newValue = null;
 
   @Override
-  public boolean init(String inputDesc, String fieldName,
-      String mapClassCode, Object mapConfigs) {
-    super.init(inputDesc, fieldName, mapClassCode, mapConfigs);
+  public boolean init(String inputDesc, String fieldName, String mapClassCode, Object mapConfigs) {
+    init(inputDesc, fieldName, mapClassCode);
     if (!(mapConfigs instanceof Map)) {
-      logger.fatal("Can't initialize object. mapConfigs class is not of type Map. "
-          + mapConfigs.getClass().getName());
+      LOG.fatal("Can't initialize object. mapConfigs class is not of type Map. " + mapConfigs.getClass().getName());
       return false;
     }
+    
     @SuppressWarnings("unchecked")
     Map<String, Object> mapObjects = (Map<String, Object>) mapConfigs;
     newValue = (String) mapObjects.get("new_fieldname");
     if (StringUtils.isEmpty(newValue)) {
-      logger.fatal("Map field value is empty.");
+      LOG.fatal("Map field value is empty.");
       return false;
     }
     return true;
@@ -59,12 +58,9 @@ public class MapperFieldName extends Mapper {
       jsonObj.remove(fieldName);
       jsonObj.put(newValue, value);
     } else {
-      LogFeederUtil.logErrorMessageByInterval(this.getClass()
-          .getSimpleName() + ":apply",
-          "New fieldName is null, so transformation is not applied. "
-              + this.toString(), null, logger, Level.ERROR);
+      LogFeederUtil.logErrorMessageByInterval(this.getClass().getSimpleName() + ":apply",
+          "New fieldName is null, so transformation is not applied. " + this.toString(), null, LOG, Level.ERROR);
     }
     return value;
   }
-
 }

+ 13 - 18
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/mapper/MapperFieldValue.java

@@ -30,25 +30,25 @@ import org.apache.log4j.Logger;
  * Overrides the value for the field
  */
 public class MapperFieldValue extends Mapper {
-  private Logger logger = Logger.getLogger(MapperFieldValue.class);
+  private static final Logger LOG = Logger.getLogger(MapperFieldValue.class);
+  
   private String prevValue = null;
   private String newValue = null;
 
   @Override
-  public boolean init(String inputDesc, String fieldName,
-      String mapClassCode, Object mapConfigs) {
-    super.init(inputDesc, fieldName, mapClassCode, mapConfigs);
+  public boolean init(String inputDesc, String fieldName, String mapClassCode, Object mapConfigs) {
+    init(inputDesc, fieldName, mapClassCode);
     if (!(mapConfigs instanceof Map)) {
-      logger.fatal("Can't initialize object. mapConfigs class is not of type Map. "
-          + mapConfigs.getClass().getName());
+      LOG.fatal("Can't initialize object. mapConfigs class is not of type Map. " + mapConfigs.getClass().getName());
       return false;
     }
+    
     @SuppressWarnings("unchecked")
     Map<String, Object> mapObjects = (Map<String, Object>) mapConfigs;
     prevValue = (String) mapObjects.get("pre_value");
     newValue = (String) mapObjects.get("post_value");
     if (StringUtils.isEmpty(newValue)) {
-      logger.fatal("Map field value is empty.");
+      LOG.fatal("Map field value is empty.");
       return false;
     }
     return true;
@@ -56,20 +56,15 @@ public class MapperFieldValue extends Mapper {
 
   @Override
   public Object apply(Map<String, Object> jsonObj, Object value) {
-    if (newValue != null) {
-      if (prevValue != null) {
-        if (prevValue.equalsIgnoreCase(value.toString())) {
-          value = newValue;
-          jsonObj.put(fieldName, value);
-        }
+    if (newValue != null && prevValue != null) {
+      if (prevValue.equalsIgnoreCase(value.toString())) {
+        value = newValue;
+        jsonObj.put(fieldName, value);
       }
     } else {
-      LogFeederUtil.logErrorMessageByInterval(
-          this.getClass().getSimpleName() + ":apply",
-          "New value is null, so transformation is not applied. "
-              + this.toString(), null, logger, Level.ERROR);
+      LogFeederUtil.logErrorMessageByInterval(this.getClass().getSimpleName() + ":apply",
+          "New value is null, so transformation is not applied. " + this.toString(), null, LOG, Level.ERROR);
     }
     return value;
   }
-
 }

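And a matching sketch for MapperFieldValue; pre_value / post_value are the keys read in init() above, while the class code and the level rewrite are invented examples.

import java.util.HashMap;
import java.util.Map;

import org.apache.ambari.logfeeder.mapper.MapperFieldValue;

public class MapperFieldValueSketch {
  public static void main(String[] args) {
    Map<String, Object> mapConfigs = new HashMap<String, Object>();
    mapConfigs.put("pre_value", "WARNING");
    mapConfigs.put("post_value", "WARN");

    MapperFieldValue mapper = new MapperFieldValue();
    if (mapper.init("sketch-input", "level", "map_fieldvalue", mapConfigs)) {
      Map<String, Object> event = new HashMap<String, Object>();
      event.put("level", "WARNING");
      // Replaces the value in place when it matches pre_value (case-insensitive).
      System.out.println(mapper.apply(event, event.get("level"))); // WARN
    }
  }
}
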
+ 5 - 5
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/metrics/LogFeederAMSClient.java

@@ -20,25 +20,25 @@
 package org.apache.ambari.logfeeder.metrics;
 
 import org.apache.ambari.logfeeder.util.LogFeederUtil;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.metrics2.sink.timeline.AbstractTimelineMetricsSink;
 import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;
 import org.apache.log4j.Logger;
 
 public class LogFeederAMSClient extends AbstractTimelineMetricsSink {
-  private static final Logger logger = Logger.getLogger(LogFeederAMSClient.class);
+  private static final Logger LOG = Logger.getLogger(LogFeederAMSClient.class);
 
   private String collectorHosts = null;
 
   public LogFeederAMSClient() {
-    collectorHosts = LogFeederUtil
-      .getStringProperty("logfeeder.metrics.collector.hosts");
-    if (collectorHosts != null && collectorHosts.trim().length() == 0) {
+    collectorHosts = LogFeederUtil.getStringProperty("logfeeder.metrics.collector.hosts");
+    if (StringUtils.isBlank(collectorHosts)) {
       collectorHosts = null;
     }
     if (collectorHosts != null) {
       collectorHosts = collectorHosts.trim();
     }
-    logger.info("AMS collector URL=" + collectorHosts);
+    LOG.info("AMS collector URL=" + collectorHosts);
   }
 
   @Override

+ 23 - 8
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/metrics/MetricCount.java → ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/metrics/MetricData.java

@@ -19,13 +19,28 @@
 
 package org.apache.ambari.logfeeder.metrics;
 
-public class MetricCount {
-  public String metricsName = null;
-  public boolean isPointInTime = false;
+import org.apache.commons.lang3.builder.ToStringBuilder;
+import org.apache.commons.lang3.builder.ToStringStyle;
 
-  public long count = 0;
-  public long prevLogCount = 0;
-  public long prevLogMS = System.currentTimeMillis();
-  public long prevPublishCount = 0;
-  public int publishCount = 0; // Count of published metrics. Used for first time sending metrics
+public class MetricData {
+  public final String metricsName;
+  public final boolean isPointInTime;
+
+  public MetricData(String metricsName, boolean isPointInTime) {
+    this.metricsName = metricsName;
+    this.isPointInTime = isPointInTime;
+  }
+  
+  public long value = 0;
+  public long prevPublishValue = 0;
+  
+  public long prevLogValue = 0;
+  public long prevLogTime = System.currentTimeMillis();
+  
+  public int publishCount = 0; // Number of times the metric was published so far
+  
+  @Override
+  public String toString() {
+    return ToStringBuilder.reflectionToString(this, ToStringStyle.SHORT_PREFIX_STYLE);
+  }
 }

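A minimal sketch of the renamed metric container; the metric name is invented.

import org.apache.ambari.logfeeder.metrics.MetricData;

public class MetricDataSketch {
  public static void main(String[] args) {
    // Name and point-in-time flag are now final and fixed at construction.
    MetricData readLines = new MetricData("input.files.read_lines", false);

    // Producers simply bump the mutable counter.
    readLines.value += 42;

    // The reflective toString() above prints all fields.
    System.out.println(readLines);
  }
}
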
+ 64 - 64
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/metrics/MetricsMgr.java → ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/metrics/MetricsManager.java

@@ -30,8 +30,8 @@ import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
 import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;
 import org.apache.log4j.Logger;
 
-public class MetricsMgr {
-  private static final Logger logger = Logger.getLogger(MetricsMgr.class);
+public class MetricsManager {
+  private static final Logger LOG = Logger.getLogger(MetricsManager.class);
 
   private boolean isMetricsEnabled = false;
   private String nodeHostName = null;
@@ -41,35 +41,42 @@ public class MetricsMgr {
   private long lastFailedPublishTimeMS = System.currentTimeMillis(); // Reset the clock
 
   private int publishIntervalMS = 60 * 1000;
-  private int maxMetricsBuffer = 60 * 60 * 1000; // If AMS is down, we should not keep
-  // the metrics in memory forever
+  private int maxMetricsBuffer = 60 * 60 * 1000; // If AMS is down, we should not keep the metrics in memory forever
   private HashMap<String, TimelineMetric> metricsMap = new HashMap<String, TimelineMetric>();
   private LogFeederAMSClient amsClient = null;
 
   public void init() {
-    logger.info("Initializing MetricsMgr()");
+    LOG.info("Initializing MetricsManager()");
     amsClient = new LogFeederAMSClient();
 
-    if (amsClient.getCollectorUri() != null) {
-      nodeHostName = LogFeederUtil.getStringProperty("node.hostname");
+    if (amsClient.getCollectorUri(null) != null) {
+      findNodeHostName();
       if (nodeHostName == null) {
-        try {
-          nodeHostName = InetAddress.getLocalHost().getHostName();
-        } catch (Throwable e) {
-          logger.warn(
-            "Error getting hostname using InetAddress.getLocalHost().getHostName()",
-            e);
-        }
-        if (nodeHostName == null) {
-          try {
-            nodeHostName = InetAddress.getLocalHost()
-              .getCanonicalHostName();
-          } catch (Throwable e) {
-            logger.warn(
-              "Error getting hostname using InetAddress.getLocalHost().getCanonicalHostName()",
-              e);
-          }
-        }
+        isMetricsEnabled = false;
+        LOG.error("Failed getting hostname for node. Disabling publishing LogFeeder metrics");
+      } else {
+        isMetricsEnabled = true;
+        LOG.info("LogFeeder Metrics is enabled. Metrics host=" + amsClient.getCollectorUri(null));
+      }
+    } else {
+      LOG.info("LogFeeder Metrics publish is disabled");
+    }
+  }
+
+  private void findNodeHostName() {
+    nodeHostName = LogFeederUtil.getStringProperty("node.hostname");
+    if (nodeHostName == null) {
+      try {
+        nodeHostName = InetAddress.getLocalHost().getHostName();
+      } catch (Throwable e) {
+        LOG.warn("Error getting hostname using InetAddress.getLocalHost().getHostName()", e);
+      }
+    }
+    if (nodeHostName == null) {
+      try {
+        nodeHostName = InetAddress.getLocalHost().getCanonicalHostName();
+      } catch (Throwable e) {
+        LOG.warn("Error getting hostname using InetAddress.getLocalHost().getCanonicalHostName()", e);
       }
       if (nodeHostName == null) {
         isMetricsEnabled = false;
@@ -77,7 +84,7 @@ public class MetricsMgr {
       } else {
         isMetricsEnabled = true;
         logger.info("LogFeeder Metrics is enabled. Metrics host="
-          + amsClient.getCollectorUri());
+          + amsClient.getCollectorUri(null));
       }
     } else {
       logger.info("LogFeeder Metrics publish is disabled");
@@ -88,35 +95,36 @@ public class MetricsMgr {
     return isMetricsEnabled;
   }
 
-  synchronized public void useMetrics(List<MetricCount> metricsList) {
+  public synchronized void useMetrics(List<MetricData> metricsList) {
     if (!isMetricsEnabled) {
       return;
     }
-    logger.info("useMetrics() metrics.size=" + metricsList.size());
+    LOG.info("useMetrics() metrics.size=" + metricsList.size());
     long currMS = System.currentTimeMillis();
+
+    gatherMetrics(metricsList, currMS);
+    publishMetrics(currMS);
+  }
+
+  private void gatherMetrics(List<MetricData> metricsList, long currMS) {
     Long currMSLong = new Long(currMS);
-    for (MetricCount metric : metricsList) {
+    for (MetricData metric : metricsList) {
       if (metric.metricsName == null) {
-        logger.debug("metric.metricsName is null");
-        // Metrics is not meant to be published
+        LOG.debug("metric.metricsName is null");
         continue;
       }
-      long currCount = metric.count;
-      if (!metric.isPointInTime && metric.publishCount > 0
-        && currCount <= metric.prevPublishCount) {
-        // No new data added, so let's ignore it
-        logger.debug("Nothing changed. " + metric.metricsName
-          + ", currCount=" + currCount + ", prevPublishCount="
-          + metric.prevPublishCount);
+      long currCount = metric.value;
+      if (!metric.isPointInTime && metric.publishCount > 0 && currCount <= metric.prevPublishValue) {
+        LOG.debug("Nothing changed. " + metric.metricsName + ", currCount=" + currCount + ", prevPublishCount=" +
+            metric.prevPublishValue);
         continue;
       }
       metric.publishCount++;
 
+      LOG.debug("Ensuring metrics=" + metric.metricsName);
       TimelineMetric timelineMetric = metricsMap.get(metric.metricsName);
       if (timelineMetric == null) {
-        logger.debug("Creating new metric obbject for "
-          + metric.metricsName);
-        // First time for this metric
+        LOG.debug("Creating new metric obbject for " + metric.metricsName);
         timelineMetric = new TimelineMetric();
         timelineMetric.setMetricName(metric.metricsName);
         timelineMetric.setHostName(nodeHostName);
@@ -127,52 +135,44 @@ public class MetricsMgr {
 
         metricsMap.put(metric.metricsName, timelineMetric);
       }
-      logger.debug("Adding metrics=" + metric.metricsName);
+
+      LOG.debug("Adding metrics=" + metric.metricsName);
       if (metric.isPointInTime) {
-        timelineMetric.getMetricValues().put(currMSLong,
-          new Double(currCount));
+        timelineMetric.getMetricValues().put(currMSLong, new Double(currCount));
       } else {
         Double value = timelineMetric.getMetricValues().get(currMSLong);
         if (value == null) {
           value = new Double(0);
         }
-        value += (currCount - metric.prevPublishCount);
+        value += (currCount - metric.prevPublishValue);
         timelineMetric.getMetricValues().put(currMSLong, value);
-        metric.prevPublishCount = currCount;
+        metric.prevPublishValue = currCount;
       }
     }
+  }
 
-    if (metricsMap.size() > 0
-      && currMS - lastPublishTimeMS > publishIntervalMS) {
+  private void publishMetrics(long currMS) {
+    if (!metricsMap.isEmpty() && currMS - lastPublishTimeMS > publishIntervalMS) {
       try {
-        // Time to publish
         TimelineMetrics timelineMetrics = new TimelineMetrics();
-        List<TimelineMetric> timeLineMetricList = new ArrayList<TimelineMetric>();
-        timeLineMetricList.addAll(metricsMap.values());
-        timelineMetrics.setMetrics(timeLineMetricList);
+        timelineMetrics.setMetrics(new ArrayList<TimelineMetric>(metricsMap.values()));
         amsClient.emitMetrics(timelineMetrics);
-        logger.info("Published " + timeLineMetricList.size()
-          + " metrics to AMS");
+
+        LOG.info("Published " + timelineMetrics.getMetrics().size() + " metrics to AMS");
         metricsMap.clear();
-        timeLineMetricList.clear();
         lastPublishTimeMS = currMS;
       } catch (Throwable t) {
-        logger.warn("Error sending metrics to AMS.", t);
+        LOG.warn("Error sending metrics to AMS.", t);
         if (currMS - lastFailedPublishTimeMS > maxMetricsBuffer) {
-          logger.error("AMS was not sent for last "
-            + maxMetricsBuffer
-            / 1000
-            + " seconds. Purging it and will start rebuilding it again");
+          LOG.error("AMS was not sent for last " + maxMetricsBuffer / 1000 +
+              " seconds. Purging it and will start rebuilding it again");
           metricsMap.clear();
           lastFailedPublishTimeMS = currMS;
         }
       }
     } else {
-      logger.info("Not publishing metrics. metrics.size()="
-        + metricsMap.size() + ", lastPublished="
-        + (currMS - lastPublishTimeMS) / 1000
-        + " seconds ago, intervalConfigured=" + publishIntervalMS
-        / 1000);
+      LOG.info("Not publishing metrics. metrics.size()=" + metricsMap.size() + ", lastPublished=" +
+          (currMS - lastPublishTimeMS) / 1000 + " seconds ago, intervalConfigured=" + publishIntervalMS / 1000);
     }
   }
 }

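A sketch of how a component could hand its counters to the renamed manager; gatherMetrics() accumulates the deltas since the last publish and publishMetrics() pushes them to AMS once the publish interval has elapsed. The metric name is invented, and publishing only happens when an AMS collector is configured.

import java.util.ArrayList;
import java.util.List;

import org.apache.ambari.logfeeder.metrics.MetricData;
import org.apache.ambari.logfeeder.metrics.MetricsManager;

public class MetricsManagerSketch {
  public static void main(String[] args) {
    MetricsManager manager = new MetricsManager();
    manager.init(); // enables publishing only if an AMS collector URI is configured

    MetricData statMetric = new MetricData("output.write_logs", false);
    statMetric.value = 100; // pretend 100 logs were written

    List<MetricData> metrics = new ArrayList<MetricData>();
    metrics.add(statMetric);

    // No-op when metrics are disabled; otherwise gathers and (eventually) publishes.
    manager.useMetrics(metrics);
  }
}
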
+ 8 - 5
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/Output.java

@@ -26,16 +26,19 @@ import java.util.Map.Entry;
 
 import org.apache.ambari.logfeeder.common.ConfigBlock;
 import org.apache.ambari.logfeeder.input.InputMarker;
-import org.apache.ambari.logfeeder.metrics.MetricCount;
+import org.apache.ambari.logfeeder.metrics.MetricData;
 import org.apache.ambari.logfeeder.util.LogFeederUtil;
 import org.apache.log4j.Logger;
 
 public abstract class Output extends ConfigBlock {
-  private static final Logger logger = Logger.getLogger(Output.class);
+  private static final Logger LOG = Logger.getLogger(Output.class);
 
   private String destination = null;
 
-  protected MetricCount writeBytesMetric = new MetricCount();
+  protected MetricData writeBytesMetric = new MetricData(getWriteBytesMetricName(), false);
+  protected String getWriteBytesMetricName() {
+    return null;
+  }
 
   @Override
   public String getShortDescription() {
@@ -67,7 +70,7 @@ public abstract class Output extends ConfigBlock {
    * Extend this method to clean up
    */
   public void close() {
-    logger.info("Calling base close()." + getShortDescription());
+    LOG.info("Calling base close()." + getShortDescription());
     isClosed = true;
   }
 
@@ -91,7 +94,7 @@ public abstract class Output extends ConfigBlock {
   }
 
   @Override
-  public void addMetricsContainers(List<MetricCount> metricsList) {
+  public void addMetricsContainers(List<MetricData> metricsList) {
     super.addMetricsContainers(metricsList);
     metricsList.add(writeBytesMetric);
   }

+ 3 - 5
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputData.java

@@ -27,18 +27,16 @@ import org.apache.ambari.logfeeder.input.InputMarker;
  * This contains the output json object and InputMarker.
  */
 public class OutputData {
-  Map<String, Object> jsonObj;
-  InputMarker inputMarker;
+  public final Map<String, Object> jsonObj;
+  public final InputMarker inputMarker;
 
   public OutputData(Map<String, Object> jsonObj, InputMarker inputMarker) {
-    super();
     this.jsonObj = jsonObj;
     this.inputMarker = inputMarker;
   }
 
   @Override
   public String toString() {
-    return "OutputData [jsonObj=" + jsonObj + ", inputMarker="
-      + inputMarker + "]";
+    return "OutputData [jsonObj=" + jsonObj + ", inputMarker=" + inputMarker + "]";
   }
 }

+ 3 - 4
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputDevNull.java

@@ -28,16 +28,15 @@ import org.apache.log4j.Logger;
  */
 public class OutputDevNull extends Output {
 
-  private static Logger logger = Logger.getLogger(OutputDevNull.class);
+  private static final Logger LOG = Logger.getLogger(OutputDevNull.class);
 
   @Override
   public void write(String block, InputMarker inputMarker){
-    logger.trace("Ignore log block: " + block);
+    LOG.trace("Ignore log block: " + block);
   }
 
   @Override
   public void copyFile(File inputFile, InputMarker inputMarker) {
-    throw new UnsupportedOperationException(
-        "copyFile method is not yet supported for output=dev_null");
+    throw new UnsupportedOperationException("copyFile method is not yet supported for output=dev_null");
   }
 }

+ 18 - 24
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputFile.java

@@ -30,26 +30,27 @@ import org.apache.ambari.logfeeder.input.InputMarker;
 import org.apache.ambari.logfeeder.util.LogFeederUtil;
 import org.apache.commons.csv.CSVFormat;
 import org.apache.commons.csv.CSVPrinter;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.log4j.Logger;
 
 public class OutputFile extends Output {
-  static Logger logger = Logger.getLogger(OutputFile.class);
+  private static final Logger LOG = Logger.getLogger(OutputFile.class);
 
-  PrintWriter outWriter = null;
-  String filePath = null;
-  String codec;
+  private PrintWriter outWriter;
+  private String filePath = null;
+  private String codec;
 
   @Override
   public void init() throws Exception {
     super.init();
 
     filePath = getStringValue("path");
-    if (filePath == null || filePath.isEmpty()) {
-      logger.error("Filepath config property <path> is not set in config file.");
+    if (StringUtils.isEmpty(filePath)) {
+      LOG.error("Filepath config property <path> is not set in config file.");
       return;
     }
     codec = getStringValue("codec");
-    if (codec == null || codec.trim().isEmpty()) {
+    if (StringUtils.isBlank(codec)) {
       codec = "json";
     } else {
       if (codec.trim().equalsIgnoreCase("csv")) {
@@ -57,12 +58,11 @@ public class OutputFile extends Output {
       } else if (codec.trim().equalsIgnoreCase("json")) {
         codec = "csv";
       } else {
-        logger.error("Unsupported codec type. codec=" + codec
-          + ", will use json");
+        LOG.error("Unsupported codec type. codec=" + codec + ", will use json");
         codec = "json";
       }
     }
-    logger.info("Out filePath=" + filePath + ", codec=" + codec);
+    LOG.info("Out filePath=" + filePath + ", codec=" + codec);
     File outFile = new File(filePath);
     if (outFile.getParentFile() != null) {
       File parentDir = outFile.getParentFile();
@@ -71,16 +71,14 @@ public class OutputFile extends Output {
       }
     }
 
-    outWriter = new PrintWriter(new BufferedWriter(new FileWriter(outFile,
-      true)));
+    outWriter = new PrintWriter(new BufferedWriter(new FileWriter(outFile, true)));
 
-    logger.info("init() is successfull. filePath="
-      + outFile.getAbsolutePath());
+    LOG.info("init() is successfull. filePath=" + outFile.getAbsolutePath());
   }
 
   @Override
   public void close() {
-    logger.info("Closing file." + getShortDescription());
+    LOG.info("Closing file." + getShortDescription());
     if (outWriter != null) {
       try {
         outWriter.close();
@@ -92,8 +90,7 @@ public class OutputFile extends Output {
   }
 
   @Override
-  public void write(Map<String, Object> jsonObj, InputMarker inputMarker)
-    throws Exception {
+  public void write(Map<String, Object> jsonObj, InputMarker inputMarker) throws Exception {
     String outStr = null;
     CSVPrinter csvPrinter = null;
     try {
@@ -104,7 +101,7 @@ public class OutputFile extends Output {
         outStr = LogFeederUtil.getGson().toJson(jsonObj);
       }
       if (outWriter != null && outStr != null) {
-        statMetric.count++;
+        statMetric.value++;
 
         outWriter.println(outStr);
         outWriter.flush();
@@ -122,7 +119,7 @@ public class OutputFile extends Output {
   @Override
   synchronized public void write(String block, InputMarker inputMarker) throws Exception {
     if (outWriter != null && block != null) {
-      statMetric.count++;
+      statMetric.value++;
 
       outWriter.println(block);
       outWriter.flush();
@@ -135,10 +132,7 @@ public class OutputFile extends Output {
   }
 
   @Override
-  public void copyFile(File inputFile, InputMarker inputMarker)
-      throws UnsupportedOperationException {
-    throw new UnsupportedOperationException(
-        "copyFile method is not yet supported for output=file");
+  public void copyFile(File inputFile, InputMarker inputMarker) throws UnsupportedOperationException {
+    throw new UnsupportedOperationException("copyFile method is not yet supported for output=file");
   }
-
 }

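A usage sketch for OutputFile, assuming config reaches the block through ConfigBlock.loadConfig() as elsewhere in Log Feeder; the path and payload are invented.

import java.util.HashMap;
import java.util.Map;

import org.apache.ambari.logfeeder.output.OutputFile;

public class OutputFileSketch {
  public static void main(String[] args) throws Exception {
    // "path" and "codec" are the properties read in init() above.
    Map<String, Object> config = new HashMap<String, Object>();
    config.put("path", "/tmp/logfeeder-out.log");
    config.put("codec", "json");

    OutputFile output = new OutputFile();
    output.loadConfig(config);
    output.init();

    // Write a raw block; the InputMarker is normally supplied by the input side.
    output.write("{\"level\":\"INFO\",\"log_message\":\"hello\"}", null);
    output.close();
  }
}
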
+ 28 - 42
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputHDFSFile.java

@@ -43,7 +43,8 @@ import java.util.concurrent.ConcurrentLinkedQueue;
  * The events are spooled on the local file system and uploaded in batches asynchronously.
  */
 public class OutputHDFSFile extends Output implements RolloverHandler, RolloverCondition {
-  private final static Logger logger = Logger.getLogger(OutputHDFSFile.class);
+  private static final Logger LOG = Logger.getLogger(OutputHDFSFile.class);
+  
   private static final long DEFAULT_ROLLOVER_THRESHOLD_TIME_SECONDS = 5 * 60L;// 5 min by default
 
   private ConcurrentLinkedQueue<File> localReadyFiles = new ConcurrentLinkedQueue<File>();
@@ -72,23 +73,20 @@ public class OutputHDFSFile extends Output implements RolloverHandler, RolloverC
     rolloverThresholdTimeMillis = rolloverThresholdTimeSeconds * 1000L;
     filenamePrefix = getStringValue("file_name_prefix", filenamePrefix);
     if (StringUtils.isEmpty(hdfsOutDir)) {
-      logger
-          .error("HDFS config property <hdfs_out_dir> is not set in config file.");
+      LOG.error("HDFS config property <hdfs_out_dir> is not set in config file.");
       return;
     }
     if (StringUtils.isEmpty(hdfsHost)) {
-      logger
-          .error("HDFS config property <hdfs_host> is not set in config file.");
+      LOG.error("HDFS config property <hdfs_host> is not set in config file.");
       return;
     }
     if (StringUtils.isEmpty(hdfsPort)) {
-      logger
-          .error("HDFS config property <hdfs_port> is not set in config file.");
+      LOG.error("HDFS config property <hdfs_port> is not set in config file.");
       return;
     }
     HashMap<String, String> contextParam = buildContextParam();
     hdfsOutDir = PlaceholderUtil.replaceVariables(hdfsOutDir, contextParam);
-    logger.info("hdfs Output dir=" + hdfsOutDir);
+    LOG.info("hdfs Output dir=" + hdfsOutDir);
     String localFileDir = LogFeederUtil.getLogfeederTempDir() + "hdfs/service/";
     logSpooler = new LogSpooler(localFileDir, filenamePrefix, this, this);
     this.startHDFSCopyThread();
@@ -96,18 +94,17 @@ public class OutputHDFSFile extends Output implements RolloverHandler, RolloverC
 
   @Override
   public void close() {
-    logger.info("Closing file." + getShortDescription());
+    LOG.info("Closing file." + getShortDescription());
     logSpooler.rollover();
     this.stopHDFSCopyThread();
     isClosed = true;
   }
 
   @Override
-  synchronized public void write(String block, InputMarker inputMarker)
-      throws Exception {
+  public synchronized void write(String block, InputMarker inputMarker) throws Exception {
     if (block != null) {
       logSpooler.add(block);
-      statMetric.count++;
+      statMetric.value++;
     }
   }
 
@@ -127,24 +124,19 @@ public class OutputHDFSFile extends Output implements RolloverHandler, RolloverC
             Iterator<File> localFileIterator = localReadyFiles.iterator();
             while (localFileIterator.hasNext()) {
               File localFile = localFileIterator.next();
-              fileSystem = LogfeederHDFSUtil.INSTANCE.buildFileSystem(hdfsHost,
-                  hdfsPort);
+              fileSystem = LogfeederHDFSUtil.buildFileSystem(hdfsHost, hdfsPort);
               if (fileSystem != null && localFile.exists()) {
                 String destFilePath = hdfsOutDir + "/" + localFile.getName();
                 String localPath = localFile.getAbsolutePath();
                 boolean overWrite = true;
                 boolean delSrc = true;
-                boolean isCopied = LogfeederHDFSUtil.INSTANCE.copyFromLocal(
-                    localFile.getAbsolutePath(), destFilePath, fileSystem,
+                boolean isCopied = LogfeederHDFSUtil.copyFromLocal(localFile.getAbsolutePath(), destFilePath, fileSystem,
                     overWrite, delSrc);
                 if (isCopied) {
-                  logger.debug("File copy to hdfs hdfspath :" + destFilePath
-                      + " and deleted local file :" + localPath);
+                  LOG.debug("File copy to hdfs hdfspath :" + destFilePath + " and deleted local file :" + localPath);
                 } else {
-                  // TODO Need to write retry logic, in next release we can
-                  // handle it
-                  logger.error("Hdfs file copy  failed for hdfspath :"
-                      + destFilePath + " and localpath :" + localPath);
+                  // TODO Need to write retry logic; we can handle it in the next release
+                  LOG.error("HDFS file copy failed for hdfs path: " + destFilePath + " and local path: " + localPath);
                 }
               }
               localFileIterator.remove();
@@ -157,14 +149,11 @@ public class OutputHDFSFile extends Output implements RolloverHandler, RolloverC
                 }
               }
             } catch (InterruptedException e) {
-              logger.error(e.getLocalizedMessage(),e);
+              LOG.error(e.getLocalizedMessage(), e);
             }
           }
         } catch (Exception e) {
-          logger
-              .error(
-                  "Exception in hdfsCopyThread errorMsg:"
-                      + e.getLocalizedMessage(), e);
+          LOG.error("Exception in hdfsCopyThread errorMsg:" + e.getLocalizedMessage(), e);
         }
       }
     };
@@ -174,24 +163,23 @@ public class OutputHDFSFile extends Output implements RolloverHandler, RolloverC
 
   private void stopHDFSCopyThread() {
     if (hdfsCopyThread != null) {
-      logger.info("waiting till copy all local files to hdfs.......");
+      LOG.info("waiting till copy all local files to hdfs.......");
       while (!localReadyFiles.isEmpty()) {
         try {
           Thread.sleep(1000);
         } catch (InterruptedException e) {
-          logger.error(e.getLocalizedMessage(), e);
+          LOG.error(e.getLocalizedMessage(), e);
         }
-        logger.debug("still waiting to copy all local files to hdfs.......");
+        LOG.debug("still waiting to copy all local files to hdfs.......");
       }
-      logger.info("calling interrupt method for hdfsCopyThread to stop it.");
+      LOG.info("calling interrupt method for hdfsCopyThread to stop it.");
       try {
         hdfsCopyThread.interrupt();
       } catch (SecurityException exception) {
-        logger.error(" Current thread : '" + Thread.currentThread().getName()
-            + "' does not have permission to interrupt the Thread: '"
-            + hdfsCopyThread.getName() + "'");
+        LOG.error(" Current thread : '" + Thread.currentThread().getName() +
+            "' does not have permission to interrupt the Thread: '" + hdfsCopyThread.getName() + "'");
       }
-      LogfeederHDFSUtil.INSTANCE.closeFileSystem(fileSystem);
+      LogfeederHDFSUtil.closeFileSystem(fileSystem);
     }
   }
 
@@ -208,15 +196,13 @@ public class OutputHDFSFile extends Output implements RolloverHandler, RolloverC
         readyMonitor.notifyAll();
       }
     } catch (Exception e) {
-      logger.error(e.getLocalizedMessage(),e);
+      LOG.error(e.getLocalizedMessage(), e);
     }
   }
 
   @Override
-  public void copyFile(File inputFile, InputMarker inputMarker)
-      throws UnsupportedOperationException {
-    throw new UnsupportedOperationException(
-        "copyFile method is not yet supported for output=hdfs");     
+  public void copyFile(File inputFile, InputMarker inputMarker) throws UnsupportedOperationException {
+    throw new UnsupportedOperationException("copyFile method is not yet supported for output=hdfs");
   }
 
   /**
@@ -242,8 +228,8 @@ public class OutputHDFSFile extends Output implements RolloverHandler, RolloverC
     long timeSinceCreation = new Date().getTime() - currentSpoolerContext.getActiveLogCreationTime().getTime();
     boolean shouldRollover = timeSinceCreation > rolloverThresholdTimeMillis;
     if (shouldRollover) {
-      logger.info("Detecting that time since file creation time " + currentSpoolerContext.getActiveLogCreationTime() +
-                    " has crossed threshold (msecs) " + rolloverThresholdTimeMillis);
+      LOG.info("Detecting that time since file creation time " + currentSpoolerContext.getActiveLogCreationTime() +
+          " has crossed threshold (msecs) " + rolloverThresholdTimeMillis);
     }
     return shouldRollover;
   }
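
The shouldRollover check above is a plain elapsed-time comparison. Below is a self-contained sketch of the same test, with LogSpoolerContext reduced to the single timestamp the check needs; class and method names are illustrative.

import java.util.Date;

class RolloverCheckDemo {
  // 5 minutes, matching DEFAULT_ROLLOVER_THRESHOLD_TIME_SECONDS above
  static final long THRESHOLD_MILLIS = 5 * 60 * 1000L;

  static boolean shouldRollover(Date activeLogCreationTime) {
    long timeSinceCreation = new Date().getTime() - activeLogCreationTime.getTime();
    return timeSinceCreation > THRESHOLD_MILLIS;
  }

  public static void main(String[] args) {
    Date sixMinutesAgo = new Date(System.currentTimeMillis() - 6 * 60 * 1000L);
    System.out.println(shouldRollover(sixMinutesAgo)); // prints: true
  }
}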

+ 30 - 28
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputKafka.java

@@ -55,6 +55,16 @@ public class OutputKafka extends Output {
   // Let's start with the assumption Kafka is down
   private boolean isKafkaBrokerUp = false;
 
+  @Override
+  protected String getStatMetricName() {
+    return "output.kafka.write_logs";
+  }
+
+  @Override
+  protected String getWriteBytesMetricName() {
+    return "output.kafka.write_bytes";
+  }
+  
   @Override
   public void init() throws Exception {
     super.init();
@@ -65,9 +75,6 @@ public class OutputKafka extends Output {
   }
 
   private Properties initProperties() throws Exception {
-    statMetric.metricsName = "output.kafka.write_logs";
-    writeBytesMetric.metricsName = "output.kafka.write_bytes";
-
     String brokerList = getStringValue("broker_list");
     if (StringUtils.isEmpty(brokerList)) {
       throw new Exception("For kafka output, bootstrap broker_list is needed");
@@ -124,17 +131,15 @@ public class OutputKafka extends Output {
             if (publishMessage(kafkaCallBack.message, kafkaCallBack.inputMarker)) {
               kafkaCallBack = null;
             } else {
-              LOG.error("Kafka is down. messageNumber=" + kafkaCallBack.thisMessageNumber + ". Going to sleep for "
-                  + FAILED_RETRY_INTERVAL + " seconds");
+              LOG.error("Kafka is down. messageNumber=" + kafkaCallBack.thisMessageNumber + ". Going to sleep for " +
+                  FAILED_RETRY_INTERVAL + " seconds");
               Thread.sleep(FAILED_RETRY_INTERVAL * 1000);
             }
 
           } catch (Throwable t) {
             String logMessageKey = this.getClass().getSimpleName() + "_KAFKA_RETRY_WRITE_ERROR";
-            LogFeederUtil.logErrorMessageByInterval(logMessageKey,
-                "Error sending message to Kafka during retry. message="
-                    + (kafkaCallBack == null ? null : kafkaCallBack.message),
-                t, LOG, Level.ERROR);
+            LogFeederUtil.logErrorMessageByInterval(logMessageKey, "Error sending message to Kafka during retry. message=" +
+                (kafkaCallBack == null ? null : kafkaCallBack.message), t, LOG, Level.ERROR);
           }
         }
 
@@ -160,8 +165,8 @@ public class OutputKafka extends Output {
           LOG.error("Kafka is down. Going to sleep for " + FAILED_RETRY_INTERVAL + " seconds");
           Thread.sleep(FAILED_RETRY_INTERVAL * 1000);
         } else {
-          LOG.warn("Kafka is still catching up from previous failed messages. outstanding messages="
-              + failedMessages.size() + " Going to sleep for " + CATCHUP_RETRY_INTERVAL + " seconds");
+          LOG.warn("Kafka is still catching up from previous failed messages. outstanding messages=" + failedMessages.size() +
+              " Going to sleep for " + CATCHUP_RETRY_INTERVAL + " seconds");
           Thread.sleep(CATCHUP_RETRY_INTERVAL * 1000);
         }
       } catch (Throwable t) {
@@ -198,16 +203,15 @@ public class OutputKafka extends Output {
 
   private boolean publishMessage(String block, InputMarker inputMarker) {
     if (isAsync && isKafkaBrokerUp) { // Send asynchronously
-      producer.send(new ProducerRecord<String, String>(topic, block),
-          new KafkaCallBack(this, block, inputMarker, ++messageCount));
+      producer.send(new ProducerRecord<String, String>(topic, block), new KafkaCallBack(this, block, inputMarker, ++messageCount));
       return true;
     } else { // Send synchronously
       try {
         // Not using key. Let it round robin
         RecordMetadata metadata = producer.send(new ProducerRecord<String, String>(topic, block)).get();
         if (metadata != null) {
-          statMetric.count++;
-          writeBytesMetric.count += block.length();
+          statMetric.value++;
+          writeBytesMetric.value += block.length();
         }
         if (!isKafkaBrokerUp) {
           LOG.info("Started writing to kafka. " + getShortDescription());
@@ -217,18 +221,18 @@ public class OutputKafka extends Output {
       } catch (InterruptedException e) {
         isKafkaBrokerUp = false;
         String logKeyMessage = this.getClass().getSimpleName() + "_KAFKA_INTERRUPT";
-        LogFeederUtil.logErrorMessageByInterval(logKeyMessage, "InterruptedException-Error sending message to Kafka", e,
-            LOG, Level.ERROR);
+        LogFeederUtil.logErrorMessageByInterval(logKeyMessage, "InterruptedException-Error sending message to Kafka", e, LOG,
+            Level.ERROR);
       } catch (ExecutionException e) {
         isKafkaBrokerUp = false;
         String logKeyMessage = this.getClass().getSimpleName() + "_KAFKA_EXECUTION";
-        LogFeederUtil.logErrorMessageByInterval(logKeyMessage, "ExecutionException-Error sending message to Kafka", e,
-            LOG, Level.ERROR);
+        LogFeederUtil.logErrorMessageByInterval(logKeyMessage, "ExecutionException-Error sending message to Kafka", e, LOG,
+            Level.ERROR);
       } catch (Throwable t) {
         isKafkaBrokerUp = false;
         String logKeyMessage = this.getClass().getSimpleName() + "_KAFKA_WRITE_ERROR";
-        LogFeederUtil.logErrorMessageByInterval(logKeyMessage, "GenericException-Error sending message to Kafka", t,
-            LOG, Level.ERROR);
+        LogFeederUtil.logErrorMessageByInterval(logKeyMessage, "GenericException-Error sending message to Kafka", t, LOG,
+            Level.ERROR);
       }
     }
     return false;
@@ -260,12 +264,12 @@ public class OutputKafka extends Output {
           output.isKafkaBrokerUp = true;
         }
         output.incrementStat(1);
-        output.writeBytesMetric.count += message.length();
+        output.writeBytesMetric.value += message.length();
       } else {
         output.isKafkaBrokerUp = false;
         String logKeyMessage = this.getClass().getSimpleName() + "_KAFKA_ASYNC_ERROR";
-        LogFeederUtil.logErrorMessageByInterval(logKeyMessage, "Error sending message to Kafka. Async Callback",
-            exception, LOG, Level.ERROR);
+        LogFeederUtil.logErrorMessageByInterval(logKeyMessage, "Error sending message to Kafka. Async Callback", exception, LOG,
+            Level.ERROR);
 
         output.failedMessages.add(this);
       }
@@ -273,9 +277,7 @@ public class OutputKafka extends Output {
   }
 
   @Override
-  public void copyFile(File inputFile, InputMarker inputMarker)
-      throws UnsupportedOperationException {
-    throw new UnsupportedOperationException(
-        "copyFile method is not yet supported for output=kafka");
+  public void copyFile(File inputFile, InputMarker inputMarker) throws UnsupportedOperationException {
+    throw new UnsupportedOperationException("copyFile method is not yet supported for output=kafka");
   }
 }
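
OutputKafka sends asynchronously and parks failed messages in a queue that a retry thread drains. Here is a reduced sketch of that pattern against the kafka-clients API; the broker address, topic name, and payload are placeholders, and the full class additionally tracks broker health and can retry synchronously.

import java.util.Properties;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;

class AsyncKafkaSketch {
  static final BlockingQueue<String> failedMessages = new LinkedBlockingQueue<>();

  public static void main(String[] args) {
    Properties props = new Properties();
    props.put("bootstrap.servers", "localhost:9092"); // placeholder broker
    props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
    props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
    try (KafkaProducer<String, String> producer = new KafkaProducer<>(props)) {
      String block = "{\"log_message\":\"hello\"}";
      // No key is set, so partitions are chosen round-robin, as the comment above notes.
      producer.send(new ProducerRecord<>("logs", block), (metadata, exception) -> {
        if (exception != null) {
          failedMessages.add(block); // a retry thread would re-publish these later
        }
      });
    } // close() flushes pending sends
  }
}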

+ 96 - 109
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputMgr.java → ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputManager.java

@@ -27,41 +27,53 @@ import java.util.List;
 import java.util.Map;
 import java.util.UUID;
 
+import org.apache.ambari.logfeeder.common.LogFeederConstants;
 import org.apache.ambari.logfeeder.input.Input;
 import org.apache.ambari.logfeeder.input.InputMarker;
-import org.apache.ambari.logfeeder.logconfig.LogFeederConstants;
-import org.apache.ambari.logfeeder.logconfig.filter.FilterLogData;
-import org.apache.ambari.logfeeder.metrics.MetricCount;
+import org.apache.ambari.logfeeder.logconfig.FilterLogData;
+import org.apache.ambari.logfeeder.metrics.MetricData;
 import org.apache.ambari.logfeeder.util.LogFeederUtil;
+import org.apache.ambari.logfeeder.util.MurmurHash;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.log4j.Level;
 import org.apache.log4j.Logger;
 
-public class OutputMgr {
-  private static final Logger logger = Logger.getLogger(OutputMgr.class);
+public class OutputManager {
+  private static final Logger LOG = Logger.getLogger(OutputManager.class);
 
-  private Collection<Output> outputList = new ArrayList<Output>();
+  private static final int HASH_SEED = 31174077;
+  private static final int MAX_OUTPUT_SIZE = 32765; // 32766-1
+
+  private List<Output> outputs = new ArrayList<Output>();
 
   private boolean addMessageMD5 = true;
 
-  private int MAX_OUTPUT_SIZE = 32765; // 32766-1
-  private static long doc_counter = 0;
-  private MetricCount messageTruncateMetric = new MetricCount();
+  private static long docCounter = 0;
+  private MetricData messageTruncateMetric = new MetricData(null, false);
+
+  public List<Output> getOutputs() {
+    return outputs;
+  }
+
+  public void add(Output output) {
+    this.outputs.add(output);
+  }
 
-  
-  public Collection<Output> getOutputList() {
-    return outputList;
+  public void retainUsedOutputs(Collection<Output> usedOutputs) {
+    outputs.retainAll(usedOutputs);
   }
 
-  public void setOutputList(Collection<Output> outputList) {
-    this.outputList = outputList;
+  public void init() throws Exception {
+    for (Output output : outputs) {
+      output.init();
+    }
   }
 
   public void write(Map<String, Object> jsonObj, InputMarker inputMarker) {
     Input input = inputMarker.input;
 
     // Update the block with the context fields
-    for (Map.Entry<String, String> entry : input.getContextFields()
-      .entrySet()) {
+    for (Map.Entry<String, String> entry : input.getContextFields().entrySet()) {
       if (jsonObj.get(entry.getKey()) == null) {
         jsonObj.put(entry.getKey(), entry.getValue());
       }
@@ -69,7 +81,6 @@ public class OutputMgr {
 
     // TODO: Ideally most of the overrides should be configurable
 
-    // Add the input type
     if (jsonObj.get("type") == null) {
       jsonObj.put("type", input.getStringValue("type"));
     }
@@ -79,20 +90,16 @@ public class OutputMgr {
     if (jsonObj.get("path") == null && input.getStringValue("path") != null) {
       jsonObj.put("path", input.getStringValue("path"));
     }
-
-    // Add host if required
     if (jsonObj.get("host") == null && LogFeederUtil.hostName != null) {
       jsonObj.put("host", LogFeederUtil.hostName);
     }
-    // Add IP if required
     if (jsonObj.get("ip") == null && LogFeederUtil.ipAddress != null) {
       jsonObj.put("ip", LogFeederUtil.ipAddress);
     }
-    
-    //Add level
     if (jsonObj.get("level") == null) {
       jsonObj.put("level", LogFeederConstants.LOG_LEVEL_UNKNOWN);
     }
+    
     if (input.isUseEventMD5() || input.isGenEventMD5()) {
       String prefix = "";
       Object logtimeObj = jsonObj.get("logtime");
@@ -103,8 +110,8 @@ public class OutputMgr {
           prefix = logtimeObj.toString();
         }
       }
-      Long eventMD5 = LogFeederUtil.genHash(LogFeederUtil.getGson()
-        .toJson(jsonObj));
+      
+      Long eventMD5 = MurmurHash.hash64A(LogFeederUtil.getGson().toJson(jsonObj).getBytes(), HASH_SEED);
       if (input.isGenEventMD5()) {
         jsonObj.put("event_md5", prefix + eventMD5.toString());
       }
@@ -113,8 +120,7 @@ public class OutputMgr {
       }
     }
 
-    // jsonObj.put("@timestamp", new Date());
-    jsonObj.put("seq_num", new Long(doc_counter++));
+    jsonObj.put("seq_num", new Long(docCounter++));
     if (jsonObj.get("id") == null) {
       jsonObj.put("id", UUID.randomUUID().toString());
     }
@@ -122,71 +128,88 @@ public class OutputMgr {
       jsonObj.put("event_count", new Integer(1));
     }
     if (inputMarker.lineNumber > 0) {
-      jsonObj.put("logfile_line_number", new Integer(
-        inputMarker.lineNumber));
+      jsonObj.put("logfile_line_number", new Integer(inputMarker.lineNumber));
     }
     if (jsonObj.containsKey("log_message")) {
       // TODO: Let's check size only for log_message for now
       String logMessage = (String) jsonObj.get("log_message");
-      if (logMessage != null
-        && logMessage.getBytes().length > MAX_OUTPUT_SIZE) {
-        messageTruncateMetric.count++;
-        final String LOG_MESSAGE_KEY = this.getClass().getSimpleName()
-          + "_MESSAGESIZE";
-        LogFeederUtil.logErrorMessageByInterval(LOG_MESSAGE_KEY,
-          "Message is too big. size="
-            + logMessage.getBytes().length + ", input="
-            + input.getShortDescription()
-            + ". Truncating to " + MAX_OUTPUT_SIZE
-            + ", first upto 100 characters="
-            + LogFeederUtil.subString(logMessage, 100),
-          null, logger, Level.WARN);
-        logMessage = new String(logMessage.getBytes(), 0,
-          MAX_OUTPUT_SIZE);
-        jsonObj.put("log_message", logMessage);
-        // Add error tags
-        @SuppressWarnings("unchecked")
-        List<String> tagsList = (List<String>) jsonObj.get("tags");
-        if (tagsList == null) {
-          tagsList = new ArrayList<String>();
-          jsonObj.put("tags", tagsList);
-        }
-        tagsList.add("error_message_truncated");
-
-      }
+      logMessage = truncateLongLogMessage(jsonObj, input, logMessage);
       if (addMessageMD5) {
-        jsonObj.put("message_md5",
-          "" + LogFeederUtil.genHash(logMessage));
+        jsonObj.put("message_md5", "" + MurmurHash.hash64A(logMessage.getBytes(), 31174077));
       }
     }
-    //check log is allowed to send output
+    
     if (FilterLogData.INSTANCE.isAllowed(jsonObj)) {
       for (Output output : input.getOutputList()) {
         try {
           output.write(jsonObj, inputMarker);
         } catch (Exception e) {
-          logger.error("Error writing. to " + output.getShortDescription(), e);
+          LOG.error("Error writing. to " + output.getShortDescription(), e);
         }
       }
     }
   }
 
+  @SuppressWarnings("unchecked")
+  private String truncateLongLogMessage(Map<String, Object> jsonObj, Input input, String logMessage) {
+    if (logMessage != null && logMessage.getBytes().length > MAX_OUTPUT_SIZE) {
+      messageTruncateMetric.value++;
+      String logMessageKey = this.getClass().getSimpleName() + "_MESSAGESIZE";
+      LogFeederUtil.logErrorMessageByInterval(logMessageKey, "Message is too big. size=" + logMessage.getBytes().length +
+          ", input=" + input.getShortDescription() + ". Truncating to " + MAX_OUTPUT_SIZE + ", first upto 100 characters=" +
+          StringUtils.abbreviate(logMessage, 100), null, LOG, Level.WARN);
+      logMessage = new String(logMessage.getBytes(), 0, MAX_OUTPUT_SIZE);
+      jsonObj.put("log_message", logMessage);
+      List<String> tagsList = (List<String>) jsonObj.get("tags");
+      if (tagsList == null) {
+        tagsList = new ArrayList<String>();
+        jsonObj.put("tags", tagsList);
+      }
+      tagsList.add("error_message_truncated");
+    }
+    return logMessage;
+  }
+
   public void write(String jsonBlock, InputMarker inputMarker) {
-    //check log is allowed to send output
     if (FilterLogData.INSTANCE.isAllowed(jsonBlock)) {
       for (Output output : inputMarker.input.getOutputList()) {
         try {
           output.write(jsonBlock, inputMarker);
         } catch (Exception e) {
-          logger.error("Error writing. to " + output.getShortDescription(), e);
+          LOG.error("Error writing. to " + output.getShortDescription(), e);
         }
       }
     }
   }
 
+  public void copyFile(File inputFile, InputMarker inputMarker) {
+    Input input = inputMarker.input;
+    for (Output output : input.getOutputList()) {
+      try {
+        output.copyFile(inputFile, inputMarker);
+      } catch (Exception e) {
+        LOG.error("Error copying file to " + output.getShortDescription(), e);
+      }
+    }
+  }
+
+  public void logStats() {
+    for (Output output : outputs) {
+      output.logStat();
+    }
+    LogFeederUtil.logStatForMetric(messageTruncateMetric, "Stat: Messages Truncated", "");
+  }
+
+  public void addMetricsContainers(List<MetricData> metricsList) {
+    metricsList.add(messageTruncateMetric);
+    for (Output output : outputs) {
+      output.addMetricsContainers(metricsList);
+    }
+  }
+
   public void close() {
-    logger.info("Close called for outputs ...");
-    for (Output output : outputList) {
+    LOG.info("Close called for outputs ...");
+    for (Output output : outputs) {
       try {
         output.setDrain(true);
         output.close();
@@ -194,20 +217,17 @@ public class OutputMgr {
         // Ignore
       }
     }
+    
     // Need to get this value from property
     int iterations = 30;
     int waitTimeMS = 1000;
-    int i;
-    boolean allClosed = true;
-    for (i = 0; i < iterations; i++) {
-      allClosed = true;
-      for (Output output : outputList) {
+    for (int i = 0; i < iterations; i++) {
+      boolean allClosed = true;
+      for (Output output : outputs) {
         if (!output.isClosed()) {
           try {
             allClosed = false;
-            logger.warn("Waiting for output to close. "
-              + output.getShortDescription() + ", "
-              + (iterations - i) + " more seconds");
+            LOG.warn("Waiting for output to close. " + output.getShortDescription() + ", " + (iterations - i) + " more seconds");
             Thread.sleep(waitTimeMS);
           } catch (Throwable t) {
             // Ignore
@@ -215,48 +235,15 @@ public class OutputMgr {
         }
       }
       if (allClosed) {
-        break;
+        LOG.info("All outputs are closed. Iterations=" + i);
+        return;
       }
     }
 
-    if (!allClosed) {
-      logger.warn("Some outpus were not closed. Iterations=" + i);
-      for (Output output : outputList) {
-        if (!output.isClosed()) {
-          logger.warn("Output not closed. Will ignore it."
-            + output.getShortDescription() + ", pendingCound="
-            + output.getPendingCount());
-        }
-      }
-    } else {
-      logger.info("All outputs are closed. Iterations=" + i);
-    }
-  }
-
-  public void logStats() {
-    for (Output output : outputList) {
-      output.logStat();
-    }
-    LogFeederUtil.logStatForMetric(messageTruncateMetric,
-      "Stat: Messages Truncated", null);
-  }
-
-  public void addMetricsContainers(List<MetricCount> metricsList) {
-    metricsList.add(messageTruncateMetric);
-    for (Output output : outputList) {
-      output.addMetricsContainers(metricsList);
-    }
-  }
-
-  
-  public void copyFile(File inputFile, InputMarker inputMarker) {
-    Input input = inputMarker.input;
-    for (Output output : input.getOutputList()) {
-      try {
-        output.copyFile(inputFile, inputMarker);
-      }catch (Exception e) {
-        logger.error("Error coyping file . to " + output.getShortDescription(),
-            e);
+    LOG.warn("Some outpus were not closed after " + iterations + "  iterations");
+    for (Output output : outputs) {
+      if (!output.isClosed()) {
+        LOG.warn("Output not closed. Will ignore it." + output.getShortDescription() + ", pendingCound=" + output.getPendingCount());
       }
     }
   }
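
The extracted truncateLongLogMessage keeps log_message under Solr's roughly 32 KB field limit and tags truncated events. Below is a stripped-down, self-contained version of that step; note that the byte-level cut can split a multi-byte character, a caveat the original code shares.

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

class TruncateSketch {
  static final int MAX_OUTPUT_SIZE = 32765;

  @SuppressWarnings("unchecked")
  static void truncate(Map<String, Object> jsonObj) {
    String logMessage = (String) jsonObj.get("log_message");
    if (logMessage != null && logMessage.getBytes().length > MAX_OUTPUT_SIZE) {
      // Cut to the byte limit and tag the event so the truncation is visible downstream.
      jsonObj.put("log_message", new String(logMessage.getBytes(), 0, MAX_OUTPUT_SIZE));
      List<String> tags = (List<String>) jsonObj.computeIfAbsent("tags", k -> new ArrayList<String>());
      tags.add("error_message_truncated");
    }
  }

  public static void main(String[] args) {
    Map<String, Object> doc = new HashMap<>();
    doc.put("log_message", new String(new char[40000]).replace('\0', 'x'));
    truncate(doc);
    System.out.println(((String) doc.get("log_message")).length() + " " + doc.get("tags"));
  }
}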

+ 18 - 23
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputS3File.java

@@ -22,6 +22,7 @@ import com.google.common.annotations.VisibleForTesting;
 import com.google.gson.Gson;
 import com.google.gson.GsonBuilder;
 import org.apache.ambari.logfeeder.LogFeeder;
+import org.apache.ambari.logfeeder.common.LogFeederConstants;
 import org.apache.ambari.logfeeder.filter.Filter;
 import org.apache.ambari.logfeeder.input.InputMarker;
 import org.apache.ambari.logfeeder.output.spool.LogSpooler;
@@ -47,10 +48,10 @@ import java.util.Map.Entry;
  * </ul>
  */
 public class OutputS3File extends Output implements RolloverCondition, RolloverHandler {
+  private static final Logger LOG = Logger.getLogger(OutputS3File.class);
 
   public static final String INPUT_ATTRIBUTE_TYPE = "type";
   public static final String GLOBAL_CONFIG_S3_PATH_SUFFIX = "global.config.json";
-  static private Logger logger = Logger.getLogger(OutputS3File.class);
 
   private LogSpooler logSpooler;
   private S3OutputConfiguration s3OutputConfiguration;
@@ -72,23 +73,21 @@ public class OutputS3File extends Output implements RolloverCondition, RolloverH
   @Override
   public void copyFile(File inputFile, InputMarker inputMarker) {
     String type = inputMarker.input.getStringValue(INPUT_ATTRIBUTE_TYPE);
-    S3Uploader s3Uploader = new S3Uploader(s3OutputConfiguration,
-        S3Util.INSTANCE, false, type);
-    String resolvedPath = s3Uploader.uploadFile(inputFile,
-        inputMarker.input.getStringValue(INPUT_ATTRIBUTE_TYPE));
+    S3Uploader s3Uploader = new S3Uploader(s3OutputConfiguration, false, type);
+    String resolvedPath = s3Uploader.uploadFile(inputFile, inputMarker.input.getStringValue(INPUT_ATTRIBUTE_TYPE));
 
     uploadConfig(inputMarker, type, s3OutputConfiguration, resolvedPath);
   }
 
-  private void uploadConfig(InputMarker inputMarker, String type,
-                            S3OutputConfiguration s3OutputConfiguration, String resolvedPath) {
+  private void uploadConfig(InputMarker inputMarker, String type, S3OutputConfiguration s3OutputConfiguration,
+      String resolvedPath) {
 
     ArrayList<Map<String, Object>> filters = new ArrayList<>();
     addFilters(filters, inputMarker.input.getFirstFilter());
     Map<String, Object> inputConfig = new HashMap<>();
     inputConfig.putAll(inputMarker.input.getConfigs());
-    String s3CompletePath = S3Util.S3_PATH_START_WITH + s3OutputConfiguration.getS3BucketName()
-        + S3Util.S3_PATH_SEPARATOR + resolvedPath;
+    String s3CompletePath = LogFeederConstants.S3_PATH_START_WITH + s3OutputConfiguration.getS3BucketName() +
+        LogFeederConstants.S3_PATH_SEPARATOR + resolvedPath;
     inputConfig.put("path", s3CompletePath);
 
     ArrayList<Map<String, Object>> inputConfigList = new ArrayList<>();
@@ -117,17 +116,15 @@ public class OutputS3File extends Output implements RolloverCondition, RolloverH
     }
   }
 
-  private void writeConfigToS3(Map<String, Object> configToWrite, String s3KeySuffix,
-                              S3OutputConfiguration s3OutputConfiguration) {
+  private void writeConfigToS3(Map<String, Object> configToWrite, String s3KeySuffix, S3OutputConfiguration s3OutputConfiguration) {
     Gson gson = new GsonBuilder().setPrettyPrinting().create();
     String configJson = gson.toJson(configToWrite);
 
-    String s3ResolvedKey = new S3LogPathResolver().
-        getResolvedPath(getStringValue("s3_config_dir"), s3KeySuffix, s3OutputConfiguration.getCluster());
+    String s3ResolvedKey = new S3LogPathResolver().getResolvedPath(getStringValue("s3_config_dir"), s3KeySuffix,
+        s3OutputConfiguration.getCluster());
 
-    S3Util.INSTANCE.writeIntoS3File(configJson, s3OutputConfiguration.getS3BucketName(),
-        s3ResolvedKey, s3OutputConfiguration.getS3AccessKey(),
-        s3OutputConfiguration.getS3SecretKey());
+    S3Util.writeIntoS3File(configJson, s3OutputConfiguration.getS3BucketName(), s3ResolvedKey,
+        s3OutputConfiguration.getS3AccessKey(), s3OutputConfiguration.getS3SecretKey());
   }
 
   private String getComponentConfigFileName(String componentName) {
@@ -136,7 +133,7 @@ public class OutputS3File extends Output implements RolloverCondition, RolloverH
 
 
   private Map<String, Object> getGlobalConfig() {
-    Map<String, Object> globalConfig = LogFeeder.globalMap;
+    Map<String, Object> globalConfig = LogFeeder.globalConfigs;
     if (globalConfig == null) {
       globalConfig = new HashMap<>();
     }
@@ -173,8 +170,7 @@ public class OutputS3File extends Output implements RolloverCondition, RolloverH
       globalConfig.put("copy_file", false);
       globalConfig.put("process_file", true);
       globalConfig.put("tail", false);
-      Map<String, Object> addFields = (Map<String, Object>) globalConfig
-          .get("add_fields");
+      Map<String, Object> addFields = (Map<String, Object>) globalConfig.get("add_fields");
       if (addFields == null) {
         addFields = new HashMap<>();
       }
@@ -216,7 +212,7 @@ public class OutputS3File extends Output implements RolloverCondition, RolloverH
 
   @VisibleForTesting
   protected S3Uploader createUploader(String logType) {
-    S3Uploader uploader = new S3Uploader(s3OutputConfiguration, S3Util.INSTANCE, true, logType);
+    S3Uploader uploader = new S3Uploader(s3OutputConfiguration, true, logType);
     uploader.startUploaderThread();
     return uploader;
   }
@@ -224,8 +220,7 @@ public class OutputS3File extends Output implements RolloverCondition, RolloverH
   @VisibleForTesting
   protected LogSpooler createSpooler(String filePath) {
     String spoolDirectory = LogFeederUtil.getLogfeederTempDir() + "/s3/service";
-    logger.info(String.format("Creating spooler with spoolDirectory=%s, filePath=%s",
-        spoolDirectory, filePath));
+    LOG.info(String.format("Creating spooler with spoolDirectory=%s, filePath=%s", spoolDirectory, filePath));
     return new LogSpooler(spoolDirectory, new File(filePath).getName()+"-", this, this,
         s3OutputConfiguration.getRolloverTimeThresholdSecs());
   }
@@ -244,7 +239,7 @@ public class OutputS3File extends Output implements RolloverCondition, RolloverH
     long currentSize = spoolFile.length();
     boolean result = (currentSize >= s3OutputConfiguration.getRolloverSizeThresholdBytes());
     if (result) {
-      logger.info(String.format("Rolling over %s, current size %d, threshold size %d", spoolFile, currentSize,
+      LOG.info(String.format("Rolling over %s, current size %d, threshold size %d", spoolFile, currentSize,
           s3OutputConfiguration.getRolloverSizeThresholdBytes()));
     }
     return result;
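
writeConfigToS3 above serializes the merged config map with Gson before uploading. The serialization step in isolation, with placeholder map contents:

import java.util.HashMap;
import java.util.Map;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;

class ConfigJsonSketch {
  public static void main(String[] args) {
    Map<String, Object> config = new HashMap<>();
    config.put("path", "s3://my-bucket/service-logs/hdfs_namenode.log.gz"); // placeholder
    config.put("copy_file", false);
    config.put("process_file", true);
    Gson gson = new GsonBuilder().setPrettyPrinting().create();
    System.out.println(gson.toJson(config)); // pretty-printed JSON, as written to S3
  }
}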

+ 32 - 30
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputSolr.java

@@ -34,7 +34,8 @@ import java.util.concurrent.LinkedBlockingQueue;
 import java.util.concurrent.TimeUnit;
 
 import org.apache.ambari.logfeeder.input.InputMarker;
-import org.apache.ambari.logfeeder.logconfig.FetchConfigFromSolr;
+import org.apache.ambari.logfeeder.logconfig.LogConfigHandler;
+import org.apache.ambari.logfeeder.util.DateUtil;
 import org.apache.ambari.logfeeder.util.LogFeederUtil;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.log4j.Level;
@@ -76,7 +77,17 @@ public class OutputSolr extends Output {
 
   private BlockingQueue<OutputData> outgoingBuffer = null;
   private List<SolrWorkerThread> workerThreadList = new ArrayList<>();
-
+  
+  @Override
+  protected String getStatMetricName() {
+    return "output.solr.write_logs";
+  }
+  
+  @Override
+  protected String getWriteBytesMetricName() {
+    return "output.solr.write_bytes";
+  }
+  
   @Override
   public void init() throws Exception {
     super.init();
@@ -87,9 +98,6 @@ public class OutputSolr extends Output {
   }
 
   private void initParams() throws Exception {
-    statMetric.metricsName = "output.solr.write_logs";
-    writeBytesMetric.metricsName = "output.solr.write_bytes";
-
     splitMode = getStringValue("splits_interval_mins", "none");
     if (!splitMode.equalsIgnoreCase("none")) {
       splitInterval = getIntValue("split_interval_mins", DEFAULT_SPLIT_INTERVAL);
@@ -204,10 +212,8 @@ public class OutputSolr extends Output {
         LOG.info("Ping to Solr server is successful for worker=" + count);
       } else {
         LOG.warn(
-            String.format(
-                "Ping to Solr server failed. It would check again. worker=%d, "
-                    + "solrUrl=%s, zkConnectString=%s, collection=%s, response=%s",
-                count, solrUrl, zkConnectString, collection, response));
+            String.format("Ping to Solr server failed. It would check again. worker=%d, solrUrl=%s, zkConnectString=%s, " +
+                "collection=%s, response=%s", count, solrUrl, zkConnectString, collection, response));
       }
     } catch (Throwable t) {
       LOG.warn(String.format(
@@ -223,7 +229,7 @@ public class OutputSolr extends Output {
     
     while (true) {
       LOG.info("Checking if config is available");
-      if (FetchConfigFromSolr.isFilterAvailable()) {
+      if (LogConfigHandler.isFilterAvailable()) {
         LOG.info("Config is available");
         return;
       }
@@ -256,7 +262,7 @@ public class OutputSolr extends Output {
 
   private void useActualDateIfNeeded(Map<String, Object> jsonObj) {
     if (skipLogtime) {
-      jsonObj.put("logtime", LogFeederUtil.getActualDateStr());
+      jsonObj.put("logtime", DateUtil.getActualDateStr());
     }
   }
 
@@ -324,7 +330,7 @@ public class OutputSolr extends Output {
 
     private final SolrClient solrClient;
     private final Collection<SolrInputDocument> localBuffer = new ArrayList<>();
-    private final Map<String, InputMarker> latestInputMarkerList = new HashMap<>();
+    private final Map<String, InputMarker> latestInputMarkers = new HashMap<>();
 
     private long localBufferBytesSize = 0;
 
@@ -352,17 +358,16 @@ public class OutputSolr extends Output {
             }
           }
 
-          if (localBuffer.size() > 0 && ((outputData == null && isDrain())
-              || (nextDispatchDuration <= 0 || localBuffer.size() >= maxBufferSize))) {
+          if (localBuffer.size() > 0 && ((outputData == null && isDrain()) ||
+              (nextDispatchDuration <= 0 || localBuffer.size() >= maxBufferSize))) {
             boolean response = sendToSolr(outputData);
             if( isDrain() && !response) {
               //Since sending to Solr response failed and it is in draining mode, let's break;
-              LOG.warn("In drain mode and sending to Solr failed. So exiting. output=" 
-                  + getShortDescription());
+              LOG.warn("In drain mode and sending to Solr failed. So exiting. output=" + getShortDescription());
               break;
             }
           }
-          if( localBuffer.size() == 0 ) {
+          if (localBuffer.size() == 0) {
             //If localBuffer is empty, then reset the timer
             lastDispatchTime = currTimeMS;
           }
@@ -403,8 +408,7 @@ public class OutputSolr extends Output {
         } catch (IOException | SolrException exception) {
           // Transient error, lets block till it is available
           try {
-            LOG.warn("Solr is not reachable. Going to retry after "
-                + RETRY_INTERVAL + " seconds. " + "output="
+            LOG.warn("Solr is not reachable. Going to retry after " + RETRY_INTERVAL + " seconds. " + "output="
                 + getShortDescription(), exception);
             Thread.sleep(RETRY_INTERVAL * 1000);
           } catch (Throwable t) {
@@ -414,8 +418,8 @@ public class OutputSolr extends Output {
           // Something unknown happened. Let's not block because of this error. 
           // Clear the buffer
           String logMessageKey = this.getClass().getSimpleName() + "_SOLR_UPDATE_EXCEPTION";
-          LogFeederUtil.logErrorMessageByInterval(logMessageKey,
-              "Error sending log message to server. Dropping logs", serverException, LOG, Level.ERROR);
+          LogFeederUtil.logErrorMessageByInterval(logMessageKey, "Error sending log message to server. Dropping logs",
+              serverException, LOG, Level.ERROR);
           resetLocalBuffer();
           break;
         }
@@ -447,7 +451,7 @@ public class OutputSolr extends Output {
               Level.ERROR);
         }
       }
-      latestInputMarkerList.put(outputData.inputMarker.base64FileKey, outputData.inputMarker);
+      latestInputMarkers.put(outputData.inputMarker.base64FileKey, outputData.inputMarker);
       localBuffer.add(document);
     }
 
@@ -479,9 +483,9 @@ public class OutputSolr extends Output {
         LogFeederUtil.logErrorMessageByInterval(logMessageKey,
             String.format("Error writing to Solr. response=%s, log=%s", response, outputData), null, LOG, Level.ERROR);
       }
-      statMetric.count += localBuffer.size();
-      writeBytesMetric.count += localBufferBytesSize;
-      for (InputMarker inputMarker : latestInputMarkerList.values()) {
+      statMetric.value += localBuffer.size();
+      writeBytesMetric.value += localBufferBytesSize;
+      for (InputMarker inputMarker : latestInputMarkers.values()) {
         inputMarker.input.checkIn(inputMarker);
       }
     }
@@ -499,7 +503,7 @@ public class OutputSolr extends Output {
     public void resetLocalBuffer() {
       localBuffer.clear();
       localBufferBytesSize = 0;
-      latestInputMarkerList.clear();
+      latestInputMarkers.clear();
     }
 
     public boolean isDone() {
@@ -512,9 +516,7 @@ public class OutputSolr extends Output {
   }
 
   @Override
-  public void copyFile(File inputFile, InputMarker inputMarker)
-      throws UnsupportedOperationException {
-    throw new UnsupportedOperationException(
-        "copyFile method is not yet supported for output=solr");
+  public void copyFile(File inputFile, InputMarker inputMarker) throws UnsupportedOperationException {
+    throw new UnsupportedOperationException("copyFile method is not yet supported for output=solr");
   }
 }
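
The SolrWorkerThread flushes its local buffer when it is draining with no new data, when the dispatch timer expires, or when the buffer reaches maxBufferSize. The decision, lifted out of the loop for clarity; this is an illustrative reduction, not the actual class:

class SolrFlushDecisionSketch {
  static boolean shouldFlush(int bufferSize, int maxBufferSize, boolean drainingWithNoData,
      long nextDispatchDuration) {
    return bufferSize > 0 &&
        (drainingWithNoData || nextDispatchDuration <= 0 || bufferSize >= maxBufferSize);
  }

  public static void main(String[] args) {
    System.out.println(shouldFlush(10, 100, false, 5000));  // false: keep buffering
    System.out.println(shouldFlush(100, 100, false, 5000)); // true: buffer full
    System.out.println(shouldFlush(10, 100, false, 0));     // true: dispatch interval elapsed
  }
}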

+ 3 - 3
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/S3LogPathResolver.java

@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -18,9 +18,9 @@
 
 package org.apache.ambari.logfeeder.output;
 
+import org.apache.ambari.logfeeder.common.LogFeederConstants;
 import org.apache.ambari.logfeeder.util.LogFeederUtil;
 import org.apache.ambari.logfeeder.util.PlaceholderUtil;
-import org.apache.ambari.logfeeder.util.S3Util;
 
 import java.util.HashMap;
 
@@ -40,7 +40,7 @@ public class S3LogPathResolver {
   public String getResolvedPath(String baseKeyPrefix, String keySuffix, String cluster) {
     HashMap<String, String> contextParam = buildContextParam(cluster);
     String resolvedKeyPrefix = PlaceholderUtil.replaceVariables(baseKeyPrefix, contextParam);
-    return resolvedKeyPrefix + S3Util.S3_PATH_SEPARATOR + keySuffix;
+    return resolvedKeyPrefix + LogFeederConstants.S3_PATH_SEPARATOR + keySuffix;
   }
 
   private HashMap<String, String> buildContextParam(String cluster) {
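
PlaceholderUtil.replaceVariables is internal to Log Feeder; this stand-in shows the kind of $VARIABLE substitution getResolvedPath relies on. Variable names and values are examples, and the real implementation may differ.

import java.util.HashMap;
import java.util.Map;

class PathResolveSketch {
  static String replaceVariables(String template, Map<String, String> params) {
    String result = template;
    for (Map.Entry<String, String> e : params.entrySet()) {
      result = result.replace(e.getKey(), e.getValue());
    }
    return result;
  }

  public static void main(String[] args) {
    Map<String, String> context = new HashMap<>();
    context.put("$CLUSTER", "cl1");
    context.put("$HOST", "node-1.example.com");
    String prefix = replaceVariables("$CLUSTER/$HOST/service-logs", context);
    System.out.println(prefix + "/" + "hdfs_namenode.log.gz"); // "/" plays the role of S3_PATH_SEPARATOR
  }
}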

+ 2 - 3
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/S3OutputConfiguration.java

@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -103,8 +103,7 @@ public class S3OutputConfiguration {
     };
 
     for (int i = 0; i < longValuedKeysToCopy.length; i++) {
-      configs.put(longValuedKeysToCopy[i],
-          configBlock.getLongValue(longValuedKeysToCopy[i], defaultValuesForLongValuedKeys[i]));
+      configs.put(longValuedKeysToCopy[i], configBlock.getLongValue(longValuedKeysToCopy[i], defaultValuesForLongValuedKeys[i]));
     }
 
     configs.put(ADDITIONAL_FIELDS_KEY, configBlock.getNVList(ADDITIONAL_FIELDS_KEY));

+ 38 - 26
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/S3Uploader.java

@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -18,8 +18,12 @@
 
 package org.apache.ambari.logfeeder.output;
 
+import com.amazonaws.AmazonClientException;
+import com.amazonaws.services.s3.transfer.TransferManager;
+import com.amazonaws.services.s3.transfer.Upload;
 import com.google.common.annotations.VisibleForTesting;
 
+import org.apache.ambari.logfeeder.common.LogFeederConstants;
 import org.apache.ambari.logfeeder.util.CompressionUtil;
 import org.apache.ambari.logfeeder.util.S3Util;
 import org.apache.log4j.Logger;
@@ -39,20 +43,18 @@ import java.util.concurrent.atomic.AtomicBoolean;
  * {@link org.apache.ambari.logfeeder.input.InputFile}.
  */
 public class S3Uploader implements Runnable {
+  private static final Logger LOG = Logger.getLogger(S3Uploader.class);
+  
   public static final String POISON_PILL = "POISON-PILL";
-  private static Logger logger = Logger.getLogger(S3Uploader.class);
 
   private final S3OutputConfiguration s3OutputConfiguration;
-  private final S3Util s3UtilInstance;
   private final boolean deleteOnEnd;
-  private String logType;
+  private final String logType;
   private final BlockingQueue<String> fileContextsToUpload;
-  private AtomicBoolean stopRunningThread = new AtomicBoolean(false);
+  private final AtomicBoolean stopRunningThread = new AtomicBoolean(false);
 
-  public S3Uploader(S3OutputConfiguration s3OutputConfiguration, S3Util s3UtilInstance, boolean deleteOnEnd,
-                    String logType) {
+  public S3Uploader(S3OutputConfiguration s3OutputConfiguration, boolean deleteOnEnd, String logType) {
     this.s3OutputConfiguration = s3OutputConfiguration;
-    this.s3UtilInstance = s3UtilInstance;
     this.deleteOnEnd = deleteOnEnd;
     this.logType = logType;
     this.fileContextsToUpload = new LinkedBlockingQueue<>();
@@ -81,7 +83,7 @@ public class S3Uploader implements Runnable {
     stopRunningThread.set(true);
     boolean offerStatus = fileContextsToUpload.offer(POISON_PILL);
     if (!offerStatus) {
-      logger.warn("Could not add poison pill to interrupt uploader thread.");
+      LOG.warn("Could not add poison pill to interrupt uploader thread.");
     }
   }
 
@@ -92,7 +94,7 @@ public class S3Uploader implements Runnable {
   void addFileForUpload(String fileToUpload) {
     boolean offerStatus = fileContextsToUpload.offer(fileToUpload);
     if (!offerStatus) {
-      logger.error("Could not add file " + fileToUpload + " for upload.");
+      LOG.error("Could not add file " + fileToUpload + " for upload.");
     }
   }
 
@@ -102,12 +104,12 @@ public class S3Uploader implements Runnable {
       try {
         String fileNameToUpload = fileContextsToUpload.take();
         if (POISON_PILL.equals(fileNameToUpload)) {
-          logger.warn("Found poison pill while waiting for files to upload, exiting");
+          LOG.warn("Found poison pill while waiting for files to upload, exiting");
           return;
         }
         uploadFile(new File(fileNameToUpload), logType);
       } catch (InterruptedException e) {
-        logger.error("Interrupted while waiting for elements from fileContextsToUpload", e);
+        LOG.error("Interrupted while waiting for elements from fileContextsToUpload", e);
         return;
       }
     }
@@ -130,34 +132,44 @@ public class S3Uploader implements Runnable {
     String compressionAlgo = s3OutputConfiguration.getCompressionAlgo();
 
     String keySuffix = fileToUpload.getName() + "." + compressionAlgo;
-    String s3Path = new S3LogPathResolver().
-        getResolvedPath(s3OutputConfiguration.getS3Path()+S3Util.S3_PATH_SEPARATOR+logType,
-            keySuffix, s3OutputConfiguration.getCluster());
-    logger.info(String.format("keyPrefix=%s, keySuffix=%s, s3Path=%s",
-        s3OutputConfiguration.getS3Path(), keySuffix, s3Path));
+    String s3Path = new S3LogPathResolver().getResolvedPath(
+        s3OutputConfiguration.getS3Path() + LogFeederConstants.S3_PATH_SEPARATOR + logType, keySuffix,
+        s3OutputConfiguration.getCluster());
+    LOG.info(String.format("keyPrefix=%s, keySuffix=%s, s3Path=%s", s3OutputConfiguration.getS3Path(), keySuffix, s3Path));
     File sourceFile = createCompressedFileForUpload(fileToUpload, compressionAlgo);
 
-    logger.info("Starting S3 upload " + sourceFile + " -> " + bucketName + ", " + s3Path);
-    s3UtilInstance.uploadFileTos3(bucketName, s3Path, sourceFile, s3AccessKey,
-        s3SecretKey);
+    LOG.info("Starting S3 upload " + sourceFile + " -> " + bucketName + ", " + s3Path);
+    uploadFileToS3(bucketName, s3Path, sourceFile, s3AccessKey, s3SecretKey);
 
     // delete local compressed file
     sourceFile.delete();
     if (deleteOnEnd) {
-      logger.info("Deleting input file as required");
+      LOG.info("Deleting input file as required");
       if (!fileToUpload.delete()) {
-        logger.error("Could not delete file " + fileToUpload.getAbsolutePath() + " after upload to S3");
+        LOG.error("Could not delete file " + fileToUpload.getAbsolutePath() + " after upload to S3");
       }
     }
     return s3Path;
   }
 
+  @VisibleForTesting
+  protected void uploadFileToS3(String bucketName, String s3Key, File localFile, String accessKey, String secretKey) {
+    TransferManager transferManager = S3Util.getTransferManager(accessKey, secretKey);
+    try {
+      Upload upload = transferManager.upload(bucketName, s3Key, localFile);
+      upload.waitForUploadResult();
+    } catch (AmazonClientException | InterruptedException e) {
+      LOG.error("s3 uploading failed for file :" + localFile.getAbsolutePath(), e);
+    } finally {
+      S3Util.shutdownTransferManager(transferManager);
+    }
+  }
+
   @VisibleForTesting
   protected File createCompressedFileForUpload(File fileToUpload, String compressionAlgo) {
-    File outputFile = new File(fileToUpload.getParent(), fileToUpload.getName() + "_"
-        + new Date().getTime() + "." + compressionAlgo);
-    outputFile = CompressionUtil.compressFile(fileToUpload, outputFile,
-        compressionAlgo);
+    File outputFile = new File(fileToUpload.getParent(), fileToUpload.getName() + "_" + new Date().getTime() +
+        "." + compressionAlgo);
+    outputFile = CompressionUtil.compressFile(fileToUpload, outputFile, compressionAlgo);
     return outputFile;
   }
 }
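
S3Uploader shuts its worker down with a poison-pill sentinel rather than interruption. A generic sketch of that pattern, with placeholder file names:

import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;

class PoisonPillSketch {
  static final String POISON_PILL = "POISON-PILL";
  static final BlockingQueue<String> queue = new LinkedBlockingQueue<>();

  public static void main(String[] args) throws InterruptedException {
    Thread worker = new Thread(() -> {
      while (true) {
        try {
          String item = queue.take(); // blocks until work or the pill arrives
          if (POISON_PILL.equals(item)) {
            return; // shutdown requested
          }
          System.out.println("uploading " + item);
        } catch (InterruptedException e) {
          return;
        }
      }
    });
    worker.start();
    queue.offer("/tmp/spool/service.log.gz"); // placeholder file
    queue.offer(POISON_PILL);                 // request shutdown
    worker.join();
  }
}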

+ 12 - 11
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/spool/LogSpooler.java

@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -39,8 +39,9 @@ import java.util.concurrent.atomic.AtomicBoolean;
  * {@link RolloverHandler} to trigger the handling of the rolled over file.
  */
 public class LogSpooler {
+  
+  private static final Logger LOG = Logger.getLogger(LogSpooler.class);
   public static final long TIME_BASED_ROLLOVER_DISABLED_THRESHOLD = 0;
-  static private Logger logger = Logger.getLogger(LogSpooler.class);
   static final String fileDateFormat = "yyyy-MM-dd-HH-mm-ss";
 
   private String spoolDirectory;
@@ -98,7 +99,7 @@ public class LogSpooler {
   private void initializeSpoolDirectory() {
     File spoolDir = new File(spoolDirectory);
     if (!spoolDir.exists()) {
-      logger.info("Creating spool directory: " + spoolDir);
+      LOG.info("Creating spool directory: " + spoolDir);
       boolean result = spoolDir.mkdirs();
       if (!result) {
         throw new LogSpoolerException("Could not create spool directory: " + spoolDirectory);
@@ -116,7 +117,7 @@ public class LogSpooler {
           + ", error message: " + e.getLocalizedMessage(), e);
     }
     currentSpoolerContext = new LogSpoolerContext(currentSpoolFile);
-    logger.info("Initialized spool file at path: " + currentSpoolFile);
+    LOG.info("Initialized spool file at path: " + currentSpoolFile);
   }
 
   @VisibleForTesting
@@ -141,7 +142,7 @@ public class LogSpooler {
     currentSpoolBufferedWriter.println(logEvent);
     currentSpoolerContext.logEventSpooled();
     if (rolloverCondition.shouldRollover(currentSpoolerContext)) {
-      logger.info("Trying to rollover based on rollover condition");
+      LOG.info("Trying to rollover based on rollover condition");
       tryRollover();
     }
   }
@@ -154,19 +155,19 @@ public class LogSpooler {
    * rolled over file.
    */
   public void rollover() {
-    logger.info("Rollover condition detected, rolling over file: " + currentSpoolFile);
+    LOG.info("Rollover condition detected, rolling over file: " + currentSpoolFile);
     currentSpoolBufferedWriter.flush();
     if (currentSpoolFile.length()==0) {
-      logger.info("No data in file " + currentSpoolFile + ", not doing rollover");
+      LOG.info("No data in file " + currentSpoolFile + ", not doing rollover");
     } else {
       currentSpoolBufferedWriter.close();
       rolloverHandler.handleRollover(currentSpoolFile);
-      logger.info("Invoked rollover handler with file: " + currentSpoolFile);
+      LOG.info("Invoked rollover handler with file: " + currentSpoolFile);
       initializeSpoolState();
     }
     boolean status = rolloverInProgress.compareAndSet(true, false);
     if (!status) {
-      logger.error("Should have reset rollover flag!!");
+      LOG.error("Should have reset rollover flag!!");
     }
   }
 
@@ -174,7 +175,7 @@ public class LogSpooler {
     if (rolloverInProgress.compareAndSet(false, true)) {
       rollover();
     } else {
-      logger.warn("Ignoring rollover call as rollover already in progress for file " +
+      LOG.warn("Ignoring rollover call as rollover already in progress for file " +
           currentSpoolFile);
     }
   }
@@ -197,7 +198,7 @@ public class LogSpooler {
   private class LogSpoolerRolloverTimerTask extends TimerTask {
     @Override
     public void run() {
-      logger.info("Trying rollover based on time");
+      LOG.info("Trying rollover based on time");
       tryRollover();
     }
   }
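
tryRollover above uses AtomicBoolean.compareAndSet so that only one caller, whether event-driven or timer-driven, performs a rollover at a time. A reduced sketch of the guard; the real rollover() resets the flag itself, so the finally block here is a simplification:

import java.util.concurrent.atomic.AtomicBoolean;

class RolloverGuardSketch {
  private final AtomicBoolean rolloverInProgress = new AtomicBoolean(false);

  void tryRollover() {
    if (rolloverInProgress.compareAndSet(false, true)) {
      try {
        System.out.println("rolling over");
      } finally {
        rolloverInProgress.set(false); // simplified reset
      }
    } else {
      System.out.println("rollover already in progress, skipping");
    }
  }

  public static void main(String[] args) {
    new RolloverGuardSketch().tryRollover();
  }
}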

+ 1 - 1
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/spool/LogSpoolerContext.java

@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information

+ 1 - 1
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/spool/LogSpoolerException.java

@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information

+ 1 - 1
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/spool/RolloverCondition.java

@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information

+ 1 - 1
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/spool/RolloverHandler.java

@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information

+ 5 - 47
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/AWSUtil.java

@@ -20,62 +20,20 @@ package org.apache.ambari.logfeeder.util;
 
 import org.apache.log4j.Logger;
 
-import com.amazonaws.AmazonServiceException;
 import com.amazonaws.auth.AWSCredentials;
 import com.amazonaws.auth.BasicAWSCredentials;
-import com.amazonaws.services.identitymanagement.AmazonIdentityManagementClient;
 
-public enum AWSUtil {
-  INSTANCE;
+public class AWSUtil {
   private static final Logger LOG = Logger.getLogger(AWSUtil.class);
 
-  public String getAwsUserName(String accessKey, String secretKey) {
-    String username = null;
-    AWSCredentials awsCredentials = createAWSCredentials(accessKey, secretKey);
-    AmazonIdentityManagementClient amazonIdentityManagementClient;
-    if (awsCredentials != null) {
-      amazonIdentityManagementClient = new AmazonIdentityManagementClient(
-          awsCredentials);
-    } else {
-      // create default client
-      amazonIdentityManagementClient = new AmazonIdentityManagementClient();
-    }
-    try {
-      username = amazonIdentityManagementClient.getUser().getUser()
-          .getUserName();
-    } catch (AmazonServiceException e) {
-      if (e.getErrorCode().compareTo("AccessDenied") == 0) {
-        String arn = null;
-        String msg = e.getMessage();
-        int arnIdx = msg.indexOf("arn:aws");
-        if (arnIdx != -1) {
-          int arnSpace = msg.indexOf(" ", arnIdx);
-          // should be similar to "arn:aws:iam::111111111111:user/username"
-          arn = msg.substring(arnIdx, arnSpace);
-        }
-        if (arn != null) {
-          String[] arnParts = arn.split(":");
-          if (arnParts != null && arnParts.length > 5) {
-            username = arnParts[5];
-            if (username != null) {
-              username = username.replace("user/", "");
-            }
-          }
-        }
-      }
-    } catch (Exception exception) {
-      LOG.error(
-          "Error in getting username :" + exception.getLocalizedMessage(),
-          exception.getCause());
-    }
-    return username;
+  private AWSUtil() {
+    throw new UnsupportedOperationException();
   }
 
-  public AWSCredentials createAWSCredentials(String accessKey, String secretKey) {
+  public static AWSCredentials createAWSCredentials(String accessKey, String secretKey) {
     if (accessKey != null && secretKey != null) {
       LOG.debug("Creating aws client as per new accesskey and secretkey");
-      AWSCredentials awsCredentials = new BasicAWSCredentials(accessKey,
-          secretKey);
+      AWSCredentials awsCredentials = new BasicAWSCredentials(accessKey, secretKey);
       return awsCredentials;
     } else {
       return null;
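
Usage sketch for the now-static createAWSCredentials; the keys are placeholders, and a null return means callers fall back to the AWS SDK's default credential chain:

import com.amazonaws.auth.AWSCredentials;
import com.amazonaws.auth.BasicAWSCredentials;

class CredentialsSketch {
  static AWSCredentials create(String accessKey, String secretKey) {
    return (accessKey != null && secretKey != null)
        ? new BasicAWSCredentials(accessKey, secretKey)
        : null;
  }

  public static void main(String[] args) {
    AWSCredentials creds = create("MY_ACCESS_KEY", "MY_SECRET_KEY"); // placeholders
    System.out.println(creds == null ? "use default credential chain" : creds.getAWSAccessKeyId());
  }
}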

+ 61 - 42
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/AliasUtil.java

@@ -21,69 +21,90 @@ package org.apache.ambari.logfeeder.util;
 import java.io.File;
 import java.util.HashMap;
 
+import org.apache.ambari.logfeeder.filter.Filter;
+import org.apache.ambari.logfeeder.input.Input;
+import org.apache.ambari.logfeeder.mapper.Mapper;
+import org.apache.ambari.logfeeder.output.Output;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.log4j.Logger;
 
 public class AliasUtil {
 
-  private static Logger logger = Logger.getLogger(AliasUtil.class);
+  private static final Logger LOG = Logger.getLogger(AliasUtil.class);
 
-  private static AliasUtil instance = null;
+  private static final String ALIAS_CONFIG_JSON = "alias_config.json";
+  private static HashMap<String, Object> aliasMap = null;
 
-  private static String aliasConfigJson = "alias_config.json";
-
-  private HashMap<String, Object> aliasMap = null;
-
-  public static enum ALIAS_TYPE {
-    INPUT, FILTER, MAPPER, OUTPUT
+  static {
+    File jsonFile = FileUtil.getFileFromClasspath(ALIAS_CONFIG_JSON);
+    if (jsonFile != null) {
+      aliasMap = FileUtil.readJsonFromFile(jsonFile);
+    }
   }
 
-  public static enum ALIAS_PARAM {
-    KLASS
+  public static enum AliasType {
+    INPUT, FILTER, MAPPER, OUTPUT
   }
 
   private AliasUtil() {
-    init();
+    throw new UnsupportedOperationException();
   }
 
-  public static AliasUtil getInstance() {
-    if (instance == null) {
-      synchronized (AliasUtil.class) {
-        if (instance == null) {
-          instance = new AliasUtil();
-        }
-      }
+  public static Object getClassInstance(String key, AliasType aliasType) {
+    String classFullName = getClassFullName(key, aliasType);
+    
+    Object instance = null;
+    try {
+      instance = (Object) Class.forName(classFullName).getConstructor().newInstance();
+    } catch (Exception exception) {
+      LOG.error("Unsupported class = " + classFullName, exception.getCause());
     }
-    return instance;
-  }
 
-  /**
-   */
-  private void init() {
-    File jsonFile = LogFeederUtil.getFileFromClasspath(aliasConfigJson);
-    if (jsonFile != null) {
-      this.aliasMap = LogFeederUtil.readJsonFromFile(jsonFile);
+    if (instance != null) {
+      boolean isValid = false;
+      switch (aliasType) {
+        case FILTER:
+          isValid = Filter.class.isAssignableFrom(instance.getClass());
+          break;
+        case INPUT:
+          isValid = Input.class.isAssignableFrom(instance.getClass());
+          break;
+        case OUTPUT:
+          isValid = Output.class.isAssignableFrom(instance.getClass());
+          break;
+        case MAPPER:
+          isValid = Mapper.class.isAssignableFrom(instance.getClass());
+          break;
+        default:
+          LOG.warn("Unhandled aliasType: " + aliasType);
+          isValid = true;
+      }
+      if (!isValid) {
+        LOG.error("Not a valid class :" + classFullName + " AliasType :" + aliasType.name());
+      }
     }
-
+    return instance;
   }
 
-
-  public String readAlias(String key, ALIAS_TYPE aliastype, ALIAS_PARAM aliasParam) {
-    String result = key;// key as a default value;
+  private static String getClassFullName(String key, AliasType aliastype) {
+    String className = null; // no class name found by default
+    
     HashMap<String, String> aliasInfo = getAliasInfo(key, aliastype);
-    String value = aliasInfo.get(aliasParam.name().toLowerCase());
-    if (value != null && !value.isEmpty()) {
-      result = value;
-      logger.debug("Alias found for key :" + key + ",  param :" + aliasParam.name().toLowerCase() + ", value :"
-        + value + " aliastype:" + aliastype.name());
+    String value = aliasInfo.get("klass");
+    if (!StringUtils.isEmpty(value)) {
+      className = value;
+      LOG.debug("Class name found for key :" + key + ", class name :" + className + " aliastype:" + aliastype.name());
     } else {
-      logger.debug("Alias not found for key :" + key + ", param :" + aliasParam.name().toLowerCase());
+      LOG.debug("Class name not found for key :" + key + " aliastype:" + aliastype.name());
     }
-    return result;
+    
+    return className;
   }
 
   @SuppressWarnings("unchecked")
-  private HashMap<String, String> getAliasInfo(String key, ALIAS_TYPE aliastype) {
-    HashMap<String, String> aliasInfo = null;
+  private static HashMap<String, String> getAliasInfo(String key, AliasType aliastype) {
+    HashMap<String, String> aliasInfo = new HashMap<String, String>();
+    
     if (aliasMap != null) {
       String typeKey = aliastype.name().toLowerCase();
       HashMap<String, Object> typeJson = (HashMap<String, Object>) aliasMap.get(typeKey);
@@ -91,9 +112,7 @@ public class AliasUtil {
         aliasInfo = (HashMap<String, String>) typeJson.get(key);
       }
     }
-    if (aliasInfo == null) {
-      aliasInfo = new HashMap<String, String>();
-    }
+    
     return aliasInfo;
   }
 }
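For reference, the old getInstance()/readAlias pair collapses into a single static lookup. A hedged sketch, assuming a "file" input alias exists in alias_config.json (the alias keys themselves are not shown in this diff):

    import org.apache.ambari.logfeeder.input.Input;
    import org.apache.ambari.logfeeder.util.AliasUtil;

    public class AliasLookupExample {
      public static void main(String[] args) {
        // Resolves the alias to a class name, instantiates it reflectively, and
        // validates the instance against the Input base class before returning.
        Object instance = AliasUtil.getClassInstance("file", AliasUtil.AliasType.INPUT);
        if (instance instanceof Input) {
          System.out.println("Loaded input: " + instance.getClass().getName());
        }
      }
    }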

+ 5 - 10
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/CompressionUtil.java

@@ -37,25 +37,20 @@ public class CompressionUtil {
     FileInputStream ios = null;
     try {
       if (!inputFile.exists()) {
-        throw new IllegalArgumentException("Input File:"
-            + inputFile.getAbsolutePath() + " is not exist.");
+        throw new IllegalArgumentException("Input File:" + inputFile.getAbsolutePath() + " does not exist.");
       }
       if (inputFile.isDirectory()) {
-        throw new IllegalArgumentException("Input File:"
-            + inputFile.getAbsolutePath() + " is a directory.");
+        throw new IllegalArgumentException("Input File:" + inputFile.getAbsolutePath() + " is a directory.");
       }
       File parent = outputFile.getParentFile();
       if (parent != null && !parent.exists()) {
         boolean isParentCreated = parent.mkdirs();
         if (!isParentCreated) {
-          throw new IllegalAccessException(
-              "User does not have permission to create parent directory :"
-                  + parent.getAbsolutePath());
+          throw new IllegalAccessException("User does not have permission to create parent directory :" + parent.getAbsolutePath());
         }
       }
-      final OutputStream out = new FileOutputStream(outputFile);
-      cos = new CompressorStreamFactory().createCompressorOutputStream(
-          algoName, out);
+      OutputStream out = new FileOutputStream(outputFile);
+      cos = new CompressorStreamFactory().createCompressorOutputStream(algoName, out);
       ios = new FileInputStream(inputFile);
       IOUtils.copy(ios, cos);
     } catch (Exception e) {

+ 36 - 3
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/DateUtil.java

@@ -20,12 +20,17 @@ package org.apache.ambari.logfeeder.util;
 
 import java.text.SimpleDateFormat;
 import java.util.Date;
+import java.util.TimeZone;
 
 import org.apache.log4j.Logger;
 
 public class DateUtil {
-  private static final Logger logger = Logger.getLogger(DateUtil.class);
-
+  private static final Logger LOG = Logger.getLogger(DateUtil.class);
+  
+  private DateUtil() {
+    throw new UnsupportedOperationException();
+  }
+  
   public static String dateToString(Date date, String dateFormat) {
     if (date == null || dateFormat == null || dateFormat.isEmpty()) {
       return "";
@@ -34,8 +39,36 @@ public class DateUtil {
       SimpleDateFormat formatter = new SimpleDateFormat(dateFormat);
       return formatter.format(date).toString();
     } catch (Exception e) {
-      logger.error("Error in coverting dateToString  format :" + dateFormat, e);
+      LOG.error("Error in coverting dateToString  format :" + dateFormat, e);
     }
     return "";
   }
+
+  private final static String SOLR_DATE_FORMAT = "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'";
+  private static ThreadLocal<SimpleDateFormat> dateFormatter = new ThreadLocal<SimpleDateFormat>() {
+    @Override
+    protected SimpleDateFormat initialValue() {
+      SimpleDateFormat sdf = new SimpleDateFormat(SOLR_DATE_FORMAT);
+      sdf.setTimeZone(TimeZone.getTimeZone("UTC"));
+      return sdf;
+    }
+  };
+
+  public static String getDate(String timeStampStr) {
+    try {
+      return dateFormatter.get().format(new Date(Long.parseLong(timeStampStr)));
+    } catch (Exception ex) {
+      LOG.error(ex);
+      return null;
+    }
+  }
+
+  public static String getActualDateStr() {
+    try {
+      return dateFormatter.get().format(new Date());
+    } catch (Exception ex) {
+      LOG.error(ex);
+      return null;
+    }
+  }
 }
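The Solr timestamp helpers that moved here from LogFeederUtil keep their thread-local formatter. A small sketch of the new call sites (the printed value is illustrative):

    import org.apache.ambari.logfeeder.util.DateUtil;

    public class SolrDateExample {
      public static void main(String[] args) {
        // Both helpers format in UTC via the thread-local SimpleDateFormat,
        // so concurrent inputs can call them without external locking.
        String fromEpoch = DateUtil.getDate(String.valueOf(System.currentTimeMillis()));
        String now = DateUtil.getActualDateStr();
        System.out.println(fromEpoch + " / " + now); // e.g. 2016-08-29T12:34:56.789Z
      }
    }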

+ 57 - 9
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/FileUtil.java

@@ -20,25 +20,73 @@
 package org.apache.ambari.logfeeder.util;
 
 import java.io.File;
+import java.io.IOException;
+import java.net.URL;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.nio.file.attribute.BasicFileAttributes;
 import java.util.ArrayList;
+import java.util.HashMap;
 import java.util.List;
 
 import org.apache.commons.io.FileUtils;
 import org.apache.log4j.Logger;
+import org.codehaus.jackson.map.ObjectMapper;
+import org.codehaus.jackson.type.TypeReference;
 
 public class FileUtil {
-  private static final Logger logger = Logger.getLogger(FileUtil.class);
-
-  public static List<File> getAllFileFromDir(File directory,
-      String[] searchFileWithExtensions, boolean checkInSubDir) {
+  private static final Logger LOG = Logger.getLogger(FileUtil.class);
+  
+  private FileUtil() {
+    throw new UnsupportedOperationException();
+  }
+  
+  public static List<File> getAllFileFromDir(File directory, String extension, boolean checkInSubDir) {
     if (!directory.exists()) {
-      logger.error(directory.getAbsolutePath() + " is not exists ");
-    } else if (directory.isDirectory()) {
-      return (List<File>) FileUtils.listFiles(directory,
-          searchFileWithExtensions, checkInSubDir);
+      LOG.error(directory.getAbsolutePath() + " does not exist");
+    } else if (!directory.isDirectory()) {
+      LOG.error(directory.getAbsolutePath() + " is not a directory");
     } else {
-      logger.error(directory.getAbsolutePath() + " is not Directory ");
+      return (List<File>) FileUtils.listFiles(directory, new String[]{extension}, checkInSubDir);
     }
     return new ArrayList<File>();
   }
+
+  public static Object getFileKey(File file) {
+    try {
+      Path fileFullPath = Paths.get(file.getAbsolutePath());
+      if (fileFullPath != null) {
+        BasicFileAttributes basicAttr = Files.readAttributes(fileFullPath, BasicFileAttributes.class);
+        return basicAttr.fileKey();
+      }
+    } catch (Throwable ex) {
+      LOG.error("Error getting file attributes for file=" + file, ex);
+    }
+    return file.toString();
+  }
+
+  public static File getFileFromClasspath(String filename) {
+    URL fileCompleteUrl = Thread.currentThread().getContextClassLoader().getResource(filename);
+    LOG.debug("File Complete URI :" + fileCompleteUrl);
+    File file = null;
+    try {
+      file = new File(fileCompleteUrl.toURI());
+    } catch (Exception exception) {
+      LOG.debug(exception.getMessage(), exception.getCause());
+    }
+    return file;
+  }
+
+  public static HashMap<String, Object> readJsonFromFile(File jsonFile) {
+    ObjectMapper mapper = new ObjectMapper();
+    try {
+      HashMap<String, Object> jsonmap = mapper.readValue(jsonFile, new TypeReference<HashMap<String, Object>>() {});
+      return jsonmap;
+    } catch (IOException e) {
+      LOG.error(e, e.getCause());
+    }
+    return new HashMap<String, Object>();
+  }
 }
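getFileKey is presumably what the file inputs use to recognize a log file across rotations. A minimal sketch, assuming an illustrative log path:

    import java.io.File;
    import org.apache.ambari.logfeeder.util.FileUtil;

    public class FileKeyExample {
      public static void main(String[] args) {
        File logFile = new File("/var/log/ambari-logsearch-logfeeder/logfeeder.log");
        // On POSIX file systems the key is typically the (device, inode) pair,
        // so it survives renames; on failure the method falls back to the path.
        Object key = FileUtil.getFileKey(logFile);
        System.out.println("checkpoint key = " + key);
      }
    }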

+ 142 - 369
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/LogFeederUtil.java

@@ -22,36 +22,23 @@ package org.apache.ambari.logfeeder.util;
 import java.io.BufferedInputStream;
 import java.io.File;
 import java.io.FileInputStream;
-import java.io.IOException;
 import java.lang.reflect.Type;
 import java.net.InetAddress;
-import java.net.URL;
 import java.net.UnknownHostException;
-import java.text.SimpleDateFormat;
-import java.util.Date;
 import java.util.HashMap;
 import java.util.Hashtable;
 import java.util.List;
 import java.util.Map;
 import java.util.Properties;
-import java.util.TimeZone;
 
 import org.apache.ambari.logfeeder.LogFeeder;
-import org.apache.ambari.logfeeder.filter.Filter;
-import org.apache.ambari.logfeeder.input.Input;
-import org.apache.ambari.logfeeder.logconfig.LogFeederConstants;
-import org.apache.ambari.logfeeder.mapper.Mapper;
-import org.apache.ambari.logfeeder.metrics.MetricCount;
-import org.apache.ambari.logfeeder.output.Output;
+import org.apache.ambari.logfeeder.common.LogFeederConstants;
+import org.apache.ambari.logfeeder.metrics.MetricData;
+import org.apache.commons.collections.MapUtils;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.log4j.Level;
 import org.apache.log4j.Logger;
-import org.codehaus.jackson.JsonParseException;
-import org.codehaus.jackson.map.JsonMappingException;
-import org.codehaus.jackson.map.ObjectMapper;
-import org.codehaus.jackson.type.TypeReference;
 
-import com.google.common.collect.ObjectArrays;
 import com.google.gson.Gson;
 import com.google.gson.GsonBuilder;
 import com.google.gson.reflect.TypeToken;
@@ -60,109 +47,80 @@ import com.google.gson.reflect.TypeToken;
  * This class contains utility methods used by LogFeeder
  */
 public class LogFeederUtil {
-  private static final Logger logger = Logger.getLogger(LogFeederUtil.class);
+  private static final Logger LOG = Logger.getLogger(LogFeederUtil.class);
 
-  private static final int HASH_SEED = 31174077;
-  public final static String DATE_FORMAT = "yyyy-MM-dd HH:mm:ss.SSS";
-  public final static String SOLR_DATE_FORMAT = "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'";
-  private static Gson gson = new GsonBuilder().setDateFormat(DATE_FORMAT).create();
-
-  private static Properties props;
-
-  private static Map<String, LogHistory> logHistoryList = new Hashtable<String, LogHistory>();
-  private static int logInterval = 30000; // 30 seconds
+  private final static String GSON_DATE_FORMAT = "yyyy-MM-dd HH:mm:ss.SSS";
+  private static Gson gson = new GsonBuilder().setDateFormat(GSON_DATE_FORMAT).create();
+  
+  public static Gson getGson() {
+    return gson;
+  }
 
   public static String hostName = null;
   public static String ipAddress = null;
   
-  private static String logfeederTempDir = null;
-  
-  private static final Object _LOCK = new Object();
-  
   static{
-    setHostNameAndIP();
+    try {
+      InetAddress ip = InetAddress.getLocalHost();
+      ipAddress = ip.getHostAddress();
+      String getHostName = ip.getHostName();
+      String getCanonicalHostName = ip.getCanonicalHostName();
+      if (!getCanonicalHostName.equalsIgnoreCase(ipAddress)) {
+        LOG.info("Using getCanonicalHostName()=" + getCanonicalHostName);
+        hostName = getCanonicalHostName;
+      } else {
+        LOG.info("Using getHostName()=" + getHostName);
+        hostName = getHostName;
+      }
+      LOG.info("ipAddress=" + ipAddress + ", getHostName=" + getHostName + ", getCanonicalHostName=" + getCanonicalHostName +
+          ", hostName=" + hostName);
+    } catch (UnknownHostException e) {
+      LOG.error("Error getting hostname.", e);
+    }
   }
   
-  public static Gson getGson() {
-    return gson;
-  }
-
-  private static ThreadLocal<SimpleDateFormat> dateFormatter = new ThreadLocal<SimpleDateFormat>() {
-    @Override
-    protected SimpleDateFormat initialValue() {
-      SimpleDateFormat sdf = new SimpleDateFormat(SOLR_DATE_FORMAT);
-      sdf.setTimeZone(TimeZone.getTimeZone("UTC"));
-      return sdf;
-    }
-  };
+  private static Properties props;
 
   /**
-   * This method will read the properties from System, followed by propFile
-   * and finally from the map
+   * This method will read the properties from System, followed by propFile and finally from the map
    */
-  public static void loadProperties(String propFile, String[] propNVList)
-    throws Exception {
-    logger.info("Loading properties. propFile=" + propFile);
+  public static void loadProperties(String propFile, String[] propNVList) throws Exception {
+    LOG.info("Loading properties. propFile=" + propFile);
     props = new Properties(System.getProperties());
     boolean propLoaded = false;
 
     // First get properties file path from environment value
     String propertiesFilePath = System.getProperty("properties");
-    if (propertiesFilePath != null && !propertiesFilePath.isEmpty()) {
+    if (StringUtils.isNotEmpty(propertiesFilePath)) {
       File propertiesFile = new File(propertiesFilePath);
       if (propertiesFile.exists() && propertiesFile.isFile()) {
-        logger.info("Properties file path set in environment. Loading properties file="
-          + propertiesFilePath);
-        FileInputStream fileInputStream = null;
-        try {
-          fileInputStream = new FileInputStream(propertiesFile);
-          props.load(fileInputStream);
+        LOG.info("Properties file path set in environment. Loading properties file=" + propertiesFilePath);
+        try (FileInputStream fis = new FileInputStream(propertiesFile)) {
+          props.load(fis);
           propLoaded = true;
         } catch (Throwable t) {
-          logger.error("Error loading properties file. properties file="
-            + propertiesFile.getAbsolutePath());
-        } finally {
-          if (fileInputStream != null) {
-            try {
-              fileInputStream.close();
-            } catch (Throwable t) {
-              // Ignore error
-            }
-          }
+          LOG.error("Error loading properties file. properties file=" + propertiesFile.getAbsolutePath());
         }
       } else {
-        logger.error("Properties file path set in environment, but file not found. properties file="
-          + propertiesFilePath);
+        LOG.error("Properties file path set in environment, but file not found. properties file=" + propertiesFilePath);
       }
     }
 
     if (!propLoaded) {
-      BufferedInputStream fileInputStream = null;
-      try {
+      try (BufferedInputStream bis = (BufferedInputStream) LogFeeder.class.getClassLoader().getResourceAsStream(propFile)) {
         // Properties not yet loaded, let's try from class loader
-        fileInputStream = (BufferedInputStream) LogFeeder.class
-          .getClassLoader().getResourceAsStream(propFile);
-        if (fileInputStream != null) {
-          logger.info("Loading properties file " + propFile
-            + " from classpath");
-          props.load(fileInputStream);
+        if (bis != null) {
+          LOG.info("Loading properties file " + propFile + " from classpath");
+          props.load(bis);
           propLoaded = true;
         } else {
-          logger.fatal("Properties file not found in classpath. properties file name= "
-            + propFile);
-        }
-      } finally {
-        if (fileInputStream != null) {
-          try {
-            fileInputStream.close();
-          } catch (IOException e) {
-          }
+          LOG.fatal("Properties file not found in classpath. properties file name= " + propFile);
         }
       }
     }
 
     if (!propLoaded) {
-      logger.fatal("Properties file is not loaded.");
+      LOG.fatal("Properties file is not loaded.");
       throw new Exception("Properties not loaded");
     } else {
       updatePropertiesFromMap(propNVList);
@@ -173,162 +131,124 @@ public class LogFeederUtil {
     if (nvList == null) {
       return;
     }
-    logger.info("Trying to load additional proeprties from argument paramters. nvList.length="
-      + nvList.length);
-    if (nvList != null && nvList.length > 0) {
-      for (String nv : nvList) {
-        logger.info("Passed nv=" + nv);
-        if (nv.startsWith("-") && nv.length() > 1) {
-          nv = nv.substring(1);
-          logger.info("Stripped nv=" + nv);
-          int i = nv.indexOf("=");
-          if (nv.length() > i) {
-            logger.info("Candidate nv=" + nv);
-            String name = nv.substring(0, i);
-            String value = nv.substring(i + 1);
-            logger.info("Adding property from argument to properties. name="
-              + name + ", value=" + value);
-            props.put(name, value);
-          }
+    LOG.info("Trying to load additional proeprties from argument paramters. nvList.length=" + nvList.length);
+    for (String nv : nvList) {
+      LOG.info("Passed nv=" + nv);
+      if (nv.startsWith("-") && nv.length() > 1) {
+        nv = nv.substring(1);
+        LOG.info("Stripped nv=" + nv);
+        int i = nv.indexOf("=");
+        if (nv.length() > i) {
+          LOG.info("Candidate nv=" + nv);
+          String name = nv.substring(0, i);
+          String value = nv.substring(i + 1);
+          LOG.info("Adding property from argument to properties. name=" + name + ", value=" + value);
+          props.put(name, value);
         }
       }
     }
   }
 
-  static public String getStringProperty(String key) {
-    if (props != null) {
-      return props.getProperty(key);
-    }
-    return null;
+  public static String getStringProperty(String key) {
+    return props == null ? null : props.getProperty(key);
   }
 
-  static public String getStringProperty(String key, String defaultValue) {
-    if (props != null) {
-      return props.getProperty(key, defaultValue);
-    }
-    return defaultValue;
+  public static String getStringProperty(String key, String defaultValue) {
+    return props == null ? defaultValue : props.getProperty(key, defaultValue);
   }
 
-  static public boolean getBooleanProperty(String key, boolean defaultValue) {
-    String strValue = getStringProperty(key);
-    return toBoolean(strValue, defaultValue);
+  public static boolean getBooleanProperty(String key, boolean defaultValue) {
+    String value = getStringProperty(key);
+    return toBoolean(value, defaultValue);
   }
 
-  private static boolean toBoolean(String strValue, boolean defaultValue) {
-    boolean retValue = defaultValue;
-    if (!StringUtils.isEmpty(strValue)) {
-      if (strValue.equalsIgnoreCase("true")
-        || strValue.equalsIgnoreCase("yes")) {
-        retValue = true;
-      } else {
-        retValue = false;
-      }
+  private static boolean toBoolean(String value, boolean defaultValue) {
+    if (StringUtils.isEmpty(value)) {
+      return defaultValue;
     }
-    return retValue;
+    
+    return "true".equalsIgnoreCase(value) || "yes".equalsIgnoreCase(value);
   }
 
-  static public int getIntProperty(String key, int defaultValue) {
-    String strValue = getStringProperty(key);
-    int retValue = defaultValue;
-    retValue = objectToInt(strValue, retValue, ", key=" + key);
+  public static int getIntProperty(String key, int defaultValue) {
+    String value = getStringProperty(key);
+    int retValue = objectToInt(value, defaultValue, ", key=" + key);
     return retValue;
   }
 
-  public static int objectToInt(Object objValue, int retValue,
-                                String errMessage) {
+  public static int objectToInt(Object objValue, int retValue, String errMessage) {
     if (objValue == null) {
       return retValue;
     }
     String strValue = objValue.toString();
-    if (!StringUtils.isEmpty(strValue)) {
+    if (StringUtils.isNotEmpty(strValue)) {
       try {
         retValue = Integer.parseInt(strValue);
       } catch (Throwable t) {
-        logger.error("Error parsing integer value. str=" + strValue
-          + ", " + errMessage);
+        LOG.error("Error parsing integer value. str=" + strValue + ", " + errMessage);
       }
     }
     return retValue;
   }
 
-  public static boolean isEnabled(Map<String, Object> conditionConfigs,
-                                  Map<String, Object> valueConfigs) {
-    boolean allow = toBoolean((String) valueConfigs.get("is_enabled"), true);
-    @SuppressWarnings("unchecked")
-    Map<String, Object> conditions = (Map<String, Object>) conditionConfigs
-      .get("conditions");
-    if (conditions != null && conditions.size() > 0) {
-      allow = false;
-      for (String conditionType : conditions.keySet()) {
-        if (conditionType.equalsIgnoreCase("fields")) {
-          @SuppressWarnings("unchecked")
-          Map<String, Object> fields = (Map<String, Object>) conditions
-            .get("fields");
-          for (String fieldName : fields.keySet()) {
-            Object values = fields.get(fieldName);
-            if (values instanceof String) {
-              allow = isFieldConditionMatch(valueConfigs,
-                fieldName, (String) values);
-            } else {
-              @SuppressWarnings("unchecked")
-              List<String> listValues = (List<String>) values;
-              for (String stringValue : listValues) {
-                allow = isFieldConditionMatch(valueConfigs,
-                  fieldName, stringValue);
-                if (allow) {
-                  break;
-                }
-              }
-            }
-            if (allow) {
-              break;
+  @SuppressWarnings("unchecked")
+  public static boolean isEnabled(Map<String, Object> conditionConfigs, Map<String, Object> valueConfigs) {
+    Map<String, Object> conditions = (Map<String, Object>) conditionConfigs.get("conditions");
+    if (MapUtils.isEmpty(conditions)) {
+      return toBoolean((String) valueConfigs.get("is_enabled"), true);
+    }
+    
+    for (String conditionType : conditions.keySet()) {
+      if (!conditionType.equalsIgnoreCase("fields")) {
+        continue;
+      }
+      
+      Map<String, Object> fields = (Map<String, Object>) conditions.get("fields");
+      for (Map.Entry<String, Object> field : fields.entrySet()) {
+        if (field.getValue() instanceof String) {
+          if (isFieldConditionMatch(valueConfigs, field.getKey(), (String) field.getValue())) {
+            return true;
+          }
+        } else {
+          for (String stringValue : (List<String>) field.getValue()) {
+            if (isFieldConditionMatch(valueConfigs, field.getKey(), stringValue)) {
+              return true;
             }
           }
         }
-        if (allow) {
-          break;
-        }
       }
     }
-    return allow;
+    
+    return false;
   }
 
-  public static boolean isFieldConditionMatch(Map<String, Object> configs,
-                                              String fieldName, String stringValue) {
+  private static boolean isFieldConditionMatch(Map<String, Object> configs, String fieldName, String stringValue) {
     boolean allow = false;
     String fieldValue = (String) configs.get(fieldName);
     if (fieldValue != null && fieldValue.equalsIgnoreCase(stringValue)) {
       allow = true;
     } else {
       @SuppressWarnings("unchecked")
-      Map<String, Object> addFields = (Map<String, Object>) configs
-        .get("add_fields");
+      Map<String, Object> addFields = (Map<String, Object>) configs.get("add_fields");
       if (addFields != null && addFields.get(fieldName) != null) {
         String addFieldValue = (String) addFields.get(fieldName);
         if (stringValue.equalsIgnoreCase(addFieldValue)) {
           allow = true;
         }
       }
-
     }
     return allow;
   }
 
-  public static void logStatForMetric(MetricCount metric, String prefixStr,
-                                      String postFix) {
-    long currStat = metric.count;
+  public static void logStatForMetric(MetricData metric, String prefixStr, String postFix) {
+    long currStat = metric.value;
     long currMS = System.currentTimeMillis();
-    if (currStat > metric.prevLogCount) {
-      if (postFix == null) {
-        postFix = "";
-      }
-      logger.info(prefixStr + ": total_count=" + metric.count
-        + ", duration=" + (currMS - metric.prevLogMS) / 1000
-        + " secs, count=" + (currStat - metric.prevLogCount)
-        + postFix);
+    if (currStat > metric.prevLogValue) {
+      LOG.info(prefixStr + ": total_count=" + metric.value + ", duration=" + (currMS - metric.prevLogTime) / 1000 +
+          " secs, count=" + (currStat - metric.prevLogValue) + postFix);
     }
-    metric.prevLogCount = currStat;
-    metric.prevLogMS = currMS;
+    metric.prevLogValue = currStat;
+    metric.prevLogTime = currMS;
   }
 
   public static Map<String, Object> cloneObject(Map<String, Object> map) {
@@ -336,221 +256,74 @@ public class LogFeederUtil {
       return null;
     }
     String jsonStr = gson.toJson(map);
-    Type type = new TypeToken<Map<String, Object>>() {
-    }.getType();
+    Type type = new TypeToken<Map<String, Object>>() {}.getType();
     return gson.fromJson(jsonStr, type);
   }
 
   public static Map<String, Object> toJSONObject(String jsonStr) {
-    if(jsonStr==null || jsonStr.trim().isEmpty()){
+    if (StringUtils.isBlank(jsonStr)) {
       return new HashMap<String, Object>();
     }
-    Type type = new TypeToken<Map<String, Object>>() {
-    }.getType();
+    Type type = new TypeToken<Map<String, Object>>() {}.getType();
     return gson.fromJson(jsonStr, type);
   }
 
-  static public boolean logErrorMessageByInterval(String key, String message,
-                                                  Throwable e, Logger callerLogger, Level level) {
+  private static class LogHistory {
+    private long lastLogTime = 0;
+    private int counter = 0;
+  }
+
+  private static Map<String, LogHistory> logHistoryList = new Hashtable<String, LogHistory>();
 
+  public static boolean logErrorMessageByInterval(String key, String message, Throwable e, Logger callerLogger, Level level) {
     LogHistory log = logHistoryList.get(key);
     if (log == null) {
       log = new LogHistory();
       logHistoryList.put(key, log);
     }
-    if ((System.currentTimeMillis() - log.lastLogTime) > logInterval) {
+    
+    if ((System.currentTimeMillis() - log.lastLogTime) > 30 * 1000) {
       log.lastLogTime = System.currentTimeMillis();
-      int counter = log.counter;
-      log.counter = 0;
-      if (counter > 0) {
-        message += ". Messages suppressed before: " + counter;
-      }
-      if (e == null) {
-        callerLogger.log(level, message);
-      } else {
-        callerLogger.log(level, message, e);
+      if (log.counter > 0) {
+        message += ". Messages suppressed before: " + log.counter;
       }
+      log.counter = 0;
+      callerLogger.log(level, message, e);
 
       return true;
     } else {
       log.counter++;
-    }
-    return false;
-
-  }
-
-  static public String subString(String str, int maxLength) {
-    if (str == null || str.length() == 0) {
-      return "";
-    }
-    maxLength = str.length() < maxLength ? str.length() : maxLength;
-    return str.substring(0, maxLength);
-  }
-
-  public static long genHash(String value) {
-    if (value == null) {
-      value = "null";
-    }
-    return MurmurHash.hash64A(value.getBytes(), HASH_SEED);
-  }
-
-  private static class LogHistory {
-    private long lastLogTime = 0;
-    private int counter = 0;
-  }
-
-  public static String getDate(String timeStampStr) {
-    try {
-      return dateFormatter.get().format(new Date(Long.parseLong(timeStampStr)));
-    } catch (Exception ex) {
-      logger.error(ex);
-      return null;
+      return false;
     }
   }
 
-  public static String getActualDateStr() {
-    try {
-      return dateFormatter.get().format(new Date());
-    } catch (Exception ex) {
-      logger.error(ex);
-      return null;
-    }
-  }
-
-  public static File getFileFromClasspath(String filename) {
-    URL fileCompleteUrl = Thread.currentThread().getContextClassLoader()
-      .getResource(filename);
-    logger.debug("File Complete URI :" + fileCompleteUrl);
-    File file = null;
-    try {
-      file = new File(fileCompleteUrl.toURI());
-    } catch (Exception exception) {
-      logger.debug(exception.getMessage(), exception.getCause());
-    }
-    return file;
-  }
-
-  public static Object getClassInstance(String classFullName, AliasUtil.ALIAS_TYPE aliasType) {
-    Object instance = null;
-    try {
-      instance = (Object) Class.forName(classFullName).getConstructor().newInstance();
-    } catch (Exception exception) {
-      logger.error("Unsupported class =" + classFullName, exception.getCause());
+  public static boolean isListContains(List<String> list, String str, boolean caseSensitive) {
+    if (list == null) {
+      return false;
     }
-    // check instance class as par aliasType
-    if (instance != null) {
-      boolean isValid = false;
-      switch (aliasType) {
-        case FILTER:
-          isValid = Filter.class.isAssignableFrom(instance.getClass());
-          break;
-        case INPUT:
-          isValid = Input.class.isAssignableFrom(instance.getClass());
-          break;
-        case OUTPUT:
-          isValid = Output.class.isAssignableFrom(instance.getClass());
-          break;
-        case MAPPER:
-          isValid = Mapper.class.isAssignableFrom(instance.getClass());
-          break;
-        default:
-          // by default consider all are valid class
-          isValid = true;
+    
+    for (String value : list) {
+      if (value == null) {
+        continue;
       }
-      if (!isValid) {
-        logger.error("Not a valid class :" + classFullName + " AliasType :" + aliasType.name());
-      }
-    }
-    return instance;
-  }
-
-  public static HashMap<String, Object> readJsonFromFile(File jsonFile) {
-    ObjectMapper mapper = new ObjectMapper();
-    try {
-      HashMap<String, Object> jsonmap = mapper.readValue(jsonFile, new TypeReference<HashMap<String, Object>>() {
-      });
-      return jsonmap;
-    } catch (JsonParseException e) {
-      logger.error(e, e.getCause());
-    } catch (JsonMappingException e) {
-      logger.error(e, e.getCause());
-    } catch (IOException e) {
-      logger.error(e, e.getCause());
-    }
-    return new HashMap<String, Object>();
-  }
-
-  public static boolean isListContains(List<String> list, String str, boolean caseSensitive) {
-    if (list != null) {
-      for (String value : list) {
-        if (value != null) {
-          if (caseSensitive) {
-            if (value.equals(str)) {
-              return true;
-            }
-          } else {
-            if (value.equalsIgnoreCase(str)) {
-              return true;
-            }
-          }
-          if (value.equalsIgnoreCase(LogFeederConstants.ALL)) {
-            return true;
-          }
-        }
+      
+      if ((caseSensitive ? value.equals(str) : value.equalsIgnoreCase(str)) ||
+          value.equalsIgnoreCase(LogFeederConstants.ALL)) {
+        return true;
       }
     }
     return false;
   }
   
+  private static String logfeederTempDir = null;
   
-  private static synchronized String setHostNameAndIP() {
-    if (hostName == null || ipAddress == null) {
-      try {
-        InetAddress ip = InetAddress.getLocalHost();
-        ipAddress = ip.getHostAddress();
-        String getHostName = ip.getHostName();
-        String getCanonicalHostName = ip.getCanonicalHostName();
-        if (!getCanonicalHostName.equalsIgnoreCase(ipAddress)) {
-          logger.info("Using getCanonicalHostName()=" + getCanonicalHostName);
-          hostName = getCanonicalHostName;
-        } else {
-          logger.info("Using getHostName()=" + getHostName);
-          hostName = getHostName;
-        }
-        logger.info("ipAddress=" + ipAddress + ", getHostName=" + getHostName
-            + ", getCanonicalHostName=" + getCanonicalHostName + ", hostName="
-            + hostName);
-      } catch (UnknownHostException e) {
-        logger.error("Error getting hostname.", e);
-      }
-    }
-    return hostName;
-  }
-
-  public static String[] mergeArray(String[] first, String[] second) {
-    if (first == null) {
-      first = new String[0];
-    }
-    if (second == null) {
-      second = new String[0];
-    }
-    String[] mergedArray = ObjectArrays.concat(first, second, String.class);
-    return mergedArray;
-  }
-  
-  public static String getLogfeederTempDir() {
+  public static synchronized String getLogfeederTempDir() {
     if (logfeederTempDir == null) {
-      synchronized (_LOCK) {
-        if (logfeederTempDir == null) {
-          String tempDirValue = getStringProperty("logfeeder.tmp.dir",
-              "/tmp/$username/logfeeder/");
-          HashMap<String, String> contextParam = new HashMap<String, String>();
-          String username = System.getProperty("user.name");
-          contextParam.put("username", username);
-          logfeederTempDir = PlaceholderUtil.replaceVariables(tempDirValue,
-              contextParam);
-        }
-      }
+      String tempDirValue = getStringProperty("logfeeder.tmp.dir", "/tmp/$username/logfeeder/");
+      HashMap<String, String> contextParam = new HashMap<String, String>();
+      String username = System.getProperty("user.name");
+      contextParam.put("username", username);
+      logfeederTempDir = PlaceholderUtil.replaceVariables(tempDirValue, contextParam);
     }
     return logfeederTempDir;
   }
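Note that logErrorMessageByInterval now hard-codes the former logInterval of 30 seconds. A sketch of a typical call site (the key and message are illustrative, not from this diff):

    import org.apache.ambari.logfeeder.util.LogFeederUtil;
    import org.apache.log4j.Level;
    import org.apache.log4j.Logger;

    public class RateLimitedLogExample {
      private static final Logger LOG = Logger.getLogger(RateLimitedLogExample.class);

      void onWriteFailure(Exception e) {
        // At most one entry per key per 30-second window; suppressed occurrences
        // are counted and reported with the next logged message.
        LogFeederUtil.logErrorMessageByInterval("solr_write_error",
            "Error writing to Solr", e, LOG, Level.ERROR);
      }
    }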

+ 20 - 38
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/LogfeederHDFSUtil.java

@@ -25,71 +25,53 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.log4j.Logger;
 
-public enum LogfeederHDFSUtil {
-  INSTANCE;
-  private static Logger logger = Logger.getLogger(LogfeederHDFSUtil.class);
+public class LogfeederHDFSUtil {
+  private static final Logger LOG = Logger.getLogger(LogfeederHDFSUtil.class);
 
-  public void createHDFSDir(String dirPath, FileSystem dfs) {
-    Path src = new Path(dirPath);
-    try {
-      if (dfs.isDirectory(src)) {
-        logger.info("hdfs dir dirPath=" + dirPath + "  is already exist.");
-        return;
-      }
-      boolean isDirCreated = dfs.mkdirs(src);
-      if (isDirCreated) {
-        logger.debug("HDFS dirPath=" + dirPath + " created successfully.");
-      } else {
-        logger.warn("HDFS dir creation failed dirPath=" + dirPath);
-      }
-    } catch (IOException e) {
-      logger.error("HDFS dir creation failed dirPath=" + dirPath, e.getCause());
-    }
+  private LogfeederHDFSUtil() {
+    throw new UnsupportedOperationException();
   }
-
-  public boolean copyFromLocal(String sourceFilepath, String destFilePath,
-      FileSystem fileSystem, boolean overwrite, boolean delSrc) {
+  
+  public static boolean copyFromLocal(String sourceFilepath, String destFilePath, FileSystem fileSystem, boolean overwrite,
+      boolean delSrc) {
     Path src = new Path(sourceFilepath);
     Path dst = new Path(destFilePath);
     boolean isCopied = false;
     try {
-      logger.info("copying localfile := " + sourceFilepath + " to hdfsPath := "
-          + destFilePath);
+      LOG.info("copying localfile := " + sourceFilepath + " to hdfsPath := " + destFilePath);
       fileSystem.copyFromLocalFile(delSrc, overwrite, src, dst);
       isCopied = true;
     } catch (Exception e) {
-      logger.error("Error copying local file :" + sourceFilepath
-          + " to hdfs location : " + destFilePath, e);
+      LOG.error("Error copying local file :" + sourceFilepath + " to hdfs location : " + destFilePath, e);
     }
     return isCopied;
   }
 
-  public FileSystem buildFileSystem(String hdfsHost, String hdfsPort) {
+  public static FileSystem buildFileSystem(String hdfsHost, String hdfsPort) {
     try {
       Configuration configuration = buildHdfsConfiguration(hdfsHost, hdfsPort);
       FileSystem fs = FileSystem.get(configuration);
       return fs;
     } catch (Exception e) {
-      logger.error("Exception is buildFileSystem :", e);
+      LOG.error("Exception is buildFileSystem :", e);
     }
     return null;
   }
 
-  public void closeFileSystem(FileSystem fileSystem) {
+  private static Configuration buildHdfsConfiguration(String hdfsHost, String hdfsPort) {
+    String url = "hdfs://" + hdfsHost + ":" + hdfsPort + "/";
+    Configuration configuration = new Configuration();
+    configuration.set("fs.default.name", url);
+    return configuration;
+  }
+
+  public static void closeFileSystem(FileSystem fileSystem) {
     if (fileSystem != null) {
       try {
         fileSystem.close();
       } catch (IOException e) {
-        logger.error(e.getLocalizedMessage(), e.getCause());
+        LOG.error(e.getLocalizedMessage(), e.getCause());
       }
     }
   }
-
-  public Configuration buildHdfsConfiguration(String hdfsHost, String hdfsPort) {
-    String url = "hdfs://" + hdfsHost + ":" + hdfsPort + "/";
-    Configuration configuration = new Configuration();
-    configuration.set("fs.default.name", url);
-    return configuration;
-  }
-
 }
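With the enum singleton replaced by static methods, a copy to HDFS roughly looks like the sketch below (host, port, and paths are placeholders):

    import org.apache.ambari.logfeeder.util.LogfeederHDFSUtil;
    import org.apache.hadoop.fs.FileSystem;

    public class HdfsCopyExample {
      public static void main(String[] args) {
        FileSystem fs = LogfeederHDFSUtil.buildFileSystem("namenode.example.com", "8020");
        if (fs != null) {
          // overwrite = true, delSrc = false: keep the local spool file around
          boolean copied = LogfeederHDFSUtil.copyFromLocal("/tmp/logfeeder/archive.log",
              "/apps/logsearch/archive.log", fs, true, false);
          System.out.println("copied = " + copied);
          LogfeederHDFSUtil.closeFileSystem(fs);
        }
      }
    }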

+ 14 - 18
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/PlaceholderUtil.java

@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -21,38 +21,34 @@ import java.util.HashMap;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
-public class PlaceholderUtil {
+import org.apache.commons.lang3.StringUtils;
 
-  private static Pattern placeHolderPattern;
-  static {
-    placeHolderPattern = Pattern.compile("\\$\\s*(\\w+)");
+public class PlaceholderUtil {
+  private PlaceholderUtil() {
+    throw new UnsupportedOperationException();
   }
+  
+  private static final Pattern placeHolderPattern = Pattern.compile("\\$\\s*(\\w+)");
 
-  public static String replaceVariables(String inputStr,
-      HashMap<String, String> contextParam) {
+  public static String replaceVariables(String inputStr, HashMap<String, String> contextParam) {
     Matcher m = placeHolderPattern.matcher(inputStr);
-    String placeholder;
-    String replacement;
     String output = new String(inputStr);
     while (m.find()) {
-      placeholder = m.group();
+      String placeholder = m.group();
       if (placeholder != null && !placeholder.isEmpty()) {
-        String key = placeholder.replace("$","").toLowerCase();// remove
-                                                                   // brace
-        replacement = getFromContext(contextParam, placeholder, key);
+        String key = placeholder.replace("$", "").toLowerCase(); // remove the $ prefix
+        String replacement = getFromContext(contextParam, placeholder, key);
         output = output.replace(placeholder, replacement);
       }
     }
     return output;
   }
 
-  private static String getFromContext(HashMap<String, String> contextParam,
-      String defaultValue, String key) {
-    String returnValue = defaultValue;// by default set default value as a
-                                      // return
+  private static String getFromContext(HashMap<String, String> contextParam, String defaultValue, String key) {
+    String returnValue = defaultValue; // by default set default value as a return
     if (contextParam != null) {
       String value = contextParam.get(key);
-      if (value != null && !value.trim().isEmpty()) {
+      if (StringUtils.isNotBlank(value)) {
         returnValue = value;
       }
     }
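replaceVariables is the expansion behind getLogfeederTempDir's $username substitution. A minimal sketch:

    import java.util.HashMap;
    import org.apache.ambari.logfeeder.util.PlaceholderUtil;

    public class PlaceholderExample {
      public static void main(String[] args) {
        HashMap<String, String> context = new HashMap<String, String>();
        context.put("username", System.getProperty("user.name"));
        // Placeholder keys are matched lower-case; unresolved ones stay as-is.
        String tmpDir = PlaceholderUtil.replaceVariables("/tmp/$username/logfeeder/", context);
        System.out.println(tmpDir);
      }
    }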

+ 25 - 56
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/S3Util.java

@@ -19,7 +19,6 @@
 package org.apache.ambari.logfeeder.util;
 
 import java.io.BufferedReader;
-import java.io.File;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.InputStreamReader;
@@ -27,6 +26,7 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.zip.GZIPInputStream;
 
+import org.apache.ambari.logfeeder.common.LogFeederConstants;
 import org.apache.commons.io.IOUtils;
 import org.apache.log4j.Logger;
 
@@ -39,22 +39,19 @@ import com.amazonaws.services.s3.model.ObjectMetadata;
 import com.amazonaws.services.s3.model.PutObjectRequest;
 import com.amazonaws.services.s3.model.S3Object;
 import com.amazonaws.services.s3.transfer.TransferManager;
-import com.amazonaws.services.s3.transfer.Upload;
 
 /**
  * Utility to connect to s3
  */
 public class S3Util {
-  public static final S3Util INSTANCE = new S3Util();
-
   private static final Logger LOG = Logger.getLogger(S3Util.class);
 
-  public static final String S3_PATH_START_WITH = "s3://";
-  public static final String S3_PATH_SEPARATOR = "/";
-
-  public AmazonS3 getS3Client(String accessKey, String secretKey) {
-    AWSCredentials awsCredentials = AWSUtil.INSTANCE.createAWSCredentials(
-        accessKey, secretKey);
+  private S3Util() {
+    throw new UnsupportedOperationException();
+  }
+  
+  public static AmazonS3 getS3Client(String accessKey, String secretKey) {
+    AWSCredentials awsCredentials = AWSUtil.createAWSCredentials(accessKey, secretKey);
     AmazonS3 s3client;
     if (awsCredentials != null) {
       s3client = new AmazonS3Client(awsCredentials);
@@ -64,9 +61,8 @@ public class S3Util {
     return s3client;
   }
 
-  public TransferManager getTransferManager(String accessKey, String secretKey) {
-    AWSCredentials awsCredentials = AWSUtil.INSTANCE.createAWSCredentials(
-        accessKey, secretKey);
+  public static TransferManager getTransferManager(String accessKey, String secretKey) {
+    AWSCredentials awsCredentials = AWSUtil.createAWSCredentials(accessKey, secretKey);
     TransferManager transferManager;
     if (awsCredentials != null) {
       transferManager = new TransferManager(awsCredentials);
@@ -76,35 +72,31 @@ public class S3Util {
     return transferManager;
   }
 
-  public void shutdownTransferManager(TransferManager transferManager) {
+  public static void shutdownTransferManager(TransferManager transferManager) {
     if (transferManager != null) {
       transferManager.shutdownNow();
     }
   }
 
-  public String getBucketName(String s3Path) {
+  public static String getBucketName(String s3Path) {
     String bucketName = null;
     // s3path
     if (s3Path != null) {
-      String[] s3PathParts = s3Path.replace(S3_PATH_START_WITH, "").split(
-          S3_PATH_SEPARATOR);
+      String[] s3PathParts = s3Path.replace(LogFeederConstants.S3_PATH_START_WITH, "").split(LogFeederConstants.S3_PATH_SEPARATOR);
       bucketName = s3PathParts[0];
     }
     return bucketName;
   }
 
-  public String getS3Key(String s3Path) {
+  public static String getS3Key(String s3Path) {
     StringBuilder s3Key = new StringBuilder();
-    // s3path
     if (s3Path != null) {
-      String[] s3PathParts = s3Path.replace(S3_PATH_START_WITH, "").split(
-          S3_PATH_SEPARATOR);
-      ArrayList<String> s3PathList = new ArrayList<String>(
-          Arrays.asList(s3PathParts));
+      String[] s3PathParts = s3Path.replace(LogFeederConstants.S3_PATH_START_WITH, "").split(LogFeederConstants.S3_PATH_SEPARATOR);
+      ArrayList<String> s3PathList = new ArrayList<String>(Arrays.asList(s3PathParts));
       s3PathList.remove(0);// remove bucketName
       for (int index = 0; index < s3PathList.size(); index++) {
         if (index > 0) {
-          s3Key.append(S3_PATH_SEPARATOR);
+          s3Key.append(LogFeederConstants.S3_PATH_SEPARATOR);
         }
         s3Key.append(s3PathList.get(index));
       }
@@ -112,63 +104,41 @@ public class S3Util {
     return s3Key.toString();
   }
 
-  public void uploadFileTos3(String bucketName, String s3Key, File localFile,
-      String accessKey, String secretKey) {
-    TransferManager transferManager = getTransferManager(accessKey, secretKey);
-    try {
-      Upload upload = transferManager.upload(bucketName, s3Key, localFile);
-      upload.waitForUploadResult();
-    } catch (AmazonClientException | InterruptedException e) {
-      LOG.error("s3 uploading failed for file :" + localFile.getAbsolutePath(),
-          e);
-    } finally {
-      shutdownTransferManager(transferManager);
-    }
-  }
-
   /**
    * Get the buffer reader to read s3 file as a stream
    */
-  public BufferedReader getReader(String s3Path, String accessKey,
-      String secretKey) throws IOException {
+  public static BufferedReader getReader(String s3Path, String accessKey, String secretKey) throws IOException {
     // TODO error handling
     // Compression support
     // read header and decide the compression(auto detection)
     // For now hard-code GZIP compression
     String s3Bucket = getBucketName(s3Path);
     String s3Key = getS3Key(s3Path);
-    S3Object fileObj = getS3Client(accessKey, secretKey).getObject(
-        new GetObjectRequest(s3Bucket, s3Key));
-    GZIPInputStream objectInputStream;
+    S3Object fileObj = getS3Client(accessKey, secretKey).getObject(new GetObjectRequest(s3Bucket, s3Key));
     try {
-      objectInputStream = new GZIPInputStream(fileObj.getObjectContent());
-      BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(
-          objectInputStream));
+      GZIPInputStream objectInputStream = new GZIPInputStream(fileObj.getObjectContent());
+      BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(objectInputStream));
       return bufferedReader;
     } catch (IOException e) {
-      LOG.error("Error in creating stream reader for s3 file :" + s3Path,
-          e.getCause());
+      LOG.error("Error in creating stream reader for s3 file :" + s3Path, e.getCause());
       throw e;
     }
   }
 
-  public void writeIntoS3File(String data, String bucketName, String s3Key,
-      String accessKey, String secretKey) {
+  public static void writeIntoS3File(String data, String bucketName, String s3Key, String accessKey, String secretKey) {
     InputStream in = null;
     try {
       in = IOUtils.toInputStream(data, "UTF-8");
     } catch (IOException e) {
       LOG.error(e);
     }
+    
     if (in != null) {
       TransferManager transferManager = getTransferManager(accessKey, secretKey);
       try {
         if (transferManager != null) {
-          transferManager.upload(
-                  new PutObjectRequest(bucketName, s3Key, in,
-                  new ObjectMetadata())).waitForUploadResult();
-          LOG.debug("Data Uploaded to s3 file :" + s3Key + " in bucket :"
-              + bucketName);
+          transferManager.upload(new PutObjectRequest(bucketName, s3Key, in, new ObjectMetadata())).waitForUploadResult();
+          LOG.debug("Data Uploaded to s3 file :" + s3Key + " in bucket :" + bucketName);
         }
       } catch (AmazonClientException | InterruptedException e) {
         LOG.error(e);
@@ -182,5 +152,4 @@ public class S3Util {
       }
     }
   }
-
 }

+ 0 - 116
ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/AppTest.java

@@ -1,116 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.logfeeder;
-
-import java.util.HashMap;
-import java.util.Map;
-
-import org.apache.ambari.logfeeder.filter.FilterGrok;
-import org.apache.log4j.Logger;
-
-import junit.framework.Test;
-import junit.framework.TestCase;
-import junit.framework.TestSuite;
-
-/**
- * Unit test for simple App.
- */
-public class AppTest extends TestCase {
-  static Logger logger = Logger.getLogger(AppTest.class);
-
-  /**
-   * Create the test case
-   *
-   * @param testName name of the test case
-   */
-  public AppTest(String testName) {
-    super(testName);
-  }
-
-  /**
-   * @return the suite of tests being tested
-   */
-  public static Test suite() {
-    return new TestSuite(AppTest.class);
-  }
-
-  /**
-   * Rigourous Test :-)
-   */
-  public void testApp() {
-    assertTrue(true);
-  }
-
-  public void testGrok() {
-    logger.info("testGrok()");
-    FilterGrok grokFilter = new FilterGrok();
-    try {
-      Map<String, Object> map = new HashMap<String, Object>();
-      map.put("message_pattern",
-        "^%{LOGLEVEL:level}%{SPACE}%{GREEDYDATA:log_message}");
-      grokFilter.loadConfig(map);
-      grokFilter.init();
-      String out = grokFilter.grokParse("INFO This is a test");
-      logger.info("out=" + out);
-
-    } catch (Exception e) {
-      // TODO Auto-generated catch block
-      e.printStackTrace();
-      assertFalse(true);
-    }
-
-    assertTrue(true);
-  }
-
-  public void testGrokUGI() {
-    logger.info("testGrok()");
-    String[] ugis = new String[]{"user1@xyz.com (auth:TOKEN)",
-      "ambari-qa@example.com (auth:kerberos)",
-      "my_user@example.com (auth:kerberos)",
-      "hive/bdurai-dojran-2.novalocal@example.com (auth:kerberos)",
-      "just_me",
-      "ambari-qa (auth:PROXY) via hive/myhost.novalocal@EXAMPLE.COM (auth:KERBEROS)"};
-
-    FilterGrok grokFilter = new FilterGrok();
-    try {
-      Map<String, Object> map = new HashMap<String, Object>();
-      // map.put("message_pattern",
-      // "(?<user>([\\w\\d\\-]+))\\/|(?<user>([\\w\\d\\-]+))@|(?<user>([\\w\\d\\-]+))/[\\w\\d\\-.]+@|(?<user>([\\w\\d.\\-_]+))[\\s(]+");
-      // map.put("message_pattern",
-      // "(?<user>([\\w\\d\\-]+))/[\\w\\d\\-.]+@");
-      // *(auth:(?<auth>[\\w\\d\\-]+))
-      // GOOD: map.put("message_pattern", "(?<user>([\\w\\d\\-]+)).+auth:(?<auth>([\\w\\d\\-]+))");
-      // OK: map.put("message_pattern", "(?<user>([\\w\\d\\-]+)).+auth:(?<auth>([\\w\\d\\-]+))|%{USERNAME:xuser}");
-      //map.put("message_pattern", "%{USERNAME:user}.+auth:%{USERNAME:authType}|%{USERNAME:x_user}");
-      map.put("message_pattern", "%{USERNAME:p_user}.+auth:%{USERNAME:p_authType}.+via %{USERNAME:k_user}.+auth:%{USERNAME:k_authType}|%{USERNAME:user}.+auth:%{USERNAME:authType}|%{USERNAME:x_user}");
-      grokFilter.loadConfig(map);
-      grokFilter.init();
-      for (String ugi : ugis) {
-        String out = grokFilter.grokParse(ugi);
-        logger.info(ugi + "=" + out);
-      }
-
-    } catch (Exception e) {
-      // TODO Auto-generated catch block
-      e.printStackTrace();
-      assertFalse(true);
-    }
-    assertTrue(true);
-  }
-}

+ 26 - 29
ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/filter/FilterGrokTest.java

@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -23,7 +23,7 @@ import java.util.Map;
 
 import org.apache.ambari.logfeeder.input.Input;
 import org.apache.ambari.logfeeder.input.InputMarker;
-import org.apache.ambari.logfeeder.output.OutputMgr;
+import org.apache.ambari.logfeeder.output.OutputManager;
 import org.apache.log4j.Logger;
 import org.easymock.Capture;
 import org.easymock.CaptureType;
@@ -40,16 +40,16 @@ public class FilterGrokTest {
   private static final Logger LOG = Logger.getLogger(FilterGrokTest.class);
 
   private FilterGrok filterGrok;
-  private OutputMgr mockOutputMgr;
+  private OutputManager mockOutputManager;
   private Capture<Map<String, Object>> capture;
 
   public void init(Map<String, Object> config) throws Exception {
-    mockOutputMgr = EasyMock.strictMock(OutputMgr.class);
+    mockOutputManager = EasyMock.strictMock(OutputManager.class);
     capture = EasyMock.newCapture(CaptureType.LAST);
 
     filterGrok = new FilterGrok();
     filterGrok.loadConfig(config);
-    filterGrok.setOutputMgr(mockOutputMgr);
+    filterGrok.setOutputManager(mockOutputManager);
     filterGrok.setInput(EasyMock.mock(Input.class));
     filterGrok.init();
   }
@@ -59,19 +59,18 @@ public class FilterGrokTest {
     LOG.info("testFilterGrok_parseMessage()");
 
     Map<String, Object> config = new HashMap<String, Object>();
-    config.put("message_pattern",
-        "(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{GREEDYDATA:log_message}");
+    config.put("message_pattern", "(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{GREEDYDATA:log_message}");
     config.put("multiline_pattern", "^(%{TIMESTAMP_ISO8601:logtime})");
     init(config);
 
-    mockOutputMgr.write(EasyMock.capture(capture), EasyMock.anyObject(InputMarker.class));
+    mockOutputManager.write(EasyMock.capture(capture), EasyMock.anyObject(InputMarker.class));
     EasyMock.expectLastCall();
-    EasyMock.replay(mockOutputMgr);
+    EasyMock.replay(mockOutputManager);
 
-    filterGrok.apply("2016-04-08 15:55:23,548 INFO This is a test", new InputMarker());
-    filterGrok.apply("2016-04-08 15:55:24,548 WARN Next message", new InputMarker());
+    filterGrok.apply("2016-04-08 15:55:23,548 INFO This is a test", new InputMarker(null, null, 0));
+    filterGrok.apply("2016-04-08 15:55:24,548 WARN Next message", new InputMarker(null, null, 0));
 
-    EasyMock.verify(mockOutputMgr);
+    EasyMock.verify(mockOutputManager);
     Map<String, Object> jsonParams = capture.getValue();
 
     assertNotNull(jsonParams);
@@ -86,23 +85,22 @@ public class FilterGrokTest {
     LOG.info("testFilterGrok_parseMultiLineMessage()");
 
     Map<String, Object> config = new HashMap<String, Object>();
-    config.put("message_pattern",
-        "(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{GREEDYDATA:log_message}");
+    config.put("message_pattern", "(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{GREEDYDATA:log_message}");
     config.put("multiline_pattern", "^(%{TIMESTAMP_ISO8601:logtime})");
     init(config);
 
-    mockOutputMgr.write(EasyMock.capture(capture), EasyMock.anyObject(InputMarker.class));
+    mockOutputManager.write(EasyMock.capture(capture), EasyMock.anyObject(InputMarker.class));
     EasyMock.expectLastCall();
-    EasyMock.replay(mockOutputMgr);
+    EasyMock.replay(mockOutputManager);
 
     String multiLineMessage = "This is a multiline test message\r\n" + "having multiple lines\r\n"
         + "as one may expect";
     String[] messageLines = multiLineMessage.split("\r\n");
     for (int i = 0; i < messageLines.length; i++)
-      filterGrok.apply((i == 0 ? "2016-04-08 15:55:23,548 INFO " : "") + messageLines[i], new InputMarker());
+      filterGrok.apply((i == 0 ? "2016-04-08 15:55:23,548 INFO " : "") + messageLines[i], new InputMarker(null, null, 0));
     filterGrok.flush();
 
-    EasyMock.verify(mockOutputMgr);
+    EasyMock.verify(mockOutputManager);
     Map<String, Object> jsonParams = capture.getValue();
 
     assertNotNull(jsonParams);
@@ -117,19 +115,18 @@ public class FilterGrokTest {
     LOG.info("testFilterGrok_notMatchingMesagePattern()");
 
     Map<String, Object> config = new HashMap<String, Object>();
-    config.put("message_pattern",
-        "(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{GREEDYDATA:log_message}");
+    config.put("message_pattern", "(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{GREEDYDATA:log_message}");
     config.put("multiline_pattern", "^(%{TIMESTAMP_ISO8601:logtime})");
     init(config);
 
-    mockOutputMgr.write(EasyMock.capture(capture), EasyMock.anyObject(InputMarker.class));
+    mockOutputManager.write(EasyMock.capture(capture), EasyMock.anyObject(InputMarker.class));
     EasyMock.expectLastCall().anyTimes();
-    EasyMock.replay(mockOutputMgr);
+    EasyMock.replay(mockOutputManager);
 
-    filterGrok.apply("04/08/2016 15:55:23,548 INFO This is a test", new InputMarker());
-    filterGrok.apply("04/08/2016 15:55:24,548 WARN Next message", new InputMarker());
+    filterGrok.apply("04/08/2016 15:55:23,548 INFO This is a test", new InputMarker(null, null, 0));
+    filterGrok.apply("04/08/2016 15:55:24,548 WARN Next message", new InputMarker(null, null, 0));
 
-    EasyMock.verify(mockOutputMgr);
+    EasyMock.verify(mockOutputManager);
     assertFalse("Something was captured!", capture.hasCaptured());
   }
 
@@ -141,12 +138,12 @@ public class FilterGrokTest {
     config.put("multiline_pattern", "^(%{TIMESTAMP_ISO8601:logtime})");
     init(config);
 
-    EasyMock.replay(mockOutputMgr);
+    EasyMock.replay(mockOutputManager);
 
-    filterGrok.apply("2016-04-08 15:55:23,548 INFO This is a test", new InputMarker());
-    filterGrok.apply("2016-04-08 15:55:24,548 WARN Next message", new InputMarker());
+    filterGrok.apply("2016-04-08 15:55:23,548 INFO This is a test", new InputMarker(null, null, 0));
+    filterGrok.apply("2016-04-08 15:55:24,548 WARN Next message", new InputMarker(null, null, 0));
 
-    EasyMock.verify(mockOutputMgr);
+    EasyMock.verify(mockOutputManager);
     assertFalse("Something was captured", capture.hasCaptured());
   }
 

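Note on a pattern repeated throughout these test diffs: the mutable, no-argument InputMarker is replaced by a three-argument constructor. A minimal sketch of the immutable shape implied by the call sites (new InputMarker(input, null, 0)); the second and third field names, base64FileKey and lineNumber, are assumptions not confirmed by this diff:

    public class InputMarker {
      public final Input input;
      public final String base64FileKey;  // assumed name; the filter tests pass null
      public final int lineNumber;        // assumed name; the filter tests pass 0

      public InputMarker(Input input, String base64FileKey, int lineNumber) {
        this.input = input;
        this.base64FileKey = base64FileKey;
        this.lineNumber = lineNumber;
      }
    }
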
+ 20 - 21
ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/filter/FilterJSONTest.java

@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -27,8 +27,7 @@ import java.util.TimeZone;
 
 import org.apache.ambari.logfeeder.common.LogfeederException;
 import org.apache.ambari.logfeeder.input.InputMarker;
-import org.apache.ambari.logfeeder.output.OutputMgr;
-import org.apache.ambari.logfeeder.util.LogFeederUtil;
+import org.apache.ambari.logfeeder.output.OutputManager;
 import org.apache.log4j.Logger;
 import org.easymock.Capture;
 import org.easymock.CaptureType;
@@ -44,16 +43,16 @@ public class FilterJSONTest {
   private static final Logger LOG = Logger.getLogger(FilterJSONTest.class);
 
   private FilterJSON filterJson;
-  private OutputMgr mockOutputMgr;
+  private OutputManager mockOutputManager;
   private Capture<Map<String, Object>> capture;
 
   public void init(Map<String, Object> params) throws Exception {
-    mockOutputMgr = EasyMock.strictMock(OutputMgr.class);
+    mockOutputManager = EasyMock.strictMock(OutputManager.class);
     capture = EasyMock.newCapture(CaptureType.LAST);
 
     filterJson = new FilterJSON();
     filterJson.loadConfig(params);
-    filterJson.setOutputMgr(mockOutputMgr);
+    filterJson.setOutputManager(mockOutputManager);
     filterJson.init();
   }
 
@@ -63,17 +62,17 @@ public class FilterJSONTest {
 
     init(new HashMap<String, Object>());
 
-    mockOutputMgr.write(EasyMock.capture(capture), EasyMock.anyObject(InputMarker.class));
+    mockOutputManager.write(EasyMock.capture(capture), EasyMock.anyObject(InputMarker.class));
     EasyMock.expectLastCall();
-    EasyMock.replay(mockOutputMgr);
+    EasyMock.replay(mockOutputManager);
 
     Date d = new Date();
-    DateFormat sdf = new SimpleDateFormat(LogFeederUtil.SOLR_DATE_FORMAT);
+    DateFormat sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'");
     sdf.setTimeZone(TimeZone.getTimeZone("UTC"));
     String dateString = sdf.format(d);
-    filterJson.apply("{ logtime: '" + d.getTime() + "', line_number: 100 }", new InputMarker());
+    filterJson.apply("{ logtime: '" + d.getTime() + "', line_number: 100 }", new InputMarker(null, null, 0));
 
-    EasyMock.verify(mockOutputMgr);
+    EasyMock.verify(mockOutputManager);
     Map<String, Object> jsonParams = capture.getValue();
 
     assertEquals("Incorrect decoding: log time", dateString, jsonParams.remove("logtime"));
@@ -87,17 +86,17 @@ public class FilterJSONTest {
 
     init(new HashMap<String, Object>());
 
-    mockOutputMgr.write(EasyMock.capture(capture), EasyMock.anyObject(InputMarker.class));
+    mockOutputManager.write(EasyMock.capture(capture), EasyMock.anyObject(InputMarker.class));
     EasyMock.expectLastCall();
-    EasyMock.replay(mockOutputMgr);
+    EasyMock.replay(mockOutputManager);
 
     Date d = new Date();
-    DateFormat sdf = new SimpleDateFormat(LogFeederUtil.SOLR_DATE_FORMAT);
+    DateFormat sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'");
     sdf.setTimeZone(TimeZone.getTimeZone("UTC"));
     String dateString = sdf.format(d);
-    filterJson.apply("{ logtime: '" + d.getTime() + "', some_field: 'abc' }", new InputMarker());
+    filterJson.apply("{ logtime: '" + d.getTime() + "', some_field: 'abc' }", new InputMarker(null, null, 0));
 
-    EasyMock.verify(mockOutputMgr);
+    EasyMock.verify(mockOutputManager);
     Map<String, Object> jsonParams = capture.getValue();
 
     assertEquals("Incorrect decoding: log time", dateString, jsonParams.remove("logtime"));
@@ -111,13 +110,13 @@ public class FilterJSONTest {
 
     init(new HashMap<String, Object>());
 
-    mockOutputMgr.write(EasyMock.capture(capture), EasyMock.anyObject(InputMarker.class));
+    mockOutputManager.write(EasyMock.capture(capture), EasyMock.anyObject(InputMarker.class));
     EasyMock.expectLastCall();
-    EasyMock.replay(mockOutputMgr);
+    EasyMock.replay(mockOutputManager);
 
-    filterJson.apply("{ line_number: 100, some_field: 'abc' }", new InputMarker());
+    filterJson.apply("{ line_number: 100, some_field: 'abc' }", new InputMarker(null, null, 0));
 
-    EasyMock.verify(mockOutputMgr);
+    EasyMock.verify(mockOutputManager);
     Map<String, Object> jsonParams = capture.getValue();
 
     assertEquals("Incorrect decoding: line number", 100l, jsonParams.remove("line_number"));
@@ -132,7 +131,7 @@ public class FilterJSONTest {
     init(new HashMap<String, Object>());
     String inputStr="invalid json";
     try{
-    filterJson.apply(inputStr,new InputMarker());
+    filterJson.apply(inputStr, new InputMarker(null, null, 0));
    fail("Expected LogfeederException did not occur");
     }catch(LogfeederException logfeederException){
       assertEquals("Json parsing failed for inputstr = "+inputStr, logfeederException.getLocalizedMessage());

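The FilterJSON tests now inline the Solr timestamp pattern instead of referencing the removed LogFeederUtil.SOLR_DATE_FORMAT constant. For reference, the exact formatting the tests perform, copied from the diff above:

    import java.text.DateFormat;
    import java.text.SimpleDateFormat;
    import java.util.Date;
    import java.util.TimeZone;

    DateFormat sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'");
    sdf.setTimeZone(TimeZone.getTimeZone("UTC"));
    String dateString = sdf.format(new Date());  // e.g. 2016-04-08T15:55:23.548Z
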
+ 17 - 17
ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/filter/FilterKeyValueTest.java

@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -22,7 +22,7 @@ import java.util.HashMap;
 import java.util.Map;
 
 import org.apache.ambari.logfeeder.input.InputMarker;
-import org.apache.ambari.logfeeder.output.OutputMgr;
+import org.apache.ambari.logfeeder.output.OutputManager;
 import org.apache.log4j.Logger;
 import org.easymock.Capture;
 import org.easymock.CaptureType;
@@ -38,16 +38,16 @@ public class FilterKeyValueTest {
   private static final Logger LOG = Logger.getLogger(FilterKeyValueTest.class);
 
   private FilterKeyValue filterKeyValue;
-  private OutputMgr mockOutputMgr;
+  private OutputManager mockOutputManager;
   private Capture<Map<String, Object>> capture;
 
   public void init(Map<String, Object> config) throws Exception {
-    mockOutputMgr = EasyMock.strictMock(OutputMgr.class);
+    mockOutputManager = EasyMock.strictMock(OutputManager.class);
     capture = EasyMock.newCapture(CaptureType.LAST);
 
     filterKeyValue = new FilterKeyValue();
     filterKeyValue.loadConfig(config);
-    filterKeyValue.setOutputMgr(mockOutputMgr);
+    filterKeyValue.setOutputManager(mockOutputManager);
     filterKeyValue.init();
   }
 
@@ -61,13 +61,13 @@ public class FilterKeyValueTest {
     // using default value split:
     init(config);
 
-    mockOutputMgr.write(EasyMock.capture(capture), EasyMock.anyObject(InputMarker.class));
+    mockOutputManager.write(EasyMock.capture(capture), EasyMock.anyObject(InputMarker.class));
     EasyMock.expectLastCall();
-    EasyMock.replay(mockOutputMgr);
+    EasyMock.replay(mockOutputManager);
 
-    filterKeyValue.apply("{ keyValueField: 'name1=value1&name2=value2' }", new InputMarker());
+    filterKeyValue.apply("{ keyValueField: 'name1=value1&name2=value2' }", new InputMarker(null, null, 0));
 
-    EasyMock.verify(mockOutputMgr);
+    EasyMock.verify(mockOutputManager);
     Map<String, Object> jsonParams = capture.getValue();
 
     assertEquals("Original missing!", "name1=value1&name2=value2", jsonParams.remove("keyValueField"));
@@ -85,13 +85,13 @@ public class FilterKeyValueTest {
     // using default value split: =
     init(config);
 
-    mockOutputMgr.write(EasyMock.capture(capture), EasyMock.anyObject(InputMarker.class));
+    mockOutputManager.write(EasyMock.capture(capture), EasyMock.anyObject(InputMarker.class));
     EasyMock.expectLastCall().anyTimes();
-    EasyMock.replay(mockOutputMgr);
+    EasyMock.replay(mockOutputManager);
 
-    filterKeyValue.apply("{ keyValueField: 'name1=value1&name2=value2' }", new InputMarker());
+    filterKeyValue.apply("{ keyValueField: 'name1=value1&name2=value2' }", new InputMarker(null, null, 0));
 
-    EasyMock.verify(mockOutputMgr);
+    EasyMock.verify(mockOutputManager);
     assertFalse("Something was captured!", capture.hasCaptured());
   }
 
@@ -105,13 +105,13 @@ public class FilterKeyValueTest {
     init(config);
 
     // using default value split: =
-    mockOutputMgr.write(EasyMock.capture(capture), EasyMock.anyObject(InputMarker.class));
+    mockOutputManager.write(EasyMock.capture(capture), EasyMock.anyObject(InputMarker.class));
     EasyMock.expectLastCall().anyTimes();
-    EasyMock.replay(mockOutputMgr);
+    EasyMock.replay(mockOutputManager);
 
-    filterKeyValue.apply("{ otherField: 'name1=value1&name2=value2' }", new InputMarker());
+    filterKeyValue.apply("{ otherField: 'name1=value1&name2=value2' }", new InputMarker(null, null, 0));
 
-    EasyMock.verify(mockOutputMgr);
+    EasyMock.verify(mockOutputManager);
     Map<String, Object> jsonParams = capture.getValue();
 
     assertEquals("Original missing!", "name1=value1&name2=value2", jsonParams.remove("otherField"));

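The FilterKeyValue tests feed 'name1=value1&name2=value2' through the filter and expect the original field to be kept alongside the extracted name1/name2 fields. A hedged sketch of the split the tests exercise, assuming a field split of "&" and the default value split of "=" (the real parsing lives in FilterKeyValue, which is not part of this diff):

    Map<String, String> extracted = new HashMap<>();
    for (String pair : "name1=value1&name2=value2".split("&")) {
      String[] kv = pair.split("=");
      extracted.put(kv[0], kv[1]);  // {name1=value1, name2=value2}
    }
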
+ 12 - 12
ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/input/InputFileTest.java

@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -104,7 +104,7 @@ public class InputFileTest {
 
     inputFile = new InputFile();
     inputFile.loadConfig(config);
-    inputFile.setFirstFilter(capture);
+    inputFile.addFilter(capture);
     inputFile.init();
   }
 
@@ -117,10 +117,10 @@ public class InputFileTest {
 
     init(testFile.getAbsolutePath());
 
-    InputMgr inputMgr = EasyMock.createStrictMock(InputMgr.class);
-    EasyMock.expect(inputMgr.getCheckPointFolderFile()).andReturn(checkPointDir);
-    EasyMock.replay(inputMgr);
-    inputFile.setInputMgr(inputMgr);
+    InputManager inputManager = EasyMock.createStrictMock(InputManager.class);
+    EasyMock.expect(inputManager.getCheckPointFolderFile()).andReturn(checkPointDir);
+    EasyMock.replay(inputManager);
+    inputFile.setInputManager(inputManager);
 
     inputFile.isReady();
     inputFile.start();
@@ -129,7 +129,7 @@ public class InputFileTest {
     for (int row = 0; row < 3; row++)
       assertEquals("Row #" + (row + 1) + " not correct", TEST_LOG_FILE_ROWS[row], rows.get(row));
 
-    EasyMock.verify(inputMgr);
+    EasyMock.verify(inputManager);
   }
 
   @Test
@@ -140,10 +140,10 @@ public class InputFileTest {
     File testFile = createFile("process6.log");
     init(testFile.getAbsolutePath());
 
-    InputMgr inputMgr = EasyMock.createStrictMock(InputMgr.class);
-    EasyMock.expect(inputMgr.getCheckPointFolderFile()).andReturn(checkPointDir).times(2);
-    EasyMock.replay(inputMgr);
-    inputFile.setInputMgr(inputMgr);
+    InputManager inputManager = EasyMock.createStrictMock(InputManager.class);
+    EasyMock.expect(inputManager.getCheckPointFolderFile()).andReturn(checkPointDir).times(2);
+    EasyMock.replay(inputManager);
+    inputFile.setInputManager(inputManager);
 
     inputFile.isReady();
     inputFile.start();
@@ -155,7 +155,7 @@ public class InputFileTest {
     for (int row = 0; row < 6; row++)
       assertEquals("Row #" + (row + 1) + " not correct", TEST_LOG_FILE_ROWS[row], rows.get(row));
 
-    EasyMock.verify(inputMgr);
+    EasyMock.verify(inputManager);
   }
 
   @Test

+ 241 - 0
ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/input/InputManagerTest.java

@@ -0,0 +1,241 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.logfeeder.input;
+
+import static org.easymock.EasyMock.*;
+import static org.junit.Assert.*;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.ambari.logfeeder.metrics.MetricData;
+import org.junit.Test;
+
+public class InputManagerTest {
+
+  @Test
+  public void testInputManager_addAndRemoveInputs() {
+    Input input1 = strictMock(Input.class);
+    Input input2 = strictMock(Input.class);
+    Input input3 = strictMock(Input.class);
+    Input input4 = strictMock(Input.class);
+    
+    expect(input3.getShortDescription()).andReturn("").times(2);
+    expect(input4.getShortDescription()).andReturn("").once();
+    
+    replay(input1, input2, input3, input4);
+    
+    InputManager manager = new InputManager();
+    manager.add(input1);
+    manager.add(input2);
+    manager.add(input3);
+    
+    manager.removeInput(input3);
+    manager.removeInput(input4);
+    
+    verify(input1, input2, input3, input4);
+    
+    List<Input> inputList = manager.getInputList();
+    assertEquals(inputList.size(), 2);
+    assertEquals(inputList.get(0), input1);
+    assertEquals(inputList.get(1), input2);
+  }
+
+  @Test
+  public void testInputManager_init() throws Exception {
+    Input input1 = strictMock(Input.class);
+    Input input2 = strictMock(Input.class);
+    Input input3 = strictMock(Input.class);
+    
+    input1.init(); expectLastCall();
+    input2.init(); expectLastCall();
+    input3.init(); expectLastCall();
+    
+    expect(input1.isTail()).andReturn(false);
+    expect(input2.isTail()).andReturn(false);
+    expect(input3.isTail()).andReturn(false);
+    
+    replay(input1, input2, input3);
+    
+    InputManager manager = new InputManager();
+    manager.add(input1);
+    manager.add(input2);
+    manager.add(input3);
+    
+    manager.init();
+    
+    verify(input1, input2, input3);
+  }
+
+  @Test
+  public void testInputManager_monitor() throws Exception {
+    Input input1 = strictMock(Input.class);
+    Input input2 = strictMock(Input.class);
+    Input input3 = strictMock(Input.class);
+    
+    expect(input1.isReady()).andReturn(true);
+    expect(input2.isReady()).andReturn(true);
+    expect(input3.isReady()).andReturn(false);
+    
+    expect(input1.monitor()).andReturn(false);
+    expect(input2.monitor()).andReturn(false);
+    expect(input3.isTail()).andReturn(false);
+    expect(input3.getShortDescription()).andReturn("").once();
+    
+    replay(input1, input2, input3);
+    
+    InputManager manager = new InputManager();
+    manager.add(input1);
+    manager.add(input2);
+    manager.add(input3);
+    
+    manager.monitor();
+    
+    verify(input1, input2, input3);
+  }
+  
+
+  @Test
+  public void testInputManager_addMetricsContainers() throws Exception {
+    List<MetricData> metrics = new ArrayList<MetricData>();
+    
+    Input input1 = strictMock(Input.class);
+    Input input2 = strictMock(Input.class);
+    Input input3 = strictMock(Input.class);
+    
+    input1.addMetricsContainers(metrics); expectLastCall();
+    input2.addMetricsContainers(metrics); expectLastCall();
+    input3.addMetricsContainers(metrics); expectLastCall();
+    
+    expect(input1.isReady()).andReturn(true);
+    expect(input2.isReady()).andReturn(true);
+    expect(input3.isReady()).andReturn(false);
+    
+    replay(input1, input2, input3);
+    
+    InputManager manager = new InputManager();
+    manager.add(input1);
+    manager.add(input2);
+    manager.add(input3);
+    
+    manager.addMetricsContainers(metrics);
+    
+    verify(input1, input2, input3);
+  }
+
+  @Test
+  public void testInputManager_logStat() throws Exception {
+    Input input1 = strictMock(Input.class);
+    Input input2 = strictMock(Input.class);
+    Input input3 = strictMock(Input.class);
+    
+    input1.logStat(); expectLastCall();
+    input2.logStat(); expectLastCall();
+    input3.logStat(); expectLastCall();
+    
+    expect(input1.isReady()).andReturn(true);
+    expect(input2.isReady()).andReturn(true);
+    expect(input3.isReady()).andReturn(false);
+    
+    replay(input1, input2, input3);
+    
+    InputManager manager = new InputManager();
+    manager.add(input1);
+    manager.add(input2);
+    manager.add(input3);
+    
+    manager.logStats();
+    
+    verify(input1, input2, input3);
+  }
+
+  @Test
+  public void testInputManager_waitOnAllInputs() throws Exception {
+    Input input1 = strictMock(Input.class);
+    Input input2 = strictMock(Input.class);
+    Input input3 = strictMock(Input.class);
+    
+    Thread mockThread = strictMock(Thread.class);
+    
+    expect(input1.getThread()).andReturn(null);
+    expect(input2.getThread()).andReturn(null);
+    expect(input3.getThread()).andReturn(mockThread);
+    
+    mockThread.join(); expectLastCall();
+    
+    replay(input1, input2, input3);
+    
+    InputManager manager = new InputManager();
+    manager.add(input1);
+    manager.add(input2);
+    manager.add(input3);
+    
+    manager.waitOnAllInputs();
+    
+    verify(input1, input2, input3);
+  }
+
+  @Test
+  public void testInputManager_checkInAll() throws Exception {
+    Input input1 = strictMock(Input.class);
+    Input input2 = strictMock(Input.class);
+    Input input3 = strictMock(Input.class);
+    
+    input1.lastCheckIn(); expectLastCall();
+    input2.lastCheckIn(); expectLastCall();
+    input3.lastCheckIn(); expectLastCall();
+    
+    replay(input1, input2, input3);
+    
+    InputManager manager = new InputManager();
+    manager.add(input1);
+    manager.add(input2);
+    manager.add(input3);
+    
+    manager.checkInAll();
+    
+    verify(input1, input2, input3);
+  }
+
+  @Test
+  public void testInputManager_close() throws Exception {
+    Input input1 = strictMock(Input.class);
+    Input input2 = strictMock(Input.class);
+    Input input3 = strictMock(Input.class);
+    
+    input1.setDrain(true); expectLastCall();
+    input2.setDrain(true); expectLastCall();
+    input3.setDrain(true); expectLastCall();
+    
+    expect(input1.isClosed()).andReturn(true);
+    expect(input2.isClosed()).andReturn(true);
+    expect(input3.isClosed()).andReturn(true);
+    
+    replay(input1, input2, input3);
+    
+    InputManager manager = new InputManager();
+    manager.add(input1);
+    manager.add(input2);
+    manager.add(input3);
+    
+    manager.close();
+    
+    verify(input1, input2, input3);
+  }
+}

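The new InputManagerTest pins down the manager's lifecycle: add() registers inputs, init() initializes them, monitor() runs only the inputs whose isReady() returns true, and close() drains each input via setDrain(true). A compact illustration of the EasyMock record/replay/verify cycle the whole file follows, using only calls that appear in the test above:

    Input input = strictMock(Input.class);
    input.init(); expectLastCall();          // record expectations
    expect(input.isTail()).andReturn(false);
    replay(input);                           // switch to replay mode

    InputManager manager = new InputManager();
    manager.add(input);
    manager.init();                          // exercise the code under test

    verify(input);                           // assert every expectation was met
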
+ 117 - 0
ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/logconfig/LogConfigHandlerTest.java

@@ -0,0 +1,117 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.logfeeder.logconfig;
+
+import java.lang.reflect.Field;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+
+import static org.easymock.EasyMock.*;
+import static org.junit.Assert.*;
+
+import org.apache.ambari.logfeeder.util.LogFeederUtil;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+public class LogConfigHandlerTest {
+  
+  private static LogConfigFetcher mockFetcher;
+  
+  private static final Map<String, Object> CONFIG_MAP = new HashMap<>();
+  static {
+    CONFIG_MAP.put("jsons",
+        "{'filter':{" +
+          "'configured_log_file':{" +
+            "'label':'configured_log_file'," +
+            "'hosts':[]," +
+            "'defaultLevels':['FATAL','ERROR','WARN','INFO']," +
+            "'overrideLevels':[]}," +
+          "'configured_log_file2':{" +
+            "'label':'configured_log_file2'," +
+            "'hosts':['host1']," +
+            "'defaultLevels':['FATAL','ERROR','WARN','INFO']," +
+            "'overrideLevels':['FATAL','ERROR','WARN','INFO','DEBUG','TRACE']," +
+            "'expiryTime':'3000-01-01T00:00:00.000Z'}," +
+          "'configured_log_file3':{" +
+            "'label':'configured_log_file3'," +
+            "'hosts':['host1']," +
+            "'defaultLevels':['FATAL','ERROR','WARN','INFO']," +
+            "'overrideLevels':['FATAL','ERROR','WARN','INFO','DEBUG','TRACE']," +
+            "'expiryTime':'1000-01-01T00:00:00.000Z'}" +
+          "}}");
+  }
+  
+  @BeforeClass
+  public static void init() throws Exception {
+    mockFetcher = strictMock(LogConfigFetcher.class);
+    Field f = LogConfigFetcher.class.getDeclaredField("instance");
+    f.setAccessible(true);
+    f.set(null, mockFetcher);
+    expect(mockFetcher.getConfigDoc()).andReturn(CONFIG_MAP).anyTimes();
+    replay(mockFetcher);
+    
+    LogFeederUtil.loadProperties("logfeeder.properties", null);
+    LogConfigHandler.handleConfig();
+    Thread.sleep(1000);
+  }
+  
+  @Test
+  public void testLogConfigHandler_emptyDataAllowed() throws Exception {
+    assertTrue(FilterLogData.INSTANCE.isAllowed((String)null));
+    assertTrue(FilterLogData.INSTANCE.isAllowed(""));
+    assertTrue(FilterLogData.INSTANCE.isAllowed(Collections.<String, Object> emptyMap()));
+  }
+  
+  @Test
+  public void testLogConfigHandler_notConfiguredLogAllowed() throws Exception {
+    assertTrue(FilterLogData.INSTANCE.isAllowed("{'host':'host1', 'type':'not_configured_log_file', 'level':'INFO'}"));
+  }
+  
+  @Test
+  public void testLogConfigHandler_configuredDataAllow() throws Exception {
+    assertTrue(FilterLogData.INSTANCE.isAllowed("{'host':'host1', 'type':'configured_log_file', 'level':'INFO'}"));
+  }
+  
+  @Test
+  public void testLogConfigHandler_configuredDataDontAllow() throws Exception {
+    assertFalse(FilterLogData.INSTANCE.isAllowed("{'host':'host1', 'type':'configured_log_file', 'level':'DEBUG'}"));
+  }
+  
+  @Test
+  public void testLogConfigHandler_overriddenConfiguredData() throws Exception {
+    assertTrue(FilterLogData.INSTANCE.isAllowed("{'host':'host1', 'type':'configured_log_file2', 'level':'DEBUG'}"));
+  }
+  
+  @Test
+  public void testLogConfigHandler_overriddenConfiguredDataDifferentHost() throws Exception {
+    assertFalse(FilterLogData.INSTANCE.isAllowed("{'host':'host2', 'type':'configured_log_file2', 'level':'DEBUG'}"));
+  }
+  
+  @Test
+  public void testLogConfigHandler_overriddenConfiguredDataExpired() throws Exception {
+    assertFalse(FilterLogData.INSTANCE.isAllowed("{'host':'host1', 'type':'configured_log_file3', 'level':'DEBUG'}"));
+  }
+  
+  @AfterClass
+  public static void finish() {
+    verify(mockFetcher);
+  }
+}

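The three filter entries in CONFIG_MAP differ mainly in expiryTime: configured_log_file2 expires in year 3000, so its DEBUG/TRACE override still applies, while configured_log_file3 expired in year 1000, so the override is ignored. A hedged sketch of the expiry comparison the handler presumably performs (the actual code in LogConfigHandler is not shown in this diff):

    DateFormat df = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'");
    Date expiry = df.parse("3000-01-01T00:00:00.000Z");  // throws ParseException
    boolean overrideActive = expiry.after(new Date());   // true for 3000, false for 1000
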
+ 8 - 9
ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/mapper/MapperDateTest.java

@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -24,7 +24,6 @@ import java.util.Date;
 import java.util.HashMap;
 import java.util.Map;
 
-import org.apache.ambari.logfeeder.util.LogFeederUtil;
 import org.apache.commons.lang3.time.DateUtils;
 import org.apache.log4j.Logger;
 import org.junit.Test;
@@ -61,7 +60,7 @@ public class MapperDateTest {
     LOG.info("testMapperDate_pattern()");
 
     Map<String, Object> mapConfigs = new HashMap<>();
-    mapConfigs.put("target_date_pattern", LogFeederUtil.DATE_FORMAT);
+    mapConfigs.put("target_date_pattern", "yyyy-MM-dd HH:mm:ss.SSS");
 
     MapperDate mapperDate = new MapperDate();
     assertTrue("Could not initialize!", mapperDate.init(null, "someField", null, mapConfigs));
@@ -70,7 +69,7 @@ public class MapperDateTest {
     String dateString = "2016-04-08 15:55:23.548";
     Object mappedValue = mapperDate.apply(jsonObj, dateString);
 
-    Date d = new SimpleDateFormat(LogFeederUtil.DATE_FORMAT).parse(dateString);
+    Date d = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS").parse(dateString);
 
     assertEquals("Value wasn't matched properly", d, mappedValue);
     assertEquals("Value wasn't put into jsonObj", d, jsonObj.remove("someField"));
@@ -130,7 +129,7 @@ public class MapperDateTest {
     LOG.info("testMapperDate_invalidDateStringValue()");
 
     Map<String, Object> mapConfigs = new HashMap<>();
-    mapConfigs.put("target_date_pattern", LogFeederUtil.DATE_FORMAT);
+    mapConfigs.put("target_date_pattern", "yyyy-MM-dd HH:mm:ss.SSS");
 
     MapperDate mapperDate = new MapperDate();
     assertTrue("Could not initialize!", mapperDate.init(null, "someField", null, mapConfigs));
@@ -149,7 +148,7 @@ public class MapperDateTest {
     String fieldName = "logtime";
     Calendar currentCalendar = Calendar.getInstance();
     Map<String, Object> mapConfigs = new HashMap<>();
-    mapConfigs.put("target_date_pattern", LogFeederUtil.DATE_FORMAT);
+    mapConfigs.put("target_date_pattern", "yyyy-MM-dd HH:mm:ss.SSS");
     String srcDatePattern ="MMM dd HH:mm:ss";
     mapConfigs.put("src_date_pattern", srcDatePattern);
     MapperDate mapperDate = new MapperDate();
@@ -160,7 +159,7 @@ public class MapperDateTest {
     nextMonthCalendar.set(Calendar.MONTH, currentCalendar.get(Calendar.MONTH)+1 );
     String inputDateStr = new SimpleDateFormat("MMM").format(nextMonthCalendar.getTime()) + " 01 12:01:45";
     Object mappedValue = mapperDate.apply(jsonObj, inputDateStr);
-    Date mappedDateValue = new SimpleDateFormat(LogFeederUtil.DATE_FORMAT).parse(mappedValue.toString());
+    Date mappedDateValue = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS").parse(mappedValue.toString());
     String mappedDateValueStr = new SimpleDateFormat(srcDatePattern).format(mappedDateValue);
     assertEquals(Date.class, mappedDateValue.getClass());
     
@@ -179,7 +178,7 @@ public class MapperDateTest {
     String fieldName = "logtime";
     Calendar currentCalendar = Calendar.getInstance();
     Map<String, Object> mapConfigs = new HashMap<>();
-    mapConfigs.put("target_date_pattern", LogFeederUtil.DATE_FORMAT);
+    mapConfigs.put("target_date_pattern", "yyyy-MM-dd HH:mm:ss.SSS");
     String srcDatePattern ="MMM dd HH:mm:ss";
     mapConfigs.put("src_date_pattern", srcDatePattern);
     MapperDate mapperDate = new MapperDate();
@@ -187,7 +186,7 @@ public class MapperDateTest {
     Map<String, Object> jsonObj = new HashMap<>();
     String inputDateStr = new SimpleDateFormat("MMM").format(currentCalendar.getTime()) + " 01 12:01:45";
     Object mappedValue = mapperDate.apply(jsonObj, inputDateStr);
-    Date mappedDateValue = new SimpleDateFormat(LogFeederUtil.DATE_FORMAT).parse(mappedValue.toString());
+    Date mappedDateValue = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS").parse(mappedValue.toString());
     String mappedDateValueStr = new SimpleDateFormat(srcDatePattern).format(mappedDateValue);
     assertEquals(Date.class, mappedDateValue.getClass());
     int expectedLogYear = currentCalendar.get(Calendar.YEAR);

+ 1 - 1
ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/mapper/MapperFieldNameTest.java

@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information

+ 1 - 1
ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/mapper/MapperFieldValueTest.java

@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information

+ 128 - 0
ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/metrics/MetricsManagerTest.java

@@ -0,0 +1,128 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.logfeeder.metrics;
+
+import static org.easymock.EasyMock.*;
+import static org.junit.Assert.*;
+import org.easymock.Capture;
+import org.easymock.CaptureType;
+import org.easymock.EasyMock;
+
+import java.lang.reflect.Field;
+import java.util.Arrays;
+import java.util.List;
+import java.util.TreeMap;
+
+import org.apache.ambari.logfeeder.util.LogFeederUtil;
+import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
+import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+public class MetricsManagerTest {
+
+  private MetricsManager manager;
+  private LogFeederAMSClient mockClient;
+  private Capture<TimelineMetrics> capture;
+  
+  @BeforeClass
+  public static void loadProperties() throws Exception {
+    LogFeederUtil.loadProperties("logfeeder.properties", null);
+  }
+  
+  @Before
+  public void init() throws Exception {
+    manager = new MetricsManager();
+    manager.init();
+    
+    mockClient = strictMock(LogFeederAMSClient.class);
+    Field f = MetricsManager.class.getDeclaredField("amsClient");
+    f.setAccessible(true);
+    f.set(manager, mockClient);
+    
+    capture = EasyMock.newCapture(CaptureType.FIRST);
+    mockClient.emitMetrics(EasyMock.capture(capture));
+    EasyMock.expectLastCall().andReturn(true).once();
+    
+    replay(mockClient);
+  }
+  
+  @Test
+  public void testMetricManager_pointInTime() throws Exception {
+    MetricData metricCount1 = new MetricData("metric1", true);
+    metricCount1.value = 123;
+    metricCount1.prevPublishValue = 0;
+    metricCount1.publishCount = 0;
+    
+    manager.useMetrics(Arrays.asList(metricCount1));
+    
+    verify(mockClient);
+    
+    TimelineMetrics metrics = capture.getValue();
+    List<TimelineMetric> metricList = metrics.getMetrics();
+    assertEquals(metricList.size(), 1);
+    
+    TimelineMetric metric = metricList.get(0);
+    assertEquals(metric.getHostName(), "test_host_name");
+    assertEquals(metric.getAppId(), "logfeeder");
+    assertEquals(metric.getMetricName(), "metric1");
+    assertEquals(metric.getType(), "Long");
+    
+    TreeMap<Long, Double> values = metric.getMetricValues();
+    assertEquals(values.size(), 1);
+    assertEquals(values.firstEntry().getValue(), Double.valueOf(123.0));
+  }
+  
+  @Test
+  public void testMetricManager_notPointInTime() throws Exception {
+    MetricData metricCount1 = new MetricData("metric1", false);
+    metricCount1.value = 123;
+    metricCount1.prevPublishValue = 0;
+    metricCount1.publishCount = 0;
+    
+    MetricData metricCount2 = new MetricData("metric1", false);
+    metricCount2.value = 123;
+    metricCount2.prevPublishValue = 100;
+    metricCount2.publishCount = 0;
+    
+    MetricData metricCount3 = new MetricData("metric1", false); // not included due to decrease of count
+    metricCount3.value = 99;
+    metricCount3.prevPublishValue = 100;
+    metricCount3.publishCount = 1;
+    
+    manager.useMetrics(Arrays.asList(metricCount1, metricCount2, metricCount3));
+    
+    verify(mockClient);
+    
+    TimelineMetrics metrics = capture.getValue();
+    List<TimelineMetric> metricList = metrics.getMetrics();
+    assertEquals(metricList.size(), 1);
+    
+    TimelineMetric metric = metricList.get(0);
+    assertEquals(metric.getHostName(), "test_host_name");
+    assertEquals(metric.getAppId(), "logfeeder");
+    assertEquals(metric.getMetricName(), "metric1");
+    assertEquals(metric.getType(), "Long");
+    
+    TreeMap<Long, Double> values = metric.getMetricValues();
+    assertEquals(values.size(), 1);
+    assertEquals(values.firstEntry().getValue(), Double.valueOf(146.0));
+  }
+}

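Why testMetricManager_notPointInTime expects 146.0: for non point-in-time metrics the manager appears to publish the delta between value and prevPublishValue, summing deltas that share a metric name and dropping any metric whose count decreased. Reconstructed from the test data:

    long delta1 = 123 - 0;    // metricCount1: value - prevPublishValue
    long delta2 = 123 - 100;  // metricCount2
    // metricCount3: 99 - 100 is negative, so it is excluded
    long published = delta1 + delta2;  // 146
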
+ 2 - 3
ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/OutputKafkaTest.java

@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -85,8 +85,7 @@ public class OutputKafkaTest {
     EasyMock.replay(mockKafkaProducer);
 
     for (int i = 0; i < 10; i++) {
-      InputMarker inputMarker = new InputMarker();
-      inputMarker.input = EasyMock.mock(Input.class);
+      InputMarker inputMarker = new InputMarker(EasyMock.mock(Input.class), null, 0);
       outputKafka.write("value" + i, inputMarker);
     }
 

+ 256 - 0
ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/OutputManagerTest.java

@@ -0,0 +1,256 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.logfeeder.output;
+
+import static org.easymock.EasyMock.*;
+import static org.junit.Assert.*;
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.ambari.logfeeder.input.Input;
+import org.apache.ambari.logfeeder.input.InputMarker;
+import org.apache.ambari.logfeeder.metrics.MetricData;
+import org.junit.Test;
+
+public class OutputManagerTest {
+
+  @Test
+  public void testOutputManager_addAndRemoveOutputs() {
+    Output output1 = strictMock(Output.class);
+    Output output2 = strictMock(Output.class);
+    Output output3 = strictMock(Output.class);
+    Output output4 = strictMock(Output.class);
+    
+    replay(output1, output2, output3, output4);
+    
+    OutputManager manager = new OutputManager();
+    manager.add(output1);
+    manager.add(output2);
+    manager.add(output3);
+    
+    manager.retainUsedOutputs(Arrays.asList(output1, output2, output4));
+    
+    verify(output1, output2, output3, output4);
+    
+    List<Output> outputs = manager.getOutputs();
+    assertEquals(outputs.size(), 2);
+    assertEquals(outputs.get(0), output1);
+    assertEquals(outputs.get(1), output2);
+  }
+
+  @Test
+  public void testOutputManager_init() throws Exception {
+    Output output1 = strictMock(Output.class);
+    Output output2 = strictMock(Output.class);
+    Output output3 = strictMock(Output.class);
+    
+    output1.init(); expectLastCall();
+    output2.init(); expectLastCall();
+    output3.init(); expectLastCall();
+    
+    replay(output1, output2, output3);
+    
+    OutputManager manager = new OutputManager();
+    manager.add(output1);
+    manager.add(output2);
+    manager.add(output3);
+    
+    manager.init();
+    
+    verify(output1, output2, output3);
+  }
+
+  @Test
+  public void testOutputManager_write() throws Exception {
+    Map<String, Object> jsonObj = new HashMap<>();
+    jsonObj.put("type", "testType");
+    jsonObj.put("path", "testPath");
+    jsonObj.put("host", "testHost");
+    jsonObj.put("ip", "testIp");
+    jsonObj.put("level", "testLevel");
+    jsonObj.put("id", "testId");
+    
+    Input mockInput = strictMock(Input.class);
+    InputMarker inputMarker = new InputMarker(mockInput, null, 0);
+    
+    Output output1 = strictMock(Output.class);
+    Output output2 = strictMock(Output.class);
+    Output output3 = strictMock(Output.class);
+    
+    expect(mockInput.getContextFields()).andReturn(Collections.<String, String> emptyMap());
+    expect(mockInput.isUseEventMD5()).andReturn(false);
+    expect(mockInput.isGenEventMD5()).andReturn(false);
+    expect(mockInput.getOutputList()).andReturn(Arrays.asList(output1, output2, output3));
+    
+    output1.write(jsonObj, inputMarker); expectLastCall();
+    output2.write(jsonObj, inputMarker); expectLastCall();
+    output3.write(jsonObj, inputMarker); expectLastCall();
+    
+    replay(output1, output2, output3, mockInput);
+    
+    OutputManager manager = new OutputManager();
+    manager.add(output1);
+    manager.add(output2);
+    manager.add(output3);
+    
+    manager.write(jsonObj, inputMarker);
+    
+    verify(output1, output2, output3, mockInput);
+  }
+
+  @Test
+  public void testOutputManager_write2() throws Exception {
+    String jsonString = "{}";
+    
+    Input mockInput = strictMock(Input.class);
+    InputMarker inputMarker = new InputMarker(mockInput, null, 0);
+    
+    Output output1 = strictMock(Output.class);
+    Output output2 = strictMock(Output.class);
+    Output output3 = strictMock(Output.class);
+    
+    expect(mockInput.getOutputList()).andReturn(Arrays.asList(output1, output2, output3));
+    
+    output1.write(jsonString, inputMarker); expectLastCall();
+    output2.write(jsonString, inputMarker); expectLastCall();
+    output3.write(jsonString, inputMarker); expectLastCall();
+    
+    replay(output1, output2, output3, mockInput);
+    
+    OutputManager manager = new OutputManager();
+    manager.add(output1);
+    manager.add(output2);
+    manager.add(output3);
+    
+    manager.write(jsonString, inputMarker);
+    
+    verify(output1, output2, output3, mockInput);
+  }
+
+  @Test
+  public void testOutputManager_addMetricsContainers() throws Exception {
+    List<MetricData> metrics = new ArrayList<MetricData>();
+    
+    Output output1 = strictMock(Output.class);
+    Output output2 = strictMock(Output.class);
+    Output output3 = strictMock(Output.class);
+    
+    output1.addMetricsContainers(metrics); expectLastCall();
+    output2.addMetricsContainers(metrics); expectLastCall();
+    output3.addMetricsContainers(metrics); expectLastCall();
+    
+    replay(output1, output2, output3);
+    
+    OutputManager manager = new OutputManager();
+    manager.add(output1);
+    manager.add(output2);
+    manager.add(output3);
+    
+    manager.addMetricsContainers(metrics);
+    
+    verify(output1, output2, output3);
+  }
+
+  @Test
+  public void testOutputManager_logStat() throws Exception {
+    Output output1 = strictMock(Output.class);
+    Output output2 = strictMock(Output.class);
+    Output output3 = strictMock(Output.class);
+    
+    output1.logStat(); expectLastCall();
+    output2.logStat(); expectLastCall();
+    output3.logStat(); expectLastCall();
+    
+    replay(output1, output2, output3);
+    
+    OutputManager manager = new OutputManager();
+    manager.add(output1);
+    manager.add(output2);
+    manager.add(output3);
+    
+    manager.logStats();
+    
+    verify(output1, output2, output3);
+  }
+
+  @Test
+  public void testOutputManager_copyFile() throws Exception {
+    File f = new File("");
+    
+    Input mockInput = strictMock(Input.class);
+    InputMarker inputMarker = new InputMarker(mockInput, null, 0);
+    
+    Output output1 = strictMock(Output.class);
+    Output output2 = strictMock(Output.class);
+    Output output3 = strictMock(Output.class);
+    
+    expect(mockInput.getOutputList()).andReturn(Arrays.asList(output1, output2, output3));
+    
+    output1.copyFile(f, inputMarker); expectLastCall();
+    output2.copyFile(f, inputMarker); expectLastCall();
+    output3.copyFile(f, inputMarker); expectLastCall();
+    
+    replay(output1, output2, output3, mockInput);
+    
+    OutputManager manager = new OutputManager();
+    manager.add(output1);
+    manager.add(output2);
+    manager.add(output3);
+    
+    manager.copyFile(f, inputMarker);
+    
+    verify(output1, output2, output3, mockInput);
+  }
+
+  @Test
+  public void testOutputManager_close() throws Exception {
+    Output output1 = strictMock(Output.class);
+    Output output2 = strictMock(Output.class);
+    Output output3 = strictMock(Output.class);
+    
+    output1.setDrain(true); expectLastCall();
+    output2.setDrain(true); expectLastCall();
+    output3.setDrain(true); expectLastCall();
+    
+    output1.close(); expectLastCall();
+    output2.close(); expectLastCall();
+    output3.close(); expectLastCall();
+    
+    expect(output1.isClosed()).andReturn(true);
+    expect(output2.isClosed()).andReturn(true);
+    expect(output3.isClosed()).andReturn(true);
+    
+    replay(output1, output2, output3);
+    
+    OutputManager manager = new OutputManager();
+    manager.add(output1);
+    manager.add(output2);
+    manager.add(output3);
+    
+    manager.close();
+    
+    verify(output1, output2, output3);
+  }
+}

+ 7 - 10
ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/OutputS3FileTest.java

@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -69,15 +69,14 @@ public class OutputS3FileTest {
   @Test
   public void shouldSpoolLogEventToNewSpooler() throws Exception {
 
-    InputMarker inputMarker = mock(InputMarker.class);
     Input input = mock(Input.class);
-    inputMarker.input = input;
+    InputMarker inputMarker = new InputMarker(input, null, 0);
     expect(input.getFilePath()).andReturn("/var/log/hdfs-namenode.log");
     expect(input.getStringValue(OutputS3File.INPUT_ATTRIBUTE_TYPE)).andReturn("hdfs-namenode");
     final LogSpooler spooler = mock(LogSpooler.class);
     spooler.add("log event block");
     final S3Uploader s3Uploader = mock(S3Uploader.class);
-    replay(input, inputMarker, spooler, s3Uploader);
+    replay(input, spooler, s3Uploader);
 
     OutputS3File outputS3File = new OutputS3File() {
       @Override
@@ -98,16 +97,15 @@ public class OutputS3FileTest {
 
   @Test
   public void shouldReuseSpoolerForSamePath() throws Exception {
-    InputMarker inputMarker = mock(InputMarker.class);
     Input input = mock(Input.class);
-    inputMarker.input = input;
+    InputMarker inputMarker = new InputMarker(input, null, 0);
     expect(input.getFilePath()).andReturn("/var/log/hdfs-namenode.log");
     expect(input.getStringValue(OutputS3File.INPUT_ATTRIBUTE_TYPE)).andReturn("hdfs-namenode");
     final LogSpooler spooler = mock(LogSpooler.class);
     spooler.add("log event block1");
     spooler.add("log event block2");
     final S3Uploader s3Uploader = mock(S3Uploader.class);
-    replay(input, inputMarker, spooler, s3Uploader);
+    replay(input, spooler, s3Uploader);
 
     OutputS3File outputS3File = new OutputS3File() {
       private boolean firstCallComplete;
@@ -169,16 +167,15 @@ public class OutputS3FileTest {
 
   @Test
   public void shouldUploadFileOnRollover() throws Exception {
-    InputMarker inputMarker = mock(InputMarker.class);
     Input input = mock(Input.class);
-    inputMarker.input = input;
+    InputMarker inputMarker = new InputMarker(input, null, 0);
     expect(input.getFilePath()).andReturn("/var/log/hdfs-namenode.log");
     expect(input.getStringValue(OutputS3File.INPUT_ATTRIBUTE_TYPE)).andReturn("hdfs-namenode");
     final LogSpooler spooler = mock(LogSpooler.class);
     spooler.add("log event block1");
     final S3Uploader s3Uploader = mock(S3Uploader.class);
     s3Uploader.addFileForUpload("/var/ambari-logsearch/logfeeder/hdfs-namenode.log.gz");
-    replay(input, inputMarker, spooler, s3Uploader);
+    replay(input, spooler, s3Uploader);
 
     OutputS3File outputS3File = new OutputS3File() {
       @Override

+ 2 - 3
ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/OutputSolrTest.java

@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -97,8 +97,7 @@ public class OutputSolrTest {
         jsonObj.put("name" + ++count, "value" + ++count);
       jsonObj.put("id", ++count);
 
-      InputMarker inputMarker = new InputMarker();
-      inputMarker.input = EasyMock.mock(Input.class);
+      InputMarker inputMarker = new InputMarker(EasyMock.mock(Input.class), null, 0);
       outputSolr.write(jsonObj, inputMarker);
 
       SolrInputDocument doc = new SolrInputDocument();

+ 1 - 1
ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/S3LogPathResolverTest.java

@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information

+ 17 - 25
ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/S3UploaderTest.java

@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -18,7 +18,6 @@
 
 package org.apache.ambari.logfeeder.output;
 
-import org.apache.ambari.logfeeder.util.S3Util;
 import org.junit.Test;
 
 import java.io.File;
@@ -46,22 +45,20 @@ public class S3UploaderTest {
     Map<String, Object> configs = setupS3Configs();
 
     S3OutputConfiguration s3OutputConfiguration = new S3OutputConfiguration(configs);
-    S3Util s3Util = mock(S3Util.class);
-    String s3Key = String.format("%s/%s/%s.%s", TEST_PATH, LOG_TYPE, fileName, GZ);
-    s3Util.uploadFileTos3(TEST_BUCKET, s3Key, compressedFile, ACCESS_KEY_VALUE, SECRET_KEY_VALUE);
     expect(compressedFile.delete()).andReturn(true);
     expect(fileToUpload.delete()).andReturn(true);
-    replay(fileToUpload, compressedFile, s3Util);
+    replay(fileToUpload, compressedFile);
 
-    S3Uploader s3Uploader = new S3Uploader(s3OutputConfiguration, s3Util, true, LOG_TYPE) {
+    S3Uploader s3Uploader = new S3Uploader(s3OutputConfiguration, true, LOG_TYPE) {
       @Override
       protected File createCompressedFileForUpload(File fileToUpload, String compressionAlgo) {
         return compressedFile;
       }
+      protected void uploadFileToS3(String bucketName, String s3Key, File localFile, String accessKey, String secretKey) {
+      }
     };
     String resolvedPath = s3Uploader.uploadFile(fileToUpload, LOG_TYPE);
 
-    verify(s3Util);
     assertEquals("test_path/hdfs_namenode/hdfs_namenode.log.123343493473948.gz", resolvedPath);
   }
 
@@ -74,18 +71,17 @@ public class S3UploaderTest {
     Map<String, Object> configs = setupS3Configs();
 
     S3OutputConfiguration s3OutputConfiguration = new S3OutputConfiguration(configs);
-    S3Util s3Util = mock(S3Util.class);
-    String s3Key = String.format("%s/%s/%s.%s", TEST_PATH, LOG_TYPE, fileName, GZ);
-    s3Util.uploadFileTos3(TEST_BUCKET, s3Key, compressedFile, ACCESS_KEY_VALUE, SECRET_KEY_VALUE);
     expect(compressedFile.delete()).andReturn(true);
     expect(fileToUpload.delete()).andReturn(true);
-    replay(fileToUpload, compressedFile, s3Util);
+    replay(fileToUpload, compressedFile);
 
-    S3Uploader s3Uploader = new S3Uploader(s3OutputConfiguration, s3Util, true, LOG_TYPE) {
+    S3Uploader s3Uploader = new S3Uploader(s3OutputConfiguration, true, LOG_TYPE) {
       @Override
       protected File createCompressedFileForUpload(File fileToUpload, String compressionAlgo) {
         return compressedFile;
       }
+      protected void uploadFileToS3(String bucketName, String s3Key, File localFile, String accessKey, String secretKey) {
+      }
     };
     s3Uploader.uploadFile(fileToUpload, LOG_TYPE);
 
@@ -102,17 +98,16 @@ public class S3UploaderTest {
     Map<String, Object> configs = setupS3Configs();
 
     S3OutputConfiguration s3OutputConfiguration = new S3OutputConfiguration(configs);
-    S3Util s3Util = mock(S3Util.class);
-    String s3Key = String.format("%s/%s/%s.%s", TEST_PATH, LOG_TYPE, fileName, GZ);
-    s3Util.uploadFileTos3(TEST_BUCKET, s3Key, compressedFile, ACCESS_KEY_VALUE, SECRET_KEY_VALUE);
     expect(compressedFile.delete()).andReturn(true);
-    replay(fileToUpload, compressedFile, s3Util);
+    replay(fileToUpload, compressedFile);
 
-    S3Uploader s3Uploader = new S3Uploader(s3OutputConfiguration, s3Util, false, LOG_TYPE) {
+    S3Uploader s3Uploader = new S3Uploader(s3OutputConfiguration, false, LOG_TYPE) {
       @Override
       protected File createCompressedFileForUpload(File fileToUpload, String compressionAlgo) {
         return compressedFile;
       }
+      protected void uploadFileToS3(String bucketName, String s3Key, File localFile, String accessKey, String secretKey) {
+      }
     };
     s3Uploader.uploadFile(fileToUpload, LOG_TYPE);
 
@@ -131,22 +126,19 @@ public class S3UploaderTest {
 
 
     S3OutputConfiguration s3OutputConfiguration = new S3OutputConfiguration(configs);
-    S3Util s3Util = mock(S3Util.class);
-    String s3Key = String.format("%s/%s/%s/%s.%s", "cl1", TEST_PATH, LOG_TYPE, fileName, GZ);
-    s3Util.uploadFileTos3(TEST_BUCKET, s3Key, compressedFile, ACCESS_KEY_VALUE, SECRET_KEY_VALUE);
     expect(compressedFile.delete()).andReturn(true);
     expect(fileToUpload.delete()).andReturn(true);
-    replay(fileToUpload, compressedFile, s3Util);
+    replay(fileToUpload, compressedFile);
 
-    S3Uploader s3Uploader = new S3Uploader(s3OutputConfiguration, s3Util, true, LOG_TYPE) {
+    S3Uploader s3Uploader = new S3Uploader(s3OutputConfiguration, true, LOG_TYPE) {
       @Override
       protected File createCompressedFileForUpload(File fileToUpload, String compressionAlgo) {
         return compressedFile;
       }
+      protected void uploadFileToS3(String bucketName, String s3Key, File localFile, String accessKey, String secretKey) {
+      }
     };
     s3Uploader.uploadFile(fileToUpload, LOG_TYPE);
-
-    verify(s3Util);
   }
 
   private Map<String, Object> setupS3Configs() {

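The removed s3Key format strings still document the upload key layout these tests assert: path/logType/fileName.gz, with an optional cluster-name prefix in the multi-cluster variant ("cl1"). For example, the first test's expected key:

    String s3Key = String.format("%s/%s/%s.%s",
        "test_path", "hdfs_namenode", "hdfs_namenode.log.123343493473948", "gz");
    // -> "test_path/hdfs_namenode/hdfs_namenode.log.123343493473948.gz"
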
+ 1 - 1
ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/spool/LogSpoolerTest.java

@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information

+ 0 - 29
ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/util/AWSUtilTest.java

@@ -1,29 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.logfeeder.util;
-
-import org.apache.ambari.logfeeder.util.AWSUtil;
-
-public class AWSUtilTest {
-  public void testAWSUtil_getAwsUserName() throws Exception {
-    String S3_ACCESS_KEY = "S3_ACCESS_KEY";
-    String S3_SECRET_KEY = "S3_SECRET_KEY";
-    AWSUtil.INSTANCE.getAwsUserName(S3_ACCESS_KEY, S3_SECRET_KEY);
-  }
-}

+ 10 - 10
ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/util/PlaceholderUtilTest.java

@@ -1,6 +1,4 @@
-package org.apache.ambari.logfeeder.util;
-
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -17,6 +15,9 @@ package org.apache.ambari.logfeeder.util;
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
+package org.apache.ambari.logfeeder.util;
+
 import java.util.HashMap;
 
 import org.junit.Test;
@@ -26,19 +27,18 @@ import static org.junit.Assert.assertEquals;
 public class PlaceholderUtilTest {
   @Test
   public void testPlaceholderUtil_replaceVariables() {
-    HashMap<String, String> contextParam = new HashMap<String, String>();
     String hostName = "host1";
     String ip = "127.0.0.1";
     String clusterName = "test-cluster";
+    
+    HashMap<String, String> contextParam = new HashMap<String, String>();
     contextParam.put("host", hostName);
     contextParam.put("ip", ip);
     contextParam.put("cluster", clusterName);
-    String inputStr = "$CLUSTER/logfeeder/$HOST-$IP/logs";
-    String resultStr = PlaceholderUtil.replaceVariables(inputStr, contextParam);
+    
+    String resultStr = PlaceholderUtil.replaceVariables("$CLUSTER/logfeeder/$HOST-$IP/logs", contextParam);
     String expectedStr = clusterName + "/logfeeder/" + hostName + "-" + ip + "/logs";
-    assertEquals("Result string :" + resultStr
-        + " is not equal to exptected string :" + expectedStr, resultStr,
-        expectedStr);
+    
+    assertEquals("Result string :" + resultStr + " is not equal to exptected string :" + expectedStr, resultStr, expectedStr);
   }
-
 }

+ 2 - 2
ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/util/S3UtilTest.java

@@ -26,14 +26,14 @@ public class S3UtilTest {
   public void testS3Util_pathToBucketName() throws Exception {
     String s3Path = "s3://bucket_name/path/file.txt";
     String expectedBucketName = "bucket_name";
-    String actualBucketName = S3Util.INSTANCE.getBucketName(s3Path);
+    String actualBucketName = S3Util.getBucketName(s3Path);
     assertEquals(expectedBucketName, actualBucketName);
   }
 
   public void testS3Util_pathToS3Key() throws Exception {
     String s3Path = "s3://bucket_name/path/file.txt";
     String expectedS3key = "path/file.txt";
-    String actualS3key = S3Util.INSTANCE.getS3Key(s3Path);
+    String actualS3key = S3Util.getS3Key(s3Path);
     assertEquals(expectedS3key, actualS3key);
   }
 

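A hedged sketch of what the now-static S3Util helpers extract from an s3:// path, based only on the expected values in the test above (the implementation itself is not part of this diff):

    String s3Path = "s3://bucket_name/path/file.txt";
    String rest = s3Path.substring("s3://".length());
    String bucketName = rest.substring(0, rest.indexOf('/'));  // "bucket_name"
    String s3Key = rest.substring(rest.indexOf('/') + 1);      // "path/file.txt"
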
+ 20 - 0
ambari-logsearch/ambari-logsearch-logfeeder/src/test/resources/logfeeder.properties

@@ -0,0 +1,20 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+logfeeder.log.filter.enable=true
+logfeeder.solr.config.interval=5
+logfeeder.solr.zk_connect_string=some_connect_string
+logfeeder.metrics.collector.hosts=some_collector_host
+node.hostname=test_host_name