
AMBARI-15679. Initial commit for LogSearch module (oleewere)

oleewere 9 years ago
parent
commit
39c85bb825
100 changed files with 11735 additions and 0 deletions
  1. +16 -0  ambari-logsearch/README.md
  2. +2 -0  ambari-logsearch/ambari-logsearch-appender/.gitignore
  3. +13 -0  ambari-logsearch/ambari-logsearch-appender/build.properties
  4. +40 -0  ambari-logsearch/ambari-logsearch-appender/build.xml
  5. +84 -0  ambari-logsearch/ambari-logsearch-appender/pom.xml
  6. +50 -0  ambari-logsearch/ambari-logsearch-appender/src/main/java/org/apache/ambari/logsearch/appender/LogsearchAppender.java
  7. +73 -0  ambari-logsearch/ambari-logsearch-appender/src/main/java/org/apache/ambari/logsearch/appender/LogsearchConversion.java
  8. +63 -0  ambari-logsearch/ambari-logsearch-appender/src/main/java/org/apache/ambari/logsearch/appender/VBase.java
  9. +88 -0  ambari-logsearch/ambari-logsearch-appender/src/main/java/org/apache/ambari/logsearch/appender/VOutput.java
  10. +41 -0  ambari-logsearch/ambari-logsearch-appender/src/test/java/org/apache/ambari/logsearch/appender/AppTest.java
  11. +23 -0  ambari-logsearch/ambari-logsearch-appender/src/test/resources/log4j.properties
  12. +510 -0  ambari-logsearch/ambari-logsearch-assembly/pom.xml
  13. +21 -0  ambari-logsearch/ambari-logsearch-assembly/src/main/package/deb/appender/control
  14. +15 -0  ambari-logsearch/ambari-logsearch-assembly/src/main/package/deb/appender/postinst
  15. +15 -0  ambari-logsearch/ambari-logsearch-assembly/src/main/package/deb/appender/postrm
  16. +15 -0  ambari-logsearch/ambari-logsearch-assembly/src/main/package/deb/appender/posttrm
  17. +15 -0  ambari-logsearch/ambari-logsearch-assembly/src/main/package/deb/appender/preinst
  18. +15 -0  ambari-logsearch/ambari-logsearch-assembly/src/main/package/deb/appender/prerm
  19. +21 -0  ambari-logsearch/ambari-logsearch-assembly/src/main/package/deb/logfeeder/control
  20. +15 -0  ambari-logsearch/ambari-logsearch-assembly/src/main/package/deb/logfeeder/postinst
  21. +15 -0  ambari-logsearch/ambari-logsearch-assembly/src/main/package/deb/logfeeder/postrm
  22. +15 -0  ambari-logsearch/ambari-logsearch-assembly/src/main/package/deb/logfeeder/posttrm
  23. +15 -0  ambari-logsearch/ambari-logsearch-assembly/src/main/package/deb/logfeeder/preinst
  24. +15 -0  ambari-logsearch/ambari-logsearch-assembly/src/main/package/deb/logfeeder/prerm
  25. +22 -0  ambari-logsearch/ambari-logsearch-assembly/src/main/package/deb/portal/control
  26. +15 -0  ambari-logsearch/ambari-logsearch-assembly/src/main/package/deb/portal/postinst
  27. +15 -0  ambari-logsearch/ambari-logsearch-assembly/src/main/package/deb/portal/postrm
  28. +15 -0  ambari-logsearch/ambari-logsearch-assembly/src/main/package/deb/portal/preinst
  29. +15 -0  ambari-logsearch/ambari-logsearch-assembly/src/main/package/deb/portal/prerm
  30. +22 -0  ambari-logsearch/ambari-logsearch-assembly/src/main/package/deb/solr/control
  31. +15 -0  ambari-logsearch/ambari-logsearch-assembly/src/main/package/deb/solr/postinst
  32. +15 -0  ambari-logsearch/ambari-logsearch-assembly/src/main/package/deb/solr/postrm
  33. +15 -0  ambari-logsearch/ambari-logsearch-assembly/src/main/package/deb/solr/preinst
  34. +15 -0  ambari-logsearch/ambari-logsearch-assembly/src/main/package/deb/solr/prerm
  35. +36 -0  ambari-logsearch/ambari-logsearch-logfeeder/README.md
  36. +18 -0  ambari-logsearch/ambari-logsearch-logfeeder/build.properties
  37. +60 -0  ambari-logsearch/ambari-logsearch-logfeeder/build.xml
  38. +201 -0  ambari-logsearch/ambari-logsearch-logfeeder/pom.xml
  39. +19 -0  ambari-logsearch/ambari-logsearch-logfeeder/run.sh
  40. +110 -0  ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/AliasUtil.java
  41. +262 -0  ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/ConfigBlock.java
  42. +545 -0  ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/InputMgr.java
  43. +570 -0  ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/LogFeeder.java
  44. +74 -0  ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/LogFeederAMSClient.java
  45. +480 -0  ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/LogFeederUtil.java
  46. +33 -0  ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/MetricCount.java
  47. +185 -0  ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/MetricsMgr.java
  48. +163 -0  ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/MurmurHash.java
  49. +272 -0  ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/OutputMgr.java
  50. +223 -0  ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/Filter.java
  51. +351 -0  ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/FilterGrok.java
  52. +132 -0  ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/FilterKeyValue.java
  53. +49 -0  ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/JSONFilterCode.java
  54. +321 -0  ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/Input.java
  55. +562 -0  ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputFile.java
  56. +39 -0  ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputMarker.java
  57. +81 -0  ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/reader/GZIPReader.java
  58. +48 -0  ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/reader/LogsearchReaderFactory.java
  59. +171 -0  ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/FetchConfigFromSolr.java
  60. +39 -0  ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/LogFeederConstants.java
  61. +58 -0  ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/LogfeederScheduler.java
  62. +60 -0  ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/filter/ApplyLogFilter.java
  63. +56 -0  ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/filter/DefaultDataFilter.java
  64. +53 -0  ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/filter/FilterLogData.java
  65. +52 -0  ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/mapper/Mapper.java
  66. +99 -0  ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/mapper/MapperDate.java
  67. +72 -0  ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/mapper/MapperFieldName.java
  68. +76 -0  ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/mapper/MapperFieldValue.java
  69. +119 -0  ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/Output.java
  70. +48 -0  ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputData.java
  71. +138 -0  ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputFile.java
  72. +313 -0  ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputKafka.java
  73. +475 -0  ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputSolr.java
  74. +202 -0  ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/SolrUtil.java
  75. +90 -0  ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/view/VLogfeederFilter.java
  76. +55 -0  ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/view/VLogfeederFilterWrapper.java
  77. +95 -0  ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AbstractTimelineMetricsSink.java
  78. +79 -0  ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/hadoop/metrics2/sink/timeline/Precision.java
  79. +36 -0  ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/hadoop/metrics2/sink/timeline/PrecisionLimitExceededException.java
  80. +107 -0  ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/hadoop/metrics2/sink/timeline/SingleValuedTimelineMetric.java
  81. +188 -0  ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/hadoop/metrics2/sink/timeline/TimelineMetric.java
  82. +123 -0  ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/hadoop/metrics2/sink/timeline/TimelineMetrics.java
  83. +46 -0  ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/hadoop/metrics2/sink/timeline/UnableToConnectException.java
  84. +175 -0  ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/hadoop/metrics2/sink/timeline/cache/TimelineMetricsCache.java
  85. +62 -0  ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/hadoop/metrics2/sink/timeline/configuration/Configuration.java
  86. +106 -0  ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/hadoop/metrics2/sink/util/Servers.java
  87. +22 -0  ambari-logsearch/ambari-logsearch-logfeeder/src/main/package/deb/control/control
  88. +15 -0  ambari-logsearch/ambari-logsearch-logfeeder/src/main/package/deb/control/postinst
  89. +15 -0  ambari-logsearch/ambari-logsearch-logfeeder/src/main/package/deb/control/postrm
  90. +15 -0  ambari-logsearch/ambari-logsearch-logfeeder/src/main/package/deb/control/preinst
  91. +15 -0  ambari-logsearch/ambari-logsearch-logfeeder/src/main/package/deb/control/prerm
  92. +42 -0  ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/alias_config.json
  93. +995 -0  ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/config.json.j2
  94. +626 -0  ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/filters.config.json
  95. +28 -0  ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/global.config.json.j2
  96. +145 -0  ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/grok-patterns
  97. +284 -0  ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/input.config.json.j2
  98. +62 -0  ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/log4j.xml
  99. +60 -0  ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/log4j.xml.j2
  100. +25 -0  ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/logfeeder.properties

+ 16 - 0
ambari-logsearch/README.md

@@ -0,0 +1,16 @@
+# logsearch
+RPM/DPKG Build Process
+=============
+
+1. Check out the code from the Git repository
+
+2. From the logsearch root folder, execute one of the following Maven commands to build the RPM/DPKG packages:
+
+  $ mvn -Dbuild-rpm clean package
+
+  or
+
+  $ mvn -Dbuild-deb clean package
+
+3. The generated RPM/DPKG files can be found in the ambari-logsearch-assembly/target folder
+  

+ 2 - 0
ambari-logsearch/ambari-logsearch-appender/.gitignore

@@ -0,0 +1,2 @@
+logs/*
+target

+ 13 - 0
ambari-logsearch/ambari-logsearch-appender/build.properties

@@ -0,0 +1,13 @@
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+
+# log4j configuration used during build and unit tests

+ 40 - 0
ambari-logsearch/ambari-logsearch-appender/build.xml

@@ -0,0 +1,40 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<project basedir="." default="build" name="ambari-logsearch-appender">
+  <property environment="env" />
+  <property name="debuglevel" value="source,lines,vars" />
+  <dirname property="builddir" file="build.xml" />
+  <property name="target" value="1.7" />
+  <property name="source" value="1.7" />
+  <property file="local.properties" />
+  <property file="build.properties" />
+  <target name="init">
+  </target>
+  <target name="build" />
+  
+  <target name="package">
+    <delete dir="target/package" />
+    <copy todir="target/package/libs" includeEmptyDirs="no">
+      <fileset file="target/ambari-logsearch-appender.jar" />
+    </copy>
+    <tar compression="gzip" destfile="target/ambari-logsearch-appender.tar.gz">
+      <tarfileset dir="target/package" />
+    </tar>
+  </target>
+  <target description="Build all projects which reference this project. Useful to propagate changes." name="build-refprojects" />
+</project>

+ 84 - 0
ambari-logsearch/ambari-logsearch-appender/pom.xml

@@ -0,0 +1,84 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <artifactId>ambari-logsearch</artifactId>
+    <groupId>org.apache.ambari</groupId>
+    <version>2.0.0.0-SNAPSHOT</version>
+  </parent>
+  <groupId>org.apache.ambari</groupId>
+  <artifactId>ambari-logsearch-appender</artifactId>
+  <version>2.0.0.0-SNAPSHOT</version>
+  <packaging>jar</packaging>
+  <name>Ambari Logsearch Appender</name>
+
+  <url>http://maven.apache.org</url>
+
+  <properties>
+    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+    <maven.compiler.source>1.7</maven.compiler.source>
+    <maven.compiler.target>1.7</maven.compiler.target>
+    <jar.finalName>ambari-logsearch-appender</jar.finalName>
+  </properties>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-antrun-plugin</artifactId>
+        <version>1.7</version>
+        <executions>
+          <execution>
+            <phase>package</phase>
+            <configuration>
+              <target>
+                <ant antfile="build.xml">
+                  <target name="package"/>
+                </ant>
+              </target>
+            </configuration>
+            <goals>
+              <goal>run</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+
+  <dependencies>
+    <dependency>
+      <groupId>log4j</groupId>
+      <artifactId>log4j</artifactId>
+      <version>1.2.17</version>
+    </dependency>
+    <dependency>
+      <groupId>com.google.code.gson</groupId>
+      <artifactId>gson</artifactId>
+      <version>2.6.2</version>
+    </dependency>
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <version>3.8.1</version>
+      <scope>test</scope>
+    </dependency>
+  </dependencies>
+</project>

+ 50 - 0
ambari-logsearch/ambari-logsearch-appender/src/main/java/org/apache/ambari/logsearch/appender/LogsearchAppender.java

@@ -0,0 +1,50 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.appender;
+
+import org.apache.log4j.DailyRollingFileAppender;
+import org.apache.log4j.Layout;
+import org.apache.log4j.Logger;
+import org.apache.log4j.spi.LoggingEvent;
+
+public class LogsearchAppender extends DailyRollingFileAppender {
+  private static Logger logger = Logger.getLogger(LogsearchAppender.class);
+
+  public LogsearchAppender() {
+    logger.debug("Initializing LogsearchAppender........... ");
+  }
+
+  @Override
+  public void append(LoggingEvent event) {
+    super.append(event);
+  }
+
+  @Override
+  public void setLayout(Layout layout) {
+    super.setLayout(layout);
+  }
+
+  protected void subAppend(LoggingEvent event) {
+    this.qw.write(this.layout.format(event));
+    if (shouldFlush(event)) {
+      this.qw.flush();
+    }
+  }
+}
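
LogsearchAppender is a thin subclass of log4j 1.2's DailyRollingFileAppender, so besides the properties-based setup shown in the test resources below, it can be wired up programmatically with standard log4j setters. A minimal sketch, assuming the appender jar, log4j 1.2 and Gson are on the classpath; the demo class name is illustrative:

    import org.apache.ambari.logsearch.appender.LogsearchAppender;
    import org.apache.ambari.logsearch.appender.LogsearchConversion;
    import org.apache.log4j.Logger;

    public class AppenderDemo {  // hypothetical demo class, not part of this commit
      public static void main(String[] args) {
        LogsearchAppender appender = new LogsearchAppender();
        appender.setFile("target/logsearch/log.json");  // where the JSON lines are written
        appender.setDatePattern("'.'yyyy-MM-dd");       // daily rollover, as in the test config
        appender.setLayout(new LogsearchConversion());  // emits one JSON object per event
        appender.activateOptions();                     // opens the log file
        Logger.getRootLogger().addAppender(appender);
        Logger.getLogger(AppenderDemo.class).info("hello logsearch");
      }
    }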

+ 73 - 0
ambari-logsearch/ambari-logsearch-appender/src/main/java/org/apache/ambari/logsearch/appender/LogsearchConversion.java

@@ -0,0 +1,73 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.appender;
+
+import java.io.PrintWriter;
+import java.io.StringWriter;
+
+import org.apache.log4j.EnhancedPatternLayout;
+import org.apache.log4j.spi.LoggingEvent;
+
+public class LogsearchConversion extends EnhancedPatternLayout {
+  //
+  protected final int BUF_SIZE = 256;
+  protected final int MAX_CAPACITY = 1024;
+
+  private StringBuffer sbuf = new StringBuffer(BUF_SIZE);
+
+  private String newLine = System.getProperty("line.separator");
+
+  public LogsearchConversion() {
+  }
+
+  public String format(LoggingEvent event) {
+    if (sbuf.capacity() > MAX_CAPACITY) {
+      sbuf = new StringBuffer(BUF_SIZE);
+    } else {
+      sbuf.setLength(0);
+    }
+    String outputStr = createOutput(event);
+    sbuf.append(outputStr + newLine);
+    return sbuf.toString();
+  }
+
+  public String createOutput(LoggingEvent event) {
+    VOutput vOutput = new VOutput();
+    vOutput.setLevel(event.getLevel().toString());
+    vOutput.setFile(event.getLocationInformation().getFileName());
+    vOutput.setLine_number(Integer.parseInt(event.getLocationInformation().getLineNumber()));
+    String logmsg = event.getMessage() != null ? event.getMessage().toString() : "";
+    if (event.getThrowableInformation() != null && event.getThrowableInformation().getThrowable() != null) {
+      logmsg += newLine + stackTraceToString(event.getThrowableInformation().getThrowable());
+    }
+    vOutput.setLog_message(logmsg);
+    vOutput.setLogtime("" + event.getTimeStamp());
+    vOutput.setLogger_name("" + event.getLoggerName());
+    vOutput.setThread_name(event.getThreadName());
+    return vOutput.toJson();
+  }
+
+  public String stackTraceToString(Throwable e) {
+    StringWriter sw = new StringWriter();
+    PrintWriter pw = new PrintWriter(sw);
+    e.printStackTrace(pw);
+    return sw.toString();
+  }
+}
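
When an event carries a throwable, createOutput appends the full stack trace to log_message via the public stackTraceToString helper. A small sketch of that helper in isolation; the demo class is illustrative:

    import org.apache.ambari.logsearch.appender.LogsearchConversion;

    public class StackTraceDemo {  // hypothetical demo class, not part of this commit
      public static void main(String[] args) {
        LogsearchConversion layout = new LogsearchConversion();
        // Prints the same text that createOutput appends to log_message
        // after the message body when the event has a throwable attached.
        System.out.print(layout.stackTraceToString(new ClassCastException("test")));
      }
    }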

+ 63 - 0
ambari-logsearch/ambari-logsearch-appender/src/main/java/org/apache/ambari/logsearch/appender/VBase.java

@@ -0,0 +1,63 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.appender;
+
+import java.lang.reflect.Field;
+
+import org.apache.log4j.Logger;
+
+import com.google.gson.Gson;
+
+public class VBase {
+  private static Logger logger = Logger.getLogger(VBase.class);
+
+  /**
+   *
+   */
+  @Override
+  public String toString() {
+    @SuppressWarnings("rawtypes")
+    Class klass = this.getClass();
+    Field[] fields = klass.getDeclaredFields();
+    StringBuilder builder = new StringBuilder(klass.getSimpleName() + "={");
+    for (Field field : fields) {
+      try {
+        field.setAccessible(true);
+        Object fieldValue = field.get(this);
+        String fieldName = field.getName();
+        if (!fieldName.equalsIgnoreCase("serialVersionUID")) {
+          builder.append(fieldName + "={" + fieldValue + "} ");
+        }
+
+      } catch (Exception e) {
+        logger.error(e.getLocalizedMessage(), e);
+      }
+    }
+    builder.append("}");
+
+    return builder.toString();
+  }
+
+  public String toJson() {
+    Gson gson = new Gson();
+    String json = gson.toJson(this);
+    return json;
+  }
+}

+ 88 - 0
ambari-logsearch/ambari-logsearch-appender/src/main/java/org/apache/ambari/logsearch/appender/VOutput.java

@@ -0,0 +1,88 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.appender;
+
+public class VOutput extends VBase {
+
+  private String level;
+  private String file;
+  private String thread_name;
+  private int line_number;
+  private String log_message;
+  private String logger_name;
+  private String logtime;
+
+  public String getLevel() {
+    return level;
+  }
+
+  public void setLevel(String level) {
+    this.level = level;
+  }
+
+  public String getFile() {
+    return file;
+  }
+
+  public void setFile(String file) {
+    this.file = file;
+  }
+
+  public String getThread_name() {
+    return thread_name;
+  }
+
+  public void setThread_name(String thread_name) {
+    this.thread_name = thread_name;
+  }
+
+  public int getLine_number() {
+    return line_number;
+  }
+
+  public void setLine_number(int line_number) {
+    this.line_number = line_number;
+  }
+
+  public String getLog_message() {
+    return log_message;
+  }
+
+  public void setLog_message(String log_message) {
+    this.log_message = log_message;
+  }
+
+  public String getLogger_name() {
+    return logger_name;
+  }
+
+  public void setLogger_name(String logger_name) {
+    this.logger_name = logger_name;
+  }
+
+  public String getLogtime() {
+    return logtime;
+  }
+
+  public void setLogtime(String logtime) {
+    this.logtime = logtime;
+  }
+
+}
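
VOutput is a plain one-field-per-column bean; VBase.toJson serializes it with Gson, so the JSON keys match the snake_case field names exactly. A sketch of the resulting record shape, with illustrative values:

    import org.apache.ambari.logsearch.appender.VOutput;

    public class VOutputDemo {  // hypothetical demo class, not part of this commit
      public static void main(String[] args) {
        VOutput out = new VOutput();
        out.setLevel("ERROR");
        out.setFile("AppTest.java");
        out.setThread_name("main");
        out.setLine_number(31);
        out.setLog_message("error");
        out.setLogger_name("org.apache.ambari.logsearch.appender.AppTest");
        out.setLogtime("1459514400000");  // epoch millis, stored as a string
        // Prints roughly:
        // {"level":"ERROR","file":"AppTest.java","thread_name":"main","line_number":31,
        //  "log_message":"error","logger_name":"org.apache.ambari.logsearch.appender.AppTest",
        //  "logtime":"1459514400000"}
        System.out.println(out.toJson());
      }
    }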

+ 41 - 0
ambari-logsearch/ambari-logsearch-appender/src/test/java/org/apache/ambari/logsearch/appender/AppTest.java

@@ -0,0 +1,41 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.appender;
+
+import org.apache.log4j.Logger;
+
+public class AppTest {
+  private static Logger logger = Logger.getLogger(AppTest.class);
+
+  public static void main(String[] args) {
+
+    try {
+      throwException();
+    } catch (ClassCastException castException) {
+      logger.error("error", castException);
+    }
+
+  }
+
+  public static void throwException() {
+    throw new ClassCastException("test");
+
+  }
+}

+ 23 - 0
ambari-logsearch/ambari-logsearch-appender/src/test/resources/log4j.properties

@@ -0,0 +1,23 @@
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+
+# log4j configuration used during build and unit tests
+
+# Root logger option
+log4j.rootLogger=ALL, logsearchJson
+
+# Redirect log messages to the logsearch JSON appender
+log4j.appender.logsearchJson=org.apache.ambari.logsearch.appender.LogsearchAppender
+log4j.appender.logsearchJson.File=target/logsearch/log.json
+log4j.appender.logsearchJson.datePattern='.'yyyy-MM-dd
+log4j.appender.logsearchJson.Append=true
+log4j.appender.logsearchJson.layout=org.apache.ambari.logsearch.appender.LogsearchConversion
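
With this file on the test classpath, ordinary log4j calls are routed through LogsearchAppender and land in target/logsearch/log.json as JSON lines; AppTest above exercises exactly this path. A minimal sketch, with an illustrative demo class:

    import org.apache.log4j.Logger;

    public class PropertiesDemo {  // hypothetical demo class, not part of this commit
      private static final Logger LOG = Logger.getLogger(PropertiesDemo.class);

      public static void main(String[] args) {
        // Appended to target/logsearch/log.json as one JSON object, per the config above.
        LOG.info("service started");
      }
    }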

+ 510 - 0
ambari-logsearch/ambari-logsearch-assembly/pom.xml

@@ -0,0 +1,510 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <parent>
+    <artifactId>ambari-logsearch</artifactId>
+    <groupId>org.apache.ambari</groupId>
+    <version>2.0.0.0-SNAPSHOT</version>
+  </parent>
+  <name>Ambari Logsearch Assembly</name>
+  <url>http://maven.apache.org</url>
+  <modelVersion>4.0.0</modelVersion>
+  <properties>
+    <mapping.base.path>/usr/lib</mapping.base.path>
+    <solr.tar>http://apache.mirrors.lucidnetworks.net/lucene/solr/${solr.version}/solr-${solr.version}.tgz</solr.tar>
+    <solr.mapping.path>${mapping.base.path}/ambari-logsearch-solr</solr.mapping.path>
+    <solr.package.name>ambari-logsearch-solr</solr.package.name>
+    <logsearch.portal.package.name>ambari-logsearch-portal</logsearch.portal.package.name>
+    <logsearch.portal.mapping.path>${mapping.base.path}/ambari-logsearch-portal</logsearch.portal.mapping.path>
+    <logsearch.portal.dir>${project.basedir}/../ambari-logsearch-portal</logsearch.portal.dir>
+    <logsearch.logfeeder.package.name>ambari-logsearch-logfeeder</logsearch.logfeeder.package.name>
+    <logsearch.logfeeder.mapping.path>${mapping.base.path}/ambari-logsearch-logfeeder</logsearch.logfeeder.mapping.path>
+    <logsearch.logfeeder.dir>${project.basedir}/../ambari-logsearch-logfeeder</logsearch.logfeeder.dir>
+    <logsearch.appender.package.name>ambari-logsearch-appender</logsearch.appender.package.name>
+    <logsearch.appender.mapping.path>${mapping.base.path}/ambari-logsearch-appender</logsearch.appender.mapping.path>
+    <logsearch.appender.dir>${project.basedir}/../ambari-logsearch-appender</logsearch.appender.dir>
+    <logsearch.portal.conf.mapping.path>/etc/${logsearch.portal.package.name}/conf</logsearch.portal.conf.mapping.path>
+    <logsearch.logfeeder.conf.mapping.path>/etc/${logsearch.logfeeder.package.name}/conf
+    </logsearch.logfeeder.conf.mapping.path>
+  </properties>
+  <artifactId>ambari-logsearch-assembly</artifactId>
+  <profiles>
+
+    <profile>
+      <id>rpm</id>
+      <properties>
+        <download.solr>true</download.solr>
+      </properties>
+      <activation>
+        <property>
+          <name>build-rpm</name>
+        </property>
+      </activation>
+      <build>
+        <plugins>
+          <plugin>
+            <groupId>org.codehaus.mojo</groupId>
+            <artifactId>rpm-maven-plugin</artifactId>
+            <version>2.1.4</version>
+            <configuration>
+              <copyright>2012, Apache Software Foundation</copyright>
+              <group>Development</group>
+              <description>Maven Recipe: RPM Package.</description>
+              <autoRequires>false</autoRequires>
+              <prefix>/</prefix>
+              <needarch>x86_64</needarch>
+
+              <version>${package-version}</version>
+              <release>${package-release}</release>
+
+              <defaultUsername>root</defaultUsername>
+              <defaultGroupname>root</defaultGroupname>
+            </configuration>
+            <executions>
+              <execution>
+                <id>logsearch-solr</id>
+                <phase>package</phase>
+                <goals>
+                  <goal>rpm</goal>
+                </goals>
+                <configuration>
+                  <group>Development</group>
+                  <name>${solr.package.name}</name>
+                  <mappings>
+                    <mapping>
+                      <directory>${solr.mapping.path}</directory>
+                      <sources>
+                        <source>
+                          <location>${project.build.directory}/solr</location>
+                        </source>
+                      </sources>
+                    </mapping>
+                  </mappings>
+                </configuration>
+              </execution>
+              <execution>
+                <id>logsearch-portal</id>
+                <phase>package</phase>
+                <goals>
+                  <goal>rpm</goal>
+                </goals>
+                <configuration>
+                  <group>Development</group>
+                  <name>${logsearch.portal.package.name}</name>
+                  <mappings>
+                    <mapping>
+                      <directory>${logsearch.portal.mapping.path}</directory>
+                      <sources>
+                        <source>
+                          <location>${logsearch.portal.dir}/target/package</location>
+                          <excludes>
+                            <exclude>classes/log4j.xml</exclude>
+                            <exclude>classes/logsearch.properties</exclude>
+                            <exclude>classes/user_pass.json</exclude>
+                            <exclude>solr_configsets/**</exclude>
+                          </excludes>
+                        </source>
+                      </sources>
+                    </mapping>
+                    <mapping>
+                      <directory>${logsearch.portal.conf.mapping.path}</directory>
+                      <sources>
+                        <source>
+                          <location>${logsearch.portal.dir}/target/package/classes</location>
+                          <includes>
+                            <include>log4j.xml</include>
+                            <include>logsearch.properties</include>
+                            <include>user_pass.json</include>
+                          </includes>
+                        </source>
+                      </sources>
+                    </mapping>
+                    <mapping>
+                      <directory>${logsearch.portal.conf.mapping.path}/solr_configsets</directory>
+                      <sources>
+                        <source>
+                          <location>${logsearch.portal.dir}/target/package/solr_configsets</location>
+                        </source>
+                      </sources>
+                    </mapping>
+                  </mappings>
+                </configuration>
+              </execution>
+              <execution>
+                <id>logsearch-logfeeder</id>
+                <phase>package</phase>
+                <goals>
+                  <goal>rpm</goal>
+                </goals>
+                <configuration>
+                  <group>Development</group>
+                  <name>${logsearch.logfeeder.package.name}</name>
+                  <mappings>
+                    <mapping>
+                      <directory>${logsearch.logfeeder.mapping.path}</directory>
+                      <sources>
+                        <source>
+                          <location>${logsearch.logfeeder.dir}/target/package</location>
+                          <excludes>
+                            <exclude>classes/log4j.xml</exclude>
+                            <exclude>classes/logfeeder.properties</exclude>
+                            <exclude>classes/grok-patterns</exclude>
+                          </excludes>
+                        </source>
+                      </sources>
+                    </mapping>
+                    <mapping>
+                      <directory>${logsearch.logfeeder.conf.mapping.path}</directory>
+                      <sources>
+                        <source>
+                          <location>${logsearch.logfeeder.dir}/target/package/classes</location>
+                          <includes>
+                            <include>log4j.xml</include>
+                            <include>logfeeder.properties</include>
+                            <include>grok-patterns</include>
+                          </includes>
+                        </source>
+                      </sources>
+                    </mapping>
+                  </mappings>
+                </configuration>
+              </execution>
+              <execution>
+                <id>logsearch-appender</id>
+                <phase>package</phase>
+                <goals>
+                  <goal>rpm</goal>
+                </goals>
+                <configuration>
+                  <group>Development</group>
+                  <name>${logsearch.appender.package.name}</name>
+                  <mappings>
+                    <mapping>
+                      <directory>${logsearch.appender.mapping.path}</directory>
+                      <sources>
+                        <source>
+                          <location>${logsearch.appender.dir}/target/package</location>
+                        </source>
+                      </sources>
+                    </mapping>
+                  </mappings>
+                </configuration>
+              </execution>
+            </executions>
+          </plugin>
+          <plugin>
+            <groupId>org.apache.maven.plugins</groupId>
+            <artifactId>maven-antrun-plugin</artifactId>
+            <version>1.7</version>
+            <executions>
+              <execution>
+                <phase>generate-resources</phase>
+                <goals>
+                  <goal>run</goal>
+                </goals>
+                <configuration>
+                  <target name="Download Solr">
+                    <get
+                      src="${solr.tar}"
+                      dest="${project.build.directory}/solr.tgz"
+                      usetimestamp="true"
+                    />
+                    <untar
+                      src="${project.build.directory}/solr.tgz"
+                      dest="${project.build.directory}/"
+                      compression="gzip"
+                    />
+                    <move todir="${project.build.directory}/solr">
+                      <fileset dir="${project.build.directory}/solr-${solr.version}"/>
+                    </move>
+                    <chmod file="${project.build.directory}/solr/bin/**" perm="755"/>
+                    <chmod file="${project.build.directory}/solr/server/scripts/**" perm="755"/>
+                  </target>
+                </configuration>
+              </execution>
+            </executions>
+          </plugin>
+        </plugins>
+      </build>
+    </profile>
+    <profile>
+      <id>deb</id>
+
+      <activation>
+        <property>
+          <name>build-deb</name>
+        </property>
+      </activation>
+
+      <build>
+        <plugins>
+          <plugin>
+            <artifactId>maven-resources-plugin</artifactId>
+            <version>2.7</version>
+
+            <executions>
+              <execution>
+                <id>copy-resources</id>
+                <phase>package</phase>
+                <goals>
+                  <goal>copy-resources</goal>
+                </goals>
+                <configuration>
+                  <outputDirectory>${project.build.directory}/resources/deb</outputDirectory>
+                  <resources>
+                    <resource>
+                      <directory>${project.basedir}/src/main/package/deb</directory>
+                      <excludes>
+                        <exclude>solr/postinst</exclude>
+                        <exclude>portal/postinst</exclude>
+                        <exclude>logfeeder/postinst</exclude>
+                        <exclude>appender/postinst</exclude>
+                      </excludes>
+                      <filtering>false</filtering>
+                    </resource>
+                    <resource>
+                      <directory>${project.basedir}/src/main/package/deb</directory>
+                      <includes>
+                        <include>solr/postinst</include>
+                        <include>portal/postinst</include>
+                        <include>logfeeder/postinst</include>
+                        <include>appender/postinst</include>
+                      </includes>
+                      <filtering>true</filtering>
+                    </resource>
+                  </resources>
+                </configuration>
+              </execution>
+            </executions>
+          </plugin>
+          <plugin>
+            <groupId>org.vafer</groupId>
+            <artifactId>jdeb</artifactId>
+            <version>1.4</version>
+            <executions>
+              <execution>
+                <phase>package</phase>
+                <id>jdeb-solr</id>
+                <goals>
+                  <goal>jdeb</goal>
+                </goals>
+                <configuration>
+                  <controlDir>${basedir}/src/main/package/deb/solr</controlDir>
+                  <deb>${basedir}/target/${solr.package.name}_${package-version}-${package-release}.deb</deb>
+                  <skip>false</skip>
+                  <skipPOMs>false</skipPOMs>
+                  <dataSet>
+                    <data>
+                      <src>${project.build.directory}/solr</src>
+                      <type>directory</type>
+                      <mapper>
+                        <type>perm</type>
+                        <user>root</user>
+                        <group>root</group>
+                        <prefix>${solr.mapping.path}</prefix>
+                      </mapper>
+                    </data>
+                  </dataSet>
+                </configuration>
+              </execution>
+
+              <execution>
+                <id>jdeb-portal</id>
+                <phase>package</phase>
+                <goals>
+                  <goal>jdeb</goal>
+                </goals>
+                <configuration>
+                  <controlDir>${basedir}/src/main/package/deb/portal</controlDir>
+                  <deb>${basedir}/target/${logsearch.portal.package.name}_${package-version}-${package-release}.deb
+                  </deb>
+                  <skip>false</skip>
+                  <skipPOMs>false</skipPOMs>
+                  <dataSet>
+                    <data>
+                      <src>${logsearch.portal.dir}/target/ambari-logsearch-portal.tar.gz</src>
+                      <type>archive</type>
+                      <mapper>
+                        <prefix>${logsearch.portal.mapping.path}</prefix>
+                        <type>perm</type>
+                        <user>root</user>
+                        <group>root</group>
+                      </mapper>
+                      <excludes>
+                        classes/log4j.xml,classes/logsearch.properties,classes/user_pass.json,solr_configsets/**
+                      </excludes>
+                    </data>
+                    <data>
+                      <src>${logsearch.portal.dir}/target/package/classes</src>
+                      <type>directory</type>
+                      <mapper>
+                        <prefix>${logsearch.portal.conf.mapping.path}</prefix>
+                        <type>perm</type>
+                        <user>root</user>
+                        <group>root</group>
+                        <filemode>644</filemode>
+                      </mapper>
+                      <includes>
+                        log4j.xml,logsearch.properties,user_pass.json
+                      </includes>
+                    </data>
+                    <data>
+                      <src>${logsearch.portal.dir}/target/package/solr_configsets</src>
+                      <type>directory</type>
+                      <mapper>
+                        <prefix>${logsearch.portal.conf.mapping.path}/solr_configsets</prefix>
+                        <type>perm</type>
+                        <user>root</user>
+                        <group>root</group>
+                      </mapper>
+                    </data>
+                  </dataSet>
+                </configuration>
+              </execution>
+
+              <execution>
+                <id>jdeb-logfeeder</id>
+                <phase>package</phase>
+                <goals>
+                  <goal>jdeb</goal>
+                </goals>
+                <configuration>
+                  <controlDir>${basedir}/src/main/package/deb/logfeeder</controlDir>
+                  <deb>${basedir}/target/${logsearch.logfeeder.package.name}_${package-version}-${package-release}.deb
+                  </deb>
+                  <skip>false</skip>
+                  <skipPOMs>false</skipPOMs>
+                  <dataSet>
+                    <data>
+                      <src>${logsearch.logfeeder.dir}/target/ambari-logsearch-logfeeder.tgz</src>
+                      <type>archive</type>
+                      <mapper>
+                        <prefix>${logsearch.logfeeder.mapping.path}</prefix>
+                        <type>perm</type>
+                        <user>root</user>
+                        <group>root</group>
+                      </mapper>
+                      <excludes>
+                        classes/log4j.xml,classes/logfeeder.properties,classes/grok-patterns
+                      </excludes>
+                    </data>
+                    <data>
+                      <src>${logsearch.logfeeder.dir}/target/package/classes</src>
+                      <type>directory</type>
+                      <mapper>
+                        <prefix>${logsearch.logfeeder.conf.mapping.path}</prefix>
+                        <type>perm</type>
+                        <user>root</user>
+                        <group>root</group>
+                        <filemode>644</filemode>
+                      </mapper>
+                      <includes>
+                        log4j.xml,logfeeder.properties,grok-patterns
+                      </includes>
+                    </data>
+                  </dataSet>
+                </configuration>
+              </execution>
+
+              <execution>
+                <id>jdeb-appender</id>
+                <phase>package</phase>
+                <goals>
+                  <goal>jdeb</goal>
+                </goals>
+                <configuration>
+                  <controlDir>${basedir}/src/main/package/deb/appender</controlDir>
+                  <deb>${basedir}/target/${logsearch.appender.package.name}_${package-version}-${package-release}.deb
+                  </deb>
+                  <skip>false</skip>
+                  <skipPOMs>false</skipPOMs>
+                  <dataSet>
+                    <data>
+                      <src>${logsearch.appender.dir}/target/ambari-logsearch-appender.tar.gz</src>
+                      <type>archive</type>
+                      <mapper>
+                        <prefix>${logsearch.appender.mapping.path}</prefix>
+                        <type>perm</type>
+                        <user>root</user>
+                        <group>root</group>
+                      </mapper>
+                    </data>
+                  </dataSet>
+                </configuration>
+              </execution>
+
+            </executions>
+          </plugin>
+          <plugin>
+            <groupId>org.apache.maven.plugins</groupId>
+            <artifactId>maven-antrun-plugin</artifactId>
+            <version>1.7</version>
+            <executions>
+              <execution>
+                <phase>generate-resources</phase>
+                <goals>
+                  <goal>run</goal>
+                </goals>
+                <configuration>
+                  <target name="Download Solr">
+                    <get
+                      src="${solr.tar}"
+                      dest="${project.build.directory}/solr.tgz"
+                      usetimestamp="true"
+                    />
+                    <untar
+                      src="${project.build.directory}/solr.tgz"
+                      dest="${project.build.directory}/"
+                      compression="gzip"
+                    />
+                    <move todir="${project.build.directory}/solr">
+                      <fileset dir="${project.build.directory}/solr-${solr.version}"/>
+                    </move>
+                    <chmod file="${project.build.directory}/solr/bin/**" perm="755"/>
+                    <chmod file="${project.build.directory}/solr/server/scripts/**" perm="755"/>
+                  </target>
+                </configuration>
+              </execution>
+            </executions>
+          </plugin>
+        </plugins>
+      </build>
+    </profile>
+  </profiles>
+
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.ambari</groupId>
+      <artifactId>ambari-logsearch-portal</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.ambari</groupId>
+      <artifactId>ambari-logsearch-logfeeder</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.ambari</groupId>
+      <artifactId>ambari-logsearch-appender</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+  </dependencies>
+
+</project>

+ 21 - 0
ambari-logsearch/ambari-logsearch-assembly/src/main/package/deb/appender/control

@@ -0,0 +1,21 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License
+Package: [[logsearch.appender.package.name]]
+Version: [[package-version]]-[[package-release]]
+Section: [[deb.section]]
+Priority: [[deb.priority]]
+Architecture: [[deb.architecture]]
+Description: [[description]]
+Maintainer: [[deb.publisher]]

+ 15 - 0
ambari-logsearch/ambari-logsearch-assembly/src/main/package/deb/appender/postinst

@@ -0,0 +1,15 @@
+#!/bin/bash
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License

+ 15 - 0
ambari-logsearch/ambari-logsearch-assembly/src/main/package/deb/appender/postrm

@@ -0,0 +1,15 @@
+#!/bin/bash
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License

+ 15 - 0
ambari-logsearch/ambari-logsearch-assembly/src/main/package/deb/appender/posttrm

@@ -0,0 +1,15 @@
+#!/bin/bash
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License

+ 15 - 0
ambari-logsearch/ambari-logsearch-assembly/src/main/package/deb/appender/preinst

@@ -0,0 +1,15 @@
+#!/bin/bash
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License

+ 15 - 0
ambari-logsearch/ambari-logsearch-assembly/src/main/package/deb/appender/prerm

@@ -0,0 +1,15 @@
+#!/bin/bash
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License

+ 21 - 0
ambari-logsearch/ambari-logsearch-assembly/src/main/package/deb/logfeeder/control

@@ -0,0 +1,21 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+Package: [[logsearch.logfeeder.package.name]]
+Version: [[package-version]]-[[package-release]]
+Section: [[deb.section]]
+Priority: [[deb.priority]]
+Architecture: [[deb.architecture]]
+Description: [[description]]
+Maintainer: [[deb.publisher]]

+ 15 - 0
ambari-logsearch/ambari-logsearch-assembly/src/main/package/deb/logfeeder/postinst

@@ -0,0 +1,15 @@
+#!/bin/bash
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.

+ 15 - 0
ambari-logsearch/ambari-logsearch-assembly/src/main/package/deb/logfeeder/postrm

@@ -0,0 +1,15 @@
+#!/bin/bash
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.

+ 15 - 0
ambari-logsearch/ambari-logsearch-assembly/src/main/package/deb/logfeeder/posttrm

@@ -0,0 +1,15 @@
+#!/bin/bash
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.

+ 15 - 0
ambari-logsearch/ambari-logsearch-assembly/src/main/package/deb/logfeeder/preinst

@@ -0,0 +1,15 @@
+#!/bin/bash
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.

+ 15 - 0
ambari-logsearch/ambari-logsearch-assembly/src/main/package/deb/logfeeder/prerm

@@ -0,0 +1,15 @@
+#!/bin/bash
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.

+ 22 - 0
ambari-logsearch/ambari-logsearch-assembly/src/main/package/deb/portal/control

@@ -0,0 +1,22 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+Package: [[logsearch.portal.package.name]]
+Version: [[package-version]]-[[package-release]]
+Section: [[deb.section]]
+Priority: [[deb.priority]]
+Depends: [[deb.dependency.list]]
+Architecture: [[deb.architecture]]
+Description: [[description]]
+Maintainer: [[deb.publisher]]

+ 15 - 0
ambari-logsearch/ambari-logsearch-assembly/src/main/package/deb/portal/postinst

@@ -0,0 +1,15 @@
+#!/bin/bash
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.

+ 15 - 0
ambari-logsearch/ambari-logsearch-assembly/src/main/package/deb/portal/postrm

@@ -0,0 +1,15 @@
+#!/bin/bash
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.

+ 15 - 0
ambari-logsearch/ambari-logsearch-assembly/src/main/package/deb/portal/preinst

@@ -0,0 +1,15 @@
+#!/bin/bash
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.

+ 15 - 0
ambari-logsearch/ambari-logsearch-assembly/src/main/package/deb/portal/prerm

@@ -0,0 +1,15 @@
+#!/bin/bash
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.

+ 22 - 0
ambari-logsearch/ambari-logsearch-assembly/src/main/package/deb/solr/control

@@ -0,0 +1,22 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+Package: [[solr.package.name]]
+Version: [[package-version]]-[[package-release]]
+Section: [[deb.section]]
+Priority: [[deb.priority]]
+Depends: [[deb.dependency.list]]
+Architecture: [[deb.architecture]]
+Description: [[description]]
+Maintainer: [[deb.publisher]]

+ 15 - 0
ambari-logsearch/ambari-logsearch-assembly/src/main/package/deb/solr/postinst

@@ -0,0 +1,15 @@
+#!/bin/bash
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.

+ 15 - 0
ambari-logsearch/ambari-logsearch-assembly/src/main/package/deb/solr/postrm

@@ -0,0 +1,15 @@
+#!/bin/bash
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.

+ 15 - 0
ambari-logsearch/ambari-logsearch-assembly/src/main/package/deb/solr/preinst

@@ -0,0 +1,15 @@
+#!/bin/bash
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.

+ 15 - 0
ambari-logsearch/ambari-logsearch-assembly/src/main/package/deb/solr/prerm

@@ -0,0 +1,15 @@
+#!/bin/bash
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.

+ 36 - 0
ambari-logsearch/ambari-logsearch-logfeeder/README.md

@@ -0,0 +1,36 @@
+<!--
+{% comment %}
+Licensed to the Apache Software Foundation (ASF) under one or more
+contributor license agreements.  See the NOTICE file distributed with
+this work for additional information regarding copyright ownership.
+The ASF licenses this file to you under the Apache License, Version 2.0
+(the "License"); you may not use this file except in compliance with
+the License.  You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+{% endcomment %}
+-->
+
+# LogSearch - LogFeeder
+
+LogFeeder is a tool that reads logs, parses them, and stores them in Apache Solr for analysis.
+
+# Compilation
+```bash
+mvn clean compile package
+```
+
+# Deploy
+## Copy to remote
+Copy `target/logsearch-logfeeder.tgz` to the host machine.
+
+## Setup environment
+```bash
+mkdir /opt/logfeeder
+cd /opt/logfeeder
+tar xfz ~/logsearch-logfeeder.tgz
+```
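+
+## Run
+A minimal sketch of starting the Log Feeder after extraction. It assumes the
+packaged `run.sh` (copied from `src/main/scripts/run.sh` by the Ant `package`
+target) is the launcher; adapt the path if your layout differs.
+```bash
+# Hypothetical launch steps after extracting the package tarball.
+cd /opt/logfeeder
+./run.sh
+```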

+ 18 - 0
ambari-logsearch/ambari-logsearch-logfeeder/build.properties

@@ -0,0 +1,18 @@
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+
+app.pkg.dir=${app.work.dir}/pkg
+
+app.dev.war.dir=${app.work.dir}/webapps/logsearch
+app.war.name=logsearch.war
+
+app.target.dir=${builddir}/target/classes/webapps/app

+ 60 - 0
ambari-logsearch/ambari-logsearch-logfeeder/build.xml

@@ -0,0 +1,60 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<project basedir="." default="build" name="logfeeder">
+  <property environment="env"/>
+  <property name="debuglevel" value="source,lines,vars"/>
+  <dirname property="builddir" file="build.xml"/>
+  <property name="target" value="1.7"/>
+  <property name="source" value="1.7"/>
+  <property file="local.properties"/>
+  <property file="build.properties"/>
+  <target name="init">
+  </target>
+  <target name="build"/>
+
+  <target name="package">
+    <delete dir="target/package"/>
+    <copy todir="target/package/libs" includeEmptyDirs="no">
+      <fileset dir="target/libs"/>
+    </copy>
+    <copy todir="target/package/" includeEmptyDirs="no">
+      <fileset file="target/LogFeeder.jar"/>
+    </copy>
+    <copy todir="target/package/classes" includeEmptyDirs="no">
+      <fileset dir="target/classes"/>
+    </copy>
+    <copy todir="target/package" includeEmptyDirs="no">
+      <fileset file="config.json"/>
+    </copy>
+    <copy todir="target/package" includeEmptyDirs="no">
+      <fileset file="grok-patterns"/>
+      <fileset file="src/main/scripts/run.sh"/>
+    </copy>
+    <chmod file="target/package/run.sh" perm="755"/>
+    <tar compression="gzip" destfile="target/ambari-logsearch-logfeeder.tgz">
+      <tarfileset mode="755" dir="target/package">
+        <include name="*.sh"/>
+      </tarfileset>
+      <tarfileset mode="664" dir="target/package">
+        <exclude name="*.sh"/>
+      </tarfileset>
+    </tar>
+  </target>
+  <target description="Build all projects which reference this project. Useful to propagate changes."
+          name="build-refprojects"/>
+</project>

+ 201 - 0
ambari-logsearch/ambari-logsearch-logfeeder/pom.xml

@@ -0,0 +1,201 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+
+  <parent>
+    <artifactId>ambari-logsearch</artifactId>
+    <groupId>org.apache.ambari</groupId>
+    <version>2.0.0.0-SNAPSHOT</version>
+  </parent>
+  <modelVersion>4.0.0</modelVersion>
+
+  <artifactId>ambari-logsearch-logfeeder</artifactId>
+  <groupId>org.apache.ambari</groupId>
+  <version>2.0.0.0-SNAPSHOT</version>
+  <packaging>jar</packaging>
+  <name>Ambari Logsearch Log Feeder</name>
+  <url>http://maven.apache.org</url>
+
+
+  <properties>
+    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+  </properties>
+
+  <dependencies>
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <version>4.11</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>log4j</groupId>
+      <artifactId>log4j</artifactId>
+      <version>1.2.17</version>
+    </dependency>
+    <dependency>
+      <groupId>io.thekraken</groupId>
+      <artifactId>grok</artifactId>
+      <version>0.1.1</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.solr</groupId>
+      <artifactId>solr-solrj</artifactId>
+      <version>${solr.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.codehaus.jackson</groupId>
+      <artifactId>jackson-core-asl</artifactId>
+      <version>1.9.13</version>
+    </dependency>
+    <dependency>
+      <groupId>org.codehaus.jackson</groupId>
+      <artifactId>jackson-mapper-asl</artifactId>
+      <version>1.9.13</version>
+    </dependency>
+    <dependency>
+      <groupId>commons-logging</groupId>
+      <artifactId>commons-logging</artifactId>
+      <version>1.1.1</version>
+    </dependency>
+    <!-- commons-lang3 is used directly (StringUtils in ConfigBlock); declared
+         explicitly here rather than relying on a transitive version. The 3.4
+         version is an assumption. -->
+    <dependency>
+      <groupId>org.apache.commons</groupId>
+      <artifactId>commons-lang3</artifactId>
+      <version>3.4</version>
+    </dependency>
+
+    <dependency>
+      <groupId>com.google.guava</groupId>
+      <artifactId>guava</artifactId>
+      <version>18.0</version>
+    </dependency>
+    <dependency>
+      <groupId>org.slf4j</groupId>
+      <artifactId>slf4j-api</artifactId>
+      <version>1.7.7</version>
+    </dependency>
+    <dependency>
+      <groupId>org.slf4j</groupId>
+      <artifactId>slf4j-log4j12</artifactId>
+      <version>1.7.10</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.commons</groupId>
+      <artifactId>commons-csv</artifactId>
+      <version>1.2</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.kafka</groupId>
+      <artifactId>kafka-clients</artifactId>
+      <version>0.9.0.0</version>
+    </dependency>
+    <dependency>
+      <groupId>org.codehaus.jackson</groupId>
+      <artifactId>jackson-xc</artifactId>
+      <version>1.9.13</version>
+    </dependency>
+
+  </dependencies>
+  <build>
+    <finalName>LogFeeder</finalName>
+    <pluginManagement>
+      <plugins>
+        <plugin>
+          <artifactId>maven-compiler-plugin</artifactId>
+          <version>3.0</version>
+        </plugin>
+        <plugin>
+          <artifactId>maven-dependency-plugin</artifactId>
+          <version>2.8</version>
+        </plugin>
+      </plugins>
+    </pluginManagement>
+
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <version>3.3</version>
+        <configuration>
+          <source>1.7</source>
+          <target>1.7</target>
+        </configuration>
+      </plugin>
+
+      <!-- Exec main class plugin -->
+      <plugin>
+        <groupId>org.codehaus.mojo</groupId>
+        <artifactId>exec-maven-plugin</artifactId>
+        <version>1.2.1</version>
+        <executions>
+          <execution>
+            <goals>
+              <goal>java</goal>
+            </goals>
+          </execution>
+        </executions>
+        <configuration>
+          <mainClass>org.apache.ambari.logfeeder.LogFeeder</mainClass>
+          <!-- <arguments> <argument></argument> </arguments> -->
+        </configuration>
+      </plugin>
+      <!-- copy-dependencies -->
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-dependency-plugin</artifactId>
+        <version>2.8</version>
+        <executions>
+          <execution>
+            <id>copy-dependencies</id>
+            <phase>package</phase>
+            <goals>
+              <goal>copy-dependencies</goal>
+            </goals>
+            <configuration>
+              <outputAbsoluteArtifactFilename>true</outputAbsoluteArtifactFilename>
+              <outputDirectory>${basedir}/target/libs</outputDirectory>
+              <overWriteReleases>false</overWriteReleases>
+              <overWriteSnapshots>false</overWriteSnapshots>
+              <overWriteIfNewer>true</overWriteIfNewer>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+      <!-- ant package -->
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-antrun-plugin</artifactId>
+        <version>1.7</version>
+        <executions>
+          <execution>
+            <phase>package</phase>
+            <configuration>
+              <target>
+                <ant antfile="build.xml">
+                  <target name="package"/>
+                </ant>
+              </target>
+            </configuration>
+            <goals>
+              <goal>run</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+</project>

+ 19 - 0
ambari-logsearch/ambari-logsearch-logfeeder/run.sh

@@ -0,0 +1,19 @@
+#!/bin/bash
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+echo "LOG FEEDER"
+mvn clean compile
+mvn exec:java

+ 110 - 0
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/AliasUtil.java

@@ -0,0 +1,110 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logfeeder;
+
+import java.io.File;
+import java.util.HashMap;
+
+import org.apache.log4j.Logger;
+
+public class AliasUtil {
+
+  private static Logger logger = Logger.getLogger(AliasUtil.class);
+
+  private static AliasUtil instance = null;
+
+  private static String aliasConfigJson = "alias_config.json";
+
+  private HashMap<String, Object> aliasMap = null;
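+  // Expected shape of alias_config.json, inferred from getAliasInfo()/readAlias()
+  // below (the concrete aliases shipped with LogSearch may differ):
+  //   { "input":  { "<alias>": { "klass": "<class name>" } },
+  //     "filter": { ... }, "mapper": { ... }, "output": { ... } }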
+
+  public enum ALIAS_TYPE {
+    INPUT, FILTER, MAPPER, OUTPUT
+  }
+
+  public enum ALIAS_PARAM {
+    KLASS
+  }
+
+  /**
+   * Private constructor; use {@link #getInstance()} to obtain the singleton.
+   */
+  private AliasUtil() {
+    init();
+  }
+
+  /**
+   * @return the singleton instance, created lazily with double-checked locking
+   */
+  public static AliasUtil getInstance() {
+    if (instance == null) {
+      synchronized (AliasUtil.class) {
+        if (instance == null) {
+          instance = new AliasUtil();
+        }
+      }
+    }
+    return instance;
+  }
+
+  /**
+   * Loads the alias configuration JSON from the classpath, if present.
+   */
+  private void init() {
+    File jsonFile = LogFeederUtil.getFileFromClasspath(aliasConfigJson);
+    if (jsonFile != null) {
+      this.aliasMap = LogFeederUtil.readJsonFromFile(jsonFile);
+    }
+
+  }
+
+
+  /**
+   * @param key        the alias key to look up
+   * @param aliastype  the alias category (input, filter, mapper or output)
+   * @param aliasParam the parameter to read from the alias entry
+   * @return the resolved value, or the key itself when no alias is found
+   */
+  public String readAlias(String key, ALIAS_TYPE aliastype, ALIAS_PARAM aliasParam) {
+    String result = key; // key as the default value
+    HashMap<String, String> aliasInfo = getAliasInfo(key, aliastype);
+    String value = aliasInfo.get(aliasParam.name().toLowerCase());
+    if (value != null && !value.isEmpty()) {
+      result = value;
+      logger.debug("Alias found for key :" + key + ",  param :" + aliasParam.name().toLowerCase() + ", value :"
+        + value + " aliastype:" + aliastype.name());
+    } else {
+      logger.debug("Alias not found for key :" + key + ", param :" + aliasParam.name().toLowerCase());
+    }
+    return result;
+  }
+
+  @SuppressWarnings("unchecked")
+  public HashMap<String, String> getAliasInfo(String key, ALIAS_TYPE aliastype) {
+    HashMap<String, String> aliasInfo = null;
+    if (aliasMap != null) {
+      String typeKey = aliastype.name().toLowerCase();
+      HashMap<String, Object> typeJson = (HashMap<String, Object>) aliasMap.get(typeKey);
+      if (typeJson != null) {
+        aliasInfo = (HashMap<String, String>) typeJson.get(key);
+      }
+    }
+    if (aliasInfo == null) {
+      aliasInfo = new HashMap<String, String>();
+    }
+    return aliasInfo;
+  }
+}

+ 262 - 0
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/ConfigBlock.java

@@ -0,0 +1,262 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logfeeder;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.commons.lang3.StringUtils;
+import org.apache.log4j.Logger;
+import org.apache.log4j.Priority;
+
+public abstract class ConfigBlock {
+  static private Logger logger = Logger.getLogger(ConfigBlock.class);
+
+  boolean drain = false;
+
+  protected Map<String, Object> configs;
+  protected Map<String, String> contextFields = new HashMap<String, String>();
+  public MetricCount statMetric = new MetricCount();
+
+  public ConfigBlock() {
+    super();
+  }
+
+  /**
+   * Short, meaningful description used in log messages.
+   *
+   * @return a short description of this config block
+   */
+  public abstract String getShortDescription();
+
+  /**
+   * Every implementor needs to name the threads it creates.
+   *
+   * @return the name to use for threads created by this block
+   */
+  public String getNameForThread() {
+    return this.getClass().getSimpleName();
+  }
+
+  /**
+   * @param metricsList the list to which this block's metric containers are added
+   */
+  public void addMetricsContainers(List<MetricCount> metricsList) {
+    metricsList.add(statMetric);
+  }
+
+  /**
+   * This method should be overridden by derived classes.
+   *
+   * @throws Exception if initialization fails
+   */
+  public void init() throws Exception {
+
+  }
+
+  /**
+   * @param map the raw configuration block; entries under "add_fields" are
+   *            copied into the context fields
+   */
+  public void loadConfig(Map<String, Object> map) {
+    configs = LogFeederUtil.cloneObject(map);
+
+    // Extract fields from config block
+    Map<String, String> nvList = getNVList("add_fields");
+    if (nvList != null) {
+      contextFields.putAll(nvList);
+    }
+  }
+
+  public Map<String, Object> getConfigs() {
+    return configs;
+  }
+
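+  // A sketch of the "conditions" block that isEnabled() below evaluates; the
+  // field names and values here are hypothetical:
+  //   "conditions": { "fields": { "type": ["ambari_agent", "ambari_server"] } }
+  // The block is enabled when "is_enabled" is unset or true and, if conditions
+  // are present, at least one listed field value matches this block's own
+  // config fields or its "add_fields" entries.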
+  @SuppressWarnings("unchecked")
+  public boolean isEnabled() {
+    boolean isEnabled = getBooleanValue("is_enabled", true);
+    if (isEnabled) {
+      // Let's check for static conditions
+      Map<String, Object> conditions = (Map<String, Object>) configs
+        .get("conditions");
+      boolean allow = true;
+      if (conditions != null && conditions.size() > 0) {
+        allow = false;
+        for (String conditionType : conditions.keySet()) {
+          if (conditionType.equalsIgnoreCase("fields")) {
+            Map<String, Object> fields = (Map<String, Object>) conditions
+              .get("fields");
+            for (String fieldName : fields.keySet()) {
+              Object values = fields.get(fieldName);
+              if (values instanceof String) {
+                allow = isFieldConditionMatch(fieldName,
+                  (String) values);
+              } else {
+                List<String> listValues = (List<String>) values;
+                for (String stringValue : listValues) {
+                  allow = isFieldConditionMatch(fieldName,
+                    stringValue);
+                  if (allow) {
+                    break;
+                  }
+                }
+              }
+              if (allow) {
+                break;
+              }
+            }
+          }
+          if (allow) {
+            break;
+          }
+        }
+        isEnabled = allow;
+      }
+    }
+    return isEnabled;
+  }
+
+  public boolean isFieldConditionMatch(String fieldName, String stringValue) {
+    boolean allow = false;
+    String fieldValue = (String) configs.get(fieldName);
+    if (fieldValue != null && fieldValue.equalsIgnoreCase(stringValue)) {
+      allow = true;
+    } else {
+      @SuppressWarnings("unchecked")
+      Map<String, Object> addFields = (Map<String, Object>) configs
+        .get("add_fields");
+      if (addFields != null && addFields.get(fieldName) != null) {
+        String addFieldValue = (String) addFields.get(fieldName);
+        if (stringValue.equalsIgnoreCase(addFieldValue)) {
+          allow = true;
+        }
+      }
+
+    }
+    return allow;
+  }
+
+  /**
+   * @param key the config key holding a name-value map
+   * @return the name-value map stored under the key, or null if absent
+   */
+  @SuppressWarnings("unchecked")
+  public Map<String, String> getNVList(String key) {
+    return (Map<String, String>) configs.get(key);
+  }
+
+  public String getStringValue(String key) {
+    Object value = configs.get(key);
+    if (value != null && value.toString().equalsIgnoreCase("none")) {
+      value = null;
+    }
+    if (value != null) {
+      return value.toString();
+    }
+    return null;
+  }
+
+  public String getStringValue(String key, String defaultValue) {
+    Object value = configs.get(key);
+    if (value != null && value.toString().equalsIgnoreCase("none")) {
+      value = null;
+    }
+
+    if (value != null) {
+      return value.toString();
+    }
+    return defaultValue;
+  }
+
+  public Object getConfigValue(String key) {
+    return configs.get(key);
+  }
+
+  public boolean getBooleanValue(String key, boolean defaultValue) {
+    String strValue = getStringValue(key);
+    boolean retValue = defaultValue;
+    if (!StringUtils.isEmpty(strValue)) {
+      retValue = strValue.equalsIgnoreCase("true") || strValue.equalsIgnoreCase("yes");
+    }
+    return retValue;
+  }
+
+  public int getIntValue(String key, int defaultValue) {
+    String strValue = getStringValue(key);
+    int retValue = defaultValue;
+    if (!StringUtils.isEmpty(strValue)) {
+      try {
+        retValue = Integer.parseInt(strValue);
+      } catch (Throwable t) {
+        logger.error("Error parsing integer value. key=" + key
+          + ", value=" + strValue);
+      }
+    }
+    return retValue;
+  }
+
+  public Map<String, String> getContextFields() {
+    return contextFields;
+  }
+
+  public void incrementStat(int count) {
+    statMetric.count += count;
+  }
+
+  public void logStatForMetric(MetricCount metric, String prefixStr) {
+    LogFeederUtil.logStatForMetric(metric, prefixStr, ", key="
+      + getShortDescription());
+  }
+
+  synchronized public void logStat() {
+    logStatForMetric(statMetric, "Stat");
+  }
+
+  public boolean logConfgs(Priority level) {
+    if (level.toInt() == Priority.INFO_INT && !logger.isInfoEnabled()) {
+      return false;
+    }
+    if (level.toInt() == Priority.DEBUG_INT && !logger.isDebugEnabled()) {
+      return false;
+    }
+    logger.log(level, "Printing configuration Block="
+      + getShortDescription());
+    logger.log(level, "configs=" + configs);
+    logger.log(level, "contextFields=" + contextFields);
+    return true;
+  }
+
+  public boolean isDrain() {
+    return drain;
+  }
+
+  public void setDrain(boolean drain) {
+    this.drain = drain;
+  }
+
+}

+ 545 - 0
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/InputMgr.java

@@ -0,0 +1,545 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logfeeder;
+
+import java.io.EOFException;
+import java.io.File;
+import java.io.FileFilter;
+import java.io.IOException;
+import java.io.RandomAccessFile;
+import java.nio.file.FileSystems;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.nio.file.WatchEvent;
+import java.nio.file.WatchKey;
+import java.nio.file.WatchService;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.UUID;
+
+import static java.nio.file.StandardWatchEventKinds.*;
+
+import org.apache.ambari.logfeeder.input.Input;
+import org.apache.ambari.logfeeder.input.InputFile;
+import org.apache.commons.io.filefilter.WildcardFileFilter;
+import org.apache.log4j.Logger;
+import org.apache.solr.common.util.Base64;
+
+public class InputMgr {
+  static Logger logger = Logger.getLogger(InputMgr.class);
+
+  List<Input> inputList = new ArrayList<Input>();
+  Set<Input> notReadyList = new HashSet<Input>();
+
+  WatchService folderWatcher = null;
+  Set<File> foldersToMonitor = new HashSet<File>();
+  Map<String, Input> filesToMonitor = new HashMap<String, Input>();
+  boolean isDrain = false;
+  boolean isAnyInputTail = false;
+
+  private String checkPointSubFolderName = "logfeeder_checkpoints";
+  File checkPointFolderFile = null;
+
+  MetricCount filesCountMetric = new MetricCount();
+
+  private String checkPointExtension = ".cp";
+
+  public List<Input> getInputList() {
+    return inputList;
+  }
+
+  public void add(Input input) {
+    inputList.add(input);
+  }
+
+  /**
+   * @param input the input to remove from the input list
+   */
+  public void removeInput(Input input) {
+    logger.info("Trying to remove from inputList. "
+      + input.getShortDescription());
+    Iterator<Input> iter = inputList.iterator();
+    while (iter.hasNext()) {
+      Input iterInput = iter.next();
+      if (iterInput.equals(input)) {
+        logger.info("Removing Input from inputList. "
+          + input.getShortDescription());
+        iter.remove();
+      }
+    }
+  }
+
+  /**
+   * @return the number of inputs that are currently ready
+   */
+  public int getActiveFilesCount() {
+    int count = 0;
+    for (Input input : inputList) {
+      if (input.isReady()) {
+        count++;
+      }
+    }
+    return count;
+  }
+
+  public void init() {
+    filesCountMetric.metricsName = "input.files.count";
+    filesCountMetric.isPointInTime = true;
+
+    checkPointExtension = LogFeederUtil.getStringProperty(
+      "logfeeder.checkpoint.extension", checkPointExtension);
+    for (Input input : inputList) {
+      try {
+        input.init();
+        if (input.isTail()) {
+          isAnyInputTail = true;
+        }
+      } catch (Exception e) {
+        logger.error(
+          "Error initializing input. "
+            + input.getShortDescription(), e);
+      }
+    }
+
+    if (isAnyInputTail) {
+      logger.info("Determining valid checkpoint folder");
+      boolean isCheckPointFolderValid = false;
+      // We need to keep track of the files we are reading.
+      String checkPointFolder = LogFeederUtil
+        .getStringProperty("logfeeder.checkpoint.folder");
+      if (checkPointFolder != null && !checkPointFolder.isEmpty()) {
+        checkPointFolderFile = new File(checkPointFolder);
+        isCheckPointFolderValid = verifyCheckPointFolder(checkPointFolderFile);
+      }
+      if (!isCheckPointFolderValid) {
+        // Let's try home folder
+        String userHome = LogFeederUtil.getStringProperty("user.home");
+        if (userHome != null) {
+          checkPointFolderFile = new File(userHome,
+            checkPointSubFolderName);
+          logger.info("Checking if home folder can be used for checkpoints. Folder="
+            + checkPointFolderFile);
+          isCheckPointFolderValid = verifyCheckPointFolder(checkPointFolderFile);
+        }
+      }
+      if (!isCheckPointFolderValid) {
+        // Let's use tmp folder
+        String tmpFolder = LogFeederUtil
+          .getStringProperty("java.io.tmpdir");
+        if (tmpFolder == null) {
+          tmpFolder = "/tmp";
+        }
+        checkPointFolderFile = new File(tmpFolder,
+          checkPointSubFolderName);
+        logger.info("Checking if tmps folder can be used for checkpoints. Folder="
+          + checkPointFolderFile);
+        isCheckPointFolderValid = verifyCheckPointFolder(checkPointFolderFile);
+        if (isCheckPointFolderValid) {
+          logger.warn("Using tmp folder "
+            + checkPointFolderFile
+            + " to store check points. This is not recommended."
+            + "Please set logfeeder.checkpoint.folder property");
+        }
+      }
+
+      if (isCheckPointFolderValid) {
+        logger.warn("Using folder " + checkPointFolderFile
+          + " for storing checkpoints");
+      }
+    }
+
+  }
+
+  public File getCheckPointFolderFile() {
+    return checkPointFolderFile;
+  }
+
+  boolean verifyCheckPointFolder(File folderPathFile) {
+    if (!folderPathFile.exists()) {
+      // Create the folder
+      try {
+        if (!folderPathFile.mkdir()) {
+          logger.warn("Error creating folder for check point. folder="
+            + folderPathFile);
+        }
+      } catch (Throwable t) {
+        logger.warn("Error creating folder for check point. folder="
+          + folderPathFile, t);
+      }
+    }
+
+    if (folderPathFile.exists() && folderPathFile.isDirectory()) {
+      // Let's check whether we can create a file
+      File testFile = new File(folderPathFile, UUID.randomUUID()
+        .toString());
+      try {
+        testFile.createNewFile();
+        return testFile.delete();
+      } catch (IOException e) {
+        logger.warn(
+          "Couldn't create test file in "
+            + folderPathFile.getAbsolutePath()
+            + " for checkPoint", e);
+      }
+    }
+    return false;
+  }
+
+  public void monitor() {
+    for (Input input : inputList) {
+      if (input.isReady()) {
+        input.monitor();
+      } else {
+        if (input.isTail()) {
+          logger.info("Adding input to not ready list. Note, it is possible this component is not run on this host. So it might not be an issue. "
+            + input.getShortDescription());
+          notReadyList.add(input);
+        } else {
+          logger.info("Input is not ready, so going to ignore it "
+            + input.getShortDescription());
+        }
+      }
+    }
+    // Start the monitoring thread if any file is in tail mode
+    if (isAnyInputTail) {
+      Thread monitorThread = new Thread("InputIsReadyMonitor") {
+        @Override
+        public void run() {
+          logger.info("Going to monitor for these missing files: "
+            + notReadyList.toString());
+          while (true) {
+            if (isDrain) {
+              logger.info("Exiting missing file monitor.");
+              break;
+            }
+            try {
+              Iterator<Input> iter = notReadyList.iterator();
+              while (iter.hasNext()) {
+                Input input = iter.next();
+                try {
+                  if (input.isReady()) {
+                    input.monitor();
+                    iter.remove();
+                  }
+                } catch (Throwable t) {
+                  logger.error("Error while enabling monitoring for input. "
+                    + input.getShortDescription());
+                }
+              }
+              Thread.sleep(30 * 1000);
+            } catch (Throwable t) {
+              // Ignore
+            }
+          }
+        }
+      };
+      monitorThread.start();
+    }
+  }
+
+  public void addToNotReady(Input notReadyInput) {
+    notReadyList.add(notReadyInput);
+  }
+
+  public void addMetricsContainers(List<MetricCount> metricsList) {
+    for (Input input : inputList) {
+      input.addMetricsContainers(metricsList);
+    }
+    filesCountMetric.count = getActiveFilesCount();
+    metricsList.add(filesCountMetric);
+  }
+
+  /**
+   * Logs per-input statistics and the monitored-files count metric.
+   */
+  public void logStats() {
+    for (Input input : inputList) {
+      input.logStat();
+    }
+
+    filesCountMetric.count = getActiveFilesCount();
+    LogFeederUtil.logStatForMetric(filesCountMetric,
+      "Stat: Files Monitored Count", null);
+  }
+
+  public void close() {
+    for (Input input : inputList) {
+      try {
+        input.setDrain(true);
+      } catch (Throwable t) {
+        logger.error(
+          "Error while draining. input="
+            + input.getShortDescription(), t);
+      }
+    }
+    isDrain = true;
+
+    // Need to get this value from property
+    int iterations = 30;
+    int waitTimeMS = 1000;
+    int i = 0;
+    boolean allClosed = true;
+    for (i = 0; i < iterations; i++) {
+      allClosed = true;
+      for (Input input : inputList) {
+        if (!input.isClosed()) {
+          try {
+            allClosed = false;
+            logger.warn("Waiting for input to close. "
+              + input.getShortDescription() + ", "
+              + (iterations - i) + " more seconds");
+            Thread.sleep(waitTimeMS);
+          } catch (Throwable t) {
+            // Ignore
+          }
+        }
+      }
+      if (allClosed) {
+        break;
+      }
+    }
+    if (!allClosed) {
+      logger.warn("Some inputs were not closed. Iterations=" + i);
+      for (Input input : inputList) {
+        if (!input.isClosed()) {
+          logger.warn("Input not closed. Will ignore it."
+            + input.getShortDescription());
+        }
+      }
+    } else {
+      logger.info("All inputs are closed. Iterations=" + i);
+    }
+
+  }
+
+  public void checkInAll() {
+    for (Input input : inputList) {
+      input.checkIn();
+    }
+  }
+
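+  // Checkpoint file layout assumed by the reader below: a 4-byte int length
+  // prefix followed by that many bytes of JSON, containing at least
+  // "file_path" and "file_key" (the Base64-encoded key of the log file).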
+  public void cleanCheckPointFiles() {
+
+    if (checkPointFolderFile == null) {
+      logger.info("Will not clean checkPoint files. checkPointFolderFile="
+        + checkPointFolderFile);
+      return;
+    }
+    logger.info("Cleaning checkPoint files. checkPointFolderFile="
+      + checkPointFolderFile.getAbsolutePath());
+    try {
+      // Loop over the check point files and if filePath is not present,
+      // then
+      // move to closed
+      String searchPath = "*" + checkPointExtension;
+      FileFilter fileFilter = new WildcardFileFilter(searchPath);
+      File[] checkPointFiles = checkPointFolderFile.listFiles(fileFilter);
+      int totalCheckFilesDeleted = 0;
+      for (File checkPointFile : checkPointFiles) {
+        RandomAccessFile checkPointReader = null;
+        try {
+          checkPointReader = new RandomAccessFile(checkPointFile, "r");
+
+          int contentSize = checkPointReader.readInt();
+          byte b[] = new byte[contentSize];
+          int readSize = checkPointReader.read(b, 0, contentSize);
+          if (readSize != contentSize) {
+            logger.error("Couldn't read expected number of bytes from checkpoint file. expected="
+              + contentSize
+              + ", read="
+              + readSize
+              + ", checkPointFile=" + checkPointFile);
+          } else {
+            // Create JSON string
+            String jsonCheckPointStr = new String(b, 0, readSize);
+            Map<String, Object> jsonCheckPoint = LogFeederUtil
+              .toJSONObject(jsonCheckPointStr);
+
+            String logFilePath = (String) jsonCheckPoint
+              .get("file_path");
+            String logFileKey = (String) jsonCheckPoint
+              .get("file_key");
+            if (logFilePath != null && logFileKey != null) {
+              boolean deleteCheckPointFile = false;
+              File logFile = new File(logFilePath);
+              if (logFile.exists()) {
+                Object fileKeyObj = InputFile
+                  .getFileKey(logFile);
+                String fileBase64 = Base64
+                  .byteArrayToBase64(fileKeyObj
+                    .toString().getBytes());
+                if (!logFileKey.equals(fileBase64)) {
+                  deleteCheckPointFile = true;
+                  logger.info("CheckPoint clean: File key has changed. old="
+                    + logFileKey
+                    + ", new="
+                    + fileBase64
+                    + ", filePath="
+                    + logFilePath
+                    + ", checkPointFile="
+                    + checkPointFile.getAbsolutePath());
+                }
+              } else {
+                logger.info("CheckPoint clean: Log file doesn't exist. filePath="
+                  + logFilePath
+                  + ", checkPointFile="
+                  + checkPointFile.getAbsolutePath());
+                deleteCheckPointFile = true;
+              }
+              if (deleteCheckPointFile) {
+                logger.info("Deleting CheckPoint file="
+                  + checkPointFile.getAbsolutePath()
+                  + ", logFile=" + logFilePath);
+                checkPointFile.delete();
+                totalCheckFilesDeleted++;
+              }
+            }
+          }
+        } catch (EOFException eof) {
+          logger.warn("Caught EOFException. Ignoring reading existing checkPoint file. "
+            + checkPointFile);
+        } catch (Throwable t) {
+          logger.error("Error while checking checkPoint file. "
+            + checkPointFile, t);
+        } finally {
+          if (checkPointReader != null) {
+            try {
+              checkPointReader.close();
+            } catch (Throwable t) {
+              logger.error("Error closing checkPoint file. "
+                + checkPointFile, t);
+            }
+          }
+        }
+      }
+      logger.info("Deleted " + totalCheckFilesDeleted
+        + " checkPoint file(s). checkPointFolderFile="
+        + checkPointFolderFile.getAbsolutePath());
+
+    } catch (Throwable t) {
+      logger.error("Error while cleaning checkPointFiles", t);
+    }
+  }
+
+  synchronized public void monitorSystemFileChanges(Input inputToMonitor) {
+    try {
+      File fileToMonitor = new File(inputToMonitor.getFilePath());
+      if (filesToMonitor.containsKey(fileToMonitor.getAbsolutePath())) {
+        logger.info("Already monitoring file " + fileToMonitor
+          + ". So ignoring this request");
+        return;
+      }
+
+      // make a new watch service that we can register interest in
+      // directories and files with.
+      if (folderWatcher == null) {
+        folderWatcher = FileSystems.getDefault().newWatchService();
+        // start the file watcher thread below
+        Thread th = new Thread(new FileSystemMonitor(),
+          "FileSystemWatcher");
+        th.setDaemon(true);
+        th.start();
+
+      }
+      File folderToWatch = fileToMonitor.getParentFile();
+      if (folderToWatch != null) {
+        if (foldersToMonitor.contains(folderToWatch)) {
+          logger.info("Already monitoring folder " + folderToWatch
+            + ". So ignoring this request.");
+        } else {
+          logger.info("Configuring to monitor folder "
+            + folderToWatch + " for file " + fileToMonitor);
+          // get the directory we want to watch, using the Paths helper class
+          Path toWatch = Paths.get(folderToWatch.getAbsolutePath());
+          if (toWatch == null) {
+            throw new UnsupportedOperationException(
+              "Directory not found. folder=" + folderToWatch);
+          }
+
+          toWatch.register(folderWatcher, ENTRY_CREATE);
+          foldersToMonitor.add(folderToWatch);
+        }
+        filesToMonitor.put(fileToMonitor.getAbsolutePath(),
+          inputToMonitor);
+      } else {
+        logger.error("File doesn't have parent folder." + fileToMonitor);
+      }
+    } catch (IOException e) {
+      logger.error("Error while trying to set watcher for file:"
+        + inputToMonitor);
+    }
+
+  }
+
+  class FileSystemMonitor implements Runnable {
+    /*
+     * (non-Javadoc)
+     * 
+     * @see java.lang.Runnable#run()
+     */
+    @Override
+    public void run() {
+      try {
+        // get the first event before looping
+        WatchKey key = folderWatcher.take();
+        while (key != null) {
+          Path dir = (Path) key.watchable();
+          // we have a polled event, now we traverse it and
+          // receive all the states from it
+          for (WatchEvent<?> event : key.pollEvents()) {
+            if (!event.kind().equals(ENTRY_CREATE)) {
+              logger.info("Ignoring event.kind=" + event.kind());
+              continue;
+            }
+            logger.info("Received " + event.kind()
+              + " event for file " + event.context());
+
+            File newFile = new File(dir.toFile(), event.context()
+              .toString());
+            Input rolledOverInput = filesToMonitor.get(newFile
+              .getAbsolutePath());
+            if (rolledOverInput == null) {
+              logger.info("Input not found for file " + newFile);
+            } else {
+              rolledOverInput.rollOver();
+            }
+          }
+          if (!key.reset()) {
+            logger.error("Error while key.reset(). Will have to abort watching files. Rollover will not work.");
+            break;
+          }
+          key = folderWatcher.take();
+        }
+      } catch (InterruptedException e) {
+        logger.info("Stop request for thread");
+      }
+      logger.info("Exiting FileSystemMonitor thread.");
+    }
+
+  }
+
+}

+ 570 - 0
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/LogFeeder.java

@@ -0,0 +1,570 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logfeeder;
+
+import java.io.BufferedInputStream;
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.InputStreamReader;
+import java.lang.reflect.Type;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.ambari.logfeeder.AliasUtil.ALIAS_PARAM;
+import org.apache.ambari.logfeeder.AliasUtil.ALIAS_TYPE;
+import org.apache.ambari.logfeeder.filter.Filter;
+import org.apache.ambari.logfeeder.input.Input;
+import org.apache.ambari.logfeeder.logconfig.LogfeederScheduler;
+import org.apache.ambari.logfeeder.output.Output;
+import org.apache.log4j.Level;
+import org.apache.log4j.Logger;
+
+import com.google.gson.reflect.TypeToken;
+
+public class LogFeeder {
+  static Logger logger = Logger.getLogger(LogFeeder.class);
+
+  // List<Input> inputList = new ArrayList<Input>();
+  Collection<Output> outputList = new ArrayList<Output>();
+
+  OutputMgr outMgr = new OutputMgr();
+  InputMgr inputMgr = new InputMgr();
+  MetricsMgr metricsMgr = new MetricsMgr();
+
+  Map<String, Object> globalMap = null;
+  String[] inputParams;
+
+  List<Map<String, Object>> globalConfigList = new ArrayList<Map<String, Object>>();
+  List<Map<String, Object>> inputConfigList = new ArrayList<Map<String, Object>>();
+  List<Map<String, Object>> filterConfigList = new ArrayList<Map<String, Object>>();
+  List<Map<String, Object>> outputConfigList = new ArrayList<Map<String, Object>>();
+
+  int checkPointCleanIntervalMS = 24 * 60 * 60 * 1000; // 24 hours
+  long lastCheckPointCleanedMS = 0;
+
+  public LogFeeder(String[] args) {
+    inputParams = args;
+  }
+
+  public void init() throws Throwable {
+
+    // Load properties
+    LogFeederUtil.loadProperties("logfeeder.properties", inputParams);
+
+    // Load the config files
+    String configFiles = LogFeederUtil.getStringProperty("config.files");
+    if (configFiles == null) {
+      configFiles = LogFeederUtil.getStringProperty("config.file",
+        "config.json");
+    }
+    logger.info("config.files=" + configFiles);
+    String[] configFileList = configFiles.split(",");
+    for (String configFileName : configFileList) {
+      logger.info("Going to load config file: " + configFileName);
+      File configFile = new File(configFileName);
+      if (configFile.exists() && configFile.isFile()) {
+        logger.info("Config file exists. path="
+          + configFile.getAbsolutePath());
+        loadConfigsUsingFile(configFile);
+      } else {
+        // Let's try to load it from the classloader
+        logger.info("Trying to load config file from classloader: "
+          + configFileName);
+        loadConfigsUsingClassLoader(configFileName);
+        logger.info("Loaded config file from classloader: "
+          + configFileName);
+      }
+    }
+    mergeAllConfigs();
+    outMgr.setOutputList(outputList);
+    for (Output output : outputList) {
+      output.init();
+    }
+    inputMgr.init();
+    metricsMgr.init();
+    // Start the scheduler that periodically fetches config from Solr
+    LogfeederScheduler.INSTANCE.start();
+  }
+
+  void loadConfigsUsingClassLoader(String configFileName) throws Exception {
+    BufferedInputStream fileInputStream = (BufferedInputStream) this
+      .getClass().getClassLoader()
+      .getResourceAsStream(configFileName);
+    if (fileInputStream != null) {
+      BufferedReader br = new BufferedReader(new InputStreamReader(
+        fileInputStream));
+      String configData = readFile(br);
+      loadConfigs(configData);
+    } else {
+      throw new Exception("Can't find configFile=" + configFileName);
+    }
+  }
+
+  /**
+   * Loads the configuration from the given file.
+   *
+   * @param configFile the JSON config file to read
+   * @throws Exception if the file cannot be read
+   */
+  void loadConfigsUsingFile(File configFile) throws Exception {
+    FileInputStream fileInputStream = null;
+    try {
+      fileInputStream = new FileInputStream(configFile);
+      BufferedReader br = new BufferedReader(new InputStreamReader(
+        fileInputStream));
+      String configData = readFile(br);
+      loadConfigs(configData);
+    } catch (Exception t) {
+      logger.error("Error opening config file. configFilePath="
+        + configFile.getAbsolutePath());
+      throw t;
+    } finally {
+      if (fileInputStream != null) {
+        try {
+          fileInputStream.close();
+        } catch (Throwable t) {
+          // ignore
+        }
+      }
+    }
+  }
+
+  @SuppressWarnings("unchecked")
+  void loadConfigs(String configData) throws Exception {
+    Type type = new TypeToken<Map<String, Object>>() {
+    }.getType();
+    Map<String, Object> configMap = LogFeederUtil.getGson().fromJson(
+      configData, type);
+
+    // Get the globals
+    for (String key : configMap.keySet()) {
+      if (key.equalsIgnoreCase("global")) {
+        globalConfigList.add((Map<String, Object>) configMap.get(key));
+      } else if (key.equalsIgnoreCase("input")) {
+        List<Map<String, Object>> mapList = (List<Map<String, Object>>) configMap
+          .get(key);
+        inputConfigList.addAll(mapList);
+      } else if (key.equalsIgnoreCase("filter")) {
+        List<Map<String, Object>> mapList = (List<Map<String, Object>>) configMap
+          .get(key);
+        filterConfigList.addAll(mapList);
+      } else if (key.equalsIgnoreCase("output")) {
+        List<Map<String, Object>> mapList = (List<Map<String, Object>>) configMap
+          .get(key);
+        outputConfigList.addAll(mapList);
+      }
+    }
+
+  }
+
+  /**
+   * Merges the global config into each block and wires up outputs, inputs
+   * and filters.
+   */
+  private void mergeAllConfigs() {
+    globalMap = mergeConfigs(globalConfigList);
+
+    // Sort the filter blocks
+    sortBlocks(filterConfigList);
+    // First loop for output
+    for (Map<String, Object> map : outputConfigList) {
+      if (map == null) {
+        continue;
+      }
+      mergeBlocks(globalMap, map);
+
+      String value = (String) map.get("destination");
+      Output output;
+      if (value == null || value.isEmpty()) {
+        logger.error("Output block doesn't have destination element");
+        continue;
+      }
+      String classFullName = AliasUtil.getInstance().readAlias(value, ALIAS_TYPE.OUTPUT, ALIAS_PARAM.KLASS);
+      if (classFullName == null || classFullName.isEmpty()) {
+        logger.error("Destination block doesn't have output element");
+        continue;
+      }
+      output = (Output) LogFeederUtil.getClassInstance(classFullName, ALIAS_TYPE.OUTPUT);
+
+      if (output == null) {
+        logger.error("Destination Object is null");
+        continue;
+      }
+
+      output.setDestination(value);
+      output.loadConfig(map);
+
+      // We only check is_enabled here. Below we check whether this
+      // output is enabled for a particular input
+      boolean isEnabled = output.getBooleanValue("is_enabled", true);
+      if (isEnabled) {
+        outputList.add(output);
+        output.logConfgs(Level.INFO);
+      } else {
+        logger.info("Output is disabled. So ignoring it. "
+          + output.getShortDescription());
+      }
+    }
+
+    // Second loop for input
+    for (Map<String, Object> map : inputConfigList) {
+      if (map == null) {
+        continue;
+      }
+      mergeBlocks(globalMap, map);
+
+      String value = (String) map.get("source");
+      Input input;
+      if (value == null || value.isEmpty()) {
+        logger.error("Input block doesn't have source element");
+        continue;
+      }
+      String classFullName = AliasUtil.getInstance().readAlias(value, ALIAS_TYPE.INPUT, ALIAS_PARAM.KLASS);
+      if (classFullName == null || classFullName.isEmpty()) {
+        logger.error("Source block doesn't have source element");
+        continue;
+      }
+      input = (Input) LogFeederUtil.getClassInstance(classFullName, ALIAS_TYPE.INPUT);
+
+      if (input == null) {
+        logger.error("Source Object is null");
+        continue;
+      }
+
+      input.setType(value);
+      input.loadConfig(map);
+
+      if (input.isEnabled()) {
+        input.setOutputMgr(outMgr);
+        input.setInputMgr(inputMgr);
+        inputMgr.add(input);
+        input.logConfgs(Level.INFO);
+      } else {
+        logger.info("Input is disabled. So ignoring it. "
+          + input.getShortDescription());
+      }
+    }
+
+    // Third loop is for filter, but we will have to create a filter
+    // instance for each input, so it can maintain the state per input
+    List<Input> toRemoveInputList = new ArrayList<Input>();
+    for (Input input : inputMgr.getInputList()) {
+      Filter prevFilter = null;
+      for (Map<String, Object> map : filterConfigList) {
+        if (map == null) {
+          continue;
+        }
+        mergeBlocks(globalMap, map);
+
+        String value = (String) map.get("filter");
+        Filter filter;
+        if (value == null || value.isEmpty()) {
+          logger.error("Filter block doesn't have filter element");
+          continue;
+        }
+
+        String classFullName = AliasUtil.getInstance().readAlias(value, ALIAS_TYPE.FILTER, ALIAS_PARAM.KLASS);
+        if (classFullName == null || classFullName.isEmpty()) {
+          logger.error("Filter block doesn't have filter element");
+          continue;
+        }
+        filter = (Filter) LogFeederUtil.getClassInstance(classFullName, ALIAS_TYPE.FILTER);
+
+        if (filter == null) {
+          logger.error("Filter Object is null");
+          continue;
+        }
+        filter.loadConfig(map);
+        filter.setInput(input);
+
+        if (filter.isEnabled()) {
+          filter.setOutputMgr(outMgr);
+          if (prevFilter == null) {
+            input.setFirstFilter(filter);
+          } else {
+            prevFilter.setNextFilter(filter);
+          }
+          prevFilter = filter;
+          filter.logConfgs(Level.INFO);
+        } else {
+          logger.debug("Ignoring filter "
+            + filter.getShortDescription() + " for input "
+            + input.getShortDescription());
+        }
+      }
+      if (input.getFirstFilter() == null) {
+        toRemoveInputList.add(input);
+      }
+    }
+
+    // Fourth loop is for associating valid outputs to input
+    Set<Output> usedOutputSet = new HashSet<Output>();
+    for (Input input : inputMgr.getInputList()) {
+      for (Output output : outputList) {
+        boolean ret = LogFeederUtil.isEnabled(output.getConfigs(),
+          input.getConfigs());
+        if (ret) {
+          usedOutputSet.add(output);
+          input.addOutput(output);
+        }
+      }
+    }
+    outputList = usedOutputSet;
+
+    for (Input toRemoveInput : toRemoveInputList) {
+      logger.warn("There are no filters, we will ignore this input. "
+        + toRemoveInput.getShortDescription());
+      inputMgr.removeInput(toRemoveInput);
+    }
+  }
+
+  /**
+   * Sorts the given config blocks in place by their sort_order value.
+   *
+   * @param blockList the config blocks to sort
+   */
+  private void sortBlocks(List<Map<String, Object>> blockList) {
+
+    Collections.sort(blockList, new Comparator<Map<String, Object>>() {
+
+      @Override
+      public int compare(Map<String, Object> o1, Map<String, Object> o2) {
+        Object o1Sort = o1.get("sort_order");
+        Object o2Sort = o2.get("sort_order");
+        if (o1Sort == null) {
+          return 0;
+        }
+        if (o2Sort == null) {
+          return 0;
+        }
+        int o1Value = 0;
+        if (o1Sort instanceof Number) {
+          o1Value = ((Number) o1Sort).intValue();
+        } else {
+          try {
+            o1Value = (int) Double.parseDouble(o1Sort.toString());
+          } catch (Throwable t) {
+            logger.error("Value is not of type Number. class="
+              + o1Sort.getClass().getName() + ", value="
+              + o1Sort.toString() + ", map=" + o1.toString());
+          }
+        }
+        int o2Value = 0;
+        if (o2Sort instanceof Number) {
+          o2Value = ((Number) o2Sort).intValue();
+        } else {
+          try {
+            o2Value = (int) Double.parseDouble(o2Sort.toString());
+          } catch (Throwable t) {
+            logger.error("Value is not of type Number. class="
+              + o2Sort.getClass().getName() + ", value="
+              + o2Sort.toString() + ", map=" + o2.toString());
+          }
+        }
+        return o1Value - o2Value;
+      }
+    });
+  }
+
+  /**
+   * Merges the given config blocks into a single map; for duplicate keys,
+   * earlier blocks take precedence.
+   *
+   * @param configList the config blocks to merge
+   */
+  private Map<String, Object> mergeConfigs(
+    List<Map<String, Object>> configList) {
+    Map<String, Object> mergedConfig = new HashMap<String, Object>();
+    for (Map<String, Object> config : configList) {
+      mergeBlocks(config, mergedConfig);
+    }
+    return mergedConfig;
+  }
+
+  private void mergeBlocks(Map<String, Object> fromMap,
+                           Map<String, Object> toMap) {
+    // Merge the nested map values first
+    for (String key : fromMap.keySet()) {
+      Object objValue = fromMap.get(key);
+      if (objValue == null) {
+        continue;
+      }
+      if (objValue instanceof Map) {
+        @SuppressWarnings("unchecked")
+        Map<String, Object> globalFields = LogFeederUtil
+          .cloneObject((Map<String, Object>) fromMap.get(key));
+
+        @SuppressWarnings("unchecked")
+        Map<String, Object> localFields = (Map<String, Object>) toMap
+          .get(key);
+        if (localFields == null) {
+          localFields = new HashMap<String, Object>();
+          toMap.put(key, localFields);
+        }
+
+        if (globalFields != null) {
+          for (String fieldKey : globalFields.keySet()) {
+            if (!localFields.containsKey(fieldKey)) {
+              localFields.put(fieldKey,
+                globalFields.get(fieldKey));
+            }
+          }
+        }
+      }
+    }
+
+    // Let's add the rest of the top level fields if missing
+    for (String key : fromMap.keySet()) {
+      if (!toMap.containsKey(key)) {
+        toMap.put(key, fromMap.get(key));
+      }
+    }
+  }
+
+  private void monitor() throws Exception {
+    inputMgr.monitor();
+    Runtime.getRuntime().addShutdownHook(new JVMShutdownHook());
+
+    Thread statLogger = new Thread("statLogger") {
+
+      @Override
+      public void run() {
+        while (true) {
+          try {
+            Thread.sleep(30 * 1000);
+          } catch (Throwable t) {
+            // Ignore
+          }
+          try {
+            logStats();
+          } catch (Throwable t) {
+            logger.error(
+              "LogStats: Caught exception while logging stats.",
+              t);
+          }
+
+          if (System.currentTimeMillis() > (lastCheckPointCleanedMS + checkPointCleanIntervalMS)) {
+            lastCheckPointCleanedMS = System.currentTimeMillis();
+            inputMgr.cleanCheckPointFiles();
+          }
+        }
+      }
+
+    };
+    statLogger.setDaemon(true);
+    statLogger.start();
+
+  }
+
+  private void logStats() {
+    inputMgr.logStats();
+    outMgr.logStats();
+
+    if (metricsMgr.isMetricsEnabled()) {
+      List<MetricCount> metricsList = new ArrayList<MetricCount>();
+      inputMgr.addMetricsContainers(metricsList);
+      outMgr.addMetricsContainers(metricsList);
+      metricsMgr.useMetrics(metricsList);
+    }
+  }
+
+  /**
+   * Reads the entire content from the given reader into a single string.
+   *
+   * @param br the reader to consume
+   * @throws Exception if reading fails
+   */
+  public String readFile(BufferedReader br) throws Exception {
+    try {
+      StringBuilder sb = new StringBuilder();
+      String line = br.readLine();
+      while (line != null) {
+        sb.append(line);
+        line = br.readLine();
+      }
+      return sb.toString();
+    } catch (Exception t) {
+      logger.error("Error loading properties file.", t);
+      throw t;
+    }
+  }
+
+  public Collection<Output> getOutputList() {
+    return outputList;
+  }
+
+  public OutputMgr getOutMgr() {
+    return outMgr;
+  }
+
+  public static void main(String[] args) {
+    new LogFeeder(args).run();
+  }
+
+  public static void run(String[] args) {
+    new LogFeeder(args).run();
+  }
+
+  public void run() {
+    try {
+      Date startTime = new Date();
+      init();
+      Date endTime = new Date();
+      logger.info("Took " + (endTime.getTime() - startTime.getTime())
+        + " ms to initialize");
+      monitor();
+    } catch (Throwable t) {
+      logger.fatal("Caught exception in main.", t);
+      System.exit(1);
+    }
+  }
+
+  private class JVMShutdownHook extends Thread {
+
+    public void run() {
+      try {
+        logger.info("Processing is shutting down.");
+
+        inputMgr.close();
+        outMgr.close();
+        inputMgr.checkInAll();
+
+        logStats();
+
+        logger.info("LogSearch is exiting.");
+      } catch (Throwable t) {
+        // Ignore
+      }
+    }
+  }
+
+}
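
For reference, a hypothetical config accepted by loadConfigs() above: the
top-level keys "global", "input", "filter" and "output" are the ones the
parser recognizes, while the alias values ("file", "grok", "solr") and the
path are illustrative; the real set comes from AliasUtil's alias
definitions, not from this sketch.

  String configData = "{"
    + "\"global\": {\"add_fields\": {\"cluster\": \"cl1\"}},"
    + "\"input\": [{\"source\": \"file\", \"type\": \"ambari_server\","
    + "  \"path\": \"/var/log/ambari-server/ambari-server.log\"}],"
    + "\"filter\": [{\"filter\": \"grok\", \"sort_order\": 1,"
    + "  \"conditions\": {\"fields\": {\"type\": [\"ambari_server\"]}}}],"
    + "\"output\": [{\"destination\": \"solr\", \"is_enabled\": \"true\"}]"
    + "}";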

+ 74 - 0
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/LogFeederAMSClient.java

@@ -0,0 +1,74 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logfeeder;
+
+import org.apache.hadoop.metrics2.sink.timeline.AbstractTimelineMetricsSink;
+import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;
+import org.apache.log4j.Logger;
+
+public class LogFeederAMSClient extends AbstractTimelineMetricsSink {
+  static Logger logger = Logger.getLogger(LogFeederAMSClient.class);
+
+  String collectorHosts = null;
+
+  public LogFeederAMSClient() {
+    collectorHosts = LogFeederUtil
+      .getStringProperty("metrics.collector.hosts");
+    if (collectorHosts != null) {
+      collectorHosts = collectorHosts.trim();
+      if (collectorHosts.length() == 0) {
+        collectorHosts = null;
+      }
+    }
+    logger.info("AMS collector URI=" + collectorHosts);
+  }
+
+  /*
+   * (non-Javadoc)
+   * 
+   * @see
+   * org.apache.hadoop.metrics2.sink.timeline.AbstractTimelineMetricsSink#
+   * getCollectorUri()
+   */
+  @Override
+  public String getCollectorUri() {
+
+    return collectorHosts;
+  }
+
+  /*
+   * (non-Javadoc)
+   * 
+   * @see
+   * org.apache.hadoop.metrics2.sink.timeline.AbstractTimelineMetricsSink#
+   * getTimeoutSeconds()
+   */
+  @Override
+  protected int getTimeoutSeconds() {
+    // TODO: Hard coded timeout
+    return 10;
+  }
+
+  @Override
+  protected void emitMetrics(TimelineMetrics metrics) {
+    super.emitMetrics(metrics);
+  }
+
+}
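
getCollectorUri() above returns the metrics.collector.hosts property
verbatim, so the property is expected to carry the full collector endpoint.
As an assumption about the format (host, port and path here are
illustrative), a logfeeder.properties entry might look like:

  metrics.collector.hosts=http://metrics-collector.example.com:6188/ws/v1/timeline/metrics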

+ 480 - 0
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/LogFeederUtil.java

@@ -0,0 +1,480 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logfeeder;
+
+import java.io.BufferedInputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.lang.reflect.Type;
+import java.net.URL;
+import java.text.DateFormat;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.Hashtable;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+
+import org.apache.ambari.logfeeder.filter.Filter;
+import org.apache.ambari.logfeeder.input.Input;
+import org.apache.ambari.logfeeder.mapper.Mapper;
+import org.apache.ambari.logfeeder.output.Output;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.log4j.Level;
+import org.apache.log4j.Logger;
+import org.codehaus.jackson.JsonParseException;
+import org.codehaus.jackson.map.JsonMappingException;
+import org.codehaus.jackson.map.ObjectMapper;
+import org.codehaus.jackson.type.TypeReference;
+
+import com.google.gson.Gson;
+import com.google.gson.GsonBuilder;
+import com.google.gson.reflect.TypeToken;
+
+/**
+ * This class contains utility methods used by LogFeeder
+ */
+public class LogFeederUtil {
+  static Logger logger = Logger.getLogger(LogFeederUtil.class);
+
+  final static int HASH_SEED = 31174077;
+  public final static String DATE_FORMAT = "yyyy-MM-dd HH:mm:ss.SSS";
+  static Gson gson = new GsonBuilder().setDateFormat(DATE_FORMAT).create();
+
+  static Properties props;
+
+  private static Map<String, LogHistory> logHistoryList = new Hashtable<String, LogHistory>();
+  private static int logInterval = 30000; // 30 seconds
+
+  public static Gson getGson() {
+    return gson;
+  }
+
+  /**
+   * Loads the properties: system properties serve as defaults, the
+   * properties file is loaded on top of them, and "-name=value" arguments
+   * override both.
+   *
+   * @param propFile   name of the properties file
+   * @param propNVList name=value pairs passed on the command line
+   * @throws Exception if no properties file could be loaded
+   */
+  static public void loadProperties(String propFile, String[] propNVList)
+    throws Exception {
+    logger.info("Loading properties. propFile=" + propFile);
+    props = new Properties(System.getProperties());
+    boolean propLoaded = false;
+
+    // First, check whether a properties file path was passed as a system property
+    String propertiesFilePath = System.getProperty("properties");
+    if (propertiesFilePath != null && !propertiesFilePath.isEmpty()) {
+      File propertiesFile = new File(propertiesFilePath);
+      if (propertiesFile.exists() && propertiesFile.isFile()) {
+        logger.info("Properties file path set in environment. Loading properties file="
+          + propertiesFilePath);
+        FileInputStream fileInputStream = null;
+        try {
+          fileInputStream = new FileInputStream(propertiesFile);
+          props.load(fileInputStream);
+          propLoaded = true;
+        } catch (Throwable t) {
+          logger.error("Error loading properties file. properties file="
+            + propertiesFile.getAbsolutePath());
+        } finally {
+          if (fileInputStream != null) {
+            try {
+              fileInputStream.close();
+            } catch (Throwable t) {
+              // Ignore error
+            }
+          }
+        }
+      } else {
+        logger.error("Properties file path set in environment, but file not found. properties file="
+          + propertiesFilePath);
+      }
+    }
+
+    if (!propLoaded) {
+      // Properties not yet loaded, let's try from class loader
+      BufferedInputStream fileInputStream = (BufferedInputStream) LogFeeder.class
+        .getClassLoader().getResourceAsStream(propFile);
+      if (fileInputStream != null) {
+        logger.info("Loading properties file " + propFile
+          + " from classpath");
+        props.load(fileInputStream);
+        propLoaded = true;
+      } else {
+        logger.fatal("Properties file not found in classpath. properties file name= "
+          + propFile);
+      }
+    }
+
+    if (!propLoaded) {
+      logger.fatal("Properties file is not loaded.");
+      throw new Exception("Properties not loaded");
+    } else {
+      // Let's load properties from argument list
+      updatePropertiesFromMap(propNVList);
+    }
+  }
+
+  /**
+   * Overrides loaded properties with "-name=value" pairs from the
+   * argument list.
+   *
+   * @param nvList the command line arguments
+   */
+  private static void updatePropertiesFromMap(String[] nvList) {
+    if (nvList == null) {
+      return;
+    }
+    logger.info("Trying to load additional proeprties from argument paramters. nvList.length="
+      + nvList.length);
+    if (nvList != null && nvList.length > 0) {
+      for (String nv : nvList) {
+        logger.info("Passed nv=" + nv);
+        if (nv.startsWith("-") && nv.length() > 1) {
+          nv = nv.substring(1);
+          logger.info("Stripped nv=" + nv);
+          int i = nv.indexOf("=");
+          if (nv.length() > i) {
+            logger.info("Candidate nv=" + nv);
+            String name = nv.substring(0, i);
+            String value = nv.substring(i + 1);
+            logger.info("Adding property from argument to properties. name="
+              + name + ", value=" + value);
+            props.put(name, value);
+          }
+        }
+      }
+    }
+  }
+
+  static public String getStringProperty(String key) {
+    if (props != null) {
+      return props.getProperty(key);
+    }
+    return null;
+  }
+
+  static public String getStringProperty(String key, String defaultValue) {
+    if (props != null) {
+      return props.getProperty(key, defaultValue);
+    }
+    return defaultValue;
+  }
+
+  static public boolean getBooleanProperty(String key, boolean defaultValue) {
+    String strValue = getStringProperty(key);
+    return toBoolean(strValue, defaultValue);
+  }
+
+  private static boolean toBoolean(String strValue, boolean defaultValue) {
+    if (StringUtils.isEmpty(strValue)) {
+      return defaultValue;
+    }
+    return strValue.equalsIgnoreCase("true")
+      || strValue.equalsIgnoreCase("yes");
+  }
+
+  static public int getIntProperty(String key, int defaultValue) {
+    String strValue = getStringProperty(key);
+    return objectToInt(strValue, defaultValue, ", key=" + key);
+  }
+
+  public static int objectToInt(Object objValue, int retValue,
+                                String errMessage) {
+    if (objValue == null) {
+      return retValue;
+    }
+    String strValue = objValue.toString();
+    if (!StringUtils.isEmpty(strValue)) {
+      try {
+        retValue = Integer.parseInt(strValue);
+      } catch (Throwable t) {
+        logger.error("Error parsing integer value. str=" + strValue
+          + ", " + errMessage);
+      }
+    }
+    return retValue;
+  }
+
+  static public boolean isEnabled(Map<String, Object> configs) {
+    return isEnabled(configs, configs);
+  }
+
+  static public boolean isEnabled(Map<String, Object> conditionConfigs,
+                                  Map<String, Object> valueConfigs) {
+    boolean allow = toBoolean((String) valueConfigs.get("is_enabled"), true);
+    @SuppressWarnings("unchecked")
+    Map<String, Object> conditions = (Map<String, Object>) conditionConfigs
+      .get("conditions");
+    if (conditions != null && conditions.size() > 0) {
+      allow = false;
+      for (String conditionType : conditions.keySet()) {
+        if (conditionType.equalsIgnoreCase("fields")) {
+          @SuppressWarnings("unchecked")
+          Map<String, Object> fields = (Map<String, Object>) conditions
+            .get("fields");
+          for (String fieldName : fields.keySet()) {
+            Object values = fields.get(fieldName);
+            if (values instanceof String) {
+              allow = isFieldConditionMatch(valueConfigs,
+                fieldName, (String) values);
+            } else {
+              @SuppressWarnings("unchecked")
+              List<String> listValues = (List<String>) values;
+              for (String stringValue : listValues) {
+                allow = isFieldConditionMatch(valueConfigs,
+                  fieldName, stringValue);
+                if (allow) {
+                  break;
+                }
+              }
+            }
+            if (allow) {
+              break;
+            }
+          }
+        }
+        if (allow) {
+          break;
+        }
+      }
+    }
+    return allow;
+  }
+
+  static public boolean isFieldConditionMatch(Map<String, Object> configs,
+                                              String fieldName, String stringValue) {
+    boolean allow = false;
+    String fieldValue = (String) configs.get(fieldName);
+    if (fieldValue != null && fieldValue.equalsIgnoreCase(stringValue)) {
+      allow = true;
+    } else {
+      @SuppressWarnings("unchecked")
+      Map<String, Object> addFields = (Map<String, Object>) configs
+        .get("add_fields");
+      if (addFields != null && addFields.get(fieldName) != null) {
+        String addFieldValue = (String) addFields.get(fieldName);
+        if (stringValue.equalsIgnoreCase(addFieldValue)) {
+          allow = true;
+        }
+      }
+
+    }
+    return allow;
+  }
+
+  static public void logStatForMetric(MetricCount metric, String prefixStr,
+                                      String postFix) {
+    long currStat = metric.count;
+    long currMS = System.currentTimeMillis();
+    if (currStat > metric.prevLogCount) {
+      if (postFix == null) {
+        postFix = "";
+      }
+      logger.info(prefixStr + ": total_count=" + metric.count
+        + ", duration=" + (currMS - metric.prevLogMS) / 1000
+        + " secs, count=" + (currStat - metric.prevLogCount)
+        + postFix);
+    }
+    metric.prevLogCount = currStat;
+    metric.prevLogMS = currMS;
+  }
+
+  static public void logCountForMetric(MetricCount metric, String prefixStr,
+                                       String postFix) {
+    logger.info(prefixStr + ": count=" + metric.count + postFix);
+  }
+
+  public static Map<String, Object> cloneObject(Map<String, Object> map) {
+    if (map == null) {
+      return null;
+    }
+    String jsonStr = gson.toJson(map);
+    // We need to clone it, so we will create a JSON string and convert it
+    // back
+    Type type = new TypeToken<Map<String, Object>>() {
+    }.getType();
+    return gson.fromJson(jsonStr, type);
+  }
+
+  public static Map<String, Object> toJSONObject(String jsonStr) {
+    Type type = new TypeToken<Map<String, Object>>() {
+    }.getType();
+    return gson.fromJson(jsonStr, type);
+  }
+
+  static public boolean logErrorMessageByInterval(String key, String message,
+                                                  Throwable e, Logger callerLogger, Level level) {
+
+    LogHistory log = logHistoryList.get(key);
+    if (log == null) {
+      log = new LogHistory();
+      logHistoryList.put(key, log);
+    }
+    if ((System.currentTimeMillis() - log.lastLogTime) > logInterval) {
+      log.lastLogTime = System.currentTimeMillis();
+      int counter = log.counter;
+      log.counter = 0;
+      if (counter > 0) {
+        message += ". Messages suppressed before: " + counter;
+      }
+      if (e == null) {
+        callerLogger.log(level, message);
+      } else {
+        callerLogger.log(level, message, e);
+      }
+
+      return true;
+    } else {
+      log.counter++;
+    }
+    return false;
+
+  }
+
+  static public String subString(String str, int maxLength) {
+    if (str == null || str.length() == 0) {
+      return "";
+    }
+    maxLength = str.length() < maxLength ? str.length() : maxLength;
+    return str.substring(0, maxLength);
+  }
+
+  static public long genHash(String value) {
+    if (value == null) {
+      value = "null";
+    }
+    return MurmurHash.hash64A(value.getBytes(), HASH_SEED);
+  }
+
+  static class LogHistory {
+    long lastLogTime = 0;
+    int counter = 0;
+  }
+
+  public static String getDate(String timeStampStr) {
+    try {
+      DateFormat sdf = new SimpleDateFormat(DATE_FORMAT);
+      Date netDate = (new Date(Long.parseLong(timeStampStr)));
+      return sdf.format(netDate);
+    } catch (Exception ex) {
+      return null;
+    }
+  }
+
+  public static File getFileFromClasspath(String filename) {
+    URL fileCompleteUrl = Thread.currentThread().getContextClassLoader()
+      .getResource(filename);
+    logger.debug("File Complete URI :" + fileCompleteUrl);
+    File file = null;
+    try {
+      file = new File(fileCompleteUrl.toURI());
+    } catch (Exception exception) {
+      logger.debug(exception.getMessage(), exception.getCause());
+    }
+    return file;
+  }
+
+  public static Object getClassInstance(String classFullName, AliasUtil.ALIAS_TYPE aliasType) {
+    Object instance = null;
+    try {
+      instance = (Object) Class.forName(classFullName).getConstructor().newInstance();
+    } catch (Exception exception) {
+      logger.error("Unsupported class =" + classFullName, exception.getCause());
+    }
+    // Check that the instance's class matches the given aliasType
+    if (instance != null) {
+      boolean isValid = false;
+      switch (aliasType) {
+        case FILTER:
+          isValid = Filter.class.isAssignableFrom(instance.getClass());
+          break;
+        case INPUT:
+          isValid = Input.class.isAssignableFrom(instance.getClass());
+          break;
+        case OUTPUT:
+          isValid = Output.class.isAssignableFrom(instance.getClass());
+          break;
+        case MAPPER:
+          isValid = Mapper.class.isAssignableFrom(instance.getClass());
+          break;
+        default:
+          // By default, consider any class valid
+          isValid = true;
+      }
+      if (!isValid) {
+        logger.error("Not a valid class :" + classFullName + " AliasType :" + aliasType.name());
+      }
+    }
+    return instance;
+  }
+
+  /**
+   * Reads the given JSON file into a map; returns an empty map on error.
+   *
+   * @param jsonFile the JSON file to read
+   */
+  public static HashMap<String, Object> readJsonFromFile(File jsonFile) {
+    ObjectMapper mapper = new ObjectMapper();
+    try {
+      HashMap<String, Object> jsonmap = mapper.readValue(jsonFile, new TypeReference<HashMap<String, Object>>() {
+      });
+      return jsonmap;
+    } catch (JsonParseException e) {
+      logger.error("Error parsing JSON file. file=" + jsonFile, e);
+    } catch (JsonMappingException e) {
+      logger.error("Error mapping JSON file. file=" + jsonFile, e);
+    } catch (IOException e) {
+      logger.error("Error reading JSON file. file=" + jsonFile, e);
+    }
+    return new HashMap<String, Object>();
+  }
+
+  public static boolean isListContains(List<String> list, String str, boolean caseSensitive) {
+    if (list != null) {
+      for (String value : list) {
+        if (value != null) {
+          if (caseSensitive) {
+            if (value.equals(str)) {
+              return true;
+            }
+          } else {
+            if (value.equalsIgnoreCase(str)) {
+              return true;
+            }
+          }
+          if (value.equalsIgnoreCase("ALL")) {
+            return true;
+          }
+        }
+      }
+    }
+    return false;
+  }
+
+}
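
To make the conditions handling in isEnabled() above concrete, a small
hypothetical example: an output carrying a conditions block is only enabled
for inputs whose "type" field (or matching add_fields entry) equals one of
the listed values, matched case-insensitively.

  // Sketch only; the config values are illustrative.
  Map<String, Object> outputConfigs = LogFeederUtil.toJSONObject(
    "{\"is_enabled\": \"true\", \"conditions\":"
    + " {\"fields\": {\"type\": [\"ambari_server\", \"ambari_agent\"]}}}");
  Map<String, Object> inputConfigs = LogFeederUtil.toJSONObject(
    "{\"type\": \"ambari_server\"}");
  boolean enabled = LogFeederUtil.isEnabled(outputConfigs, inputConfigs);
  // enabled == true: "ambari_server" matches the first condition value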

+ 33 - 0
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/MetricCount.java

@@ -0,0 +1,33 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logfeeder;
+
+public class MetricCount {
+  public String metricsName = null;
+  public boolean isPointInTime = false;
+
+  public long count = 0;
+  public long prevLogCount = 0;
+  public long prevLogMS = System.currentTimeMillis();
+  public long prevPublishCount = 0;
+  public long prevPublishMS = 0; // Zero so the first publish happens immediately
+  public int publishCount = 0; // How many times this metric has been published
+}

+ 185 - 0
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/MetricsMgr.java

@@ -0,0 +1,185 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logfeeder;
+
+import java.net.InetAddress;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.TreeMap;
+
+import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
+import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;
+import org.apache.log4j.Logger;
+
+public class MetricsMgr {
+  static Logger logger = Logger.getLogger(MetricsMgr.class);
+
+  boolean isMetricsEnabled = false;
+  String nodeHostName = null;
+  String appId = "logfeeder";
+
+  long lastPublishTimeMS = 0; // Zero so the first publish happens immediately
+  long lastFailedPublishTimeMS = System.currentTimeMillis();
+
+  int publishIntervalMS = 60 * 1000;
+  int maxMetricsBuffer = 60 * 60 * 1000; // If AMS is down, don't keep metrics in memory forever
+  HashMap<String, TimelineMetric> metricsMap = new HashMap<String, TimelineMetric>();
+  LogFeederAMSClient amsClient = null;
+
+  public void init() {
+    logger.info("Initializing MetricsMgr()");
+    amsClient = new LogFeederAMSClient();
+
+    if (amsClient.getCollectorUri() != null) {
+      nodeHostName = LogFeederUtil.getStringProperty("node.hostname");
+      if (nodeHostName == null) {
+        try {
+          nodeHostName = InetAddress.getLocalHost().getHostName();
+        } catch (Throwable e) {
+          logger.warn(
+            "Error getting hostname using InetAddress.getLocalHost().getHostName()",
+            e);
+        }
+        if (nodeHostName == null) {
+          try {
+            nodeHostName = InetAddress.getLocalHost()
+              .getCanonicalHostName();
+          } catch (Throwable e) {
+            logger.warn(
+              "Error getting hostname using InetAddress.getLocalHost().getCanonicalHostName()",
+              e);
+          }
+        }
+      }
+      if (nodeHostName == null) {
+        isMetricsEnabled = false;
+        logger.error("Failed getting hostname for node. Disabling publishing LogFeeder metrics");
+      } else {
+        isMetricsEnabled = true;
+        logger.info("LogFeeder Metrics is enabled. Metrics host="
+          + amsClient.getCollectorUri());
+      }
+    } else {
+      logger.info("LogFeeder Metrics publish is disabled");
+    }
+  }
+
+  /**
+   * @return whether metrics publishing is enabled
+   */
+  public boolean isMetricsEnabled() {
+    return isMetricsEnabled;
+  }
+
+  /**
+   * Buffers the given metrics and publishes them to AMS once the publish
+   * interval has elapsed.
+   *
+   * @param metricsList the metrics to publish
+   */
+  synchronized public void useMetrics(List<MetricCount> metricsList) {
+    if (!isMetricsEnabled) {
+      return;
+    }
+    logger.info("useMetrics() metrics.size=" + metricsList.size());
+    long currMS = System.currentTimeMillis();
+    Long currMSLong = new Long(currMS);
+    for (MetricCount metric : metricsList) {
+      if (metric.metricsName == null) {
+        logger.debug("metric.metricsName is null");
+        // Metrics is not meant to be published
+        continue;
+      }
+      long currCount = metric.count;
+      if (!metric.isPointInTime && metric.publishCount > 0
+        && currCount <= metric.prevPublishCount) {
+        // No new data added, so let's ignore it
+        logger.debug("Nothing changed. " + metric.metricsName
+          + ", currCount=" + currCount + ", prevPublishCount="
+          + metric.prevPublishCount);
+        continue;
+      }
+      metric.publishCount++;
+
+      TimelineMetric timelineMetric = metricsMap.get(metric.metricsName);
+      if (timelineMetric == null) {
+        logger.debug("Creating new metric obbject for "
+          + metric.metricsName);
+        // First time for this metric
+        timelineMetric = new TimelineMetric();
+        timelineMetric.setMetricName(metric.metricsName);
+        timelineMetric.setHostName(nodeHostName);
+        timelineMetric.setAppId(appId);
+        timelineMetric.setStartTime(currMS);
+        timelineMetric.setType("Long");
+        timelineMetric.setMetricValues(new TreeMap<Long, Double>());
+
+        metricsMap.put(metric.metricsName, timelineMetric);
+      }
+      logger.debug("Adding metrics=" + metric.metricsName);
+      if (metric.isPointInTime) {
+        timelineMetric.getMetricValues().put(currMSLong,
+          new Double(currCount));
+      } else {
+        Double value = timelineMetric.getMetricValues().get(currMSLong);
+        if (value == null) {
+          value = new Double(0);
+        }
+        value += (currCount - metric.prevPublishCount);
+        timelineMetric.getMetricValues().put(currMSLong, value);
+        metric.prevPublishCount = currCount;
+        metric.prevPublishMS = currMS;
+      }
+    }
+
+    if (metricsMap.size() > 0
+      && currMS - lastPublishTimeMS > publishIntervalMS) {
+      try {
+        // Time to publish
+        TimelineMetrics timelineMetrics = new TimelineMetrics();
+        List<TimelineMetric> timeLineMetricList = new ArrayList<TimelineMetric>();
+        timeLineMetricList.addAll(metricsMap.values());
+        timelineMetrics.setMetrics(timeLineMetricList);
+        amsClient.emitMetrics(timelineMetrics);
+        logger.info("Published " + timeLineMetricList.size()
+          + " metrics to AMS");
+        metricsMap.clear();
+        timeLineMetricList.clear();
+        lastPublishTimeMS = currMS;
+      } catch (Throwable t) {
+        logger.warn("Error sending metrics to AMS.", t);
+        if (currMS - lastFailedPublishTimeMS > maxMetricsBuffer) {
+          logger.error("AMS was not sent for last "
+            + maxMetricsBuffer
+            / 1000
+            + " seconds. Purging it and will start rebuilding it again");
+          metricsMap.clear();
+          lastFailedPublishTimeMS = currMS;
+        }
+      }
+    } else {
+      logger.info("Not publishing metrics. metrics.size()="
+        + metricsMap.size() + ", lastPublished="
+        + (currMS - lastPublishTimeMS) / 1000
+        + " seconds ago, intervalConfigured=" + publishIntervalMS
+        / 1000);
+    }
+  }
+}
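
A hedged usage sketch mirroring LogFeeder.logStats() above: callers collect
their MetricCount holders into a list and hand it to useMetrics(), which
buffers the values and publishes them roughly once per publishIntervalMS.
The metric name below is illustrative.

  MetricsMgr metricsMgr = new MetricsMgr();
  metricsMgr.init(); // resolves the AMS collector URI and the local host name
  List<MetricCount> metricsList = new ArrayList<MetricCount>();
  MetricCount readCount = new MetricCount();
  readCount.metricsName = "logfeeder.log.lines.read"; // illustrative name
  readCount.count = 42;
  metricsList.add(readCount);
  metricsMgr.useMetrics(metricsList); // buffered; published every ~60 seconds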

+ 163 - 0
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/MurmurHash.java

@@ -0,0 +1,163 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.logfeeder;
+
+import com.google.common.primitives.Ints;
+
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+
+/**
+ * This is a very fast, non-cryptographic hash suitable for general hash-based
+ * lookup.  See http://murmurhash.googlepages.com/ for more details.
+ * <p/>
+ * <p>The C version of MurmurHash 2.0 found at that site was ported
+ * to Java by Andrzej Bialecki (ab at getopt org).</p>
+ */
+public final class MurmurHash {
+
+  private MurmurHash() {
+  }
+
+  /**
+   * Hashes an int.
+   *
+   * @param data The int to hash.
+   * @param seed The seed for the hash.
+   * @return The 32 bit hash of the bytes in question.
+   */
+  public static int hash(int data, int seed) {
+    return hash(ByteBuffer.wrap(Ints.toByteArray(data)), seed);
+  }
+
+  /**
+   * Hashes bytes in an array.
+   *
+   * @param data The bytes to hash.
+   * @param seed The seed for the hash.
+   * @return The 32 bit hash of the bytes in question.
+   */
+  public static int hash(byte[] data, int seed) {
+    return hash(ByteBuffer.wrap(data), seed);
+  }
+
+  /**
+   * Hashes bytes in part of an array.
+   *
+   * @param data   The data to hash.
+   * @param offset Where to start munging.
+   * @param length How many bytes to process.
+   * @param seed   The seed to start with.
+   * @return The 32-bit hash of the data in question.
+   */
+  public static int hash(byte[] data, int offset, int length, int seed) {
+    return hash(ByteBuffer.wrap(data, offset, length), seed);
+  }
+
+  /**
+   * Hashes the bytes in a buffer from the current position to the limit.
+   *
+   * @param buf  The bytes to hash.
+   * @param seed The seed for the hash.
+   * @return The 32 bit murmur hash of the bytes in the buffer.
+   */
+  public static int hash(ByteBuffer buf, int seed) {
+    // save byte order for later restoration
+    ByteOrder byteOrder = buf.order();
+    buf.order(ByteOrder.LITTLE_ENDIAN);
+
+    int m = 0x5bd1e995;
+    int r = 24;
+
+    int h = seed ^ buf.remaining();
+
+    while (buf.remaining() >= 4) {
+      int k = buf.getInt();
+
+      k *= m;
+      k ^= k >>> r;
+      k *= m;
+
+      h *= m;
+      h ^= k;
+    }
+
+    if (buf.remaining() > 0) {
+      ByteBuffer finish = ByteBuffer.allocate(4).order(ByteOrder.LITTLE_ENDIAN);
+      // for big-endian version, use this first:
+      // finish.position(4-buf.remaining());
+      finish.put(buf).rewind();
+      h ^= finish.getInt();
+      h *= m;
+    }
+
+    h ^= h >>> 13;
+    h *= m;
+    h ^= h >>> 15;
+
+    buf.order(byteOrder);
+    return h;
+  }
+
+
+  public static long hash64A(byte[] data, int seed) {
+    return hash64A(ByteBuffer.wrap(data), seed);
+  }
+
+  public static long hash64A(byte[] data, int offset, int length, int seed) {
+    return hash64A(ByteBuffer.wrap(data, offset, length), seed);
+  }
+
+  public static long hash64A(ByteBuffer buf, int seed) {
+    ByteOrder byteOrder = buf.order();
+    buf.order(ByteOrder.LITTLE_ENDIAN);
+
+    long m = 0xc6a4a7935bd1e995L;
+    int r = 47;
+
+    long h = seed ^ (buf.remaining() * m);
+
+    while (buf.remaining() >= 8) {
+      long k = buf.getLong();
+
+      k *= m;
+      k ^= k >>> r;
+      k *= m;
+
+      h ^= k;
+      h *= m;
+    }
+
+    if (buf.remaining() > 0) {
+      ByteBuffer finish = ByteBuffer.allocate(8).order(ByteOrder.LITTLE_ENDIAN);
+      // for big-endian version, do this first:
+      // finish.position(8-buf.remaining());
+      finish.put(buf).rewind();
+      h ^= finish.getLong();
+      h *= m;
+    }
+
+    h ^= h >>> r;
+    h *= m;
+    h ^= h >>> r;
+
+    buf.order(byteOrder);
+    return h;
+  }
+
+}
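
LogFeederUtil.genHash() above uses the 64-bit variant with the fixed seed
HASH_SEED = 31174077; a direct usage sketch:

  // 64-bit MurmurHash of the message bytes, as used for event_md5/message_md5
  long h = MurmurHash.hash64A("some log line".getBytes(), 31174077);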

+ 272 - 0
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/OutputMgr.java

@@ -0,0 +1,272 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logfeeder;
+
+import java.net.InetAddress;
+import java.net.UnknownHostException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Date;
+import java.util.List;
+import java.util.Map;
+import java.util.UUID;
+
+import org.apache.ambari.logfeeder.input.Input;
+import org.apache.ambari.logfeeder.input.InputMarker;
+import org.apache.ambari.logfeeder.logconfig.filter.FilterLogData;
+import org.apache.ambari.logfeeder.output.Output;
+import org.apache.log4j.Level;
+import org.apache.log4j.Logger;
+
+public class OutputMgr {
+  static Logger logger = Logger.getLogger(OutputMgr.class);
+
+  Collection<Output> outputList = new ArrayList<Output>();
+
+  String hostName = null;
+  String ipAddress = null;
+  boolean addMessageMD5 = true;
+
+  private static final int MAX_OUTPUT_SIZE = 32765; // one below the 32766 byte limit
+  static long docCounter = 0;
+  public MetricCount messageTruncateMetric = new MetricCount();
+
+  public OutputMgr() {
+    // Set the host for this server
+    try {
+      InetAddress ip = InetAddress.getLocalHost();
+      ipAddress = ip.getHostAddress();
+      hostName = ip.getHostName();
+    } catch (UnknownHostException e) {
+      logger.error("Error getting hostname.", e);
+    }
+  }
+
+  public Collection<Output> getOutputList() {
+    return outputList;
+  }
+
+  public void setOutputList(Collection<Output> outputList) {
+    this.outputList = outputList;
+  }
+
+  /**
+   * Enriches the given event with context fields and writes it to all
+   * outputs configured for the originating input.
+   *
+   * @param jsonObj     the event fields
+   * @param inputMarker marker identifying the originating input
+   */
+  public void write(Map<String, Object> jsonObj, InputMarker inputMarker) {
+    Input input = inputMarker.input;
+
+    // Update the block with the context fields
+    for (Map.Entry<String, String> entry : input.getContextFields()
+      .entrySet()) {
+      if (jsonObj.get(entry.getKey()) == null) {
+        jsonObj.put(entry.getKey(), entry.getValue());
+      }
+    }
+
+    // TODO: Ideally most of the overrides should be configurable
+
+    // Add the input type
+    if (jsonObj.get("type") == null) {
+      jsonObj.put("type", input.getStringValue("type"));
+    }
+    if (jsonObj.get("path") == null && input.getFilePath() != null) {
+      jsonObj.put("path", input.getFilePath());
+    }
+    if (jsonObj.get("path") == null && input.getStringValue("path") != null) {
+      jsonObj.put("path", input.getStringValue("path"));
+    }
+
+    // Add host if required
+    if (jsonObj.get("host") == null && hostName != null) {
+      jsonObj.put("host", hostName);
+    }
+    // Add IP if required
+    if (jsonObj.get("ip") == null && ipAddress != null) {
+      jsonObj.put("ip", ipAddress);
+    }
+
+    if (input.isUseEventMD5() || input.isGenEventMD5()) {
+      String prefix = "";
+      Object logtimeObj = jsonObj.get("logtime");
+      if (logtimeObj != null) {
+        if (logtimeObj instanceof Date) {
+          prefix = "" + ((Date) logtimeObj).getTime();
+        } else {
+          prefix = logtimeObj.toString();
+        }
+      }
+      Long eventMD5 = LogFeederUtil.genHash(LogFeederUtil.getGson()
+        .toJson(jsonObj));
+      if (input.isGenEventMD5()) {
+        jsonObj.put("event_md5", prefix + eventMD5.toString());
+      }
+      if (input.isUseEventMD5()) {
+        jsonObj.put("id", prefix + eventMD5.toString());
+      }
+    }
+
+    // jsonObj.put("@timestamp", new Date());
+    jsonObj.put("seq_num", new Long(doc_counter++));
+    if (jsonObj.get("id") == null) {
+      jsonObj.put("id", UUID.randomUUID().toString());
+    }
+    if (jsonObj.get("event_count") == null) {
+      jsonObj.put("event_count", new Integer(1));
+    }
+    if (inputMarker.lineNumber > 0) {
+      jsonObj.put("logfile_line_number", new Integer(
+        inputMarker.lineNumber));
+    }
+    if (jsonObj.containsKey("log_message")) {
+      // TODO: Let's check size only for log_message for now
+      String logMessage = (String) jsonObj.get("log_message");
+      if (logMessage != null
+        && logMessage.getBytes().length > MAX_OUTPUT_SIZE) {
+        messageTruncateMetric.count++;
+        final String LOG_MESSAGE_KEY = this.getClass().getSimpleName()
+          + "_MESSAGESIZE";
+        LogFeederUtil.logErrorMessageByInterval(LOG_MESSAGE_KEY,
+          "Message is too big. size="
+            + logMessage.getBytes().length + ", input="
+            + input.getShortDescription()
+            + ". Truncating to " + MAX_OUTPUT_SIZE
+            + ", first upto 100 characters="
+            + LogFeederUtil.subString(logMessage, 100),
+          null, logger, Level.WARN);
+        logMessage = new String(logMessage.getBytes(), 0,
+          MAX_OUTPUT_SIZE);
+        jsonObj.put("log_message", logMessage);
+        // Add error tags
+        @SuppressWarnings("unchecked")
+        List<String> tagsList = (List<String>) jsonObj.get("tags");
+        if (tagsList == null) {
+          tagsList = new ArrayList<String>();
+          jsonObj.put("tags", tagsList);
+        }
+        tagsList.add("error_message_truncated");
+
+      }
+      if (addMessageMD5) {
+        jsonObj.put("message_md5",
+          "" + LogFeederUtil.genHash(logMessage));
+      }
+    }
+    // Check whether this log event is allowed to be sent to output
+    if (FilterLogData.INSTANCE.isAllowed(jsonObj)) {
+      for (Output output : input.getOutputList()) {
+        try {
+          output.write(jsonObj, inputMarker);
+        } catch (Exception e) {
+          logger.error("Error writing. to " + output.getShortDescription(), e);
+        }
+      }
+    }
+  }
+
+  public void write(String jsonBlock, InputMarker inputMarker) {
+    // Check whether this log event is allowed to be sent to output
+    if (FilterLogData.INSTANCE.isAllowed(jsonBlock)) {
+      for (Output output : inputMarker.input.getOutputList()) {
+        try {
+          output.write(jsonBlock, inputMarker);
+        } catch (Exception e) {
+          logger.error("Error writing. to " + output.getShortDescription(), e);
+        }
+      }
+    }
+  }
+
+  /**
+   * Close all the outputs
+   */
+  public void close() {
+    logger.info("Close called for outputs ...");
+    for (Output output : outputList) {
+      try {
+        output.setDrain(true);
+        output.close();
+      } catch (Exception e) {
+        // Ignore
+      }
+    }
+    // Need to get this value from property
+    int iterations = 30;
+    int waitTimeMS = 1000;
+    int i;
+    boolean allClosed = true;
+    for (i = 0; i < iterations; i++) {
+      allClosed = true;
+      for (Output output : outputList) {
+        if (!output.isClosed()) {
+          try {
+            allClosed = false;
+            logger.warn("Waiting for output to close. "
+              + output.getShortDescription() + ", "
+              + (iterations - i) + " more seconds");
+            Thread.sleep(waitTimeMS);
+          } catch (Throwable t) {
+            // Ignore
+          }
+        }
+      }
+      if (allClosed) {
+        break;
+      }
+    }
+
+    if (!allClosed) {
+      logger.warn("Some outpus were not closed. Iterations=" + i);
+      for (Output output : outputList) {
+        if (!output.isClosed()) {
+          logger.warn("Output not closed. Will ignore it."
+            + output.getShortDescription() + ", pendingCound="
+            + output.getPendingCount());
+        }
+      }
+    } else {
+      logger.info("All outputs are closed. Iterations=" + i);
+    }
+  }
+
+  /**
+   * Log statistics for each output and for the message truncation metric.
+   */
+  public void logStats() {
+    for (Output output : outputList) {
+      output.logStat();
+    }
+    LogFeederUtil.logStatForMetric(messageTruncateMetric,
+      "Stat: Messages Truncated", null);
+  }
+
+  /**
+   * Add this manager's metric containers, and those of all its outputs,
+   * to the given list.
+   *
+   * @param metricsList list to collect the metric containers into
+   */
+  public void addMetricsContainers(List<MetricCount> metricsList) {
+    metricsList.add(messageTruncateMetric);
+    for (Output output : outputList) {
+      output.addMetricsContainers(metricsList);
+    }
+  }
+
+}

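The truncation branch in write() above cuts log_message at MAX_OUTPUT_SIZE bytes, not characters. A standalone sketch of that rule, assuming an illustrative 32 KB limit (the real constant is defined earlier in OutputMgr):

    import java.nio.charset.StandardCharsets;

    public class TruncateDemo {
      // Assumed limit for illustration; the real MAX_OUTPUT_SIZE lives in OutputMgr
      static final int MAX_OUTPUT_SIZE = 32 * 1024;

      static String truncate(String msg) {
        byte[] bytes = msg.getBytes(StandardCharsets.UTF_8);
        if (bytes.length <= MAX_OUTPUT_SIZE) {
          return msg;
        }
        // Byte-based cut: may split a multi-byte UTF-8 character at the boundary
        return new String(bytes, 0, MAX_OUTPUT_SIZE, StandardCharsets.UTF_8);
      }

      public static void main(String[] args) {
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < 40000; i++) {
          sb.append('x');
        }
        System.out.println(truncate(sb.toString()).length()); // prints 32768
      }
    }

Because the cut is byte-based, a multi-byte character straddling the boundary decodes as a replacement character, so consumers of the truncated text should expect that.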
+ 223 - 0
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/Filter.java

@@ -0,0 +1,223 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logfeeder.filter;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.ambari.logfeeder.AliasUtil;
+import org.apache.ambari.logfeeder.ConfigBlock;
+import org.apache.ambari.logfeeder.LogFeederUtil;
+import org.apache.ambari.logfeeder.MetricCount;
+import org.apache.ambari.logfeeder.OutputMgr;
+import org.apache.ambari.logfeeder.AliasUtil.ALIAS_PARAM;
+import org.apache.ambari.logfeeder.AliasUtil.ALIAS_TYPE;
+import org.apache.ambari.logfeeder.input.Input;
+import org.apache.ambari.logfeeder.input.InputMarker;
+import org.apache.ambari.logfeeder.mapper.Mapper;
+import org.apache.log4j.Logger;
+import org.apache.log4j.Priority;
+
+public abstract class Filter extends ConfigBlock {
+  static private Logger logger = Logger.getLogger(Filter.class);
+
+  OutputMgr outputMgr;
+  Input input;
+  Filter nextFilter = null;
+
+  Map<String, List<Mapper>> postFieldValueMappers = new HashMap<String, List<Mapper>>();
+
+  @Override
+  public void init() throws Exception {
+    super.init();
+
+    initializePostMapValues();
+    if (nextFilter != null) {
+      nextFilter.init();
+    }
+  }
+
+  /**
+   * Read the post_map_values configuration block and build the per-field
+   * mapper lists.
+   */
+  @SuppressWarnings("unchecked")
+  protected void initializePostMapValues() {
+    // Initialize map values
+    Map<String, Object> postMapValues = (Map<String, Object>) getConfigValue("post_map_values");
+    if (postMapValues == null) {
+      return;
+    }
+    for (String fieldName : postMapValues.keySet()) {
+      List<Map<String, Object>> mapList = null;
+      Object values = postMapValues.get(fieldName);
+      if (values instanceof List<?>) {
+        mapList = (List<Map<String, Object>>) values;
+      } else {
+        mapList = new ArrayList<Map<String, Object>>();
+        mapList.add((Map<String, Object>) values);
+      }
+      for (Map<String, Object> mapObject : mapList) {
+        for (String mapClassCode : mapObject.keySet()) {
+          Mapper mapper = getMapper(mapClassCode);
+          if (mapper == null) {
+            break;
+          }
+          if (mapper.init(getInput().getShortDescription(),
+            fieldName, mapClassCode,
+            mapObject.get(mapClassCode))) {
+            List<Mapper> fieldMapList = postFieldValueMappers
+              .get(fieldName);
+            if (fieldMapList == null) {
+              fieldMapList = new ArrayList<Mapper>();
+              postFieldValueMappers.put(fieldName, fieldMapList);
+            }
+            fieldMapList.add(mapper);
+          }
+        }
+      }
+    }
+  }
+
+  /**
+   * Look up and instantiate the mapper class registered under the given alias.
+   *
+   * @param mapClassCode mapper alias from the configuration
+   * @return a new Mapper instance, or null if the alias is unknown
+   */
+  protected Mapper getMapper(String mapClassCode) {
+    String classFullName = AliasUtil.getInstance().readAlias(mapClassCode, ALIAS_TYPE.MAPPER, ALIAS_PARAM.KLASS);
+    if (classFullName != null && !classFullName.isEmpty()) {
+      Mapper mapper = (Mapper) LogFeederUtil.getClassInstance(classFullName, ALIAS_TYPE.MAPPER);
+      return mapper;
+    }
+    return null;
+  }
+
+  public void setOutputMgr(OutputMgr outputMgr) {
+    this.outputMgr = outputMgr;
+  }
+
+  public Filter getNextFilter() {
+    return nextFilter;
+  }
+
+  public void setNextFilter(Filter nextFilter) {
+    this.nextFilter = nextFilter;
+  }
+
+  public Input getInput() {
+    return input;
+  }
+
+  public void setInput(Input input) {
+    this.input = input;
+  }
+
+  /**
+   * Deriving classes should implement this at the minimum.
+   *
+   * @param inputStr raw input line
+   * @param inputMarker marker identifying where the line came from
+   */
+  public void apply(String inputStr, InputMarker inputMarker) {
+    // TODO: There is no transformation for string types.
+    if (nextFilter != null) {
+      nextFilter.apply(inputStr, inputMarker);
+    } else {
+      outputMgr.write(inputStr, inputMarker);
+    }
+  }
+
+  public void apply(Map<String, Object> jsonObj, InputMarker inputMarker) {
+    if (postFieldValueMappers.size() > 0) {
+      for (String fieldName : postFieldValueMappers.keySet()) {
+        Object value = jsonObj.get(fieldName);
+        if (value != null) {
+          for (Mapper mapper : postFieldValueMappers.get(fieldName)) {
+            value = mapper.apply(jsonObj, value);
+          }
+        }
+      }
+    }
+    if (nextFilter != null) {
+      nextFilter.apply(jsonObj, inputMarker);
+    } else {
+      outputMgr.write(jsonObj, inputMarker);
+    }
+  }
+
+  /**
+   * Close this filter and cascade the close to the next filter in the chain.
+   */
+  public void close() {
+    if (nextFilter != null) {
+      nextFilter.close();
+    }
+  }
+
+  public void flush() {
+
+  }
+
+  @Override
+  public void logStat() {
+    super.logStat();
+    if (nextFilter != null) {
+      nextFilter.logStat();
+    }
+  }
+
+  @Override
+  public boolean isFieldConditionMatch(String fieldName, String stringValue) {
+    if (!super.isFieldConditionMatch(fieldName, stringValue)) {
+      // Let's try input
+      if (input != null) {
+        return input.isFieldConditionMatch(fieldName, stringValue);
+      } else {
+        return false;
+      }
+    }
+    return true;
+  }
+
+  @Override
+  public String getShortDescription() {
+    // Subclasses are expected to return a meaningful description
+    return null;
+  }
+
+  @Override
+  public boolean logConfgs(Priority level) {
+    if (!super.logConfgs(level)) {
+      return false;
+    }
+    logger.log(level, "input=" + input.getShortDescription());
+    return true;
+  }
+
+  @Override
+  public void addMetricsContainers(List<MetricCount> metricsList) {
+    super.addMetricsContainers(metricsList);
+    if (nextFilter != null) {
+      nextFilter.addMetricsContainers(metricsList);
+    }
+  }
+
+}

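Filter is the extension point of the chain: a concrete filter overrides apply(), transforms the event map, and delegates to super.apply() so the post_map_values mappers run and the event reaches the next filter or the OutputMgr. A minimal sketch of such a subclass (hypothetical class, not part of this commit, assuming the classes above are on the classpath):

    package org.apache.ambari.logfeeder.filter;

    import java.util.Map;

    import org.apache.ambari.logfeeder.input.InputMarker;

    // Hypothetical filter: normalizes the "level" field, then delegates to
    // super.apply(), which runs post_map_values mappers and forwards the event.
    public class FilterUppercaseLevel extends Filter {

      @Override
      public void apply(Map<String, Object> jsonObj, InputMarker inputMarker) {
        Object level = jsonObj.get("level");
        if (level != null) {
          jsonObj.put("level", level.toString().toUpperCase());
        }
        super.apply(jsonObj, inputMarker);
      }
    }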
+ 351 - 0
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/FilterGrok.java

@@ -0,0 +1,351 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logfeeder.filter;
+
+import java.io.BufferedInputStream;
+import java.io.InputStreamReader;
+import java.lang.reflect.Type;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.regex.Pattern;
+
+import oi.thekraken.grok.api.Grok;
+import oi.thekraken.grok.api.exception.GrokException;
+
+import org.apache.ambari.logfeeder.LogFeederUtil;
+import org.apache.ambari.logfeeder.MetricCount;
+import org.apache.ambari.logfeeder.input.InputMarker;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.log4j.Level;
+import org.apache.log4j.Logger;
+
+import com.google.gson.reflect.TypeToken;
+
+public class FilterGrok extends Filter {
+  static private Logger logger = Logger.getLogger(FilterGrok.class);
+
+  private static final String GROK_PATTERN_FILE = "grok-patterns";
+
+  String messagePattern = null;
+  String multilinePattern = null;
+
+  Grok grokMultiline = null;
+  Grok grokMessage = null;
+
+  StringBuilder strBuff = null;
+  String currMultilineJsonStr = null;
+
+  InputMarker firstInputMarker = null;
+  InputMarker savedInputMarker = null;
+
+  String sourceField = null;
+  boolean removeSourceField = true;
+
+  Set<String> namedParamList = new HashSet<String>();
+  Set<String> multiLineNamedParamList = new HashSet<String>();
+
+  Type jsonType = new TypeToken<Map<String, String>>() {
+  }.getType();
+
+  public MetricCount grokErrorMetric = new MetricCount();
+
+  @Override
+  public void init() throws Exception {
+    super.init();
+
+    try {
+      grokErrorMetric.metricsName = "filter.error.grok";
+      // Get the Grok file patterns
+      messagePattern = escapePattern(getStringValue("message_pattern"));
+      multilinePattern = escapePattern(getStringValue("multiline_pattern"));
+      sourceField = getStringValue("source_field");
+      removeSourceField = getBooleanValue("remove_source_field",
+        removeSourceField);
+
+      logger.info("init() done. grokPattern=" + messagePattern
+        + ", multilinePattern=" + multilinePattern + ", "
+        + getShortDescription());
+      if (StringUtils.isEmpty(messagePattern)) {
+        logger.error("message_pattern is not set for filter.");
+        return;
+      }
+      extractNamedParams(messagePattern, namedParamList);
+
+      grokMessage = new Grok();
+      // grokMessage.addPatternFromReader(r);
+      loadPatterns(grokMessage);
+      grokMessage.compile(messagePattern);
+      if (!StringUtils.isEmpty(multilinePattern)) {
+        extractNamedParams(multilinePattern, multiLineNamedParamList);
+
+        grokMultiline = new Grok();
+        loadPatterns(grokMultiline);
+        grokMultiline.compile(multilinePattern);
+      }
+    } catch (Throwable t) {
+      logger.fatal(
+        "Caught exception while initializing Grok. multilinePattern="
+          + multilinePattern + ", messagePattern="
+          + messagePattern, t);
+      grokMessage = null;
+      grokMultiline = null;
+    }
+
+  }
+
+  /**
+   * @param inPattern pattern string from the configuration
+   * @return the pattern with the multiline flag (?m) replaced by dotall (?s)
+   */
+  private String escapePattern(String inPattern) {
+    String inStr = inPattern;
+    if (inStr != null) {
+      if (inStr.contains("(?m)") && !inStr.contains("(?s)")) {
+        // replaceFirst() takes a regex, so the literal flag must be quoted
+        inStr = inStr.replaceFirst(Pattern.quote("(?m)"), "(?s)");
+      }
+      // inStr = inStr.replaceAll("\\[", "\\\\[");
+      // inStr = inStr.replaceAll("\\]", "\\\\]");
+      // inStr = inStr.replaceAll("\\(", "\\\\(");
+      // inStr = inStr.replaceAll("\\)", "\\\\)");
+    }
+    return inStr;
+  }
+
+  private void extractNamedParams(String patternStr, Set<String> paramList) {
+    String grokRegEx = "%\\{" + "(?<name>" + "(?<pattern>[A-z0-9]+)"
+      + "(?::(?<subname>[A-z0-9_:]+))?" + ")" + "(?:=(?<definition>"
+      + "(?:" + "(?:[^{}]+|\\.+)+" + ")+" + ")" + ")?" + "\\}";
+
+    Pattern pattern = Pattern.compile(grokRegEx);
+    java.util.regex.Matcher matcher = pattern.matcher(patternStr);
+    while (matcher.find()) {
+      String subname = matcher.group(3);
+      if (subname != null) {
+        paramList.add(subname);
+      }
+    }
+  }
+
+  private boolean loadPatterns(Grok grok) {
+    InputStreamReader grokPatternsReader = null;
+    logger.info("Loading pattern file " + GROK_PATTERN_FILE);
+    try {
+      BufferedInputStream fileInputStream = (BufferedInputStream) this
+        .getClass().getClassLoader()
+        .getResourceAsStream(GROK_PATTERN_FILE);
+      if (fileInputStream == null) {
+        logger.fatal("Couldn't load grok-patterns file "
+          + GROK_PATTERN_FILE + ". Things will not work");
+        return false;
+      }
+      grokPatternsReader = new InputStreamReader(fileInputStream);
+    } catch (Throwable t) {
+      logger.fatal("Error reading grok-patterns file " + GROK_PATTERN_FILE
+        + " from classpath. Grok filtering will not work.", t);
+      return false;
+    }
+    try {
+      grok.addPatternFromReader(grokPatternsReader);
+    } catch (GrokException e) {
+      logger.fatal(
+        "Error loading patterns from grok-patterns reader for file "
+          + GROK_PATTERN_FILE, e);
+      return false;
+    }
+
+    return true;
+  }
+
+  /*
+   * (non-Javadoc)
+   * 
+   * @see org.apache.ambari.logfeeder.filter.Filter#apply(java.lang.String)
+   */
+  @Override
+  public void apply(String inputStr, InputMarker inputMarker) {
+    if (grokMessage == null) {
+      return;
+    }
+
+    if (grokMultiline != null) {
+      // Check whether this line starts a new log event
+      String jsonStr = grokMultiline.capture(inputStr);
+      if (!"{}".equals(jsonStr)) {
+        // Start of a new event; flush the buffered previous event first
+        if (strBuff != null) {
+          savedInputMarker.beginLineNumber = firstInputMarker.lineNumber;
+          // Construct the JSON object, adding only the named
+          // parameters we are interested in
+          Map<String, Object> jsonObj = Collections
+            .synchronizedMap(new HashMap<String, Object>());
+          try {
+            // Handle message parsing
+            applyMessage(strBuff.toString(), jsonObj,
+              currMultilineJsonStr);
+          } finally {
+            strBuff = null;
+            savedInputMarker = null;
+            firstInputMarker = null;
+          }
+        }
+        currMultilineJsonStr = jsonStr;
+      }
+
+      if (strBuff == null) {
+        strBuff = new StringBuilder();
+        firstInputMarker = inputMarker;
+      } else {
+        // strBuff.append(System.lineSeparator());
+        strBuff.append('\r');
+        strBuff.append('\n');
+      }
+      strBuff.append(inputStr);
+      savedInputMarker = inputMarker;
+    } else {
+      savedInputMarker = inputMarker;
+      Map<String, Object> jsonObj = Collections
+        .synchronizedMap(new HashMap<String, Object>());
+      applyMessage(inputStr, jsonObj, null);
+    }
+  }
+
+  @Override
+  public void apply(Map<String, Object> jsonObj, InputMarker inputMarker) {
+    if (sourceField != null) {
+      savedInputMarker = inputMarker;
+      applyMessage((String) jsonObj.get(sourceField), jsonObj, null);
+      if (removeSourceField) {
+        jsonObj.remove(sourceField);
+      }
+    }
+  }
+
+  /**
+   * @param inputStr raw (possibly multi-line) message
+   * @param jsonObj target object for the extracted named parameters
+   * @param multilineJsonStr capture result of the multiline pattern, if any
+   */
+  private void applyMessage(String inputStr, Map<String, Object> jsonObj,
+                            String multilineJsonStr) {
+    String jsonStr = grokParse(inputStr);
+
+    boolean parseError = false;
+    if ("{}".equals(jsonStr)) {
+      parseError = true;
+      // Error parsing string.
+      logParseError(inputStr);
+
+      if (multilineJsonStr == null) {
+        // TODO: Should we just add this as raw message in solr?
+        return;
+      }
+    }
+
+    if (parseError) {
+      jsonStr = multilineJsonStr;
+    }
+    Map<String, String> jsonSrc = LogFeederUtil.getGson().fromJson(jsonStr,
+      jsonType);
+    for (String namedParam : namedParamList) {
+      if (jsonSrc.get(namedParam) != null) {
+        jsonObj.put(namedParam, jsonSrc.get(namedParam));
+      }
+    }
+    if (parseError) {
+      // Add error tags
+      @SuppressWarnings("unchecked")
+      List<String> tagsList = (List<String>) jsonObj.get("tags");
+      if (tagsList == null) {
+        tagsList = new ArrayList<String>();
+        jsonObj.put("tags", tagsList);
+      }
+      tagsList.add("error_grok_parsing");
+      if (sourceField == null) {
+        // For now, put the raw message in log_message so it
+        // will be searchable
+        jsonObj.put("log_message", inputStr);
+      }
+    }
+
+    super.apply(jsonObj, savedInputMarker);
+    statMetric.count++;
+  }
+
+  public String grokParse(String inputStr) {
+    String jsonStr = grokMessage.capture(inputStr);
+    return jsonStr;
+  }
+
+  private void logParseError(String inputStr) {
+    grokErrorMetric.count++;
+    final String LOG_MESSAGE_KEY = this.getClass().getSimpleName()
+      + "_PARSEERROR";
+    int inputStrLength = inputStr != null ? inputStr.length() : 0;
+    LogFeederUtil.logErrorMessageByInterval(
+      LOG_MESSAGE_KEY,
+      "Error parsing string. length=" + inputStrLength
+        + ", input=" + input.getShortDescription()
+        + ". First upto 100 characters="
+        + LogFeederUtil.subString(inputStr, 100), null, logger,
+      Level.WARN);
+  }
+
+  @Override
+  public void flush() {
+    if (strBuff != null) {
+      // Handle message parsing
+      Map<String, Object> jsonObj = Collections
+        .synchronizedMap(new HashMap<String, Object>());
+      applyMessage(strBuff.toString(), jsonObj, currMultilineJsonStr);
+      strBuff = null;
+      savedInputMarker = null;
+    }
+    super.flush();
+  }
+
+  /*
+   * (non-Javadoc)
+   * 
+   * @see org.apache.ambari.logfeeder.ConfigBlock#getShortDescription()
+   */
+  @Override
+  public String getShortDescription() {
+    return "filter:filter=grok,regex=" + messagePattern;
+  }
+
+  @Override
+  public void addMetricsContainers(List<MetricCount> metricsList) {
+    super.addMetricsContainers(metricsList);
+    metricsList.add(grokErrorMetric);
+  }
+
+  @Override
+  public void logStat() {
+    super.logStat();
+    // Printing stat for grokErrors
+    logStatForMetric(grokErrorMetric, "Stat: Grok Errors");
+
+  }
+
+}

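The capture flow used by FilterGrok can be exercised on its own with the same oi.thekraken Grok calls that appear in loadPatterns() and grokParse() above. A sketch; the pattern names (TIMESTAMP_ISO8601, SPACE, LOGLEVEL, GREEDYDATA) are assumed to be defined in the bundled grok-patterns resource:

    import java.io.InputStreamReader;

    import oi.thekraken.grok.api.Grok;

    public class GrokCaptureDemo {
      public static void main(String[] args) throws Exception {
        Grok grok = new Grok();
        // grok-patterns is the same classpath resource loadPatterns() reads
        grok.addPatternFromReader(new InputStreamReader(
          GrokCaptureDemo.class.getClassLoader().getResourceAsStream("grok-patterns")));
        grok.compile("%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}"
          + "%{SPACE}%{GREEDYDATA:log_message}");
        String json = grok.capture("2016-03-30 10:58:00,130 INFO Server started");
        System.out.println(json); // {"logtime":"2016-03-30 10:58:00,130","level":"INFO",...}
      }
    }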
+ 132 - 0
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/FilterKeyValue.java

@@ -0,0 +1,132 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logfeeder.filter;
+
+import java.util.List;
+import java.util.Map;
+import java.util.StringTokenizer;
+
+import org.apache.ambari.logfeeder.LogFeederUtil;
+import org.apache.ambari.logfeeder.MetricCount;
+import org.apache.ambari.logfeeder.input.InputMarker;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.log4j.Level;
+import org.apache.log4j.Logger;
+
+public class FilterKeyValue extends Filter {
+  static private Logger logger = Logger.getLogger(FilterKeyValue.class);
+
+  String sourceField = null;
+  String valueSplit = "=";
+  String fieldSplit = "\t";
+
+  public MetricCount errorMetric = new MetricCount();
+
+  @Override
+  public void init() throws Exception {
+    super.init();
+    errorMetric.metricsName = "filter.error.keyvalue";
+
+    sourceField = getStringValue("source_field");
+    valueSplit = getStringValue("value_split", valueSplit);
+    fieldSplit = getStringValue("field_split", fieldSplit);
+
+    logger.info("init() done. source_field=" + sourceField
+      + ", value_split=" + valueSplit + ", " + ", field_split="
+      + fieldSplit + ", " + getShortDescription());
+    if (StringUtils.isEmpty(sourceField)) {
+      logger.fatal("source_field is not set for filter. This filter will not be applied");
+      return;
+    }
+
+  }
+
+  /*
+   * (non-Javadoc)
+   * 
+   * @see org.apache.ambari.logfeeder.filter.Filter#apply(java.lang.String)
+   */
+  @Override
+  public void apply(String inputStr, InputMarker inputMarker) {
+    apply(LogFeederUtil.toJSONObject(inputStr), inputMarker);
+  }
+
+  @Override
+  public void apply(Map<String, Object> jsonObj, InputMarker inputMarker) {
+    if (sourceField == null) {
+      return;
+    }
+    Object valueObj = jsonObj.get(sourceField);
+    if (valueObj != null) {
+      StringTokenizer fieldTokenizer = new StringTokenizer(
+        valueObj.toString(), fieldSplit);
+      while (fieldTokenizer.hasMoreTokens()) {
+        String nv = fieldTokenizer.nextToken();
+        StringTokenizer nvTokenizer = new StringTokenizer(nv,
+          valueSplit);
+        while (nvTokenizer.hasMoreTokens()) {
+          String name = nvTokenizer.nextToken();
+          if (nvTokenizer.hasMoreTokens()) {
+            String value = nvTokenizer.nextToken();
+            jsonObj.put(name, value);
+          } else {
+            // Unbalanced name value pairs
+            logParseError("name=" + name + ", pair=" + nv
+              + ", field=" + sourceField + ", field_value="
+              + valueObj);
+          }
+        }
+      }
+    }
+    super.apply(jsonObj, inputMarker);
+    statMetric.count++;
+  }
+
+  private void logParseError(String errorDetail) {
+    errorMetric.count++;
+    final String LOG_MESSAGE_KEY = this.getClass().getSimpleName()
+      + "_PARSEERROR";
+    LogFeederUtil
+      .logErrorMessageByInterval(
+        LOG_MESSAGE_KEY,
+        "Error parsing key-value pairs. input="
+          + input.getShortDescription() + ", "
+          + LogFeederUtil.subString(errorDetail, 100), null, logger,
+        Level.ERROR);
+  }
+
+  /*
+   * (non-Javadoc)
+   * 
+   * @see org.apache.ambari.logfeeder.ConfigBlock#getShortDescription()
+   */
+  @Override
+  public String getShortDescription() {
+    return "filter:filter=keyvalue,regex=" + sourceField;
+  }
+
+  @Override
+  public void addMetricsContainers(List<MetricCount> metricsList) {
+    super.addMetricsContainers(metricsList);
+    metricsList.add(errorMetric);
+  }
+
+}

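The two nested StringTokenizer loops in apply() amount to splitting the field value on field_split, then each pair on value_split. A standalone sketch with the default separators (tab and '='):

    import java.util.StringTokenizer;

    public class KeyValueDemo {
      public static void main(String[] args) {
        String fieldValue = "user=admin\tip=10.0.0.1\tresult=success";
        StringTokenizer fields = new StringTokenizer(fieldValue, "\t");
        while (fields.hasMoreTokens()) {
          StringTokenizer nv = new StringTokenizer(fields.nextToken(), "=");
          // each balanced pair contributes one extra field on the event
          System.out.println(nv.nextToken() + " -> " + nv.nextToken());
        }
      }
    }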
+ 49 - 0
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/JSONFilterCode.java

@@ -0,0 +1,49 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logfeeder.filter;
+
+import java.util.Map;
+
+import org.apache.ambari.logfeeder.LogFeederUtil;
+import org.apache.ambari.logfeeder.input.InputMarker;
+import org.apache.log4j.Logger;
+
+
+public class JSONFilterCode extends Filter {
+  private static Logger logger = Logger.getLogger(JSONFilterCode.class);
+
+  @Override
+  public void apply(String inputStr, InputMarker inputMarker) {
+    Map<String, Object> jsonMap = LogFeederUtil.toJSONObject(inputStr);
+    // linenumber
+    Double lineNumberD = (Double) jsonMap.get("line_number");
+    if (lineNumberD != null) {
+      long lineNumber = lineNumberD.longValue();
+      jsonMap.put("line_number", lineNumber);
+    }
+    // logtime
+    String timeStampStr = (String) jsonMap.get("logtime");
+    if (timeStampStr != null && !timeStampStr.isEmpty()) {
+      String logtime = LogFeederUtil.getDate(timeStampStr);
+      jsonMap.put("logtime", logtime);
+    }
+    super.apply(jsonMap, inputMarker);
+  }
+
+}

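The Double cast in apply() exists because Gson, when deserializing into a generic Map, represents every JSON number as java.lang.Double; integral fields therefore need an explicit conversion back to long. A minimal demonstration, assuming Gson is on the classpath (as it is for LogFeederUtil.getGson()):

    import java.util.Map;

    import com.google.gson.Gson;

    public class GsonNumberDemo {
      public static void main(String[] args) {
        Map<?, ?> map = new Gson().fromJson("{\"line_number\": 42}", Map.class);
        Object n = map.get("line_number");
        System.out.println(n.getClass().getSimpleName() + " = " + n); // Double = 42.0
      }
    }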
+ 321 - 0
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/Input.java

@@ -0,0 +1,321 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logfeeder.input;
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.ambari.logfeeder.ConfigBlock;
+import org.apache.ambari.logfeeder.InputMgr;
+import org.apache.ambari.logfeeder.MetricCount;
+import org.apache.ambari.logfeeder.OutputMgr;
+import org.apache.ambari.logfeeder.filter.Filter;
+import org.apache.ambari.logfeeder.output.Output;
+import org.apache.log4j.Logger;
+
+public abstract class Input extends ConfigBlock implements Runnable {
+  static private Logger logger = Logger.getLogger(Input.class);
+
+  OutputMgr outputMgr;
+  InputMgr inputMgr;
+
+  List<Output> outputList = new ArrayList<Output>();
+
+  Filter firstFilter = null;
+  Thread thread;
+  private boolean isClosed = false;
+  String filePath = null;
+  String type = null;
+
+  boolean tail = true;
+  boolean useEventMD5 = false;
+  boolean genEventMD5 = true;
+
+  public MetricCount readBytesMetric = new MetricCount();
+
+  /**
+   * This method is called from the thread spawned for this input. It
+   * should only return after all data has been read from the source or
+   * the process is exiting.
+   *
+   * @throws Exception
+   */
+  abstract void start() throws Exception;
+
+  @Override
+  public void init() throws Exception {
+    super.init();
+    tail = getBooleanValue("tail", tail);
+    useEventMD5 = getBooleanValue("use_event_md5_as_id", useEventMD5);
+    genEventMD5 = getBooleanValue("gen_event_md5", genEventMD5);
+
+    if (firstFilter != null) {
+      firstFilter.init();
+    }
+  }
+
+  @Override
+  public String getNameForThread() {
+    if (filePath != null) {
+      try {
+        return (type + "=" + (new File(filePath)).getName());
+      } catch (Throwable ex) {
+        logger.warn("Couldn't get basename for filePath=" + filePath,
+          ex);
+      }
+    }
+    return super.getNameForThread() + ":" + type;
+  }
+
+  /*
+   * (non-Javadoc)
+   * 
+   * @see java.lang.Runnable#run()
+   */
+  @Override
+  public void run() {
+    try {
+      logger.info("Started to monitor. " + getShortDescription());
+      start();
+    } catch (Exception e) {
+      logger.error("Error writing to output.", e);
+    }
+    logger.info("Exiting thread. " + getShortDescription());
+  }
+
+  public void outputLine(String line, InputMarker marker) {
+    statMetric.count++;
+    readBytesMetric.count += (line.length());
+
+    if (firstFilter != null) {
+      firstFilter.apply(line, marker);
+    } else {
+      // TODO: For now, a filter is mandatory, so that no one
+      // accidentally forgets to configure one
+      // outputMgr.write(line, this);
+    }
+  }
+
+  /**
+   * Flush any buffered lines through the filter chain.
+   */
+  public void flush() {
+    if (firstFilter != null) {
+      firstFilter.flush();
+    }
+  }
+
+  public boolean monitor() {
+    if (isReady()) {
+      logger.info("Starting thread. " + getShortDescription());
+      thread = new Thread(this, getNameForThread());
+      thread.start();
+      return true;
+    } else {
+      return false;
+    }
+  }
+
+  public void checkIn(InputMarker inputMarker) {
+    // Default implementation is to ignore.
+  }
+
+  /**
+   * This is generally used for the final check-in, when the input is closing.
+   */
+  public void checkIn() {
+
+  }
+
+  /**
+   * @return true if the input source is available for monitoring
+   */
+  public boolean isReady() {
+    return true;
+  }
+
+  public boolean isTail() {
+    return tail;
+  }
+
+  public void setTail(boolean tail) {
+    this.tail = tail;
+  }
+
+  public boolean isUseEventMD5() {
+    return useEventMD5;
+  }
+
+  public void setUseEventMD5(boolean useEventMD5) {
+    this.useEventMD5 = useEventMD5;
+  }
+
+  public boolean isGenEventMD5() {
+    return genEventMD5;
+  }
+
+  public void setGenEventMD5(boolean genEventMD5) {
+    this.genEventMD5 = genEventMD5;
+  }
+
+  @Override
+  public void setDrain(boolean drain) {
+    logger.info("Request to drain. " + getShortDescription());
+    super.setDrain(drain);
+    ;
+    try {
+      thread.interrupt();
+    } catch (Throwable t) {
+      // ignore
+    }
+  }
+
+  public Filter getFirstFilter() {
+    return firstFilter;
+  }
+
+  public void setFirstFilter(Filter filter) {
+    firstFilter = filter;
+  }
+
+  public void setInputMgr(InputMgr inputMgr) {
+    this.inputMgr = inputMgr;
+  }
+
+  public void setOutputMgr(OutputMgr outputMgr) {
+    this.outputMgr = outputMgr;
+  }
+
+  public String getFilePath() {
+    return filePath;
+  }
+
+  public void setFilePath(String filePath) {
+    this.filePath = filePath;
+  }
+
+  public void close() {
+    logger.info("Close called. " + getShortDescription());
+
+    try {
+      if (firstFilter != null) {
+        firstFilter.close();
+      } else {
+        outputMgr.close();
+      }
+    } catch (Throwable t) {
+      // Ignore
+    }
+    isClosed = true;
+  }
+
+  public void setClosed(boolean isClosed) {
+    this.isClosed = isClosed;
+  }
+
+  public boolean isClosed() {
+    return isClosed;
+  }
+
+  @Override
+  public void loadConfig(Map<String, Object> map) {
+    super.loadConfig(map);
+    String typeValue = getStringValue("type");
+    if (typeValue != null) {
+      // Explicitly add type and value to field list
+      contextFields.put("type", typeValue);
+      @SuppressWarnings("unchecked")
+      Map<String, Object> addFields = (Map<String, Object>) map
+        .get("add_fields");
+      if (addFields == null) {
+        addFields = new HashMap<String, Object>();
+        map.put("add_fields", addFields);
+      }
+      addFields.put("type", typeValue);
+    }
+  }
+
+  @Override
+  public String getShortDescription() {
+    // Subclasses are expected to return a meaningful description
+    return null;
+  }
+
+  @Override
+  public void logStat() {
+    super.logStat();
+    logStatForMetric(readBytesMetric, "Stat: Bytes Read");
+
+    if (firstFilter != null) {
+      firstFilter.logStat();
+    }
+  }
+
+  @Override
+  public String toString() {
+    return getShortDescription();
+  }
+
+  /**
+   * Roll over the underlying source. Only some inputs support it, e.g. InputFile.
+   */
+  public void rollOver() {
+    // Default is a no-op
+  }
+
+  public String getType() {
+    return type;
+  }
+
+  public void setType(String type) {
+    this.type = type;
+  }
+
+  public Date getEventTime() {
+    return null;
+  }
+
+  public List<Output> getOutputList() {
+    return outputList;
+  }
+
+  /**
+   * @param output output to add to this input's output list
+   */
+  public void addOutput(Output output) {
+    outputList.add(output);
+  }
+
+  /**
+   * @param metricsList list to collect the metric containers into
+   */
+  public void addMetricsContainers(List<MetricCount> metricsList) {
+    super.addMetricsContainers(metricsList);
+    if (firstFilter != null) {
+      firstFilter.addMetricsContainers(metricsList);
+    }
+    metricsList.add(readBytesMetric);
+  }
+
+}

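setDrain(), isClosed() and the wait loop in OutputMgr.close() form a simple drain/close handshake: the owner raises the drain flag, the worker notices it, finishes, and flags itself closed. A self-contained sketch of the pattern (names are illustrative, not from this commit):

    public class DrainDemo implements Runnable {
      private volatile boolean drain = false;
      private volatile boolean closed = false;

      public void setDrain(boolean d) { drain = d; }
      public boolean isClosed() { return closed; }

      @Override
      public void run() {
        while (!drain) {
          // read/process one unit of work here
          try { Thread.sleep(100); } catch (InterruptedException e) { break; }
        }
        closed = true; // lets a close() wait loop, like OutputMgr's, finish
      }

      public static void main(String[] args) throws Exception {
        DrainDemo worker = new DrainDemo();
        Thread t = new Thread(worker);
        t.start();
        worker.setDrain(true);
        t.join();
        System.out.println("closed=" + worker.isClosed());
      }
    }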
+ 562 - 0
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputFile.java

@@ -0,0 +1,562 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logfeeder.input;
+
+import java.io.BufferedReader;
+import java.io.EOFException;
+import java.io.File;
+import java.io.FileFilter;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.RandomAccessFile;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.nio.file.attribute.BasicFileAttributes;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.ambari.logfeeder.LogFeederUtil;
+import org.apache.ambari.logfeeder.input.reader.LogsearchReaderFactory;
+import org.apache.commons.io.filefilter.WildcardFileFilter;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.log4j.Level;
+import org.apache.log4j.Logger;
+import org.apache.solr.common.util.Base64;
+
+public class InputFile extends Input {
+  static private Logger logger = Logger.getLogger(InputFile.class);
+
+  // String startPosition = "beginning";
+  String logPath = null;
+  boolean isStartFromBeginning = true;
+
+  boolean isReady = false;
+  File[] logPathFiles = null;
+  Object fileKey = null;
+  String base64FileKey = null;
+
+  private boolean isRolledOver = false;
+  boolean addWildCard = false;
+
+  long lastCheckPointTimeMS = 0;
+  int checkPointIntervalMS = 5 * 1000; // 5 seconds
+  RandomAccessFile checkPointWriter = null;
+  Map<String, Object> jsonCheckPoint = null;
+
+  File checkPointFile = null;
+
+  private InputMarker lastCheckPointInputMarker = null;
+
+  private String checkPointExtension = ".cp";
+
+  @Override
+  public void init() throws Exception {
+    logger.info("init() called");
+    statMetric.metricsName = "input.files.read_lines";
+    readBytesMetric.metricsName = "input.files.read_bytes";
+    checkPointExtension = LogFeederUtil.getStringProperty(
+      "logfeeder.checkpoint.extension", checkPointExtension);
+
+    // Mark the input closed; it is reopened once monitoring actually starts
+    setClosed(true);
+    logPath = getStringValue("path");
+    tail = getBooleanValue("tail", tail);
+    addWildCard = getBooleanValue("add_wild_card", addWildCard);
+    checkPointIntervalMS = getIntValue("checkpoint.interval.ms",
+      checkPointIntervalMS);
+
+    if (logPath == null || logPath.isEmpty()) {
+      logger.error("path is empty for file input. "
+        + getShortDescription());
+      return;
+    }
+
+    String startPosition = getStringValue("start_position");
+    if (StringUtils.isEmpty(startPosition)
+      || startPosition.equalsIgnoreCase("beginning")
+      || startPosition.equalsIgnoreCase("begining")) {
+      isStartFromBegining = true;
+    }
+
+    if (!tail) {
+      // start position end doesn't apply if we are not tailing
+      isStartFromBegining = true;
+    }
+
+    setFilePath(logPath);
+    boolean isFileReady = isReady();
+
+    logger.info("File to monitor " + logPath + ", tail=" + tail
+      + ", addWildCard=" + addWildCard + ", isReady=" + isFileReady);
+
+    super.init();
+  }
+
+  /*
+   * (non-Javadoc)
+   * 
+   * @see org.apache.ambari.logfeeder.input.Input#isReady()
+   */
+  @Override
+  public boolean isReady() {
+    if (!isReady) {
+      // Let's try to check whether the file is available
+      logPathFiles = getActualFiles(logPath);
+      if (logPathFiles != null && logPathFiles.length > 0
+        && logPathFiles[0].isFile()) {
+
+        if (isTail() && logPathFiles.length > 1) {
+          logger.warn("Found multiple files (" + logPathFiles.length
+            + ") for the file filter " + filePath
+            + ". Will use only the first one. Using "
+            + logPathFiles[0].getAbsolutePath());
+        }
+        logger.info("File filter " + filePath + " expanded to "
+          + logPathFiles[0].getAbsolutePath());
+        isReady = true;
+      } else {
+        logger.debug(logPath + " file doesn't exist. Ignoring for now");
+      }
+    }
+    return isReady;
+  }
+
+  private File[] getActualFiles(String searchPath) {
+    if (addWildCard) {
+      if (!searchPath.endsWith("*")) {
+        searchPath = searchPath + "*";
+      }
+    }
+    File checkFile = new File(searchPath);
+    if (checkFile.isFile()) {
+      return new File[]{checkFile};
+    }
+    // Let's do wild card search
+    // First check current folder
+    File checkFiles[] = findFileForWildCard(searchPath, new File("."));
+    if (checkFiles == null || checkFiles.length == 0) {
+      // Let's check from the parent folder
+      File parentDir = (new File(searchPath)).getParentFile();
+      if (parentDir != null) {
+        String wildCard = (new File(searchPath)).getName();
+        checkFiles = findFileForWildCard(wildCard, parentDir);
+      }
+    }
+    return checkFiles;
+  }
+
+  private File[] findFileForWildCard(String searchPath, File dir) {
+    logger.debug("findFileForWildCard(). filePath=" + searchPath + ", dir="
+      + dir + ", dir.fullpath=" + dir.getAbsolutePath());
+    FileFilter fileFilter = new WildcardFileFilter(searchPath);
+    return dir.listFiles(fileFilter);
+  }
+
+  @Override
+  synchronized public void checkIn(InputMarker inputMarker) {
+    super.checkIn(inputMarker);
+    if (checkPointWriter != null) {
+      try {
+        int lineNumber = LogFeederUtil.objectToInt(
+          jsonCheckPoint.get("line_number"), 0, "line_number");
+        if (lineNumber > inputMarker.lineNumber) {
+          // Already wrote higher line number for this input
+          return;
+        }
+        // Write only if the checkpoint interval has elapsed since the last write
+        long currMS = System.currentTimeMillis();
+        if (!isClosed()
+          && (currMS - lastCheckPointTimeMS) < checkPointIntervalMS) {
+          // Let's save this one so we can update the check point file
+          // on flush
+          lastCheckPointInputMarker = inputMarker;
+          return;
+        }
+        lastCheckPointTimeMS = currMS;
+
+        jsonCheckPoint.put("line_number", ""
+          + new Integer(inputMarker.lineNumber));
+        jsonCheckPoint.put("last_write_time_ms", "" + new Long(currMS));
+        jsonCheckPoint.put("last_write_time_date", new Date());
+
+        String jsonStr = LogFeederUtil.getGson().toJson(jsonCheckPoint);
+
+        // Let's rewind
+        checkPointWriter.seek(0);
+        checkPointWriter.writeInt(jsonStr.length());
+        checkPointWriter.write(jsonStr.getBytes());
+
+        if (isClosed()) {
+          final String LOG_MESSAGE_KEY = this.getClass()
+            .getSimpleName() + "_FINAL_CHECKIN";
+          LogFeederUtil.logErrorMessageByInterval(
+            LOG_MESSAGE_KEY,
+            "Wrote final checkPoint, input="
+              + getShortDescription()
+              + ", checkPointFile="
+              + checkPointFile.getAbsolutePath()
+              + ", checkPoint=" + jsonStr, null, logger,
+            Level.INFO);
+        }
+      } catch (Throwable t) {
+        final String LOG_MESSAGE_KEY = this.getClass().getSimpleName()
+          + "_CHECKIN_EXCEPTION";
+        LogFeederUtil
+          .logErrorMessageByInterval(LOG_MESSAGE_KEY,
+            "Caught exception checkIn. , input="
+              + getShortDescription(), t, logger,
+            Level.ERROR);
+      }
+    }
+
+  }
+
+  @Override
+  public void checkIn() {
+    super.checkIn();
+    if (lastCheckPointInputMarker != null) {
+      checkIn(lastCheckPointInputMarker);
+    }
+  }
+
+  @Override
+  public void rollOver() {
+    logger.info("Marking this input file for rollover. "
+      + getShortDescription());
+    isRolledOver = true;
+  }
+
+  /*
+   * (non-Javadoc)
+   * 
+   * @see org.apache.ambari.logfeeder.input.Input#monitor()
+   */
+  @Override
+  void start() throws Exception {
+    if (logPathFiles == null || logPathFiles.length == 0) {
+      return;
+    }
+
+    if (isTail()) {
+      // Just process the first file
+      processFile(logPathFiles[0]);
+    } else {
+      for (File file : logPathFiles) {
+        try {
+          processFile(file);
+          if (isClosed() || isDrain()) {
+            logger.info("isClosed or isDrain. Now breaking loop.");
+            break;
+          }
+        } catch (Throwable t) {
+          logger.error(
+            "Error processing file=" + file.getAbsolutePath(),
+            t);
+        }
+      }
+    }
+    // Call the close for the input. Which should flush to the filters and
+    // output
+    close();
+  }
+
+  @Override
+  public void close() {
+    super.close();
+    logger.info("close() calling checkPoint checkIn(). "
+      + getShortDescription());
+    checkIn();
+  }
+
+  private void processFile(File logPathFile) throws FileNotFoundException,
+    IOException {
+    logger.info("Monitoring logPath=" + logPath + ", logPathFile="
+      + logPathFile);
+    BufferedReader br = null;
+    checkPointFile = null;
+    checkPointWriter = null;
+    jsonCheckPoint = null;
+    int resumeFromLineNumber = 0;
+
+    int lineCount = 0;
+    try {
+      setFilePath(logPathFile.getAbsolutePath());
+//      br = new BufferedReader(new FileReader(logPathFile));
+      br = new BufferedReader(LogsearchReaderFactory.INSTANCE.getReader(logPathFile));
+
+      // Whether to send to output from the beginning.
+      boolean resume = isStartFromBeginning;
+
+      // Seems FileWatch is not reliable, so let's only use file key
+      // comparison
+      // inputMgr.monitorSystemFileChanges(this);
+      fileKey = getFileKey(logPathFile);
+      base64FileKey = Base64.byteArrayToBase64(fileKey.toString()
+        .getBytes());
+      logger.info("fileKey=" + fileKey + ", base64=" + base64FileKey
+        + ". " + getShortDescription());
+
+      if (isTail()) {
+        try {
+          // Let's see if there is a checkpoint for this file
+          logger.info("Checking existing checkpoint file. "
+            + getShortDescription());
+
+          String fileBase64 = Base64.byteArrayToBase64(fileKey
+            .toString().getBytes());
+          String checkPointFileName = fileBase64
+            + checkPointExtension;
+          File checkPointFolder = inputMgr.getCheckPointFolderFile();
+          checkPointFile = new File(checkPointFolder,
+            checkPointFileName);
+          checkPointWriter = new RandomAccessFile(checkPointFile,
+            "rw");
+
+          try {
+            int contentSize = checkPointWriter.readInt();
+            byte b[] = new byte[contentSize];
+            int readSize = checkPointWriter.read(b, 0, contentSize);
+            if (readSize != contentSize) {
+              logger.error("Couldn't read expected number of bytes from checkpoint file. expected="
+                + contentSize
+                + ", read="
+                + readSize
+                + ", checkPointFile="
+                + checkPointFile
+                + ", input=" + getShortDescription());
+            } else {
+              // Create JSON string
+              String jsonCheckPointStr = new String(b, 0,
+                readSize);
+              jsonCheckPoint = LogFeederUtil
+                .toJSONObject(jsonCheckPointStr);
+
+              resumeFromLineNumber = LogFeederUtil.objectToInt(
+                jsonCheckPoint.get("line_number"), 0,
+                "line_number");
+
+              if (resumeFromLineNumber > 0) {
+                // Let's read from last line read
+                resume = false;
+              }
+              logger.info("CheckPoint. checkPointFile="
+                + checkPointFile + ", json="
+                + jsonCheckPointStr
+                + ", resumeFromLineNumber="
+                + resumeFromLineNumber + ", resume="
+                + resume);
+            }
+          } catch (EOFException eofEx) {
+            logger.info("EOFException. Will reset checkpoint file "
+              + checkPointFile.getAbsolutePath() + " for "
+              + getShortDescription());
+          }
+          if (jsonCheckPoint == null) {
+            // This seems to be first time, so creating the initial
+            // checkPoint object
+            jsonCheckPoint = new HashMap<String, Object>();
+            jsonCheckPoint.put("file_path", filePath);
+            jsonCheckPoint.put("file_key", fileBase64);
+          }
+
+        } catch (Throwable t) {
+          logger.error(
+            "Error while configuring checkpoint file. Will reset file. checkPointFile="
+              + checkPointFile, t);
+        }
+      }
+
+      setClosed(false);
+      int sleepStep = 2;
+      int sleepIteration = 0;
+      while (true) {
+        try {
+          if (isDrain()) {
+            break;
+          }
+
+          String line = br.readLine();
+          if (line == null) {
+            if (!resume) {
+              resume = true;
+            }
+            sleepIteration++;
+            try {
+              // Since the FileWatch service is not reliable, check the
+              // file inode every few seconds once writes have stopped
+              if (sleepIteration > 4) {
+                Object newFileKey = getFileKey(logPathFile);
+                if (newFileKey != null) {
+                  if (fileKey == null
+                    || !newFileKey.equals(fileKey)) {
+                    logger.info("File key is different. Calling rollover. oldKey="
+                      + fileKey
+                      + ", newKey="
+                      + newFileKey
+                      + ". "
+                      + getShortDescription());
+                    // File has rotated.
+                    rollOver();
+                  }
+                }
+              }
+              // Flush on the second iteration
+              if (!tail && sleepIteration >= 2) {
+                logger.info("End of file. Done with filePath="
+                  + logPathFile.getAbsolutePath()
+                  + ", lineCount=" + lineCount);
+                flush();
+                break;
+              } else if (sleepIteration == 2) {
+                flush();
+              } else if (sleepIteration >= 2) {
+                if (isRolledOver) {
+                  isRolledOver = false;
+                  // Close existing file
+                  try {
+                    logger.info("File is rolled over. Closing current open file."
+                      + getShortDescription()
+                      + ", lineCount=" + lineCount);
+                    br.close();
+                  } catch (Exception ex) {
+                    logger.error("Error closing file"
+                      + getShortDescription());
+                    break;
+                  }
+                  try {
+                    // Open new file
+                    logger.info("Opening new rolled over file."
+                      + getShortDescription());
+//                    br = new BufferedReader(new FileReader(
+//                            logPathFile));
+                    br = new BufferedReader(LogsearchReaderFactory.
+                      INSTANCE.getReader(logPathFile));
+                    lineCount = 0;
+                    fileKey = getFileKey(logPathFile);
+                    base64FileKey = Base64
+                      .byteArrayToBase64(fileKey
+                        .toString().getBytes());
+                    logger.info("fileKey=" + fileKey
+                      + ", base64=" + base64FileKey
+                      + ", " + getShortDescription());
+                  } catch (Exception ex) {
+                    logger.error("Error opening rolled over file. "
+                      + getShortDescription());
+                    // Add this input back to monitoring and exit this thread
+                    logger.info("Added input to not ready list."
+                      + getShortDescription());
+                    isReady = false;
+                    inputMgr.addToNotReady(this);
+                    break;
+                  }
+                  logger.info("File is successfully rolled over. "
+                    + getShortDescription());
+                  continue;
+                }
+              }
+              Thread.sleep(sleepStep * 1000);
+              sleepStep = (sleepStep * 2);
+              sleepStep = sleepStep > 10 ? 10 : sleepStep;
+            } catch (InterruptedException e) {
+              logger.info("Thread interrupted."
+                + getShortDescription());
+            }
+          } else {
+            lineCount++;
+            sleepStep = 1;
+            sleepIteration = 0;
+
+            if (!resume && lineCount > resumeFromLineNumber) {
+              logger.info("Resuming to read from last line. lineCount="
+                + lineCount
+                + ", input="
+                + getShortDescription());
+              resume = true;
+            }
+            if (resume) {
+              InputMarker marker = new InputMarker();
+              marker.fileKey = fileKey;
+              marker.base64FileKey = base64FileKey;
+              marker.filePath = filePath;
+              marker.input = this;
+              marker.lineNumber = lineCount;
+              outputLine(line, marker);
+            }
+          }
+        } catch (Throwable t) {
+          final String LOG_MESSAGE_KEY = this.getClass()
+            .getSimpleName() + "_READ_LOOP_EXCEPTION";
+          LogFeederUtil.logErrorMessageByInterval(LOG_MESSAGE_KEY,
+            "Caught exception in read loop. lineNumber="
+              + lineCount + ", input="
+              + getShortDescription(), t, logger,
+            Level.ERROR);
+
+        }
+      }
+    } finally {
+      if (br != null) {
+        logger.info("Closing reader." + getShortDescription()
+          + ", lineCount=" + lineCount);
+        try {
+          br.close();
+        } catch (Throwable t) {
+          // ignore
+        }
+      }
+    }
+  }
+
+  /**
+   * @param file file to derive a stable key for
+   * @return the file system's file key (e.g. inode), or the file path as a fallback
+   */
+  static public Object getFileKey(File file) {
+    try {
+      Path fileFullPath = Paths.get(file.getAbsolutePath());
+      if (fileFullPath != null) {
+        BasicFileAttributes basicAttr = Files.readAttributes(
+          fileFullPath, BasicFileAttributes.class);
+        return basicAttr.fileKey();
+      }
+    } catch (Throwable ex) {
+      logger.error("Error getting file attributes for file=" + file, ex);
+    }
+    return file.toString();
+  }
+
+  /*
+   * (non-Javadoc)
+   * 
+   * @see org.apache.ambari.logfeeder.input.Input#getShortDescription()
+   */
+  @Override
+  public String getShortDescription() {
+    return "input:source="
+      + getStringValue("source")
+      + ", path="
+      + (logPathFiles != null && logPathFiles.length > 0 ? logPathFiles[0]
+      .getAbsolutePath() : getStringValue("path"));
+  }
+}

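The checkpoint file written by checkIn() has a simple layout: a 4-byte big-endian length from writeInt(), followed by that many bytes of JSON. A sketch of a dump utility for such a file (hypothetical, not part of this commit):

    import java.io.RandomAccessFile;

    public class CheckpointDump {
      public static void main(String[] args) throws Exception {
        // args[0]: path to a .cp file under the checkpoint folder
        try (RandomAccessFile raf = new RandomAccessFile(args[0], "r")) {
          int len = raf.readInt();       // length written by writeInt()
          byte[] buf = new byte[len];
          raf.readFully(buf);
          // e.g. {"file_path":"...","file_key":"...","line_number":"123",...}
          System.out.println(new String(buf));
        }
      }
    }

Because checkIn() rewinds with seek(0) before each write, bytes from an older, longer record may trail the JSON; the length prefix is what keeps reads correct.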
+ 39 - 0
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputMarker.java

@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logfeeder.input;
+
+/**
+ * Holds the file key (inode) and the line number of the log line currently being read
+ */
+public class InputMarker {
+  public int lineNumber = 0;
+  public int beginLineNumber = 0;
+  public Input input;
+  public String filePath;
+  public Object fileKey = null;
+  public String base64FileKey = null;
+
+  @Override
+  public String toString() {
+    return "InputMarker [lineNumber=" + lineNumber + ", input="
+      + input.getShortDescription() + "]";
+  }
+
+}

+ 81 - 0
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/reader/GZIPReader.java

@@ -0,0 +1,81 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logfeeder.input.reader;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.util.zip.GZIPInputStream;
+
+import org.apache.log4j.Logger;
+
+public class GZIPReader extends InputStreamReader {
+
+  private static Logger logger = Logger.getLogger(GZIPReader.class);
+
+  public GZIPReader(String fileName) throws FileNotFoundException {
+    super(getStream(fileName));
+    logger.info("Created GZIPReader for file : " + fileName);
+  }
+
+  public GZIPReader(File file) throws FileNotFoundException {
+    // Use the full path; getName() alone would resolve against the working directory
+    super(getStream(file.getAbsolutePath()));
+  }
+
+  private static InputStream getStream(String fileName) {
+    InputStream gzipStream = null;
+    InputStream fileStream = null;
+    try {
+      fileStream = new FileInputStream(fileName);
+      gzipStream = new GZIPInputStream(fileStream);
+    } catch (Exception e) {
+      logger.error(e, e.getCause());
+    }
+    return gzipStream;
+  }
+
+  /**
+   * Validate the file based on the gzip magic number (0x1f 0x8b).
+   *
+   * @param fileName file to check
+   * @return true if the file starts with the gzip signature
+   */
+  public static boolean isValidFile(String fileName) {
+    // TODO make it generic and put in factory itself
+    InputStream is = null;
+    try {
+      is = new FileInputStream(fileName);
+      byte[] signature = new byte[2];
+      int nread = is.read(signature); // read the gzip signature
+      return nread == 2 && signature[0] == (byte) 0x1f && signature[1] == (byte) 0x8b;
+    } catch (IOException e) {
+      return false;
+    } finally {
+      if (is != null) {
+        try {
+          is.close();
+        } catch (IOException e) {
+        }
+      }
+    }
+  }
+}
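
For illustration only (not part of this commit), a minimal sketch of the magic-number check above; the file names and contents are invented for the example, and GZIPReader is assumed to be on the classpath:

import java.io.FileOutputStream;
import java.io.FileWriter;
import java.util.zip.GZIPOutputStream;

import org.apache.ambari.logfeeder.input.reader.GZIPReader;

public class GZIPReaderDemo {
  public static void main(String[] args) throws Exception {
    // Write a tiny gzip file and a plain file so the check has input
    try (GZIPOutputStream gz = new GZIPOutputStream(new FileOutputStream("sample.log.gz"))) {
      gz.write("2016-04-05 INFO test line\n".getBytes("UTF-8"));
    }
    try (FileWriter plain = new FileWriter("sample.log")) {
      plain.write("2016-04-05 INFO test line\n");
    }
    // isValidFile() reads only the first two bytes and compares them to 0x1f 0x8b
    System.out.println(GZIPReader.isValidFile("sample.log.gz")); // true
    System.out.println(GZIPReader.isValidFile("sample.log"));    // false
  }
}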

+ 48 - 0
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/reader/LogsearchReaderFactory.java

@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logfeeder.input.reader;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileReader;
+import java.io.Reader;
+
+import org.apache.log4j.Logger;
+
+public enum LogsearchReaderFactory {
+  INSTANCE;
+  private static Logger logger = Logger
+    .getLogger(LogsearchReaderFactory.class);
+
+  /**
+   * @param file log file to open
+   * @return a GZIPReader for gzip files, otherwise a plain FileReader
+   * @throws FileNotFoundException if the file cannot be opened
+   */
+  public Reader getReader(File file) throws FileNotFoundException {
+    logger.debug("Inside reader factory for file: " + file);
+    if (GZIPReader.isValidFile(file.getAbsolutePath())) {
+      logger.info("Reading file " + file + " as gzip file");
+      return new GZIPReader(file.getAbsolutePath());
+    } else {
+      return new FileReader(file);
+    }
+  }
+
+}
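
A hedged usage sketch of the factory singleton (the sample file name is hypothetical): it hands back a decompressing reader for gzip input and a plain FileReader for everything else.

import java.io.BufferedReader;
import java.io.File;
import java.io.Reader;

import org.apache.ambari.logfeeder.input.reader.LogsearchReaderFactory;

public class ReaderFactoryDemo {
  public static void main(String[] args) throws Exception {
    // The factory probes the gzip magic number and picks the reader type
    Reader reader = LogsearchReaderFactory.INSTANCE.getReader(new File("sample.log.gz"));
    BufferedReader br = new BufferedReader(reader);
    try {
      System.out.println(br.readLine()); // first (decompressed) line
    } finally {
      br.close();
    }
  }
}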

+ 171 - 0
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/FetchConfigFromSolr.java

@@ -0,0 +1,171 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logfeeder.logconfig;
+
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.List;
+import java.util.TimeZone;
+
+import org.apache.ambari.logfeeder.LogFeederUtil;
+import org.apache.ambari.logfeeder.util.SolrUtil;
+import org.apache.ambari.logfeeder.view.VLogfeederFilter;
+import org.apache.ambari.logfeeder.view.VLogfeederFilterWrapper;
+import org.apache.log4j.Logger;
+
+public class FetchConfigFromSolr extends Thread {
+  private static Logger logger = Logger.getLogger(FetchConfigFromSolr.class);
+  private static VLogfeederFilterWrapper logfeederFilterWrapper = null;
+  private static int solrConfigInterval = 5; // seconds
+  private static long delay;
+  private static String endTimeDateFormat = "yyyy-MM-dd'T'HH:mm:ss.SSS";//2016-04-05T04:30:00.000Z
+  private static String sysTimeZone = "GMT";
+
+  public FetchConfigFromSolr() {
+    this.setName(this.getClass().getSimpleName());
+  }
+
+  @Override
+  public void run() {
+    solrConfigInterval = LogFeederUtil.getIntProperty("logfeeder.solr.config.interval", solrConfigInterval);
+    delay = 1000 * solrConfigInterval;
+    do {
+      logger.debug("Updating config from Solr every " + solrConfigInterval + " sec.");
+      pullConfigFromSolr();
+      try {
+        Thread.sleep(delay);
+      } catch (InterruptedException e) {
+        logger.error(e.getLocalizedMessage(), e);
+      }
+    } while (true);
+  }
+
+  private synchronized void pullConfigFromSolr() {
+    HashMap<String, Object> configDocMap = SolrUtil.getInstance().getConfigDoc();
+    if (configDocMap != null) {
+      String configJson = (String) configDocMap.get(LogFeederConstants.VALUES);
+      if (configJson != null) {
+        logfeederFilterWrapper = LogFeederUtil.getGson().fromJson(configJson, VLogfeederFilterWrapper.class);
+      }
+    }
+  }
+
+  public static boolean isFilterExpired(VLogfeederFilter logfeederFilter) {
+    boolean isFilterExpired = false; // default is false
+    if (logfeederFilter != null) {
+      Date filterEndDate = parseFilterExpireDate(logfeederFilter);
+      if (filterEndDate != null) {
+        Date currentDate = getCurrentDate();
+        if (currentDate.compareTo(filterEndDate) >= 0) {
+          logger.debug("Filter for component " + logfeederFilter.getLabel() + " and hosts "
+            + listToStr(logfeederFilter.getHosts()) + " is expired because filter endTime "
+            + dateToStr(filterEndDate) + " is older than current time " + dateToStr(currentDate));
+          isFilterExpired = true;
+        }
+      }
+    }
+    return isFilterExpired;
+  }
+
+  public static String dateToStr(Date date) {
+    if (date == null) {
+      return "";
+    }
+    SimpleDateFormat formatter = new SimpleDateFormat(endTimeDateFormat);
+    TimeZone timeZone = TimeZone.getTimeZone(sysTimeZone);
+    formatter.setTimeZone(timeZone);
+    return formatter.format(date);
+  }
+
+  public static Date parseFilterExpireDate(VLogfeederFilter vLogfeederFilter) {
+    String expiryTime = vLogfeederFilter.getExpiryTime();
+    if (expiryTime != null && !expiryTime.isEmpty()) {
+      SimpleDateFormat formatter = new SimpleDateFormat(endTimeDateFormat);
+      TimeZone timeZone = TimeZone.getTimeZone(sysTimeZone);
+      formatter.setTimeZone(timeZone);
+      try {
+        return formatter.parse(expiryTime);
+      } catch (ParseException e) {
+        logger.error("Filter has invalid expiryTime: " + expiryTime + " for component " + vLogfeederFilter.getLabel()
+          + " and hosts " + listToStr(vLogfeederFilter.getHosts()));
+      }
+    }
+    return null;
+  }
+
+  public static List<String> getAllowedLevels(String hostName, VLogfeederFilter componentFilter) {
+    String componentName = componentFilter.getLabel();
+    List<String> hosts = componentFilter.getHosts();
+    List<String> defaultLevels = componentFilter.getDefaultLevels();
+    List<String> overrideLevels = componentFilter.getOverrideLevels();
+    if (LogFeederUtil.isListContains(hosts, hostName, false)) {
+      if (isFilterExpired(componentFilter)) {
+        // pick default
+        logger.debug("Filter for component " + componentName + " and host " + hostName + " expired at "
+          + componentFilter.getExpiryTime());
+        return defaultLevels;
+      } else {
+        // return tmp filter levels
+        return overrideLevels;
+      }
+    } else {
+      return defaultLevels;
+    }
+  }
+
+  public static VLogfeederFilter findComponentFilter(String componentName) {
+    if (logfeederFilterWrapper != null) {
+      HashMap<String, VLogfeederFilter> filter = logfeederFilterWrapper.getFilter();
+      if (filter != null) {
+        VLogfeederFilter componentFilter = filter.get(componentName);
+        if (componentFilter != null) {
+          return componentFilter;
+        }
+      }
+    }
+    logger.trace("No filter found for component: " + componentName);
+    return null;
+  }
+
+  public static Date getCurrentDate() {
+    // A Date value is timezone-independent; avoid mutating the JVM default timezone here.
+    return new Date();
+  }
+
+  public static String listToStr(List<String> strList) {
+    StringBuilder out = new StringBuilder("[");
+    if (strList != null) {
+      int counter = 0;
+      for (Object o : strList) {
+        if (counter > 0) {
+          out.append(",");
+        }
+        out.append(o.toString());
+        counter++;
+      }
+    }
+    out.append("]");
+    return out.toString();
+  }
+}
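
To make the expiry semantics concrete, a small standalone sketch using the same date pattern and GMT zone as the class (the expiry timestamp is invented):

import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.TimeZone;

public class ExpiryCheckDemo {
  public static void main(String[] args) throws Exception {
    // Same pattern/zone FetchConfigFromSolr uses for expiryTime values
    SimpleDateFormat fmt = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS");
    fmt.setTimeZone(TimeZone.getTimeZone("GMT"));
    Date expiry = fmt.parse("2016-04-05T04:30:00.000");
    // A filter counts as expired once the current time reaches its end time
    boolean expired = new Date().compareTo(expiry) >= 0;
    System.out.println("expired=" + expired); // true for any past timestamp
  }
}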

+ 39 - 0
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/LogFeederConstants.java

@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logfeeder.logconfig;
+
+public class LogFeederConstants {
+
+  public static final String ALL = "all";
+  public static final String NAME = "log_feeder_config";
+  // solr fields
+  public static final String SOLR_LEVEL = "level";
+  public static final String SOLR_COMPONENT = "type";
+  public static final String SOLR_HOST = "host";
+
+  // user config constants
+  public static final String ID = "id";
+  public static final String USER_NAME = "username";
+  public static final String VALUES = "jsons";
+  public static final String FILTER_NAME = "filtername";
+  public static final String ROW_TYPE = "rowtype";
+
+}

+ 58 - 0
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/LogfeederScheduler.java

@@ -0,0 +1,58 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logfeeder.logconfig;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.ambari.logfeeder.LogFeederUtil;
+import org.apache.log4j.Logger;
+
+public enum LogfeederScheduler {
+
+  INSTANCE;
+
+  private Logger logger = Logger.getLogger(LogfeederScheduler.class);
+
+  private static boolean running = false;
+
+  public synchronized void start() {
+    boolean filterEnable = LogFeederUtil.getBooleanProperty("logfeeder.log.filter.enable", false);
+    if (!filterEnable) {
+      logger.info("Logfeeder filter scheduler is disabled.");
+      return;
+    }
+    if (!running) {
+      for (Thread thread : getThreadList()) {
+        thread.start();
+      }
+      running = true;
+      logger.info("Logfeeder Scheduler started!");
+    } else {
+      logger.warn("Logfeeder Scheduler is already running.");
+    }
+  }
+
+  private List<Thread> getThreadList() {
+    List<Thread> tasks = new ArrayList<Thread>();
+    tasks.add(new FetchConfigFromSolr());
+    return tasks;
+  }
+}
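
A hedged bootstrap sketch (it assumes the logfeeder properties have already been loaded via LogFeederUtil): the enum-singleton start() is guarded by the running flag, so a second call only logs a warning.

import org.apache.ambari.logfeeder.logconfig.LogfeederScheduler;

public class SchedulerDemo {
  public static void main(String[] args) {
    // Spawns the FetchConfigFromSolr polling thread when
    // logfeeder.log.filter.enable=true; otherwise it is a no-op.
    LogfeederScheduler.INSTANCE.start();
    LogfeederScheduler.INSTANCE.start(); // second call: "already running" warning
  }
}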

+ 60 - 0
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/filter/ApplyLogFilter.java

@@ -0,0 +1,60 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logfeeder.logconfig.filter;
+
+import java.util.List;
+import java.util.Map;
+
+import org.apache.ambari.logfeeder.LogFeederUtil;
+import org.apache.ambari.logfeeder.logconfig.FetchConfigFromSolr;
+import org.apache.ambari.logfeeder.logconfig.LogFeederConstants;
+import org.apache.ambari.logfeeder.view.VLogfeederFilter;
+import org.apache.log4j.Logger;
+
+public class ApplyLogFilter extends DefaultDataFilter {
+
+  private static Logger logger = Logger.getLogger(ApplyLogFilter.class);
+
+  @Override
+  public boolean applyFilter(Map<String, Object> jsonObj, boolean defaultValue) {
+    if (isEmpty(jsonObj)) {
+      logger.warn("Output JSON object is empty");
+      return defaultValue;
+    }
+    String hostName = (String) jsonObj.get(LogFeederConstants.SOLR_HOST);
+    if (isNotEmpty(hostName)) {
+      String componentName = (String) jsonObj.get(LogFeederConstants.SOLR_COMPONENT);
+      if (isNotEmpty(componentName)) {
+        String level = (String) jsonObj.get(LogFeederConstants.SOLR_LEVEL);
+        if (isNotEmpty(level)) {
+          // find component filter
+          VLogfeederFilter componentFilter = FetchConfigFromSolr.findComponentFilter(componentName);
+          if (componentFilter == null) {
+            //return default value if there is no filter found for particular component
+            return defaultValue;
+          }
+          List<String> allowedLevels = FetchConfigFromSolr.getAllowedLevels(hostName, componentFilter);
+          return LogFeederUtil.isListContains(allowedLevels, level, false);
+        }
+      }
+    }
+    return defaultValue;
+  }
+}

+ 56 - 0
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/filter/DefaultDataFilter.java

@@ -0,0 +1,56 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logfeeder.logconfig.filter;
+
+import java.util.List;
+import java.util.Map;
+
+import org.apache.log4j.Logger;
+
+/**
+ * Default filter to allow everything
+ */
+public class DefaultDataFilter {
+  private static Logger logger = Logger.getLogger(DefaultDataFilter.class);
+
+  protected static final boolean CASE_SENSITIVE = false;
+
+  public boolean applyFilter(Map<String, Object> outputJsonObj, boolean defaultValue) {
+    return defaultValue;
+  }
+
+  public boolean isEmpty(Map<String, Object> map) {
+    return map == null || map.isEmpty();
+  }
+
+  public boolean isEmpty(String str) {
+    return str == null || str.trim().isEmpty();
+  }
+
+  public boolean isNotEmpty(String str) {
+    return !isEmpty(str);
+  }
+
+}

+ 53 - 0
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/filter/FilterLogData.java

@@ -0,0 +1,53 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logfeeder.logconfig.filter;
+
+import java.util.Map;
+
+import org.apache.ambari.logfeeder.LogFeederUtil;
+import org.apache.ambari.logfeeder.logconfig.filter.ApplyLogFilter;
+import org.apache.log4j.Logger;
+
+/**
+ * Read configuration from solr and filter the log
+ */
+public enum FilterLogData {
+  INSTANCE;
+  private ApplyLogFilter applyLogFilter = new ApplyLogFilter();
+  private static Logger logger = Logger.getLogger(FilterLogData.class);
+  // by default allow every log
+  boolean defaultValue = true;
+
+  public boolean isAllowed(String jsonBlock) {
+    if (jsonBlock == null || jsonBlock.isEmpty()) {
+      return defaultValue;
+    }
+    Map<String, Object> jsonObj = LogFeederUtil.toJSONObject(jsonBlock);
+    return applyLogFilter.applyFilter(jsonObj, defaultValue);
+  }
+
+  public boolean isAllowed(Map<String, Object> jsonObj) {
+    boolean isAllowed = applyLogFilter.applyFilter(jsonObj, defaultValue);
+    if (!isAllowed) {
+      logger.trace("Filter blocked the content: " + LogFeederUtil.getGson().toJson(jsonObj));
+    }
+    return isAllowed;
+  }
+}
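
A minimal sketch of this entry point (the field values are invented, and logfeeder utilities are assumed to be initialized): until a filter document has been fetched from Solr, the default of allowing every log wins.

import org.apache.ambari.logfeeder.logconfig.filter.FilterLogData;

public class FilterDemo {
  public static void main(String[] args) {
    // Uses the Solr field names: host, type (component) and level
    String json = "{\"host\":\"h1\",\"type\":\"hdfs_namenode\",\"level\":\"INFO\"}";
    System.out.println(FilterLogData.INSTANCE.isAllowed(json)); // true by default
  }
}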

+ 52 - 0
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/mapper/Mapper.java

@@ -0,0 +1,52 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logfeeder.mapper;
+
+import java.util.Map;
+
+public abstract class Mapper {
+  String inputDesc;
+  String fieldName;
+  String mapClassCode;
+
+  @SuppressWarnings("hiding")
+  public boolean init(String inputDesc, String fieldName,
+                      String mapClassCode, Object mapConfigs) {
+    this.inputDesc = inputDesc;
+    this.fieldName = fieldName;
+    this.mapClassCode = mapClassCode;
+    return true;
+  }
+
+  /**
+   * @param jsonObj the parsed log entry being processed
+   * @param value the current value of the mapped field
+   * @return the (possibly transformed) value
+   */
+  public Object apply(Map<String, Object> jsonObj, Object value) {
+    return value;
+  }
+
+  @Override
+  public String toString() {
+    return "mapClass=" + mapClassCode + ", input=" + inputDesc
+      + ", fieldName=" + fieldName;
+  }
+
+}

+ 99 - 0
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/mapper/MapperDate.java

@@ -0,0 +1,99 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logfeeder.mapper;
+
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.Map;
+
+import org.apache.ambari.logfeeder.LogFeederUtil;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.log4j.Level;
+import org.apache.log4j.Logger;
+
+public class MapperDate extends Mapper {
+  Logger logger = Logger.getLogger(MapperDate.class);
+
+  String dateFormat = null;
+  SimpleDateFormat dateFormatter = null;
+  boolean isEpoch = false;
+
+  @SuppressWarnings("hiding")
+  @Override
+  public boolean init(String inputDesc, String fieldName,
+                      String mapClassCode, Object mapConfigs) {
+    super.init(inputDesc, fieldName, mapClassCode, mapConfigs);
+    if (!(mapConfigs instanceof Map)) {
+      logger.fatal("Can't initialize object. mapConfigs class is not of type Map. "
+        + mapConfigs.getClass().getName()
+        + ", map="
+        + this.toString());
+      return false;
+    }
+    @SuppressWarnings("unchecked")
+    Map<String, Object> mapObjects = (Map<String, Object>) mapConfigs;
+    dateFormat = (String) mapObjects.get("date_pattern");
+    if (StringUtils.isEmpty(dateFormat)) {
+      logger.fatal("Date format for map is empty. " + this.toString());
+    } else {
+      logger.info("Date mapper format is " + dateFormat);
+
+      if (dateFormat.equalsIgnoreCase("epoch")) {
+        isEpoch = true;
+        return true;
+      } else {
+        try {
+          dateFormatter = new SimpleDateFormat(dateFormat);
+          return true;
+        } catch (Throwable ex) {
+          logger.fatal("Error creating date format. format="
+            + dateFormat + ". " + this.toString());
+        }
+      }
+    }
+    return false;
+  }
+
+  @Override
+  public Object apply(Map<String, Object> jsonObj, Object value) {
+    if (value != null) {
+      try {
+        if (isEpoch) {
+          // First convert to long
+          long ms = Long.parseLong(value.toString()) * 1000;
+          value = new Date(ms);
+        } else if (dateFormatter != null) {
+          value = dateFormatter.parse(value.toString());
+        } else {
+          return value;
+        }
+        jsonObj.put(fieldName, value);
+      } catch (Throwable t) {
+        LogFeederUtil.logErrorMessageByInterval(this.getClass()
+            .getSimpleName() + ":apply",
+          "Error applying date transformation. isEpoch="
+            + isEpoch + ", dateFormat=" + dateFormat
+            + ", value=" + value + ". " + this.toString(),
+          t, logger, Level.ERROR);
+      }
+    }
+    return value;
+  }
+}
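
A hedged sketch of the date mapper (the input name, field name and value are invented); 1459830600 is 2016-04-05T04:30:00 UTC. With date_pattern=epoch the value is treated as seconds since the Unix epoch, otherwise as a SimpleDateFormat pattern.

import java.util.HashMap;
import java.util.Map;

import org.apache.ambari.logfeeder.mapper.MapperDate;

public class MapperDateDemo {
  public static void main(String[] args) {
    Map<String, Object> conf = new HashMap<String, Object>();
    conf.put("date_pattern", "epoch"); // seconds since the Unix epoch
    MapperDate mapper = new MapperDate();
    mapper.init("demo-input", "logtime", "map_date", conf);

    Map<String, Object> json = new HashMap<String, Object>();
    Object mapped = mapper.apply(json, "1459830600");
    System.out.println(mapped);              // java.util.Date for the epoch value
    System.out.println(json.get("logtime")); // same Date, written back to the field
  }
}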

+ 72 - 0
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/mapper/MapperFieldName.java

@@ -0,0 +1,72 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logfeeder.mapper;
+
+import java.util.Map;
+
+import org.apache.ambari.logfeeder.LogFeederUtil;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.log4j.Level;
+import org.apache.log4j.Logger;
+
+/**
+ * Overrides the value for the field
+ */
+public class MapperFieldName extends Mapper {
+  Logger logger = Logger.getLogger(MapperFieldName.class);
+  String newValue = null;
+
+  @SuppressWarnings("hiding")
+  @Override
+  public boolean init(String inputDesc, String fieldName,
+      String mapClassCode, Object mapConfigs) {
+    super.init(inputDesc, fieldName, mapClassCode, mapConfigs);
+    if (!(mapConfigs instanceof Map)) {
+      logger.fatal("Can't initialize object. mapConfigs class is not of type Map. "
+          + mapConfigs.getClass().getName());
+      return false;
+    }
+    @SuppressWarnings("unchecked")
+    Map<String, Object> mapObjects = (Map<String, Object>) mapConfigs;
+    newValue = (String) mapObjects.get("new_fieldname");
+    if (StringUtils.isEmpty(newValue)) {
+      logger.fatal("Map field value is empty.");
+      return false;
+    }
+    return true;
+  }
+
+  @Override
+  public Object apply(Map<String, Object> jsonObj, Object value) {
+    if (newValue != null) {
+      // Remove the old one
+      jsonObj.remove(fieldName);
+      // Add with new key name
+      jsonObj.put(newValue, value);
+    } else {
+      LogFeederUtil.logErrorMessageByInterval(this.getClass()
+          .getSimpleName() + ":apply",
+          "New fieldName is null, so transformation is not applied. "
+              + this.toString(), null, logger, Level.ERROR);
+    }
+    return value;
+  }
+
+}

+ 76 - 0
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/mapper/MapperFieldValue.java

@@ -0,0 +1,76 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logfeeder.mapper;
+
+import java.util.Map;
+
+import org.apache.ambari.logfeeder.LogFeederUtil;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.log4j.Level;
+import org.apache.log4j.Logger;
+
+/**
+ * Overrides the value for the field
+ */
+public class MapperFieldValue extends Mapper {
+  Logger logger = Logger.getLogger(MapperFieldValue.class);
+  String prevValue = null;
+  String newValue = null;
+
+  @SuppressWarnings("hiding")
+  @Override
+  public boolean init(String inputDesc, String fieldName,
+      String mapClassCode, Object mapConfigs) {
+    super.init(inputDesc, fieldName, mapClassCode, mapConfigs);
+    if (!(mapConfigs instanceof Map)) {
+      logger.fatal("Can't initialize object. mapConfigs class is not of type Map. "
+          + mapConfigs.getClass().getName());
+      return false;
+    }
+    @SuppressWarnings("unchecked")
+    Map<String, Object> mapObjects = (Map<String, Object>) mapConfigs;
+    prevValue = (String) mapObjects.get("pre_value");
+    newValue = (String) mapObjects.get("post_value");
+    if (StringUtils.isEmpty(newValue)) {
+      logger.fatal("Map field value is empty.");
+      return false;
+    }
+    return true;
+  }
+
+  @Override
+  public Object apply(Map<String, Object> jsonObj, Object value) {
+    if (newValue != null) {
+      if (prevValue != null && value != null) {
+        if (prevValue.equalsIgnoreCase(value.toString())) {
+          value = newValue;
+          jsonObj.put(fieldName, value);
+        }
+      }
+    } else {
+      LogFeederUtil.logErrorMessageByInterval(
+          this.getClass().getSimpleName() + ":apply",
+          "New value is null, so transformation is not applied. "
+              + this.toString(), null, logger, Level.ERROR);
+    }
+    return value;
+  }
+
+}
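
A sketch of the value rewrite (the configuration values are invented): pre_value is matched case-insensitively and replaced by post_value, both in the returned value and in the JSON object.

import java.util.HashMap;
import java.util.Map;

import org.apache.ambari.logfeeder.mapper.MapperFieldValue;

public class MapperFieldValueDemo {
  public static void main(String[] args) {
    Map<String, Object> conf = new HashMap<String, Object>();
    conf.put("pre_value", "WARNING"); // value to look for (case-insensitive)
    conf.put("post_value", "WARN");   // normalized replacement
    MapperFieldValue mapper = new MapperFieldValue();
    mapper.init("demo-input", "level", "map_fieldvalue", conf);

    Map<String, Object> json = new HashMap<String, Object>();
    json.put("level", "Warning");
    System.out.println(mapper.apply(json, "Warning")); // WARN
  }
}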

+ 119 - 0
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/Output.java

@@ -0,0 +1,119 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logfeeder.output;
+
+import java.lang.reflect.Type;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.ambari.logfeeder.ConfigBlock;
+import org.apache.ambari.logfeeder.LogFeederUtil;
+import org.apache.ambari.logfeeder.MetricCount;
+import org.apache.ambari.logfeeder.input.Input;
+import org.apache.ambari.logfeeder.input.InputMarker;
+import org.apache.log4j.Logger;
+
+import com.google.gson.reflect.TypeToken;
+
+public abstract class Output extends ConfigBlock {
+  static private Logger logger = Logger.getLogger(Output.class);
+
+  String destination = null;
+
+  Type jsonType = new TypeToken<Map<String, String>>() {
+  }.getType();
+
+  public MetricCount writeBytesMetric = new MetricCount();
+
+  @Override
+  public String getShortDescription() {
+    return null;
+  }
+
+  @Override
+  public String getNameForThread() {
+    if (destination != null) {
+      return destination;
+    }
+    return super.getNameForThread();
+  }
+
+  public void write(String block, InputMarker inputMarker) throws Exception {
+    // No-op. Please implement in sub classes
+  }
+
+  /**
+   * @param jsonObj parsed log entry to write
+   * @param inputMarker marker identifying the source input and read position
+   * @throws Exception if the write fails
+   */
+  public void write(Map<String, Object> jsonObj, InputMarker inputMarker)
+    throws Exception {
+    write(LogFeederUtil.getGson().toJson(jsonObj), inputMarker);
+  }
+
+  boolean isClosed = false;
+
+  /**
+   * Extend this method to clean up
+   */
+  public void close() {
+    logger.info("Calling base close()." + getShortDescription());
+    isClosed = true;
+  }
+
+  /**
+   * Checked on shutdown; outputs that buffer data should override close().
+   *
+   * @return true once close() has been called
+   */
+  public boolean isClosed() {
+    return isClosed;
+  }
+
+  public long getPendingCount() {
+    return 0;
+  }
+
+  public String getDestination() {
+    return destination;
+  }
+
+  public void setDestination(String destination) {
+    this.destination = destination;
+  }
+
+  @Override
+  public void addMetricsContainers(List<MetricCount> metricsList) {
+    super.addMetricsContainers(metricsList);
+    metricsList.add(writeBytesMetric);
+  }
+
+  @Override
+  public synchronized void logStat() {
+    super.logStat();
+
+    //Printing stat for writeBytesMetric
+    logStatForMetric(writeBytesMetric, "Stat: Bytes Written");
+
+  }
+
+}

+ 48 - 0
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputData.java

@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logfeeder.output;
+
+import java.util.Map;
+
+import org.apache.ambari.logfeeder.input.InputMarker;
+
+/**
+ * This contains the output json object and InputMarker.
+ */
+public class OutputData {
+  Map<String, Object> jsonObj;
+  InputMarker inputMarker;
+
+  /**
+   * @param jsonObj
+   * @param inputMarker
+   */
+  public OutputData(Map<String, Object> jsonObj, InputMarker inputMarker) {
+    super();
+    this.jsonObj = jsonObj;
+    this.inputMarker = inputMarker;
+  }
+
+  @Override
+  public String toString() {
+    return "OutputData [jsonObj=" + jsonObj + ", inputMarker="
+      + inputMarker + "]";
+  }
+}

+ 138 - 0
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputFile.java

@@ -0,0 +1,138 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logfeeder.output;
+
+import java.io.BufferedWriter;
+import java.io.File;
+import java.io.FileWriter;
+import java.io.PrintWriter;
+import java.util.Map;
+
+import org.apache.ambari.logfeeder.LogFeederUtil;
+import org.apache.ambari.logfeeder.input.Input;
+import org.apache.ambari.logfeeder.input.InputMarker;
+import org.apache.commons.csv.CSVFormat;
+import org.apache.commons.csv.CSVPrinter;
+import org.apache.log4j.Logger;
+
+public class OutputFile extends Output {
+  static Logger logger = Logger.getLogger(OutputFile.class);
+
+  PrintWriter outWriter = null;
+  String filePath = null;
+  String codec;
+
+  @Override
+  public void init() throws Exception {
+    super.init();
+
+    filePath = getStringValue("path");
+    if (filePath == null || filePath.isEmpty()) {
+      logger.error("Filepath config property <path> is not set in config file.");
+      return;
+    }
+    codec = getStringValue("codec");
+    if (codec == null || codec.trim().isEmpty()) {
+      codec = "json";
+    } else {
+      if (codec.trim().equalsIgnoreCase("csv")) {
+        codec = "csv";
+      } else if (codec.trim().equalsIgnoreCase("json")) {
+        codec = "json";
+      } else {
+        logger.error("Unsupported codec type. codec=" + codec
+          + ", will use json");
+        codec = "json";
+      }
+    }
+    logger.info("Out filePath=" + filePath + ", codec=" + codec);
+    File outFile = new File(filePath);
+    if (outFile.getParentFile() != null) {
+      File parentDir = outFile.getParentFile();
+      if (!parentDir.isDirectory()) {
+        parentDir.mkdirs();
+      }
+    }
+
+    outWriter = new PrintWriter(new BufferedWriter(new FileWriter(outFile,
+      true)));
+
+    logger.info("init() is successful. filePath="
+      + outFile.getAbsolutePath());
+  }
+
+  @Override
+  public void close() {
+    logger.info("Closing file." + getShortDescription());
+    if (outWriter != null) {
+      try {
+        outWriter.close();
+      } catch (Throwable t) {
+        // Ignore this exception
+      }
+    }
+    isClosed = true;
+  }
+
+  @Override
+  public void write(Map<String, Object> jsonObj, InputMarker inputMarker)
+    throws Exception {
+    String outStr = null;
+    if (codec.equals("csv")) {
+      // Convert to CSV
+      CSVPrinter csvPrinter = new CSVPrinter(outWriter, CSVFormat.RFC4180);
+      //TODO:
+    } else {
+      outStr = LogFeederUtil.getGson().toJson(jsonObj);
+    }
+    if (outWriter != null && outStr != null) {
+      statMetric.count++;
+
+      outWriter.println(outStr);
+      outWriter.flush();
+    }
+  }
+
+  /*
+   * (non-Javadoc)
+   * 
+   * @see org.apache.ambari.logfeeder.output.Output#write()
+   */
+  @Override
+  synchronized public void write(String block, InputMarker inputMarker) throws Exception {
+    if (outWriter != null && block != null) {
+      statMetric.count++;
+
+      outWriter.println(block);
+      outWriter.flush();
+    }
+  }
+
+  /*
+   * (non-Javadoc)
+   * 
+   * @see org.apache.ambari.logfeeder.ConfigBlock#getShortDescription()
+   */
+  @Override
+  public String getShortDescription() {
+    return "output:destination=file,path=" + filePath;
+  }
+
+}

+ 313 - 0
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputKafka.java

@@ -0,0 +1,313 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logfeeder.output;
+
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Properties;
+import java.util.concurrent.BlockingQueue;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.LinkedTransferQueue;
+
+import org.apache.ambari.logfeeder.LogFeederUtil;
+import org.apache.ambari.logfeeder.input.Input;
+import org.apache.ambari.logfeeder.input.InputMarker;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.kafka.clients.producer.Callback;
+import org.apache.kafka.clients.producer.KafkaProducer;
+import org.apache.kafka.clients.producer.ProducerRecord;
+import org.apache.kafka.clients.producer.RecordMetadata;
+import org.apache.kafka.common.serialization.StringSerializer;
+import org.apache.log4j.Level;
+import org.apache.log4j.Logger;
+
+public class OutputKafka extends Output {
+  static private Logger logger = Logger.getLogger(OutputKafka.class);
+
+  String brokerList = null;
+  String topic = null;
+  boolean isAsync = true;
+  long messageCount = 0;
+  int batchSize = 5000;
+  int lingerMS = 1000;
+
+  private KafkaProducer<String, String> producer = null;
+  BlockingQueue<KafkaCallBack> failedMessages = new LinkedTransferQueue<KafkaCallBack>();
+
+  // Let's start with the assumption Kafka is down
+  boolean isKafkaBrokerUp = false;
+
+  static final int FAILED_RETRY_INTERVAL = 30;
+  static final int CATCHUP_RETRY_INTERVAL = 5;
+
+  @Override
+  public void init() throws Exception {
+    super.init();
+    statMetric.metricsName = "output.kafka.write_logs";
+    writeBytesMetric.metricsName = "output.kafka.write_bytes";
+
+    brokerList = getStringValue("broker_list");
+    topic = getStringValue("topic");
+    isAsync = getBooleanValue("is_async", true);
+    batchSize = getIntValue("batch_size", batchSize);
+    lingerMS = getIntValue("linger_ms", lingerMS);
+
+    Map<String, Object> kafkaCustomProperties = new HashMap<String, Object>();
+    // Get all kafka custom properties
+    for (String key : configs.keySet()) {
+      if (key.startsWith("kafka.")) {
+        Object value = configs.get(key);
+        if (value == null || value.toString().length() == 0) {
+          continue;
+        }
+        String kafkaKey = key.substring("kafka.".length());
+        kafkaCustomProperties.put(kafkaKey, value);
+      }
+    }
+
+    if (StringUtils.isEmpty(brokerList)) {
+      throw new Exception(
+        "For kafka output, bootstrap broker_list is needed");
+    }
+
+    if (StringUtils.isEmpty(topic)) {
+      throw new Exception("For kafka output, topic is needed");
+    }
+
+    Properties props = new Properties();
+    // 0.9.0
+    props.put("bootstrap.servers", brokerList);
+    props.put("client.id", "logfeeder_producer");
+    props.put("key.serializer", StringSerializer.class.getName());
+    props.put("value.serializer", StringSerializer.class.getName());
+    props.put("compression.type", "snappy");
+    // props.put("retries", "3");
+    props.put("batch.size", batchSize);
+    props.put("linger.ms", lingerMS);
+
+    for (String kafkaKey : kafkaCustomProperties.keySet()) {
+      logger.info("Adding custom Kafka property. " + kafkaKey + "="
+        + kafkaCustomProperties.get(kafkaKey));
+      props.put(kafkaKey, kafkaCustomProperties.get(kafkaKey));
+    }
+
+    // props.put("metadata.broker.list", brokerList);
+
+    producer = new KafkaProducer<String, String>(props);
+    Thread retryThread = new Thread("kafka-writer-retry,topic=" + topic) {
+      @Override
+      public void run() {
+        KafkaCallBack kafkaCallBack = null;
+        logger.info("Started thread to monitor failed messages. "
+          + getShortDescription());
+        while (true) {
+          try {
+            if (kafkaCallBack == null) {
+              kafkaCallBack = failedMessages.take();
+            }
+            if (publishMessage(kafkaCallBack.message,
+              kafkaCallBack.inputMarker)) {
+              // logger.info("Sent message. count="
+              // + kafkaCallBack.thisMessageNumber);
+              kafkaCallBack = null;
+            } else {
+              // Should wait for sometime
+              logger.error("Kafka is down. messageNumber="
+                + kafkaCallBack.thisMessageNumber
+                + ". Going to sleep for "
+                + FAILED_RETRY_INTERVAL + " seconds");
+              Thread.sleep(FAILED_RETRY_INTERVAL * 1000);
+            }
+
+          } catch (Throwable t) {
+            final String LOG_MESSAGE_KEY = this.getClass()
+              .getSimpleName() + "_KAFKA_RETRY_WRITE_ERROR";
+            LogFeederUtil.logErrorMessageByInterval(
+              LOG_MESSAGE_KEY,
+              "Error sending message to Kafka during retry. message="
+                + (kafkaCallBack == null ? null
+                : kafkaCallBack.message), t,
+              logger, Level.ERROR);
+          }
+        }
+
+      }
+    };
+    retryThread.setDaemon(true);
+    retryThread.start();
+  }
+
+  @Override
+  public void setDrain(boolean drain) {
+    super.setDrain(drain);
+  }
+
+  /**
+   * Flush document buffer
+   */
+  public void flush() {
+    logger.info("Flush called...");
+    setDrain(true);
+  }
+
+  @Override
+  public void close() {
+    logger.info("Closing Kafka client...");
+    flush();
+    if (producer != null) {
+      try {
+        producer.close();
+      } catch (Throwable t) {
+        logger.error("Error closing Kafka topic. topic=" + topic);
+      }
+    }
+    logger.info("Closed Kafka client");
+    super.close();
+  }
+
+  @Override
+  synchronized public void write(String block, InputMarker inputMarker) throws Exception {
+    while (!isDrain() && !inputMarker.input.isDrain()) {
+      try {
+        if (failedMessages.size() == 0) {
+          if (publishMessage(block, inputMarker)) {
+            break;
+          }
+        }
+        if (isDrain() || inputMarker.input.isDrain()) {
+          break;
+        }
+        if (!isKafkaBrokerUp) {
+          logger.error("Kafka is down. Going to sleep for "
+            + FAILED_RETRY_INTERVAL + " seconds");
+          Thread.sleep(FAILED_RETRY_INTERVAL * 1000);
+
+        } else {
+          logger.warn("Kafka is still catching up from previous failed messages. outstanding messages="
+            + failedMessages.size()
+            + " Going to sleep for "
+            + CATCHUP_RETRY_INTERVAL + " seconds");
+          Thread.sleep(CATCHUP_RETRY_INTERVAL * 1000);
+        }
+      } catch (Throwable t) {
+        // ignore
+        break;
+      }
+    }
+  }
+
+  private boolean publishMessage(String block, InputMarker inputMarker) {
+    if (isAsync && isKafkaBrokerUp) { // Send asynchronously
+      producer.send(new ProducerRecord<String, String>(topic, block),
+        new KafkaCallBack(this, block, inputMarker, ++messageCount));
+      return true;
+    } else { // Send synchronously
+      try {
+        // Not using key. Let it round robin
+        RecordMetadata metadata = producer.send(
+          new ProducerRecord<String, String>(topic, block)).get();
+        if (metadata != null) {
+          statMetric.count++;
+          writeBytesMetric.count += block.length();
+        }
+        if (!isKafkaBrokerUp) {
+          logger.info("Started writing to kafka. "
+            + getShortDescription());
+          isKafkaBrokerUp = true;
+        }
+        return true;
+      } catch (InterruptedException e) {
+        isKafkaBrokerUp = false;
+        final String LOG_MESSAGE_KEY = this.getClass().getSimpleName()
+          + "_KAFKA_INTERRUPT";
+        LogFeederUtil.logErrorMessageByInterval(LOG_MESSAGE_KEY,
+          "InterruptedException-Error sending message to Kafka",
+          e, logger, Level.ERROR);
+      } catch (ExecutionException e) {
+        isKafkaBrokerUp = false;
+        final String LOG_MESSAGE_KEY = this.getClass().getSimpleName()
+          + "_KAFKA_EXECUTION";
+        LogFeederUtil.logErrorMessageByInterval(LOG_MESSAGE_KEY,
+          "ExecutionException-Error sending message to Kafka", e,
+          logger, Level.ERROR);
+      } catch (Throwable t) {
+        isKafkaBrokerUp = false;
+        final String LOG_MESSAGE_KEY = this.getClass().getSimpleName()
+          + "_KAFKA_WRITE_ERROR";
+        LogFeederUtil.logErrorMessageByInterval(LOG_MESSAGE_KEY,
+          "GenericException-Error sending message to Kafka", t,
+          logger, Level.ERROR);
+      }
+    }
+    return false;
+  }
+
+  /*
+   * (non-Javadoc)
+   * 
+   * @see org.apache.ambari.logfeeder.ConfigBlock#getShortDescription()
+   */
+  @Override
+  public String getShortDescription() {
+    return "output:destination=kafka,topic=" + topic;
+  }
+
+}
+
+class KafkaCallBack implements Callback {
+  static private Logger logger = Logger.getLogger(KafkaCallBack.class);
+
+  long thisMessageNumber;
+  OutputKafka output = null;
+  String message;
+  InputMarker inputMarker;
+
+  public KafkaCallBack(OutputKafka output, String message, InputMarker inputMarker,
+                       long messageCount) {
+    this.thisMessageNumber = messageCount;
+    this.output = output;
+    this.inputMarker = inputMarker;
+    this.message = message;
+  }
+
+  public void onCompletion(RecordMetadata metadata, Exception exception) {
+    if (metadata != null) {
+      if (!output.isKafkaBrokerUp) {
+        logger.info("Started writing to kafka. "
+          + output.getShortDescription());
+        output.isKafkaBrokerUp = true;
+      }
+      output.incrementStat(1);
+      output.writeBytesMetric.count += message.length();
+
+      // metadata.partition();
+      // metadata.offset();
+    } else {
+      output.isKafkaBrokerUp = false;
+      final String LOG_MESSAGE_KEY = this.getClass().getSimpleName()
+        + "_KAFKA_ASYNC_ERROR";
+      LogFeederUtil.logErrorMessageByInterval(LOG_MESSAGE_KEY,
+        "Error sending message to Kafka. Async Callback",
+        exception, logger, Level.ERROR);
+
+      output.failedMessages.add(this);
+    }
+  }
+}
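
To clarify the kafka.* passthrough in init(), a self-contained sketch of the prefix stripping (the property names are examples, not defaults shipped by this commit):

import java.util.HashMap;
import java.util.Map;

public class KafkaPropsDemo {
  public static void main(String[] args) {
    Map<String, Object> configs = new HashMap<String, Object>();
    configs.put("kafka.acks", "all");       // forwarded to the producer as acks=all
    configs.put("topic", "logfeeder-logs"); // no kafka. prefix: handled separately

    Map<String, Object> producerProps = new HashMap<String, Object>();
    for (String key : configs.keySet()) {
      if (key.startsWith("kafka.")) {
        producerProps.put(key.substring("kafka.".length()), configs.get(key));
      }
    }
    System.out.println(producerProps); // {acks=all}
  }
}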

+ 475 - 0
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputSolr.java

@@ -0,0 +1,475 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logfeeder.output;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Calendar;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.UUID;
+import java.util.concurrent.BlockingQueue;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.concurrent.TimeUnit;
+
+import org.apache.ambari.logfeeder.LogFeederUtil;
+import org.apache.ambari.logfeeder.input.InputMarker;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.log4j.Level;
+import org.apache.log4j.Logger;
+import org.apache.solr.client.solrj.SolrClient;
+import org.apache.solr.client.solrj.impl.CloudSolrClient;
+import org.apache.solr.client.solrj.impl.HttpSolrClient;
+import org.apache.solr.client.solrj.impl.LBHttpSolrClient;
+import org.apache.solr.client.solrj.response.SolrPingResponse;
+import org.apache.solr.client.solrj.response.UpdateResponse;
+import org.apache.solr.common.SolrInputDocument;
+
+public class OutputSolr extends Output {
+  static private Logger logger = Logger.getLogger(OutputSolr.class);
+
+  private static final String ROUTER_FIELD = "_router_field_";
+
+  String solrUrl = null;
+  String zkHosts = null;
+  String collection = null;
+  String splitMode = "none";
+  int splitInterval = 0;
+  int numberOfShards = 1;
+  boolean isComputeCurrentCollection = false;
+
+  int maxBufferSize = 5000;
+  int maxIntervalMS = 3000;
+  int workers = 1;
+
+  BlockingQueue<OutputData> outgoingBuffer = null;
+  List<SolrWorkerThread> writerThreadList = new ArrayList<SolrWorkerThread>();
+  private static final int RETRY_INTERVAL = 30;
+
+  int lastSlotByMin = -1;
+
+  @Override
+  public void init() throws Exception {
+    super.init();
+    statMetric.metricsName = "output.solr.write_logs";
+    writeBytesMetric.metricsName = "output.solr.write_bytes";
+
+    solrUrl = getStringValue("url");
+    zkHosts = getStringValue("zk_hosts");
+    splitMode = getStringValue("splits_interval_mins", splitMode);
+    if (!splitMode.equalsIgnoreCase("none")) {
+      splitInterval = getIntValue("split_interval_mins", 30);
+    }
+    numberOfShards = getIntValue("number_of_shards", numberOfShards);
+
+    maxBufferSize = getIntValue("flush_size", maxBufferSize);
+    if (maxBufferSize < 1) {
+      logger.warn("maxBufferSize is less than 1. Making it 1");
+      maxBufferSize = 1;
+    }
+    maxIntervalMS = getIntValue("idle_flush_time_ms", maxIntervalMS);
+    workers = getIntValue("workers", workers);
+
+    logger.info("Config: Number of workers=" + workers + ", splitMode="
+        + splitMode + ", splitInterval=" + splitInterval
+        + ", numberOfShards=" + numberOfShards + ". "
+        + getShortDescription());
+
+    if (StringUtils.isEmpty(solrUrl) && StringUtils.isEmpty(zkHosts)) {
+      throw new Exception(
+          "For solr output, either url or zk_hosts property need to be set");
+    }
+
+    int bufferSize = maxBufferSize * (workers + 3);
+    logger.info("Creating blocking queue with bufferSize=" + bufferSize);
+    // outgoingBuffer = new ArrayBlockingQueue<OutputData>(bufferSize);
+    outgoingBuffer = new LinkedBlockingQueue<OutputData>(bufferSize);
+
+    for (int count = 0; count < workers; count++) {
+      SolrClient solrClient = null;
+      CloudSolrClient solrCloudClient = null;
+      if (zkHosts != null) {
+        logger.info("Using ZooKeeper. zkHosts=" + zkHosts);
+        collection = getStringValue("collection");
+        if (StringUtils.isEmpty(collection)) {
+          throw new Exception(
+              "For solr cloud, the collection property is mandatory");
+        }
+        logger.info("Using collection=" + collection);
+        solrCloudClient = new CloudSolrClient(zkHosts);
+        solrCloudClient.setDefaultCollection(collection);
+        solrClient = solrCloudClient;
+        isComputeCurrentCollection = !splitMode.equalsIgnoreCase("none");
+      } else {
+        String[] solrUrls = StringUtils.split(solrUrl, ",");
+        if (solrUrls.length == 1) {
+          logger.info("Using SolrURL=" + solrUrl);
+          solrClient = new HttpSolrClient(solrUrl);
+        } else {
+          logger.info("Using load balance solr client. solrUrls="
+              + solrUrl);
+          logger.info("Initial URL for LB solr=" + solrUrls[0]);
+          @SuppressWarnings("resource")
+          LBHttpSolrClient lbSolrClient = new LBHttpSolrClient(
+              solrUrls[0]);
+          for (int i = 1; i < solrUrls.length; i++) {
+            logger.info("Adding URL for LB solr=" + solrUrls[i]);
+            lbSolrClient.addSolrServer(solrUrls[i]);
+          }
+          solrClient = lbSolrClient;
+        }
+      }
+      try {
+        logger.info("Pinging Solr server. zkHosts=" + zkHosts
+            + ", urls=" + solrUrl);
+        SolrPingResponse response = solrClient.ping();
+        if (response.getStatus() == 0) {
+          logger.info("Ping to Solr server is successful for writer="
+              + count);
+        } else {
+          logger.warn("Ping to Solr server failed. It will retry. writer="
+              + count
+              + ", solrUrl="
+              + solrUrl
+              + ", zkHosts="
+              + zkHosts
+              + ", collection="
+              + collection
+              + ", response=" + response);
+        }
+      } catch (Throwable t) {
+        logger.warn(
+            "Ping to Solr server failed. It will retry. writer="
+                + count + ", solrUrl=" + solrUrl + ", zkHosts="
+                + zkHosts + ", collection=" + collection, t);
+      }
+
+      // Let's start the thread
+      SolrWorkerThread solrWriterThread = new SolrWorkerThread(solrClient);
+      solrWriterThread.setName(getNameForThread() + "," + collection
+          + ",writer=" + count);
+      solrWriterThread.setDaemon(true);
+      solrWriterThread.start();
+      writerThreadList.add(solrWriterThread);
+    }
+  }
+
+  @Override
+  public void setDrain(boolean drain) {
+    super.setDrain(drain);
+  }
+
+  /**
+   * Flush document buffer
+   */
+  public void flush() {
+    logger.info("Flush called...");
+    setDrain(true);
+
+    int wrapUpTimeSecs = 30;
+    // Give the worker threads up to wrapUpTimeSecs seconds to wrap up
+    for (int i = 0; i < wrapUpTimeSecs; i++) {
+      boolean isPending = false;
+      for (SolrWorkerThread solrWorkerThread : writerThreadList) {
+        if (solrWorkerThread.isDone()) {
+          try {
+            solrWorkerThread.interrupt();
+          } catch (Throwable t) {
+            // ignore
+          }
+        } else {
+          isPending = true;
+        }
+      }
+      if (!isPending) {
+        break;
+      }
+      try {
+        logger.info("Will give " + (wrapUpTimeSecs - i)
+            + " seconds to wrap up");
+        Thread.sleep(1000);
+      } catch (InterruptedException e) {
+        // ignore
+      }
+    }
+  }
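+
+  // Note: flush() only requests draining and waits up to wrapUpTimeSecs;
+  // each worker thread exits on its own once the outgoing buffer is empty
+  // (see SolrWorkerThread.run()).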
+
+  @Override
+  public long getPendingCount() {
+    long totalCount = 0;
+    for (SolrWorkerThread solrWorkerThread : writerThreadList) {
+      totalCount += solrWorkerThread.localBuffer.size();
+    }
+    return totalCount;
+  }
+
+  @Override
+  public void close() {
+    logger.info("Closing Solr client...");
+    flush();
+
+    logger.info("Closed Solr client");
+    super.close();
+  }
+
+  @Override
+  public void write(Map<String, Object> jsonObj, InputMarker inputMarker)
+      throws Exception {
+    try {
+      outgoingBuffer.put(new OutputData(jsonObj, inputMarker));
+    } catch (InterruptedException e) {
+      // ignore
+    }
+  }
+
+  /*
+   * (non-Javadoc)
+   * 
+   * @see org.apache.ambari.logfeeder.ConfigBlock#getShortDescription()
+   */
+  @Override
+  public String getShortDescription() {
+    return "output:destination=solr,collection=" + collection;
+  }
+
+  /**
+   * Worker thread that drains the outgoing buffer and writes document
+   * batches to Solr.
+   */
+  class SolrWorkerThread extends Thread {
+    SolrClient solrClient = null;
+    Collection<SolrInputDocument> localBuffer = new ArrayList<SolrInputDocument>();
+    long localBufferBytesSize = 0;
+    Map<String, InputMarker> latestInputMarkerList = new HashMap<String, InputMarker>();
+
+    public SolrWorkerThread(SolrClient solrClient) {
+      this.solrClient = solrClient;
+    }
+
+    /*
+     * (non-Javadoc)
+     * 
+     * @see java.lang.Runnable#run()
+     */
+    @Override
+    public void run() {
+      logger.info("SolrWriter thread started");
+      long lastDispatchTime = System.currentTimeMillis();
+
+      while (true) {
+        long currTimeMS = System.currentTimeMillis();
+        OutputData outputData = null;
+        try {
+          long nextDispatchDuration = maxIntervalMS
+              - (currTimeMS - lastDispatchTime);
+          outputData = outgoingBuffer.poll();
+          if (outputData == null && !isDrain()
+              && nextDispatchDuration > 0) {
+            outputData = outgoingBuffer.poll(nextDispatchDuration,
+                TimeUnit.MILLISECONDS);
+          }
+
+          if (isDrain() && outputData == null
+              && outgoingBuffer.size() == 0) {
+            break;
+          }
+          if (outputData != null) {
+            if (outputData.jsonObj.get("id") == null) {
+              outputData.jsonObj.put("id", UUID.randomUUID()
+                  .toString());
+            }
+            SolrInputDocument document = new SolrInputDocument();
+            for (String name : outputData.jsonObj.keySet()) {
+              Object obj = outputData.jsonObj.get(name);
+              document.addField(name, obj);
+              try {
+                localBufferBytesSize += obj.toString().length();
+              } catch (Throwable t) {
+                final String LOG_MESSAGE_KEY = this.getClass()
+                    .getSimpleName() + "_BYTE_COUNT_ERROR";
+                LogFeederUtil.logErrorMessageByInterval(
+                    LOG_MESSAGE_KEY,
+                    "Error calculating byte size. object="
+                        + obj, t, logger, Level.ERROR);
+
+              }
+            }
+            latestInputMarkerList.put(
+                outputData.inputMarker.base64FileKey,
+                outputData.inputMarker);
+            localBuffer.add(document);
+          }
+
+          if (localBuffer.size() > 0
+              && ((outputData == null && isDrain())
+                  || nextDispatchDuration <= 0
+                  || localBuffer.size() >= maxBufferSize)) {
+            try {
+              if (isComputeCurrentCollection) {
+                // Compute the current router value
+
+                int weekDay = Calendar.getInstance().get(
+                    Calendar.DAY_OF_WEEK);
+                int currHour = Calendar.getInstance().get(
+                    Calendar.HOUR_OF_DAY);
+                int currMin = Calendar.getInstance().get(
+                    Calendar.MINUTE);
+
+                int minOfWeek = (weekDay - 1) * 24 * 60
+                    + currHour * 60 + currMin;
+                int slotByMin = minOfWeek / splitInterval
+                    % numberOfShards;
+
+                String shard = "shard" + slotByMin;
+
+                if (lastSlotByMin != slotByMin) {
+                  logger.info("Switching to shard " + shard
+                      + ", output="
+                      + getShortDescription());
+                  lastSlotByMin = slotByMin;
+                }
+
+                for (SolrInputDocument solrInputDocument : localBuffer) {
+                  solrInputDocument.addField(ROUTER_FIELD,
+                      shard);
+                }
+              }
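+
+              // Worked example (assuming splitInterval=720 minutes and
+              // numberOfShards=7): Wednesday 10:30 gives weekDay=4, so
+              // minOfWeek = (4 - 1) * 1440 + 10 * 60 + 30 = 4950 and
+              // slotByMin = 4950 / 720 % 7 = 6, i.e. documents are routed
+              // to "shard6" until the next 12-hour boundary.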
+
+              UpdateResponse response = solrClient
+                  .add(localBuffer);
+
+              if (response.getStatus() != 0) {
+                final String LOG_MESSAGE_KEY = this.getClass()
+                    .getSimpleName() + "_SOLR_UPDATE_ERROR";
+                LogFeederUtil
+                    .logErrorMessageByInterval(
+                        LOG_MESSAGE_KEY,
+                        "Error writing to Solr. response="
+                            + response.toString()
+                            + ", log="
+                            + (outputData == null ? null
+                                : outputData
+                                    .toString()),
+                        null, logger, Level.ERROR);
+              }
+              statMetric.count += localBuffer.size();
+              writeBytesMetric.count += localBufferBytesSize;
+              for (InputMarker inputMarker : latestInputMarkerList
+                  .values()) {
+                inputMarker.input.checkIn(inputMarker);
+              }
+
+              resetLocalBuffer();
+              lastDispatchTime = System.currentTimeMillis();
+            } catch (IOException ioException) {
+              // Transient error, let's block till it is available
+              while (!isDrain()) {
+                try {
+                  logger.warn("Solr is down. Going to sleep for "
+                      + RETRY_INTERVAL
+                      + " seconds. output="
+                      + getShortDescription());
+                  Thread.sleep(RETRY_INTERVAL * 1000);
+                } catch (Throwable t) {
+                  // ignore
+                  break;
+                }
+                if (isDrain()) {
+                  break;
+                }
+                try {
+                  SolrPingResponse pingResponse = solrClient
+                      .ping();
+                  if (pingResponse.getStatus() == 0) {
+                    logger.info("Solr seems to be up now. Resuming... output="
+                        + getShortDescription());
+                    break;
+                  }
+                } catch (Throwable t) {
+                  // Ignore
+                }
+              }
+            } catch (Throwable serverException) {
+              // Clear the buffer
+              resetLocalBuffer();
+              final String LOG_MESSAGE_KEY = this.getClass()
+                  .getSimpleName() + "_SOLR_UPDATE_EXCEPTION";
+              LogFeederUtil.logErrorMessageByInterval(
+                  LOG_MESSAGE_KEY,
+                  "Error sending log message to server. "
+                      + (outputData == null ? null
+                          : outputData.toString()),
+                  serverException, logger, Level.ERROR);
+            }
+          }
+        } catch (InterruptedException e) {
+          // Handle thread exiting
+        } catch (Throwable t) {
+          final String LOG_MESSAGE_KEY = this.getClass()
+              .getSimpleName() + "_SOLR_MAINLOOP_EXCEPTION";
+          LogFeederUtil.logErrorMessageByInterval(LOG_MESSAGE_KEY,
+              "Caught exception in main loop. " + outputData, t,
+              logger, Level.ERROR);
+        }
+      }
+
+      if (solrClient != null) {
+        try {
+          solrClient.close();
+        } catch (IOException e) {
+          // Ignore
+        }
+      }
+
+      resetLocalBuffer();
+      logger.info("Exiting Solr writer thread. output="
+          + getShortDescription());
+    }
+
+    public boolean isDone() {
+      return localBuffer.isEmpty();
+    }
+
+    public void resetLocalBuffer() {
+      localBuffer.clear();
+      localBufferBytesSize = 0;
+      latestInputMarkerList.clear();
+    }
+  }
+}

+ 202 - 0
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/SolrUtil.java

@@ -0,0 +1,202 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logfeeder.util;
+
+import java.io.IOException;
+import java.util.HashMap;
+
+import org.apache.ambari.logfeeder.LogFeederUtil;
+import org.apache.ambari.logfeeder.logconfig.LogFeederConstants;
+import org.apache.log4j.Logger;
+import org.apache.solr.client.solrj.SolrClient;
+import org.apache.solr.client.solrj.SolrQuery;
+import org.apache.solr.client.solrj.SolrServerException;
+import org.apache.solr.client.solrj.SolrRequest.METHOD;
+import org.apache.solr.client.solrj.impl.CloudSolrClient;
+import org.apache.solr.client.solrj.impl.HttpSolrClient;
+import org.apache.solr.client.solrj.request.CollectionAdminRequest;
+import org.apache.solr.client.solrj.response.CollectionAdminResponse;
+import org.apache.solr.client.solrj.response.QueryResponse;
+import org.apache.solr.common.SolrDocument;
+import org.apache.solr.common.SolrDocumentList;
+import org.apache.solr.common.SolrException;
+import org.apache.solr.common.SolrInputDocument;
+
+public class SolrUtil {
+
+  private static Logger logger = Logger.getLogger(SolrUtil.class);
+
+  private static SolrUtil instance = null;
+  SolrClient solrClient = null;
+  CloudSolrClient solrCloudClient = null;
+
+  boolean isSolrCloud = true;
+  String solrDetail = "";
+  String collectionName = null;
+
+  private SolrUtil() throws Exception {
+    String url = LogFeederUtil.getStringProperty("logfeeder.solr.url");
+    String zkHosts = LogFeederUtil.getStringProperty("logfeeder.solr.zkhosts");
+    String collection = LogFeederUtil.getStringProperty("logfeeder.solr.core.history", "history");
+    connectToSolr(url, zkHosts, collection);
+  }
+
+  public static SolrUtil getInstance() {
+    if (instance == null) {
+      synchronized (SolrUtil.class) {
+        if (instance == null) {
+          try {
+            instance = new SolrUtil();
+          } catch (Exception e) {
+            logger.error(e);
+          }
+        }
+      }
+    }
+    return instance;
+  }
+
+  public SolrClient connectToSolr(String url, String zkHosts,
+                                  String collection) throws Exception {
+    this.collectionName = collection;
+    solrDetail = "zkHosts=" + zkHosts + ", collection=" + collection
+      + ", url=" + url;
+
+    logger.info("connectToSolr() " + solrDetail);
+    if (collection == null || collection.isEmpty()) {
+      throw new Exception("For solr, collection name is mandatory. "
+        + solrDetail);
+    }
+    if (zkHosts != null && !zkHosts.isEmpty()) {
+      solrDetail = "zkHosts=" + zkHosts + ", collection=" + collection;
+      logger.info("Using zookeepr. " + solrDetail);
+      solrClouldClient = new CloudSolrClient(zkHosts);
+      solrClouldClient.setDefaultCollection(collection);
+      solrClient = solrClouldClient;
+      int waitDurationMS = 3 * 60 * 1000;
+      checkSolrStatus(waitDurationMS);
+    } else {
+      if (url == null || url.trim().isEmpty()) {
+        throw new Exception("Both zkHosts and URL are empty. zkHosts="
+          + zkHosts + ", collection=" + collection + ", url="
+          + url);
+      }
+      solrDetail = "collection=" + collection + ", url=" + url;
+      String collectionURL = url + "/" + collection;
+      logger.info("Connecting to  solr : " + collectionURL);
+      solrClient = new HttpSolrClient(collectionURL);
+
+    }
+    return solrClient;
+  }
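+
+  // Usage sketch (property names taken from the constructor above; values are
+  // illustrative): logfeeder.solr.zkhosts=zk1:2181,zk2:2181 selects the
+  // CloudSolrClient path, while logfeeder.solr.url=http://host:8886/solr with
+  // zkhosts unset selects a plain HttpSolrClient against <url>/<collection>.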
+
+  /**
+   * @param waitDurationMS maximum time in milliseconds to wait for Solr to respond
+   * @return true if Solr responded within the wait duration
+   */
+  public boolean checkSolrStatus(int waitDurationMS) {
+    boolean status = false;
+    try {
+      long beginTimeMS = System.currentTimeMillis();
+      long waitIntervalMS = 2000;
+      int pingCount = 0;
+      while (true) {
+        pingCount++;
+        CollectionAdminResponse response = null;
+        try {
+          CollectionAdminRequest.List colListReq = new CollectionAdminRequest.List();
+          response = colListReq.process(solrClient);
+        } catch (Exception ex) {
+          logger.error("Con't connect to Solr. solrDetail=" + solrDetail, ex);
+        }
+        if (response != null && response.getStatus() == 0) {
+          logger.info("Solr getCollections() is success. solr=" + solrDetail);
+          status = true;
+          break;
+        }
+        if (System.currentTimeMillis() - beginTimeMS > waitDurationMS) {
+          logger.error("Solr is not reachable even after "
+            + (System.currentTimeMillis() - beginTimeMS)
+            + " ms. If you are using alias, then you might have to restart LogSearch after Solr is up and running. solr="
+            + solrDetail + ", response=" + response);
+          break;
+        } else {
+          logger.warn("Solr is not not reachable yet. getCollections() attempt count=" + pingCount
+            + ". Will sleep for " + waitIntervalMS + " ms and try again." + " solr=" + solrDetail
+            + ", response=" + response);
+
+        }
+        Thread.sleep(waitIntervalMS);
+      }
+    } catch (Throwable t) {
+      logger.error("Seems Solr is not up. solrDetail=" + solrDetail);
+    }
+    return status;
+  }
+
+  /**
+   * @param solrQuery the query to execute
+   * @return the query response, or null if no Solr client is available
+   * @throws SolrServerException
+   * @throws IOException
+   * @throws SolrException
+   */
+  public QueryResponse process(SolrQuery solrQuery) throws SolrServerException, IOException, SolrException {
+    if (solrClient != null) {
+      return solrClient.query(solrQuery, METHOD.POST);
+    } else {
+      return null;
+    }
+  }
+
+  /**
+   * @return the stored filter configuration document as a map; empty if none is found
+   */
+  public HashMap<String, Object> getConfigDoc() {
+    HashMap<String, Object> configMap = new HashMap<String, Object>();
+    SolrQuery solrQuery = new SolrQuery();
+    solrQuery.setQuery("*:*");
+    String fq = LogFeederConstants.ROW_TYPE + ":" + LogFeederConstants.NAME;
+    solrQuery.setFilterQueries(fq);
+    try {
+      QueryResponse response = SolrUtil.getInstance().process(solrQuery);
+      SolrDocumentList documentList = response.getResults();
+      if (documentList != null && documentList.size() > 0) {
+        SolrDocument configDoc = documentList.get(0);
+        String configJson = LogFeederUtil.getGson().toJson(configDoc);
+        configMap = (HashMap<String, Object>) LogFeederUtil.toJSONObject(configJson);
+      }
+    } catch (SolrException | SolrServerException | IOException e) {
+      logger.error(e);
+    }
+    return configMap;
+  }
+
+  /**
+   * @param solrInputDocument the document to add and commit
+   * @throws SolrServerException
+   * @throws IOException
+   */
+  public void addDoc(SolrInputDocument solrInputDocument) throws SolrServerException, IOException {
+    solrClient.add(solrInputDocument);
+    solrClient.commit();
+  }
+
+}

+ 90 - 0
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/view/VLogfeederFilter.java

@@ -0,0 +1,90 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logfeeder.view;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlRootElement;
+
+import org.codehaus.jackson.annotate.JsonAutoDetect;
+import org.codehaus.jackson.annotate.JsonAutoDetect.Visibility;
+import org.codehaus.jackson.map.annotate.JsonSerialize;
+
+@JsonAutoDetect(getterVisibility = Visibility.NONE, setterVisibility = Visibility.NONE, fieldVisibility = Visibility.ANY)
+@JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL)
+@XmlRootElement
+@XmlAccessorType(XmlAccessType.FIELD)
+public class VLogfeederFilter {
+
+  private String label;
+  private List<String> hosts;
+  private List<String> defaultLevels;
+  private List<String> overrideLevels;
+  private String expiryTime;
+
+  public VLogfeederFilter() {
+    hosts = new ArrayList<String>();
+    defaultLevels = new ArrayList<String>();
+    overrideLevels = new ArrayList<String>();
+  }
+
+  public String getLabel() {
+    return label;
+  }
+
+  public void setLabel(String label) {
+    this.label = label;
+  }
+
+  public List<String> getHosts() {
+    return hosts;
+  }
+
+  public void setHosts(List<String> hosts) {
+    this.hosts = hosts;
+  }
+
+  public List<String> getDefaultLevels() {
+    return defaultLevels;
+  }
+
+  public void setDefaultLevels(List<String> defaultLevels) {
+    this.defaultLevels = defaultLevels;
+  }
+
+  public List<String> getOverrideLevels() {
+    return overrideLevels;
+  }
+
+  public void setOverrideLevels(List<String> overrideLevels) {
+    this.overrideLevels = overrideLevels;
+  }
+
+  public String getExpiryTime() {
+    return expiryTime;
+  }
+
+  public void setExpiryTime(String expiryTime) {
+    this.expiryTime = expiryTime;
+  }
+
+}

+ 55 - 0
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/view/VLogfeederFilterWrapper.java

@@ -0,0 +1,55 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logfeeder.view;
+
+import java.util.HashMap;
+
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlRootElement;
+
+import org.codehaus.jackson.annotate.JsonAutoDetect;
+import org.codehaus.jackson.annotate.JsonAutoDetect.Visibility;
+import org.codehaus.jackson.map.annotate.JsonSerialize;
+
+@JsonAutoDetect(getterVisibility = Visibility.NONE, setterVisibility = Visibility.NONE, fieldVisibility = Visibility.ANY)
+@JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL)
+@XmlRootElement
+@XmlAccessorType(XmlAccessType.FIELD)
+public class VLogfeederFilterWrapper {
+
+  private HashMap<String, VLogfeederFilter> filter;
+  private String id;
+
+  public HashMap<String, VLogfeederFilter> getFilter() {
+    return filter;
+  }
+
+  public void setFilter(HashMap<String, VLogfeederFilter> filter) {
+    this.filter = filter;
+  }
+
+  public String getId() {
+    return id;
+  }
+
+  public void setId(String id) {
+    this.id = id;
+  }
+}

+ 95 - 0
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AbstractTimelineMetricsSink.java

@@ -0,0 +1,95 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.metrics2.sink.timeline;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.codehaus.jackson.map.AnnotationIntrospector;
+import org.codehaus.jackson.map.ObjectMapper;
+import org.codehaus.jackson.map.annotate.JsonSerialize;
+import org.codehaus.jackson.xc.JaxbAnnotationIntrospector;
+
+import java.io.IOException;
+import java.io.OutputStream;
+import java.net.HttpURLConnection;
+import java.net.URL;
+
+public abstract class AbstractTimelineMetricsSink {
+  public static final String TAGS_FOR_PREFIX_PROPERTY_PREFIX = "tagsForPrefix.";
+  public static final String MAX_METRIC_ROW_CACHE_SIZE = "maxRowCacheSize";
+  public static final String METRICS_SEND_INTERVAL = "sendInterval";
+  public static final String METRICS_POST_TIMEOUT_SECONDS = "timeout";
+  public static final String COLLECTOR_HOST_PROPERTY = "collector";
+  public static final String COLLECTOR_PORT_PROPERTY = "port";
+  public static final int DEFAULT_POST_TIMEOUT_SECONDS = 10;
+
+  protected final Log LOG;
+
+  protected static ObjectMapper mapper;
+
+  static {
+    mapper = new ObjectMapper();
+    AnnotationIntrospector introspector = new JaxbAnnotationIntrospector();
+    mapper.setAnnotationIntrospector(introspector);
+    mapper.getSerializationConfig()
+        .setSerializationInclusion(JsonSerialize.Inclusion.NON_NULL);
+  }
+
+  public AbstractTimelineMetricsSink() {
+    LOG = LogFactory.getLog(this.getClass());
+  }
+
+  protected void emitMetrics(TimelineMetrics metrics) {
+    String connectUrl = getCollectorUri();
+    int timeout = getTimeoutSeconds() * 1000;
+    try {
+      String jsonData = mapper.writeValueAsString(metrics);
+      LOG.info("Posting JSON=" + jsonData);
+      
+      HttpURLConnection connection =
+        (HttpURLConnection) new URL(connectUrl).openConnection();
+
+      connection.setRequestMethod("POST");
+      connection.setRequestProperty("Content-Type", "application/json");
+      connection.setConnectTimeout(timeout);
+      connection.setReadTimeout(timeout);
+      connection.setDoOutput(true);
+
+      if (jsonData != null) {
+        try (OutputStream os = connection.getOutputStream()) {
+          os.write(jsonData.getBytes("UTF-8"));
+        }
+      }
+
+      int statusCode = connection.getResponseCode();
+
+      if (statusCode != 200) {
+        LOG.info("Unable to POST metrics to collector, " + connectUrl + ", " +
+          "statusCode = " + statusCode);
+      } else {
+        LOG.debug("Metrics posted to Collector " + connectUrl);
+      }
+    } catch (IOException e) {
+      throw new UnableToConnectException(e).setConnectUrl(connectUrl);
+    }
+  }
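+
+  // Note: a non-200 status from the collector is only logged here, while
+  // transport-level failures surface as UnableToConnectException carrying
+  // the collector URL.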
+
+  abstract protected String getCollectorUri();
+
+  abstract protected int getTimeoutSeconds();
+}

+ 79 - 0
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/hadoop/metrics2/sink/timeline/Precision.java

@@ -0,0 +1,79 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.metrics2.sink.timeline;
+
+/**
+ * Is used to determine metrics aggregate table.
+ *
+ * @see org.apache.hadoop.yarn.server.applicationhistoryservice.webapp.TimelineWebServices#getTimelineMetric
+ */
+public enum Precision {
+  SECONDS,
+  MINUTES,
+  HOURS,
+  DAYS;
+
+  public static class PrecisionFormatException extends IllegalArgumentException {
+    public PrecisionFormatException(String message, Throwable cause) {
+      super(message, cause);
+    }
+  }
+
+  public static Precision getPrecision(String precision) throws PrecisionFormatException {
+    if (precision == null) {
+      return null;
+    }
+    try {
+      return Precision.valueOf(precision.toUpperCase());
+    } catch (IllegalArgumentException e) {
+      throw new PrecisionFormatException("precision should be seconds, " +
+        "minutes, hours or days", e);
+    }
+  }
+
+  public static Precision getPrecision(long startTime, long endTime) {
+    long HOUR = 3600000; // 1 hour
+    long DAY = 86400000; // 1 day
+    long timeRange = endTime - startTime;
+    if (timeRange > 30 * DAY) {
+      return Precision.DAYS;
+    } else if (timeRange > 1 * DAY) {
+      return Precision.HOURS;
+    } else if (timeRange > 2 * HOUR) {
+      return Precision.MINUTES;
+    } else {
+      return Precision.SECONDS;
+    }
+  }
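+
+  // For example, a 45-day range maps to DAYS, a 3-day range to HOURS, a
+  // 6-hour range to MINUTES, and a 30-minute range to SECONDS.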
+
+  public static Precision getHigherPrecision(Precision precision) {
+
+    if (precision == null) {
+      return null;
+    }
+
+    if (precision.equals(Precision.SECONDS)) {
+      return Precision.MINUTES;
+    } else if (precision.equals(Precision.MINUTES)) {
+      return Precision.HOURS;
+    } else if (precision.equals(Precision.HOURS)) {
+      return Precision.DAYS;
+    } else {
+      return null;
+    }
+  }
+}

+ 36 - 0
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/hadoop/metrics2/sink/timeline/PrecisionLimitExceededException.java

@@ -0,0 +1,36 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.metrics2.sink.timeline;
+
+public class PrecisionLimitExceededException extends IllegalArgumentException {
+
+  private static final long serialVersionUID = 1L;
+
+  public PrecisionLimitExceededException(String message, Throwable cause) {
+    super(message, cause);
+  }
+
+  public PrecisionLimitExceededException(String message) {
+    super(message);
+  }
+
+  public PrecisionLimitExceededException(Throwable cause) {
+    super(cause);
+  }
+
+}

+ 107 - 0
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/hadoop/metrics2/sink/timeline/SingleValuedTimelineMetric.java

@@ -0,0 +1,107 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.metrics2.sink.timeline;
+
+/**
+ * This class prevents creating a TreeMap for every instantiation of a metric
+ * read from the store. The methods are meant to provide interoperability
+ * with {@link TimelineMetric}.
+ */
+public class SingleValuedTimelineMetric {
+  private Long timestamp;
+  private Double value;
+  private String metricName;
+  private String appId;
+  private String instanceId;
+  private String hostName;
+  private Long startTime;
+  private String type;
+
+  public void setSingleTimeseriesValue(Long timestamp, Double value) {
+    this.timestamp = timestamp;
+    this.value = value;
+  }
+
+  public SingleValuedTimelineMetric(String metricName, String appId,
+                                    String instanceId, String hostName,
+                                    long timestamp, long startTime, String type) {
+    this.metricName = metricName;
+    this.appId = appId;
+    this.instanceId = instanceId;
+    this.hostName = hostName;
+    this.timestamp = timestamp;
+    this.startTime = startTime;
+    this.type = type;
+  }
+
+  public Long getTimestamp() {
+    return timestamp;
+  }
+
+  public long getStartTime() {
+    return startTime;
+  }
+
+  public String getType() {
+    return type;
+  }
+
+  public Double getValue() {
+    return value;
+  }
+
+  public String getMetricName() {
+    return metricName;
+  }
+
+  public String getAppId() {
+    return appId;
+  }
+
+  public String getInstanceId() {
+    return instanceId;
+  }
+
+  public String getHostName() {
+    return hostName;
+  }
+
+  public boolean equalsExceptTime(TimelineMetric metric) {
+    if (!metricName.equals(metric.getMetricName())) return false;
+    if (hostName != null ? !hostName.equals(metric.getHostName()) : metric.getHostName() != null)
+      return false;
+    if (appId != null ? !appId.equals(metric.getAppId()) : metric.getAppId() != null)
+      return false;
+    if (instanceId != null ? !instanceId.equals(metric.getInstanceId()) : metric.getInstanceId() != null) return false;
+
+    return true;
+  }
+
+  public TimelineMetric getTimelineMetric() {
+    TimelineMetric metric = new TimelineMetric();
+    metric.setMetricName(this.metricName);
+    metric.setAppId(this.appId);
+    metric.setHostName(this.hostName);
+    metric.setType(this.type);
+    metric.setInstanceId(this.instanceId);
+    metric.setStartTime(this.startTime);
+    metric.setTimestamp(this.timestamp);
+    metric.getMetricValues().put(timestamp, value);
+    return metric;
+  }
+}

+ 188 - 0
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/hadoop/metrics2/sink/timeline/TimelineMetric.java

@@ -0,0 +1,188 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.metrics2.sink.timeline;
+
+import java.util.Map;
+import java.util.TreeMap;
+
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlElement;
+import javax.xml.bind.annotation.XmlRootElement;
+
+import org.codehaus.jackson.map.annotate.JsonDeserialize;
+
+@XmlRootElement(name = "metric")
+@XmlAccessorType(XmlAccessType.NONE)
+public class TimelineMetric implements Comparable<TimelineMetric> {
+
+  private String metricName;
+  private String appId;
+  private String instanceId;
+  private String hostName;
+  private long timestamp;
+  private long startTime;
+  private String type;
+  private TreeMap<Long, Double> metricValues = new TreeMap<Long, Double>();
+
+  // default
+  public TimelineMetric() {
+
+  }
+
+  // copy constructor
+  public TimelineMetric(TimelineMetric metric) {
+    setMetricName(metric.getMetricName());
+    setType(metric.getType());
+    setTimestamp(metric.getTimestamp());
+    setAppId(metric.getAppId());
+    setInstanceId(metric.getInstanceId());
+    setHostName(metric.getHostName());
+    setStartTime(metric.getStartTime());
+    setMetricValues(new TreeMap<Long, Double>(metric.getMetricValues()));
+  }
+
+  @XmlElement(name = "metricname")
+  public String getMetricName() {
+    return metricName;
+  }
+
+  public void setMetricName(String metricName) {
+    this.metricName = metricName;
+  }
+
+  @XmlElement(name = "appid")
+  public String getAppId() {
+    return appId;
+  }
+
+  public void setAppId(String appId) {
+    this.appId = appId;
+  }
+
+  @XmlElement(name = "instanceid")
+  public String getInstanceId() {
+    return instanceId;
+  }
+
+  public void setInstanceId(String instanceId) {
+    this.instanceId = instanceId;
+  }
+
+  @XmlElement(name = "hostname")
+  public String getHostName() {
+    return hostName;
+  }
+
+  public void setHostName(String hostName) {
+    this.hostName = hostName;
+  }
+
+  @XmlElement(name = "timestamp")
+  public long getTimestamp() {
+    return timestamp;
+  }
+
+  public void setTimestamp(long timestamp) {
+    this.timestamp = timestamp;
+  }
+
+  @XmlElement(name = "starttime")
+  public long getStartTime() {
+    return startTime;
+  }
+
+  public void setStartTime(long startTime) {
+    this.startTime = startTime;
+  }
+
+  @XmlElement(name = "type")
+  public String getType() {
+    return type;
+  }
+
+  public void setType(String type) {
+    this.type = type;
+  }
+
+  @XmlElement(name = "metrics")
+  public TreeMap<Long, Double> getMetricValues() {
+    return metricValues;
+  }
+
+  public void setMetricValues(TreeMap<Long, Double> metricValues) {
+    this.metricValues = metricValues;
+  }
+
+  public void addMetricValues(Map<Long, Double> metricValues) {
+    this.metricValues.putAll(metricValues);
+  }
+
+  @Override
+  public boolean equals(Object o) {
+    if (this == o) return true;
+    if (o == null || getClass() != o.getClass()) return false;
+
+    TimelineMetric metric = (TimelineMetric) o;
+
+    if (!metricName.equals(metric.metricName)) return false;
+    if (hostName != null ? !hostName.equals(metric.hostName) : metric.hostName != null)
+      return false;
+    if (appId != null ? !appId.equals(metric.appId) : metric.appId != null)
+      return false;
+    if (instanceId != null ? !instanceId.equals(metric.instanceId) : metric.instanceId != null)
+      return false;
+    if (timestamp != metric.timestamp) return false;
+    if (startTime != metric.startTime) return false;
+
+    return true;
+  }
+
+  public boolean equalsExceptTime(TimelineMetric metric) {
+    if (!metricName.equals(metric.metricName)) return false;
+    if (hostName != null ? !hostName.equals(metric.hostName) : metric.hostName != null)
+      return false;
+    if (appId != null ? !appId.equals(metric.appId) : metric.appId != null)
+      return false;
+    if (instanceId != null ? !instanceId.equals(metric.instanceId) : metric.instanceId != null)
+      return false;
+
+    return true;
+  }
+
+  @Override
+  public int hashCode() {
+    int result = metricName.hashCode();
+    result = 31 * result + (appId != null ? appId.hashCode() : 0);
+    result = 31 * result + (instanceId != null ? instanceId.hashCode() : 0);
+    result = 31 * result + (hostName != null ? hostName.hashCode() : 0);
+    result = 31 * result + (int) (timestamp ^ (timestamp >>> 32));
+    return result;
+  }
+
+  @Override
+  public int compareTo(TimelineMetric other) {
+    if (timestamp > other.timestamp) {
+      return -1;
+    } else if (timestamp < other.timestamp) {
+      return 1;
+    } else {
+      return metricName.compareTo(other.metricName);
+    }
+  }
+}

+ 123 - 0
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/hadoop/metrics2/sink/timeline/TimelineMetrics.java

@@ -0,0 +1,123 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.metrics2.sink.timeline;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlElement;
+import javax.xml.bind.annotation.XmlRootElement;
+
+/**
+ * The class that hosts a list of timeline entities.
+ */
+@XmlRootElement(name = "metrics")
+@XmlAccessorType(XmlAccessType.NONE)
+public class TimelineMetrics {
+
+  private List<TimelineMetric> allMetrics = new ArrayList<TimelineMetric>();
+
+  public TimelineMetrics() {}
+
+  @XmlElement(name = "metrics")
+  public List<TimelineMetric> getMetrics() {
+    return allMetrics;
+  }
+
+  public void setMetrics(List<TimelineMetric> allMetrics) {
+    this.allMetrics = allMetrics;
+  }
+
+  private boolean isEqualTimelineMetrics(TimelineMetric metric1,
+                                         TimelineMetric metric2) {
+
+    if (!metric1.getMetricName().equals(metric2.getMetricName())) {
+      return false;
+    }
+
+    if (metric1.getHostName() != null
+        && !metric1.getHostName().equals(metric2.getHostName())) {
+      return false;
+    }
+
+    if (metric1.getAppId() != null
+        && !metric1.getAppId().equals(metric2.getAppId())) {
+      return false;
+    }
+
+    return true;
+  }
+
+  /**
+   * Merge with existing TimelineMetric if everything except startTime is
+   * the same.
+   * @param metric {@link TimelineMetric}
+   */
+  public void addOrMergeTimelineMetric(TimelineMetric metric) {
+    TimelineMetric metricToMerge = null;
+
+    if (!allMetrics.isEmpty()) {
+      for (TimelineMetric timelineMetric : allMetrics) {
+        if (timelineMetric.equalsExceptTime(metric)) {
+          metricToMerge = timelineMetric;
+          break;
+        }
+      }
+    }
+
+    if (metricToMerge != null) {
+      metricToMerge.addMetricValues(metric.getMetricValues());
+      if (metricToMerge.getTimestamp() > metric.getTimestamp()) {
+        metricToMerge.setTimestamp(metric.getTimestamp());
+      }
+      if (metricToMerge.getStartTime() > metric.getStartTime()) {
+        metricToMerge.setStartTime(metric.getStartTime());
+      }
+    } else {
+      allMetrics.add(metric);
+    }
+  }
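+
+  // Example: two metrics that differ only in start time and values (same
+  // name, appId, instanceId and host) collapse into a single entry whose
+  // value map is the union of both and whose timestamp/startTime are the
+  // smaller of the two.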
+
+  // Optimization that avoids creating a TreeMap for each single-valued metric.
+  public void addOrMergeTimelineMetric(SingleValuedTimelineMetric metric) {
+    TimelineMetric metricToMerge = null;
+
+    if (!allMetrics.isEmpty()) {
+      for (TimelineMetric timelineMetric : allMetrics) {
+        if (metric.equalsExceptTime(timelineMetric)) {
+          metricToMerge = timelineMetric;
+          break;
+        }
+      }
+    }
+
+    if (metricToMerge != null) {
+      metricToMerge.getMetricValues().put(metric.getTimestamp(), metric.getValue());
+      if (metricToMerge.getTimestamp() > metric.getTimestamp()) {
+        metricToMerge.setTimestamp(metric.getTimestamp());
+      }
+      if (metricToMerge.getStartTime() > metric.getStartTime()) {
+        metricToMerge.setStartTime(metric.getStartTime());
+      }
+    } else {
+      allMetrics.add(metric.getTimelineMetric());
+    }
+  }
+}

+ 46 - 0
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/hadoop/metrics2/sink/timeline/UnableToConnectException.java

@@ -0,0 +1,46 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.metrics2.sink.timeline;
+
+public class UnableToConnectException extends RuntimeException {
+
+  private static final long serialVersionUID = 1L;
+
+  private String connectUrl;
+
+  public UnableToConnectException(String message, Throwable cause) {
+    super(message, cause);
+  }
+
+  public UnableToConnectException(String message) {
+    super(message);
+  }
+
+  public UnableToConnectException(Throwable cause) {
+    super(cause);
+  }
+
+  public UnableToConnectException setConnectUrl(String connectUrl) {
+    this.connectUrl = connectUrl;
+    return this;
+  }
+
+  public String getConnectUrl() {
+    return connectUrl;
+  }
+}

+ 175 - 0
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/hadoop/metrics2/sink/timeline/cache/TimelineMetricsCache.java

@@ -0,0 +1,175 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.metrics2.sink.timeline.cache;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
+
+import java.util.HashMap;
+import java.util.LinkedHashMap;
+import java.util.Map;
+import java.util.TreeMap;
+
+public class TimelineMetricsCache {
+
+  private final TimelineMetricHolder timelineMetricCache = new TimelineMetricHolder();
+  private static final Log LOG = LogFactory.getLog(TimelineMetricsCache.class);
+  public static final int MAX_RECS_PER_NAME_DEFAULT = 10000;
+  public static final int MAX_EVICTION_TIME_MILLIS = 59000; // ~ 1 min
+  private final int maxRecsPerName;
+  private final int maxEvictionTimeInMillis;
+  private final Map<String, Double> counterMetricLastValue = new HashMap<String, Double>();
+
+  public TimelineMetricsCache(int maxRecsPerName, int maxEvictionTimeInMillis) {
+    this.maxRecsPerName = maxRecsPerName;
+    this.maxEvictionTimeInMillis = maxEvictionTimeInMillis;
+  }
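+
+  // A typical setup (illustrative): new TimelineMetricsCache(
+  // MAX_RECS_PER_NAME_DEFAULT, MAX_EVICTION_TIME_MILLIS) tracks up to 10000
+  // metric names and only evicts a metric once it has buffered roughly a
+  // minute of values.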
+
+  class TimelineMetricWrapper {
+    private long timeDiff = -1;
+    private long oldestTimestamp = -1;
+    private TimelineMetric timelineMetric;
+
+    TimelineMetricWrapper(TimelineMetric timelineMetric) {
+      this.timelineMetric = timelineMetric;
+      this.oldestTimestamp = timelineMetric.getStartTime();
+    }
+
+    private void updateTimeDiff(long timestamp) {
+      if (oldestTimestamp != -1 && timestamp > oldestTimestamp) {
+        timeDiff = timestamp - oldestTimestamp;
+      } else {
+        oldestTimestamp = timestamp;
+      }
+    }
+
+    public void putMetric(TimelineMetric metric) {
+      this.timelineMetric.addMetricValues(metric.getMetricValues());
+      updateTimeDiff(metric.getStartTime());
+    }
+
+    public long getTimeDiff() {
+      return timeDiff;
+    }
+
+    public TimelineMetric getTimelineMetric() {
+      return timelineMetric;
+    }
+  }
+
+  // TODO: Change to ConcurrentHashMap with weighted eviction
+  class TimelineMetricHolder extends LinkedHashMap<String, TimelineMetricWrapper> {
+    private static final long serialVersionUID = 1L;
+    private boolean gotOverflow = false;
+    // To avoid duplication at the end of the buffer and beginning of the next
+    // segment of values
+    private Map<String, Long> endOfBufferTimestamps = new HashMap<String, Long>();
+
+    @Override
+    protected boolean removeEldestEntry(Map.Entry<String, TimelineMetricWrapper> eldest) {
+      boolean overflow = size() > maxRecsPerName;
+      if (overflow && !gotOverflow) {
+        LOG.warn("Metrics cache overflow at "+ size() +" for "+ eldest);
+        gotOverflow = true;
+      }
+      return overflow;
+    }
+
+    public TimelineMetric evict(String metricName) {
+      TimelineMetricWrapper metricWrapper = this.get(metricName);
+
+      if (metricWrapper == null
+        || metricWrapper.getTimeDiff() < getMaxEvictionTimeInMillis()) {
+        return null;
+      }
+
+      TimelineMetric timelineMetric = metricWrapper.getTimelineMetric();
+      this.remove(metricName);
+
+      return timelineMetric;
+    }
+
+    public void put(String metricName, TimelineMetric timelineMetric) {
+      if (isDuplicate(timelineMetric)) {
+        return;
+      }
+      TimelineMetricWrapper metric = this.get(metricName);
+      if (metric == null) {
+        this.put(metricName, new TimelineMetricWrapper(timelineMetric));
+      } else {
+        metric.putMetric(timelineMetric);
+      }
+      // Buffer last ts value
+      endOfBufferTimestamps.put(metricName, timelineMetric.getStartTime());
+    }
+
+    /**
+     * Test whether the last buffered timestamp is the same as the newly received one.
+     * @param timelineMetric the {@link TimelineMetric} to check
+     * @return true if the metric's start time was already buffered
+     */
+    private boolean isDuplicate(TimelineMetric timelineMetric) {
+      return endOfBufferTimestamps.containsKey(timelineMetric.getMetricName())
+        && endOfBufferTimestamps.get(timelineMetric.getMetricName()).equals(timelineMetric.getStartTime());
+    }
+  }
+
+  public TimelineMetric getTimelineMetric(String metricName) {
+    if (timelineMetricCache.containsKey(metricName)) {
+      return timelineMetricCache.evict(metricName);
+    }
+
+    return null;
+  }
+
+  /**
+   * Getter method to help testing eviction
+   * @return the maximum eviction time in milliseconds
+   */
+  public int getMaxEvictionTimeInMillis() {
+    return maxEvictionTimeInMillis;
+  }
+
+  public void putTimelineMetric(TimelineMetric timelineMetric) {
+    timelineMetricCache.put(timelineMetric.getMetricName(), timelineMetric);
+  }
+
+  private void transformMetricValuesToDerivative(TimelineMetric timelineMetric) {
+    String metricName = timelineMetric.getMetricName();
+    double firstValue = timelineMetric.getMetricValues().size() > 0
+        ? timelineMetric.getMetricValues().entrySet().iterator().next().getValue() : 0;
+    Double value = counterMetricLastValue.get(metricName);
+    double previousValue = value != null ? value : firstValue;
+    Map<Long, Double> metricValues = timelineMetric.getMetricValues();
+    TreeMap<Long, Double> newMetricValues = new TreeMap<Long, Double>();
+    for (Map.Entry<Long, Double> entry : metricValues.entrySet()) {
+      newMetricValues.put(entry.getKey(), entry.getValue() - previousValue);
+      previousValue = entry.getValue();
+    }
+    timelineMetric.setMetricValues(newMetricValues);
+    counterMetricLastValue.put(metricName, previousValue);
+  }
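+
+  // Example: with no previously stored value, counter samples
+  // {t1=100, t2=130, t3=190} become {t1=0, t2=30, t3=60}; the first delta is
+  // taken against firstValue (100), and counterMetricLastValue then holds 190
+  // for the next batch.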
+
+  public void putTimelineMetric(TimelineMetric timelineMetric, boolean isCounter) {
+    if (isCounter) {
+      transformMetricValuesToDerivative(timelineMetric);
+    }
+    putTimelineMetric(timelineMetric);
+  }
+}

+ 62 - 0
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/hadoop/metrics2/sink/timeline/configuration/Configuration.java

@@ -0,0 +1,62 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics2.sink.timeline.configuration;
+
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.Properties;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+public class Configuration {
+  public final Log LOG = LogFactory.getLog(this.getClass());
+  private final Properties properties;
+
+  public Configuration(String configFile) {
+    properties = new Properties();
+
+    //Get property file stream from classpath
+    InputStream inputStream = Configuration.class.getResourceAsStream(configFile);
+
+    if (inputStream == null) {
+      throw new IllegalArgumentException(configFile + " not found in classpath");
+    }
+
+    // load the properties
+    try {
+      properties.load(inputStream);
+      inputStream.close();
+    } catch (FileNotFoundException fnf) {
+      LOG.info("No configuration file " + configFile + " found in classpath.", fnf);
+    } catch (IOException ie) {
+      throw new IllegalArgumentException("Can't read configuration file " +
+          configFile, ie);
+    }
+  }
+
+  public String getProperty(String key) {
+    return properties.getProperty(key);
+  }
+
+  public String getProperty(String key, String defaultValue) {
+    return properties.getProperty(key, defaultValue);
+  }
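+
+  // e.g. new Configuration("/metrics.properties").getProperty("collector",
+  // "localhost") falls back to the default when the key is missing (the file
+  // name is illustrative; it must be on the classpath).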
+}

+ 106 - 0
ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/hadoop/metrics2/sink/util/Servers.java

@@ -0,0 +1,106 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.metrics2.sink.util;
+
+import java.net.InetAddress;
+import java.net.InetSocketAddress;
+import java.net.URI;
+import java.net.UnknownHostException;
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Helpers to handle server addresses
+ */
+public class Servers {
+  /**
+   * This class is not intended to be instantiated
+   */
+  private Servers() {}
+
+  /**
+   * Parses a space and/or comma separated sequence of server specifications
+   * of the form <i>hostname</i> or <i>hostname:port</i>.  If
+   * the specs string is null, defaults to localhost:defaultPort.
+   *
+   * @param specs   server specs (see description)
+   * @param defaultPort the default port if not specified
+   * @return a list of InetSocketAddress objects.
+   */
+  public static List<InetSocketAddress> parse(String specs, int defaultPort) {
+    List<InetSocketAddress> result = new ArrayList<InetSocketAddress>();
+    if (specs == null) {
+      result.add(new InetSocketAddress("localhost", defaultPort));
+    } else {
+      String[] specStrings = specs.split("[ ,]+");
+      for (String specString : specStrings) {
+        result.add(createSocketAddr(specString, defaultPort));
+      }
+    }
+    return result;
+  }
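+
+  // e.g. parse("host1:8660, host2", 8660) yields [host1:8660, host2:8660],
+  // and parse(null, 8660) yields [localhost:8660].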
+
+  /**
+   * @param target server spec of the form hostname, hostname:port, or a full URI
+   * @param defaultPort the port to use when the spec does not include one
+   * @return an InetSocketAddress created from the given spec
+   */
+  private static InetSocketAddress createSocketAddr(String target, int defaultPort) {
+    String helpText = "";
+    if (target == null) {
+      throw new IllegalArgumentException("Target address cannot be null." + helpText);
+    }
+    boolean hasScheme = target.contains("://");
+    URI uri = null;
+    try {
+      uri = hasScheme ? URI.create(target) : URI.create("dummyscheme://" + target);
+    } catch (IllegalArgumentException e) {
+      throw new IllegalArgumentException("Does not contain a valid host:port authority: " + target + helpText);
+    }
+
+    String host = uri.getHost();
+    int port = uri.getPort();
+    if (port == -1) {
+      port = defaultPort;
+    }
+    String path = uri.getPath();
+
+    if ((host == null) || (port < 0) || (!hasScheme && path != null && !path.isEmpty())) {
+      throw new IllegalArgumentException("Does not contain a valid host:port authority: " + target + helpText);
+    }
+    return createSocketAddrForHost(host, port);
+  }
+
+  /**
+   * @param host the host name or address to resolve
+   * @param port the port number
+   * @return an InetSocketAddress created with the specified host and port
+   */
+  private static InetSocketAddress createSocketAddrForHost(String host, int port) {
+    InetSocketAddress addr;
+    try {
+      InetAddress iaddr = InetAddress.getByName(host);
+      iaddr = InetAddress.getByAddress(host, iaddr.getAddress());
+      addr = new InetSocketAddress(iaddr, port);
+    } catch (UnknownHostException e) {
+      addr = InetSocketAddress.createUnresolved(host, port);
+    }
+    return addr;
+  }
+
+}

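A quick sketch of how Servers.parse behaves (host names are illustrative): specs without an explicit port inherit the default, and a null spec falls back to localhost:

    // "host1" gets the default port 6188; "host2:8886" keeps its own
    List<InetSocketAddress> addrs = Servers.parse("host1, host2:8886", 6188);
    // -> [host1:6188, host2:8886]
    List<InetSocketAddress> fallback = Servers.parse(null, 6188);
    // -> [localhost:6188]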
+ 22 - 0
ambari-logsearch/ambari-logsearch-logfeeder/src/main/package/deb/control/control

@@ -0,0 +1,22 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License
+Package: [[artifactId]]
+Version: [[package-version]]-[[package-release]]
+Section: [[deb.section]]
+Priority: [[deb.priority]]
+Depends: [[deb.dependency.list]]
+Architecture: [[deb.architecture]]
+Description: [[description]]
+Maintainer: [[deb.publisher]]

+ 15 - 0
ambari-logsearch/ambari-logsearch-logfeeder/src/main/package/deb/control/postinst

@@ -0,0 +1,15 @@
+#!/bin/bash
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License

+ 15 - 0
ambari-logsearch/ambari-logsearch-logfeeder/src/main/package/deb/control/postrm

@@ -0,0 +1,15 @@
+#!/bin/bash
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License

+ 15 - 0
ambari-logsearch/ambari-logsearch-logfeeder/src/main/package/deb/control/preinst

@@ -0,0 +1,15 @@
+#!/bin/bash
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License

+ 15 - 0
ambari-logsearch/ambari-logsearch-logfeeder/src/main/package/deb/control/prerm

@@ -0,0 +1,15 @@
+#!/bin/bash
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License

+ 42 - 0
ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/alias_config.json

@@ -0,0 +1,42 @@
+{
+	"input": {
+		"file": {
+			"klass": "org.apache.ambari.logfeeder.input.InputFile"
+		}
+	},
+	"filter": {
+		"json": {
+			"klass": "org.apache.ambari.logfeeder.filter.JSONFilterCode"
+		},
+		"keyvalue": {
+			"klass": "org.apache.ambari.logfeeder.filter.FilterKeyValue"
+		},
+		"grok": {
+			"klass": "org.apache.ambari.logfeeder.filter.FilterGrok"
+		}
+	},
+	"mapper": {
+		"map_date": {
+			"klass": "org.apache.ambari.logfeeder.mapper.MapperDate"
+		},
+		"map_fieldname": {
+			"klass": "org.apache.ambari.logfeeder.mapper.MapperFieldName"
+		},
+		"map_fieldvalue": {
+			"klass": "org.apache.ambari.logfeeder.mapper.MapperFieldValue"
+		}
+	},
+	"output": {
+		"solr": {
+			"klass": "org.apache.ambari.logfeeder.output.OutputSolr"
+		},
+		"file": {
+			"klass": "org.apache.ambari.logfeeder.output.OutputFile"
+		},
+		"kafka": {
+			"klass": "org.apache.ambari.logfeeder.output.OutputKafka"
+		}
+	}
+}

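The alias file above maps short input/filter/mapper/output type names to implementation classes, so configs can say "grok" instead of a fully qualified class name. A minimal resolution sketch, assuming the JSON has already been parsed into nested maps (loadAliases is a hypothetical helper; checked exceptions omitted for brevity):

    // Resolve block "filter", alias "grok" to its "klass" entry and instantiate it reflectively
    Map<String, Map<String, Map<String, String>>> aliases = loadAliases("alias_config.json"); // hypothetical
    String klass = aliases.get("filter").get("grok").get("klass"); // org.apache.ambari.logfeeder.filter.FilterGrok
    Object filter = Class.forName(klass).newInstance();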
+ 995 - 0
ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/config.json.j2

@@ -0,0 +1,995 @@
+{#
+ # Licensed to the Apache Software Foundation (ASF) under one
+ # or more contributor license agreements.  See the NOTICE file
+ # distributed with this work for additional information
+ # regarding copyright ownership.  The ASF licenses this file
+ # to you under the Apache License, Version 2.0 (the
+ # "License"); you may not use this file except in compliance
+ # with the License.  You may obtain a copy of the License at
+ #
+ #   http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ #}
+{
+	"global":{
+		"add_fields":{
+			"cluster":"{{cluster_name}}"
+		},
+		"source":"file",
+		"tail":"true",
+		"gen_event_md5":"true",
+		"start_position":"beginning"
+	},
+	"input":[
+		{
+			"type":"accumulo_gc",
+			"rowtype":"service",
+			"path":"{{accumulo_log_dir}}/gc_*.log"
+		},
+		{
+			"type":"accumulo_master",
+			"rowtype":"service",
+			"path":"{{accumulo_log_dir}}/master_*.log"
+		},
+		{
+			"type":"accumulo_monitor",
+			"rowtype":"service",
+			"path":"{{accumulo_log_dir}}/monitor_*.log"
+		},
+		{
+			"type":"accumulo_tracer",
+			"rowtype":"service",
+			"path":"{{accumulo_log_dir}}/tracer_*.log"
+		},
+		{
+			"type":"accumulo_tserver",
+			"rowtype":"service",
+			"path":"{{accumulo_log_dir}}/tserver_*.log"
+		},
+		{
+			"type":"atlas_app",
+			"rowtype":"service",
+			"path":"{{atlas_log_dir}}/application.log"
+		},
+		{
+			"type":"ambari_agent",
+			"rowtype":"service",
+			"path":"{{ambari_agent_log_dir}}/ambari-agent.log"
+		},
+		{
+			"type":"ambari_server",
+			"rowtype":"service",
+			"path":"{{ambari_server_log_dir}}/ambari-server.log"
+		},
+		{
+			"type":"ams_hbase_master",
+			"rowtype":"service",
+			"path":"{{metrics_collector_log_dir}}/hbase-ams-master-*.log"
+		},
+		{
+			"type":"ams_hbase_regionserver",
+			"rowtype":"service",
+			"path":"{{metrics_collector_log_dir}}/hbase-ams-regionserver-*.log"
+		},
+		{
+			"type":"ams_collector",
+			"rowtype":"service",
+			"path":"{{metrics_collector_log_dir}}/ambari-metrics-collector.log"
+		},
+		{
+			"type":"falcon_app",
+			"rowtype":"service",
+			"path":"{{falcon_log_dir}}/falcon.application.log"
+		},
+		{
+			"type":"hbase_master",
+			"rowtype":"service",
+			"path":"{{hbase_log_dir}}/hbase-hbase-master-*.log"
+		},
+		{
+			"type":"hbase_regionserver",
+			"rowtype":"service",
+			"path":"{{hbase_log_dir}}/hbase-hbase-regionserver-*.log"
+		},
+		{
+			"type":"hdfs_datanode",
+			"rowtype":"service",
+			"path":"{{hdfs_log_dir_prefix}}/hdfs/hadoop-hdfs-datanode-*.log"
+		},
+		{
+			"type":"hdfs_namenode",
+			"rowtype":"service",
+			"path":"{{hdfs_log_dir_prefix}}/hdfs/hadoop-hdfs-namenode-*.log"
+		},
+		{
+			"type":"hdfs_journalnode",
+			"rowtype":"service",
+			"path":"{{hdfs_log_dir_prefix}}/hdfs/hadoop-hdfs-journalnode-*.log"
+		},
+		{
+			"type":"hdfs_secondarynamenode",
+			"rowtype":"service",
+			"path":"{{hdfs_log_dir_prefix}}/hdfs/hadoop-hdfs-secondarynamenode-*.log"
+		},
+		{
+			"type":"hdfs_zkfc",
+			"rowtype":"service",
+			"path":"{{hdfs_log_dir_prefix}}/hdfs/hadoop-hdfs-zkfc-*.log"
+		},
+		{
+			"type":"hive_hiveserver2",
+			"rowtype":"service",
+			"path":"{{hive_log_dir}}/hiveserver2.log"
+		},
+		{
+			"type":"hive_metastore",
+			"rowtype":"service",
+			"path":"{{hive_log_dir}}/hivemetastore.log"
+		},
+		{
+			"type":"kafka_controller",
+			"rowtype":"service",
+			"path":"{{kafka_log_dir}}/controller.log"
+		},
+		{
+			"type":"kafka_request",
+			"rowtype":"service",
+			"path":"{{kafka_log_dir}}/kafka-request.log"
+		},
+		{
+			"type":"kafka_logcleaner",
+			"rowtype":"service",
+			"path":"{{kafka_log_dir}}/log-cleaner.log"
+		},
+		{
+			"type":"kafka_server",
+			"rowtype":"service",
+			"path":"{{kafka_log_dir}}/server.log"
+		},
+		{
+			"type":"kafka_statechange",
+			"rowtype":"service",
+			"path":"{{kafka_log_dir}}/state-change.log"
+		},
+		{
+			"type":"knox_gateway",
+			"rowtype":"service",
+			"path":"{{knox_log_dir}}/gateway.log"
+		},
+		{
+			"type":"knox_cli",
+			"rowtype":"service",
+			"path":"{{knox_log_dir}}/knoxcli.log"
+		},
+		{
+			"type":"knox_ldap",
+			"rowtype":"service",
+			"path":"{{knox_log_dir}}/ldap.log"
+		},
+		{
+			"type":"mapred_historyserver",
+			"rowtype":"service",
+			"path":"{{mapred_log_dir_prefix}}/mapred/mapred-mapred-historyserver*.log"
+		},
+		{
+			"type":"logsearch_app",
+			"rowtype":"service",
+			"path":"{{logsearch_log_dir}}/logsearch.log"
+		},
+		{
+			"type":"logsearch_feeder",
+			"rowtype":"service",
+			"path":"{{logfeeder_log_dir}}/logfeeder.log"
+		},
+		{
+			"type":"logsearch_perf",
+			"rowtype":"service",
+			"path":"{{logsearch_log_dir}}/logsearch-performance.log"
+		},
+		{
+			"type":"ranger_admin",
+			"rowtype":"service",
+			"path":"{{ranger_admin_log_dir}}/xa_portal.log"
+		},
+		{
+			"type":"ranger_dbpatch",
+			"is_enabled":"true",
+			"path":"{{ranger_admin_log_dir}}/ranger_db_patch.log"
+		},
+		{
+			"type":"ranger_kms",
+			"rowtype":"service",
+			"path":"{{ranger_kms_log_dir}}/kms.log"
+		},
+		{
+			"type":"ranger_usersync",
+			"rowtype":"service",
+			"path":"{{ranger_usersync_log_dir}}/usersync.log"
+		},
+		{
+			"type":"oozie_app",
+			"rowtype":"service",
+			"path":"{{oozie_log_dir}}/oozie.log"
+		},
+		{
+			"type":"yarn_nodemanager",
+			"rowtype":"service",
+			"path":"{{yarn_log_dir_prefix}}/yarn/yarn-yarn-nodemanager-*.log"
+		},
+		{
+			"type":"yarn_resourcemanager",
+			"rowtype":"service",
+			"path":"{{yarn_log_dir_prefix}}/yarn/yarn-yarn-resourcemanager-*.log"
+		},
+		{
+			"type":"yarn_timelineserver",
+			"rowtype":"service",
+			"path":"{{yarn_log_dir_prefix}}/yarn/yarn-yarn-timelineserver-*.log"
+		},
+		{
+			"type":"yarn_historyserver",
+			"rowtype":"service",
+			"path":"{{yarn_log_dir_prefix}}/yarn/yarn-yarn-historyserver-*.log"
+		},
+		{
+			"type":"yarn_jobsummary",
+			"rowtype":"service",
+			"path":"{{yarn_log_dir_prefix}}/yarn/hadoop-mapreduce.jobsummary.log"
+		},
+		{
+			"type":"storm_drpc",
+			"rowtype":"service",
+			"path":"{{storm_log_dir}}/drpc.log"
+		},
+		{
+			"type":"storm_logviewer",
+			"rowtype":"service",
+			"path":"{{storm_log_dir}}/logviewer.log"
+		},
+		{
+			"type":"storm_nimbus",
+			"rowtype":"service",
+			"path":"{{storm_log_dir}}/nimbus.log"
+		},
+		{
+			"type":"storm_supervisor",
+			"rowtype":"service",
+			"path":"{{storm_log_dir}}/supervisor.log"
+		},
+		{
+			"type":"storm_ui",
+			"rowtype":"service",
+			"path":"{{storm_log_dir}}/ui.log"
+		},
+		{
+			"type":"storm_worker",
+			"rowtype":"service",
+			"path":"{{storm_log_dir}}/*worker*.log"
+		},
+		{
+			"type":"zookeeper",
+			"rowtype":"service",
+			"path":"{{zk_log_dir}}/zookeeper/zookeeper*.out"
+		},
+		{
+			"type":"hdfs_audit",
+			"rowtype":"audit",
+			"is_enabled":"true",
+			"add_fields":{
+				"logType":"HDFSAudit",
+				"enforcer":"hadoop-acl",
+				"repoType":"1",
+				"repo":"hdfs"
+			},
+			"path":"{{hdfs_log_dir_prefix}}/hdfs/hdfs-audit.log"
+		}
+		
+	],
+	"filter":[
+		{
+			"filter":"grok",
+			"conditions":{
+				"fields":{
+					"type":[
+						"accumulo_master"
+					]
+					
+				}
+				
+			},
+			"log4j_format":"%d{ISO8601} [%-8c{2}] %-5p: %m%n",
+			"multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
+			"message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}\\[%{JAVACLASS:logger_name}\\]%{SPACE}%{LOGLEVEL:level}:%{SPACE}%{GREEDYDATA:log_message}",
+			"post_map_values":{
+				"logtime":{
+					"map_date":{
+						"date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+					}
+					
+				}
+				
+			}
+			
+		},
+		{
+			"filter":"grok",
+			"comment":"This one has one extra space after LEVEL",
+			"conditions":{
+				"fields":{
+					"type":[
+						"accumulo_gc",
+						"accumulo_monitor",
+						"accumulo_tracer",
+						"accumulo_tserver"
+					]
+					
+				}
+				
+			},
+			"log4j_format":"%d{ISO8601} [%-8c{2}] %-5p: %X{application} %m%n",
+			"multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
+			"message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}\\[%{JAVACLASS:logger_name}\\]%{SPACE}%{LOGLEVEL:level}%{SPACE}:%{SPACE}%{GREEDYDATA:log_message}",
+			"post_map_values":{
+				"logtime":{
+					"map_date":{
+						"date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+					}
+					
+				}
+				
+			}
+			
+		},
+		{
+			"filter":"grok",
+			"conditions":{
+				"fields":{
+					"type":[
+						"atlas_app",
+						"falcon_app"
+					]
+					
+				}
+				
+			},
+			"log4j_format":"%d %-5p - [%t:%x] ~ %m (%c{1}:%L)%n",
+			"multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
+			"message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{SPACE}-%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}~%{SPACE}%{GREEDYDATA:log_message}",
+			"post_map_values":{
+				"logtime":{
+					"map_date":{
+						"date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+					}
+					
+				}
+				
+			}
+			
+		},
+		{
+			"filter":"grok",
+			"conditions":{
+				"fields":{
+					"type":[
+						"ams_collector"
+					]
+					
+				}
+				
+			},
+			"log4j_format":"%d{ISO8601} %p %c: %m%n",
+			"multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
+			"message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}:%{SPACE}%{GREEDYDATA:log_message}",
+			"post_map_values":{
+				"logtime":{
+					"map_date":{
+						"date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+					}
+					
+				}
+				
+			}
+			
+		},
+		{
+			"filter":"grok",
+			"conditions":{
+				"fields":{
+					"type":[
+						"ams_hbase_master",
+						"ams_hbase_regionserver",
+						"hbase_master",
+						"hbase_regionserver"
+					]
+					
+				}
+				
+			},
+			"log4j_format":"%d{ISO8601} %-5p [%t] %c{2}: %m%n",
+			"multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
+			"message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}%{JAVACLASS:logger_name}:%{SPACE}%{GREEDYDATA:log_message}",
+			"post_map_values":{
+				"logtime":{
+					"map_date":{
+						"date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+					}
+					
+				}
+				
+			}
+			
+		},
+		{
+			"filter":"grok",
+			"conditions":{
+				"fields":{
+					"type":[
+						"ambari_agent"
+					]
+					
+				}
+				
+			},
+			"log4j_format":"",
+			"multiline_pattern":"^(%{LOGLEVEL:level} %{TIMESTAMP_ISO8601:logtime})",
+			"message_pattern":"(?m)^%{LOGLEVEL:level} %{TIMESTAMP_ISO8601:logtime} %{JAVAFILE:file}:%{INT:line_number} - %{GREEDYDATA:log_message}",
+			"post_map_values":{
+				"logtime":{
+					"map_date":{
+						"date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+					}
+					
+				},
+				"level":{
+					"map_fieldvalue":{
+						"pre_value":"WARNING",
+						"post_value":"WARN"
+					}
+					
+				}
+				
+			}
+			
+		},
+		{
+			"filter":"grok",
+			"conditions":{
+				"fields":{
+					"type":[
+						"ambari_server"
+					]
+					
+				}
+				
+			},
+			"log4j_format":"%d{DATE} %5p [%t] %c{1}:%L - %m%n",
+			"multiline_pattern":"^(%{USER_SYNC_DATE:logtime})",
+			"message_pattern":"(?m)^%{USER_SYNC_DATE:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}%{JAVACLASS:logger_name}:%{INT:line_number}%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
+			"post_map_values":{
+				"logtime":{
+					"map_date":{
+						"date_pattern":"dd MMM yyyy HH:mm:ss"
+					}
+					
+				}
+				
+			}
+			
+		},
+		{
+			"filter":"grok",
+			"conditions":{
+				"fields":{
+					"type":[
+						"hdfs_datanode",
+						"hdfs_journalnode",
+						"hdfs_secondarynamenode",
+						"hdfs_namenode",
+						"hdfs_zkfc",
+						"knox_gateway",
+						"knox_cli",
+						"knox_ldap",
+						"mapred_historyserver",
+						"yarn_historyserver",
+						"yarn_jobsummary",
+						"yarn_nodemanager",
+						"yarn_resourcemanager",
+						"yarn_timelineserver"
+					]
+					
+				}
+				
+			},
+			"log4j_format":"%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n",
+			"multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
+			"message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}%{SPACE}\\(%{JAVAFILE:file}:%{JAVAMETHOD:method}\\(%{INT:line_number}\\)\\)%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
+			"post_map_values":{
+				"logtime":{
+					"map_date":{
+						"date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+					}
+					
+				}
+				
+			}
+			
+		},
+		{
+			"filter":"grok",
+			"conditions":{
+				"fields":{
+					"type":[
+						"hive_hiveserver2",
+						"hive_metastore"
+					]
+					
+				}
+				
+			},
+			"log4j_format":"%d{ISO8601} %-5p [%t]: %c{2} (%F:%M(%L)) - %m%n",
+			"multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
+			"message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}\\[%{DATA:thread_name}\\]:%{SPACE}%{JAVACLASS:logger_name}%{SPACE}\\(%{JAVAFILE:file}:%{JAVAMETHOD:method}\\(%{INT:line_number}\\)\\)%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
+			"post_map_values":{
+				"logtime":{
+					"map_date":{
+						"date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+					}
+					
+				}
+				
+			}
+			
+		},
+		{
+			"filter":"grok",
+			"conditions":{
+				"fields":{
+					"type":[
+						"kafka_controller",
+						"kafka_request",
+						"kafka_logcleaner"
+					]
+					
+				}
+				
+			},
+			"log4j_format":"[%d] %p %m (%c)%n",
+			"multiline_pattern":"^(\\[%{TIMESTAMP_ISO8601:logtime}\\])",
+			"message_pattern":"(?m)^\\[%{TIMESTAMP_ISO8601:logtime}\\]%{SPACE}%{LOGLEVEL:level}%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}%{GREEDYDATA:log_message}",
+			"post_map_values":{
+				"logtime":{
+					"map_date":{
+						"date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+					}
+					
+				}
+				
+			}
+			
+		},
+		{
+			"filter":"grok",
+			"comment":"Suppose to be same log4j pattern as other kafka processes, but some reason thread is not printed",
+			"conditions":{
+				"fields":{
+					"type":[
+						"kafka_server",
+						"kafka_statechange"
+					]
+					
+				}
+				
+			},
+			"log4j_format":"[%d] %p %m (%c)%n",
+			"multiline_pattern":"^(\\[%{TIMESTAMP_ISO8601:logtime}\\])",
+			"message_pattern":"(?m)^\\[%{TIMESTAMP_ISO8601:logtime}\\]%{SPACE}%{LOGLEVEL:level}%{SPACE}%{GREEDYDATA:log_message}",
+			"post_map_values":{
+				"logtime":{
+					"map_date":{
+						"date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+					}
+					
+				}
+				
+			}
+			
+		},
+		{
+			"filter":"grok",
+			"conditions":{
+				"fields":{
+					"type":[
+						"oozie_app"
+					]
+					
+				}
+				
+			},
+			"log4j_format":"%d{ISO8601} %5p %c{1}:%L - SERVER[${oozie.instance.id}] %m%n",
+			"multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
+			"message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{DATA:logger_name}:%{INT:line_number}%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
+			"post_map_values":{
+				"logtime":{
+					"map_date":{
+						"date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+					}
+					
+				}
+				
+			}
+			
+		},
+		{
+			"filter":"grok",
+			"conditions":{
+				"fields":{
+					"type":[
+						"logsearch_app",
+						"logsearch_feeder",
+						"logsearch_perf",
+						"ranger_admin",
+						"ranger_dbpatch"
+					]
+					
+				}
+				
+			},
+			"log4j_format":"%d [%t] %-5p %C{6} (%F:%L) - %m%n",
+			"multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
+			"message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}%{SPACE}\\(%{JAVAFILE:file}:%{INT:line_number}\\)%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
+			"post_map_values":{
+				"logtime":{
+					"map_date":{
+						"date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+					}
+					
+				}
+				
+			}
+			
+		},
+		{
+			"filter":"grok",
+			"conditions":{
+				"fields":{
+					"type":[
+						"ranger_kms"
+					]
+					
+				}
+				
+			},
+			"log4j_format":"%d{ISO8601} %-5p %c{1} - %m%n",
+			"multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
+			"message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
+			"post_map_values":{
+				"logtime":{
+					"map_date":{
+						"date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+					}
+					
+				}
+				
+			}
+			
+		},
+		{
+			"filter":"grok",
+			"conditions":{
+				"fields":{
+					"type":[
+						"ranger_usersync"
+					]
+					
+				}
+				
+			},
+			"log4j_format":"%d{dd MMM yyyy HH:mm:ss} %5p %c{1} [%t] - %m%n",
+			"multiline_pattern":"^(%{USER_SYNC_DATE:logtime})",
+			"message_pattern":"(?m)^%{USER_SYNC_DATE:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
+			"post_map_values":{
+				"logtime":{
+					"map_date":{
+						"date_pattern":"dd MMM yyyy HH:mm:ss"
+					}
+					
+				}
+				
+			}
+			
+		},
+		{
+			"filter":"grok",
+			"conditions":{
+				"fields":{
+					"type":[
+						"storm_drpc",
+						"storm_logviewer",
+						"storm_nimbus",
+						"storm_supervisor",
+						"storm_ui",
+						"storm_worker"
+					]
+					
+				}
+				
+			},
+			"log4j_format":"",
+			"multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
+			"message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{JAVACLASS:logger_name}%{SPACE}\\[%{LOGLEVEL:level}\\]%{SPACE}%{SPACE}%{GREEDYDATA:log_message}",
+			"post_map_values":{
+				"logtime":{
+					"map_date":{
+						"date_pattern":"yyyy-MM-dd HH:mm:ss.SSS"
+					}
+					
+				}
+				
+			}
+			
+		},
+		{
+			"filter":"grok",
+			"conditions":{
+				"fields":{
+					"type":[
+						"zookeeper"
+					]
+					
+				}
+				
+			},
+			"log4j_format":"%d{ISO8601} - %-5p [%t:%C{1}@%L] - %m%n",
+			"multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
+			"message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}-%{SPACE}%{LOGLEVEL:level}%{SPACE}\\[%{DATA:thread_name}\\@%{INT:line_number}\\]%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
+			"post_map_values":{
+				"logtime":{
+					"map_date":{
+						"date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+					}
+					
+				}
+				
+			}
+			
+		},
+		{
+			"filter":"grok",
+			"conditions":{
+				"fields":{
+					"type":[
+						"hdfs_audit"
+					]
+					
+				}
+				
+			},
+			"log4j_format":"%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n",
+			"multiline_pattern":"^(%{TIMESTAMP_ISO8601:evtTime})",
+			"message_pattern":"(?m)^%{TIMESTAMP_ISO8601:evtTime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}:%{SPACE}%{GREEDYDATA:log_message}",
+			"post_map_values":{
+				"evtTime":{
+					"map_date":{
+						"date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+					}
+					
+				}
+				
+			}
+			
+		},
+		{
+			"filter":"keyvalue",
+			"sort_order":1,
+			"conditions":{
+				"fields":{
+					"type":[
+						"hdfs_audit"
+					]
+					
+				}
+				
+			},
+			"source_field":"log_message",
+			"value_split":"=",
+			"field_split":"\t",
+			"post_map_values":{
+				"src":{
+					"map_fieldname":{
+						"new_fieldname":"resource"
+					}
+					
+				},
+				"ip":{
+					"map_fieldname":{
+						"new_fieldname":"cliIP"
+					}
+					
+				},
+				"allowed":[
+					{
+						"map_fieldvalue":{
+							"pre_value":"true",
+							"post_value":"1"
+						}
+						
+					},
+					{
+						"map_fieldvalue":{
+							"pre_value":"false",
+							"post_value":"0"
+						}
+						
+					},
+					{
+						"map_fieldname":{
+							"new_fieldname":"result"
+						}
+						
+					}
+					
+				],
+				"cmd":{
+					"map_fieldname":{
+						"new_fieldname":"action"
+					}
+					
+				},
+				"proto":{
+					"map_fieldname":{
+						"new_fieldname":"cliType"
+					}
+					
+				},
+				"callerContext":{
+					"map_fieldname":{
+						"new_fieldname":"req_caller_id"
+					}
+					
+				}
+				
+			}
+			
+		},
+		{
+			"filter":"grok",
+			"sort_order":2,
+			"source_field":"ugi",
+			"remove_source_field":"false",
+			"conditions":{
+				"fields":{
+					"type":[
+						"hdfs_audit"
+					]
+					
+				}
+				
+			},
+			"message_pattern":"%{USERNAME:p_user}.+auth:%{USERNAME:p_authType}.+via %{USERNAME:k_user}.+auth:%{USERNAME:k_authType}|%{USERNAME:user}.+auth:%{USERNAME:authType}|%{USERNAME:x_user}",
+			"post_map_values":{
+				"user":{
+					"map_fieldname":{
+						"new_fieldname":"reqUser"
+					}
+					
+				},
+				"x_user":{
+					"map_fieldname":{
+						"new_fieldname":"reqUser"
+					}
+					
+				},
+				"p_user":{
+					"map_fieldname":{
+						"new_fieldname":"reqUser"
+					}
+					
+				},
+				"k_user":{
+					"map_fieldname":{
+						"new_fieldname":"proxyUsers"
+					}
+					
+				},
+				"p_authType":{
+					"map_fieldname":{
+						"new_fieldname":"authType"
+					}
+					
+				},
+				"k_authType":{
+					"map_fieldname":{
+						"new_fieldname":"proxyAuthType"
+					}
+					
+				}
+				
+			}
+			
+		}
+		
+	],
+	"output":[
+		{
+			"is_enabled":"{{solr_service_logs_enable}}",
+			"comment":"Output to solr for service logs",
+			"destination":"solr",
+			"zk_hosts":"{{zookeeper_quorum}}{{solr_znode}}",
+			"collection":"{{solr_collection_service_logs}}",
+			"number_of_shards": "{{logsearch_numshards}}",
+			"splits_interval_mins": "{{service_logs_collection_splits_interval_mins}}",
+			"conditions":{
+				"fields":{
+					"rowtype":[
+						"service"
+					]
+					
+				}
+				
+			}
+			
+		},
+		{
+			"comment":"Output to solr for audit records",
+			"is_enabled":"{{solr_audit_logs_enable}}",
+			"destination":"solr",
+			"zk_hosts":"{{zookeeper_quorum}}{{solr_znode}}",
+			"collection":"{{solr_collection_audit_logs}}",
+			"number_of_shards": "{{logsearch_numshards}}",
+			"splits_interval_mins": "{{audit_logs_collection_splits_interval_mins}}",
+			"conditions":{
+				"fields":{
+					"rowtype":[
+						"audit"
+					]
+					
+				}
+				
+			}
+			
+		},
+		{
+			"is_enabled":"{{kafka_service_logs_enable}}",
+			"destination":"kafka",
+			"broker_list":"{{kafka_broker_list}}",
+			"topic":"{{kafka_topic_service_logs}}",
+			"kafka.security.protocol":"{{kafka_security_protocol}}",
+			"kafka.sasl.kerberos.service.name":"{{kafka_kerberos_service_name}}",
+			"conditions":{
+				"fields":{
+					"rowtype":[
+						"service"
+					]
+					
+				}
+				
+			}
+			
+		},
+		{
+			"is_enabled":"{{kafka_topic_service_logs}}",
+			"destination":"kafka",
+			"broker_list":"{{kafka_broker_list}}",
+			"topic":"{{kafka_topic_audit_logs}}",
+			"kafka.security.protocol":"{{kafka_security_protocol}}",
+			"kafka.sasl.kerberos.service.name":"{{kafka_kerberos_service_name}}",
+			"conditions":{
+				"fields":{
+					"rowtype":[
+						"audit"
+					]
+					
+				}
+				
+			}
+			
+		}
+		
+	]
+	
+}

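In the template above, each grok filter captures a logtime string and then post_map_values/map_date converts it using the given date_pattern. What that conversion boils down to, as a minimal sketch with the pattern used by most of the service logs (a real mapper would keep one SimpleDateFormat per thread, since the class is not thread-safe):

    import java.text.SimpleDateFormat;
    import java.util.Date;

    // Parse a captured logtime such as "2016-03-30 10:15:12,345"
    SimpleDateFormat fmt = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss,SSS");
    Date logtime = fmt.parse("2016-03-30 10:15:12,345"); // throws ParseException on mismatch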
+ 626 - 0
ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/filters.config.json

@@ -0,0 +1,626 @@
+{
+	"filter":[
+		{
+			"filter":"grok",
+			"conditions":{
+				"fields":{
+					"type":[
+						"accumulo_master"
+					]
+					
+				}
+				
+			},
+			"log4j_format":"%d{ISO8601} [%-8c{2}] %-5p: %m%n",
+			"multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
+			"message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}\\[%{JAVACLASS:logger_name}\\]%{SPACE}%{LOGLEVEL:level}:%{SPACE}%{GREEDYDATA:log_message}",
+			"post_map_values":{
+				"logtime":{
+					"map_date":{
+						"date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+					}
+					
+				}
+				
+			}
+			
+		},
+		{
+			"filter":"grok",
+			"comment":"This one has one extra space after LEVEL",
+			"conditions":{
+				"fields":{
+					"type":[
+						"accumulo_gc",
+						"accumulo_monitor",
+						"accumulo_tracer",
+						"accumulo_tserver"
+					]
+					
+				}
+				
+			},
+			"log4j_format":"%d{ISO8601} [%-8c{2}] %-5p: %X{application} %m%n",
+			"multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
+			"message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}\\[%{JAVACLASS:logger_name}\\]%{SPACE}%{LOGLEVEL:level}%{SPACE}:%{SPACE}%{GREEDYDATA:log_message}",
+			"post_map_values":{
+				"logtime":{
+					"map_date":{
+						"date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+					}
+					
+				}
+				
+			}
+			
+		},
+		{
+			"filter":"grok",
+			"conditions":{
+				"fields":{
+					"type":[
+						"atlas_app",
+						"falcon_app"
+					]
+					
+				}
+				
+			},
+			"log4j_format":"%d %-5p - [%t:%x] ~ %m (%c{1}:%L)%n",
+			"multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
+			"message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{SPACE}-%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}~%{SPACE}%{GREEDYDATA:log_message}",
+			"post_map_values":{
+				"logtime":{
+					"map_date":{
+						"date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+					}
+					
+				}
+				
+			}
+			
+		},
+		{
+			"filter":"grok",
+			"conditions":{
+				"fields":{
+					"type":[
+						"ams_collector"
+					]
+					
+				}
+				
+			},
+			"log4j_format":"%d{ISO8601} %p %c: %m%n",
+			"multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
+			"message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}:%{SPACE}%{GREEDYDATA:log_message}",
+			"post_map_values":{
+				"logtime":{
+					"map_date":{
+						"date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+					}
+					
+				}
+				
+			}
+			
+		},
+		{
+			"filter":"grok",
+			"conditions":{
+				"fields":{
+					"type":[
+						"ams_hbase_master",
+						"ams_hbase_regionserver",
+						"hbase_master",
+						"hbase_regionserver"
+					]
+					
+				}
+				
+			},
+			"log4j_format":"%d{ISO8601} %-5p [%t] %c{2}: %m%n",
+			"multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
+			"message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}%{JAVACLASS:logger_name}:%{SPACE}%{GREEDYDATA:log_message}",
+			"post_map_values":{
+				"logtime":{
+					"map_date":{
+						"date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+					}
+					
+				}
+				
+			}
+			
+		},
+		{
+			"filter":"grok",
+			"conditions":{
+				"fields":{
+					"type":[
+						"ambari_agent"
+					]
+					
+				}
+				
+			},
+			"log4j_format":"",
+			"multiline_pattern":"^(%{LOGLEVEL:level} %{TIMESTAMP_ISO8601:logtime})",
+			"message_pattern":"(?m)^%{LOGLEVEL:level} %{TIMESTAMP_ISO8601:logtime} %{JAVAFILE:file}:%{INT:line_number} - %{GREEDYDATA:log_message}",
+			"post_map_values":{
+				"logtime":{
+					"map_date":{
+						"date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+					}
+					
+				},
+				"level":{
+					"map_fieldvalue":{
+						"pre_value":"WARNING",
+						"post_value":"WARN"
+					}
+					
+				}
+				
+			}
+			
+		},
+		{
+			"filter":"grok",
+			"conditions":{
+				"fields":{
+					"type":[
+						"ambari_server"
+					]
+					
+				}
+				
+			},
+			"log4j_format":"%d{DATE} %5p [%t] %c{1}:%L - %m%n",
+			"multiline_pattern":"^(%{USER_SYNC_DATE:logtime})",
+			"message_pattern":"(?m)^%{USER_SYNC_DATE:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}%{JAVACLASS:logger_name}:%{INT:line_number}%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
+			"post_map_values":{
+				"logtime":{
+					"map_date":{
+						"date_pattern":"dd MMM yyyy HH:mm:ss"
+					}
+					
+				}
+				
+			}
+			
+		},
+		{
+			"filter":"grok",
+			"conditions":{
+				"fields":{
+					"type":[
+						"hdfs_datanode",
+						"hdfs_journalnode",
+						"hdfs_secondarynamenode",
+						"hdfs_namenode",
+						"hdfs_zkfc",
+						"knox_gateway",
+						"knox_cli",
+						"knox_ldap",
+						"mapred_historyserver",
+						"yarn_historyserver",
+						"yarn_jobsummary",
+						"yarn_nodemanager",
+						"yarn_resourcemanager",
+						"yarn_timelineserver"
+					]
+					
+				}
+				
+			},
+			"log4j_format":"%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n",
+			"multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
+			"message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}%{SPACE}\\(%{JAVAFILE:file}:%{JAVAMETHOD:method}\\(%{INT:line_number}\\)\\)%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
+			"post_map_values":{
+				"logtime":{
+					"map_date":{
+						"date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+					}
+					
+				}
+				
+			}
+			
+		},
+		{
+			"filter":"grok",
+			"conditions":{
+				"fields":{
+					"type":[
+						"hive_hiveserver2",
+						"hive_metastore"
+					]
+					
+				}
+				
+			},
+			"log4j_format":"%d{ISO8601} %-5p [%t]: %c{2} (%F:%M(%L)) - %m%n",
+			"multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
+			"message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}\\[%{DATA:thread_name}\\]:%{SPACE}%{JAVACLASS:logger_name}%{SPACE}\\(%{JAVAFILE:file}:%{JAVAMETHOD:method}\\(%{INT:line_number}\\)\\)%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
+			"post_map_values":{
+				"logtime":{
+					"map_date":{
+						"date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+					}
+					
+				}
+				
+			}
+			
+		},
+		{
+			"filter":"grok",
+			"conditions":{
+				"fields":{
+					"type":[
+						"kafka_controller",
+						"kafka_request",
+						"kafka_logcleaner"
+					]
+					
+				}
+				
+			},
+			"log4j_format":"[%d] %p %m (%c)%n",
+			"multiline_pattern":"^(\\[%{TIMESTAMP_ISO8601:logtime}\\])",
+			"message_pattern":"(?m)^\\[%{TIMESTAMP_ISO8601:logtime}\\]%{SPACE}%{LOGLEVEL:level}%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}%{GREEDYDATA:log_message}",
+			"post_map_values":{
+				"logtime":{
+					"map_date":{
+						"date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+					}
+					
+				}
+				
+			}
+			
+		},
+		{
+			"filter":"grok",
+			"comment":"Suppose to be same log4j pattern as other kafka processes, but some reason thread is not printed",
+			"conditions":{
+				"fields":{
+					"type":[
+						"kafka_server",
+						"kafka_statechange"
+					]
+					
+				}
+				
+			},
+			"log4j_format":"[%d] %p %m (%c)%n",
+			"multiline_pattern":"^(\\[%{TIMESTAMP_ISO8601:logtime}\\])",
+			"message_pattern":"(?m)^\\[%{TIMESTAMP_ISO8601:logtime}\\]%{SPACE}%{LOGLEVEL:level}%{SPACE}%{GREEDYDATA:log_message}",
+			"post_map_values":{
+				"logtime":{
+					"map_date":{
+						"date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+					}
+					
+				}
+				
+			}
+			
+		},
+		{
+			"filter":"grok",
+			"conditions":{
+				"fields":{
+					"type":[
+						"oozie_app"
+					]
+					
+				}
+				
+			},
+			"log4j_format":"%d{ISO8601} %5p %c{1}:%L - SERVER[${oozie.instance.id}] %m%n",
+			"multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
+			"message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{DATA:logger_name}:%{INT:line_number}%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
+			"post_map_values":{
+				"logtime":{
+					"map_date":{
+						"date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+					}
+					
+				}
+				
+			}
+			
+		},
+		{
+			"filter":"grok",
+			"conditions":{
+				"fields":{
+					"type":[
+						"logsearch_app",
+						"logsearch_feeder",
+					    	"logsearch_perf",
+						"ranger_admin",
+						"ranger_dbpatch"
+					]
+					
+				}
+				
+			},
+			"log4j_format":"%d [%t] %-5p %C{6} (%F:%L) - %m%n",
+			"multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
+			"message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}%{SPACE}\\(%{JAVAFILE:file}:%{INT:line_number}\\)%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
+			"post_map_values":{
+				"logtime":{
+					"map_date":{
+						"date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+					}
+					
+				}
+				
+			}
+			
+		},
+		{
+			"filter":"grok",
+			"conditions":{
+				"fields":{
+					"type":[
+						"ranger_kms"
+					]
+					
+				}
+				
+			},
+			"log4j_format":"%d{ISO8601} %-5p %c{1} - %m%n",
+			"multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
+			"message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
+			"post_map_values":{
+				"logtime":{
+					"map_date":{
+						"date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+					}
+					
+				}
+				
+			}
+			
+		},
+		{
+			"filter":"grok",
+			"conditions":{
+				"fields":{
+					"type":[
+						"ranger_usersync"
+					]
+					
+				}
+				
+			},
+			"log4j_format":"%d{dd MMM yyyy HH:mm:ss} %5p %c{1} [%t] - %m%n",
+			"multiline_pattern":"^(%{USER_SYNC_DATE:logtime})",
+			"message_pattern":"(?m)^%{USER_SYNC_DATE:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
+			"post_map_values":{
+				"logtime":{
+					"map_date":{
+						"date_pattern":"dd MMM yyyy HH:mm:ss"
+					}
+					
+				}
+				
+			}
+			
+		},
+		{
+			"filter":"grok",
+			"conditions":{
+				"fields":{
+					"type":[
+						"storm_drpc",
+						"storm_logviewer",
+						"storm_nimbus",
+						"storm_supervisor",
+						"storm_ui",
+						"storm_worker"
+					]
+					
+				}
+				
+			},
+			"log4j_format":"",
+			"multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
+			"message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{JAVACLASS:logger_name}%{SPACE}\\[%{LOGLEVEL:level}\\]%{SPACE}%{SPACE}%{GREEDYDATA:log_message}",
+			"post_map_values":{
+				"logtime":{
+					"map_date":{
+						"date_pattern":"yyyy-MM-dd HH:mm:ss.SSS"
+					}
+					
+				}
+				
+			}
+			
+		},
+		{
+			"filter":"grok",
+			"conditions":{
+				"fields":{
+					"type":[
+						"zookeeper"
+					]
+					
+				}
+				
+			},
+			"log4j_format":"%d{ISO8601} - %-5p [%t:%C{1}@%L] - %m%n",
+			"multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
+			"message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}-%{SPACE}%{LOGLEVEL:level}%{SPACE}\\[%{DATA:thread_name}\\@%{INT:line_number}\\]%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
+			"post_map_values":{
+				"logtime":{
+					"map_date":{
+						"date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+					}
+					
+				}
+				
+			}
+			
+		},
+		{
+			"filter":"grok",
+			"conditions":{
+				"fields":{
+					"type":[
+						"hdfs_audit"
+					]
+					
+				}
+				
+			},
+			"log4j_format":"%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n",
+			"multiline_pattern":"^(%{TIMESTAMP_ISO8601:evtTime})",
+			"message_pattern":"(?m)^%{TIMESTAMP_ISO8601:evtTime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}:%{SPACE}%{GREEDYDATA:log_message}",
+			"post_map_values":{
+				"evtTime":{
+					"map_date":{
+						"date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+					}
+					
+				}
+				
+			}
+			
+		},
+		{
+			"filter":"keyvalue",
+			"sort_order":1,
+			"conditions":{
+				"fields":{
+					"type":[
+						"hdfs_audit"
+					]
+					
+				}
+				
+			},
+			"source_field":"log_message",
+			"value_split":"=",
+			"field_split":"\t",
+			"post_map_values":{
+				"src":{
+					"map_fieldname":{
+						"new_fieldname":"resource"
+					}
+					
+				},
+				"ip":{
+					"map_fieldname":{
+						"new_fieldname":"cliIP"
+					}
+					
+				},
+				"allowed":[
+					{
+						"map_fieldvalue":{
+							"pre_value":"true",
+							"post_value":"1"
+						}
+						
+					},
+					{
+						"map_fieldvalue":{
+							"pre_value":"false",
+							"post_value":"0"
+						}
+						
+					},
+					{
+						"map_fieldname":{
+							"new_fieldname":"result"
+						}
+						
+					}
+					
+				],
+				"cmd":{
+					"map_fieldname":{
+						"new_fieldname":"action"
+					}
+					
+				},
+				"proto":{
+					"map_fieldname":{
+						"new_fieldname":"cliType"
+					}
+					
+				},
+				"callerContext":{
+					"map_fieldname":{
+						"new_fieldname":"req_caller_id"
+					}
+					
+				}
+				
+			}
+			
+		},
+		{
+			"filter":"grok",
+			"sort_order":2,
+			"source_field":"ugi",
+			"remove_source_field":"false",
+			"conditions":{
+				"fields":{
+					"type":[
+						"hdfs_audit"
+					]
+					
+				}
+				
+			},
+			"message_pattern":"%{USERNAME:p_user}.+auth:%{USERNAME:p_authType}.+via %{USERNAME:k_user}.+auth:%{USERNAME:k_authType}|%{USERNAME:user}.+auth:%{USERNAME:authType}|%{USERNAME:x_user}",
+			"post_map_values":{
+				"user":{
+					"map_fieldname":{
+						"new_fieldname":"reqUser"
+					}
+					
+				},
+				"x_user":{
+					"map_fieldname":{
+						"new_fieldname":"reqUser"
+					}
+					
+				},
+				"p_user":{
+					"map_fieldname":{
+						"new_fieldname":"reqUser"
+					}
+					
+				},
+				"k_user":{
+					"map_fieldname":{
+						"new_fieldname":"proxyUsers"
+					}
+					
+				},
+				"p_authType":{
+					"map_fieldname":{
+						"new_fieldname":"authType"
+					}
+					
+				},
+				"k_authType":{
+					"map_fieldname":{
+						"new_fieldname":"proxyAuthType"
+					}
+					
+				}
+				
+			}
+			
+		}
+		
+	]
+}

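The second hdfs_audit filter above extracts the requesting user from the ugi field with a three-branch alternation: a proxied form (p_user ... via k_user), a plain user with auth type, or a bare user name. A regex sketch of the first branch, with %{USERNAME} hand-expanded to its definition ([a-zA-Z0-9._-]+) from the bundled grok-patterns file and Java named groups standing in for the grok captures:

    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    Pattern proxied = Pattern.compile(
        "(?<puser>[a-zA-Z0-9._-]+).+auth:(?<pauthType>[a-zA-Z0-9._-]+).+via (?<kuser>[a-zA-Z0-9._-]+).+auth:(?<kauthType>[a-zA-Z0-9._-]+)");
    Matcher m = proxied.matcher("testuser (auth:TOKEN) via oozie (auth:SIMPLE)");
    if (m.find()) {
      // post_map_values would rename these captures to reqUser and proxyUsers
      System.out.println(m.group("puser") + " proxied by " + m.group("kuser")); // testuser proxied by oozie
    }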
+ 28 - 0
ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/global.config.json.j2

@@ -0,0 +1,28 @@
+{#
+ # Licensed to the Apache Software Foundation (ASF) under one
+ # or more contributor license agreements.  See the NOTICE file
+ # distributed with this work for additional information
+ # regarding copyright ownership.  The ASF licenses this file
+ # to you under the Apache License, Version 2.0 (the
+ # "License"); you may not use this file except in compliance
+ # with the License.  You may obtain a copy of the License at
+ #
+ #   http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ #}
+{
+	"global":{
+		"add_fields":{
+			"cluster":"{{cluster_name}}"
+		},
+		"source":"file",
+		"tail":"true",
+		"gen_event_md5":"true",
+		"start_position":"beginning"
+	}	
+}

+ 145 - 0
ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/grok-patterns

@@ -0,0 +1,145 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# JAVACLASS updated to be the same as JAVAFILE, because the usual pattern does not match classes without a package.
+JAVACLASS (?:[A-Za-z$0-9_. -]+)
+#JAVACLASS (?:[a-zA-Z$_][a-zA-Z$_0-9]*\.)*[a-zA-Z$_][a-zA-Z$_0-9]*
+#JAVACLASS (?:[a-zA-Z0-9-]+\.)+[A-Za-z0-9$]+
+
+# Space is an allowed character, to match special cases like 'Native Method' or 'Unknown Source'
+JAVAFILE (?:[A-Za-z0-9_. -]+)
+# Allow the special <init> and <clinit> methods
+JAVAMETHOD (?:(<init>)|(<clinit>)|[a-zA-Z$_][a-zA-Z$_0-9]*)
+# The line number is optional in the special cases 'Native Method' and 'Unknown Source'
+JAVASTACKTRACEPART %{SPACE}at %{JAVACLASS:class}\.%{JAVAMETHOD:method}\(%{JAVAFILE:file}(?::%{NUMBER:line})?\)
+# Java Logs
+JAVATHREAD (?:[A-Z]{2}-Processor[\d]+)
+
+# Stricter duplicate of JAVASTACKTRACEPART, commented out so the more permissive definition above is the one that is used
+#JAVASTACKTRACEPART at %{JAVACLASS:class}\.%{WORD:method}\(%{JAVAFILE:file}:%{NUMBER:line}\)
+JAVALOGMESSAGE (.*)
+# MMM dd, yyyy HH:mm:ss eg: Jan 9, 2014 7:13:13 AM
+CATALINA_DATESTAMP %{MONTH} %{MONTHDAY}, 20%{YEAR} %{HOUR}:?%{MINUTE}(?::?%{SECOND}) (?:AM|PM)
+# yyyy-MM-dd HH:mm:ss,SSS ZZZ eg: 2014-01-09 17:32:25,527 -0800
+TOMCAT_DATESTAMP 20%{YEAR}-%{MONTHNUM}-%{MONTHDAY} %{HOUR}:?%{MINUTE}(?::?%{SECOND}) %{ISO8601_TIMEZONE}
+CATALINALOG %{CATALINA_DATESTAMP:timestamp} %{JAVACLASS:class} %{JAVALOGMESSAGE:logmessage}
+# 2014-01-09 20:03:28,269 -0800 | ERROR | com.example.service.ExampleService - something completely unexpected happened...
+TOMCATLOG %{TOMCAT_DATESTAMP:timestamp} \| %{LOGLEVEL:level} \| %{JAVACLASS:class} - %{JAVALOGMESSAGE:logmessage}
+
+USERNAME [a-zA-Z0-9._-]+
+USER %{USERNAME}
+EMAILLOCALPART [a-zA-Z][a-zA-Z0-9_.+-=:]+
+EMAILADDRESS %{EMAILLOCALPART}@%{HOSTNAME}
+HTTPDUSER %{EMAILADDRESS}|%{USER}
+INT (?:[+-]?(?:[0-9]+))
+BASE10NUM (?<![0-9.+-])(?>[+-]?(?:(?:[0-9]+(?:\.[0-9]+)?)|(?:\.[0-9]+)))
+NUMBER (?:%{BASE10NUM})
+BASE16NUM (?<![0-9A-Fa-f])(?:[+-]?(?:0x)?(?:[0-9A-Fa-f]+))
+BASE16FLOAT \b(?<![0-9A-Fa-f.])(?:[+-]?(?:0x)?(?:(?:[0-9A-Fa-f]+(?:\.[0-9A-Fa-f]*)?)|(?:\.[0-9A-Fa-f]+)))\b
+
+POSINT \b(?:[1-9][0-9]*)\b
+NONNEGINT \b(?:[0-9]+)\b
+WORD \b\w+\b
+NOTSPACE \S+
+SPACE \s*
+DATA .*?
+GREEDYDATA .*
+QUOTEDSTRING (?>(?<!\\)(?>"(?>\\.|[^\\"]+)+"|""|(?>'(?>\\.|[^\\']+)+')|''|(?>`(?>\\.|[^\\`]+)+`)|``))
+UUID [A-Fa-f0-9]{8}-(?:[A-Fa-f0-9]{4}-){3}[A-Fa-f0-9]{12}
+
+# Networking
+MAC (?:%{CISCOMAC}|%{WINDOWSMAC}|%{COMMONMAC})
+CISCOMAC (?:(?:[A-Fa-f0-9]{4}\.){2}[A-Fa-f0-9]{4})
+WINDOWSMAC (?:(?:[A-Fa-f0-9]{2}-){5}[A-Fa-f0-9]{2})
+COMMONMAC (?:(?:[A-Fa-f0-9]{2}:){5}[A-Fa-f0-9]{2})
+IPV6 ((([0-9A-Fa-f]{1,4}:){7}([0-9A-Fa-f]{1,4}|:))|(([0-9A-Fa-f]{1,4}:){6}(:[0-9A-Fa-f]{1,4}|((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){5}(((:[0-9A-Fa-f]{1,4}){1,2})|:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){4}(((:[0-9A-Fa-f]{1,4}){1,3})|((:[0-9A-Fa-f]{1,4})?:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){3}(((:[0-9A-Fa-f]{1,4}){1,4})|((:[0-9A-Fa-f]{1,4}){0,2}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){2}(((:[0-9A-Fa-f]{1,4}){1,5})|((:[0-9A-Fa-f]{1,4}){0,3}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){1}(((:[0-9A-Fa-f]{1,4}){1,6})|((:[0-9A-Fa-f]{1,4}){0,4}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(:(((:[0-9A-Fa-f]{1,4}){1,7})|((:[0-9A-Fa-f]{1,4}){0,5}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:)))(%.+)?
+IPV4 (?<![0-9])(?:(?:[0-1]?[0-9]{1,2}|2[0-4][0-9]|25[0-5])[.](?:[0-1]?[0-9]{1,2}|2[0-4][0-9]|25[0-5])[.](?:[0-1]?[0-9]{1,2}|2[0-4][0-9]|25[0-5])[.](?:[0-1]?[0-9]{1,2}|2[0-4][0-9]|25[0-5]))(?![0-9])
+IP (?:%{IPV6}|%{IPV4})
+HOSTNAME \b(?:[0-9A-Za-z][0-9A-Za-z-]{0,62})(?:\.(?:[0-9A-Za-z][0-9A-Za-z-]{0,62}))*(\.?|\b)
+IPORHOST (?:%{IP}|%{HOSTNAME})
+HOSTPORT %{IPORHOST}:%{POSINT}
+
+# paths
+PATH (?:%{UNIXPATH}|%{WINPATH})
+UNIXPATH (/([\w_%!$@:.,~-]+|\\.)*)+
+TTY (?:/dev/(pts|tty([pq])?)(\w+)?/?(?:[0-9]+))
+WINPATH (?>[A-Za-z]+:|\\)(?:\\[^\\?*]*)+
+URIPROTO [A-Za-z]+(\+[A-Za-z+]+)?
+URIHOST %{IPORHOST}(?::%{POSINT:port})?
+# uripath comes loosely from RFC1738, but mostly from what Firefox
+# doesn't turn into %XX
+URIPATH (?:/[A-Za-z0-9$.+!*'(){},~:;=@#%_\-]*)+
+#URIPARAM \?(?:[A-Za-z0-9]+(?:=(?:[^&]*))?(?:&(?:[A-Za-z0-9]+(?:=(?:[^&]*))?)?)*)?
+URIPARAM \?[A-Za-z0-9$.+!*'|(){},~@#%&/=:;_?\-\[\]<>]*
+URIPATHPARAM %{URIPATH}(?:%{URIPARAM})?
+URI %{URIPROTO}://(?:%{USER}(?::[^@]*)?@)?(?:%{URIHOST})?(?:%{URIPATHPARAM})?
+
+# Months: January, Feb, 3, 03, 12, December
+MONTH \b(?:Jan(?:uary|uar)?|Feb(?:ruary|ruar)?|M(?:a|ä)?r(?:ch|z)?|Apr(?:il)?|Ma(?:y|i)?|Jun(?:e|i)?|Jul(?:y)?|Aug(?:ust)?|Sep(?:tember)?|O(?:c|k)?t(?:ober)?|Nov(?:ember)?|De(?:c|z)(?:ember)?)\b
+MONTHNUM (?:0?[1-9]|1[0-2])
+MONTHNUM2 (?:0[1-9]|1[0-2])
+MONTHDAY (?:(?:0[1-9])|(?:[12][0-9])|(?:3[01])|[1-9])
+
+# Days: Monday, Tue, Thu, etc...
+DAY (?:Mon(?:day)?|Tue(?:sday)?|Wed(?:nesday)?|Thu(?:rsday)?|Fri(?:day)?|Sat(?:urday)?|Sun(?:day)?)
+
+# Years?
+YEAR (?>\d\d){1,2}
+HOUR (?:2[0123]|[01]?[0-9])
+MINUTE (?:[0-5][0-9])
+# '60' is a leap second in most time standards and thus is valid.
+SECOND (?:(?:[0-5]?[0-9]|60)(?:[:.,][0-9]+)?)
+TIME (?!<[0-9])%{HOUR}:%{MINUTE}(?::%{SECOND})(?![0-9])
+# datestamp is YYYY/MM/DD-HH:MM:SS.UUUU (or something like it)
+DATE_US %{MONTHNUM}[/-]%{MONTHDAY}[/-]%{YEAR}
+DATE_EU %{MONTHDAY}[./-]%{MONTHNUM}[./-]%{YEAR}
+ISO8601_TIMEZONE (?:Z|[+-]%{HOUR}(?::?%{MINUTE}))
+ISO8601_SECOND (?:%{SECOND}|60)
+TIMESTAMP_ISO8601 %{YEAR}-%{MONTHNUM}-%{MONTHDAY}[T ]%{HOUR}:?%{MINUTE}(?::?%{SECOND})?%{ISO8601_TIMEZONE}?
+DATE %{DATE_US}|%{DATE_EU}
+DATESTAMP %{DATE}[- ]%{TIME}
+TZ (?:[PMCE][SD]T|UTC)
+DATESTAMP_RFC822 %{DAY} %{MONTH} %{MONTHDAY} %{YEAR} %{TIME} %{TZ}
+DATESTAMP_RFC2822 %{DAY}, %{MONTHDAY} %{MONTH} %{YEAR} %{TIME} %{ISO8601_TIMEZONE}
+DATESTAMP_OTHER %{DAY} %{MONTH} %{MONTHDAY} %{TIME} %{TZ} %{YEAR}
+DATESTAMP_EVENTLOG %{YEAR}%{MONTHNUM2}%{MONTHDAY}%{HOUR}%{MINUTE}%{SECOND}
+HTTPDERROR_DATE %{DAY} %{MONTH} %{MONTHDAY} %{TIME} %{YEAR}
+
+# Syslog Dates: Month Day HH:MM:SS
+SYSLOGTIMESTAMP %{MONTH} +%{MONTHDAY} %{TIME}
+PROG [\x21-\x5a\x5c\x5e-\x7e]+
+SYSLOGPROG %{PROG:program}(?:\[%{POSINT:pid}\])?
+SYSLOGHOST %{IPORHOST}
+SYSLOGFACILITY <%{NONNEGINT:facility}.%{NONNEGINT:priority}>
+HTTPDATE %{MONTHDAY}/%{MONTH}/%{YEAR}:%{TIME} %{INT}
+
+# Shortcuts
+QS %{QUOTEDSTRING}
+
+# Log formats
+SYSLOGBASE %{SYSLOGTIMESTAMP:timestamp} (?:%{SYSLOGFACILITY} )?%{SYSLOGHOST:logsource} %{SYSLOGPROG}:
+COMMONAPACHELOG %{IPORHOST:clientip} %{HTTPDUSER:ident} %{USER:auth} \[%{HTTPDATE:timestamp}\] "(?:%{WORD:verb} %{NOTSPACE:request}(?: HTTP/%{NUMBER:httpversion})?|%{DATA:rawrequest})" %{NUMBER:response} (?:%{NUMBER:bytes}|-)
+COMBINEDAPACHELOG %{COMMONAPACHELOG} %{QS:referrer} %{QS:agent}
+HTTPD20_ERRORLOG \[%{HTTPDERROR_DATE:timestamp}\] \[%{LOGLEVEL:loglevel}\] (?:\[client %{IPORHOST:clientip}\] ){0,1}%{GREEDYDATA:errormsg}
+HTTPD24_ERRORLOG \[%{HTTPDERROR_DATE:timestamp}\] \[%{WORD:module}:%{LOGLEVEL:loglevel}\] \[pid %{POSINT:pid}:tid %{NUMBER:tid}\]( \(%{POSINT:proxy_errorcode}\)%{DATA:proxy_errormessage}:)?( \[client %{IPORHOST:client}:%{POSINT:clientport}\])? %{DATA:errorcode}: %{GREEDYDATA:message}
+HTTPD_ERRORLOG %{HTTPD20_ERRORLOG}|%{HTTPD24_ERRORLOG}
+
+
+# Log Levels
+LOGLEVEL ([Aa]lert|ALERT|[Tt]race|TRACE|[Dd]ebug|DEBUG|[Nn]otice|NOTICE|[Ii]nfo|INFO|[Ww]arn?(?:ing)?|WARN?(?:ING)?|[Ee]rr?(?:or)?|ERR?(?:OR)?|[Cc]rit?(?:ical)?|CRIT?(?:ICAL)?|[Ff]atal|FATAL|[Ss]evere|SEVERE|EMERG(?:ENCY)?|[Ee]merg(?:ency)?)
+
+
+# Custom
+USER_SYNC_DATE %{MONTHDAY} %{MONTH} %{YEAR} %{TIME}

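The custom USER_SYNC_DATE pattern at the end expands, via MONTHDAY, MONTH, YEAR and TIME, to timestamps of the form ambari-server and ranger_usersync emit. A simplified, hand-expanded stand-in (the real grok expansion is more permissive about month names and digit counts):

    import java.util.regex.Pattern;

    // Stand-in for %{USER_SYNC_DATE}: e.g. "30 Mar 2016 10:15:12"
    Pattern userSyncDate = Pattern.compile("\\d{1,2} [A-Z][a-z]{2} \\d{4} \\d{2}:\\d{2}:\\d{2}");
    System.out.println(userSyncDate.matcher("30 Mar 2016 10:15:12").matches()); // true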
+ 284 - 0
ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/input.config.json.j2

@@ -0,0 +1,284 @@
+{#
+ # Licensed to the Apache Software Foundation (ASF) under one
+ # or more contributor license agreements.  See the NOTICE file
+ # distributed with this work for additional information
+ # regarding copyright ownership.  The ASF licenses this file
+ # to you under the Apache License, Version 2.0 (the
+ # "License"); you may not use this file except in compliance
+ # with the License.  You may obtain a copy of the License at
+ #
+ #   http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ #}
+{
+	"input":[
+		{
+			"type":"accumulo_gc",
+			"rowtype":"service",
+			"path":"{{accumulo_log_dir}}/gc_*.log"
+		},
+		{
+			"type":"accumulo_master",
+			"rowtype":"service",
+			"path":"{{accumulo_log_dir}}/master_*.log"
+		},
+		{
+			"type":"accumulo_monitor",
+			"rowtype":"service",
+			"path":"{{accumulo_log_dir}}/monitor_*.log"
+		},
+		{
+			"type":"accumulo_tracer",
+			"rowtype":"service",
+			"path":"{{accumulo_log_dir}}/tracer_*.log"
+		},
+		{
+			"type":"accumulo_tserver",
+			"rowtype":"service",
+			"path":"{{accumulo_log_dir}}/tserver_*.log"
+		},
+		{
+			"type":"atlas_app",
+			"rowtype":"service",
+			"path":"{{atlas_log_dir}}/application.log"
+		},
+		{
+			"type":"ambari_agent",
+			"rowtype":"service",
+			"path":"{{ambari_agent_log_dir}}/ambari-agent.log"
+		},
+		{
+			"type":"ambari_server",
+			"rowtype":"service",
+			"path":"{{ambari_server_log_dir}}/ambari-server.log"
+		},
+		{
+			"type":"ams_hbase_master",
+			"rowtype":"service",
+			"path":"{{metrics_collector_log_dir}}/hbase-ams-master-*.log"
+		},
+		{
+			"type":"ams_hbase_regionserver",
+			"rowtype":"service",
+			"path":"{{metrics_collector_log_dir}}/hbase-ams-regionserver-*.log"
+		},
+		{
+			"type":"ams_collector",
+			"rowtype":"service",
+			"path":"{{metrics_collector_log_dir}}/ambari-metrics-collector.log"
+		},
+		{
+			"type":"falcon_app",
+			"rowtype":"service",
+			"path":"{{falcon_log_dir}}/falcon.application.log"
+		},
+		{
+			"type":"hbase_master",
+			"rowtype":"service",
+			"path":"{{hbase_log_dir}}/hbase-hbase-master-*.log"
+		},
+		{
+			"type":"hbase_regionserver",
+			"rowtype":"service",
+			"path":"{{hbase_log_dir}}/hbase-hbase-regionserver-*.log"
+		},
+		{
+			"type":"hdfs_datanode",
+			"rowtype":"service",
+			"path":"{{hdfs_log_dir_prefix}}/hdfs/hadoop-hdfs-datanode-*.log"
+		},
+		{
+			"type":"hdfs_namenode",
+			"rowtype":"service",
+			"path":"{{hdfs_log_dir_prefix}}/hdfs/hadoop-hdfs-namenode-*.log"
+		},
+		{
+			"type":"hdfs_journalnode",
+			"rowtype":"service",
+			"path":"{{hdfs_log_dir_prefix}}/hdfs/hadoop-hdfs-journalnode-*.log"
+		},
+		{
+			"type":"hdfs_secondarynamenode",
+			"rowtype":"service",
+			"path":"{{hdfs_log_dir_prefix}}/hdfs/hadoop-hdfs-secondarynamenode-*.log"
+		},
+		{
+			"type":"hdfs_zkfc",
+			"rowtype":"service",
+			"path":"{{hdfs_log_dir_prefix}}/hdfs/hadoop-hdfs-zkfc-*.log"
+		},
+		{
+			"type":"hive_hiveserver2",
+			"rowtype":"service",
+			"path":"{{hive_log_dir}}/hiveserver2.log"
+		},
+		{
+			"type":"hive_metastore",
+			"rowtype":"service",
+			"path":"{{hive_log_dir}}/hivemetastore.log"
+		},
+		{
+			"type":"kafka_controller",
+			"rowtype":"service",
+			"path":"{{kafka_log_dir}}/controller.log"
+		},
+		{
+			"type":"kafka_request",
+			"rowtype":"service",
+			"path":"{{kafka_log_dir}}/kafka-request.log"
+		},
+		{
+			"type":"kafka_logcleaner",
+			"rowtype":"service",
+			"path":"{{kafka_log_dir}}/log-cleaner.log"
+		},
+		{
+			"type":"kafka_server",
+			"rowtype":"service",
+			"path":"{{kafka_log_dir}}/server.log"
+		},
+		{
+			"type":"kafka_statechange",
+			"rowtype":"service",
+			"path":"{{kafka_log_dir}}/state-change.log"
+		},
+		{
+			"type":"knox_gateway",
+			"rowtype":"service",
+			"path":"{{knox_log_dir}}/gateway.log"
+		},
+		{
+			"type":"knox_cli",
+			"rowtype":"service",
+			"path":"{{knox_log_dir}}/knoxcli.log"
+		},
+		{
+			"type":"knox_ldap",
+			"rowtype":"service",
+			"path":"{{knox_log_dir}}/ldap.log"
+		},
+		{
+			"type":"mapred_historyserver",
+			"rowtype":"service",
+			"path":"{{mapred_log_dir_prefix}}/mapred/mapred-mapred-historyserver*.log"
+		},
+		{
+			"type":"logsearch_app",
+			"rowtype":"service",
+			"path":"{{logsearch_log_dir}}/logsearch.log"
+		},
+		{
+			"type":"logsearch_feeder",
+			"rowtype":"service",
+			"path":"{{logfeeder_log_dir}}/logfeeder.log"
+		},
+		{
+			"type":"logsearch_perf",
+			"rowtype":"service",
+			"path":"{{logsearch_log_dir}}/logsearch-performance.log"
+		},
+		{
+			"type":"ranger_admin",
+			"rowtype":"service",
+			"path":"{{ranger_admin_log_dir}}/xa_portal.log"
+		},
+		{
+			"type":"ranger_dbpatch",
+			"rowtype":"service",
+			"is_enabled":"true",
+			"path":"{{ranger_admin_log_dir}}/ranger_db_patch.log"
+		},
+		{
+			"type":"ranger_kms",
+			"rowtype":"service",
+			"path":"{{ranger_kms_log_dir}}/kms.log"
+		},
+		{
+			"type":"ranger_usersync",
+			"rowtype":"service",
+			"path":"{{ranger_usersync_log_dir}}/usersync.log"
+		},
+		{
+			"type":"oozie_app",
+			"rowtype":"service",
+			"path":"{{oozie_log_dir}}/oozie.log"
+		},
+		{
+			"type":"yarn_nodemanager",
+			"rowtype":"service",
+			"path":"{{yarn_log_dir_prefix}}/yarn/yarn-yarn-nodemanager-*.log"
+		},
+		{
+			"type":"yarn_resourcemanager",
+			"rowtype":"service",
+			"path":"{{yarn_log_dir_prefix}}/yarn/yarn-yarn-resourcemanager-*.log"
+		},
+		{
+			"type":"yarn_timelineserver",
+			"rowtype":"service",
+			"path":"{{yarn_log_dir_prefix}}/yarn/yarn-yarn-timelineserver-*.log"
+		},
+		{
+			"type":"yarn_historyserver",
+			"rowtype":"service",
+			"path":"{{yarn_log_dir_prefix}}/yarn/yarn-yarn-historyserver-*.log"
+		},
+		{
+			"type":"yarn_jobsummary",
+			"rowtype":"service",
+			"path":"{{yarn_log_dir_prefix}}/yarn/hadoop-mapreduce.jobsummary.log"
+		},
+		{
+			"type":"storm_drpc",
+			"rowtype":"service",
+			"path":"{{storm_log_dir}}/drpc.log"
+		},
+		{
+			"type":"storm_logviewer",
+			"rowtype":"service",
+			"path":"{{storm_log_dir}}/logviewer.log"
+		},
+		{
+			"type":"storm_nimbus",
+			"rowtype":"service",
+			"path":"{{storm_log_dir}}/nimbus.log"
+		},
+		{
+			"type":"storm_supervisor",
+			"rowtype":"service",
+			"path":"{{storm_log_dir}}/supervisor.log"
+		},
+		{
+			"type":"storm_ui",
+			"rowtype":"service",
+			"path":"{{storm_log_dir}}/ui.log"
+		},
+		{
+			"type":"storm_worker",
+			"rowtype":"service",
+			"path":"{{storm_log_dir}}/*worker*.log"
+		},
+		{
+			"type":"zookeeper",
+			"rowtype":"service",
+			"path":"{{zk_log_dir}}/zookeeper/zookeeper*.out"
+		},
+		{
+			"type":"hdfs_audit",
+			"rowtype":"audit",
+			"is_enabled":"true",
+			"add_fields":{
+				"logType":"HDFSAudit",
+				"enforcer":"hadoop-acl",
+				"repoType":"1",
+				"repo":"hdfs"
+			},
+			"path":"{{hdfs_log_dir_prefix}}/hdfs/hdfs-audit.log"
+		}
+
+	]
+}
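
Because of the {# ... #} license header and the {{...}} variables, the file above is only valid JSON after Jinja2 rendering. A rough structural check is sketched below, assuming Gson on the classpath and substituting a dummy directory for every variable instead of running a real template engine; the file path and dummy directory are illustrative:

    import com.google.gson.JsonArray;
    import com.google.gson.JsonElement;
    import com.google.gson.JsonObject;
    import com.google.gson.JsonParser;

    import java.nio.file.Files;
    import java.nio.file.Paths;

    public class InputConfigCheck {
        public static void main(String[] args) throws Exception {
            String template = new String(
                Files.readAllBytes(Paths.get("input.config.json.j2")), "UTF-8");
            // Not a real Jinja2 render: strip the {# ... #} header and point
            // every {{...}} variable at a dummy directory.
            String json = template
                .replaceAll("(?s)\\{#.*?#\\}", "")
                .replaceAll("\\{\\{[^}]+\\}\\}", "/var/log/dummy");
            JsonArray inputs = new JsonParser().parse(json)
                .getAsJsonObject().getAsJsonArray("input");
            for (JsonElement e : inputs) {
                JsonObject in = e.getAsJsonObject();
                System.out.println(in.get("type").getAsString()
                    + " -> " + in.get("path").getAsString());
            }
        }
    }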

+ 62 - 0
ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/log4j.xml

@@ -0,0 +1,62 @@
+<?xml version="1.0" encoding="UTF-8" ?>
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one or more
+  contributor license agreements.  See the NOTICE file distributed with
+  this work for additional information regarding copyright ownership.
+  The ASF licenses this file to You under the Apache License, Version 2.0
+  (the "License"); you may not use this file except in compliance with
+  the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+<!DOCTYPE log4j:configuration SYSTEM "log4j.dtd">
+
+<log4j:configuration xmlns:log4j="http://jakarta.apache.org/log4j/">
+  <appender name="console" class="org.apache.log4j.ConsoleAppender">
+    <param name="Target" value="System.out" />
+    <layout class="org.apache.log4j.PatternLayout">
+      <param name="ConversionPattern" value="%d [%t] %-5p %C{6} (%F:%L) - %m%n" />
+      <!-- <param name="ConversionPattern" value="%d [%t] %-5p %c %x - %m%n"/> -->
+    </layout>
+  </appender>
+
+  <appender name="daily_rolling_file" class="org.apache.log4j.DailyRollingFileAppender">
+    <param name="file" value="logs/logsearch-logfeeder.log" />
+    <param name="datePattern" value="'.'yyyy-MM-dd" />
+    <param name="append" value="true" />
+    <layout class="org.apache.log4j.PatternLayout">
+      <param name="ConversionPattern" value="%d [%t] %-5p %C{6} (%F:%L) - %m%n"/>
+    </layout>
+  </appender>
+
+  <!-- Logs to suppress BEGIN -->
+  <category name="org.apache.solr.common.cloud.ZkStateReader" additivity="false">
+    <priority value="error" />
+    <appender-ref ref="daily_rolling_file" />
+  </category>
+
+  <category name="org.apache.solr.client.solrj.impl.CloudSolrClient" additivity="false">
+    <priority value="fatal" />
+    <appender-ref ref="daily_rolling_file" />
+  </category>
+  <!-- Logs to suppress END -->
+
+  <category name="org.apache.ambari.logfeeder" additivity="false">
+    <priority value="info" />
+    <appender-ref ref="console" /> 
+    <!-- <appender-ref ref="daily_rolling_file" /> -->
+  </category>
+
+  <root>
+    <priority value="warn" />
+    <!-- <appender-ref ref="console" /> -->
+    <!-- <appender-ref ref="daily_rolling_file" /> -->
+  </root>
+
+</log4j:configuration>
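
Since the file is named log4j.xml, log4j 1.x picks it up automatically from the classpath root; it can also be loaded explicitly through DOMConfigurator. A minimal sketch against the configuration above; the class and logger names are illustrative:

    import org.apache.log4j.Logger;
    import org.apache.log4j.xml.DOMConfigurator;

    public class Log4jXmlSmokeTest {
        public static void main(String[] args) {
            // Explicit load; equivalent to having log4j.xml on the classpath root.
            DOMConfigurator.configure("src/main/resources/log4j.xml");
            Logger log = Logger.getLogger("org.apache.ambari.logfeeder.Demo");
            log.info("reaches the console appender (category level is info)");
            log.debug("suppressed by the org.apache.ambari.logfeeder category");
        }
    }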

+ 60 - 0
ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/log4j.xml.j2

@@ -0,0 +1,60 @@
+<?xml version="1.0" encoding="UTF-8" ?>
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one or more
+  contributor license agreements.  See the NOTICE file distributed with
+  this work for additional information regarding copyright ownership.
+  The ASF licenses this file to You under the Apache License, Version 2.0
+  (the "License"); you may not use this file except in compliance with
+  the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+<!DOCTYPE log4j:configuration SYSTEM "log4j.dtd">
+
+<log4j:configuration xmlns:log4j="http://jakarta.apache.org/log4j/">
+  <appender name="console" class="org.apache.log4j.ConsoleAppender">
+    <param name="Target" value="System.out" />
+    <layout class="org.apache.log4j.PatternLayout">
+      <param name="ConversionPattern" value="%d [%t] %-5p %C{6} (%F:%L) - %m%n" />
+      <!-- <param name="ConversionPattern" value="%d [%t] %-5p %c %x - %m%n"/> -->
+    </layout>
+  </appender>
+
+  <appender name="rolling_file" class="org.apache.log4j.RollingFileAppender">
+    <param name="file" value="{{logfeeder_log_dir}}/logfeeder.log" />
+    <param name="append" value="true" />
+    <layout class="org.apache.log4j.PatternLayout">
+      <param name="ConversionPattern" value="%d [%t] %-5p %C{6} (%F:%L) - %m%n"/>
+    </layout>
+  </appender>
+
+  <!-- Logs to suppress BEGIN -->
+  <category name="org.apache.solr.common.cloud.ZkStateReader" additivity="false">
+    <priority value="error" />
+    <appender-ref ref="rolling_file" />
+  </category>
+
+  <category name="org.apache.solr.client.solrj.impl.CloudSolrClient" additivity="false">
+    <priority value="fatal" />
+    <appender-ref ref="rolling_file" />
+  </category>
+
+  <!-- Logs to suppress END -->
+
+  <category name="org.apache.ambari.logfeeder" additivity="false">
+    <priority value="{{logfeeder_log_level}}" />
+    <appender-ref ref="rolling_file" />
+  </category>
+
+  <root>
+    <level value="warn" />
+    <!-- <appender-ref ref="console" /> -->
+    <appender-ref ref="rolling_file" />
+  </root>
+</log4j:configuration>

+ 25 - 0
ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/logfeeder.properties

@@ -0,0 +1,25 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+logfeeder.checkpoint.folder=
+metrics.collector.hosts=
+
+# Filter config
+logfeeder.solr.url=
+logfeeder.solr.zkhosts=
+logfeeder.solr.core.history=history
+logfeeder.log.filter.enable=true
+# Interval to fetch filter config from solr, in seconds
+logfeeder.solr.config.internal=5
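
The keys above load as a flat java.util.Properties file; note that blank assignments such as logfeeder.solr.url= come back as empty strings, not nulls, so consumers need explicit defaults. A minimal sketch; the class name is illustrative, and only the key names and the default of 5 mirror the file:

    import java.io.FileInputStream;
    import java.util.Properties;

    public class LogfeederPropertiesCheck {
        public static void main(String[] args) throws Exception {
            Properties props = new Properties();
            try (FileInputStream in = new FileInputStream("logfeeder.properties")) {
                props.load(in);
            }
            // A blank value loads as "" rather than null.
            System.out.println("solr url: '" + props.getProperty("logfeeder.solr.url") + "'");
            int refreshSeconds = Integer.parseInt(
                props.getProperty("logfeeder.solr.config.internal", "5"));
            System.out.println("filter config refresh: " + refreshSeconds + "s");
        }
    }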

Some files were not shown because too many files changed in this diff