
AMBARI-3793. Do not store disks_info in DB. Store it as dynamic info in memory that can be used to show on the UI. (Vitaly Brodetskyi via dlysnichenko)

Lisnichenko Dmitro 11 years ago
parent
commit
d44b3124f1
20 changed files with 54 additions and 59 deletions
  1. ambari-agent/src/main/python/ambari_agent/Hardware.py (+3 -2)
  2. ambari-agent/src/main/python/ambari_agent/Heartbeat.py (+4 -0)
  3. ambari-agent/src/main/python/ambari_agent/HostInfo.py (+0 -2)
  4. ambari-agent/src/test/python/TestHostInfo.py (+2 -11)
  5. ambari-server/src/main/java/org/apache/ambari/server/agent/AgentEnv.java (+0 -14)
  6. ambari-server/src/main/java/org/apache/ambari/server/agent/HeartBeat.java (+11 -0)
  7. ambari-server/src/main/java/org/apache/ambari/server/agent/HeartBeatHandler.java (+1 -1)
  8. ambari-server/src/main/java/org/apache/ambari/server/orm/entities/HostEntity.java (+0 -16)
  9. ambari-server/src/main/java/org/apache/ambari/server/state/host/HostHealthyHeartbeatEvent.java (+14 -1)
  10. ambari-server/src/main/java/org/apache/ambari/server/state/host/HostImpl.java (+9 -5)
  11. ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql (+1 -1)
  12. ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql (+1 -1)
  13. ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql (+1 -1)
  14. ambari-server/src/main/resources/Ambari-DDL-Postgres-REMOTE-CREATE.sql (+1 -1)
  15. ambari-server/src/main/resources/Ambari-DDL.sql (+0 -1)
  16. ambari-server/src/main/resources/upgrade/ddl/Ambari-DDL-Oracle-UPGRADE.sql (+2 -0)
  17. ambari-server/src/main/resources/upgrade/ddl/Ambari-DDL-Postgres-UPGRADE-1.3.0.sql (+1 -0)
  18. ambari-server/src/test/java/org/apache/ambari/server/agent/TestHeartbeatHandler.java (+1 -0)
  19. ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClusterTest.java (+1 -1)
  20. ambari-server/src/test/java/org/apache/ambari/server/state/host/HostTest.java (+1 -1)
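The net effect: mount information now travels with every agent heartbeat and is kept only in server memory (HostImpl holds a List<DiskInfo> field) rather than being serialized into the hosts.disks_info column, which the DDL and upgrade scripts drop. A rough agent-side sketch of the new flow, distilled from the Hardware.py and Heartbeat.py hunks below; build_mounts_fragment is a hypothetical helper written for illustration (it assumes it runs inside the ambari_agent package where Hardware is importable) and is not part of the patch:

    from Hardware import Hardware

    def build_mounts_fragment():
        # osdisks() is now a @staticmethod, so no Hardware instance is needed;
        # it parses `df` output and keeps only writable mount points.
        mounts = Hardware.osdisks()
        # The agent attaches the list under the 'mounts' key of the heartbeat,
        # e.g. [{'mountpoint': '/', ...}]; DiskInfo fields beyond 'mountpoint'
        # are not shown in this diff.
        return {'mounts': mounts}

On the server side, the 'mounts' array is bound to HeartBeat.setMounts(), passed along in HostHealthyHeartbeatEvent, and stored by HostImpl.setDisksInfo(), so the list is refreshed on each healthy heartbeat and never written to the database.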

+ 3 - 2
ambari-agent/src/main/python/ambari_agent/Hardware.py

@@ -63,7 +63,8 @@ class Hardware:
     else:
       return None
 
-  def osdisks(self):
+  @staticmethod
+  def osdisks():
     """ Run df to find out the disks on the host. Only works on linux 
     platforms. Note that this parser ignores any filesystems with spaces 
     and any mounts with spaces. """
@@ -72,7 +73,7 @@ class Hardware:
     dfdata = df.communicate()[0]
     lines = dfdata.splitlines()
     for l in lines:
-      mountinfo = self.extractMountInfo(l)
+      mountinfo = Hardware.extractMountInfo(l)
       if mountinfo != None and os.access(mountinfo['mountpoint'], os.W_OK):
         mounts.append(mountinfo)
       pass

+ 4 - 0
ambari-agent/src/main/python/ambari_agent/Heartbeat.py

@@ -27,6 +27,7 @@ from ActionQueue import ActionQueue
 import AmbariConfig
 import hostname
 from HostInfo import HostInfo
+from Hardware import Hardware
 
 
 logger = logging.getLogger()
@@ -83,6 +84,9 @@ class Heartbeat:
       hostInfo.register(nodeInfo, componentsMapped, commandsInProgress)
       heartbeat['agentEnv'] = nodeInfo
       logger.debug("agentEnv : " + str(nodeInfo))
+      mounts = Hardware.osdisks()
+      heartbeat['mounts'] = mounts
+      logger.debug("mounts : " + str(mounts))
 
     return heartbeat
 

+ 0 - 2
ambari-agent/src/main/python/ambari_agent/HostInfo.py

@@ -313,8 +313,6 @@ class HostInfo:
     self.javaProcs(java)
     dict['hostHealth']['activeJavaProcs'] = java
 
-    dict['hostHealth']['diskStatus'] = [self.osdiskAvailableSpace("/")]
-
     liveSvcs = []
     self.checkLiveServices(self.DEFAULT_LIVE_SERVICES, liveSvcs)
     dict['hostHealth']['liveServices'] = liveSvcs

+ 2 - 11
ambari-agent/src/test/python/TestHostInfo.py

@@ -232,7 +232,6 @@ class TestHostInfo(TestCase):
 
   @patch.object(HostInfo, 'get_os_type')
   @patch('os.umask')
-  @patch.object(HostInfo, 'osdiskAvailableSpace')
   @patch.object(HostCheckReportFileHandler, 'writeHostCheckFile')
   @patch.object(PackagesAnalyzer, 'allAvailablePackages')
   @patch.object(PackagesAnalyzer, 'allInstalledPackages')
@@ -249,14 +248,12 @@
   @patch.object(HostInfo, 'hadoopVarLogCount')
   def test_hostinfo_register_suse(self, hvlc_mock, hvrc_mock, eac_mock, cf_mock, jp_mock,
                              cls_mock, cu_mock, gir_mock, gipbr_mock, gipbn_mock,
-                             gpd_mock, aip_mock, aap_mock, whcf_mock, odas_mock,
-                             os_umask_mock, get_os_type_mock):
+                             gpd_mock, aip_mock, aap_mock, whcf_mock, os_umask_mock, get_os_type_mock):
     hvlc_mock.return_value = 1
     hvrc_mock.return_value = 1
     gipbr_mock.return_value = ["pkg1"]
     gipbn_mock.return_value = ["pkg2"]
     gpd_mock.return_value = ["pkg1", "pkg2"]
-    odas_mock.return_value = [{'name':'name1'}]
     get_os_type_mock.return_value = "suse"
 
     hostInfo = HostInfo()
@@ -266,7 +263,6 @@
     self.assertFalse(gpd_mock.called)
     self.assertFalse(aip_mock.called)
     self.assertFalse(aap_mock.called)
-    self.assertTrue(odas_mock.called)
     self.assertTrue(os_umask_mock.called)
     self.assertFalse(whcf_mock.called)
 
@@ -276,7 +272,6 @@
 
   @patch.object(HostInfo, 'get_os_type')
   @patch('os.umask')
-  @patch.object(HostInfo, 'osdiskAvailableSpace')
   @patch.object(HostCheckReportFileHandler, 'writeHostCheckFile')
   @patch.object(PackagesAnalyzer, 'allAvailablePackages')
   @patch.object(PackagesAnalyzer, 'allInstalledPackages')
@@ -294,15 +289,13 @@
   @patch.object(HostInfo, 'checkIptables')
   def test_hostinfo_register(self, cit_mock, hvlc_mock, hvrc_mock, eac_mock, cf_mock, jp_mock,
                              cls_mock, cu_mock, gir_mock, gipbr_mock, gipbn_mock,
-                             gpd_mock, aip_mock, aap_mock, whcf_mock, odas_mock,
-                             os_umask_mock, get_os_type_mock):
+                             gpd_mock, aip_mock, aap_mock, whcf_mock, os_umask_mock, get_os_type_mock):
     cit_mock.return_value = True
     hvlc_mock.return_value = 1
     hvrc_mock.return_value = 1
     gipbr_mock.return_value = ["pkg1"]
     gipbn_mock.return_value = ["pkg2"]
     gpd_mock.return_value = ["pkg1", "pkg2"]
-    odas_mock.return_value = [{'name':'name1'}]
     get_os_type_mock.return_value = "redhat"
 
     hostInfo = HostInfo()
@@ -323,7 +316,6 @@
     self.assertTrue(gir_mock.called)
     self.assertTrue(gpd_mock.called)
     self.assertTrue(aip_mock.called)
-    self.assertTrue(odas_mock.called)
     self.assertTrue(cit_mock.called)
 
     for existingPkg in ["pkg1", "pkg2"]:
@@ -339,7 +331,6 @@
     self.assertEqual(dict['existingUsers'], [])
     self.assertEqual(dict['existingRepos'][0], hostInfo.RESULT_UNAVAILABLE)
     self.assertEqual(dict['installedPackages'], [])
-    self.assertEqual(1, len(dict['hostHealth']['diskStatus']))
     self.assertTrue(dict['iptablesIsRunning'])
 
   @patch("os.path.exists")

+ 0 - 14
ambari-server/src/main/java/org/apache/ambari/server/agent/AgentEnv.java

@@ -149,12 +149,6 @@ public class AgentEnv {
     @SerializedName("liveServices")
     private LiveService[] liveServices = new LiveService[0];
 
-    /**
-     * The available space in the root disk
-     */
-    @SerializedName("diskStatus")
-    private DiskInfo[] diskStatus = new DiskInfo[0];
-
     public void setAgentTimeStampAtReporting(long currentTime) {
       agentTimeStampAtReporting = currentTime;
     }
@@ -186,14 +180,6 @@
     public LiveService[] getLiveServices() {
       return liveServices;
     }
-
-    public void setDiskStatus(DiskInfo[] diskInfo) {
-      diskStatus = diskInfo;
-    }
-
-    public DiskInfo[] getDiskStatus() {
-      return diskStatus;
-    }
   }
 
   public static class PackageDetail {

+ 11 - 0
ambari-server/src/main/java/org/apache/ambari/server/agent/HeartBeat.java

@@ -36,6 +36,7 @@ public class HeartBeat {
   private String hostname;
   List<CommandReport> reports = new ArrayList<CommandReport>();
   List<ComponentStatus> componentStatus = new ArrayList<ComponentStatus>();
+  private List<DiskInfo> mounts = new ArrayList<DiskInfo>();
   HostStatus nodeStatus;
   private AgentEnv agentEnv = null;
 
@@ -99,6 +100,16 @@
     this.componentStatus = componentStatus;
   }
 
+  @JsonProperty("mounts")
+  public List<DiskInfo> getMounts() {
+    return this.mounts;
+  }
+
+  @JsonProperty("mounts")
+  public void setMounts(List<DiskInfo> mounts) {
+    this.mounts = mounts;
+  }
+
   @Override
   public String toString() {
     return "HeartBeat{" +

+ 1 - 1
ambari-server/src/main/java/org/apache/ambari/server/agent/HeartBeatHandler.java

@@ -162,7 +162,7 @@ public class HeartBeatHandler {
     try {
       if (heartbeat.getNodeStatus().getStatus().equals(HostStatus.Status.HEALTHY)) {
         hostObject.handleEvent(new HostHealthyHeartbeatEvent(hostname, now,
-            heartbeat.getAgentEnv()));
+            heartbeat.getAgentEnv(), heartbeat.getMounts()));
       } else {
         hostObject.handleEvent(new HostUnhealthyHeartbeatEvent(hostname, now,
             null));

+ 0 - 16
ambari-server/src/main/java/org/apache/ambari/server/orm/entities/HostEntity.java

@@ -63,12 +63,6 @@ public class HostEntity {
   @Basic
   private String osArch = "";
 
-  @Lob
-  @Column(name = "disks_info", nullable = false, insertable = true,
-      updatable = true, length = 32000)
-  @Basic
-  private String disksInfo = "";
-
   @Column(name = "os_info", insertable = true, updatable = true,
       length = 1000)
   @Basic
@@ -188,14 +182,6 @@
     this.osArch = osArch;
   }
 
-  public String getDisksInfo() {
-    return defaultString(disksInfo);
-  }
-
-  public void setDisksInfo(String disksInfo) {
-    this.disksInfo = disksInfo;
-  }
-
   public String getOsInfo() {
     return defaultString(osInfo);
   }
@@ -257,7 +243,6 @@
     if (cpuInfo != null ? !cpuInfo.equals(that.cpuInfo) : that.cpuInfo != null) return false;
     if (discoveryStatus != null ? !discoveryStatus.equals(that.discoveryStatus) : that.discoveryStatus != null)
       return false;
-    if (disksInfo != null ? !disksInfo.equals(that.disksInfo) : that.disksInfo != null) return false;
     if (hostAttributes != null ? !hostAttributes.equals(that.hostAttributes) : that.hostAttributes != null)
       return false;
     if (hostName != null ? !hostName.equals(that.hostName) : that.hostName != null) return false;
@@ -278,7 +263,6 @@
     result = 31 * result + cpuCount;
     result = 31 * result + (cpuInfo != null ? cpuInfo.hashCode() : 0);
     result = 31 * result + (osArch != null ? osArch.hashCode() : 0);
-    result = 31 * result + (disksInfo != null ? disksInfo.hashCode() : 0);
     result = 31 * result + (osInfo != null ? osInfo.hashCode() : 0);
     result = 31 * result + (osType != null ? osType.hashCode() : 0);
     result = 31 * result + (discoveryStatus != null ? discoveryStatus.hashCode() : 0);

+ 14 - 1
ambari-server/src/main/java/org/apache/ambari/server/state/host/HostHealthyHeartbeatEvent.java

@@ -20,18 +20,24 @@
 package org.apache.ambari.server.state.host;
 
 import org.apache.ambari.server.agent.AgentEnv;
+import org.apache.ambari.server.agent.DiskInfo;
 import org.apache.ambari.server.state.HostEvent;
 import org.apache.ambari.server.state.HostEventType;
 
+import java.util.ArrayList;
+import java.util.List;
+
 public class HostHealthyHeartbeatEvent extends HostEvent {
 
   private final long heartbeatTime;
   private AgentEnv agentEnv = null;
+  private List<DiskInfo> mounts = new ArrayList<DiskInfo>();;
 
-  public HostHealthyHeartbeatEvent(String hostName, long heartbeatTime, AgentEnv env) {
+  public HostHealthyHeartbeatEvent(String hostName, long heartbeatTime, AgentEnv env, List<DiskInfo> mounts) {
     super(hostName, HostEventType.HOST_HEARTBEAT_HEALTHY);
     this.heartbeatTime = heartbeatTime;
     agentEnv = env;
+    this.mounts = mounts;
   }
 
   /**
@@ -49,4 +55,11 @@ public class HostHealthyHeartbeatEvent extends HostEvent {
     return agentEnv;
   }
 
+  /**
+   * @return the disks info, if present.  Can return <code>null</code> if
+   * there was no new info.
+   */
+  public List<DiskInfo> getMounts() {
+    return mounts;
+  }
 }

+ 9 - 5
ambari-server/src/main/java/org/apache/ambari/server/state/host/HostImpl.java

@@ -18,6 +18,7 @@
 package org.apache.ambari.server.state.host;
 
 import java.lang.reflect.Type;
+import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -93,6 +94,7 @@ public class HostImpl implements Host {
 
   private long lastHeartbeatTime = 0L;
   private AgentEnv lastAgentEnv = null;
+  private List<DiskInfo> disksInfo = new ArrayList<DiskInfo>();
   private boolean persisted = false;
   private Integer currentPingPort = null;
 
@@ -298,8 +300,12 @@
         case HOST_HEARTBEAT_HEALTHY:
           HostHealthyHeartbeatEvent hhevent = (HostHealthyHeartbeatEvent) event;
           heartbeatTime = hhevent.getHeartbeatTime();
-          if (null != hhevent.getAgentEnv())
+          if (null != hhevent.getAgentEnv()) {
             host.setLastAgentEnv(hhevent.getAgentEnv());
+          }
+          if (null != hhevent.getMounts() && !hhevent.getMounts().isEmpty()) {
+            host.setDisksInfo(hhevent.getMounts());
+          }
           break;
         case HOST_HEARTBEAT_UNHEALTHY:
           heartbeatTime =
@@ -815,8 +821,7 @@
   public List<DiskInfo> getDisksInfo() {
     try {
       readLock.lock();
-      return gson.<List<DiskInfo>>fromJson(
-                hostEntity.getDisksInfo(), diskInfoType);
+      return this.disksInfo;
     } finally {
       readLock.unlock();
     }
@@ -826,8 +831,7 @@
   public void setDisksInfo(List<DiskInfo> disksInfo) {
     try {
       writeLock.lock();
-      hostEntity.setDisksInfo(gson.toJson(disksInfo, diskInfoType));
-      saveIfPersisted();
+      this.disksInfo = disksInfo;
     } finally {
       writeLock.unlock();
     }

+ 1 - 1
ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql

@@ -32,7 +32,7 @@ CREATE TABLE clusterservices (service_name VARCHAR(255) NOT NULL, cluster_id BIG
 CREATE TABLE clusterstate (cluster_id BIGINT NOT NULL, current_cluster_state VARCHAR(255) NOT NULL, current_stack_version VARCHAR(255) NOT NULL, PRIMARY KEY (cluster_id));
 CREATE TABLE hostcomponentdesiredstate (cluster_id BIGINT NOT NULL, component_name VARCHAR(255) NOT NULL, desired_stack_version VARCHAR(255) NOT NULL, desired_state VARCHAR(255) NOT NULL, host_name VARCHAR(255) NOT NULL, service_name VARCHAR(255) NOT NULL, PRIMARY KEY (cluster_id, component_name, host_name, service_name));
 CREATE TABLE hostcomponentstate (cluster_id BIGINT NOT NULL, component_name VARCHAR(255) NOT NULL, current_stack_version VARCHAR(255) NOT NULL, current_state VARCHAR(255) NOT NULL, host_name VARCHAR(255) NOT NULL, service_name VARCHAR(255) NOT NULL, PRIMARY KEY (cluster_id, component_name, host_name, service_name));
-CREATE TABLE hosts (host_name VARCHAR(255) NOT NULL, cpu_count INTEGER NOT NULL, cpu_info VARCHAR(255) NOT NULL, discovery_status VARCHAR(2000) NOT NULL, disks_info LONGTEXT NOT NULL, host_attributes LONGTEXT, ipv4 VARCHAR(255), ipv6 VARCHAR(255), last_registration_time BIGINT NOT NULL, os_arch VARCHAR(255) NOT NULL, os_info VARCHAR(1000) NOT NULL, os_type VARCHAR(255) NOT NULL, ph_cpu_count INTEGER NOT NULL, public_host_name VARCHAR(255), rack_info VARCHAR(255) NOT NULL, total_mem BIGINT NOT NULL, PRIMARY KEY (host_name));
+CREATE TABLE hosts (host_name VARCHAR(255) NOT NULL, cpu_count INTEGER NOT NULL, cpu_info VARCHAR(255) NOT NULL, discovery_status VARCHAR(2000) NOT NULL, host_attributes LONGTEXT, ipv4 VARCHAR(255), ipv6 VARCHAR(255), last_registration_time BIGINT NOT NULL, os_arch VARCHAR(255) NOT NULL, os_info VARCHAR(1000) NOT NULL, os_type VARCHAR(255) NOT NULL, ph_cpu_count INTEGER NOT NULL, public_host_name VARCHAR(255), rack_info VARCHAR(255) NOT NULL, total_mem BIGINT NOT NULL, PRIMARY KEY (host_name));
 CREATE TABLE hoststate (agent_version VARCHAR(255) NOT NULL, available_mem BIGINT NOT NULL, current_state VARCHAR(255) NOT NULL, health_status VARCHAR(255), host_name VARCHAR(255) NOT NULL, time_in_state BIGINT NOT NULL, PRIMARY KEY (host_name));
 CREATE TABLE servicecomponentdesiredstate (component_name VARCHAR(255) NOT NULL, cluster_id BIGINT NOT NULL, desired_stack_version VARCHAR(255) NOT NULL, desired_state VARCHAR(255) NOT NULL, service_name VARCHAR(255) NOT NULL, PRIMARY KEY (component_name, cluster_id, service_name));
 CREATE TABLE servicedesiredstate (cluster_id BIGINT NOT NULL, desired_host_role_mapping INTEGER NOT NULL, desired_stack_version VARCHAR(255) NOT NULL, desired_state VARCHAR(255) NOT NULL, service_name VARCHAR(255) NOT NULL, PRIMARY KEY (cluster_id, service_name));

+ 1 - 1
ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql

@@ -22,7 +22,7 @@ CREATE TABLE clusterservices (service_name VARCHAR2(255) NOT NULL, cluster_id NU
 CREATE TABLE clusterstate (cluster_id NUMBER(19) NOT NULL, current_cluster_state VARCHAR2(255) NULL, current_stack_version VARCHAR2(255) NULL, PRIMARY KEY (cluster_id));
 CREATE TABLE hostcomponentdesiredstate (cluster_id NUMBER(19) NOT NULL, component_name VARCHAR2(255) NOT NULL, desired_stack_version VARCHAR2(255) NULL, desired_state VARCHAR2(255) NOT NULL, host_name VARCHAR2(255) NOT NULL, service_name VARCHAR2(255) NOT NULL, PRIMARY KEY (cluster_id, component_name, host_name, service_name));
 CREATE TABLE hostcomponentstate (cluster_id NUMBER(19) NOT NULL, component_name VARCHAR2(255) NOT NULL, current_stack_version VARCHAR2(255) NOT NULL, current_state VARCHAR2(255) NOT NULL, host_name VARCHAR2(255) NOT NULL, service_name VARCHAR2(255) NOT NULL, PRIMARY KEY (cluster_id, component_name, host_name, service_name));
-CREATE TABLE hosts (host_name VARCHAR2(255) NOT NULL, cpu_count INTEGER NOT NULL, cpu_info VARCHAR2(255) NULL, discovery_status VARCHAR2(2000) NULL, disks_info CLOB NOT NULL, host_attributes CLOB NULL, ipv4 VARCHAR2(255) NULL, ipv6 VARCHAR2(255) NULL, last_registration_time INTEGER NOT NULL, os_arch VARCHAR2(255) NULL, os_info VARCHAR2(1000) NULL, os_type VARCHAR2(255) NULL, ph_cpu_count INTEGER NOT NULL, public_host_name VARCHAR2(255) NULL, rack_info VARCHAR2(255) NOT NULL, total_mem INTEGER NOT NULL, PRIMARY KEY (host_name));
+CREATE TABLE hosts (host_name VARCHAR2(255) NOT NULL, cpu_count INTEGER NOT NULL, cpu_info VARCHAR2(255) NULL, discovery_status VARCHAR2(2000) NULL, host_attributes CLOB NULL, ipv4 VARCHAR2(255) NULL, ipv6 VARCHAR2(255) NULL, last_registration_time INTEGER NOT NULL, os_arch VARCHAR2(255) NULL, os_info VARCHAR2(1000) NULL, os_type VARCHAR2(255) NULL, ph_cpu_count INTEGER NOT NULL, public_host_name VARCHAR2(255) NULL, rack_info VARCHAR2(255) NOT NULL, total_mem INTEGER NOT NULL, PRIMARY KEY (host_name));
 CREATE TABLE hoststate (agent_version VARCHAR2(255) NULL, available_mem NUMBER(19) NOT NULL, current_state VARCHAR2(255) NOT NULL, health_status VARCHAR2(255) NULL, host_name VARCHAR2(255) NOT NULL, time_in_state NUMBER(19) NOT NULL, PRIMARY KEY (host_name));
 CREATE TABLE servicecomponentdesiredstate (component_name VARCHAR2(255) NOT NULL, cluster_id NUMBER(19) NOT NULL, desired_stack_version VARCHAR2(255) NULL, desired_state VARCHAR2(255) NOT NULL, service_name VARCHAR2(255) NOT NULL, PRIMARY KEY (component_name, cluster_id, service_name));
 CREATE TABLE servicedesiredstate (cluster_id NUMBER(19) NOT NULL, desired_host_role_mapping NUMBER(10) NOT NULL, desired_stack_version VARCHAR2(255) NULL, desired_state VARCHAR2(255) NOT NULL, service_name VARCHAR2(255) NOT NULL, PRIMARY KEY (cluster_id, service_name));

+ 1 - 1
ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql

@@ -49,7 +49,7 @@ GRANT ALL PRIVILEGES ON TABLE ambari.hostcomponentdesiredstate TO :username;
 CREATE TABLE ambari.hostcomponentstate (cluster_id BIGINT NOT NULL, component_name VARCHAR(255) NOT NULL, current_stack_version VARCHAR(255) NOT NULL, current_state VARCHAR(255) NOT NULL, host_name VARCHAR(255) NOT NULL, service_name VARCHAR(255) NOT NULL, PRIMARY KEY (cluster_id, component_name, host_name, service_name));
 GRANT ALL PRIVILEGES ON TABLE ambari.hostcomponentstate TO :username;
 
-CREATE TABLE ambari.hosts (host_name VARCHAR(255) NOT NULL, cpu_count INTEGER NOT NULL, ph_cpu_count INTEGER, cpu_info VARCHAR(255) NOT NULL, discovery_status VARCHAR(2000) NOT NULL, disks_info VARCHAR(32000) NOT NULL, host_attributes VARCHAR(20000) NOT NULL, ipv4 VARCHAR(255), ipv6 VARCHAR(255), public_host_name VARCHAR(255), last_registration_time BIGINT NOT NULL, os_arch VARCHAR(255) NOT NULL, os_info VARCHAR(1000) NOT NULL, os_type VARCHAR(255) NOT NULL, rack_info VARCHAR(255) NOT NULL, total_mem BIGINT NOT NULL, PRIMARY KEY (host_name));
+CREATE TABLE ambari.hosts (host_name VARCHAR(255) NOT NULL, cpu_count INTEGER NOT NULL, ph_cpu_count INTEGER, cpu_info VARCHAR(255) NOT NULL, discovery_status VARCHAR(2000) NOT NULL, host_attributes VARCHAR(20000) NOT NULL, ipv4 VARCHAR(255), ipv6 VARCHAR(255), public_host_name VARCHAR(255), last_registration_time BIGINT NOT NULL, os_arch VARCHAR(255) NOT NULL, os_info VARCHAR(1000) NOT NULL, os_type VARCHAR(255) NOT NULL, rack_info VARCHAR(255) NOT NULL, total_mem BIGINT NOT NULL, PRIMARY KEY (host_name));
 GRANT ALL PRIVILEGES ON TABLE ambari.hosts TO :username;
 
 CREATE TABLE ambari.hoststate (agent_version VARCHAR(255) NOT NULL, available_mem BIGINT NOT NULL, current_state VARCHAR(255) NOT NULL, health_status VARCHAR(255), host_name VARCHAR(255) NOT NULL, time_in_state BIGINT NOT NULL, PRIMARY KEY (host_name));

+ 1 - 1
ambari-server/src/main/resources/Ambari-DDL-Postgres-REMOTE-CREATE.sql

@@ -26,7 +26,7 @@ CREATE TABLE ambari.clusterservices (service_name VARCHAR(255) NOT NULL, cluster
 CREATE TABLE ambari.clusterstate (cluster_id BIGINT NOT NULL, current_cluster_state VARCHAR(255) NOT NULL, current_stack_version VARCHAR(255) NOT NULL, PRIMARY KEY (cluster_id));
 CREATE TABLE ambari.hostcomponentdesiredstate (cluster_id BIGINT NOT NULL, component_name VARCHAR(255) NOT NULL, desired_stack_version VARCHAR(255) NOT NULL, desired_state VARCHAR(255) NOT NULL, host_name VARCHAR(255) NOT NULL, service_name VARCHAR(255) NOT NULL, PRIMARY KEY (cluster_id, component_name, host_name, service_name));
 CREATE TABLE ambari.hostcomponentstate (cluster_id BIGINT NOT NULL, component_name VARCHAR(255) NOT NULL, current_stack_version VARCHAR(255) NOT NULL, current_state VARCHAR(255) NOT NULL, host_name VARCHAR(255) NOT NULL, service_name VARCHAR(255) NOT NULL, PRIMARY KEY (cluster_id, component_name, host_name, service_name));
-CREATE TABLE ambari.hosts (host_name VARCHAR(255) NOT NULL, cpu_count INTEGER NOT NULL, ph_cpu_count INTEGER, cpu_info VARCHAR(255) NOT NULL, discovery_status VARCHAR(2000) NOT NULL, disks_info VARCHAR(10000) NOT NULL, host_attributes VARCHAR(20000) NOT NULL, ipv4 VARCHAR(255), ipv6 VARCHAR(255), public_host_name VARCHAR(255), last_registration_time BIGINT NOT NULL, os_arch VARCHAR(255) NOT NULL, os_info VARCHAR(1000) NOT NULL, os_type VARCHAR(255) NOT NULL, rack_info VARCHAR(255) NOT NULL, total_mem BIGINT NOT NULL, PRIMARY KEY (host_name));
+CREATE TABLE ambari.hosts (host_name VARCHAR(255) NOT NULL, cpu_count INTEGER NOT NULL, ph_cpu_count INTEGER, cpu_info VARCHAR(255) NOT NULL, discovery_status VARCHAR(2000) NOT NULL, host_attributes VARCHAR(20000) NOT NULL, ipv4 VARCHAR(255), ipv6 VARCHAR(255), public_host_name VARCHAR(255), last_registration_time BIGINT NOT NULL, os_arch VARCHAR(255) NOT NULL, os_info VARCHAR(1000) NOT NULL, os_type VARCHAR(255) NOT NULL, rack_info VARCHAR(255) NOT NULL, total_mem BIGINT NOT NULL, PRIMARY KEY (host_name));
 CREATE TABLE ambari.hoststate (agent_version VARCHAR(255) NOT NULL, available_mem BIGINT NOT NULL, current_state VARCHAR(255) NOT NULL, health_status VARCHAR(255), host_name VARCHAR(255) NOT NULL, time_in_state BIGINT NOT NULL,  PRIMARY KEY (host_name));
 CREATE TABLE ambari.servicecomponentdesiredstate (component_name VARCHAR(255) NOT NULL, cluster_id BIGINT NOT NULL, desired_stack_version VARCHAR(255) NOT NULL, desired_state VARCHAR(255) NOT NULL, service_name VARCHAR(255) NOT NULL, PRIMARY KEY (component_name, cluster_id, service_name));
 CREATE TABLE ambari.servicedesiredstate (cluster_id BIGINT NOT NULL, desired_host_role_mapping INTEGER NOT NULL, desired_stack_version VARCHAR(255) NOT NULL, desired_state VARCHAR(255) NOT NULL, service_name VARCHAR(255) NOT NULL, PRIMARY KEY (cluster_id, service_name));

+ 0 - 1
ambari-server/src/main/resources/Ambari-DDL.sql

@@ -77,7 +77,6 @@ cpu_count INTEGER DEFAULT '0' NOT NULL,
 ph_cpu_count INTEGER DEFAULT '0' NOT NULL,
 cpu_info VARCHAR DEFAULT '' NOT NULL,
 os_arch VARCHAR DEFAULT '' NOT NULL,
-disks_info VARCHAR DEFAULT '' NOT NULL,
 os_info VARCHAR DEFAULT '' NOT NULL,
 os_type VARCHAR DEFAULT '' NOT NULL,
 discovery_status VARCHAR DEFAULT '' NOT NULL,

+ 2 - 0
ambari-server/src/main/resources/upgrade/ddl/Ambari-DDL-Oracle-UPGRADE.sql

@@ -74,4 +74,6 @@ SET
 
 ALTER TABLE stage MODIFY (cluster_host_info NOT NULL);
 
+ALTER TABLE ambari.hosts DROP COLUMN disks_info;
+
 commit;

+ 1 - 0
ambari-server/src/main/resources/upgrade/ddl/Ambari-DDL-Postgres-UPGRADE-1.3.0.sql

@@ -153,3 +153,4 @@ UPDATE ambari.stage sd
 --Set cluster_host_info column mandatory
 ALTER TABLE ambari.stage ALTER COLUMN cluster_host_info SET NOT NULL;
 
+ALTER TABLE ambari.hosts DROP COLUMN disks_info;

+ 1 - 0
ambari-server/src/test/java/org/apache/ambari/server/agent/TestHeartbeatHandler.java

@@ -1041,6 +1041,7 @@ public class TestHeartbeatHandler {
     hb.setNodeStatus(new HostStatus(Status.HEALTHY, DummyHostStatus));
     hb.setReports(new ArrayList<CommandReport>());
     hb.setAgentEnv(new AgentEnv());
+    hb.setMounts(new ArrayList<DiskInfo>());
 
     ArrayList<ComponentStatus> componentStatuses = new ArrayList<ComponentStatus>();
     ComponentStatus componentStatus1 = createComponentStatus(DummyCluster, HDFS, DummyHostStatus, State.STARTED,

+ 1 - 1
ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClusterTest.java

@@ -172,7 +172,7 @@ public class ClusterTest {
 
     try {
       clusters.getHost("h1").handleEvent(
-          new HostHealthyHeartbeatEvent("h1", currentTime, null));
+          new HostHealthyHeartbeatEvent("h1", currentTime, null, null));
       fail("Exception should be thrown on invalid event");
     }
     catch (InvalidStateTransitionException e) {

+ 1 - 1
ambari-server/src/test/java/org/apache/ambari/server/state/host/HostTest.java

@@ -187,7 +187,7 @@ public class HostTest {
   private void sendHealthyHeartbeat(Host host, long counter)
       throws Exception {
     HostHealthyHeartbeatEvent e = new HostHealthyHeartbeatEvent(
-        host.getHostName(), counter, null);
+        host.getHostName(), counter, null, null);
     host.handleEvent(e);
   }