
AMBARI-11167. Move alert success logs to DEBUG level (dlysnichenko)

Lisnichenko Dmitro · 10 years ago
commit e59405796e
25 changed files with 234 additions and 81 deletions
  1. +65 -0   ambari-agent/conf/unix/logging.conf.sample
  2. +65 -0   ambari-agent/conf/windows/logging.conf.sample
  3. +23 -1   ambari-agent/pom.xml
  4. +3 -4    ambari-agent/src/main/python/ambari_agent/ActionQueue.py
  5. +2 -10   ambari-agent/src/main/python/ambari_agent/AlertSchedulerHandler.py
  6. +2 -2    ambari-agent/src/main/python/ambari_agent/ClusterConfiguration.py
  7. +10 -11  ambari-agent/src/main/python/ambari_agent/Controller.py
  8. +4 -4    ambari-agent/src/main/python/ambari_agent/DataCleaner.py
  9. +2 -2    ambari-agent/src/main/python/ambari_agent/Heartbeat.py
  10. +4 -4   ambari-agent/src/main/python/ambari_agent/HostCheckReportFileHandler.py
  11. +2 -3   ambari-agent/src/main/python/ambari_agent/NetUtil.py
  12. +2 -3   ambari-agent/src/main/python/ambari_agent/PingPortListener.py
  13. +2 -2   ambari-agent/src/main/python/ambari_agent/StackVersionsFileHandler.py
  14. +1 -1   ambari-agent/src/main/python/ambari_agent/alerts/script_alert.py
  15. +1 -1   ambari-agent/src/main/python/ambari_agent/alerts/web_alert.py
  16. +4 -4   ambari-agent/src/main/python/ambari_agent/apscheduler/scheduler.py
  17. +1 -1   ambari-agent/src/main/python/ambari_agent/apscheduler/threadpool.py
  18. +1 -1   ambari-agent/src/main/python/ambari_agent/hostname.py
  19. +23 -14 ambari-agent/src/main/python/ambari_agent/main.py
  20. +4 -4   ambari-agent/src/main/python/ambari_agent/security.py
  21. +1 -1   ambari-common/src/main/python/resource_management/core/environment.py
  22. +2 -2   ambari-common/src/main/python/resource_management/core/logger.py
  23. +1 -1   ambari-common/src/main/python/resource_management/libraries/script/script.py
  24. +7 -3   ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClustersImpl.java
  25. +2 -2   ambari-server/src/test/python/stacks/2.0.6/HBASE/test_phoenix_queryserver.py

+ 65 - 0
ambari-agent/conf/unix/logging.conf.sample

@@ -0,0 +1,65 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and limitations under the License.
+
+# To create separate log levels and log files for different classes and modules,
+# rename this file to "logging.conf" and restart ambari-agent.
+# Before restarting, define your own loggers and handlers; each logger is
+# selected by its qualname.
+
+# Logger definitions section.
+# The root logger definition is mandatory.
+# You can define as many loggers as needed.
+# Each user-defined logger must be described in its own section, e.g. [logger_<logger name>]
+[loggers]
+keys=root,Controller
+
+# The handlers section names the handlers that process log records.
+# At least one handler must be defined.
+# Each user-defined handler is configured in a [handler_<handler name>] section
+[handlers]
+keys=logfile
+
+# This section names the formatters used to format log records.
+# The format string itself is defined in a [formatter_<formatter name>] section
+[formatters]
+keys=logfileformatter
+
+# This section is mandatory! It defines the settings for the root logger
+[logger_root]
+level=WARNING
+handlers=logfile
+
+# Settings of the user-defined logger called Controller (added as an example)
+# level - may differ from the root log level; one of:
+# CRITICAL|ERROR|WARNING|INFO|DEBUG
+# If level is not defined, it defaults to NOTSET
+# handlers - name of the handler that processes the records (mandatory)
+# qualname - fully qualified name of the logger (mandatory)
+# The qualname usually matches the name passed to logging.getLogger(__name__) in code, i.e. the module name
+[logger_Controller]
+level=DEBUG
+handlers=logfile
+qualname=Controller
+
+# Definition of the log format string
+[formatter_logfileformatter]
+format=%(levelname)s %(asctime)s %(filename)s:%(lineno)d - %(message)s
+
+# Definition of a handler for logging
+# You can create different types of handlers
+[handler_logfile]
+class=handlers.RotatingFileHandler
+level=DEBUG
+args=('/var/log/ambari-agent/ambari-agent.log',"a", 10000000, 25)
+formatter=logfileformatter
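
A minimal sketch (not part of the commit) of how Python's standard logging.config consumes a file like the sample above; the install path is the /etc/ambari-agent/conf target used by the packaging changes below, and the logger name comes from the [logger_Controller] example:

import logging
import logging.config

# Assumed location after renaming the sample to logging.conf;
# the packaging in this commit installs the sample under /etc/ambari-agent/conf.
LOG_CFG = "/etc/ambari-agent/conf/logging.conf"

logging.config.fileConfig(LOG_CFG, disable_existing_loggers=False)

root = logging.getLogger()                    # WARNING, per [logger_root]
controller = logging.getLogger("Controller")  # DEBUG, per [logger_Controller]

root.info("suppressed: the root logger is capped at WARNING")
controller.debug("written to the RotatingFileHandler at DEBUG")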

+ 65 - 0
ambari-agent/conf/windows/logging.conf.sample

@@ -0,0 +1,65 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and limitations under the License.
+
+# To create separate log levels and log files for different classes and modules,
+# rename this file to "logging.conf" and restart ambari-agent.
+# Before restarting, define your own loggers and handlers; each logger is
+# selected by its qualname.
+
+# Logger definitions section.
+# The root logger definition is mandatory.
+# You can define as many loggers as needed.
+# Each user-defined logger must be described in its own section, e.g. [logger_<logger name>]
+[loggers]
+keys=root,Controller
+
+# The handlers section names the handlers that process log records.
+# At least one handler must be defined.
+# Each user-defined handler is configured in a [handler_<handler name>] section
+[handlers]
+keys=logfile
+
+# This section names the formatters used to format log records.
+# The format string itself is defined in a [formatter_<formatter name>] section
+[formatters]
+keys=logfileformatter
+
+# This section is mandatory! This section defined settings for root logger
+[logger_root]
+level=WARNING
+handlers=logfile
+
+# Settings of the user-defined logger called Controller (added as an example)
+# level - may differ from the root log level; one of:
+# CRITICAL|ERROR|WARNING|INFO|DEBUG
+# If level is not defined, it defaults to NOTSET
+# handlers - name of the handler that processes the records (mandatory)
+# qualname - fully qualified name of the logger (mandatory)
+# The qualname usually matches the name passed to logging.getLogger(__name__) in code, i.e. the module name
+[logger_Controller]
+level=DEBUG
+handlers=logfile
+qualname=Controller
+
+# Definition of the log format string
+[formatter_logfileformatter]
+format=%(levelname)s %(asctime)s %(filename)s:%(lineno)d - %(message)s
+
+# Definition of a handler for logging
+# You can create different types of handlers
+[handler_logfile]
+class=handlers.RotatingFileHandler
+level=DEBUG
+args=('\\var\\log\\ambari-agent\\ambari-agent.log',"a", 10000000, 25)
+formatter=logfileformatter

+ 23 - 1
ambari-agent/pom.xml

@@ -248,7 +248,7 @@
                   <location>conf/unix/ambari-sudo.sh</location>
                 </source>
               </sources>
-            </mapping>
+            </mapping>         
             <mapping>
               <directory>${ambari_commons.install.dir}</directory>
               <username>root</username>
@@ -305,6 +305,17 @@
                 </source>
               </sources>
             </mapping>
+            <mapping>
+              <directory>/etc/ambari-agent/conf</directory>
+              <filemode>755</filemode>
+              <username>root</username>
+              <groupname>root</groupname>
+              <sources>
+                <source>
+                  <location>conf/unix/logging.conf.sample</location>
+                </source>
+              </sources>
+            </mapping>              
             <mapping>
               <directory>/usr/sbin</directory>
               <filemode>755</filemode>
@@ -495,6 +506,17 @@
                   <filemode>755</filemode>
               </mapper>
             </data>
+            <data>
+              <src>conf/unix/logging.conf.sample</src>
+              <type>file</type>
+              <mapper>
+                <type>perm</type>
+                <prefix>/etc/ambari-agent/conf</prefix>
+                  <user>root</user>
+                  <group>root</group>
+                  <filemode>755</filemode>
+              </mapper>
+            </data>            
            <data>
               <src>etc/sudoers.d/ambari-agent</src>
               <type>file</type>

+ 3 - 4
ambari-agent/src/main/python/ambari_agent/ActionQueue.py

@@ -25,7 +25,6 @@ import threading
 import pprint
 import os
 import json
-from random import randint
 import time
 
 from AgentException import AgentException
@@ -95,7 +94,7 @@ class ActionQueue(threading.Thread):
     self.statusCommandQueue.queue.clear()
 
     for command in commands:
-      logger.info("Adding " + command['commandType'] + " for service " + \
+      logger.debug("Adding " + command['commandType'] + " for service " + \
                   command['serviceName'] + " of cluster " + \
                   command['clusterName'] + " to the queue.")
       self.statusCommandQueue.put(command)
@@ -107,7 +106,7 @@ class ActionQueue(threading.Thread):
       if not command.has_key('clusterName'):
         command['clusterName'] = 'null'
 
-      logger.info("Adding " + command['commandType'] + " for role " + \
+      logger.debug("Adding " + command['commandType'] + " for role " + \
                   command['role'] + " for service " + \
                   command['serviceName'] + " of cluster " + \
                   command['clusterName'] + " to the queue.")
@@ -230,7 +229,7 @@ class ActionQueue(threading.Thread):
               "cluster {cluster}.".format(
               commandId = str(commandId), role=command['role'],
               cluster=clusterName)
-    logger.info(message)
+    logger.debug(message)
 
     taskId = command['taskId']
     # Preparing 'IN_PROGRESS' report

+ 2 - 10
ambari-agent/src/main/python/ambari_agent/AlertSchedulerHandler.py

@@ -35,7 +35,7 @@ from alerts.port_alert import PortAlert
 from alerts.script_alert import ScriptAlert
 from alerts.web_alert import WebAlert
 
-logger = logging.getLogger()
+logger = logging.getLogger(__name__)
 
 class AlertSchedulerHandler():
   FILENAME = 'definitions.json'
@@ -364,15 +364,7 @@ def main():
   args = list(sys.argv)
   del args[0]
 
-  try:
-    logger.setLevel(logging.DEBUG)
-  except TypeError:
-    logger.setLevel(12)
-
-  ch = logging.StreamHandler()
-  ch.setLevel(logger.level)
-  logger.addHandler(ch)
-    
+  
   ash = AlertSchedulerHandler(args[0], args[1], args[2], False)
   ash.start()
   

+ 2 - 2
ambari-agent/src/main/python/ambari_agent/ClusterConfiguration.py

@@ -23,7 +23,7 @@ import json
 import os
 import threading
 
-logger = logging.getLogger()
+logger = logging.getLogger(__name__)
 
 class ClusterConfiguration():
   """
@@ -161,4 +161,4 @@ class ClusterConfiguration():
       logger.debug("Cache miss for configuration property {0} in cluster {1}".format(key, cluster_name))
       return None
     finally:
-      self.__cache_lock.release()
+      self.__cache_lock.release()
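
Many of the modules in this commit switch from the anonymous root logger to logging.getLogger(__name__). A small illustrative sketch (the logger names are stand-ins for agent modules) of why that matters: named loggers still propagate to the root handlers, but their levels can be tuned per module, for example via qualname entries in logging.conf:

import logging

# Root handler and level, roughly as the agent sets them up.
logging.basicConfig(level=logging.WARNING,
                    format="%(levelname)s %(name)s - %(message)s")

# Stand-ins for module loggers such as Controller or ClusterConfiguration.
controller_log = logging.getLogger("Controller")
controller_log.setLevel(logging.DEBUG)   # verbose for this one module only

cluster_log = logging.getLogger("ClusterConfiguration")

controller_log.debug("emitted: this module's level was lowered to DEBUG")
cluster_log.info("suppressed: inherits the WARNING level of the root logger")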

+ 10 - 11
ambari-agent/src/main/python/ambari_agent/Controller.py

@@ -45,7 +45,7 @@ from ambari_agent.ClusterConfiguration import  ClusterConfiguration
 from ambari_agent.RecoveryManager import  RecoveryManager
 from ambari_agent.HeartbeatHandlers import HeartbeatStopHandlers, bind_signal_handlers
 
-logger = logging.getLogger()
+logger = logging.getLogger(__name__)
 
 AGENT_AUTO_RESTART_EXIT_CODE = 77
 
@@ -122,7 +122,7 @@ class Controller(threading.Thread):
 
         try:
           server_ip = socket.gethostbyname(self.hostname)
-          logger.info("Registering with %s (%s) (agent=%s)", self.hostname, server_ip, prettyData)
+          logger.debug("Registering with %s (%s) (agent=%s)", self.hostname, server_ip, prettyData)
         except socket.error:
           logger.warn("Unable to determine the IP address of '%s', agent registration may fail (agent=%s)",
                       self.hostname, prettyData)
@@ -148,11 +148,11 @@ class Controller(threading.Thread):
           return ret
 
         self.responseId = int(ret['responseId'])
-        logger.info("Registration Successful (response id = %s)", self.responseId)
+        logger.debug("Registration Successful (response id = %s)", self.responseId)
 
         self.isRegistered = True
         if 'statusCommands' in ret.keys():
-          logger.info("Got status commands on registration.")
+          logger.debug("Got status commands on registration.")
           self.addToStatusQueue(ret['statusCommands'])
         else:
           self.hasMappedComponents = False
@@ -241,7 +241,7 @@ class Controller(threading.Thread):
 
         serverId = int(response['responseId'])
 
-        logger.info('Heartbeat response received (id = %s)', serverId)
+        logger.debug('Heartbeat response received (id = %s)', serverId)
 
         if 'hasMappedComponents' in response.keys():
           self.hasMappedComponents = response['hasMappedComponents'] is not False
@@ -299,7 +299,7 @@ class Controller(threading.Thread):
           logger.error("Received the restartAgent command")
           self.restartAgent()
         else:
-          logger.info("No commands sent from %s", self.serverHostname)
+          logger.debug("No commands sent from %s", self.serverHostname)
 
         if retry:
           logger.info("Reconnected to %s", self.heartbeatUrl)
@@ -366,12 +366,12 @@ class Controller(threading.Thread):
   def registerAndHeartbeat(self):
     registerResponse = self.registerWithServer()
     message = registerResponse['response']
-    logger.info("Registration response from %s was %s", self.serverHostname, message)
+    logger.debug("Registration response from %s was %s", self.serverHostname, message)
 
     if self.isRegistered:
       # Clearing command queue to stop executing "stale" commands
       # after registration
-      logger.info('Resetting ActionQueue...')
+      logger.debug('Resetting ActionQueue...')
       self.actionQueue.reset()
 
       # Process callbacks
@@ -403,7 +403,7 @@ class Controller(threading.Thread):
 
 
   def updateComponents(self, cluster_name):
-    logger.info("Updating components map of cluster " + cluster_name)
+    logger.debug("Updating components map of cluster " + cluster_name)
 
     # May throw IOError on server connection error
     response = self.sendRequest(self.componentsUrl + cluster_name, None)
@@ -416,7 +416,7 @@ class Controller(threading.Thread):
           LiveStatus.CLIENT_COMPONENTS.append({"serviceName": service, "componentName": component})
         else:
           LiveStatus.COMPONENTS.append({"serviceName": service, "componentName": component})
-    logger.info("Components map updated")
+    logger.debug("Components map updated")
     logger.debug("LiveStatus.SERVICES" + str(LiveStatus.SERVICES))
     logger.debug("LiveStatus.CLIENT_COMPONENTS" + str(LiveStatus.CLIENT_COMPONENTS))
     logger.debug("LiveStatus.COMPONENTS" + str(LiveStatus.COMPONENTS))
@@ -424,7 +424,6 @@ class Controller(threading.Thread):
 def main(argv=None):
   # Allow Ctrl-C
 
-  logger.setLevel(logging.INFO)
   formatter = logging.Formatter("%(asctime)s %(filename)s:%(lineno)d - \
     %(message)s")
   stream_handler = logging.StreamHandler()

+ 4 - 4
ambari-agent/src/main/python/ambari_agent/DataCleaner.py

@@ -25,7 +25,7 @@ import time
 import re
 import logging
 
-logger = logging.getLogger()
+logger = logging.getLogger(__name__)
 
 class DataCleaner(threading.Thread):
   COMMAND_FILE_NAMES_PATTERN = 'errors-\d+.txt|output-\d+.txt|site-\d+.pp|structured-out-\d+.json|command-\d+.json'
@@ -36,7 +36,7 @@ class DataCleaner(threading.Thread):
   def __init__(self, config):
     threading.Thread.__init__(self)
     self.daemon = True
-    logger.info('Data cleanup thread started')
+    logger.debug('Data cleanup thread started')
     self.config = config
 
     self.file_max_age = config.get('agent', 'data_cleanup_max_age', 86400)
@@ -117,9 +117,9 @@ class DataCleaner(threading.Thread):
 
   def run(self):
     while not self.stopped:
-      logger.info('Data cleanup started')
+      logger.debug('Data cleanup started')
       self.cleanup()
-      logger.info('Data cleanup finished')
+      logger.debug('Data cleanup finished')
       time.sleep(self.cleanup_interval)
 
 

+ 2 - 2
ambari-agent/src/main/python/ambari_agent/Heartbeat.py

@@ -29,7 +29,7 @@ from ambari_agent.HostInfo import HostInfo
 from ambari_agent.Hardware import Hardware
 
 
-logger = logging.getLogger()
+logger = logging.getLogger(__name__)
 
 firstContact = True
 class Heartbeat:
@@ -74,7 +74,7 @@ class Heartbeat:
     if int(id) == 0:
       componentsMapped = False
 
-    logger.info("Building Heartbeat: {responseId = %s, timestamp = %s, commandsInProgress = %s, componentsMapped = %s}",
+    logger.debug("Building Heartbeat: {responseId = %s, timestamp = %s, commandsInProgress = %s, componentsMapped = %s}",
         str(id), str(timestamp), repr(commandsInProgress), repr(componentsMapped))
 
     if logger.isEnabledFor(logging.DEBUG):

+ 4 - 4
ambari-agent/src/main/python/ambari_agent/HostCheckReportFileHandler.py

@@ -25,7 +25,7 @@ import traceback
 from AmbariConfig import AmbariConfig
 import ConfigParser;
 
-logger = logging.getLogger()
+logger = logging.getLogger(__name__)
 
 class HostCheckReportFileHandler:
 
@@ -88,7 +88,7 @@ class HostCheckReportFileHandler:
       return
 
     try:
-      logger.info("Host check report at " + self.hostCheckFilePath)
+      logger.debug("Host check report at " + self.hostCheckFilePath)
       config = ConfigParser.RawConfigParser()
       config.add_section('metadata')
       config.set('metadata', 'created', str(datetime.datetime.now()))
@@ -138,12 +138,12 @@ class HostCheckReportFileHandler:
 
   def removeFile(self, path):
     if os.path.isfile(path):
-      logger.info("Removing old host check file at %s" % path)
+      logger.debug("Removing old host check file at %s" % path)
       os.remove(path)
 
   def touchFile(self, path):
     if not os.path.isfile(path):
-      logger.info("Creating host check file at %s" % path)
+      logger.debug("Creating host check file at %s" % path)
       open(path, 'w').close()
 
 

+ 2 - 3
ambari-agent/src/main/python/ambari_agent/NetUtil.py

@@ -18,14 +18,13 @@ from urlparse import urlparse
 import logging
 import httplib
 from ssl import SSLError
-import platform
 from HeartbeatHandlers import HeartbeatStopHandlers
 
 ERROR_SSL_WRONG_VERSION = "SSLError: Failed to connect. Please check openssl library versions. \n" +\
               "Refer to: https://bugzilla.redhat.com/show_bug.cgi?id=1022468 for more details."
 LOG_REQUEST_MESSAGE = "GET %s -> %s, body: %s"
 
-logger = logging.getLogger()
+logger = logging.getLogger(__name__)
 
 
 class NetUtil:
@@ -57,7 +56,7 @@ class NetUtil:
 
        Additionally returns body of request, if available
     """
-    logger.info("Connecting to " + url)
+    logger.debug("Connecting to " + url)
     responseBody = ""
 
     try:

+ 2 - 3
ambari-agent/src/main/python/ambari_agent/PingPortListener.py

@@ -20,12 +20,11 @@ limitations under the License.
 
 import sys
 import logging
-import AmbariConfig
 import threading
 import socket
 import subprocess
 
-logger = logging.getLogger()
+logger = logging.getLogger(__name__)
 FUSER_CMD = "fuser {0}/tcp 2>/dev/null | awk '{1}'"
 PSPF_CMD = "ps -fp {0}"
 PORT_IN_USE_MESSAGE = "Could not open port {0} because port already used by another process:\n{1}"
@@ -48,7 +47,7 @@ class PingPortListener(threading.Thread):
     self.socket.bind((self.host, self.port))
     self.socket.listen(1)
     config.set('agent','current_ping_port',str(self.socket.getsockname()[1]))
-    logger.info("Ping port listener started on port: " + str(self.socket.getsockname()[1]))
+    logger.debug("Ping port listener started on port: " + str(self.socket.getsockname()[1]))
 
 
   def run_os_command_in_shell(self, command):

+ 2 - 2
ambari-agent/src/main/python/ambari_agent/StackVersionsFileHandler.py

@@ -24,7 +24,7 @@ import threading
 import traceback
 import shutil
 
-logger = logging.getLogger()
+logger = logging.getLogger(__name__)
 
 class StackVersionsFileHandler:
 
@@ -104,7 +104,7 @@ class StackVersionsFileHandler:
      Called to create file when it does not exist
     '''
     if not os.path.isfile(self.versionsFilePath):
-      logger.info("Creating stacks versions file at %s" % self.versionsFilePath)
+      logger.debug("Creating stacks versions file at %s" % self.versionsFilePath)
       open(self.versionsFilePath, 'w').close()
 
 

+ 1 - 1
ambari-agent/src/main/python/ambari_agent/alerts/script_alert.py

@@ -25,7 +25,7 @@ import re
 from alerts.base_alert import BaseAlert
 from resource_management.core.environment import Environment
 
-logger = logging.getLogger()
+logger = logging.getLogger(__name__)
 
 class ScriptAlert(BaseAlert):
   def __init__(self, alert_meta, alert_source_meta, config):

+ 1 - 1
ambari-agent/src/main/python/ambari_agent/alerts/web_alert.py

@@ -46,7 +46,7 @@ except ImportError:
   import md5
   _md5 = md5.new
 
-logger = logging.getLogger()
+logger = logging.getLogger(__name__)
 
 # default timeout
 DEFAULT_CONNECTION_TIMEOUT = 5

+ 4 - 4
ambari-agent/src/main/python/ambari_agent/apscheduler/scheduler.py

@@ -259,7 +259,7 @@ class Scheduler(object):
         event = JobStoreEvent(EVENT_JOBSTORE_JOB_ADDED, jobstore, job)
         self._notify_listeners(event)
 
-        logger.info('Added job "%s" to job store "%s"', job, jobstore)
+        logger.debug('Added job "%s" to job store "%s"', job, jobstore)
 
         # Notify the scheduler about the new job
         if wakeup:
@@ -505,7 +505,7 @@ class Scheduler(object):
                                    'reached (%d)', job, job.max_instances)
                     break
 
-                logger.info('Running job "%s" (scheduled at %s)', job,
+                logger.debug('Running job "%s" (scheduled at %s)', job,
                             run_time)
 
                 try:
@@ -524,7 +524,7 @@ class Scheduler(object):
                                      retval=retval)
                     self._notify_listeners(event)
 
-                    logger.info('Job "%s" executed successfully', job)
+                    logger.debug('Job "%s" executed successfully', job)
 
                 job.remove_instance()
 
@@ -571,7 +571,7 @@ class Scheduler(object):
     def _main_loop(self):
         """Executes jobs on schedule."""
 
-        logger.info('Scheduler started')
+        logger.debug('Scheduler started')
         self._notify_listeners(SchedulerEvent(EVENT_SCHEDULER_START))
 
         self._wakeup.clear()

+ 1 - 1
ambari-agent/src/main/python/ambari_agent/apscheduler/threadpool.py

@@ -48,7 +48,7 @@ class ThreadPool(object):
         self._shutdown = False
 
         _threadpools.add(ref(self))
-        logger.info('Started thread pool with %d core threads and %s maximum '
+        logger.debug('Started thread pool with %d core threads and %s maximum '
                     'threads', core_threads, max_threads or 'unlimited')
 
     def _adjust_threadcount(self):

+ 1 - 1
ambari-agent/src/main/python/ambari_agent/hostname.py

@@ -25,7 +25,7 @@ import AmbariConfig
 import logging
 import traceback
 
-logger = logging.getLogger()
+logger = logging.getLogger(__name__)
 
 cached_hostname = None
 cached_public_hostname = None

+ 23 - 14
ambari-agent/src/main/python/ambari_agent/main.py

@@ -19,6 +19,7 @@ limitations under the License.
 '''
 
 import logging.handlers
+import logging.config
 import signal
 from optparse import OptionParser
 import sys
@@ -42,7 +43,7 @@ from ambari_commons import shell
 import HeartbeatHandlers
 from HeartbeatHandlers import bind_signal_handlers
 from ambari_commons.constants import AMBARI_SUDO_BINARY
-logger = logging.getLogger()
+logger = logging.getLogger(__name__)
 
 formatstr = "%(levelname)s %(asctime)s %(filename)s:%(lineno)d - %(message)s"
 agentPid = os.getpid()
@@ -70,19 +71,27 @@ def setup_logging(verbose):
 
 def update_log_level(config):
   # Setting loglevel based on config file
-  try:
-    loglevel = config.get('agent', 'loglevel')
-    if loglevel is not None:
-      if loglevel == 'DEBUG':
-        logging.basicConfig(format=formatstr, level=logging.DEBUG, filename=AmbariConfig.AmbariConfig.getLogFile())
-        logger.setLevel(logging.DEBUG)
-        logger.info("Newloglevel=logging.DEBUG")
-      else:
-        logging.basicConfig(format=formatstr, level=logging.INFO, filename=AmbariConfig.AmbariConfig.getLogFile())
-        logger.setLevel(logging.INFO)
-        logger.debug("Newloglevel=logging.INFO")
-  except Exception, err:
-    logger.info("Default loglevel=DEBUG")
+  global logger
+  log_cfg_file = os.path.join(os.path.dirname(AmbariConfig.AmbariConfig.getConfigFile()), "logging.conf")
+  if os.path.exists(log_cfg_file):
+    logging.config.fileConfig(log_cfg_file)
+    # create logger
+    logger = logging.getLogger(__name__)
+    logger.info("Logging configured by " + log_cfg_file)
+  else:  
+    try:
+      loglevel = config.get('agent', 'loglevel')
+      if loglevel is not None:
+        if loglevel == 'DEBUG':
+          logging.basicConfig(format=formatstr, level=logging.DEBUG, filename=AmbariConfig.AmbariConfig.getLogFile())
+          logger.setLevel(logging.DEBUG)
+          logger.info("Newloglevel=logging.DEBUG")
+        else:
+          logging.basicConfig(format=formatstr, level=logging.INFO, filename=AmbariConfig.AmbariConfig.getLogFile())
+          logger.setLevel(logging.INFO)
+          logger.debug("Newloglevel=logging.INFO")
+    except Exception, err:
+      logger.info("Default loglevel=DEBUG")
 
 
 #  ToDo: move that function inside AmbariConfig
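
The reworked update_log_level above prefers a fileConfig-style logging.conf placed next to the agent configuration file and only falls back to the old [agent] loglevel handling when that file is absent. A condensed, standalone sketch of that lookup order (the paths and parameters are illustrative, not the agent's actual API):

import logging
import logging.config
import os

FORMAT = "%(levelname)s %(asctime)s %(filename)s:%(lineno)d - %(message)s"

def configure_agent_logging(config_file, log_file, loglevel="INFO"):
    # Prefer a logging.conf sitting next to the agent config file.
    log_cfg = os.path.join(os.path.dirname(config_file), "logging.conf")
    if os.path.exists(log_cfg):
        logging.config.fileConfig(log_cfg)
        logging.getLogger(__name__).info("Logging configured by %s", log_cfg)
    else:
        # Fall back to the configured loglevel, as before this change.
        level = logging.DEBUG if loglevel == "DEBUG" else logging.INFO
        logging.basicConfig(format=FORMAT, level=level, filename=log_file)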

+ 4 - 4
ambari-agent/src/main/python/ambari_agent/security.py

@@ -29,7 +29,7 @@ import traceback
 import hostname
 import platform
 
-logger = logging.getLogger()
+logger = logging.getLogger(__name__)
 
 GEN_AGENT_KEY = 'openssl req -new -newkey rsa:1024 -nodes -keyout "%(keysdir)s'+os.sep+'%(hostname)s.key" '\
 	'-subj /OU=%(hostname)s/ -out "%(keysdir)s'+os.sep+'%(hostname)s.csr"'
@@ -52,11 +52,11 @@ class VerifiedHTTPSConnection(httplib.HTTPSConnection):
       try:
         sock = self.create_connection()
         self.sock = ssl.wrap_socket(sock, cert_reqs=ssl.CERT_NONE)
-        logger.info('SSL connection established. Two-way SSL authentication is '
+        logger.debug('SSL connection established. Two-way SSL authentication is '
                     'turned off on the server.')
       except (ssl.SSLError, AttributeError):
         self.two_way_ssl_required = True
-        logger.info('Insecure connection to https://' + self.host + ':' + self.port +
+        logger.error('Insecure connection to https://' + self.host + ':' + self.port +
                     '/ failed. Reconnecting using two-way SSL authentication..')
 
     if self.two_way_ssl_required:
@@ -90,7 +90,7 @@ class VerifiedHTTPSConnection(httplib.HTTPSConnection):
   def create_connection(self):
     if self.sock:
       self.sock.close()
-    logger.info("SSL Connect being called.. connecting to the server")
+    logger.debug("SSL Connect being called.. connecting to the server")
     sock = socket.create_connection((self.host, self.port), 60)
     sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
     if self._tunnel_host:

+ 1 - 1
ambari-common/src/main/python/resource_management/core/environment.py

@@ -48,7 +48,7 @@ class Environment(object):
     self.reset(basedir, test_mode, tmp_dir)
     
     if not Logger.logger:
-      Logger.initialize_logger(logging_level)
+      Logger.initialize_logger(__name__, logging_level)
 
   def reset(self, basedir, test_mode, tmp_dir):
     self.system = System.get_instance()

+ 2 - 2
ambari-common/src/main/python/resource_management/core/logger.py

@@ -34,7 +34,7 @@ class Logger:
   sensitive_strings = {}
   
   @staticmethod
-  def initialize_logger(logging_level=logging.INFO, name='resource_management', format='%(asctime)s - %(message)s'):
+  def initialize_logger(name='resource_management', logging_level=logging.INFO, format='%(asctime)s - %(message)s'):
     # set up logging (two separate loggers for stderr and stdout with different loglevels)
     logger = logging.getLogger(name)
     logger.setLevel(logging_level)
@@ -47,7 +47,7 @@ class Logger:
     cherr.setFormatter(formatter)
     logger.addHandler(cherr)
     logger.addHandler(chout)
-    
+
     Logger.logger = logger
     
     return logger, chout, cherr
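
With the reordered signature above, the logger name is now the first positional parameter, which is what allows Environment (above) and Script (below) to pass __name__ directly. Illustrative calls, assuming resource_management is on the import path:

import logging
from resource_management.core.logger import Logger

# Default level (INFO) with an explicit logger name, as Script.execute() now does.
logger, chout, cherr = Logger.initialize_logger(__name__)

# Explicit level passed positionally after the name, as Environment.__init__() now does.
dbg_logger, _, _ = Logger.initialize_logger("resource_management", logging.DEBUG)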

+ 1 - 1
ambari-common/src/main/python/resource_management/libraries/script/script.py

@@ -166,7 +166,7 @@ class Script(object):
     Sets up logging;
     Parses command parameters and executes method relevant to command type
     """
-    logger, chout, cherr = Logger.initialize_logger()
+    logger, chout, cherr = Logger.initialize_logger(__name__)
     
     # parse arguments
     if len(sys.argv) < 7:

+ 7 - 3
ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClustersImpl.java

@@ -450,10 +450,14 @@ public class ClustersImpl implements Clusters {
     checkLoaded();
 
     Map<String, Host> hostMap = new HashMap<String, Host>();
-
+    Host host = null;
     for (String hostName : hostSet) {
-      Host host = hosts.get(hostName);
-      if (null == hostName) {
+      if (null != hostName) {
+          host= hosts.get(hostName);
+        if (host == null) {
+          throw new HostNotFoundException(hostName);
+        }
+      } else {
         throw new HostNotFoundException(hostName);
       }
 

+ 2 - 2
ambari-server/src/test/python/stacks/2.0.6/HBASE/test_phoenix_queryserver.py

@@ -121,7 +121,7 @@ class TestPhoenixQueryServer(RMFTestCase):
     )
     self.assertNoMoreResources()
 
-  @skip("there's nothing to upgrade to yet")    
+  @skip("there's nothing to upgrade to yet")
   def test_start_default_24(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/phoenix_queryserver.py",
                    classname = "PhoenixQueryServer",
@@ -359,4 +359,4 @@ class TestPhoenixQueryServer(RMFTestCase):
                               group='hadoop',
                               owner='hbase',
                               content='log4jproperties\nline2'
-    )
+    )