#!/usr/bin/env python
'''
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements.  See the NOTICE file
distributed with this work for additional information
regarding copyright ownership.  The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License.  You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''

import StringIO
import logging
import os
import shutil
import zipfile
import urllib2
import urllib

logger = logging.getLogger()


class CachingException(Exception):
  pass


class FileCache():
  """
  Provides caching and lookup for service metadata files.
  If service metadata is not available in the cache,
  downloads the relevant files from the server.
  """

  STACKS_CACHE_DIRECTORY = "stacks"
  COMMON_SERVICES_DIRECTORY = "common-services"
  CUSTOM_ACTIONS_CACHE_DIRECTORY = "custom_actions"
  HOST_SCRIPTS_CACHE_DIRECTORY = "host_scripts"
  HASH_SUM_FILE = ".hash"
  ARCHIVE_NAME = "archive.zip"
  BLOCK_SIZE = 1024 * 16
  SOCKET_TIMEOUT = 10

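  # The directory names above are assumed to mirror the layout of the
  # server's "resources" folder, so a populated cache is expected to look
  # roughly like:
  #   <cache_dir>/stacks/...
  #   <cache_dir>/common-services/...
  #   <cache_dir>/custom_actions/
  #   <cache_dir>/host_scripts/
  # with a ".hash" marker file inside each cached directory.
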
  def __init__(self, config):
    self.service_component_pool = {}
    self.config = config
    self.cache_dir = config.get('agent', 'cache_dir')
    # Defines whether a command should fail when downloading scripts
    # from the server is not possible, or whether the agent should
    # fall back to the local copy
    self.tolerate_download_failures = \
      config.get('agent', 'tolerate_download_failures').lower() == 'true'
    self.reset()

  def reset(self):
    self.uptodate_paths = []  # Paths that have already been checked recently

  def get_service_base_dir(self, command, server_url_prefix):
    """
    Returns the base directory for a service
    """
    service_subpath = command['commandParams']['service_package_folder']
    return self.provide_directory(self.cache_dir, service_subpath,
                                  server_url_prefix)

  def get_hook_base_dir(self, command, server_url_prefix):
    """
    Returns the base directory for hooks
    """
    try:
      hooks_subpath = command['commandParams']['hooks_folder']
    except KeyError:
      return None
    subpath = os.path.join(self.STACKS_CACHE_DIRECTORY, hooks_subpath)
    return self.provide_directory(self.cache_dir, subpath,
                                  server_url_prefix)

  def get_custom_actions_base_dir(self, server_url_prefix):
    """
    Returns the base directory for custom action scripts
    """
    return self.provide_directory(self.cache_dir,
                                  self.CUSTOM_ACTIONS_CACHE_DIRECTORY,
                                  server_url_prefix)

  def get_host_scripts_base_dir(self, server_url_prefix):
    """
    Returns the base directory for host scripts (host alerts, etc.),
    i.e. scripts that are not part of the main agent code
    """
    return self.provide_directory(self.cache_dir,
                                  self.HOST_SCRIPTS_CACHE_DIRECTORY,
                                  server_url_prefix)

  def provide_directory(self, cache_path, subdirectory, server_url_prefix):
    """
    Ensures that the directory in the cache is up-to-date. Raises a
    CachingException if any problem occurs.

    Parameters:
      cache_path: full path to the cache directory
      subdirectory: subpath inside the cache
      server_url_prefix: URL of the "resources" folder on the server
    """
    full_path = os.path.join(cache_path, subdirectory)
    logger.debug("Trying to provide directory {0}".format(subdirectory))
    try:
      if full_path not in self.uptodate_paths:
        logger.debug("Checking if update is available for "
                     "directory {0}".format(full_path))
        # Need to check for updates at the server
        remote_url = self.build_download_url(server_url_prefix,
                                             subdirectory, self.HASH_SUM_FILE)
        memory_buffer = self.fetch_url(remote_url)
        remote_hash = memory_buffer.getvalue().strip()
        local_hash = self.read_hash_sum(full_path)
        if not local_hash or local_hash != remote_hash:
          logger.debug("Updating directory {0}".format(full_path))
          download_url = self.build_download_url(server_url_prefix,
                                                 subdirectory, self.ARCHIVE_NAME)
          membuffer = self.fetch_url(download_url)
          self.invalidate_directory(full_path)
          self.unpack_archive(membuffer, full_path)
          self.write_hash_sum(full_path, remote_hash)
        # Finally, consider the cache directory up-to-date
        self.uptodate_paths.append(full_path)
    except CachingException, e:
      if self.tolerate_download_failures:
        # ignore
        logger.warn("Error occurred during cache update. "
                    "tolerate_download_failures is set to true, so this "
                    "error is ignored and the current cache is used. "
                    "Error details: {0}".format(str(e)))
      else:
        raise  # we are not tolerant to download failures; command execution will fail
    return full_path

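  # Update protocol sketch, as implied above: for every cached directory the
  # server is expected to publish two sibling files under its "resources"
  # folder -- a ".hash" file holding the current content hash and an
  # "archive.zip" holding the directory contents. The directory is
  # re-downloaded only when the local and remote hashes differ, and it is
  # checked at most once per reset() cycle thanks to uptodate_paths.
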
  def build_download_url(self, server_url_prefix,
                         directory, filename):
    """
    Builds a download URL for a file on the server. Used for downloading
    files from the server.
      directory - relative path
      filename - file inside the directory we are trying to fetch
    """
    return "{0}/{1}/{2}".format(server_url_prefix,
                                urllib.pathname2url(directory), filename)

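  # For example, with hypothetical values:
  #   build_download_url("http://ambari-server:8080/resources",
  #                      "custom_actions", ".hash")
  # would return "http://ambari-server:8080/resources/custom_actions/.hash"
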
  def fetch_url(self, url):
    """
    Fetches the content at url into an in-memory buffer and returns the
    resulting buffer. May raise exceptions for various reasons.
    """
    logger.debug("Trying to download {0}".format(url))
    try:
      memory_buffer = StringIO.StringIO()
      # An empty ProxyHandler dict bypasses any proxies configured in the
      # environment, so the agent always talks to the server directly
      proxy_handler = urllib2.ProxyHandler({})
      opener = urllib2.build_opener(proxy_handler)
      u = opener.open(url, timeout=self.SOCKET_TIMEOUT)
      logger.debug("Connected with {0} with code {1}".format(u.geturl(),
                                                             u.getcode()))
      # Read the response in BLOCK_SIZE chunks until EOF
      buff = u.read(self.BLOCK_SIZE)
      while buff:
        memory_buffer.write(buff)
        buff = u.read(self.BLOCK_SIZE)
      return memory_buffer
    except Exception, err:
      raise CachingException("Can not download file from"
                             " url {0} : {1}".format(url, str(err)))

  def read_hash_sum(self, directory):
    """
    Tries to read a hash sum from a previously generated file. Returns a
    string containing the hash, or None.
    """
    hash_file = os.path.join(directory, self.HASH_SUM_FILE)
    try:
      with open(hash_file) as fh:
        return fh.readline().strip()
    except Exception:
      return None  # We don't care why reading failed

  def write_hash_sum(self, directory, new_hash):
    """
    Writes the given hash sum to a file in the directory. Raises a
    CachingException on failure.
    """
    hash_file = os.path.join(directory, self.HASH_SUM_FILE)
    try:
      with open(hash_file, "w") as fh:
        fh.write(new_hash)
      os.chmod(hash_file, 0o666)
    except Exception, err:
      raise CachingException("Can not write to file {0} : {1}".format(hash_file,
                                                                      str(err)))

  def invalidate_directory(self, directory):
    """
    Recursively removes the directory content (if any). Also creates the
    directory and any parent directories if needed. May raise exceptions
    on permission problems.
    """
    logger.debug("Invalidating directory {0}".format(directory))
    try:
      if os.path.exists(directory):
        if os.path.isfile(directory):  # It would be a strange situation
          os.unlink(directory)
        elif os.path.isdir(directory):
          shutil.rmtree(directory)
      # create the directory itself and any parent directories
      os.makedirs(directory)
    except Exception, err:
      raise CachingException("Can not invalidate cache directory {0}: {1}"
                             .format(directory, str(err)))

  def unpack_archive(self, mem_buffer, target_directory):
    """
    Unpacks contents of the in-memory buffer to the file system.
    The buffer is expected to contain a valid zip archive.
    """
    try:
      zfile = zipfile.ZipFile(mem_buffer)
      for name in zfile.namelist():
        (dirname, filename) = os.path.split(name)
        concrete_dir = os.path.abspath(os.path.join(target_directory, dirname))
        if not os.path.isdir(concrete_dir):
          os.makedirs(concrete_dir)
        logger.debug("Unpacking file {0} to {1}".format(name, concrete_dir))
        if filename != '':
          zfile.extract(name, target_directory)
    except Exception, err:
      raise CachingException("Can not unpack zip file to "
                             "directory {0} : {1}".format(
                             target_directory, str(err)))
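
# A minimal usage sketch, assuming an agent-style config object that answers
# get('agent', 'cache_dir') and get('agent', 'tolerate_download_failures'),
# and a server exposing a "resources" folder; the host name and paths below
# are placeholders, not real endpoints.
if __name__ == '__main__':
  import ConfigParser
  logging.basicConfig(level=logging.DEBUG)
  config = ConfigParser.ConfigParser()
  config.add_section('agent')
  config.set('agent', 'cache_dir', '/tmp/agent_cache')  # placeholder path
  config.set('agent', 'tolerate_download_failures', 'false')
  cache = FileCache(config)
  try:
    # Fetches (or reuses) <cache_dir>/custom_actions from the server
    path = cache.get_custom_actions_base_dir('http://ambari-server:8080/resources')
    print "Custom actions are cached at {0}".format(path)
  except CachingException, err:
    # With tolerate_download_failures set to 'false', cache errors propagate
    print "Cache update failed: {0}".format(str(err))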