Package archive :: Module mappers
[hide private]
[frames] | [no frames]

Source Code for Module archive.mappers

  1  #============================================================================  
  2  #Name        : mappers.py  
  3  #Part of     : Helium  
  4   
  5  #Copyright (c) 2009 Nokia Corporation and/or its subsidiary(-ies). 
  6  #All rights reserved. 
  7  #This component and the accompanying materials are made available 
  8  #under the terms of the License "Eclipse Public License v1.0" 
  9  #which accompanies this distribution, and is available 
 10  #at the URL "http://www.eclipse.org/legal/epl-v10.html". 
 11  # 
 12  #Initial Contributors: 
 13  #Nokia Corporation - initial contribution. 
 14  # 
 15  #Contributors: 
 16  # 
 17  #Description: 
 18  #=============================================================================== 
 19   
 20  """ Archive mappers that map how the input files are divided into archives. 
 21   
 22   
 23  """ 
 24   
 25  import buildtools 
 26  import os 
 27  import sys 
 28  import codecs 
 29  import fileutils 
 30  import logging 
 31  import symrec 
 32  import re 
 33  import csv 
 34  import shutil 
 35   
# Module-level logger; the level is pinned to INFO regardless of the
# root logging configuration.
_logger = logging.getLogger('logger.mappers')
_logger.setLevel(logging.INFO)

# Default value for missing/invalid policy files.
MISSING_POLICY = "9999"
 42   
class Mapper(object):
    """ Mapper Abstract class. Any custom implementation must derive it!.

    It handles metadata creation: when 'grace.metadata' is enabled, a
    '<name>.metadata.xml' release metadata file is created (optionally
    seeded from a 'grace.template' file) under 'archives.dir'.
    """

    def __init__(self, config, tool):
        # type: (object, object) -> None
        """ Store the config and archiver tool, create 'archives.dir' if
        missing, and optionally initialize the grace metadata file.

        config -- project configuration object (dict-like, with has_key /
            get_boolean / get_list helpers -- project type, not a plain dict).
        tool -- archiver tool providing create_command() and extension().
        """
        self._tool = tool
        self._config = config
        self._metadata = None
        if not os.path.exists(self._config['archives.dir']):
            os.makedirs(self._config['archives.dir'])
        if self._config.has_key("grace.metadata") and self._config.get_boolean("grace.metadata", False):
            # Seed the metadata file from the template, but only when no
            # metadata file exists yet in the archive output directory.
            if self._config.has_key("grace.template") and os.path.exists(self._config["grace.template"]) and \
                not os.path.exists(os.path.join(self._config['archives.dir'], self._config['name'] + ".metadata.xml")):
                shutil.copy(config["grace.template"], os.path.join(self._config['archives.dir'], self._config['name'] + ".metadata.xml"))
            self._metadata = symrec.ReleaseMetadata(os.path.join(self._config['archives.dir'], self._config['name']+ ".metadata.xml"),
                                                    service=self._config['grace.service'],
                                                    product=self._config['grace.product'],
                                                    release=self._config['grace.release'])
            self._metadata.save()

    def declare_package(self, filename, extract="single"):
        """ Add a package to the metadata file.

        No-op when grace metadata is disabled. Only the basename of
        'filename' is recorded.
        """
        if self._metadata is None:
            return
        self._metadata.add_package(os.path.basename(filename), extract=extract, filters=self._config.get_list('grace.filters', None), default=self._config.get_boolean('grace.default', True))
        self._metadata.save()

    def create_commands(self, manifest):
        """ Return a list of command list.

        Default behaviour: one stage containing a single archiver command
        covering the whole manifest.
        """
        return [[self._tool.create_command(self._config['name'], manifests=[manifest])]]
class DefaultMapper(Mapper):
    """ The default mapper. It splits the content based on size characteristics.

    The 'max.files.per.archive' and 'max.uncompressed.size' properties define
    how the input files are split between a number of part zips.
    """

    def __init__(self, config, archiver):
        """ Initialization. """
        Mapper.__init__(self, config, archiver)

    def create_commands(self, manifest):
        """ Return a list of command lists.

        The input manifest is split into part manifests (see
        _split_manifest_file) and one archiver command is generated per part.
        """
        result = []

        _logger.info(" * Input manifest: " + manifest)
        manifests = self._split_manifest_file(self._config['name'], manifest)
        if not os.path.exists(self._config['archives.dir']):
            _logger.info(" * Mkdir " + self._config['archives.dir'])
            os.makedirs(self._config['archives.dir'])

        for manifest in manifests:
            _logger.info(" * Creating command for manifest: " + manifest)
            filename = os.path.join(self._config['archives.dir'], os.path.splitext(os.path.basename(manifest))[0])
            if len(manifests) == 1:
                # Single part: drop the "_partNN" suffix and use the plain name.
                filename = os.path.join(self._config['archives.dir'], self._config['name'])
            _logger.info(" * " + filename + self._tool.extension())
            self.declare_package(filename + self._tool.extension(), self._config.get('grace.extract', 'single'))
            result.extend(self._tool.create_command(self._config.get('zip.root.dir', self._config['root.dir']), filename, manifests=[manifest]))

        return [result]

    def _split_manifest_file(self, name, manifest_file_path):
        """ Return a list of manifest files describing the content of each zip part.

        When neither 'max.files.per.archive' nor 'max.uncompressed.size' is
        configured, the original manifest is returned as-is. Otherwise the
        manifest entries (paths relative to the zip root dir) are distributed
        into '<name>_partNN.txt' files under 'temp.build.dir' so that no part
        exceeds either limit. Empty directories count against the file limit
        only, as they contribute no size.
        """
        filenames = []

        if (self._config.has_key('max.files.per.archive') or self._config.has_key('max.uncompressed.size')):
            size = 0
            files = 0
            part = 0
            filename = ""
            output = None

            # NOTE(review): when 'root.dir' is missing, no part file is
            # produced and an empty list is returned -- confirm callers
            # expect "nothing to archive" in that case.
            if os.path.exists(self._config['root.dir']) and os.path.isdir(self._config['root.dir']):
                curdir = os.path.abspath(os.curdir)
                os.chdir(self._config.get('zip.root.dir', self._config['root.dir']))
                # Fix: restore the working directory even if splitting fails;
                # previously an exception left the process chdir'ed into the
                # zip root.
                try:
                    maxfiles = self._config.get('max.files.per.archive', 100000000)
                    _logger.info("Max number of files per archive: " + str(maxfiles))
                    max_uncompressed_size = self._config.get('max.uncompressed.size', 100000000)
                    _logger.info("Max uncompressed size per archive: " + str(max_uncompressed_size))

                    file_handle = codecs.open(manifest_file_path, "r", "utf-8")
                    try:
                        for line in file_handle.readlines():
                            line = line.rstrip()

                            if os.path.isfile(line):
                                # Start a new part when either limit would be exceeded.
                                if part == 0 or files == int(maxfiles) or size + os.path.getsize(line) >= int(max_uncompressed_size):
                                    if output != None:
                                        output.close()

                                    size = 0
                                    files = 0
                                    part += 1

                                    filename = "%s_part%02d" % (name, part)
                                    filenames.append(os.path.join(self._config['temp.build.dir'], filename + ".txt"))

                                    output = codecs.open(os.path.join(self._config['temp.build.dir'], filename + ".txt"), 'w', "utf-8")

                                files += 1
                                size += os.path.getsize(line)

                                output.write(u"".join([line, u'\n']))
                            elif os.path.isdir(line):
                                # Only empty directories are recorded explicitly;
                                # non-empty ones are covered by their files.
                                if (len(os.listdir(line)) == 0):
                                    if part == 0 or files == int(maxfiles):
                                        if output != None:
                                            output.close()

                                        size = 0
                                        files = 0
                                        part += 1

                                        filename = "%s_part%02d" % (name, part)
                                        filenames.append(os.path.join(self._config['temp.build.dir'], filename + ".txt"))

                                        # Fix: open with codecs/UTF-8 like the file
                                        # branch above; a plain open() raised
                                        # UnicodeEncodeError when writing the
                                        # unicode line for non-ASCII dir names.
                                        output = codecs.open(os.path.join(self._config['temp.build.dir'], filename + ".txt"), 'w', "utf-8")

                                    files += 1

                                    output.write(u"".join([line, u'\n']))
                            else:
                                _logger.warning('Not recognized as file or directory: %s' % line)

                        if output != None:
                            output.close()
                    finally:
                        file_handle.close()
                finally:
                    os.chdir(curdir)
        else:
            filenames.append(manifest_file_path)

        return filenames
class PolicyMapper(Mapper):
    """ Implements a policy content mapper.

    It transforms a list of files into a list of commands with their inputs.
    All files with policy 0 will be under the main archive.
    All other files will get backed up by policy and then store into an second archive.
    """

    def __init__(self, config, archiver):
        """ Initialization. """
        Mapper.__init__(self, config, archiver)
        self._policies = {}      # policy value -> open manifest file handle
        self._policy_cache = {}  # normalized directory -> policy value
        self._binary = {}        # policy id -> action column value (lowercased)
        # Load csv
        if self._config.has_key('policy.csv'):
            if os.path.exists(self._config['policy.csv']):
                self.load_policy_binary(self._config['policy.csv'])
            else:
                _logger.error("POLICY_ERROR: File not found '%s'." % self._config['policy.csv'])

    def load_policy_binary(self, csvfile, column=1):
        """ Loads the binary IDs from the CSV file.

        csvfile -- path to the policy CSV file.
        column -- index of the action column read for each accepted policy row.
        """
        _logger.info("POLICY_INFO: Loading policy definition '%s'." % csvfile)
        reader = csv.reader(open(csvfile, "rU"))
        for row in reader:
            # Accept purely numeric ids or "0842xxx"-style ids; discard the rest.
            if re.match(r"^((?:\d+)|(?:0842[0-9a-zA-Z]{3}))$", row[0].strip()):
                _logger.info("POLICY_INFO: Adding policy: '%s' => '%s'" % (row[0].strip(), row[column].strip().lower()))
                self._binary[row[0].strip()] = row[column].strip().lower()
            else:
                _logger.warning("POLICY_WARNING: Discarding policy: '%s'." % row[0].strip())

    def zip2zip(self):
        """ Should the non public zip be zipped up under a specific zip. """
        return self._config.get_boolean('policy.zip2zip', False)

    def create_commands(self, manifest):
        """ Generates a list of build commands.

        Stage 1 creates one archive per policy value found in the manifest;
        when 'policy.zip2zip' is set, a later stage packs all non-"0"
        archives into a single internal archive and deletes the originals.
        """
        result = []
        stages = []

        # Create the archive output directory
        if not os.path.exists(self._config['archives.dir']):
            _logger.info(" * Mkdir " + self._config['archives.dir'])
            os.makedirs(self._config['archives.dir'])

        # Sort the manifest content, splitting it by policy
        file_handle = codecs.open(manifest, "r", "utf-8")
        for line in file_handle.readlines():
            line = line.rstrip()
            self._sort_by_policy(line)
        file_handle.close()

        # Generating sublists.
        for key in self._policies.keys():
            self._policies[key].close()
            manifest = os.path.join(self._config['temp.build.dir'], self._config['name'] + "_%s" % key + ".txt")
            filename = os.path.join(self._config['archives.dir'], self._config['name'] + "_%s" % key)
            _logger.info(" * " + filename + self._tool.extension())
            result.extend(self._tool.create_command(self._config.get('zip.root.dir', self._config['root.dir']), filename, manifests=[manifest]))
        stages.append(result)

        # See if any internal archives need to be created
        content = []
        for key in self._policies.keys():
            if not self.zip2zip():
                self.declare_package(self._config['name'] + "_%s" % key + self._tool.extension())
            else:
                if key != "0":
                    # Non-public archives get packed into the internal zip.
                    content.append(os.path.join(self._config['archives.dir'], self._config['name'] + "_%s" % key + self._tool.extension()))
                else:
                    self.declare_package(self._config['name'] + "_%s" % key + self._tool.extension())

        # Creating zip that contains each policy zips.
        if self.zip2zip() and len(content) > 0:
            manifest = os.path.join(self._config['temp.build.dir'], self._config['name'] + ".internal.txt")
            file_handle = codecs.open( manifest, "w+", "utf-8" )
            file_handle.write(u"\n".join(content))
            file_handle.close()
            internal = "internal"
            if self._config.has_key('policy.internal.name'):
                internal = self._config['policy.internal.name']
            filename = os.path.join(self._config['archives.dir'], self._config['name'] + "_" + internal)
            _logger.info(" * " + filename + self._tool.extension())
            self.declare_package(filename + self._tool.extension(), "double")
            stages.append(self._tool.create_command(self._config['archives.dir'], filename, manifests=[manifest]))

            # Final stage: remove the per-policy archives that were packed up.
            cmds = []
            for filename in content:
                cmds.append(buildtools.Delete(filename=filename))
            stages.append(cmds)
        return stages

    def get_dir_policy(self, dirname):
        """ Get policy value for a specific directory.

        Results are cached per directory. Falls back to
        'policy.default.value' (or MISSING_POLICY) when no policy file is
        found or its content cannot be read.
        """
        dirname = os.path.normpath(dirname)
        if not self._policy_cache.has_key(dirname):
            policyfile = None
            for name in self.get_policy_filenames():
                if sys.platform != 'win32':
                    # Case-insensitive match on non-Windows platforms.
                    # NOTE(review): this break only exits the inner loop, so a
                    # later candidate name can overwrite policyfile -- confirm
                    # whether last-match-wins is intended here.
                    for filename in os.listdir(dirname):
                        if filename.lower() == name.lower():
                            policyfile = os.path.join(dirname, filename)
                            break
                elif os.path.exists(os.path.join(dirname, name)):
                    policyfile = os.path.join(dirname, name)
                    break

            value = self._config.get('policy.default.value', MISSING_POLICY)
            if policyfile != None:
                try:
                    value = fileutils.read_policy_content(policyfile)
                    if value not in self._binary.keys():
                        _logger.error("POLICY_ERROR: policy file found %s but policy %s value not exists in csv" % (policyfile, value))
                except Exception, exc:
                    _logger.error("POLICY_ERROR: %s" % exc)
                    value = self._config.get('policy.default.value', MISSING_POLICY)
            else:
                _logger.error("POLICY_ERROR: could not find a policy file under: '%s'" % dirname)
            # saving the policy value for that directory.
            self._policy_cache[dirname] = value
        return self._policy_cache[dirname]

    def get_policy_filenames(self):
        """ Returns the list of potential policy filenames. """
        return self._config.get_list('policy.filenames', ['Distribution.policy.s60'])

    def _sort_by_policy(self, filename):
        """ Store the input file sorted by its policy number.

        Opens (and keeps open) one manifest file per policy value; the
        handles are closed later by create_commands.
        """
        path = os.path.join(self._config['root.dir'], filename)
        parentdir = os.path.dirname(path)
        if os.path.isdir(path):
            parentdir = path
        value = self.get_dir_policy(parentdir)
        if not value in self._policies:
            self._policies[value] = codecs.open( os.path.join(self._config['temp.build.dir'], self._config['name'] + "_%s" % value + ".txt"), "w+", "utf-8" )
        self._policies[value].write(u"%s\n" % filename)
class PolicyRemoverMapper(PolicyMapper):
    """ This class implements a variant of the policy mapper.

    It removes the internal source. Only binary flagged content is kept.
    """

    def __init__(self, config, archiver):
        """ Initialization. """
        PolicyMapper.__init__(self, config, archiver)
        self._rm_policy_cache = {}  # normalized directory -> policy value

    def get_policy_root_dir(self):
        """ Return the policy.root.dir or root.dir if not set or not under root.dir."""
        if not self._config.has_key("policy.root.dir"):
            return os.path.normpath(self._config['root.dir'])
        else:
            # Only honour policy.root.dir when it is located under root.dir.
            if fileutils.destinsrc(self._config['root.dir'], self._config['policy.root.dir']):
                return os.path.normpath(self._config['policy.root.dir'])
            else:
                return os.path.normpath(self._config['root.dir'])

    def get_rmdir_policy(self, dirname):
        """ check if the directory should be dropped or not"""
        dirname = os.path.normpath(dirname)
        # check if parent is banned...
        prootdir = os.path.normpath(self.get_policy_root_dir())
        rootdir = os.path.normpath(self._config['root.dir'])
        if sys.platform == 'win32':
            # Lowercasing makes the cache key case-insensitive on Windows.
            dirname = dirname.lower()
            prootdir = prootdir.lower()
            rootdir = rootdir.lower()
        # NOTE(review): prootdir and rootdir are computed but never used below
        # -- looks like leftover from an earlier "parent is banned" check;
        # confirm before removing.

        # else get real value...
        if not self._rm_policy_cache.has_key(dirname):
            self._rm_policy_cache[dirname] = self.get_dir_policy(dirname)

        return self._rm_policy_cache[dirname]

    def create_commands(self, manifest):
        """ Generates a list of build commands.

        Extends PolicyMapper.create_commands with a final stage that deletes
        every file whose directory policy is not flagged "yes"/"bin" in the
        CSV action column.
        """
        stages = PolicyMapper.create_commands(self, manifest)

        if not self._config.has_key('policy.csv'):
            _logger.error("POLICY_ERROR: Property 'policy.csv' not defined everything will get removed.")
        cmds = []
        file_handle = codecs.open( manifest, "r", "utf-8" )
        for line in file_handle.readlines():
            line = line.rstrip()
            filepath = os.path.normpath(os.path.join(self._config.get('zip.root.dir', self._config['root.dir']), line))
            value = self.get_rmdir_policy(os.path.dirname(filepath))
            delete = True
            if value in self._binary.keys():
                if self._binary[value] == "yes":
                    _logger.info("POLICY_INFO: Keeping %s (%s=>yes)!" % (filepath, value))
                    delete = False
                elif self._binary[value] == "bin":
                    _logger.info("POLICY_INFO: Keeping %s (%s=>bin)!" % (filepath, value))
                    delete = False
            else:
                _logger.error("POLICY_ERROR: %s value for %s not in csv file. Will be removed!!" % (value, filepath))

            if delete:
                _logger.info("POLICY_INFO: File %s will be removed!" % filepath)
                cmds.append(buildtools.Delete(filename=filepath))
        file_handle.close()
        if len(cmds) > 0:
            stages.append(cmds)
        return stages
class SFPolicyRemoverMapper(PolicyRemoverMapper):
    """ Policy remover that takes its actions from the SFL column of the CSV. """

    def __init__(self, config, archiver):
        """ Delegate construction to the generic policy remover. """
        super(SFPolicyRemoverMapper, self).__init__(config, archiver)

    def load_policy_binary(self, csvfile):
        """ Load the per-policy actions from the 3rd CSV column. """
        _logger.info("POLICY_INFO: Loading actions from the 3rd column")
        super(SFPolicyRemoverMapper, self).load_policy_binary(csvfile, column=3)
class EPLPolicyRemoverMapper(PolicyRemoverMapper):
    """ Policy remover that takes its actions from the EPL column of the CSV. """

    def __init__(self, config, archiver):
        """ Delegate construction to the generic policy remover. """
        super(EPLPolicyRemoverMapper, self).__init__(config, archiver)

    def load_policy_binary(self, csvfile):
        """ Load the per-policy actions from the 4th CSV column. """
        _logger.info("POLICY_INFO: Loading actions from the 4th column")
        super(EPLPolicyRemoverMapper, self).load_policy_binary(csvfile, column=4)


# Registry of available mappers, keyed by their string ids.
MAPPERS = {
    'default': DefaultMapper,
    'policy': PolicyMapper,
    'policy.remover': PolicyRemoverMapper,
    'sfl.policy.remover': SFPolicyRemoverMapper,
    'epl.policy.remover': EPLPolicyRemoverMapper,
}
def get_mapper(name, config, archiver):
    """ Get mapper instance from its string id.

    Raises an Exception when no mapper is registered under 'name'.
    """
    try:
        mapper_class = MAPPERS[name]
    except KeyError:
        raise Exception("ERROR: Could not find mapper '%s'." % name)
    return mapper_class(config, archiver)