diff -r 9374c207cfee -r 9dcc6e7393f7 doc/api/python/symrec-pysrc.html
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/api/python/symrec-pysrc.html	Fri Sep 11 15:39:31 2009 +0100
@@ -0,0 +1,2204 @@
+ 1 #============================================================================
+ 2 #Name : symrec.py
+ 3 #Part of : Helium
+ 4
+ 5 #Copyright (c) 2009 Nokia Corporation and/or its subsidiary(-ies).
+ 6 #All rights reserved.
+ 7 #This component and the accompanying materials are made available
+ 8 #under the terms of the License "Eclipse Public License v1.0"
+ 9 #which accompanies this distribution, and is available
+ 10 #at the URL "http://www.eclipse.org/legal/epl-v10.html".
+ 11 #
+ 12 #Initial Contributors:
+ 13 #Nokia Corporation - initial contribution.
+ 14 #
+ 15 #Contributors:
+ 16 #
+ 17 #Description:
+ 18 #===============================================================================
+ 19
+ 20 """ SYMREC metadata file generation. """
+ 21 import xml.dom.minidom
+ 22 import amara
+ 23 import codecs
+ 24 import os
+ 25 import re
+ 26 import logging
+ 27 import fileutils
+ 28 import csv
+ 29
+ 30 LOGGER = logging.getLogger("symrec")
+ 31
+ 32 def _cleanup_list(input):
+ 33 result = []
+ 34 for chars in input:
+ 35 if chars is not None and chars.strip() != "":
+ 36 result.append(chars)
+ 37 return result
+ 38
+ 39 def xml_setattr(node, attr, value):
+ 40 """ Create the attribute if needed. """
+ 41 if hasattr(node, attr):
+ 42 setattr(node, unicode(attr), unicode(value))
+ 43 else:
+ 44 node.xml_set_attribute(unicode(attr), unicode(value))
+ 45
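
A hedged sketch of how the two helpers above fit together; the element name and attribute values are invented, and the amara 1.x calls mirror the ones used later in this module:

    doc = amara.create_document(u"releaseInformation")
    node = doc.xml_create_element(u'product')
    xml_setattr(node, 'name', 'example_product')    # attribute missing, so it is created
    xml_setattr(node, 'name', 'renamed_product')    # attribute present, so it is updated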
+ 46 class ServicePack(object):
+ 47
+ 48 def __init__(self, node):
+ 49 self.__xml = node
+ 50
+ 51 @property
+ 52 def name(self):
+ 53 return self.__xml.name
+ 54
+ 55 @property
+ 56 def files(self):
+ 57 result = []
+ 58 if hasattr(self.__xml, 'file'):
+ 59 for filen in self.__xml.file:
+ 60 result.append(filen.name)
+ 61 return result
+ 62
+ 63 @property
+ 64 def instructions(self):
+ 65 result = []
+ 66 if hasattr(self.__xml, 'instructions'):
+ 67 for instr in self.__xml.instructions:
+ 68 result.append(instr.name)
+ 69 return result
+ 70
+ 71 class ReleaseMetadata(object):
+ 72 """ Create or read Metadata XML from SYMREC/SYMDEC. """
+ 73
+ 74 def __init__(self, filename, service=None, product=None, release=None):
+ 75 self._filename = filename
+ 76 if os.path.exists(filename):
+ 77 self._xml = amara.parse(open(filename, "r"))
+ 78 if service != None:
+ 79 self.service = service
+ 80 if product != None:
+ 81 self.product = product
+ 82 if release != None:
+ 83 self.release = release
+ 84 elif service!=None and product!=None and release!=None:
+ 85 self._xml = amara.create_document(u"releaseInformation")
+ 86
+ 87 # releaseDetails
+ 88 self._xml.releaseInformation.xml_append(self._xml.xml_create_element(u'releaseDetails'))
+ 89
+ 90 # releaseID
+ 91 self._xml.releaseInformation.releaseDetails.xml_append(self._xml.xml_create_element(u'releaseID'))
+ 92
+ 93 # service
+ 94 serv = self._xml.xml_create_element(u'service')
+ 95 xml_setattr(serv, 'name', unicode(service))
+ 96 self._xml.releaseInformation.releaseDetails.releaseID.xml_append(serv)
+ 97 # product
+ 98 prod = self._xml.xml_create_element(u'product')
+ 99 xml_setattr(prod, 'name', unicode(product))
+100 self._xml.releaseInformation.releaseDetails.releaseID.xml_append(prod)
+101 # release
+102 rel = self._xml.xml_create_element(u'release')
+103 xml_setattr(rel, 'name', unicode(release))
+104 self._xml.releaseInformation.releaseDetails.releaseID.xml_append(rel)
+105
+106 # releaseFiles
+107 self._xml.releaseInformation.xml_append(self._xml.xml_create_element(u'releaseFiles'))
+108
+109 # externalFiles
+110 self._xml.releaseInformation.xml_append(self._xml.xml_create_element(u'externalFiles'))
+111 else:
+112 raise Exception("Error: metadata file doesn't exist.")
+113
+114
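
A hedged sketch of driving the constructor above (paths and release identifiers are invented):

    # create a new metadata skeleton from scratch
    meta = ReleaseMetadata('release_metadata.xml',
                           service='s60', product='5250', release='pdk_3.0.h')
    # or re-open an existing metadata file as-is
    meta = ReleaseMetadata('release_metadata.xml')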
+115 def get_dependsof(self):
+116 """ Return a ReleaseMetadata object pointing to the dependency release. """
+117 if self.dependsof_service != None and self.dependsof_product != None and self.dependsof_release != None:
+118 filename = os.path.join(os.path.dirname(self._filename), "../../..",
+119 self.dependsof_service,
+120 self.dependsof_product,
+121 self.dependsof_release)
+122 return ReleaseMetadata(find_latest_metadata(filename))
+123 else:
+124 return None
+125
+126
+127 def set_dependsof(self, filename):
+128 """ Setting the dependency release. """
+129 metadata = ReleaseMetadata(filename)
+130 self.dependsof_service = metadata.service
+131 self.dependsof_product = metadata.product
+132 self.dependsof_release = metadata.release
+133
+134 - def add_package(self, name, type=None, default=True, filters=None, extract="single", md5checksum=None, size=None):
+135 """ Adding a package to the metadata file. """
+136 # check if update mode
+137 package = None
+138 if hasattr(self._xml.releaseInformation.releaseFiles, 'package'):
+139 for pkg in self._xml.releaseInformation.releaseFiles.package:
+140 if (pkg.name.lower() == os.path.basename(name).lower()):
+141 package = pkg
+142 break
+143
+144 # if not found create new package.
+145 if package is None:
+146 package = self._xml.xml_create_element(u'package')
+147 self._xml.releaseInformation.releaseFiles.xml_append(package)
+148
+149 xml_setattr(package, 'name', os.path.basename(name))
+150 if type != None:
+151 xml_setattr(package, 'type', type)
+152 else:
+153 xml_setattr(package, 'type', os.path.splitext(name)[1].lstrip('.'))
+154 xml_setattr(package, 'default', str(default).lower())
+155 xml_setattr(package, 'extract', extract)
+156 if filters and len(filters)>0:
+157 xml_setattr(package, 'filters', ','.join(filters))
+158 xml_setattr(package, 's60filter', ','.join(filters))
+159 else:
+160 xml_setattr(package, 'filters', '')
+161 xml_setattr(package, 's60filter', '')
+162 if md5checksum != None:
+163 xml_setattr(package, unicode("md5checksum"), unicode(md5checksum))
+164 if size != None:
+165 xml_setattr(package, unicode("size"), unicode(size))
+166
+167
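
A minimal sketch of registering an archive through add_package above; the file name, filter and checksum values are placeholders:

    meta.add_package('binaries_armv5.zip', filters=['rnd'],
                     md5checksum='d41d8cd98f00b204e9800998ecf8427e', size='1048576')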
+168 def keys(self):
+169 keys = []
+170 if hasattr(self._xml, 'releaseInformation') and hasattr(self._xml.releaseInformation, 'releaseFiles') \
+171 and hasattr(self._xml.releaseInformation.releaseFiles, 'package'):
+172 for pkg in self._xml.releaseInformation.releaseFiles.package:
+173 keys.append(pkg.name)
+174 return keys
+175
+176 def __getitem__(self, key):
+177 if not (hasattr(self._xml, 'releaseInformation') and hasattr(self._xml.releaseInformation, 'releaseFiles') \
+178 and hasattr(self._xml.releaseInformation.releaseFiles, 'package')):
+179 raise Exception("Key '%s' not found." % key)
+180 for pkg in self._xml.releaseInformation.releaseFiles.package:
+181 if pkg.name.lower() == key.lower():
+182 filters = []
+183 s60filters = []
+184 md5checksum = None
+185 size = None
+186 if u'filters' in pkg.xml_attributes:
+187 filters = _cleanup_list(pkg.filters.split(','))
+188 if u's60filter' in pkg.xml_attributes:
+189 s60filters = _cleanup_list(pkg.s60filter.split(','))
+190 if u'md5checksum' in pkg.xml_attributes:
+191 md5checksum = pkg.md5checksum
+192 if u'size' in pkg.xml_attributes:
+193 size = pkg.size
+194 return {'type': pkg.type, 'extract': pkg.extract, 'default': (pkg.default=="true"), \
+195 'filters': filters, 's60filter': s60filters, 'md5checksum': md5checksum, 'size': size}
+196 raise Exception("Key '%s' not found." % key)
+197
+198 def __setitem__(self, key, value):
+199 self.add_package(key, value['type'], value['default'], value['filters'], value['extract'], value['md5checksum'], value['size'])
+200
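
keys(), __getitem__ and __setitem__ above give the metadata dictionary-style access to package entries; a sketch with an invented package name:

    info = meta['binaries_armv5.zip']
    info['size'] = u'2097152'
    meta['binaries_armv5.zip'] = info    # routed back through add_package()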
+201 def set_releasedetails_info(self, name, value, details='releaseID'):
+202 """ Generic function to set releaseid info. """
+203 detailsnode = None
+204 if not hasattr(self._xml.releaseInformation.releaseDetails, details):
+205 detailsnode = self._xml.xml_create_element(details)
+206 self._xml.releaseInformation.releaseDetails.xml_append(detailsnode)
+207 else:
+208 detailsnode = self._xml.releaseInformation.releaseDetails.xml_child_elements[details]
+209 namenode = None
+210 if not hasattr(detailsnode, name):
+211 namenode = self._xml.xml_create_element(name, attributes={u'name': unicode(value)})
+212 detailsnode.xml_append(namenode)
+213 else:
+214 namenode = detailsnode.xml_child_elements[name]
+215 namenode.name = value
+216
+217
+218 def get_releasedetails_info(self, name, details='releaseID'):
+219 """ Generic function to extract releaseid info. """
+220 if hasattr(self._xml.releaseInformation.releaseDetails, details):
+221 group = getattr(self._xml.releaseInformation.releaseDetails, details)
+222 if hasattr(group, name) and getattr(getattr(group, name), 'name'):
+223 return getattr(group, name).name
+224 return None
+225
+227 for variant in self._xml.releaseInformation.localeVariants.variant:
+228 if(variant != None):
+229 if variant.name.lower() == variant_name.lower():
+230 return variant.file.name
+231
+232
+236
+237 def save(self, filename=None):
+238 """ Saving the XML into the provided filename. """
+239 if filename == None:
+240 filename = self._filename
+241 file_object = codecs.open(os.path.join(filename), 'w', "utf_8")
+242 file_object.write(self._xml.xml(indent=u"yes"))
+243 file_object.close()
+244
+245 @property
+246 def servicepacks(self):
+247 """ Getting the service packs. """
+248 result = []
+249 if hasattr(self._xml, 'releaseInformation') and hasattr(self._xml.releaseInformation, 'servicePacks'):
+250 for sp in self._xml.releaseInformation.servicePacks.servicePack:
+251 result.append(ServicePack(sp))
+252 return result
+253
+254 filename = property(lambda self:self._filename)
+255 service = property(lambda self:self.get_releasedetails_info('service'), lambda self, value:self.set_releasedetails_info('service', value))
+256 product = property(lambda self:self.get_releasedetails_info('product'), lambda self, value:self.set_releasedetails_info('product', value))
+257 release = property(lambda self:self.get_releasedetails_info('release'), lambda self, value:self.set_releasedetails_info('release', value))
+258 dependsof_service = property(lambda self:self.get_releasedetails_info('service', 'dependsOf'), lambda self, value:self.set_releasedetails_info('service', value, 'dependsOf'))
+259 dependsof_product = property(lambda self:self.get_releasedetails_info('product', 'dependsOf'), lambda self, value:self.set_releasedetails_info('product', value, 'dependsOf'))
+260 dependsof_release = property(lambda self:self.get_releasedetails_info('release', 'dependsOf'), lambda self, value:self.set_releasedetails_info('release', value, 'dependsOf'))
+261 baseline_service = property(lambda self:self.get_releasedetails_info('service', 'previousBaseline'), lambda self, value:self.set_releasedetails_info('service', value, 'previousBaseline'))
+262 baseline_product = property(lambda self:self.get_releasedetails_info('product', 'previousBaseline'), lambda self, value:self.set_releasedetails_info('product', value, 'previousBaseline'))
+263 baseline_release = property(lambda self:self.get_releasedetails_info('release', 'previousBaseline'), lambda self, value:self.set_releasedetails_info('release', value, 'previousBaseline'))
+264
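
The property table above routes plain attribute access to the generic getters and setters; an illustration with invented values:

    meta.release = 'pdk_3.0.h'              # <releaseDetails><releaseID><release name="pdk_3.0.h"/>
    meta.dependsof_release = 'pdk_3.0.g'    # <releaseDetails><dependsOf><release name="pdk_3.0.g"/>
    print meta.release, meta.dependsof_release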
+265
+267 """ Update Metadata XML already created from SYMREC/SYMDEC. """
+271
+273 """ Update each existing package md5checksum and size attribute."""
+274 for name in self.keys():
+275 fullname = os.path.join(self._filepath, name)
+276 if os.path.exists(fullname):
+277 result = self[name]
+278 result['md5checksum'] = unicode(fileutils.getmd5(fullname))
+279 result['size'] = unicode(os.path.getsize(fullname))
+280 self[name] = result
+281
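
fileutils.getmd5 above is a Helium-internal helper; purely as an illustration of the idea, a rough stdlib equivalent could look like this (this is not the code the module uses):

    import hashlib

    def md5_of(path):
        """ Hex MD5 digest of a file, read in chunks. """
        digest = hashlib.md5()
        handle = open(path, 'rb')
        try:
            chunk = handle.read(8192)
            while chunk:
                digest.update(chunk)
                chunk = handle.read(8192)
        finally:
            handle.close()
        return digest.hexdigest()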
+282
+283 class ValidateReleaseMetadata(ReleaseMetadata):
+284 """ This class validates that a metadata file is stored in the correct location
+285 and that all its dependencies exist.
+286 """
+290
+291 def is_valid(self, checkmd5=True):
+292 """ Run the validation mechanism. """
+293 if os.path.basename(self.location) != self.release:
+294 LOGGER.error("Release doesn't match.")
+295 return False
+296 if os.path.basename(os.path.dirname(self.location)) != self.product:
+297 LOGGER.error("Product doesn't match.")
+298 return False
+299 if os.path.basename(os.path.dirname(os.path.dirname(self.location))) != self.service:
+300 LOGGER.error("Service doesn't match.")
+301 return False
+302
+303 for name in self.keys():
+304 path = os.path.join(self.location, name)
+305 if not os.path.exists(path):
+306 LOGGER.error("%s doesn't exist." % path)
+307 return False
+308 try:
+309 LOGGER.debug("Trying to open %s" % path)
+310 content_file = open(path)
+311 content_file.read(1)
+312 except IOError:
+313 LOGGER.error("%s is not available yet" % path)
+314 return False
+315
+316 if checkmd5 and self[name].has_key('md5checksum'):
+317 if self[name]['md5checksum'] != None:
+318 if fileutils.getmd5(path).lower() != self[name]['md5checksum']:
+319 LOGGER.error("%s md5checksum mismatch." % path)
+320 return False
+321
+322 for sp in self.servicepacks:
+323 for name in sp.files:
+324 path = os.path.join(self.location, name)
+325 if not os.path.exists(path):
+326 LOGGER.error("%s doesn't exist." % path)
+327 return False
+328 for name in sp.instructions:
+329 path = os.path.join(self.location, name)
+330 if not os.path.exists(path):
+331 LOGGER.error("%s doesn't exist." % path)
+332 return False
+333
+334 dependency = self.get_dependsof()
+335 if dependency != None:
+336 return ValidateReleaseMetadata(dependency.filename).is_valid(checkmd5)
+337 return True
+338
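
A hedged usage sketch for the validator above; the release path is invented:

    validator = ValidateReleaseMetadata('/releases/s60/5250/pdk_3.0.h/release_metadata.xml')
    if not validator.is_valid(checkmd5=True):
        LOGGER.error("Release area is incomplete or still being published.")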
+340 """ Merge packages definition to the root metadata. """
+341
+342 def __init__(self, metadata):
+343 """ Construct a metadata merger providing root metadata filename. """
+344 self._metadata = ReleaseMetadata(metadata)
+345
+347 """ Merge the content of filename into the root metadata. """
+348 metadata = ReleaseMetadata(filename)
+349 for name in metadata.keys():
+350 if name in self._metadata.keys():
+351 LOGGER.warning('Package %s already declared, overriding previous definition!' % name)
+352 self._metadata[name] = metadata[name]
+353
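
Assuming the merger class above is named MetadataMerger (its class statement was lost in this rendering, so the name is a guess) and using invented file names:

    merger = MetadataMerger('release_metadata.xml')
    merger.merge('release_metadata_fota.xml')    # warns, then overrides duplicate packages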
+357
+361
+363
+364 def __init__(self, filename, includes=None, excludes=None):
+365 ReleaseMetadata.__init__(self, filename)
+366 if includes is None:
+367 includes = []
+368 if excludes is None:
+369 excludes = []
+370 self.location = os.path.dirname(filename)
+371 self.includes = includes
+372 self.excludes = excludes
+373
+374 def archives_to_tdd(self, metadata):
+375 tdd = "\t[\n"
+376 for name in metadata.keys():
+377 path_ = os.path.join(os.path.dirname(metadata.filename), name)
+378 if (((len(self.includes) == 0) and metadata[name]['extract']) or (self.includes in metadata[name]['s60filter'])) and self.excludes not in metadata[name]['s60filter']:
+379 tdd += "\t\t{\n"
+380 tdd += "\t\t\t\"command\": \"unzip_%s\",\n" % metadata[name]['extract']
+381 tdd += "\t\t\t\"src\": \"%s\",\n" % os.path.normpath(path_).replace('\\', '/')
+382 tdd += "\t\t},\n"
+383 tdd += "\t],\n"
+384 return tdd
+385
+387 """ Generating a TDD file that contains a list of lists of unarchiving steps. """
+388 tdd = "[\n"
+389 # generates unarchiving steps for dependency
+390 dependency = self.get_dependsof()
+391 if dependency != None:
+392 tdd += self.archives_to_tdd(dependency)
+393 # generates unarchiving steps
+394 tdd += self.archives_to_tdd(self)
+395 tdd += "]\n"
+396 return tdd
+397
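
The generated text is a Python-literal-like list of lists of unzip steps, one inner list per metadata file; an illustrative fragment with an invented path:

    # [
    #     [
    #         {
    #             "command": "unzip_single",
    #             "src": "/releases/s60/5250/pdk_3.0.g/binaries_armv5.zip",
    #         },
    #     ],
    # ]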
+398
+399
+400 def find_latest_metadata(releasedir):
+401 """ Finding the latest release metadata file. """
+402 metadatas = []
+403 for filename in os.listdir(releasedir):
+404 if re.match(r'^release_metadata(_\d+)?\.xml$', filename, re.I) is not None:
+405 LOGGER.debug("Found %s" % filename)
+406 metadatas.append(filename)
+407 # reverse the order...
+408 metadatas.sort(reverse=True)
+409 if len(metadatas) > 0:
+410 return os.path.normpath(os.path.join(releasedir, metadatas[0]))
+411 return None
+412
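
A sketch of what the function above returns; the directory contents are invented:

    # with release_metadata.xml and release_metadata_001.xml present,
    # the reverse sort makes the highest-numbered candidate win:
    find_latest_metadata('/releases/s60/5250/pdk_3.0.h')
    # -> '/releases/s60/5250/pdk_3.0.h/release_metadata_001.xml'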
+413
+414
+416 """ Cached version of the metadata validation. """
+420
+421 def is_valid(self, checkmd5=True):
+422 """ Check if file is in the local cache.
+423 Add valid release to the cache.
+424 """
+425 metadatas = self.load_cache()
+426 if self.in_cache(metadatas, os.path.normpath(self._filename)):
+427 LOGGER.debug("Release found in cache.")
+428 return self.value_from_cache(metadatas, os.path.normpath(self._filename))
+429 else:
+430 result = ValidateReleaseMetadata.is_valid(self, checkmd5)
+431 LOGGER.debug("Updating the cache.")
+432 metadatas.append([os.path.normpath(self._filename), result])
+433 self.update_cache(metadatas)
+434 return result
+435
+441
+442 def value_from_cache(self, metadatas, key):
+443 for metadata in metadatas:
+444 if metadata[0] == key:
+445 return metadata[1]
+446 return None
+447
+448 def load_cache(self):
+449 metadatas = []
+450 if self.__cachefile is not None and os.path.exists(self.__cachefile):
+451 for row in csv.reader(open(self.__cachefile, "rb")):
+452 if len(row) == 2:
+453 metadatas.append([os.path.normpath(row[0]), row[1].lower() == "true"])
+454 elif len(row) == 1:
+455 # backward compatibility with old cache.
+456 metadatas.append([os.path.normpath(row[0]), True])
+457 return metadatas
+458
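
load_cache above (together with the update_cache counterpart whose body is not shown here) treats the cache as a two-column CSV of normalised metadata path and validation result; an illustrative row with an invented path:

    # /releases/s60/5250/pdk_3.0.h/release_metadata.xml,True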
+463
+480