587
|
1 |
#============================================================================
|
|
2 |
#Name : symrec.py
|
|
3 |
#Part of : Helium
|
|
4 |
|
|
5 |
#Copyright (c) 2009 Nokia Corporation and/or its subsidiary(-ies).
|
|
6 |
#All rights reserved.
|
|
7 |
#This component and the accompanying materials are made available
|
|
8 |
#under the terms of the License "Eclipse Public License v1.0"
|
|
9 |
#which accompanies this distribution, and is available
|
|
10 |
#at the URL "http://www.eclipse.org/legal/epl-v10.html".
|
|
11 |
#
|
|
12 |
#Initial Contributors:
|
|
13 |
#Nokia Corporation - initial contribution.
|
|
14 |
#
|
|
15 |
#Contributors:
|
|
16 |
#
|
|
17 |
#Description:
|
|
18 |
#===============================================================================
|
|
19 |
|
|
20 |
""" SYMREC metadata file generation. """
|
|
21 |
import xml.dom.minidom
|
|
22 |
import codecs
|
|
23 |
import os
|
|
24 |
import re
|
|
25 |
import logging
|
|
26 |
import fileutils
|
|
27 |
import csv
|
|
28 |
|
|
29 |
# Module-level logger for this file.
LOGGER = logging.getLogger("symrec")
# NOTE(review): basicConfig() at import time configures the process-wide root
# logger; confirm this side effect is intended when symrec is used as a library.
logging.basicConfig(level=logging.INFO)
|
|
31 |
|
628
|
32 |
def _cleanup_list(input_):
|
588
|
33 |
"""cleanup list"""
|
587
|
34 |
result = []
|
628
|
35 |
for chars in input_:
|
587
|
36 |
if chars is not None and chars.strip() != "":
|
|
37 |
result.append(chars)
|
|
38 |
return result
|
|
39 |
|
|
40 |
def xml_setattr(node, attr, value):
    """ Create the attribute if needed. """
    # minidom's setAttribute creates the attribute when it is missing and
    # overwrites it otherwise, so a plain delegation is sufficient here.
    node.setAttribute(attr, value)
|
|
43 |
|
|
44 |
def is_child_text_only(node):
    """ Return True when every child of *node* is a TEXT_NODE
    (vacuously True for a node without children). """
    text_type = xml.dom.minidom.Node.TEXT_NODE
    return all(child.nodeType == text_type for child in node.childNodes)
|
|
50 |
|
|
51 |
|
|
52 |
def ignore_whitespace_writexml(self, writer, indent="", addindent="", newl=""):
    """ This version of writexml will ignore whitespace text to always render
        the output in a structured way.
        indent = current indentation
        addindent = indentation to add to higher levels
        newl = newline string
    """
    # pylint: disable=W0212
    # Opening tag.
    writer.write(indent + "<" + self.tagName)

    # Emit attributes sorted by name so the output is deterministic.
    # NOTE(review): keys() followed by .sort() only works on Python 2
    # (dict views in Python 3 have no sort()).
    attrs = self._get_attributes()
    a_names = attrs.keys()
    a_names.sort()

    for a_name in a_names:
        writer.write(" %s=\"" % a_name)
        # _write_data is minidom's private escaping helper for text/attributes.
        xml.dom.minidom._write_data(writer, attrs[a_name].value)
        writer.write("\"")
    if self.childNodes:
        writer.write(">")
        if is_child_text_only(self):
            # Text-only content stays on a single line: <tag>text</tag>
            for node in self.childNodes:
                node.writexml(writer, '', '', '')
            writer.write("</%s>%s" % (self.tagName, newl))
        else:
            writer.write(newl)
            for node in self.childNodes:
                # Skip whitespace-only text nodes so re-serializing a
                # pretty-printed document does not accumulate blank lines.
                if node.nodeType == xml.dom.minidom.Node.TEXT_NODE and node.data.isspace():
                    pass
                else:
                    node.writexml(writer, indent + addindent, addindent, newl)
            writer.write("%s</%s>%s" % (indent, self.tagName, newl))
    else:
        # No children: self-closing element.
        writer.write("/>%s" % (newl))
|
|
86 |
|
|
87 |
# Monkey-patch minidom so every Element is serialized with the
# whitespace-ignoring writer defined above (affects the whole process).
xml.dom.minidom.Element.writexml = ignore_whitespace_writexml
|
|
88 |
|
|
89 |
|
|
90 |
class ServicePack(object):
    """ Read-only wrapper around a <servicePack> DOM element. """

    def __init__(self, node):
        self.__xml = node

    @property
    def name(self):
        """ Value of the service pack's 'name' attribute. """
        return self.__xml.getAttribute('name')

    @property
    def files(self):
        """ 'name' attribute of every <file> element under this node. """
        return [elem.getAttribute('name')
                for elem in self.__xml.getElementsByTagName('file')]

    @property
    def instructions(self):
        """ 'name' attribute of every <instructions> element under this node. """
        return [elem.getAttribute('name')
                for elem in self.__xml.getElementsByTagName('instructions')]
|
|
115 |
|
628
|
116 |
|
587
|
117 |
class ReleaseMetadata(object):
    """ Create or read Metadata XML from SYMREC/SYMDEC.

    When *filename* exists on disk, the metadata is parsed from it and the
    optional service/product/release arguments override the stored values.
    When it does not exist, all three of service, product and release must
    be supplied and a fresh document skeleton is built in memory.
    Otherwise an IOError is raised.
    """

    def __init__(self, filename, service=None, product=None, release=None):
        self._filename = filename
        if filename and os.path.exists(filename):
            # Load an existing metadata file.
            self._xml = xml.dom.minidom.parse(open(filename, "r"))
            releaseInformation = self._xml.getElementsByTagName(u"releaseInformation")
            if releaseInformation != []:
                self._releaseInformation = releaseInformation[0]
            else:
                # NOTE(review): the element created here is NOT appended to
                # the document - confirm whether that is intended.
                self._releaseInformation = self._xml.createElement(u"releaseInformation")
            releaseDetails = self._xml.getElementsByTagName(u'releaseDetails')
            if releaseDetails != []:
                self._releaseDetails = releaseDetails[0]
            else:
                self._releaseDetails = self._xml.createElement(u'releaseDetails')
            releaseFiles = self._xml.getElementsByTagName(u'releaseFiles')
            if releaseFiles != []:
                self._releaseFiles = releaseFiles[0]
            else:
                self._releaseFiles = self._xml.createElement(u'releaseFiles')

            # Optional overrides of the identification triplet
            # (go through the property setters below).
            if service != None:
                self.service = service
            if product != None:
                self.product = product
            if release != None:
                self.release = release
        elif service != None and product != None and release != None:
            # No file on disk: build a new document skeleton.
            self._xml = xml.dom.minidom.Document()
            self._releaseInformation = self._xml.createElement(u"releaseInformation")
            self._xml.appendChild(self._releaseInformation)
            self._releaseDetails = self._xml.createElement(u'releaseDetails')
            self._releaseInformation.appendChild(self._releaseDetails)
            releaseID = self._xml.createElement(u'releaseID')
            self._releaseDetails.appendChild(releaseID)

            # service
            serv = self._xml.createElement(u'service')
            xml_setattr(serv, 'name', unicode(service))
            releaseID.appendChild(serv)
            # product
            prod = self._xml.createElement(u'product')
            xml_setattr(prod, 'name', unicode(product))
            releaseID.appendChild(prod)
            # release
            rel = self._xml.createElement(u'release')
            xml_setattr(rel, 'name', unicode(release))
            releaseID.appendChild(rel)

            # releaseFiles
            self._releaseFiles = self._xml.createElement(u'releaseFiles')
            self._releaseInformation.appendChild(self._releaseFiles)

            # externalFiles placeholder element.
            self._releaseInformation.appendChild(self._xml.createElement(u'externalFiles'))
        else:
            raise IOError("Error metadata file doesn't exists.")

    def get_dependsof(self):
        """ Return a ReleaseMetada object pointing to the dependency release. """
        if self.dependsof_service != None and self.dependsof_product != None and self.dependsof_release != None:
            # The dependency lives three levels up: <root>/service/product/release.
            filename = os.path.join(os.path.dirname(self._filename), "../../..",
                                    self.dependsof_service,
                                    self.dependsof_product,
                                    self.dependsof_release)
            return ReleaseMetadata(find_latest_metadata(filename))
        else:
            return None

    def set_dependsof(self, filename):
        """ Setting the dependency release. """
        # Copy the identification triplet from the other metadata file.
        metadata = ReleaseMetadata(filename)
        self.dependsof_service = metadata.service
        self.dependsof_product = metadata.product
        self.dependsof_release = metadata.release

    def add_package(self, name, type_=None, default=True, filters=None, extract="single", md5checksum=None, size=None):
        """ Adding a package to the metadata file.

        If a <package> with the same basename already exists (case
        insensitive) it is updated in place, otherwise a new element is
        appended under <releaseFiles>.  When type_ is None the file
        extension of *name* is used as the type.
        """
        # check if update mode
        package = None

        for pkg in self._xml.getElementsByTagName('package'):
            if (pkg.getAttribute('name').lower() == os.path.basename(name).lower()):
                package = pkg
                break

        # if not found create new package.
        if package is None:
            package = self._xml.createElement(u'package')
            self._releaseFiles.appendChild(package)

        xml_setattr(package, 'name', os.path.basename(name))
        if type_ != None:
            xml_setattr(package, 'type', type_)
        else:
            # Derive the type from the file extension (without the dot).
            xml_setattr(package, 'type', os.path.splitext(name)[1].lstrip('.'))
        xml_setattr(package, 'default', str(default).lower())
        xml_setattr(package, 'extract', extract)
        if filters and len(filters)>0:
            # 'filters' and 's60filter' always carry the same value.
            xml_setattr(package, 'filters', ','.join(filters))
            xml_setattr(package, 's60filter', ','.join(filters))
        else:
            xml_setattr(package, 'filters', '')
            xml_setattr(package, 's60filter', '')
        if md5checksum != None:
            xml_setattr(package, unicode("md5checksum"), unicode(md5checksum))
        if size != None:
            xml_setattr(package, unicode("size"), unicode(size))

    def keys(self):
        """ Return the names of all packages under <releaseFiles>. """
        keys = []
        for pkg in self._releaseFiles.getElementsByTagName('package'):
            keys.append(pkg.getAttribute('name'))
        return keys

    def __getitem__(self, key):
        # Dict-style access: return the attributes of the package whose name
        # matches *key* (case insensitive); raise KeyError when absent.
        for pkg in self._releaseFiles.getElementsByTagName('package'):
            if pkg.getAttribute('name').lower() == key.lower():
                filters = []
                s60filters = []
                md5checksum = None
                size = None
                if pkg.hasAttribute(u'filters'):
                    filters = _cleanup_list(pkg.getAttribute('filters').split(','))
                if pkg.hasAttribute(u's60filter'):
                    s60filters = _cleanup_list(pkg.getAttribute('s60filter').split(','))
                if pkg.hasAttribute(u'md5checksum'):
                    md5checksum = pkg.getAttribute('md5checksum')
                if pkg.hasAttribute(u'size'):
                    size = pkg.getAttribute('size')
                return {'type': pkg.getAttribute('type'), 'extract': pkg.getAttribute('extract'), 'default': (pkg.getAttribute('default')=="true"), \
                        'filters': filters, 's60filter': s60filters, 'md5checksum': md5checksum, 'size': size}
        raise KeyError("Key '%s' not found." % key)

    def __setitem__(self, key, value):
        # Dict-style write: *value* must provide all the keys returned by
        # __getitem__.
        self.add_package(key, value['type'], value['default'], value['filters'], value['extract'], value['md5checksum'], value['size'])

    def set_releasedetails_info(self, name, value, details="releaseID"):
        """ Generic function to set releaseid info.

        Creates <details>/<name name="value"/> under <releaseDetails>,
        reusing existing elements when present.
        """
        detailsnode = None
        if self._releaseDetails.getElementsByTagName(details) == []:
            detailsnode = self._xml.createElement(details)
            self._releaseDetails.appendChild(detailsnode)
        else:
            detailsnode = self._releaseDetails.getElementsByTagName(details)[0]
        namenode = None
        if detailsnode.getElementsByTagName(name) == []:
            namenode = self._xml.createElement(name)
            namenode.setAttribute(u'name', unicode(value))
            detailsnode.appendChild(namenode)
        else:
            namenode = detailsnode.getElementsByTagName(name)[0]
            namenode.setAttribute('name', value)


    def get_releasedetails_info(self, name, details="releaseID"):
        """ Generic function to extract releaseid info.

        Returns the 'name' attribute of the first <details>/<name> element,
        or None when not present.
        """
        for group in self._releaseDetails.getElementsByTagName(details):
            for i in group.getElementsByTagName(name):
                return i.getAttribute('name')
        return None

    def getVariantPackage(self, variant_name):
        """ Return the filename of the first <file> under the <variant>
        whose name matches *variant_name* (case insensitive), or None. """
        for variant in self._xml.getElementsByTagName('variant'):
            if variant.getAttribute('name').lower() == variant_name.lower():
                for xxx in variant.getElementsByTagName('file'):
                    return xxx.getAttribute('name')

    def xml(self):
        """ Returning the XML as a string. """
        return self._xml.toprettyxml()

    def save(self, filename = None):
        """ Saving the XML into the provided filename.

        Defaults to the filename the metadata was loaded from; always
        written as UTF-8.
        """
        if filename == None:
            filename = self._filename
        file_object = codecs.open(os.path.join(filename), 'w', "utf_8")
        file_object.write(self.xml())
        file_object.close()

    @property
    def servicepacks(self):
        """ Getting the service pack names. """
        result = []
        for spack in self._releaseInformation.getElementsByTagName('servicePack'):
            result.append(ServicePack(spack))
        return result

    # Convenience accessors built on get/set_releasedetails_info: the
    # releaseID triplet, the dependsOf triplet and the previousBaseline
    # triplet all share the same storage scheme.
    filename = property(lambda self:self._filename)
    service = property(lambda self:self.get_releasedetails_info('service'), lambda self, value:self.set_releasedetails_info('service', value))
    product = property(lambda self:self.get_releasedetails_info('product'), lambda self, value:self.set_releasedetails_info('product', value))
    release = property(lambda self:self.get_releasedetails_info('release'), lambda self, value:self.set_releasedetails_info('release', value))
    dependsof_service = property(lambda self:self.get_releasedetails_info('service', 'dependsOf'), lambda self, value:self.set_releasedetails_info('service', value, 'dependsOf'))
    dependsof_product = property(lambda self:self.get_releasedetails_info('product', 'dependsOf'), lambda self, value:self.set_releasedetails_info('product', value, 'dependsOf'))
    dependsof_release = property(lambda self:self.get_releasedetails_info('release', 'dependsOf'), lambda self, value:self.set_releasedetails_info('release', value, 'dependsOf'))
    baseline_service = property(lambda self:self.get_releasedetails_info('service', 'previousBaseline'), lambda self, value:self.set_releasedetails_info('service', value, 'previousBaseline'))
    baseline_product = property(lambda self:self.get_releasedetails_info('product', 'previousBaseline'), lambda self, value:self.set_releasedetails_info('product', value, 'previousBaseline'))
    baseline_release = property(lambda self:self.get_releasedetails_info('release', 'previousBaseline'), lambda self, value:self.set_releasedetails_info('release', value, 'previousBaseline'))
|
|
319 |
|
|
320 |
|
|
321 |
class MD5Updater(ReleaseMetadata):
    """ Update Metadata XML already created from SYMREC/SYMDEC. """

    def __init__(self, filename):
        ReleaseMetadata.__init__(self, filename)
        # Directory containing the metadata file; package names are
        # resolved relative to it.
        self._filepath = os.path.dirname(filename)

    def update(self):
        """ Update each existing package md5checksum and size attribute."""
        for pkg_name in self.keys():
            pkg_path = os.path.join(self._filepath, pkg_name)
            # Packages missing on disk are left untouched.
            if not os.path.exists(pkg_path):
                continue
            LOGGER.info("Calculating the MD5 of " + pkg_path)
            entry = self[pkg_name]
            entry['md5checksum'] = unicode(fileutils.getmd5(pkg_path))
            entry['size'] = unicode(os.path.getsize(pkg_path))
            self[pkg_name] = entry
|
|
337 |
|
|
338 |
|
|
339 |
class ValidateReleaseMetadata(ReleaseMetadata):
    """ This class validate if a metadata file is stored in the correct location and
    if all deps exists.
    """
    def __init__(self, filename):
        ReleaseMetadata.__init__(self, filename)
        # Directory that contains the metadata file.
        self.location = os.path.dirname(filename)

    def is_valid(self, checkmd5=True, checkPath=True):
        """ Run the validation mechanism.

        Checks, in order: HYDRASTATUS.xml readiness, directory layout
        against service/product/release, presence/readability (and
        optionally md5) of each package, service pack files, and finally
        the dependency release (recursively).  Returns False on the first
        failure, True otherwise.
        """
        # 1) When HYDRASTATUS.xml exists, its <state> text must be 'Ready'.
        status = os.path.join(os.path.dirname(self._filename), 'HYDRASTATUS.xml')
        if os.path.exists(status):
            hydraxml = xml.dom.minidom.parse(open(status, "r"))
            for t_name in hydraxml.getElementsByTagName('state')[0].childNodes:
                if t_name.nodeType == t_name.TEXT_NODE:
                    if t_name.nodeValue != 'Ready':
                        LOGGER.error("HYDRASTATUS.xml is not ready")
                        return False
        # 2) Directory layout must match the metadata's identification:
        #    <...>/service/product/release/release_metadata*.xml
        if checkPath:
            if os.path.basename(self.location) != self.release:
                LOGGER.error("Release doesn't match.")
                return False
            if os.path.basename(os.path.dirname(self.location)) != self.product:
                LOGGER.error("Product doesn't match.")
                return False
            if os.path.basename(os.path.dirname(os.path.dirname(self.location))) != self.service:
                LOGGER.error("Service doesn't match.")
                return False

        # 3) Every declared package must exist, be readable, and (when
        #    checkmd5 is set) match its recorded md5checksum.
        for name in self.keys():
            path = os.path.join(self.location, name)
            if not os.path.exists(path):
                LOGGER.error("%s doesn't exist." % path)
                return False
            try:
                # Reading one byte detects files still being transferred.
                LOGGER.debug("Trying to open %s" % path)
                content_file = open(path)
                content_file.read(1)
            except IOError:
                LOGGER.error("%s is not available yet" % path)
                return False

            # NOTE: dict.has_key() is Python 2 only.
            if checkmd5 and self[name].has_key('md5checksum'):
                if self[name]['md5checksum'] != None:
                    if fileutils.getmd5(path).lower() != self[name]['md5checksum']:
                        LOGGER.error("%s md5checksum missmatch." % path)
                        return False

        # 4) All service pack payload and instruction files must exist.
        for spack in self.servicepacks:
            for name in spack.files:
                path = os.path.join(self.location, name)
                if not os.path.exists(path):
                    LOGGER.error("%s doesn't exist." % path)
                    return False
            for name in spack.instructions:
                path = os.path.join(self.location, name)
                if not os.path.exists(path):
                    LOGGER.error("%s doesn't exist." % path)
                    return False

        # 5) Recursively validate the release this one depends on.
        #    NOTE(review): checkPath is not propagated to the recursive
        #    call (defaults to True) - confirm that is intended.
        dependency = self.get_dependsof()
        if dependency != None:
            return ValidateReleaseMetadata(dependency.filename).is_valid(checkmd5)
        return True
|
|
403 |
|
628
|
404 |
|
587
|
405 |
class MetadataMerger(object):
    """ Merge packages definition to the root metadata. """

    def __init__(self, metadata):
        """ Construct a metadata merger providing root metadata filename. """
        self._metadata = ReleaseMetadata(metadata)

    def merge(self, filename):
        """ Merge the content of filename into the root metadata. """
        incoming = ReleaseMetadata(filename)
        for pkg_name in incoming.keys():
            # Warn when a package with the same name already exists;
            # the incoming definition wins.
            if pkg_name in self._metadata.keys():
                LOGGER.warning('Package %s already declared, overriding previous definition!' % pkg_name)
            self._metadata[pkg_name] = incoming[pkg_name]

    def xml(self):
        """ Returning the XML as a string. """
        return self._metadata.xml()

    def save(self, filename = None):
        """ Saving the XML into the provided filename. """
        return self._metadata.save(filename)
|
|
427 |
|
628
|
428 |
|
587
|
429 |
class Metadata2TDD(ReleaseMetadata):
    """ Convert Metadata to a TDD file """
    def __init__(self, filename, includes=None, excludes=None):
        ReleaseMetadata.__init__(self, filename)
        if includes is None:
            includes = []
        if excludes is None:
            excludes = []
        self.location = os.path.dirname(filename)
        # Filter values used to select which packages end up in the TDD.
        self.includes = includes
        self.excludes = excludes

    def archives_to_tdd(self, metadata):
        """ Render the packages of one metadata object as a TDD list of
        unzip commands (one dict per selected package). """
        tdd = "\t[\n"
        for name in metadata.keys():
            path_ = os.path.join(os.path.dirname(metadata.filename), name)
            # NOTE(review): 'self.includes in ...s60filter' (and the excludes
            # counterpart) tests whether the whole LIST is an element of the
            # s60filter list, which is only true for exact list nesting; an
            # intersection/any() test may have been intended - confirm.
            if (((len(self.includes) == 0) and metadata[name]['extract']) or (self.includes in metadata[name]['s60filter'])) and self.excludes not in metadata[name]['s60filter']:
                tdd += "\t\t{\n"
                tdd += "\t\t\t\"command\": \"unzip_%s\",\n" % metadata[name]['extract']
                tdd += "\t\t\t\"src\": \"%s\",\n" % os.path.normpath(path_).replace('\\', '/')
                tdd += "\t\t},\n"
        tdd += "\t],\n"
        return tdd

    def to_tdd(self):
        """ Generating a TDD file that contains a list of list of filenames. """
        tdd = "[\n"
        # generates unarchiving steps for dependency
        dependency = self.get_dependsof()
        if dependency != None:
            tdd += self.archives_to_tdd(dependency)
        # generates unarchiving steps
        tdd += self.archives_to_tdd(self)
        tdd += "]\n"
        return tdd
|
|
465 |
|
|
466 |
|
|
467 |
|
|
468 |
def find_latest_metadata(releasedir):
    """ Finding the release latest release metadata file.

    Scans *releasedir* for release_metadata.xml / release_metadata_<n>.xml
    (case insensitive) and returns the normalized path of the last one in
    reverse-sorted order, or None on error or when nothing matches.
    """
    try:
        metadatas = []
        for filename in os.listdir(releasedir):
            if re.match(r'^release_metadata(_\d+)?\.xml$', filename, re.I) is not None:
                LOGGER.debug("Found %s" % filename)
                metadatas.append(filename)
        # reverse the order...
        # NOTE(review): this sort is lexicographic, so release_metadata_9.xml
        # ranks above release_metadata_10.xml - confirm numbering never
        # reaches double digits, or switch to a numeric sort key.
        metadatas.sort(reverse=True)
        if len(metadatas) > 0:
            return os.path.normpath(os.path.join(releasedir, metadatas[0]))
    # Python 2 exception syntax.
    except (IOError, OSError), exc:
        LOGGER.error(exc)
        return None
    return None
|
|
484 |
|
628
|
485 |
|
587
|
486 |
class ValidateReleaseMetadataCached(ValidateReleaseMetadata):
    """ Cached version of the metadata validation. """

    def __init__(self, filename, cachefile=None):
        ValidateReleaseMetadata.__init__(self, filename)
        self.__cachefile = cachefile

    def is_valid(self, checkmd5=True, checkPath=True):
        """ Check if file is in the local cache.
        Add valid release to the cache.
        """
        cache = self.load_cache()
        key = os.path.normpath(self._filename)
        if self.in_cache(cache, key):
            LOGGER.debug("Release found in cache.")
            return self.value_from_cache(cache, key)
        # Not cached yet: validate for real and remember the outcome.
        result = ValidateReleaseMetadata.is_valid(self, checkmd5, checkPath)
        LOGGER.debug("Updating the cache.")
        cache.append([key, result])
        self.update_cache(cache)
        return result

    def in_cache(self, metadatas, key):
        """ True when *key* has an entry among the cached rows. """
        return any(entry[0] == key for entry in metadatas)

    def value_from_cache(self, metadatas, key):
        """ Cached validation result for *key*, or None when absent. """
        for entry in metadatas:
            if entry[0] == key:
                return entry[1]
        return None

    def load_cache(self):
        """ Read the [path, bool] rows from the CSV cache file. """
        metadatas = []
        if self.__cachefile is not None and os.path.exists(self.__cachefile):
            f_file = open(self.__cachefile, "rb")
            for row in csv.reader(f_file):
                if len(row) == 2:
                    metadatas.append([os.path.normpath(row[0]), row[1].lower() == "true"])
                elif len(row) == 1:
                    # backward compatibility with old cache.
                    metadatas.append([os.path.normpath(row[0]), True])
            f_file.close()
        return metadatas

    def update_cache(self, metadatas):
        """ Write the rows back to the CSV cache file. """
        if self.__cachefile is not None and os.path.exists(os.path.dirname(self.__cachefile)):
            f_file = open(self.__cachefile, "wb")
            csv.writer(f_file).writerows(metadatas)
            f_file.close()
|
587
|
542 |
|
628
|
543 |
|
587
|
544 |
class ValidateTicklerReleaseMetadata(ValidateReleaseMetadataCached):
    """ This class validate if a metadata file is stored in the correct location and
    if all deps exists.
    """

    def __init__(self, filename):
        ValidateReleaseMetadataCached.__init__(self, filename)
        self.location = os.path.dirname(filename)

    def is_valid(self, checkmd5=True):
        """ Run the validation mechanism. """
        # A TICKLER marker in the release directory signals that the
        # release transfer has completed.
        if not os.path.exists(os.path.join(self.location, "TICKLER")):
            LOGGER.error("Release not available yet")
            return False
        return ValidateReleaseMetadataCached.is_valid(self, checkmd5)
|