587
|
1 |
#============================================================================
|
|
2 |
#Name : model.py
|
|
3 |
#Part of : Helium
|
|
4 |
|
|
5 |
#Copyright (c) 2009 Nokia Corporation and/or its subsidiary(-ies).
|
|
6 |
#All rights reserved.
|
|
7 |
#This component and the accompanying materials are made available
|
|
8 |
#under the terms of the License "Eclipse Public License v1.0"
|
|
9 |
#which accompanies this distribution, and is available
|
|
10 |
#at the URL "http://www.eclipse.org/legal/epl-v10.html".
|
|
11 |
#
|
|
12 |
#Initial Contributors:
|
|
13 |
#Nokia Corporation - initial contribution.
|
|
14 |
#
|
|
15 |
#Contributors:
|
|
16 |
#
|
|
17 |
#Description:
|
|
18 |
#===============================================================================
|
|
19 |
|
|
20 |
""" Models the concepts and objects that exist in a software build. """
|
|
21 |
|
|
22 |
import logging
|
|
23 |
import re
|
|
24 |
import os
|
|
25 |
import amara
|
|
26 |
import ccm
|
|
27 |
import configuration
|
|
28 |
from xmlhelper import recursive_node_scan
|
|
29 |
import symrec
|
|
30 |
|
|
31 |
# Module-level logger for BOM processing; level is forced to INFO here.
# Uncomment the basicConfig line below (or configure logging elsewhere)
# to enable DEBUG output from this module.
_logger = logging.getLogger("bom")
_logger.setLevel(logging.INFO)
#logging.basicConfig(level=logging.DEBUG)
|
587
|
35 |
|
|
36 |
|
|
37 |
class SessionCreator(object):
    """ Hands out Synergy sessions obtained from a shared session provider. """

    def __init__(self, username=None, password=None, provider=None):
        """ Remember the credentials and the provider to delegate to.

        :param username: Synergy user name (may be None).
        :param password: Synergy password (may be None).
        :param provider: object exposing a ``get`` method that returns sessions.
        """
        self.__username = username
        self.__password = password
        self.__provider = provider

    def session(self, database):
        """ Return a session for ``database``; the provider reuses or creates one. """
        _logger.info("Creating session for %s" % database)
        return self.__provider.get(username=self.__username,
                                   password=self.__password,
                                   database=database)

    def close(self):
        """ Drop the provider reference; no further sessions can be obtained. """
        self.__provider = None
|
|
53 |
|
|
54 |
|
|
55 |
class BOM(object):
    """ The Bill of Materials for a build.

    Aggregates the projects, the ICD/ICF zip names found via prep.xml,
    and the build flags.
    """
    def __init__(self, config):
        """ Initialization.

        :param config: The build configuration properties (dict-like; must
            provide a 'prep.xml' entry, which may be None).
        """
        self.config = config
        self.build = ""
        self._projects = []
        self.icd_icfs = []
        self._flags = []

        self._capture_icd_icfs()
        self._capture_flags()

    def _capture_icd_icfs(self):
        """ Capture ICD/ICF zip file names declared in prep.xml.

        Scans every source/unzipicds/location element of prep.xml,
        honours the per-location exclude suffixes, and records the
        '*.zip' file names found on disk.  The result list is sorted
        alphabetically.
        """
        prep_xml_path = self.config['prep.xml']
        if prep_xml_path is not None and os.path.exists(prep_xml_path):
            prep_file = open(prep_xml_path, 'r')
            try:
                prep_doc = amara.parse(prep_file)
            finally:
                # BUGFIX: the file handle used to leak; close it once parsed.
                prep_file.close()
            if hasattr(prep_doc.prepSpec, u'source'):
                for source in prep_doc.prepSpec.source:
                    if hasattr(source, u'unzipicds'):
                        for unzipicds in source.unzipicds:
                            if hasattr(unzipicds, u'location'):
                                for location in unzipicds.location:
                                    excludes = []
                                    if hasattr(location, 'exclude'):
                                        for exclude in location.exclude:
                                            _logger.debug('Exclude added: %s' % str(exclude.name))
                                            excludes.append(str(exclude.name))
                                    path = str(location.name)
                                    if os.path.exists(path):
                                        for file_ in os.listdir(path):
                                            # BUGFIX: evaluate the exclusion per file.  Previously
                                            # the 'excluded' flag was never reset inside this loop,
                                            # so a single match excluded every later file of the
                                            # same location.
                                            excluded = False
                                            for exclude in excludes:
                                                if file_.endswith(exclude):
                                                    excluded = True
                                            if file_.endswith('.zip') and not excluded:
                                                self.icd_icfs.append(file_)
        self.icd_icfs.sort(key=str)

    def _capture_flags(self):
        """ Capture build flags (placeholder; intentionally does nothing). """
        pass

    def _getprojects(self):
        """ Accessor backing the 'projects' property. """
        return self._projects

    # Read-only view of the projects that make up this BOM.
    projects = property(_getprojects)

    def all_baselines(self):
        """ Return a dict mapping every project baseline to its attributes. """
        baselines = {}
        for project in self._projects:
            for baseline, baseline_attrs in project.baselines.iteritems():
                baselines[baseline] = baseline_attrs
        return baselines

    def all_tasks(self):
        """ Return the tasks of every project, sorted by string form. """
        tasks = []
        for project in self._projects:
            tasks.extend(project.all_tasks())
        tasks.sort(key=str)
        return tasks

    def __str__(self):
        """ String representation: the project list. """
        return str(self._projects)
|
|
129 |
|
|
130 |
class SimpleProject(object):
    """ A minimal stand-in for a ccm project: a task list and no folders. """

    def __init__(self, tasks):
        """ Keep the given task list; folder-based tasks are not tracked. """
        self.folders = []
        self.tasks = tasks
|
|
135 |
|
|
136 |
class SimpleBOM(BOM):
    """ A simple Bill of Materials reloaded from an existing bom.xml file.

    The SimpleBOM is used to load an existing Bill of Materials without
    any Synergy access: projects and baselines come straight from the XML.
    """
    def __init__(self, config, bomxml):
        """ Parse ``bomxml`` and rebuild the projects/baselines it describes.

        :param config: The build configuration properties.
        :param bomxml: Path to an existing BOM XML file.
        """
        BOM.__init__(self, config)
        self._baselines = {}
        bom_file = open(bomxml)
        try:
            bom = amara.parse(bom_file)
        finally:
            # BUGFIX: close the XML file once parsed; the handle used to leak.
            bom_file.close()
        for p_proj in bom.bom.content.project:
            tasks = []
            # Baseline attributes are not stored in the simple BOM format.
            self._baselines[str(p_proj.baseline)] = {}
            for t_proj in p_proj.task:
                # Rebuild the "<id>: <synopsis>" display form used by Task.
                tasks.append(str(t_proj.id) + ': ' + str(t_proj.synopsis))
            self._projects.append(SimpleProject(tasks))

    def all_baselines(self):
        """ Return the baselines read back from the BOM file. """
        return self._baselines
|
|
154 |
|
|
155 |
class SynergyBOM(BOM):
    """ Builds the Bill of Materials by opening user sessions and reading
    the project details from the delivery configuration. """

    def __init__(self, config, ccm_project=None, username=None, password=None, provider=None):
        """ Create the session creator and collect the build's projects. """
        BOM.__init__(self, config)
        self._sessioncreator = SessionCreator(username=username, password=password, provider=provider)
        self.ccm_project = ccm_project
        if self.ccm_project != None:
            self._projects = [Project(ccm_project, config)]
        self._capture_projects()

    def __find_project(self, project, config):
        """ Resolve ``project`` against the local work area when one exists. """
        # A project.version marker means the named project is the right one.
        version_marker = os.path.join(config['dir'], project.name, "project.version")
        if os.path.exists(version_marker):
            return project
        workarea = os.path.join(config['dir'], project.name, project.name)
        if not os.path.exists(workarea):
            return project
        try:
            # Prefer the project registered for the work area on disk.
            info = project.session.get_workarea_info(workarea)
            return info['project']
        except ccm.CCMException:
            return project

    def _capture_projects(self):
        """ Import every project declared in the (new-format) delivery.xml. """
        # NOTE(review): the delivery file handle is passed to the builder and
        # never explicitly closed here — confirm the builder consumes it eagerly.
        builder = configuration.NestedConfigurationBuilder(open(self.config['delivery'], 'r'))
        for config in builder.getConfiguration().getConfigurations():
            _logger.debug('Importing project %s from delivery config.' % str(config.name))
            ccm_project = self._sessioncreator.session(config['database']).create(config.name)
            self._projects.append(Project(self.__find_project(ccm_project, config), config))

    def close(self):
        """ Release the underlying session creator. """
        self._sessioncreator.close()
|
|
191 |
|
|
192 |
|
|
193 |
class Project(object):
    """ An SCM project.

    An input to the build area, typically copied from an SCM work area.
    Captures the project's baselines, tasks and folders at construction time.
    """
    def __init__(self, ccm_project, config, action=None):
        """ Initialisation.

        :param ccm_project: the Synergy project object to wrap.
        :param config: dict-like configuration (may define 'subbaselines'
            and the 'use.reconfigure.template' boolean).
        :param action: optional delivery action node; only a "checkout"
            node is treated specially.
        """
        self._ccm_project = ccm_project
        self._baselines = {}
        #TODO : could querying release attribute return the ccm object? Or add a release attribute to Project
        # class
        release = self._ccm_project['release']
        _logger.debug("Project release: '%s'" % release)
        self._ccm_release = None
        if release != '':
            # BUGFIX: keep the created release object.  It was previously
            # discarded, leaving _ccm_release always None, so the 'supplier'
            # property could only ever report "Unknown".
            self._ccm_release = self._ccm_project.session.create(release)

        # capturing the frozen baseline.
        _logger.debug('Capture baselines')
        project_status = self._ccm_project['status']
        # The last element of the top-level baseline chain is the frozen one.
        bproject = self._get_toplevel_baselines(self._ccm_project).pop()
        if bproject != None:
            self._baselines[unicode(bproject)] = {u'overridden':u'true'}
        # This section finds the baselines of all of the checked out projects
        if project_status == "prep" or project_status == "working" or project_status == "shared":
            for subproject in self._ccm_project.subprojects:
                overridden = u'false'
                subprojbaseline = subproject.baseline
                # A subproject baseline listed in config is marked overridden.
                if config.has_key('subbaselines'):
                    for subbaseline in config['subbaselines']:
                        if str(subbaseline) == str(subprojbaseline):
                            overridden = u'true'

                if subprojbaseline != None:
                    self._baselines[unicode(subprojbaseline)] = {u'overridden': overridden}
        # When a project is a snapshot, the baselines are the projects themselves
        else:
            for subproject in bproject.subprojects:
                self._baselines[unicode(subproject)] = {u'overridden':u'false'}

        self._tasks = []
        self._folders = []

        # Get Synergy reconfigure properties for folders and tasks
        if action == None:
            self._import_baseline_config()
            # Get tasks from Synergy if using reconfigure template
            if config.get_boolean("use.reconfigure.template", False):
                self._tasks = self._ccm_project.tasks
                self._folders = self._ccm_project.folders

        # Or get folders and tasks defined in configuration file
        elif action != None and action.nodeName == "checkout":
            if not config.get_boolean("use.reconfigure.template", False):
                # Explicit comma-separated task/folder ids from the action node.
                for task_node in action.xml_xpath(u'./task[@id]'):
                    for task in [x.strip() for x in task_node.id.split(',')]:
                        self._tasks.append(ccm_project.session.create("Task %s" % task))
                for folder_node in action.xml_xpath(u'./folder[@id]'):
                    for folder in [x.strip() for x in folder_node.id.split(',')]:
                        self._folders.append(ccm_project.session.create("Folder %s" % folder))
            else:
                self._tasks = self._ccm_project.tasks
                self._folders = self._ccm_project.folders
            self._import_baseline_config()

    def _import_baseline_config(self):
        """ Import the baseline folders and tasks (excluding the frozen one). """
        baselines = self._get_toplevel_baselines(self._ccm_project)
        # Drop the frozen baseline; its contents are not imported.
        baselines.pop()
        for baseline in baselines:
            for task in baseline.tasks:
                if task not in self._tasks:
                    self._tasks.append(task)
            for folder in baseline.folders:
                if folder not in self._folders:
                    self._folders.append(folder)

    def _get_toplevel_baselines(self, project):
        """ Return the chain of baselines reachable from ``project``.

        For a writable project the chain is the project followed by its
        (recursive) baselines; for a frozen project it is just the project.
        Returns [] for None input.
        """
        if project == None:
            return []
        project_status = project['status']
        if project_status == "prep" or project_status == "working" or project_status == "shared":
            result = [project]
            baseline = project.baseline
            if baseline != None:
                result.extend(self._get_toplevel_baselines(baseline))
            return result
        else:
            return [project]

    def _getbaselines(self):
        """ Accessor backing the 'baselines' property. """
        return self._baselines

    baselines = property(_getbaselines)

    def _getfolders(self):
        """ Accessor backing the 'folders' property. """
        return self._folders

    folders = property(_getfolders)

    def all_tasks(self):
        """ Get all the tasks (individual and folder based), sorted by string. """
        tasks = [Task(ccm_task) for ccm_task in self._tasks]
        # Idiom fix: plain loop instead of a side-effect list comprehension.
        for folder in self._folders:
            for ccm_task in folder.tasks:
                tasks.append(Task(ccm_task))
        tasks.sort(key=str)
        return tasks

    def _gettasks(self):
        """ Individual (non-folder) tasks wrapped as Task objects. """
        return [Task(ccm_task) for ccm_task in self._tasks]

    tasks = property(_gettasks)

    def _getsupplier(self):
        """ Derive the supplier name from the release component, or "Unknown". """
        if self._ccm_release != None:
            component = self._ccm_release.component
            comparisons = {'MC': '^mc',
                           'S60': 'S60',
                           'SPP/NCP': '^spp_config|spp_psw|spp_tools|ncp_sw$',
                           'IBUSAL': '^IBUSAL'}
            for supplier, regexp in comparisons.iteritems():
                if re.search(regexp, component) != None:
                    return supplier
        return "Unknown"

    supplier = property(_getsupplier)

    def __repr__(self):
        """ Object representation. """
        return str(self._ccm_project)

    def __str__(self):
        """ String representation. """
        return str(self._ccm_project)
|
|
332 |
|
|
333 |
|
|
334 |
class Fix(object):
    """ Base class for anything a task can be recorded as fixing. """

    def __init__(self, description):
        """ Keep the textual description of the fix. """
        self._description = description

    def __str__(self):
        """ A fix renders as its description. """
        return str(self._description)
|
|
343 |
|
|
344 |
|
|
345 |
class TSWError(Fix):
    """ A TSW database error. """
    # Error ids look like 'ABCD-XY12Z9' (4 letters, dash, 6 alphanumerics).
    regex = '([A-Z]{4}-[A-Z0-9]{6})'
    groupname = 'TSW Errors'

    def __init__(self, description):
        """ Delegate straight to the Fix base class. """
        super(TSWError, self).__init__(description)
|
|
353 |
|
|
354 |
|
|
355 |
class PCPError(Fix):
    """ A PCP database error. """
    # Error ids look like 'AB-12345678901' (2 letters, dash, 11 digits).
    regex = '([A-Z]{2}-[0-9]{11})'
    groupname = 'PCP Errors'

    def __init__(self, description):
        """ Delegate straight to the Fix base class. """
        super(PCPError, self).__init__(description)
|
|
363 |
|
|
364 |
|
|
365 |
class TAChange(Fix):
    """ A Type Approval change. """
    # Matches lines starting with '_TA:'; group 2 captures the trimmed text.
    regex = r'^_TA:(\s*)(.*?)(\s*)$'
    groupname = 'TA Changes'

    def __init__(self, description):
        """ Delegate straight to the Fix base class. """
        super(TAChange, self).__init__(description)
|
|
373 |
|
|
374 |
|
|
375 |
class Task(object):
    """ A task or unit of change from the SCM system. """
    # Fix classes probed, in order, by has_fixed().
    fix_types = [TSWError, PCPError, TAChange]

    def __init__(self, ccm_task):
        """ Wrap the underlying ccm task object. """
        self.ccm_task = ccm_task

    def __getitem__(self, name):
        """ Forward item lookups to the wrapped ccm task. """
        return self.ccm_task[name]

    def _number(self, text):
        """ The task number is everything before the first ':'. """
        return text[:text.find(':')]

    def has_fixed(self):
        """ Return a Fix instance describing what this task fixed, or None. """
        text = str(self.ccm_task)
        for fix_type in self.fix_types:
            if re.search(fix_type.regex, text) is not None:
                return fix_type(text)
        return None

    def __cmp__(self, other):
        """ Compare tasks based on their task number only (Python 2 hook). """
        return cmp(self._number(str(self.ccm_task)),
                   self._number(str(other.ccm_task)))

    def __hash__(self):
        """ Hash on the task number so equal-numbered tasks collide. """
        return hash(self._number(str(self.ccm_task)))

    def __repr__(self):
        """ Task number extracted from the wrapped task's repr. """
        return self._number(repr(self.ccm_task))

    def __str__(self):
        """ Full string form of the wrapped ccm task. """
        return str(self.ccm_task)
|
|
417 |
|
|
418 |
|
|
419 |
class BOMDeltaXMLWriter(object):
    """ This class is used to generate an xml file containing the differences of
    old and new Bill of materials.
    """
    def __init__(self, bom, bom_log):
        """ Initialisation.

        :param bom: the current BOM object.
        :param bom_log: path to the previous build's bom.xml file.
        """
        self._bom = bom
        self._bom_log = bom_log

    def write(self, path):
        """ Write the BOM delta information to an XML file.

        Compares the baselines, folders and tasks of the current BOM
        against those recorded in the old bom.xml and emits an
        added/deleted item list under <bomDelta>.
        """
        # NOTE(review): the file handle passed to amara.parse is never
        # explicitly closed — confirm amara consumes it eagerly.
        bom_log = amara.parse(open(self._bom_log, 'r'))
        doc = amara.create_document(u'bomDelta')
        # pylint: disable=E1101
        doc.bomDelta.xml_append(doc.xml_create_element(u'buildFrom', content=unicode(bom_log.bom.build)))
        doc.bomDelta.xml_append(doc.xml_create_element(u'buildTo', content=unicode(self._bom.config['build.id'])))
        content_node = doc.xml_create_element(u'content')
        doc.bomDelta.xml_append(content_node)

        # "old_*" dicts are read from the previous bom.xml; the unprefixed
        # dicts are filled from the live BOM object.  Values are attribute
        # dicts attached to each item when it is written out.
        old_baselines = {}
        baselines = {}
        old_folders = {}
        folders = {}
        old_tasks = {}
        tasks = {}
        if hasattr(bom_log.bom.content, 'project'):
            for project in bom_log.bom.content.project:
                if hasattr(project, 'baseline'):
                    for baseline in project.baseline:
                        if not old_baselines.has_key(unicode(baseline)):
                            old_baselines[unicode(baseline)] = {}
                        if hasattr(baseline, 'xml_attributes'):
                            _logger.debug('baseline.xml_attributes: %s' % baseline.xml_attributes)
                            # Copy every XML attribute of the baseline element.
                            for attr_name, _ in sorted(baseline.xml_attributes.iteritems()):
                                _logger.debug('attr_name: %s' % attr_name)
                                old_baselines[unicode(baseline)][unicode(attr_name)] = unicode(getattr(baseline, attr_name))
                if hasattr(project, 'folder'):
                    for folder in project.folder:
                        if hasattr(folder, 'name'):
                            for name in folder.name:
                                folder_name = unicode(name)
                                _logger.debug('folder_name: %s' % folder_name)
                                if not old_folders.has_key(unicode(folder_name)):
                                    old_folders[unicode(folder_name)] = {}
                                if hasattr(name, 'xml_attributes'):
                                    for attr_name, _ in sorted(name.xml_attributes.iteritems()):
                                        _logger.debug('attr_name: %s' % attr_name)
                                        old_folders[unicode(folder_name)][unicode(attr_name)] = unicode(getattr(name, attr_name))
        # Tasks may be nested anywhere under <content>; scan recursively.
        for task in recursive_node_scan(bom_log.bom.content, u'task'):
            _logger.debug('task: %s' % task)
            _logger.debug('task: %s' % task.id)
            _logger.debug('task: %s' % task.synopsis)
            task_id = u"%s: %s" % (task.id, task.synopsis)
            if not old_tasks.has_key(task_id):
                old_tasks[task_id] = {}
            if hasattr(task, 'xml_attributes'):
                for attr_name, _ in sorted(task.xml_attributes.iteritems()):
                    _logger.debug('attr_name: %s' % attr_name)
                    old_tasks[task_id][unicode(attr_name)] = unicode(getattr(task, attr_name))
        # Collect the current BOM's folders and tasks.  Folder-based tasks
        # are marked overridden=false, directly-attached tasks true.
        for project in self._bom.projects:
            for folder in project.folders:
                folders[unicode(folder.instance + "#" + folder.name + ": " + folder.description)] = {u'overridden':u'true'}
                for task in folder.tasks:
                    _logger.debug("task_bom:'%s'" % unicode(task))
                    tasks[unicode(task)] = {u'overridden':u'false'}
            for task in project.tasks:
                _logger.debug("task_bom:'%s'" % unicode(task))
                tasks[unicode(task)] = {u'overridden':u'true'}

        baselines = self._bom.all_baselines()

        self._write_items_with_attributes(content_node, u'baseline', baselines, old_baselines)
        self._write_items_with_attributes(content_node, u'folder', folders, old_folders)
        self._write_items_with_attributes(content_node, u'task', tasks, old_tasks)

        out = open(path, 'w')
        doc.xml(out, indent='yes')
        out.close()


    def validate_delta_bom_contents(self, delta_bom_log, bom_log, old_bom_log):
        """ To validate delta bom contents with current bom and old bom.

        :param delta_bom_log: path to the delta bom XML file.
        :param bom_log: path to the current bom XML file.
        :param old_bom_log: path to the previous bom XML file.
        :return: True/False, or None when the delta holds no folder/task.
        """
        delta_bom_log = amara.parse(open(delta_bom_log, 'r'))
        bom_log = amara.parse(open(bom_log, 'r'))
        old_bom_log = amara.parse(open(old_bom_log, 'r'))
        bom_contents_are_valid = None
        if hasattr(delta_bom_log.bomDelta.content, 'folder'):
            for delta_foder in delta_bom_log.bomDelta.content.folder:
                # NOTE(review): the flag is overwritten on every iteration,
                # so only the LAST comparison decides the result — confirm
                # this last-match-wins behaviour is intended.
                if(getattr(delta_foder, 'status'))=='added':
                    for bom_foder in bom_log.bom.content.project.folder:
                        if(unicode(getattr(bom_foder, 'name')) == unicode(delta_foder)):
                            bom_contents_are_valid = True
                        else:
                            bom_contents_are_valid = False
                if(getattr(delta_foder, 'status'))=='deleted':
                    for old_bom_foder in old_bom_log.bom.content.project.folder:
                        if(unicode(getattr(old_bom_foder, 'name')) == unicode(delta_foder)):
                            bom_contents_are_valid = True
                        else:
                            bom_contents_are_valid = False

        if hasattr(delta_bom_log.bomDelta.content, 'task'):
            for delta_task in delta_bom_log.bomDelta.content.task:
                if(getattr(delta_task, 'status'))=='added':
                    for bom_task in recursive_node_scan(bom_log.bom.content, u'task'):
                        bom_task_id = u"%s: %s" % (bom_task.id, bom_task.synopsis)
                        if(bom_task_id == unicode(delta_task)):
                            bom_contents_are_valid = True
                        else:
                            bom_contents_are_valid = False
                if(getattr(delta_task, 'status'))=='deleted':
                    for old_bom_task in recursive_node_scan(old_bom_log.bom.content, u'task'):
                        old_bom_task_id = u"%s: %s" % (old_bom_task.id, old_bom_task.synopsis)
                        if(old_bom_task_id == unicode(delta_task)):
                            bom_contents_are_valid = True
                        else:
                            bom_contents_are_valid = False
        return bom_contents_are_valid

    def _write_items(self, node, item_name, items, older_items):
        """ Write added/deleted child elements for a plain item sequence.

        NOTE(review): appears unused — write() only calls
        _write_items_with_attributes; confirm before removal.
        """
        items = frozenset(items)
        older_items = frozenset(older_items)

        items_added = list(items.difference(older_items))
        items_added.sort()
        for item in items_added:
            node.xml_append(node.xml_create_element(item_name, \
                attributes={u'status': u'added'}, content=unicode(item)))

        items_deleted = list(older_items.difference(items))
        items_deleted.sort()
        for item in items_deleted:
            node.xml_append(node.xml_create_element(item_name, \
                attributes={u'status': u'deleted'}, content=unicode(item)))

    def _write_items_with_attributes(self, node, item_name, items, older_items):
        """ This method takes dictionaries as input to pass along attributes.

        ``items``/``older_items`` map item name -> attribute dict; every
        attribute is copied onto the emitted element along with a
        'status' of added or deleted.
        """
        fr_items = frozenset(items)
        fr_older_items = frozenset(older_items)

        items_added = list(fr_items.difference(fr_older_items))
        items_added.sort()
        for item in items_added:
            item_attributes = {u'status': u'added'}
            for attr_name, attr_value in sorted(items[item].iteritems()):
                _logger.debug('item: %s' % item)
                _logger.debug('attr_name: %s' % attr_name)
                _logger.debug('attr_value: %s' % attr_value)
                item_attributes[attr_name] = attr_value
            node.xml_append(node.xml_create_element(item_name, \
                attributes=item_attributes, content=unicode(item)))

        items_deleted = list(fr_older_items.difference(fr_items))
        items_deleted.sort()
        for item in items_deleted:
            item_attributes = {u'status': u'deleted'}
            for attr_name, attr_value in sorted(older_items[item].iteritems()):
                _logger.debug('item: %s' % item)
                _logger.debug('attr_name: %s' % attr_name)
                _logger.debug('attr_value: %s' % attr_value)
                item_attributes[attr_name] = attr_value
            node.xml_append(node.xml_create_element(item_name, \
                attributes=item_attributes, content=unicode(item)))
|
|
583 |
|
|
584 |
|
|
585 |
class BOMXMLWriter(object):
    """ This class is used to generate an xml file containing the BOM information """
    def __init__(self, bom):
        """ Initialisation.

        :param bom: the BOM object to serialize.
        """
        self._bom = bom

    def write(self, path):
        """ Write the BOM information to an XML file.

        Emits the build id, every project with its baselines, folders and
        tasks, the ICD/ICF list, and an s60 <input> section derived either
        from currentRelease.xml metadata or from s60.version/s60.release.
        """
        doc = amara.create_document(u'bom')
        # pylint: disable=E1101
        doc.bom.xml_append(doc.xml_create_element(u'build', content=unicode(self._bom.config['build.id'])))
        doc.bom.xml_append(doc.xml_create_element(u'content'))
        for project in self._bom.projects:
            project_node = doc.xml_create_element(u'project')
            project_node.xml_append(doc.xml_create_element(u'name', content=unicode(project)))
            project_node.xml_append(doc.xml_create_element(u'database', content=unicode(self._bom.config['ccm.database'])))
            doc.bom.content.xml_append(project_node)
            _logger.debug('baselines dictionary: %s' % project.baselines)
            for baseline, baseline_attrs in sorted(project.baselines.iteritems()):
                _logger.debug('baseline: %s' % baseline)
                _logger.debug('baseline_attrs: %s' % baseline_attrs)
                project_node.xml_append(doc.xml_create_element(u'baseline', content=unicode(baseline), attributes=baseline_attrs))
            # Folder-based tasks are marked overridden=false.
            for folder in project.folders:
                folder_node = doc.xml_create_element(u'folder')
                folder_node.xml_append(doc.xml_create_element(u'name', content=unicode(folder.instance + "#" + folder.name + ": " + folder.description), \
                    attributes={u'overridden':u'true'}))
                project_node.xml_append(folder_node)
                for task in folder.tasks:
                    task_node = doc.xml_create_element(u'task', attributes={u'overridden':u'false'})
                    task_node.xml_append(doc.xml_create_element(u'id', content=(unicode(task['displayname']))))
                    task_node.xml_append(doc.xml_create_element(u'synopsis', content=(unicode(task['task_synopsis']))))
                    task_node.xml_append(doc.xml_create_element(u'owner', content=(unicode(task['owner']))))
                    #task_node.xml_append(doc.xml_create_element(u'completed', content=(unicode(self.parse_status_log(task['status_log'])))))
                    folder_node.xml_append(task_node)
            # Directly-attached tasks are marked overridden=true.
            for task in project.tasks:
                task_node = doc.xml_create_element(u'task', attributes={u'overridden':u'true'})
                task_node.xml_append(doc.xml_create_element(u'id', content=(unicode(task['displayname']))))
                task_node.xml_append(doc.xml_create_element(u'synopsis', content=(unicode(task['task_synopsis']))))
                task_node.xml_append(doc.xml_create_element(u'owner', content=(unicode(task['owner']))))
                #task_node.xml_append(doc.xml_create_element(u'completed', content=(unicode(self.parse_status_log(task['status_log'])))))
                project_node.xml_append(task_node)

                # Record what (if anything) this task fixed as a <fix> element.
                fix = task.has_fixed()
                if fix != None:
                    fix_node = doc.xml_create_element(u'fix', content=(unicode(task)), attributes = {u'type': unicode(fix.__class__.__name__)})
                    project_node.xml_append(fix_node)

        if self._bom.icd_icfs != []:
            # Add ICD info to BOM
            doc.bom.content.xml_append(doc.xml_create_element(u'input'))

            # Add default values to unused fields so icds are visible in the BOM
            empty_bom_str = u'N/A'
            empty_bom_tm = u'0'
            doc.bom.content.input.xml_append(doc.xml_create_element(u'name', content=(unicode(empty_bom_str))))
            doc.bom.content.input.xml_append(doc.xml_create_element(u'year', content=(unicode(empty_bom_tm))))
            doc.bom.content.input.xml_append(doc.xml_create_element(u'week', content=(unicode(empty_bom_tm))))
            doc.bom.content.input.xml_append(doc.xml_create_element(u'version', content=(unicode(empty_bom_str))))

            doc.bom.content.input.xml_append(doc.xml_create_element(u'icds'))

            # pylint: disable=R0914
            for i, icd in enumerate(self._bom.icd_icfs):
                doc.bom.content.input.icds.xml_append(doc.xml_create_element(u'icd'))
                doc.bom.content.input.icds.icd[i].xml_append(doc.xml_create_element(u'name', content=(unicode(icd))))
        #If currentRelease.xml exists then send s60 <input> tag to diamonds
        current_release_xml_path = self._bom.config['currentRelease.xml']
        # data from the metadata will go first as they must be safer than the one
        # given by the user
        if current_release_xml_path is not None and os.path.exists(current_release_xml_path):
            metadata = symrec.ReleaseMetadata(current_release_xml_path)
            service = metadata.service
            product = metadata.product
            release = metadata.release
            # Get name, year, week and version from baseline configuration
            s60_input_node = doc.xml_create_element(u'input')
            s60_type = u's60'
            s60_year = u'0'
            s60_week = u'0'
            s60_release = u''
            # Using regular expression in first place
            regexp = r'(?P<TYPE>.*)_(?P<YEAR>\d{4})(?P<WEEK>\d{2})_(?P<REVISION>.*)'
            # A custom regexp is only accepted when it defines all four
            # named groups; otherwise fall back to the default above.
            if self._bom.config['release_regexp']:
                if '?P<TYPE>' not in self._bom.config['release_regexp']:
                    _logger.error('Missing TYPE in: %s' % str(self._bom.config['release_regexp']))
                    _logger.info('Using default regular expression: %s' % regexp)
                elif '?P<YEAR>' not in self._bom.config['release_regexp']:
                    _logger.error('Missing YEAR in: %s' % str(self._bom.config['release_regexp']))
                    _logger.info('Using default regular expression: %s' % regexp)
                elif '?P<WEEK>' not in self._bom.config['release_regexp']:
                    _logger.error('Missing WEEK in: %s' % str(self._bom.config['release_regexp']))
                    _logger.info('Using default regular expression: %s' % regexp)
                elif '?P<REVISION>' not in self._bom.config['release_regexp']:
                    _logger.error('Missing REVISION in: %s' % str(self._bom.config['release_regexp']))
                    _logger.info('Using default regular expression: %s' % regexp)
                else:
                    _logger.info('Using custom regular expression to capture the baseline release information: %s'
                                 % str(self._bom.config['release_regexp']))
                    regexp = self._bom.config['release_regexp']
            res = re.match(regexp, release)
            if res != None:
                s60_type = res.group('TYPE')
                s60_release = res.group('TYPE') + '_' + res.group('REVISION')
                s60_year = res.group('YEAR')
                s60_week = res.group('WEEK')
            else:
                _logger.warning("Regular expression '%s' is not matching '%s'." % (regexp, release))
                # NOTE(review): fallback values come from user config only
                # when the regexp did not match — confirm intended.
                if self._bom.config['s60_version'] != None:
                    # last resorts if it doesn't matches
                    _logger.warning("Falling back on s60.version and s60.release to determine input.")
                    s60_version = self._bom.config['s60_version']
                    s60_year = s60_version[0:4]
                    s60_week = s60_version[4:]
                if self._bom.config['s60_release']:
                    s60_release = self._bom.config['s60_release']

            s60_input_node.xml_append(doc.xml_create_element(u'name', content=(unicode(s60_type))))
            s60_input_node.xml_append(doc.xml_create_element(u'year', content=(unicode(s60_year))))
            s60_input_node.xml_append(doc.xml_create_element(u'week', content=(unicode(s60_week))))
            s60_input_node.xml_append(doc.xml_create_element(u'version', content=(unicode(s60_release))))

            s60_input_source = s60_input_node.xml_create_element(u'source')
            s60_input_source.xml_append(doc.xml_create_element(u'type', content=(unicode("hydra"))))
            s60_input_source.xml_append(doc.xml_create_element(u'service', content=(unicode(service))))
            s60_input_source.xml_append(doc.xml_create_element(u'product', content=(unicode(product))))
            s60_input_source.xml_append(doc.xml_create_element(u'release', content=(unicode(release))))
            s60_input_node.xml_append(s60_input_source)
            doc.bom.content.xml_append(s60_input_node)
        elif self._bom.config['s60_version'] and self._bom.config['s60_release']:
            _logger.info("currentRelease.xml not defined, falling back on s60.version and s60.release to determine input.")
            s60_type = u's60'
            s60_version = self._bom.config['s60_version']
            s60_year = u'0'
            s60_week = u'0'
            # s60.version longer than 6 chars is assumed to be YYYYWW...
            if len(s60_version) > 6:
                s60_year = s60_version[0:4]
                s60_week = s60_version[4:]
            s60_release = self._bom.config['s60_release']
            s60_input_node = doc.xml_create_element(u'input')
            s60_input_node.xml_append(doc.xml_create_element(u'name', content=(unicode(s60_type))))
            s60_input_node.xml_append(doc.xml_create_element(u'year', content=(unicode(s60_year))))
            s60_input_node.xml_append(doc.xml_create_element(u'week', content=(unicode(s60_week))))
            s60_input_node.xml_append(doc.xml_create_element(u'version', content=(unicode(s60_release))))

            s60_input_source = s60_input_node.xml_create_element(u'source')
            s60_input_source.xml_append(doc.xml_create_element(u'type', content=(unicode("unknown"))))
            s60_input_node.xml_append(s60_input_source)
            doc.bom.content.xml_append(s60_input_node)


        out = open(path, 'w')
        doc.xml(out, indent='yes')
        out.close()

    def parse_status_log(self, log):
        """ Extract the 'completed' timestamp line from a task status log.

        :param log: raw status log, lines separated by carriage returns.
        :return: the completed line up to its last ':' (stripped),
            or the literal u'None' when the shape does not match.
        """
        _log_array = log.split('\r')
        if(len(_log_array) == 3 and log.find('completed') > 0):
            _completed_line = _log_array[2]
            return _completed_line[:_completed_line.rfind(':')].strip()
        else:
            return u'None'
|