587
|
1 |
#============================================================================
|
|
2 |
#Name : dependancygraph.py
|
|
3 |
#Part of : Helium
|
|
4 |
|
|
5 |
#Copyright (c) 2009 Nokia Corporation and/or its subsidiary(-ies).
|
|
6 |
#All rights reserved.
|
|
7 |
#This component and the accompanying materials are made available
|
|
8 |
#under the terms of the License "Eclipse Public License v1.0"
|
|
9 |
#which accompanies this distribution, and is available
|
|
10 |
#at the URL "http://www.eclipse.org/legal/epl-v10.html".
|
|
11 |
#
|
|
12 |
#Initial Contributors:
|
|
13 |
#Nokia Corporation - initial contribution.
|
|
14 |
#
|
|
15 |
#Contributors:
|
|
16 |
#
|
|
17 |
#Description:
|
|
18 |
#===============================================================================
|
|
19 |
"""create the dependancy graph for the documentation"""
|
|
20 |
|
|
21 |
import os
|
|
22 |
import amara
|
|
23 |
import codecs
|
|
24 |
import zipfile
|
|
25 |
|
|
26 |
class Library:
    """Metadata holder for one required module or component.

    Records the component's name, license text and version, plus the
    (initially empty) list of names of libraries it requires.
    """

    def __init__(self, name, license_, version=''):
        self.name, self.license_, self.version = name, license_, version
        # Filled in later by readRequiresFile with the bare names of
        # required packages.
        self.requires = []
|
|
33 |
|
|
34 |
class ModuleGroup:
    """A set of library groups keyed by configuration name.

    Each configuration maps to a (description, libraries, colour) tuple.
    """

    def __init__(self):
        self.libraries = {}

    def addConf(self, name, des, color):
        """Register configuration *name* with a description and a colour."""
        self.libraries[name] = (des, [], color)

    def addLibrary(self, conf, library):
        """Add *library* to *conf*; on a duplicate name only refresh its license."""
        current = self.getLibraries(conf)
        wanted = library.name.lower()
        for existing in current:
            if existing.name.lower() == wanted:
                # Already known (case-insensitive): keep the entry, take the
                # newer license text.
                existing.license_ = library.license_
                return
        current.append(library)

    def getLibraries(self, conf):
        """Return the library list of configuration *conf*."""
        return self.libraries[conf][1]

    def getDescription(self, conf):
        """Return the description of configuration *conf*."""
        return self.libraries[conf][0]

    def getColor(self, conf):
        """Return the colour of configuration *conf*."""
        return self.libraries[conf][2]
|
|
60 |
|
|
61 |
COLORS = ['pink', 'red', 'lightblue', 'orange', 'green', 'yellow', 'turquoise', 'limegreen']
|
|
62 |
|
|
63 |
class ReadIvyConfig:
    """ Class to read the ivy configuration """

    def __init__(self, ivyfilename):
        # Path of the ivy.xml file; kept so readSubModules can find the
        # per-module ivy files next to it.
        self.ivyfilename = ivyfilename
        # NOTE(review): the file handle passed to amara.parse is never closed
        # explicitly — presumably amara reads it eagerly; confirm.
        self.ivyxml = amara.parse(open(ivyfilename))
        self.group = ModuleGroup()

    def readConfigurations(self):
        """read configurations"""
        for conf in self.ivyxml['ivy-module'].configurations.conf:
            # Pops from the shared module-level COLORS palette: raises
            # IndexError if there are more configurations than colours.
            color = COLORS.pop()
            self.group.addConf(conf.name, conf.description, color)

    def readModules(self):
        """read modules"""
        license_ = ''
        for module in self.ivyxml['ivy-module'].dependencies.xml_children:
            if hasattr(module, 'data'):
                # Text/comment node: a "License:" annotation precedes the
                # dependency element it describes.
                if 'License:' in module.data:
                    license_ = module.data.strip()
            elif hasattr(module, 'name'):
                modulename = module.name.replace('-', '_')

                # SWEPT-organisation modules are skipped.
                if module.org != 'SWEPT':
                    self.group.addLibrary(module.conf, Library(modulename, license_))
                # Reset so a license annotation never leaks onto the next module.
                license_ = ''

    def readSubModules(self):
        """read Sub Modules"""
        for module in self.ivyxml['ivy-module'].dependencies.xml_children:
            if hasattr(module, 'name'):
                if 'jars' in module.name:
                    # Each "jars" dependency has its own ivy file under
                    # <ivy dir>/modules/<name>-1.0.ivy.xml.
                    ivydir = os.path.dirname(self.ivyfilename)
                    ivydir = os.path.join(ivydir, 'modules')
                    ivyjarfile = os.path.join(ivydir, module.name + '-1.0.ivy.xml')
                    # NOTE(review): file handle not closed explicitly (see __init__).
                    ivymodulexml = amara.parse(open(ivyjarfile))
                    license_ = ''
                    for artifact in ivymodulexml['ivy-module'].publications.xml_children:
                        if hasattr(artifact, 'data'):
                            if 'License:' in artifact.data:
                                license_ = artifact.data.strip()
                        elif hasattr(artifact, 'name'):
                            # Artifact names look like "name-version"; a name
                            # without a dash yields an empty version.
                            bits = artifact.name.split('-')
                            name = bits[0]
                            version = ''
                            if len(bits) > 1:
                                version = bits[1]
                            self.group.addLibrary(module.conf, Library(name, license_, version))
                            license_ = ''
|
587
|
112 |
|
|
113 |
# Pseudo configuration keys for the two python library groups registered by
# readEggs. They are deliberately booleans: readEggs files each library under
# the truth value of "directory is inside internaldir".
PYTHON_GROUP = True
SUBCON_PYTHON_GROUP = False
|
|
115 |
|
|
116 |
def readEggs(libraries, dirtosearch, internaldir):
    """Scan two directory trees for python packages and register them.

    Both unpacked packages (a ``PKG-INFO`` file, with an optional sibling
    ``requires.txt``) and zipped ``.egg`` archives (metadata under
    ``EGG-INFO/``) are recognised. Packages found under *internaldir* go to
    the PYTHON_GROUP configuration, the others to SUBCON_PYTHON_GROUP (the
    group keys are booleans, see the constants above).

    Fix over the original: every file and zip handle is now closed via
    ``with``/``finally`` even when parsing raises, instead of leaking on error.
    """
    libraries.addConf(PYTHON_GROUP, 'Python libs', libraries.getColor('core_install'))
    libraries.addConf(SUBCON_PYTHON_GROUP, 'Python subcon libs', libraries.getColor('subcon'))

    for walker in [os.walk(dirtosearch, topdown=False), os.walk(internaldir, topdown=False)]:
        for root, _, files in walker:
            # True when the current directory lies inside internaldir; doubles
            # as the configuration key (PYTHON_GROUP / SUBCON_PYTHON_GROUP).
            notinsubcon = os.path.normpath(internaldir) in os.path.normpath(root)

            for fname in files:
                filename = os.path.join(root, fname)
                if fname == 'PKG-INFO':
                    # Unpacked package: metadata next to the sources.
                    with open(filename) as pkgmetafile:
                        library = readPkgInfo(pkgmetafile)

                    requirefilename = os.path.join(filename, '..', 'requires.txt')
                    if os.path.exists(requirefilename):
                        with open(requirefilename) as requiresfile:
                            readRequiresFile(requiresfile, library)

                    libraries.addLibrary(notinsubcon, library)

                if os.path.isfile(filename) and fname.endswith('.egg'):
                    # Zipped egg: metadata lives inside the archive.
                    eggfile = zipfile.ZipFile(filename, 'r', zipfile.ZIP_DEFLATED)
                    try:
                        # NOTE(review): assumes ZipFile.read returns text
                        # (python2 str); on python3 this would need .decode().
                        data = eggfile.read('EGG-INFO/PKG-INFO')
                        library = readPkgInfo(data.split('\n'))

                        if 'EGG-INFO/requires.txt' in eggfile.namelist():
                            requiresdata = eggfile.read('EGG-INFO/requires.txt')
                            readRequiresFile(requiresdata.split('\n'), library)

                        libraries.addLibrary(notinsubcon, library)
                    finally:
                        eggfile.close()
|
|
154 |
|
|
155 |
def readRequiresFile(data, library):
    """Collect requirement names from an iterable of ``requires.txt`` lines.

    Blank lines and section headers like ``[extras]`` are skipped; version
    specifiers (``pkg>=1.0``) are trimmed to the bare package name, which is
    appended to ``library.requires``.
    """
    for rawline in data:
        entry = rawline.strip()
        if not entry:
            continue
        if entry.startswith('[') and entry.endswith(']'):
            # Setuptools "extras" section marker, not a requirement.
            continue
        library.requires.append(entry.split('>=')[0].strip())
|
|
161 |
|
|
162 |
def readPkgInfo(data):
    """Build a Library from an iterable of ``PKG-INFO`` lines.

    Reads the ``Name:``, ``Version:`` and ``License:`` fields plus any
    ``Classifier: License ::`` entries; the classifier form wins whenever the
    plain license field is empty or ``UNKNOWN``, or any classifier is present.
    """
    name = ''
    version = ''
    license_ = ''
    license2 = ''

    for rawline in data:
        stripped = rawline.strip()
        if 'Name:' in rawline:
            name = stripped.replace('Name: ', '')
        if 'Version:' in rawline:
            version = stripped.replace('Version: ', '')
        if 'License:' in rawline:
            license_ = stripped.replace('License: ', '')
        if 'Classifier: License :: ' in rawline:
            # Accumulate all license classifiers, dropping the OSI prefix.
            license2 = license2 + ' ' + stripped.replace('Classifier: License :: ', '').replace('OSI Approved :: ', '')

    if license_.lower() == 'unknown' or license_ == '' or license2 != '':
        license_ = license2

    return Library(name, license_, version)
|
587
|
183 |
|
|
184 |
def addLicensesColors(graphdata, group):
    """Return a copy of the dot-graph lines with colours and licenses added.

    A line that mentions a known module's name (case-insensitive substring)
    and carries a ``label=`` gets the module's configuration colour injected
    before the label and, when known, the license appended inside the label.
    """
    def decorate(line, module, color):
        # Rebuild from the ORIGINAL line, then tack the license text on.
        decorated = line.replace('label=', 'color=%s,label=' % color)
        if module.license_ != '':
            decorated = decorated.replace("\"];", "|%s\"];" % module.license_)
        return decorated

    result = []
    for line in graphdata:
        newline = line
        lowered = line.lower()
        for conf in group.libraries:
            for module in group.getLibraries(conf):
                if module.name.lower() in lowered and 'label=' in line:
                    newline = decorate(line, module, group.getColor(conf))
                    # Stop at the first matching module of this configuration;
                    # later configurations may still re-decorate the line.
                    break
        result.append(newline)
    return result
|
|
198 |
|
|
199 |
def createKey(group):
    """create key"""
    # Builds a dot "Key" cluster listing every ivy configuration with its
    # description, rendered as a filled node in the configuration's colour.
    key = """subgraph cluster1 {
label = "Key";
style=filled;
color=lightgrey;
"""

    for conf in group.libraries:
        # The python groups use boolean keys and are left out of the key.
        if conf != PYTHON_GROUP and conf != SUBCON_PYTHON_GROUP:
            key = key + "\"%s: %s\" [style=filled,color=%s];" % (conf, group.getDescription(conf), group.getColor(conf))

    key = key + "}"
    return key
|
|
213 |
|
|
214 |
def createGraph(ivyxmlfilename, graphfilename, dirtosearch, internaldir, subcon):
    """Build the complete dependency graph in *graphfilename* (in place).

    Reads the ivy configuration from *ivyxmlfilename*, scans *dirtosearch*
    and *internaldir* for python packages, colours and annotates the existing
    dot file, splices in a key cluster, then links the python libraries.

    Fix over the original: the graph file is written through a ``with``
    block so the handle is closed even if writing raises.
    """
    readivy = ReadIvyConfig(ivyxmlfilename)
    readivy.readConfigurations()
    readivy.readModules()
    readivy.readSubModules()

    group = readivy.group

    readEggs(group, dirtosearch, internaldir)

    key = createKey(group)

    graphdata = loadGraphFile(graphfilename)

    newgraphdata = addLicensesColors(graphdata, group)

    # Splice the key cluster in just before the graph's closing brace.
    newgraphdata[-1] = newgraphdata[-1].replace('}', key + '\n}')

    with codecs.open(graphfilename, 'w', 'utf8') as graphwritefile:
        graphwritefile.writelines(newgraphdata)

    linkPythonLibs(group, graphfilename, subcon)
|
|
239 |
|
|
240 |
def loadGraphFile(graphfilename):
    """Return the lines (newlines preserved) of the utf8 dot file *graphfilename*.

    Fix over the original: the handle is closed via ``with`` even if reading
    raises, and the manual append loop is replaced by ``list(f)`` which yields
    the same line-by-line split.
    """
    with codecs.open(graphfilename, 'r', 'utf8') as destgraphfile:
        return list(destgraphfile)
|
|
248 |
|
|
249 |
def addToGraph(graphfilenametoadd, destgraphfilename):
    """Merge the dot graph *graphfilenametoadd* into *destgraphfilename*.

    The ``digraph {`` header of the added file is dropped and its remaining
    body (which still ends with its own closing brace) replaces the closing
    brace of the destination graph; the destination is rewritten in place.

    Fixes over the original: file handles are closed via ``with`` even on
    error, and the quadratic string ``+=`` loop is replaced by ``join``.
    """
    graphdata = loadGraphFile(destgraphfilename)

    with codecs.open(graphfilenametoadd, 'r', 'utf8') as graphfile:
        graphdatatoadd = ''.join(line.replace('digraph {', '') for line in graphfile)

    # The added body ends with '}' itself, so the destination stays closed.
    graphdata[-1] = graphdata[-1].replace('}', graphdatatoadd)

    with codecs.open(destgraphfilename, 'w', 'utf8') as graphwritefile:
        graphwritefile.writelines(graphdata)
|
|
265 |
|
|
266 |
def linkPythonLibs(libraries, destgraphfilename, subcon):
    """Append the python library nodes and edges to the dot graph file.

    Every python lib hangs off the ``helium_python`` node as a filled record
    coloured with its group colour and labelled ``name version|license``;
    its requirements become plain edges. The file is rewritten in place.

    Fixes over the original: trailing blank lines were trimmed by reversing
    the list and calling ``pop(0)`` while iterating over the SAME list, which
    skips elements when several blank lines are adjacent — replaced with a
    straightforward tail-trim loop. The output file is also written via
    ``with`` so the handle is closed on error.
    """
    graphdata = loadGraphFile(destgraphfilename)

    output = "helium_ant -> helium_python;\n"

    # Subcon builds only publish the subcon group.
    if subcon:
        libs_list = [SUBCON_PYTHON_GROUP]
    else:
        libs_list = [SUBCON_PYTHON_GROUP, PYTHON_GROUP]

    for group in libs_list:
        for lib in libraries.getLibraries(group):
            output = output + ("helium_python -> \"%s\";\n" % lib.name)
            output = output + ("\"%s\" [style=filled,shape=record,color=%s,label=\"%s %s|%s\"];\n" % (lib.name, libraries.getColor(group), lib.name, lib.version, lib.license_))

            for require in lib.requires:
                output = output + ("\"%s\" -> \"%s\";\n" % (lib.name, require))

    # Drop trailing blank lines so the closing brace is on the last line.
    while graphdata and graphdata[-1].strip() == '':
        graphdata.pop()

    graphdata[-1] = graphdata[-1].replace('}', output + '}')

    with codecs.open(destgraphfilename, 'w', 'utf8') as graphwritefile:
        graphwritefile.writelines(graphdata)
|
|
298 |
|
|
299 |
def externalDependancies(database, output):
    """External Dependancies"""
    # Writes a dot graph of project -> external-file edges to *output*,
    # reading the ant database XML at *database*. Paths are treated as
    # "external" when they contain the substring "external" + os.sep.
    out = open(output, 'w')
    # NOTE(review): neither this handle nor the one given to amara.parse is
    # closed on error paths.
    dbase = amara.parse(open(database))
    out.write('digraph G {\n')
    for proj in dbase.antDatabase.project:
        items = []
        if hasattr(proj, 'property'):
            for prop in proj.property:
                if 'external' + os.sep in os.path.abspath(str(prop.defaultValue)):
                    items.append(str(prop.defaultValue))
        if hasattr(proj, 'fileDependency'):
            for dep in proj.fileDependency:
                # Only the first whitespace-separated token is the path.
                dep = str(dep).split(' ')[0]
                if 'external' + os.sep in os.path.abspath(str(dep)):
                    items.append(str(dep))

        # De-duplicate before emitting edges.
        items = set(items)
        for i in items:
            # NOTE(review): raises KeyError when HELIUM_HOME is unset —
            # presumably the build environment always defines it; confirm.
            out.write('\"%s\" -> \"%s\"\n' % (str(proj.name), i.replace(os.environ['HELIUM_HOME'], 'helium').replace(os.sep, '/')))
    out.write('}')
    out.close()
|
|
321 |
|
|
322 |
def appendLogs(targ, proj, output, macro=False):
    """Append dot-graph lines for one target's signals and log files.

    Lines go into the *output* list. Targets that are macros are rendered in
    italic and clustered under their project. Each signal of the form
    ``name,mode`` becomes a coloured "Failbuild" cluster plus a dotted edge;
    each log file becomes a plain edge to its normalised /output/logs/ path.
    """
    targname = str(targ.name)

    def mark_macro():
        # Macro targets: italic node inside the project's cluster.
        output.append("\"%s\" [fontname=\"Times-Italic\"];" % targname)
        output.append('subgraph \"cluster%s\" {label = \"%s\"; \"%s\"}\n' % (str(proj.name), str(proj.name), targname))

    for signal in getattr(targ, 'signal', []):
        if macro:
            mark_macro()
        parts = str(signal).split(',')
        if len(parts) > 1:
            # Colour encodes the failure mode carried after the comma.
            color = {'now': 'red', 'defer': 'yellow'}.get(parts[1], 'green')
            output.append('subgraph \"cluster%s\" {color=%s;style=filled;label = \"Failbuild: %s\"; \"%s\"}\n' % (parts[1], color, parts[1], parts[0]))
            output.append('\"%s\" -> \"%s\" [style=dotted]\n' % (targname, parts[0]))

    for log in getattr(targ, 'log', []):
        logname = os.path.basename(str(log))
        if '**' in logname:
            # Recursive glob patterns are skipped entirely.
            continue
        logname = logname.replace('*', '${sysdef.configuration}').replace('--logfile=', '')
        if '/output/logs/' not in logname:
            logname = '/output/logs/' + logname
        logname = logname.replace(os.sep, '/')

        if macro:
            mark_macro()
        output.append('\"%s\" -> \"%s\"\n' % (targname, logname))
|
|
353 |
|
|
354 |
def findLogFiles(database, output):
    """Write a dot graph of target -> log-file edges to the file *output*.

    *database* is the ant database XML; every project and antlib is scanned,
    and both macros and targets are fed through appendLogs. Duplicate lines
    are collapsed with a set before writing.

    Fixes over the original: the *output* parameter (a filename) was shadowed
    by the collected-lines list, and the output file was opened before the
    XML parse (truncating it even when parsing failed); the file is now
    written last, through ``with``.
    """
    dbase = amara.parse(open(database))

    # Collect every project and antlib root element.
    root_objects = []
    for project in dbase.antDatabase.project:
        root_objects.append(project)
    for antlib in dbase.antDatabase.antlib:
        root_objects.append(antlib)

    lines = []
    for p_ro in root_objects:
        if hasattr(p_ro, 'macro'):
            for t_targ in p_ro.macro:
                appendLogs(t_targ, p_ro, lines, True)
        if hasattr(p_ro, 'target'):
            for t_targ in p_ro.target:
                appendLogs(t_targ, p_ro, lines)

    with open(output, 'w') as out:
        out.write('digraph G {\n')
        for l_line in set(lines):
            out.write(l_line)
        out.write('}')
|