--- a/dbrtools/dbr/checkenv.py Tue Mar 16 12:52:44 2010 +0000
+++ b/dbrtools/dbr/checkenv.py Wed Mar 17 12:29:10 2010 +0000
@@ -1,4 +1,4 @@
-# Copyright (c) 2009 Symbian Foundation Ltd
+# Copyright (c) 2010 Symbian Foundation Ltd
# This component and the accompanying materials are made available
# under the terms of the License "Eclipse Public License v1.0"
# which accompanies this distribution, and is available
@@ -11,44 +11,29 @@
# mattd <mattd@symbian.org>
#
# Description:
-# DBR checkenv - Checks your environment against what was installed
-
-import dbrbaseline
-import dbrpatch
-import dbrutils
-import glob
-
-import os.path
+# new checkenv - uses OO interface.
-def main():
- dbfilename = dbrutils.defaultdb()
-
- baseline = dbrbaseline.readdb(dbfilename)
- if(len(baseline ) > 0):
- patches = dbrpatch.loadpatches(dbrpatch.dbrutils.patchpath())
- db = dbrpatch.createpatchedbaseline(baseline,patches)
- env = dbrutils.scanenv()
- dbrpatch.newupdatedb(db,env)
- baseline = dbrpatch.updatebaseline(baseline, db)
- patches = dbrpatch.updatepatches(patches, db)
+import dbrenv
- dbrpatch.savepatches(patches)
- else:
- baseline = createdb()
- dbrbaseline.writedb(baseline,dbfilename)
+def run(args):
+ location = '/'
+#needs a fix to scanenv for this to work...
+# if(len(args)):
+# location = args[0]
+ db = dbrenv.CreateDB(location)
+ local = dbrenv.DBRLocalEnv(location)
+ results = db.compare(local)
+ local.verify(results.unknown)
+    results2 = db.compare(local)
+ results2.printdetail()
+ results2.printsummary()
+ db.update(local, results2.touched)
+ db.save()
+
+def help():
+ print "Checks the status of the current environment"
+ print "Usage:"
+ print "\tdbr checkenv"
+
+
-def createdb():
- print 'creating db...Move CreateDB into dbrutils!!!'
- env = dbrutils.scanenv()
- hashes = glob.glob(os.path.join(dbrutils.patchpath(),'*.md5'))
- for file in hashes:
- print 'Reading: %s\n' % file
- dbrutils.gethashes(env, file)
- return env
-
-def run(args):
- main()
-
-def help():
- print "Shows the current state of the environment"
- print "Usage\n\tdbr checkenv"
\ No newline at end of file
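
The rewritten run() is the whole checkenv flow: build a database view of what
should be on the drive, scan what actually is there, verify MD5s for files
whose timestamps moved, report, and save. A usage sketch of the new entry
point (the location override is commented out above pending a scanenv fix,
so the argument list is effectively ignored):

    import checkenv

    # Scans '/', reconciles it against the stored baseline database,
    # prints the detail and summary reports, then saves the updated DB.
    checkenv.run([])
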
--- a/dbrtools/dbr/cleanenv.py Tue Mar 16 12:52:44 2010 +0000
+++ b/dbrtools/dbr/cleanenv.py Wed Mar 17 12:29:10 2010 +0000
@@ -38,7 +38,7 @@
required.update(results['untestable']) #untestable is going to be a problem...
dbrutils.extractfiles(required, zippath)
for name in sorted(patches):
- dbrutils.extractfromzip(required, re.sub('.txt','.zip',name))
+ dbrutils.extractfromzip(required, re.sub('.txt','.zip',name),'')
env = dbrutils.scanenv()
results2 = dbrpatch.newupdatedb(db,env)
@@ -46,8 +46,10 @@
baseline = dbrpatch.updatebaseline(baseline, db)
patches = dbrpatch.updatepatches(patches, db)
- dbrpatch.savepatches(patches)
-
+ dbrpatch.savepatches(patches)
+ dbrbaseline.writedb(baseline,dbfilename)
+
+
def run(args):
main(args)
--- a/dbrtools/dbr/dbrbaseline.py Tue Mar 16 12:52:44 2010 +0000
+++ b/dbrtools/dbr/dbrbaseline.py Wed Mar 17 12:29:10 2010 +0000
@@ -18,6 +18,9 @@
import re
import os
import string
+import glob
+import tempfile
+import shutil
from os.path import join, isfile, stat
from stat import *
import dbrutils
@@ -120,3 +123,17 @@
for file in sorted(touched):
print 'Updating timestamp for: ',file
db1[file]['time'] = db2[file]['time']
+
+def createdb():
+ print 'creating db...Move CreateDB into dbrutils!!!'
+ env = dbrutils.scanenv()
+ hashes = glob.glob(os.path.join(dbrutils.patchpath(),'*.md5'))
+ for file in hashes:
+ print 'Reading: %s\n' % file
+ dbrutils.gethashes(env, file, False)
+ return env
+
+
+def readzippeddb(drive):
+ return dbrutils.getzippedDB(drive)
+
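
createdb() has moved here from checkenv.py: it scans the environment and then
back-fills MD5s from any *.md5 manifests under the patch directory. A minimal
sketch of building and persisting a baseline with it, assuming the dbrutils
helpers shown elsewhere in this changeset:

    import dbrbaseline
    import dbrutils

    db = dbrbaseline.createdb()
    dbrbaseline.writedb(db, dbrutils.defaultdb())
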
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/dbrtools/dbr/dbrenv.py Wed Mar 17 12:29:10 2010 +0000
@@ -0,0 +1,233 @@
+# Copyright (c) 2010 Symbian Foundation Ltd
+# This component and the accompanying materials are made available
+# under the terms of the License "Eclipse Public License v1.0"
+# which accompanies this distribution, and is available
+# at the URL "http://www.eclipse.org/legal/epl-v10.html".
+#
+# Initial Contributors:
+# Symbian Foundation Ltd - initial contribution.
+#
+# Contributors:
+# mattd <mattd@symbian.org>
+#
+# Description:
+# DBREnv - OO rewrite of the Environments
+
+#I'm using the existing stuff as helpers until things get relocated...
+import os.path
+import glob
+
+import dbrutils
+import dbrbaseline
+import dbrpatch
+
+
+def CreateDB(location): #virtual constructor
+ print location
+# print dbrutils.patch_path_internal()
+ if(os.path.exists(os.path.join(location,dbrutils.defaultdb()))):
+# print 'loading baseline environment'
+# return DBRBaselineEnv(location)
+ print 'loading patched baseline environment'
+ return DBRPatchedBaselineEnv(location)
+ if(os.path.exists(os.path.join(location,'build_md5.zip'))):
+ print 'loading zipped environment'
+ return DBRZippedEnv(location)
+    if(os.path.exists(os.path.join(location,dbrutils.patch_path_internal()))): #should do something more fun with creating a baseline if we have MD5s
+ print 'loading new env...warning: this is only here for compatibility'
+ return DBRNewLocalEnv(location)
+ if(os.path.exists(os.path.join(location,'epoc32'))):
+ print 'loading localenv'
+ return DBRLocalEnv(location)
+
+ return DBREnv(location)
+
+
+
+class DBREnv:
+ db = dict()
+ location = ''
+ name = ''
+ def __init__(self, location):
+ self.location = location
+
+ def compare(self, other):
+ db1files = set(self.db.keys())
+ db2files = set(other.db.keys())
+
+ removed = db1files - db2files
+ added = db2files - db1files
+ common = db1files & db2files
+
+ touched = set()
+ for file in common:
+ if(int(self.db[file]['time']) != int(other.db[file]['time'])):
+ touched.add(file)
+
+ sizechanged = set()
+ for file in common:
+ if(int(self.db[file]['size']) != int(other.db[file]['size'])):
+ sizechanged.add(file)
+#can be funny with some zip files...suggest we don't use sizechanged...
+# changed = sizechanged
+ changed = set()
+ touched = touched - changed
+ unknown = set()
+ for file in touched:
+ if((self.db[file]['md5'] == "xxx") or (other.db[file]['md5'] == "xxx")):
+ unknown.add(file)
+# if((self.db[file]['md5'] == "xxx")):
+# print 'unknown left: %s' % file
+# else:
+# print 'unknown right: %s' % file
+ else:
+ if(self.db[file]['md5'] != other.db[file]['md5']):
+# print '%s %s %s' % (file, self.db[file]['md5'], other.db[file]['md5'] )
+ changed.add(file)
+ touched = touched - unknown
+ touched = touched - changed
+
+ results = DBRCompResults(added, removed, touched, changed, unknown)
+ return results
+
+ def verify(self, files):
+ print 'this is a pure virtual...'
+ def save(self):
+ print 'this is a pure virtual...'
+
+ def remove(self, files):
+ for file in files:
+ if(file in self.db):
+ del self.db[file]
+ else:
+ print 'warning: del: %s isnt defined' % file
+
+ def add(self, other, files):
+ for file in files:
+ if(file in self.db):
+ print 'warning: add: %s already defined' % file
+ else:
+ if(other.db[file]['md5'] == 'xxx'): #don't update a null md5
+ print 'warning: MD5: %s isnt defined' % file
+ else:
+ self.db[file] = other.db[file]
+
+ def update(self, other, files):
+ for file in files:
+ if(other.db[file]['md5'] != 'xxx'): #don't update a null md5
+ self.db[file]['md5'] = other.db[file]['md5']
+ else:
+ print 'warning: MD5: %s isnt defined' % file
+
+ self.db[file]['time'] = other.db[file]['time']
+ self.db[file]['size'] = other.db[file]['size']
+
+
+#Database plus local filesystem access
+class DBRLocalEnv (DBREnv):
+ def __init__(self, location):
+ DBREnv.__init__(self, location)
+ #load up local files...
+ self.db = dbrutils.scanenv()
+
+ def verify(self, files):
+ #should assert that the files are in the local DB.
+ localfiles = set(self.db.keys())
+ if(localfiles.issuperset(files)):
+ md5s = dbrutils.generateMD5s(files)
+ for file in files:
+ self.db[file]['md5'] = md5s[file]['md5']
+
+class DBRNewLocalEnv (DBRLocalEnv):
+ def __init__(self, location):
+ DBRLocalEnv.__init__(self, location)
+ #load up local files...
+ hashes = glob.glob(os.path.join(dbrutils.patchpath(),'*.md5'))
+ for file in hashes:
+ print 'Reading: %s\n' % file
+ dbrutils.gethashes(self.db, file, False)
+
+ def save(self):
+ filename = os.path.join(self.location,dbrutils.defaultdb())
+ print 'Saving %s' % filename
+ dbrbaseline.writedb(self.db,filename)
+
+
+
+
+#zipped files, contains MD5s.
+class DBRZippedEnv (DBREnv):
+ def __init__(self, location):
+ DBREnv.__init__(self, location)
+ #load up zip MD5 and stuff
+ self.db = dbrutils.getzippedDB(self.location)
+
+#Database, but no filesystem access
+class DBRBaselineEnv (DBREnv):
+ def __init__(self, location):
+ DBREnv.__init__(self, location)
+ #load up database...
+ filename = os.path.join(self.location,dbrutils.defaultdb())
+ print 'Loading %s' % filename
+ self.db = dbrbaseline.readdb(filename)
+
+ def save(self):
+ filename = os.path.join(self.location,dbrutils.defaultdb())
+ print 'Saving %s' % filename
+ dbrbaseline.writedb(self.db,filename)
+
+class DBRPatchedBaselineEnv (DBRBaselineEnv):
+ patches = []
+ baseline = []
+ def __init__(self, location):
+ DBRBaselineEnv.__init__(self, location)
+ #load up patches...
+ if(len(self.db) > 0):
+ self.baseline = self.db
+ self.patches = dbrpatch.loadpatches(os.path.join(self.location,dbrutils.patchpath()))
+ self.db = dbrpatch.createpatchedbaseline(self.baseline,self.patches)
+
+ def save(self):
+ self.baseline = dbrpatch.updatebaseline(self.baseline, self.db)
+ self.patches = dbrpatch.updatepatches(self.patches, self.db)
+ dbrpatch.savepatches(self.patches)
+ self.db = self.baseline
+ DBRBaselineEnv.save(self)
+
+
+class CBREnv (DBREnv): # placeholder for handling CBR components...
+ def __init__(self, location):
+ DBREnv.__init__(self, location)
+
+
+#comparison results...
+class DBRCompResults:
+ added = set()
+ removed = set()
+ touched = set()
+ changed = set()
+ unknown = set()
+ def __init__(self, added, removed, touched, changed, unknown):
+ #Should probably assert that these are disjoint.
+ self.added = added
+ self.removed = removed
+ self.touched = touched
+ self.changed = changed
+ self.unknown = unknown
+
+ def printdetail(self):
+ for file in sorted(self.added):
+ print 'added:', file
+ for file in sorted(self.removed):
+ print 'removed:', file
+ for file in sorted(self.changed):
+ print 'changed:', file
+ for file in sorted(self.unknown):
+ print 'unknown:', file
+
+ def printsummary(self):
+ if(len(self.added | self.removed | self.changed | self.unknown)):
+ print 'status: dirty'
+ else:
+ print 'status: clean'
+
\ No newline at end of file
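
DBREnv.compare() partitions the union of the two file sets: files only in
self are 'removed', files only in other are 'added', and files in common
whose timestamps differ are split into 'unknown' (either side lacks an MD5),
'changed' (MD5s differ) or merely 'touched'. A self-contained sketch of that
contract, using hand-built records in the {'time', 'size', 'md5'} form the
classes expect:

    import dbrenv

    left = dbrenv.DBREnv('/')
    left.db = {
        'epoc32/a.dll': {'time': '100', 'size': '10', 'md5': 'aaa'},
        'epoc32/b.dll': {'time': '100', 'size': '10', 'md5': 'bbb'},
        'epoc32/c.dll': {'time': '100', 'size': '10', 'md5': 'ccc'},
    }
    right = dbrenv.DBREnv('/')
    right.db = {
        'epoc32/a.dll': {'time': '100', 'size': '10', 'md5': 'aaa'},  # untouched
        'epoc32/b.dll': {'time': '200', 'size': '10', 'md5': 'xxx'},  # no MD5: unknown
        'epoc32/d.dll': {'time': '100', 'size': '10', 'md5': 'ddd'},  # added
    }
    results = left.compare(right)
    results.printdetail()   # added: d.dll, removed: c.dll, unknown: b.dll
    results.printsummary()  # status: dirty
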
--- a/dbrtools/dbr/dbrpatch.py Tue Mar 16 12:52:44 2010 +0000
+++ b/dbrtools/dbr/dbrpatch.py Wed Mar 17 12:29:10 2010 +0000
@@ -30,12 +30,13 @@
touched = set()
for file in common:
- if(db1[file]['time'] != db2[file]['time']):
+ if(int(db1[file]['time']) != int(db2[file]['time'])):
+ print 'touched %s %s - %s' % (db1[file]['time'], db2[file]['time'],file)
touched.add(file)
sizechanged = set()
for file in common:
- if(db1[file]['size'] != db2[file]['size']):
+ if(int(db1[file]['size']) != int(db2[file]['size'])):
sizechanged.add(file)
changed = set()
@@ -56,22 +57,11 @@
if(db1[file]['md5'] != db2[file]['md5']):
changed.add(file)
touched = touched - changed
-
- untestable1 = set()
- untestable2 = set()
- for file in common:
- if(db1[file]['md5'] == "xxx"):
- untestable1.add(file)
- if(db2[file]['md5'] == 'xxx'):
- untestable2.add(file)
-
- untestable = untestable1 & untestable2
- changed = changed - untestable
+
#remove the ones we know are changed
touched = touched - changed
- touched = touched - untestable
-
+
results = dict()
results['added'] = dict()
results['removed'] = dict()
@@ -84,11 +74,11 @@
for file in removed:
results['removed'][file] = 0
for file in touched:
- results['touched'][file] = db2[file]
+ results['touched'][file] = db2[file]
for file in changed:
results['changed'][file] = db2[file]
- for file in untestable:
- results['untestable'][file] = 0
+# for file in untestable:
+# results['untestable'][file] = 0
return results
def printresults(results):
@@ -96,12 +86,12 @@
print 'added:', file
for file in sorted (results['removed']):
print 'removed:', file
- for file in sorted (results['touched']):
- print 'touched:', file
+# for file in sorted (results['touched']):
+# print 'touched:', file
for file in sorted (results['changed']):
print 'changed:', file
- for file in sorted (results['untestable']):
- print 'untestable:', file
+# for file in sorted (results['untestable']):
+# print 'untestable:', file
if(len(results['added']) + len(results['removed']) + len(results['changed']) + len(results['untestable']) == 0):
print '\nStatus:\tclean'
else:
@@ -145,20 +135,22 @@
envdbroot = dbrutils.defaultdb()
print "MattD: should move this function to a better location..."
print 'Comparing %s with %s' % (drive2,drive1)
- print 'Loading %s' % drive1
- baseline1 = dbrbaseline.readdb('%s%s' %(drive1,envdbroot))
- patches1 = loadpatches('%s/%s' %(drive1,dbrutils.patchpath()))
- db1 = createpatchedbaseline(baseline1,patches1)
- print 'Loading %s' % drive2
- baseline2 = dbrbaseline.readdb('%s%s' %(drive2,envdbroot))
- patches2 = loadpatches('%s/%s' %(drive2,dbrutils.patchpath()))
- db2 = createpatchedbaseline(baseline2,patches2)
-
+ db1 = loadpatcheddb(drive1)
+ db2 = loadpatcheddb(drive2)
+
results = newcompare(db1, db2)
printresults(results)
-
+def loadpatcheddb(drive):
+ envdbroot = dbrutils.defaultdb()
+ print 'Loading %s' % drive
+ baseline = dbrbaseline.readdb('%s%s' %(drive,envdbroot))
+ if(len(baseline) > 0):
+ patches = loadpatches('%s/%s' %(drive,dbrutils.patchpath()))
+ return createpatchedbaseline(baseline,patches)
+ else:
+ return dbrbaseline.readzippeddb(drive)
def createpatchzip(patch, patchname):
patchtext = '%s.txt' % patchname
@@ -192,7 +184,7 @@
if(file in patches[patch]['added']):
mod = 'added'
if(file in patches[patch]['changed']):
- mod = 'changed'
+ mod = 'changed'
if(mod):
if (patches[patch][mod][file]['time'] != db[file]['time']):
patches[patch][mod][file]['time'] = db[file]['time']
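
The refactoring above pulls the per-drive loading out of
newcomparepatcheddbs() into loadpatcheddb(), which falls back to reading a
zipped build's database when no baseline.db exists on the drive. A sketch of
comparing two drives through it (the drive letters are illustrative):

    import dbrpatch

    db1 = dbrpatch.loadpatcheddb('e:')  # baseline.db plus patches, if present
    db2 = dbrpatch.loadpatcheddb('f:')  # otherwise the zipped DB fallback
    results = dbrpatch.newcompare(db1, db2)
    dbrpatch.printresults(results)
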
--- a/dbrtools/dbr/dbrutils.py Tue Mar 16 12:52:44 2010 +0000
+++ b/dbrtools/dbr/dbrutils.py Wed Mar 17 12:29:10 2010 +0000
@@ -17,15 +17,17 @@
import os
import sys
import string
+import shutil
+import time
from os.path import join, isfile, stat
from stat import *
import glob # temporary (I hope) used for grabbing stuff from zip files...
-
+import tempfile
def defaultdb():
- return os.path.join(patchpath(),'baseline.db')
+ return os.path.join(patch_path_internal(),'baseline.db')
def patchpath():
return os.path.join(epocroot(),'%s/' % patch_path_internal())
@@ -68,13 +70,14 @@
def extractfiles(files, path):
zips = glob.glob(os.path.join(path, '*.zip'))
for name in zips:
- extractfromzip(files, name)
+ extractfromzip(files, name,'')
-def extractfromzip(files, name):
+def extractfromzip(files, name, location):
tmpfilename = os.tmpnam( )
print tmpfilename
- os.chdir(epocroot())
+ cwd = os.getcwd();
+ os.chdir(os.path.join(epocroot(),location))
f = open(tmpfilename,'w')
for file in sorted(files):
str = '%s%s' % (file,'\n')
@@ -85,13 +88,68 @@
print 'executing: >%s<\n' %exestr
os.system(exestr)
os.unlink(tmpfilename)
+ os.chdir(cwd)
def deletefiles(files):
os.chdir(epocroot())
for file in files:
print 'deleting %s' %file
os.unlink(file)
-
+
+
+def getzippedDB(location):
+ db = dict()
+ #This is really crude, but will do for now
+# temp = tempfile.NamedTemporaryFile()
+# print temp.name
+ # Date Time Attr Size Compressed Name
+ #------------------- ----- ------------ ------------ ------------------------
+ #2010-03-10 01:02:30 D.... 0 0 epoc32
+ #2010-03-10 01:02:30 D.... 0 0 epoc32\relinfo
+ #2010-03-10 00:49:12 ..... 2327835 575578 epoc32\relinfo\epoc32_data.md5
+ reattribs = re.compile('(\d+-\d+-\d+\s\d+:\d+:\d+)\s+\..+\s+(\d+)\s+\d*\s*(\S.+)')
+ fixpath = re.compile('\\\\')
+
+ tmpfilename = os.tmpnam( )
+ print tmpfilename
+
+# exestr = '7z l -i!epoc32 -x!epoc32/relinfo %s/*.zip >%s' % (path,temp.name)
+ exestr = '7z l -i!epoc32 -x!epoc32/relinfo %s/*.zip >%s' % (location,tmpfilename)
+
+ exeresult = os.system(exestr)
+ if(exeresult):
+ sys.exit('Fatal error executing: %s\nReported error: %s' % (exestr,os.strerror(exeresult)))
+ else:
+ temp = open(tmpfilename,'r')
+ for line in temp:
+ res = reattribs.match(line)
+ if(res):
+ entry = dict()
+ entry['time'] = int(time.mktime(time.strptime(res.group(1), '%Y-%m-%d %H:%M:%S')))
+ entry['size'] = res.group(2)
+ entry['md5'] = 'xxx'
+ filename = string.lower(fixpath.sub('/',res.group(3)))
+ db[filename] = entry
+ temp.close()
+ os.unlink(tmpfilename)
+ #now fill with the MD5s...
+ md5zip = os.path.join(location,'build_md5.zip')
+ print md5zip
+ temp_dir = tempfile.mkdtemp()
+ print temp_dir
+ if(os.path.exists(md5zip)):
+ files = set();
+ files.add('*')
+ extractfromzip(files,md5zip,temp_dir)
+ globsearch = os.path.join(temp_dir, os.path.join(patch_path_internal(),'*.md5'))
+ print globsearch
+ hashes = glob.glob(globsearch)
+ for file in hashes:
+ print 'Reading: %s\n' % file
+ gethashes(db, file, False)
+ shutil.rmtree(temp_dir)
+ return db
+
def generateMD5s(testset):
db = dict()
@@ -115,13 +173,13 @@
if(exeresult):
sys.exit('Fatal error executing: %s\nReported error: %s' % (exestr,os.strerror(exeresult)))
else:
- db = gethashes(db,outputfile)
+ db = gethashes(db,outputfile, False)
os.unlink(outputfile)
os.unlink(tmpfilename)
return db
# Brittle and nasty!!!
-def gethashes(db,md5filename):
+def gethashes(db, md5filename, create):
os.chdir(epocroot())
# print 'trying to open %s' % md5filename
file = open(md5filename,'r')
@@ -136,9 +194,16 @@
if(res):
filename = "%s%s" % (root,res.group(1))
filename = string.lower(fixpath.sub('/',leadingslash.sub('',filename)))
-# print "found %s" % filename
- if(filename in db):
- db[filename]['md5'] = res.group(3)
+# print "found %s" % filename
+ if(create):
+ entry = dict()
+ entry['time'] = 'xxx'
+ entry['size'] = 'xxx'
+ entry['md5'] = res.group(3)
+ db[filename] = entry
+ else:
+ if(filename in db):
+ db[filename]['md5'] = res.group(3)
else:
res = dirparse.match(line)
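
gethashes() gains a third parameter: with create=False it only back-fills the
'md5' field of records already present in db, while create=True fabricates
new records whose 'time' and 'size' stay at the placeholder 'xxx'. A sketch
of both modes (the .md5 path is illustrative, borrowed from the 7z listing
comment above):

    import dbrutils

    # Fill in MD5s for files found by scanning the environment.
    env = dbrutils.scanenv()
    dbrutils.gethashes(env, 'epoc32/relinfo/epoc32_data.md5', False)

    # Build a database from the manifest alone; time/size stay 'xxx'.
    db = dict()
    dbrutils.gethashes(db, 'epoc32/relinfo/epoc32_data.md5', True)
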
--- a/dbrtools/dbr/diffenv.py Tue Mar 16 12:52:44 2010 +0000
+++ b/dbrtools/dbr/diffenv.py Wed Mar 17 12:29:10 2010 +0000
@@ -1,4 +1,4 @@
-# Copyright (c) 2009 Symbian Foundation Ltd
+# Copyright (c) 2010 Symbian Foundation Ltd
# This component and the accompanying materials are made available
# under the terms of the License "Eclipse Public License v1.0"
# which accompanies this distribution, and is available
@@ -11,23 +11,30 @@
# mattd <mattd@symbian.org>
#
# Description:
-# DBR diffenv - compares two environments
+# new diffenv - uses OO interface and can take one or two drives (with one, it is compared against the current environment)
-import sys
-import dbrpatch
+import dbrenv
def run(args):
- if(len(args) == 2):
- first = args[0]
- second = args[1]
- dbrpatch.newcomparepatcheddbs(first, second)
+ if(len(args)):
+ if(len(args) == 1):
+ first = '/'
+ second = args[0]
+ else:
+ first = args[0]
+ second = args[1]
+ db1=dbrenv.CreateDB(first)
+ db2=dbrenv.CreateDB(second)
+ results = db1.compare(db2)
+ results.printdetail()
+ results.printsummary()
else:
help()
def help():
print "Compares two environments"
print "Usage:"
- print "\tdbr diffenv <drive1> <drive2>"
+ print "\tdbr diffenv <drive1> (<drive2>)"
+
-
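
With the new argument handling, a single drive is compared against the
current environment and two drives are compared against each other:

    import diffenv

    diffenv.run(['e:'])        # compare '/' against e:
    diffenv.run(['e:', 'f:'])  # compare e: against f:
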
--- a/dbrtools/dbr/installpatch.py Tue Mar 16 12:52:44 2010 +0000
+++ b/dbrtools/dbr/installpatch.py Wed Mar 17 12:29:10 2010 +0000
@@ -15,7 +15,7 @@
import sys
import os.path
-#import shutils
+import shutil
import dbrutils
@@ -27,10 +27,10 @@
if(os.path.exists(patch)):
patchname = os.path.basename(patch)
if(not os.path.exists(os.path.join(dbrutils.patchpath(),patchname))):
- shutils.copyfile(patch, os.path.join(dbrutils.patchpath(),patchname))
+ shutil.copyfile(patch, os.path.join(dbrutils.patchpath(),patchname))
files = set();
files.add('*')
- dbrutils.extractfromzip(files,os.path.join(dbrutils.patchpath(),patchname))
+ dbrutils.extractfromzip(files,os.path.join(dbrutils.patchpath(),patchname),'')
print 'Should probably run checkenv now...'
else:
print 'Cannot find patch zip: %s\n' %patch
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/jamesa/_common.py Wed Mar 17 12:29:10 2010 +0000
@@ -0,0 +1,64 @@
+# Copyright (c) 2009 Symbian Foundation Ltd
+# This component and the accompanying materials are made available
+# under the terms of the License "Eclipse Public License v1.0"
+# which accompanies this distribution, and is available
+# at the URL "http://www.eclipse.org/legal/epl-v10.html".
+#
+# Initial Contributors:
+# Symbian Foundation Ltd - initial contribution.
+#
+# Contributors:
+#
+# Description:
+# Data structure code used by dependency analysis scripts.
+
+"""Common data structure code for build_graph.py and tools.
+"""
+
+__author__ = 'James Aley'
+__email__ = 'jamesa@symbian.org'
+__version__ = '1.0'
+
+class Node:
+ """Node objects are similar to the Symbian notion of a Component, but
+    they are defined in a practical way for ROM building rather than by intuitive meaning.
+
+ A Node object is identified by:
+ - the path to bld.inf
+ where by:
+ - the bld.inf file contains a PRJ_MMPFILES section with a least one MMP file.
+ """
+
+ def __str__(self):
+ """Represent node as string, using node_path
+ """
+ return self.node_path
+
+ def __init__(self, path):
+ """Initialize new Node with given path to bld.inf
+ """
+ # path to the bld.inf file associating these mmp components
+ self.node_path = ''
+
+        # list of node_path values for the Node objects owning the binaries
+        # referenced from the MMP files
+ self.dependencies = []
+
+ # contents of this Node, likely not used algorithmically but might
+ # be useful later for reporting.
+ self.mmp_components = []
+
+ # the following are nodes that also satisfy the dependencies (in part), and may
+ # be of interest when building a ROM.
+ self.interesting = []
+
+ # dependencies that were not linked to another component in the source tree
+ self.unresolved = []
+
+ self.node_path = path
+
+ def add_deps(self, deps):
+ """Add dependencies to the list, filtering duplicates
+ """
+ self.dependencies.extend(filter(lambda x: x not in self.dependencies, deps))
+
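
add_deps() filters duplicates while preserving insertion order, so repeated
LIBRARY statements across a component's MMP files collapse to a single
dependency arc. A quick sketch (the bld.inf path is hypothetical):

    from _common import Node

    node = Node('sf/os/foo/group/bld.inf')
    node.add_deps(['euser', 'efsrv'])
    node.add_deps(['euser', 'estor'])  # duplicate 'euser' is filtered
    print node.dependencies           # ['euser', 'efsrv', 'estor']
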
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/jamesa/build_graph.py Wed Mar 17 12:29:10 2010 +0000
@@ -0,0 +1,343 @@
+# Copyright (c) 2009 Symbian Foundation Ltd
+# This component and the accompanying materials are made available
+# under the terms of the License "Eclipse Public License v1.0"
+# which accompanies this distribution, and is available
+# at the URL "http://www.eclipse.org/legal/epl-v10.html".
+#
+# Initial Contributors:
+# Symbian Foundation Ltd - initial contribution.
+#
+# Contributors:
+#
+# Description:
+# Generates a dependency graph of the Symbian source tree.
+
+"""Build a graph of component dependencies from Symbian OS source code.
+The graph is serialized to a file, which can then be used by other scripts to extract data.
+
+The script works by recursing over the directory structure from the specified root and then
+analyzing all bld.inf files to locate referenced production MMP files. These are then processed
+for target and dependency information.
+
+You can use the supplementary scripts to then extract useful information from the generated graph
+file.
+"""
+
+from optparse import OptionParser
+from _common import Node
+
+import re
+import os
+import sys
+import pickle
+import logging
+
+__author__ = 'James Aley'
+__email__ = 'jamesa@symbian.org'
+__version__ = '1.0'
+
+# Constants for various default config
+_LOG_FORMAT = '%(levelname)s: %(message)s'
+_MAX_PATH = 260
+
+# Precompile regexes for better performance
+# - Comment filtering
+_RE_CLEAN_INLINE = '^(.*)//.*$'
+_RE_MULTILINE_OPEN = '^(.*)/\\*.*$'
+_RE_MULTILINE_CLOSE = '^.*\\*/(.*)$'
+_p_clean_inline = re.compile(_RE_CLEAN_INLINE)
+_p_multiline_open = re.compile(_RE_MULTILINE_OPEN)
+_p_multiline_close = re.compile(_RE_MULTILINE_CLOSE)
+
+# - MMP file Parsing
+_RE_TARGET = '^\\s*TARGET\\s+([^\\s]+).*$'
+_RE_PLAIN_TARGET = '^\\s*([^\\s\\.]+)\\.?[^\\s]?\\s*'
+_RE_COMPLEX_TARGET = '.*\\((.+),.+\\).*'
+_RE_LIBRARY = '^\\s*[^\\s]*LIBRARY.*\\s+([^\\s]+.*)$'
+_RE_START = '^\\s*START.*$'
+_RE_END = '\\s*END.*$'
+_p_target = re.compile(_RE_TARGET, re.I)
+_p_plain_target = re.compile(_RE_PLAIN_TARGET)
+_p_complex_target = re.compile(_RE_COMPLEX_TARGET)
+_p_library = re.compile(_RE_LIBRARY, re.I)
+_p_start = re.compile(_RE_START)
+_p_end = re.compile(_RE_END)
+
+# - BLD.INF file parsing
+_RE_PRJ_MMPFILES = '^\\s*PRJ_MMPFILES\\s*$'
+_RE_OTHER_SECTION = '^\\s*PRJ_[a-z]+\\s*$'
+_p_prj_mmpfiles = re.compile(_RE_PRJ_MMPFILES, re.I)
+_p_other_section = re.compile(_RE_OTHER_SECTION, re.I)
+
+# Set up a logging instance for output
+logging.basicConfig(format=_LOG_FORMAT, level=logging.WARNING, stream=sys.stdout)
+
+# Cache dictionary to marry Nodes to each other
+node_cache = {}
+
+# Dictionary representing the dependency graph.
+# Each key identifies the node in the graph, where the value is the node
+# object itself including the arcs to other node_path keys that it requires.
+graph = {}
+
+def rstrip(string, suffix):
+ """Like Python's __str__.rstrip(chars), but it treats the chars as
+ a contiguous string and only strips that complete ending.
+ """
+ if string.endswith(suffix):
+ string = string[:len(string) - len(suffix)]
+ return string
+
+def clean_binary_name(binary_name):
+ """Strips the extension off of binary names so that references to .lib
+ are associated with the correct binaries.
+ """
+ match_complex_target = _p_complex_target.match(binary_name)
+ if match_complex_target:
+ binary_name = match_complex_target.groups()[0].lower().strip()
+ else:
+ match_plain_target = _p_plain_target.match(binary_name)
+ if match_plain_target:
+ binary_name = match_plain_target.groups()[0].lower().strip()
+ return binary_name
+
+def looks_like_test(path):
+ """Returns true if a path looks like it refers to test components.
+ The script does its best to filter test components, as many are missing
+ from the source tree and they're not interesting with respect to building
+ production ROM images anyway.
+ """
+ conventions = ['tsrc', 'test']
+ for convention in conventions:
+ # Iterate through likely test component conventions, if
+ # we match one, return True now
+ if os.path.sep + convention + os.path.sep in path.lower():
+ return True
+ # Otherwise, nothing found, so return False
+ return False
+
+def without_comments(source_file):
+ """Generator function, will yield lines of the source_file object (iterable)
+ with commented regions removed.
+ """
+ multiline_region = False
+ for line in source_file:
+ match_multiline_close = _p_multiline_close.match(line)
+ if match_multiline_close:
+ # Close Comments, strip to the left of the comment
+ multiline_region = False
+ line = match_multiline_close.groups()[0]
+ if multiline_region:
+ # Skip the line if we're in a commented region
+ continue
+ match_multiline_open = _p_multiline_open.match(line)
+ if match_multiline_open:
+ # Open comments, strip to the right of the comment
+ multiline_region = True
+ line = match_multiline_open.groups()[0]
+ match_inline = _p_clean_inline.match(line)
+ if match_inline:
+ # Strip the line to only the left of the comment
+ line = match_inline.groups()[0]
+ if line:
+ yield line
+
+def parse_mmp(mmp_path):
+ """Read an mmp file, return a tuple of the form:
+ (target, required_target_list)
+ """
+ logging.debug('parse_mmp(%s)' % (mmp_path, ))
+
+ mmp_file = None
+ try:
+ mmp_file = open(mmp_path)
+ except IOError, e:
+ logging.error('Unable to open: %s' % (mmp_path, ))
+ return
+
+ # Iterate through MMP file lines to find the TARGET and LIBRARY statements
+ # Note that Symbian projects can compile to different TARGET objects depending on
+ # precompiler macros, so we must index all possible target names.
+ targets = []
+ libs = []
+ resource_block = False
+ for line in without_comments(mmp_file):
+ match_start = _p_start.match(line)
+ if match_start:
+ resource_block = True
+ match_end = _p_end.match(line)
+ if match_end:
+ resource_block = False
+ if resource_block:
+ # need to avoid resource target sections
+ continue
+ match_target = _p_target.match(line)
+ match_library = _p_library.match(line)
+ if match_target:
+ clean_target = clean_binary_name(match_target.groups()[0])
+ targets.append(clean_target)
+ elif match_library:
+ libs_on_line = match_library.groups()[0].split()
+ for lib in libs_on_line:
+ clean_lib = clean_binary_name(lib)
+ libs.append(clean_lib)
+ mmp_file.close()
+
+ return (targets, libs)
+
+def new_node(path, ref_mmps, ref_testmmps):
+ """Construct a new node in the graph with the provided content.
+ """
+ logging.debug('new_node(%s, ref_mmps(%d), ref_testmmps(%d))' % (path, len(ref_mmps), len(ref_testmmps)))
+ node = Node(path)
+
+ # Iterate the MMPs, read dependency and target information
+ for mmp in ref_mmps:
+ (targets, dependencies) = parse_mmp(mmp)
+ if len(targets) > 0:
+ for target in targets:
+ node.mmp_components.append(target)
+ node.add_deps(dependencies)
+
+ # Register the components in the cache, as later we will
+ # join the graph nodes by referring to this cache
+ for c in node.mmp_components:
+ if c in node_cache.keys():
+ existing = node_cache[c]
+ node_cache[c] = existing + [path]
+ else:
+ node_cache[c] = [path]
+
+ # Add this node to the graph
+ graph[path] = node
+
+def parse_bld_inf(path):
+ """Parse a bld.inf file to check to see if references MMP files.
+ For those MMP files included, parse them to build the node object.
+ """
+ logging.debug('parse_bld_inf(%s)' % (path, ))
+
+ # List the files referenced from this bld.inf
+ ref_mmp = []
+ ref_testmmp = []
+
+ bld_inf = None
+ try:
+ bld_inf = open(path, 'r')
+ except IOError, e:
+ logging.error('Unable to open: %s' % (path, ))
+ return
+
+    # Parse the bld_inf file, adding referenced MMP files to the appropriate lists
+ projects_flag = False
+ for line in without_comments(bld_inf):
+ match_projects = _p_prj_mmpfiles.match(line)
+ match_other_section = _p_other_section.match(line)
+ if match_projects:
+ projects_flag = True
+ elif match_other_section:
+ projects_flag = False
+ if projects_flag and len(line) <= _MAX_PATH:
+ rel_name = rstrip(line.lower().strip(), '.mmp')
+ bld_inf_path = os.path.dirname(path)
+ test_path = os.path.join(bld_inf_path, rel_name + '.mmp')
+ test_path = os.path.realpath(test_path)
+ if os.path.exists(test_path):
+ ref_mmp.append(test_path)
+ else:
+ logging.warning('%s refers to %s but it does not exist!' % (path, test_path))
+ bld_inf.close()
+
+ # If we found some MMP files, then this is a new node
+ if len(ref_mmp) > 0:
+ new_node(path, ref_mmp, ref_testmmp)
+
+def make_nodes(not_used, dir_name, file_names):
+ """Call back function for os.path.walk: will analyse the file names, if
+ there are any bld.inf files, it will open them to see if they identify a
+ Node object and create them as appropriate
+ """
+ logging.debug('make_nodes(%s, %s)' % (dir_name, file_names))
+ if looks_like_test(dir_name):
+ return
+ for file_name in file_names:
+ if file_name.lower().endswith('.inf'):
+ abs_path = os.path.join(dir_name, file_name)
+ assert(os.path.exists(abs_path))
+ parse_bld_inf(abs_path)
+
+def connect_nodes():
+ """Walk through the graph and substute the contents of the dependency
+ list members at each node with references to the node_path of that which
+ builds the referenced component.
+
+ There will be instances where multiple graph nodes build overlapping
+ components. This will, in practice, mean that there are many ways of
+ building a suitable ROM for dependencies of one of these nodes.
+ """
+ unresolved_deps = []
+ for node_path in graph.keys():
+ node = graph[node_path]
+ resolved = []
+ for dep in node.dependencies:
+ if dep not in node_cache.keys():
+ logging.warning('Could not resolve %s for %s' % (dep, node.node_path))
+ if dep not in unresolved_deps:
+ unresolved_deps.append(dep)
+ node.unresolved.append(dep)
+ else:
+ solutions = node_cache[dep]
+ proposed = solutions[0]
+ if proposed not in resolved:
+ resolved.append(proposed)
+ node.interesting += filter(lambda x: x not in node.interesting, solutions[1:])
+ node.dependencies = resolved
+ graph[node_path] = node
+ if len(unresolved_deps) > 0:
+ logging.warning('There were %d unresolved dependencies.' % (len(unresolved_deps), ))
+
+def build_graph(root):
+ """Walk nodes from the directory root provided looking for bld.inf files.
+    Graph will be built from the referenced production MMP files.
+ """
+ if not os.path.isdir(root):
+ logging.fatal('%s is not a directory, aborting...' % (root, ))
+ exit(1)
+ os.path.walk(root, make_nodes, None)
+ connect_nodes()
+
+def save_graph(path):
+ """Serialize the graph object to path. This will be a Python object pickle at
+ the highest available protocol version for this Python install.
+ """
+ graph_file = None
+ try:
+ graph_file = open(path, 'wb')
+ except IOError, e:
+ logging.error('Could not write graph to file: %s' % (repr(e), ))
+ exit(1)
+ pickle.dump(graph, graph_file, pickle.HIGHEST_PROTOCOL)
+ graph_file.close()
+
+# Main:
+if __name__ == '__main__':
+ parser = OptionParser()
+ parser.set_description(__doc__)
+ parser.add_option('-g', '--graph', dest='graph_file',
+ help='File name to write the graph to.',
+ metavar='GRAPH_FILE', default='dependencies.graph')
+ parser.add_option('-r', '--root', dest='graph_root',
+ help='Directory to recursively build a graph from, usually root of source tree.',
+ metavar='SOURCE_ROOT', default='.')
+ parser.add_option('-v', '--verbose', dest='verbose',
+ help='Verbose logging, will show all warnings as graph is generated. Recommend redirect!',
+ action='store_true', default=False)
+ (options, args) = parser.parse_args()
+ if not options.verbose:
+ logging.disable(logging.ERROR)
+ print 'Walking source from "%s"\nThis can take some time with large source trees...' % (options.graph_root, )
+ build_graph(options.graph_root)
+ print 'Found %d components consisting of %d binaries.' % (len(graph), len(node_cache))
+    print 'Writing graph to %s' % (options.graph_file)
+ save_graph(options.graph_file)
+ print '...done!'
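
without_comments() is a generator over any iterable of lines, which is why
generate_oby.py can reuse it for bld.inf parsing. It strips // comments and
/* ... */ regions and yields only lines with content left over:

    from build_graph import without_comments

    lines = ['TARGET foo.dll // inline comment\n',
             '/* a block comment\n',
             '   still inside the block\n',
             'end of block */ LIBRARY euser.lib\n']
    for line in without_comments(lines):
        print line.strip()
    # TARGET foo.dll
    # LIBRARY euser.lib
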
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/jamesa/generate_oby.py Wed Mar 17 12:29:10 2010 +0000
@@ -0,0 +1,212 @@
+# Copyright (c) 2009 Symbian Foundation Ltd
+# This component and the accompanying materials are made available
+# under the terms of the License "Eclipse Public License v1.0"
+# which accompanies this distribution, and is available
+# at the URL "http://www.eclipse.org/legal/epl-v10.html".
+#
+# Initial Contributors:
+# Symbian Foundation Ltd - initial contribution.
+#
+# Contributors:
+#
+# Description:
+# Create a barebones OBY file from a dependency report text file.
+
+"""Take a report generated by get_deps.py and attempt to create an OBY
+file to build a ROM with the dependency list.
+"""
+
+from build_graph import without_comments
+from optparse import OptionParser
+
+import os
+import re
+import sys
+import logging
+
+__author__ = 'James Aley'
+__email__ = 'jamesa@symbian.org'
+__version__ = '1.0'
+
+# Logging config
+_LOG_FORMAT = '%(levelname)s: %(message)s'
+logging.basicConfig(format=_LOG_FORMAT, level=logging.WARNING, stream=sys.stdout)
+
+# Regexes for bld.inf parsing
+_RE_EXPORT_SECTION = '\\s*PRJ_EXPORTS\\s*'
+_RE_OTHER_SECTION = '\\s*PRJ_[a-z]+\\s*'
+_RE_IBY_OBY = '\\s*([^\\s]+\\.[oi]by)\\s+.*'
+_p_export_section = re.compile(_RE_EXPORT_SECTION, re.I)
+_p_other_section = re.compile(_RE_OTHER_SECTION, re.I)
+_p_iby_oby = re.compile(_RE_IBY_OBY, re.I)
+
+# OBY output templates
+_OBY_HEADER = """// OBY file generated by generate_oby.py.
+// The following includes are derived from the dependency report: %s
+
+"""
+
+_OBY_INCLUDE = """
+// Required for: %s
+%s
+"""
+
+_OBY_UNRESOLVED = """
+
+// The following appear to be exported by this component,
+// but were not found under the include directory:
+%s
+"""
+
+_OBY_NO_EXPORTS = """
+
+// The following components are required in your dependency graph, but
+// they appear not to export an IBY/OBY file. Your ROM will likely not
+// build until you locate the correct include files for these.
+%s
+"""
+
+_INCLUDE_TEMPLATE = '#include <%s>'
+
+def bld_inf_list(report_path):
+ """Returna list of bld.inf files from the report
+ """
+ bld_list = []
+ report_file = None
+ try:
+ report_file = open(report_path)
+ except IOError, e:
+ logging.critical('Could not open report: %s' % (repr(e), ))
+ exit(1)
+ return filter(lambda x: x and not x.isspace(), [line.strip() for line in without_comments(report_file)])
+
+def get_paths(bld_inf_file):
+ """Returns a list of referenced OBY or IBY files from a bld.inf file.
+ bld_inf_file is an open file handle, which will not be closed by this
+ function.
+ """
+ oby_iby = []
+ export_section = False
+ for line in without_comments(bld_inf_file):
+ if export_section:
+ match_iby_oby = _p_iby_oby.search(line)
+ if match_iby_oby:
+ file_name = match_iby_oby.groups()[0].strip()
+ oby_iby.append(file_name)
+ else:
+ match_other_section = _p_other_section.search(line)
+ if match_other_section:
+ export_section = False
+ else:
+ match_export_section = _p_export_section.search(line)
+ if match_export_section:
+ export_section = True
+ obys = filter(lambda x: x.lower().endswith('.oby'), oby_iby)
+ if len(obys) > 0:
+ return obys
+ return oby_iby
+
+def rom_file_list(bld_inf_paths):
+ """Iterate through a list of bld.inf file paths and extra the references
+ to OBY or IBY files where appropriate (OBY takes precedence). Return a
+ dictionary of relevant files in the format:
+ { 'component_bld_inf' : [ iby_file_list] }
+ """
+ obys_ibys = {}
+ for path in bld_inf_paths:
+ bld_inf_file = None
+ try:
+ bld_inf_file = open(path)
+ except IOError, e:
+ logging.error('Unable to open bld.inf file: %s' % (repr(e), ))
+ continue
+ rom_file_paths = get_paths(bld_inf_file)
+ obys_ibys[path] = rom_file_paths
+ bld_inf_file.close()
+ return obys_ibys
+
+def iby_map(iby_dict, dir_name, file_names):
+ """Searches for the specified IBY/OBY file under the include_root path.
+ Returns the absolute path to the IBY/OBY if it was found, otherwise a blank string.
+ """
+ for component in iby_dict.keys():
+ # Map the file names for each component IBY file to a matching
+ # file name under the export directory, if it exists, otherwise
+ # keep the existing name for now - it might be matched later.
+ file_names = map(lambda x: x.lower(), file_names)
+ component_ibys = map(lambda x: os.path.basename(x).lower() in file_names \
+ and os.path.join(dir_name, os.path.basename(x)) \
+ or x, \
+ iby_dict[component])
+ iby_dict[component] = component_ibys
+
+def write_oby(out_path, iby_map, input_path, include_root):
+ """Write an OBY file to include the required IBY and OBY files for this
+ ROM specification, given by iby_map.
+ """
+ out_file = None
+ try:
+ out_file = open(out_path, 'w')
+ except IOError, e:
+ logging.critical('Unable to write OBY file: %s' % repr(e))
+ exit(1)
+
+ # Write the header with the input file name included
+ out_file.write(_OBY_HEADER % (input_path, ))
+
+ exports = filter(lambda x: len(iby_map[x]) > 0, iby_map.keys())
+ no_exports = filter(lambda x: len(iby_map[x]) == 0, iby_map.keys())
+
+ # Write the includes and missing exports
+ for component in exports:
+ iby_list = iby_map[component]
+ exported = filter(lambda x: x.startswith(include_root), iby_list)
+ # Need relative paths for include, but os.path.relpath is added
+ # in Python 2.6, which isn't supported by other Symbian tools
+ # at present :-(
+ exported = map(lambda x: x[len(include_root) + 1:], exported)
+ exported.sort()
+
+ missing = filter(lambda x: not x.startswith(include_root), iby_list)
+ missing = map(lambda x: os.path.basename(x), missing)
+ missing.sort()
+
+ # Write the IBY file includes for this component
+ out_file.write(_OBY_INCLUDE % (component, '\n'.join([_INCLUDE_TEMPLATE % (iby, ) for iby in exported]), ))
+
+ # Write the missing IBY reports
+ if len(missing) > 0:
+ out_file.write(_OBY_UNRESOLVED % ('\n'.join(['// %s' % (missed, ) for missed in missing]), ))
+
+    # Write the report for components that appear not to export any ROM include files
+ out_file.write(_OBY_NO_EXPORTS % ('\n'.join(['// %s' % (path,) for path in no_exports]), ))
+ out_file.close()
+
+# Main
+if __name__ == '__main__':
+ # Options config
+ parser = OptionParser()
+ parser.set_description(__doc__)
+ parser.add_option('-r', '--report', dest='report_path',
+ help='File name for the report generated by get_deps.py',
+ metavar='INPUT_FILE', default='dependencies.txt')
+ parser.add_option('-o', '--output', dest='output_file',
+                      help='OBY output file to write to.',
+ metavar='OUT_FILE', default='generated.oby')
+ parser.add_option('-i', '--include_root', dest='include_root',
+                      help='Environment ROM include root.',
+ metavar='INCLUDE_ROOT',
+ default=os.path.sep.join(['epoc32', 'rom']))
+ (options, args) = parser.parse_args()
+
+ # Read the report and get a list of bld.inf files, then convert to
+ # a dictionary of bld_inf -> [IBY file list] mappings.
+ bld_infs = bld_inf_list(options.report_path)
+ bld_iby_map = rom_file_list(bld_infs)
+
+ # Walk the include tree to find the exported IBYs.
+ os.path.walk(options.include_root, iby_map, bld_iby_map)
+
+ # Write the OBY file
+ write_oby(options.output_file, bld_iby_map, options.report_path, options.include_root)
+
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/jamesa/get_deps.py Wed Mar 17 12:29:10 2010 +0000
@@ -0,0 +1,174 @@
+# Copyright (c) 2009 Symbian Foundation Ltd
+# This component and the accompanying materials are made available
+# under the terms of the License "Eclipse Public License v1.0"
+# which accompanies this distribution, and is available
+# at the URL "http://www.eclipse.org/legal/epl-v10.html".
+#
+# Initial Contributors:
+# Symbian Foundation Ltd - initial contribution.
+#
+# Contributors:
+#
+# Description:
+# Walk all nodes in the dependency graph to create a dependency report.
+
+"""Walk a dependency graph to find dependencies for a particular set of
+components. This script uses the output of build_graph.py to trace
+dependencies.
+
+The directory_list arguments refer to directories in the source tree for which
+you wish to trace dependencies. The script will find all components
+in the graph file under these directories and trace dependencies from
+that point.
+"""
+
+from optparse import OptionParser
+from _common import Node
+
+import sys
+import pickle
+import logging
+
+__author__ = 'James Aley'
+__email__ = 'jamesa@symbian.org'
+__version__ = '1.0'
+
+_LOG_FORMAT = '%(levelname)s: %(message)s'
+logging.basicConfig(format=_LOG_FORMAT, level=logging.WARNING, stream=sys.stdout)
+
+# Internalized graph object
+graph = {}
+
+# Report formatting
+_REPORT_HEADER = """// Generated by get_deps.py
+//
+// Dependency information for:
+//
+%s
+
+"""
+
+_DEPENDENCY_FORMAT = """
+// Required components:
+
+%s
+
+"""
+
+_MISSING_FORMAT = """
+// The following binary objects were referenced from the build files for
+// components required by your specified root components. However, there
+// were no build files for these objects found in the source tree parsing,
+// so dependencies for them may be missing in the above listing.
+
+%s
+
+"""
+
+def load_graph(path):
+ """Return the internalized graph dictionary object.
+ """
+ graph_file = None
+ graph_loaded = {}
+
+ try:
+ graph_file = open(path, 'rb')
+ except IOError, e:
+ logging.critical('Unable to open graph from file: %s: %s' % (path, repr(e)))
+ exit(1)
+ try:
+ graph_loaded = pickle.load(graph_file)
+ except Exception, e:
+ logging.critical('File %s does not contain a valid graph: %s' % (path, repr(e)))
+ return graph_loaded
+
+def find_roots(root_dirs):
+ """Return a list of root nodes from the graph for tracing, based on
+ the specified component directories in the root_dirs list.
+ """
+ roots = []
+ for root in root_dirs:
+ for node in graph.keys():
+ if node.startswith(root.lower()):
+ if node not in roots:
+ roots.append(node)
+ return roots
+
+def trace(root, visited = None):
+    """Return the set of components required to support root.
+    """
+    if visited is None:
+        visited = set()
+    node = graph[root]
+    # Collect the dependencies we have not seen before they are marked as
+    # visited below, then recurse into each of them.
+    new_deps = set(node.dependencies) - visited
+    visited |= set([node.node_path]) | set(node.dependencies)
+    for dep in new_deps:
+        trace(dep, visited)
+    return visited
+
+def unresolved(deps):
+ """Return a set of components with unknown dependencies from a
+ provided list of node names.
+ """
+ unresolved = set()
+ for dep in deps:
+ node = graph[dep]
+ unresolved |= set(node.unresolved)
+ return unresolved
+
+def report(out_path, roots, dependencies, missing):
+ """Output the dependency information to file.
+ """
+ # open report file
+ out_file = None
+ try:
+ out_file = open(out_path, 'w')
+ except IOError, e:
+ logging.critical('Unable to write report: %s' % (repr(e)))
+ exit(1)
+
+ # easier to read report with these sorted
+ roots.sort()
+ dependencies.sort()
+ missing.sort()
+
+ # format report
+ formatted_header = _REPORT_HEADER % ('\n'.join(['// %s' % (line, ) for line in roots]), )
+ formatted_body = _DEPENDENCY_FORMAT % ('\n'.join(dependencies))
+ formatted_missing = _MISSING_FORMAT % ('\n'.join(['// %s' % (line, ) for line in missing]), )
+
+ # write report
+ out_file.write(formatted_header)
+ out_file.write(formatted_body)
+ out_file.write(formatted_missing)
+
+ out_file.close()
+
+if __name__ == '__main__':
+ # Options config
+ parser = OptionParser()
+ parser.set_description(__doc__)
+ parser.set_usage('python get_deps.py [options] directory_list')
+ parser.add_option('-g', '--graph', dest='graph_file',
+ help='File name to write the graph to.',
+ metavar='GRAPH_FILE', default='dependencies.graph')
+ parser.add_option('-o', '--output', dest='output_file',
+ help='File to write the dependency report to.',
+ metavar='OUT_FILE', default='dependencies.txt')
+ (options, args) = parser.parse_args()
+
+    # Internalize the graph file
+ print 'Loading graph from %s' % (options.graph_file, )
+ graph = load_graph(options.graph_file)
+
+ # Extract relevant slices and merge dependencies
+ roots = find_roots(args)
+ print 'Tracing dependencies for %d components under %s' % (len(roots), ', '.join(args))
+ deps = set()
+ for root in roots:
+ deps |= trace(root)
+ print 'Dependency graph slice yields %d of %d components.' % (len(deps), len(graph))
+ unresolved_deps = unresolved(deps)
+    print 'Component dependencies for %d binaries are unresolved' % (len(unresolved_deps), )
+
+ # Write the report to the output file
+ report(options.output_file, roots, list(deps), list(unresolved_deps))
+ print 'Report written to: %s' % (options.output_file, )
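
Taken together, the three scripts form a pipeline: build_graph.py serializes
the dependency graph, get_deps.py slices it from a set of root directories,
and generate_oby.py turns the resulting report into ROM include statements.
An illustrative end-to-end run (the source root and component directory are
hypothetical):

    python build_graph.py --root /src --graph dependencies.graph
    python get_deps.py --graph dependencies.graph --output dependencies.txt sf/os/kernelhwsrv
    python generate_oby.py --report dependencies.txt --output generated.oby --include_root epoc32/rom
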