|
0
|
1 |
#
|
|
|
2 |
# Copyright (c) 2008, 2009 Nokia Corporation and/or its subsidiary(-ies).
|
|
|
3 |
# All rights reserved.
|
|
|
4 |
# This component and the accompanying materials are made available
|
|
|
5 |
# under the terms of "Eclipse Public License v1.0"
|
|
|
6 |
# which accompanies this distribution, and is available
|
|
|
7 |
# at the URL "http://www.eclipse.org/legal/epl-v10.html".
|
|
|
8 |
#
|
|
|
9 |
# Initial Contributors:
|
|
|
10 |
# Nokia Corporation - initial contribution.
|
|
|
11 |
#
|
|
|
12 |
# Contributors:
|
|
|
13 |
#
|
|
|
14 |
# Description: End-user Interface for Core Tools execution
|
|
|
15 |
#
|
|
|
16 |
|
|
|
17 |
import sys
|
|
|
18 |
import os
|
|
|
19 |
import re
|
|
|
20 |
#while importing, check that all required modules are available; abort
#cleanly on interpreters too old to provide them
try:
    import subprocess
    import platform
    import urllib
except ImportError:
    # BUG FIX: the original called python_error() here, but that function is
    # defined much further down this file, so at import time the call would
    # raise a NameError and hide the real problem.  Report inline instead
    # (same message and exit status as python_error()).
    sys.stdout.write("ERROR: Invalid python version")
    sys.stdout.write("\nPython versions from 2.4 to 3.0 are supported")
    sys.exit(1)
|
|
|
27 |
|
|
|
28 |
#-------------------------Hardcoded values-----------------------------------------
#Currently hardcoded values, these will be moved to a metadata file later
#data version denotes compatibility between the tool and carbide plugin
DATA_VERSION = "6"
#tool version denotes the version of the core tools package
TOOL_VERSION = "2.8.3"
TOOL_DATE = "1st December 2009"

#server to be used for downloading Core tools package and knownissues
#NOTE(review): placeholder value -- no host name is configured here
SERVER_PATH = "http://"

#-------------------------Global values--------------------------------------------
# directory the tool was launched from
CUR_DIR = os.getcwd()
#used to obtain the path in which checkbc and core tools are placed
tempStr = os.path.realpath(sys.argv[0])
[head,tail] = os.path.split(tempStr )
# installation directory of this script, always with a trailing separator
TOOL_DIR = head + os.sep
#create the \data and \bin paths which contain the necessary additional headers
DATA_PATH = TOOL_DIR + "data" + os.sep
EXEC_PATH = TOOL_DIR + "bin" + os.sep
REPORT_PATH = TOOL_DIR + "reports" + os.sep
DEFAULT_ISSUES_FILE = TOOL_DIR + "data" + os.sep + "knownissues.xml"
# accepted build targets, toolchains and SDK versions for config validation
s60_build_targets = [ 'armv5', 'armv5_abiv2', 'armv6', 'armv6t2', 'armv7a' ]
tool_chain = ['gcc','gcce','rvct']
sdk_version = [ '5.0','5.1','5.2','sf1','sf2']
|
|
|
53 |
|
|
|
54 |
#dictionary elements which hold the platform data(CDS) and forced header(symbian macros) information
|
|
|
55 |
#these are available only when the \data and \bin folders respectively are avaliable
|
|
|
56 |
# CDS platform data XML files keyed by SDK version; left empty when the
# tool's \data folder is not installed alongside the script.
platformdata = {}
if os.path.exists( DATA_PATH ):
    platformdata = {
        "5.0": DATA_PATH + "s60_platform_data_50.xml",
        "5.1": DATA_PATH + "s60_platform_data_51.xml",
        "5.2": DATA_PATH + "s60_platform_data_52.xml",
        "SF1": DATA_PATH + "s60_platform_data_51.xml",
        "SF2": DATA_PATH + "s60_platform_data_52.xml"
    }
|
|
|
66 |
|
|
|
67 |
# Forced-include (Symbian macro) headers keyed by SDK version; left empty
# when the tool's \bin folder is not installed alongside the script.
forcedheadersdata = {}
if os.path.exists( EXEC_PATH ):
    forcedheadersdata = {
        "5.0": EXEC_PATH + "forced_9.4.h",
        "5.0v2": EXEC_PATH + "forced_9.4v2.h",
        "5.1": EXEC_PATH + "forced_9.4v2.h",
        "5.2": EXEC_PATH + "forced_9.4v2.h",
        "SF1": EXEC_PATH + "forced_9.4v2.h",
        "SF2": EXEC_PATH + "forced_9.4v2.h"
    }
|
|
|
78 |
|
|
|
79 |
# DLL metadata XML files keyed by SDK version; left empty when the tool's
# \data folder is not installed alongside the script.
dllXMLdata = {}
if os.path.exists( DATA_PATH ):
    dllXMLdata = {
        "5.0": DATA_PATH + "s60_dll_data_50.xml",
        "5.1": DATA_PATH + "s60_dll_data_51.xml",
        "5.2": DATA_PATH + "s60_dll_data_52.xml",
        "SF1": DATA_PATH + "s60_dll_data_51.xml",
        "SF2": DATA_PATH + "s60_dll_data_52.xml"
    }
|
|
|
89 |
|
|
|
90 |
#Lists to hold platform dependant system include paths (relative to
#<sdk>\epoc32\include); each newer SDK level also inherits every path of
#the earlier levels, so duplicates from the chained extends are preserved.
sys_hdr_30 = ['', 'libc', 'oem', 'ecom']

sys_hdr_32 = ['middleware', os.path.join('domain', 'middleware'),
              'osextensions', os.path.join('domain', 'osextensions'),
              'applications', os.path.join('domain', 'applications')]
sys_hdr_32 += sys_hdr_30

sys_hdr_50 = [os.path.join('domain', 'middleware', 'loc'),
              os.path.join('domain', 'osextensions', 'loc'),
              os.path.join('domain', 'applications', 'loc'),
              os.path.join('domain', 'middleware', 'loc', 'sc'),
              os.path.join('domain', 'osextensions', 'loc', 'sc'),
              os.path.join('domain', 'applications', 'loc', 'sc')]
sys_hdr_50 += sys_hdr_30
sys_hdr_50 += sys_hdr_32

sys_hdr_51 = ['mw', os.path.join('platform', 'mw'), 'platform', 'app',
              os.path.join('platform', 'app'), os.path.join('platform', 'loc'),
              os.path.join('platform', 'mw', 'loc'),
              os.path.join('platform', 'app', 'loc'),
              os.path.join('platform', 'loc', 'sc'),
              os.path.join('platform', 'mw', 'loc', 'sc'),
              os.path.join('platform', 'app', 'loc', 'sc')]
sys_hdr_51 += sys_hdr_50

# lookup table used when assembling the -*platformheaders arguments
sys_includes = {
    "5.0": sys_hdr_50,
    "5.1": sys_hdr_51,
    "5.2": sys_hdr_51,
    "SF1": sys_hdr_51,
    "SF2": sys_hdr_51
}
|
|
|
114 |
|
|
|
115 |
#set of binaries in the Core tools set, this is windows specific, to be added for linux support
# Executables carry an ".exe" suffix on Windows only; the Xerces DLL name is
# used verbatim on every platform.
if os.name == 'nt':
    _exe = ".exe"
else:
    _exe = ""
HA_SET = [ EXEC_PATH + "ha" + _exe, EXEC_PATH + "ha_gccxml_cc1plus" + _exe, EXEC_PATH + "libxerces-c2_7_0.dll" ]
LA_SET = [ EXEC_PATH + "la" + _exe, EXEC_PATH + "cfilt" + _exe ]
BCFILTER_SET = [ EXEC_PATH + "bcfilter" + _exe, EXEC_PATH + "libxerces-c2_7_0.dll" ]
|
|
|
124 |
|
|
|
125 |
#Default report paths
HEADER_REPORT = "Headers_CompatibilityReport"
LIBRARY_REPORT = "Libraries_CompatibilityReport"

#-------------------------Global Definitions------------------------------------------
#defines the set of file types supported by the header analyser
ALL_HEADER_SET = '*.h;*.hrh;*.mbg;*.rsg;*.pan;*.hpp;*.rh'
#true if checkbc is called from carbide plugin,
#this makes additional info available to STDOUT and STDERR
CARBIDE_PLUGIN = False
|
|
|
135 |
#-------------------------Error Handling--------------------------------------------
|
|
|
136 |
#exits with an environment error when the installed python version is unsupported
|
|
|
137 |
def python_error():
    """Report an unsupported Python interpreter on stdout and exit with
    status 1."""
    notice = ("ERROR: Invalid python version"
              "\nPython versions from 2.4 to 3.0 are supported")
    sys.stdout.write(notice)
    sys.exit(1)
|
|
|
141 |
|
|
|
142 |
#this is the set of possible error values, stored as a dictionary, with the "value" representing the error message
|
|
|
143 |
# Error registry: code -> [message prefix, process exit status].
ErrorCode = {}
ErrorCode["cmdlineIP"] = [ "\nERROR: Commandline input parameter invalid -- " , 1 ]
ErrorCode["cmdlineIS"] = [ "\nERROR: Commandline parameter syntax invalid -- " , 2 ]
ErrorCode["cmdlineMP"] = [ "\nERROR: Commandline parameter missing -- " , 3 ]
ErrorCode["confIS"] = [ "\nERROR: Invalid syntax in config file -- " , 4 ]
ErrorCode["confIP"] = [ "\nERROR: Invalid parameter in config file -- ", 5 ]
ErrorCode["confMP"] = [ "\nERROR: Missing parameter in config file -- ", 6 ]
ErrorCode["confMPath"] = [ "\nERROR: File/path in config file not found -- ", 7 ]
ErrorCode["cmdhelp"] = [ "", 8]
ErrorCode["other"] = [ "\nGENERAL ERROR: Please recheck the tool inputs.", 9 ]
|
|
|
154 |
|
|
|
155 |
#an exception class, need to update this for better error representation
|
|
|
156 |
#value --> holds the error string, #text --> the info text to be displayed
|
|
|
157 |
class InputError(Exception):
    """Tool-input failure raised from config/commandline validation.

    Built from a [code, detail, show_usage] triple:
    error -- the [message prefix, exit status] pair from ErrorCode
    text  -- extra detail appended to the message
    use   -- when true the handler also prints the usage banner
    """
    def __init__(self, list):
        code = list[0]
        # unknown codes fall back to the generic "other" entry
        if code in ErrorCode:
            self.error = ErrorCode[code]
        else:
            self.error = ErrorCode["other"]
        self.text = list[1]
        self.use = list[2]
|
|
|
164 |
|
|
|
165 |
#the exception handler class which prints out the error message and usage info when required
|
|
|
166 |
class ExHandler:
    """Terminal handler for InputError: prints the error message (plus the
    usage banner when requested) and exits with the error's status code."""
    def __init__(self, e):
        message = e.error[0] + e.text
        sys.stdout.write(message)
        if e.use:
            usage()
        sys.exit(e.error[1])
|
|
|
173 |
|
|
|
174 |
#displays the usage characteristics for the interface when command is invoked without proper arguments
|
|
|
175 |
def usage():
    """Write the CheckBC command-line help banner to stdout, one line at a
    time (output is identical to the original print-based version)."""
    banner = (
        "",
        "Compatibility Analyser v" + TOOL_VERSION + " - " + TOOL_DATE,
        "Copyright (c) 2001-2009 Nokia Corporation and/or its subsidiary(-ies). All rights reserved.",
        "",
        "Usage: CheckBC <configfile> [-ha/-hm/-hs] [-la/-lm/-ls] [-f] [reportid]",
        "",
        "Where:",
        " configfile Filename of a configuration file",
        " -ha Check all headers",
        " -hm FILE Check multiple headers (FILE = file with list of headers)",
        " -hs FILE Check a single file (FILE = header)",
        " -la Check all libraries",
        " -lm FILE Check multiple libraries (FILE = file with list of libraries)",
        " -ls FILE Check a single file (FILE = library)",
        " -f Filter results after analysis",
        " reportid ID to be used for report files",
        "",
        "Examples:",
        " To analyse all headers and libraries and filter results:",
        " CheckBC myconfig -ha -la -f MYID",
        " To analyse a single header file",
        " CheckBC myconfig -hs aknlists.h MYID",
    )
    for line in banner:
        sys.stdout.write(line + "\n")
|
|
|
198 |
|
|
|
199 |
#-------------------------Header Analysis--------------------------------------------
|
|
|
200 |
class HeaderAnalyser:
    """Builds the argument set for the external Header Analyser (HA) binary
    from the parsed configuration and invokes it via a command file.

    NOTE(review): helper functions used below (getdata, validate, quote,
    quotep, unquote, clean, invokeTool, validateSDKVersion) and the
    platformdata/forcedheadersdata/sys_includes tables are defined elsewhere
    in this module.
    """
    # static dict to hold the arguments
    args = {}

    #initialization function for HA component
    def __init__(self, ip_data, set_list, reportid):
        """Validate the configuration in ip_data and populate self.args.

        ip_data  -- parsed configuration dictionary
        set_list -- [mode, value]; mode 'a' = all headers, 'm' = file holding
                    a list of headers, 's' = a single header
        reportid -- optional id appended to the report file name

        Raises InputError on missing or invalid configuration entries.
        """
        #'args' defines the parameters required by HA
        self.args = {"BASELINE_NAME":[], "CURRENT_NAME":[], "BASELINE_DIR":[], "CURRENT_DIR":[], "REPORT_FILE":[],
            "BASELINE_SYSTEMINCLUDEDIR":[], "CURRENT_SYSTEMINCLUDEDIR":[], "FORCEBASEINCLUDE":[], "FORCECURRENTINCLUDE":[],
            "RECURSIVE":[], "REPLACE":[], "TEMP":[], "USE_THREAD":[], "USE_PLATFORM_DATA":[], "SET":[], "BUNDLESIZE":['-bundlesize', '50'] }

        base_sys_include = []
        curr_sys_include = []
        base_forced = []
        curr_forced = []

        #validate SDK versions
        validateSDKVersion(ip_data["BASELINE_SDK_S60_VERSION"],ip_data["CURRENT_SDK_S60_VERSION"])
        #validate USE_PLATFORM_DATA entry 'true' or 'false'
        if( getdata( ip_data, "USE_PLATFORM_DATA") ):
            if( not('true' == ip_data["USE_PLATFORM_DATA"].lower()) and not('false' == ip_data["USE_PLATFORM_DATA"].lower()) ):
                raise InputError(["confIP", "USE_PLATFORM_DATA\n", False])

        #validate USE_THREAD entry 'true' or 'false'
        if( getdata( ip_data, "USE_THREAD") ):
            if( not('true' == ip_data["USE_THREAD"].lower()) and not('false' == ip_data["USE_THREAD"].lower()) ):
                raise InputError(["confIP", "USE_THREAD\n", False])

        # specify the basline and current names
        if not getdata(ip_data, "BASELINE_NAME"):
            raise InputError(["confMP", "baseline name missing\n", False])
        self.args["BASELINE_NAME"] = ["-baselineversion", quote( ip_data["BASELINE_NAME"] )]
        if not getdata( ip_data, "CURRENT_NAME"):
            raise InputError(["confMP", "current name missing\n", False])
        self.args["CURRENT_NAME"] = ["-currentversion", quote( ip_data["CURRENT_NAME"] )]

        # get the analysis directories
        tmp = []
        sdkBaseTmp = []
        if getdata( ip_data, "BASELINE_SDK_DIR"):
            # a bare path separator means the current drive root
            if(ip_data["BASELINE_SDK_DIR"] == os.sep):
                sdkBaseTmp.append( validate(os.sep + "epoc32" + os.sep + "include"))
            else:
                sdkBaseTmp.append( validate(ip_data["BASELINE_SDK_DIR"] + os.sep + "epoc32" + os.sep + "include") )
        else:
            raise InputError(["confMP", "baseline header directory missing\n", False])

        if getdata( ip_data, "BASELINE_HEADERS"):
            if(ip_data["BASELINE_HEADERS"] == os.sep):
                tmp = sdkBaseTmp
            else:
                # entries that are not existing paths are resolved under
                # <sdk>\epoc32\include
                for i in ip_data["BASELINE_HEADERS"].split(';'):
                    if not os.path.exists(i):
                        if(ip_data["BASELINE_SDK_DIR"] == os.sep):
                            tmp.append(validate(os.sep + "epoc32" + os.sep + "include" + os.sep + i))
                        else:
                            tmp.append(validate(ip_data["BASELINE_SDK_DIR"] + os.sep + "epoc32" + os.sep + "include" + os.sep + i))
                    else:
                        tmp.append(validate(i))
        else:
            tmp = sdkBaseTmp

        self.args["BASELINE_DIR"] = ["-baselinedir", ';'.join(["%s" % quote(i) for i in tmp]) ]
        base_sys_include.extend(tmp)

        tmp = []
        sdkCurrTmp = []
        if getdata( ip_data, "CURRENT_SDK_DIR"):
            if(ip_data["CURRENT_SDK_DIR"] == os.sep):
                sdkCurrTmp.append( validate(os.sep+"epoc32" + os.sep + "include" ))
            else:
                sdkCurrTmp.append( validate(ip_data["CURRENT_SDK_DIR"] + os.sep + "epoc32" + os.sep + "include") )
        else:
            raise InputError(["confMP", "current header directory missing\n", False])

        if getdata( ip_data, "CURRENT_HEADERS"):
            if(ip_data["CURRENT_HEADERS"] == os.sep):
                tmp = sdkCurrTmp
            else:
                for i in ip_data["CURRENT_HEADERS"].split(';'):
                    if not os.path.exists(i):
                        if(ip_data["CURRENT_SDK_DIR"] == os.sep):
                            tmp.append(validate(os.sep + "epoc32" + os.sep + "include" + os.sep + i))
                        else:
                            tmp.append(validate(ip_data["CURRENT_SDK_DIR"] + os.sep + "epoc32" + os.sep + "include" + os.sep + i))
                    else:
                        tmp.append(validate(i))

        else:
            tmp = sdkCurrTmp

        self.args["CURRENT_DIR"] = ["-currentdir", ';'.join(["%s" % quote(i) for i in tmp]) ]
        curr_sys_include.extend(tmp)

        # get the report file name
        if not getdata( ip_data, "REPORT_FILE_HEADERS"):
            # no report file configured -> default path, optionally id-tagged
            if not reportid:
                ip_data["REPORT_FILE_HEADERS"] = REPORT_PATH + HEADER_REPORT
            else:
                ip_data["REPORT_FILE_HEADERS"] = REPORT_PATH + HEADER_REPORT +'_'+ reportid
        else:
            [head, tail] = os.path.split(ip_data["REPORT_FILE_HEADERS"])
            if tail != '':
                # a file name was given; only append the report id
                if reportid:
                    ip_data["REPORT_FILE_HEADERS"] = ip_data["REPORT_FILE_HEADERS"] +'_'+ reportid
            else:
                # a directory was given; append the default report name
                if reportid:
                    ip_data["REPORT_FILE_HEADERS"] = ip_data["REPORT_FILE_HEADERS"] + HEADER_REPORT +'_'+ reportid
                else:
                    ip_data["REPORT_FILE_HEADERS"] = ip_data["REPORT_FILE_HEADERS"] + HEADER_REPORT

        ip_data["REPORT_FILE_HEADERS"] = ip_data["REPORT_FILE_HEADERS"] + ".xml"
        self.args["REPORT_FILE"] = ["-reportfile", quote( ip_data["REPORT_FILE_HEADERS"])]

        tmp = []
        # get the base system include directories
        if getdata( ip_data, "BASELINE_SYSTEMINCLUDEDIR"):
            for term in ip_data["BASELINE_SYSTEMINCLUDEDIR"].split(';'):
                if not os.path.exists(term):
                    if(ip_data["BASELINE_SDK_DIR"] == os.sep):
                        tmp.append(os.sep + "epoc32" + os.sep + "include" + os.sep + term)
                    else:
                        tmp.append(ip_data["BASELINE_SDK_DIR"] + os.sep + "epoc32" + os.sep + "include" + os.sep + term)
                else:
                    tmp.append(term);

        # always add the SDK-version specific default include paths
        for i in sys_includes[ip_data["BASELINE_SDK_S60_VERSION"]]:
            if(ip_data["BASELINE_SDK_DIR"] == os.sep):
                tmp.append( os.sep + "epoc32" + os.sep + "include" + os.sep + i )
            else:
                tmp.append( ip_data["BASELINE_SDK_DIR"] + os.sep + "epoc32" + os.sep + "include" + os.sep + i )

        # keep only existing, de-duplicated paths; warn about the rest
        for i in tmp:
            try:
                term = validate(i)
            except InputError, e:
                sys.stderr.write(os.linesep + "WARNING: Baseline system include path "+ i +" not found")
            else:
                if term not in base_sys_include:
                    base_sys_include.append(term)
        self.args["BASELINE_SYSTEMINCLUDEDIR"] = ["-baseplatformheaders", quote( ';'.join(["%s" % quote(i) for i in base_sys_include]) ) ]

        tmp = []
        # get the current system include directories
        if getdata( ip_data, "CURRENT_SYSTEMINCLUDEDIR"):
            for term in ip_data["CURRENT_SYSTEMINCLUDEDIR"].split(';'):
                if not os.path.exists(term):
                    if(ip_data["CURRENT_SDK_DIR"] == os.sep):
                        tmp.append(os.sep + "epoc32" + os.sep + "include" + os.sep + term)
                    else:
                        tmp.append(ip_data["CURRENT_SDK_DIR"] + os.sep + "epoc32" + os.sep + "include" + os.sep + term)
                else:
                    tmp.append(term);

        for i in sys_includes[ip_data["CURRENT_SDK_S60_VERSION"]]:
            if(ip_data["CURRENT_SDK_DIR"] == os.sep):
                tmp.append( os.sep + "epoc32" + os.sep + "include" + os.sep + i )
            else:
                tmp.append( ip_data["CURRENT_SDK_DIR"] + os.sep + "epoc32" + os.sep + "include" + os.sep + i )

        for i in tmp:
            try:
                term = validate(i)
            except InputError, e:
                sys.stderr.write(os.linesep + "WARNING: Current system include path "+ i +" not found")
            else:
                if term not in curr_sys_include:
                    curr_sys_include.append(term)
        self.args["CURRENT_SYSTEMINCLUDEDIR"] = ["-currentplatformheaders", quote( ';'.join(["%s" % quote(i) for i in curr_sys_include]) ) ]

        # get the forced headers
        if not getdata( ip_data, "BASELINE_SDK_S60_VERSION"):
            raise InputError(["confMP", "Baseline SDK version undefined\n", False])
        for i in getdata( ip_data, "BASELINE_FORCED_HEADERS").split(';'):
            if i:
                base_forced.append(i)
        if forcedheadersdata.has_key(ip_data["BASELINE_SDK_S60_VERSION"]):
            base_forced.append( self.forcedheaders(ip_data["BASELINE_SDK_S60_VERSION"], self.args["BASELINE_SYSTEMINCLUDEDIR"][1]) )

        #Validate the existence of base forced header
        for i in base_forced:
            if not os.path.exists(i):
                tmp = os.path.abspath(i)
                if not os.path.exists(tmp):
                    raise InputError(["confIP", "BASELINE_SDK_S60_VERSION < Field "+ i +" is not valid >" + os.linesep,False])

        self.args["FORCEBASEINCLUDE"] = ["-forcebaseinclude", ';'.join(["%s" % quotep(i) for i in base_forced ]) ]

        if not getdata( ip_data, "CURRENT_SDK_S60_VERSION"):
            raise InputError(["confMP", "Current SDK version undefined\n", False])
        for i in getdata( ip_data, "CURRENT_FORCED_HEADERS").split(';'):
            if i:
                curr_forced.append(i)
        if forcedheadersdata.has_key(ip_data["CURRENT_SDK_S60_VERSION"]):
            curr_forced.append( self.forcedheaders(ip_data["CURRENT_SDK_S60_VERSION"], self.args["CURRENT_SYSTEMINCLUDEDIR"][1]) )

        #Validate the existence of current forced header
        for i in curr_forced:
            if not os.path.exists(i):
                tmp = os.path.abspath(i)
                if not os.path.exists(tmp):
                    raise InputError(["confIP", "CURRENT_SDK_S60_VERSION < Field "+ i +" is not valid >" + os.linesep,False])

        self.args["FORCECURRENTINCLUDE"] = ["-forcecurrentinclude", ';'.join(["%s" % quotep(i) for i in curr_forced ]) ]

        if getdata( ip_data, "USE_THREAD"):
            if 'true' == ip_data["USE_THREAD"].lower():
                self.args["USE_THREAD"] = [ "-usethread" ]

        # setup the replace and exclude directories; recursion is on by default
        self.args["RECURSIVE"] = [ "-recursive" ]
        if getdata( ip_data, "RECURSIVE_HEADERS"):
            if 'false' == ip_data["RECURSIVE_HEADERS"].lower():
                self.args["RECURSIVE"] = [ "" ]
            elif not 'true' == ip_data["RECURSIVE_HEADERS"].lower():
                raise InputError(["confIP", "RECURSIVE_HEADERS\n", False])

        tmp = []
        for i in getdata( ip_data, "EXCLUDE_DIR_HEADERS").split(';'):
            if i:
                tmp.append(i)
        if tmp:
            self.args["EXCLUDE_DIR_HEADERS"] = ["-excludedirs", ';'.join(["%s" % quote(i) for i in tmp]) ]

        self.args["TEMP"] = ["-temp", quotep( ip_data["TEMP"] )]

        # platform data is used unless USE_PLATFORM_DATA is explicitly 'false'
        if not getdata( ip_data, "USE_PLATFORM_DATA") or ('true' == ip_data["USE_PLATFORM_DATA"].lower()):
            if getdata( platformdata, ip_data["BASELINE_SDK_S60_VERSION"] ) and getdata( platformdata, ip_data["CURRENT_SDK_S60_VERSION"] ):
                self.args["USE_PLATFORM_DATA"] = ["-baseplatformdata", quotep( platformdata[ip_data["BASELINE_SDK_S60_VERSION"]] ),
                    "-currentplatformdata", quotep( platformdata[ip_data["CURRENT_SDK_S60_VERSION"]] )]
            else:
                sys.stderr.write( os.linesep + "WARNING: Platform data not available in default paths, continuing without platform data information." )

        if getdata( ip_data, "REPLACE_HEADERS"):
            #separate the sets
            tlist_1 = ip_data["REPLACE_HEADERS"].split(';')
            str = ''
            #seperate the replacement pairs
            # NOTE(review): each loop pass overwrites self.args["REPLACE"], so
            # only the last non-empty replacement pair survives -- confirm
            # whether multiple pairs were meant to accumulate.
            for entry in tlist_1:
                if entry != '':
                    tlist_2 = (entry.split(':'))
                    #create a single str, which is input param
                    str = ' '.join(["%s" % i for i in tlist_2])
                    self.args["REPLACE"] = ["-replace", str ]
        else:
            del self.args["REPLACE"]

        # handling the files provide with 'm' or 's' options on commandline
        str = ''
        line = ''
        tmp = []
        if 's' == set_list[0]:
            # single file: pass the name through unchanged
            str = set_list[1]
        elif 'm' == set_list[0]:
            # multiple files: read one name per line from the given list file
            fname = os.path.abspath(set_list[1])
            if not os.path.exists(fname):
                raise InputError(["confIP", "Input list file unavailable" + os.linesep, False])
            filehandle = open(fname, 'r')
            for line in filehandle:
                tmp.append(clean(line))
            str = ';'.join(["%s" % i for i in tmp])
            filehandle.close()
        elif 'a' == set_list[0]:
            # all files: use the default wildcard set
            str = ALL_HEADER_SET
        self.args["SET"] = ["-set", quote(str)]

    #'stringize' all params and invoke the tool
    def run(self):
        """Write all collected arguments to a command file and start HA.

        Exits the process with status 1 when any HA binary is missing.
        """
        str = ''
        for i in HA_SET:
            if not os.path.exists(i):
                print os.linesep + i + " does not exist. Please reinstall."
                sys.exit(1)
        # flatten every argument list in self.args into one string
        it_tmp = self.args.itervalues()
        for i in xrange(0, len(self.args), 1):
            for val in it_tmp.next():
                str = str + ' ' + val
        # HA reads its arguments from a command file in the temp directory
        cmd_file= unquote(self.args["TEMP"][1]) + os.sep + 'ha.txt'
        file = open(cmd_file,'w')
        file.write(str)
        file.close()
        cmd_str = ' -commandfile ' + cmd_file
        invokeTool(quote(HA_SET[0]), cmd_str)

    #verify existence of system hrh files
    def forcedheaders(self, str, dirs):
        """Return the forced-header file for SDK version `str`.

        str  -- SDK version key (e.g. "5.0"); NOTE(review): shadows the
                builtin name `str`
        dirs -- ';'-joined (possibly quoted) system include directories

        For 5.0 the plain forced_9.4.h is used only when a
        variant\symbian_os_v9.4.hrh exists under one of `dirs`; otherwise
        the 5.0v2 variant is returned.  Returns '' when no entry is known.
        """
        if "5.0" == str:
            dirs = unquote(dirs).split(';')
            for path in dirs:
                checkpath = os.path.normpath( unquote(path) + os.sep + "variant" + os.sep + "symbian_os_v9.4.hrh" )
                if os.path.exists( checkpath ):
                    if forcedheadersdata.has_key(str):
                        return forcedheadersdata[str]
            # hrh not found anywhere: fall back to the v2 forced header
            if forcedheadersdata.has_key("5.0v2"):
                return forcedheadersdata["5.0v2"]
        else:
            if forcedheadersdata.has_key(str):
                return forcedheadersdata[str]
        return ''
|
|
|
500 |
|
|
|
501 |
#-------------------------Library Analysis-------------------------------------------
|
|
|
502 |
class LibraryAnalyser:
|
|
|
503 |
args = {}
|
|
|
504 |
def __init__(self, ip_data, set_list, reportid):
|
|
|
505 |
#'args' defines the parametrs required by HA, 'optargs' defines optional ones
|
|
|
506 |
self.args = { "TOOLCHAIN":[], "TOOLCHAIN_PATH":[], "BASELINE_NAME":[], "CURRENT_NAME":[], "BASELINE_DIR":[], "CURRENT_DIR":[],
|
|
|
507 |
"REPORT_FILE":[], "CFILT":[], "TEMP":[], "SET":[] }
|
|
|
508 |
|
|
|
509 |
#validate SDK versions
|
|
|
510 |
validateSDKVersion(ip_data["BASELINE_SDK_S60_VERSION"],ip_data["CURRENT_SDK_S60_VERSION"])
|
|
|
511 |
|
|
|
512 |
# Validate the user input tool chain
|
|
|
513 |
if getdata( ip_data, "TOOLCHAIN"):
|
|
|
514 |
validChain = False
|
|
|
515 |
for i in tool_chain:
|
|
|
516 |
if ( i == ip_data["TOOLCHAIN"].lower()):
|
|
|
517 |
validChain = True
|
|
|
518 |
break
|
|
|
519 |
if validChain == False:
|
|
|
520 |
raise InputError(["confIP", "TOOLCHAIN\n", False])
|
|
|
521 |
|
|
|
522 |
#Validate the build target provided
|
|
|
523 |
if getdata( ip_data, "BASELINE_BUILDTARGET"):
|
|
|
524 |
validateBulidTarget(ip_data["BASELINE_BUILDTARGET"],True)
|
|
|
525 |
|
|
|
526 |
if getdata( ip_data, "CURRENT_BUILDTARGET"):
|
|
|
527 |
validateBulidTarget(ip_data["CURRENT_BUILDTARGET"],False)
|
|
|
528 |
|
|
|
529 |
if getdata( ip_data, "TOOLCHAIN"):
|
|
|
530 |
self.args["TOOLCHAIN"] = [ ip_data["TOOLCHAIN"].upper() ]
|
|
|
531 |
else:
|
|
|
532 |
self.args["TOOLCHAIN"] = [ 'GCCE' ]
|
|
|
533 |
|
|
|
534 |
if getdata( ip_data, "TOOLCHAIN_PATH"):
|
|
|
535 |
self.args["TOOLCHAIN_PATH"] = [ '-tools', quotep( ip_data["TOOLCHAIN_PATH"] )]
|
|
|
536 |
else:
|
|
|
537 |
del self.args["TOOLCHAIN_PATH"]
|
|
|
538 |
|
|
|
539 |
if not getdata( ip_data, "BASELINE_NAME"):
|
|
|
540 |
raise InputError(["confMP", "baseline analysis directory missing" + os.linesep, False])
|
|
|
541 |
self.args["BASELINE_NAME"] = [ '-baselineversion', quote( ip_data["BASELINE_NAME"] )]
|
|
|
542 |
if not getdata( ip_data, "CURRENT_NAME"):
|
|
|
543 |
raise InputError(["confMP", "current analysis directory missing" + os.linesep, False])
|
|
|
544 |
self.args["CURRENT_NAME"] = [ '-currentversion', quote( ip_data["CURRENT_NAME"] )]
|
|
|
545 |
|
|
|
546 |
|
|
|
547 |
#Check if baseline import library is present, import dll should also be present and vice versa.
|
|
|
548 |
libLength = 0
|
|
|
549 |
dllLength = 0
|
|
|
550 |
|
|
|
551 |
if getdata( ip_data, "BASELINE_IMPORTLIBRARIES"):
|
|
|
552 |
libLength = len(ip_data["BASELINE_IMPORTLIBRARIES"])
|
|
|
553 |
|
|
|
554 |
if getdata( ip_data, "BASELINE_IMPORTDLLS"):
|
|
|
555 |
dllLength = len(ip_data["BASELINE_IMPORTDLLS"])
|
|
|
556 |
|
|
|
557 |
if( libLength > 0 and dllLength <= 0 ):
|
|
|
558 |
raise InputError(["confMP", "BASELINE_IMPORTDLLS Path missing" + os.linesep, False])
|
|
|
559 |
if( libLength <= 0 and dllLength > 0 ):
|
|
|
560 |
raise InputError(["confMP", "BASELINE_IMPORTLIBRARIES Path missing" + os.linesep, False])
|
|
|
561 |
|
|
|
562 |
#Check if current import library is present, import dll should also be present and vice versa.
|
|
|
563 |
libLength = 0
|
|
|
564 |
dllLength = 0
|
|
|
565 |
|
|
|
566 |
if getdata( ip_data, "CURRENT_IMPORTLIBRARIES"):
|
|
|
567 |
libLength = len(ip_data["CURRENT_IMPORTLIBRARIES"])
|
|
|
568 |
|
|
|
569 |
if getdata( ip_data, "CURRENT_IMPORTDLLS"):
|
|
|
570 |
dllLength = len(ip_data["CURRENT_IMPORTDLLS"])
|
|
|
571 |
|
|
|
572 |
if( libLength > 0 and dllLength <= 0 ):
|
|
|
573 |
raise InputError(["confMP", "CURRENT_IMPORTDLLS missing" + os.linesep, False])
|
|
|
574 |
if( libLength <= 0 and dllLength > 0 ):
|
|
|
575 |
raise InputError(["confMP", "CURRENT_IMPORTLIBRARIES missing" + os.linesep, False])
|
|
|
576 |
|
|
|
577 |
|
|
|
578 |
if not getdata( ip_data, "BASELINE_SDK_DIR"):
|
|
|
579 |
raise InputError(["confMP", "baseline importlib directory missing" + os.linesep, False])
|
|
|
580 |
if not getdata( ip_data, "CURRENT_SDK_DIR"):
|
|
|
581 |
raise InputError(["confMP", "current importlib directory missing" + os.linesep, False])
|
|
|
582 |
|
|
|
583 |
self.args["TEMP"] = ["-temp", quotep( ip_data["TEMP"] )]
|
|
|
584 |
|
|
|
585 |
if getdata( ip_data, "BASELINE_BUILDTYPE"):
|
|
|
586 |
validateBulidType(ip_data["BASELINE_BUILDTYPE"],True)
|
|
|
587 |
basebldtype = ip_data["BASELINE_BUILDTYPE"]
|
|
|
588 |
else:
|
|
|
589 |
basebldtype = 'urel'
|
|
|
590 |
|
|
|
591 |
if getdata( ip_data, "CURRENT_BUILDTYPE"):
|
|
|
592 |
validateBulidType(ip_data["CURRENT_BUILDTYPE"],False)
|
|
|
593 |
curbldtype = ip_data["CURRENT_BUILDTYPE"]
|
|
|
594 |
else:
|
|
|
595 |
curbldtype = 'urel'
|
|
|
596 |
|
|
|
597 |
if basebldtype == "" and curbldtype == "":
|
|
|
598 |
basebldtype = 'urel'
|
|
|
599 |
curbldtype = 'urel'
|
|
|
600 |
else:
|
|
|
601 |
if basebldtype == "":
|
|
|
602 |
basebldtype = curbldtype
|
|
|
603 |
else:
|
|
|
604 |
curbldtype = basebldtype
|
|
|
605 |
|
|
|
606 |
if getdata( dllXMLdata, ip_data["BASELINE_SDK_S60_VERSION"] ):
|
|
|
607 |
baseDlldata = dllXMLdata[ip_data["BASELINE_SDK_S60_VERSION"]]
|
|
|
608 |
if getdata( dllXMLdata, ip_data["CURRENT_SDK_S60_VERSION"] ):
|
|
|
609 |
currDlldata = dllXMLdata[ip_data["CURRENT_SDK_S60_VERSION"]]
|
|
|
610 |
|
|
|
611 |
dbasebuild = GetBuildTarget(ip_data["BASELINE_SDK_DIR"],validate(baseDlldata),ip_data["TEMP"],basebldtype)
|
|
|
612 |
dcurrentbuild = GetBuildTarget(ip_data["CURRENT_SDK_DIR"],validate(currDlldata),ip_data["TEMP"],curbldtype)
|
|
|
613 |
|
|
|
614 |
if getdata( ip_data, "BASELINE_BUILDTARGET"):
|
|
|
615 |
basebuild = ip_data["BASELINE_BUILDTARGET"]
|
|
|
616 |
else:
|
|
|
617 |
basebuild = dbasebuild
|
|
|
618 |
|
|
|
619 |
if getdata( ip_data, "CURRENT_BUILDTARGET"):
|
|
|
620 |
currentbuild = ip_data["CURRENT_BUILDTARGET"]
|
|
|
621 |
else:
|
|
|
622 |
currentbuild = dcurrentbuild
|
|
|
623 |
|
|
|
624 |
if basebuild == "" and currentbuild == "":
|
|
|
625 |
basebuild = 'armv5'
|
|
|
626 |
currentbuild = 'armv5'
|
|
|
627 |
else:
|
|
|
628 |
if basebuild == "":
|
|
|
629 |
basebuild = currentbuild
|
|
|
630 |
if currentbuild == "":
|
|
|
631 |
currentbuild = basebuild
|
|
|
632 |
|
|
|
633 |
libBasetmp = []
|
|
|
634 |
libtmp = []
|
|
|
635 |
for target in basebuild.split(';'):
|
|
|
636 |
if(ip_data["BASELINE_SDK_DIR"] == os.sep):
|
|
|
637 |
libBasetmp.append(os.sep + 'epoc32' + os.sep + 'release' + os.sep + target + os.sep + 'lib')
|
|
|
638 |
else:
|
|
|
639 |
libBasetmp.append(ip_data["BASELINE_SDK_DIR"] + os.sep + 'epoc32' + os.sep + 'release' + os.sep + target + os.sep + 'lib')
|
|
|
640 |
|
|
|
641 |
if getdata( ip_data, "BASELINE_IMPORTLIBRARIES"):
|
|
|
642 |
if(ip_data["BASELINE_IMPORTLIBRARIES"] == os.sep):
|
|
|
643 |
for path in libBasetmp:
|
|
|
644 |
libtmp.append(validate(path))
|
|
|
645 |
else:
|
|
|
646 |
for i in ip_data["BASELINE_IMPORTLIBRARIES"].split(';'):
|
|
|
647 |
if not os.path.exists(i):
|
|
|
648 |
if(ip_data["BASELINE_SDK_DIR"] == os.sep):
|
|
|
649 |
libtmp.append(validate( os.sep + 'epoc32' + os.sep + 'release' + os.sep + i))
|
|
|
650 |
else:
|
|
|
651 |
libtmp.append(validate( ip_data["BASELINE_SDK_DIR"] + os.sep + 'epoc32' + os.sep + 'release' + os.sep + i ))
|
|
|
652 |
else:
|
|
|
653 |
libtmp.append(validate(i))
|
|
|
654 |
else:
|
|
|
655 |
for path in libBasetmp:
|
|
|
656 |
libtmp.append(validate(path))
|
|
|
657 |
|
|
|
658 |
dllBasetmp = []
|
|
|
659 |
dlltmp = []
|
|
|
660 |
if dbasebuild == "":
|
|
|
661 |
dllBasetmp.append(baseDlldata)
|
|
|
662 |
else:
|
|
|
663 |
for target in basebuild.split(';'):
|
|
|
664 |
if(ip_data["BASELINE_SDK_DIR"] == os.sep):
|
|
|
665 |
dllBasetmp.append(os.sep + 'epoc32' + os.sep + 'release' + os.sep + target + os.sep + basebldtype)
|
|
|
666 |
else:
|
|
|
667 |
dllBasetmp.append(ip_data["BASELINE_SDK_DIR"] + os.sep + 'epoc32' + os.sep + 'release' + os.sep + target + os.sep + basebldtype)
|
|
|
668 |
|
|
|
669 |
if getdata( ip_data, "BASELINE_IMPORTDLLS"):
|
|
|
670 |
if(ip_data["BASELINE_IMPORTDLLS"] == os.sep):
|
|
|
671 |
for path in dllBasetmp:
|
|
|
672 |
dlltmp.append(validate(path))
|
|
|
673 |
else:
|
|
|
674 |
for i in ip_data["BASELINE_IMPORTDLLS"].split(';'):
|
|
|
675 |
if not os.path.exists(i):
|
|
|
676 |
if(ip_data["BASELINE_SDK_DIR"] == os.sep):
|
|
|
677 |
dlltmp.append(validate( os.sep + 'epoc32' + os.sep + 'release' + os.sep + i))
|
|
|
678 |
else:
|
|
|
679 |
dlltmp.append(validate( ip_data["BASELINE_SDK_DIR"] + os.sep + 'epoc32' + os.sep + 'release' + os.sep + i ))
|
|
|
680 |
else:
|
|
|
681 |
dlltmp.append(validate(i))
|
|
|
682 |
else:
|
|
|
683 |
for path in dllBasetmp:
|
|
|
684 |
dlltmp.append(validate(path))
|
|
|
685 |
|
|
|
686 |
self.args["BASELINE_DIR"] = ["-baselinelibdir", ';'.join(["%s" % quote(i) for i in libtmp])]
|
|
|
687 |
self.args["BASELINE_DLL_DIR"] = ["-baselinedlldir", ';'.join(["%s" % quote(i) for i in dlltmp])]
|
|
|
688 |
|
|
|
689 |
libCurrTmp = []
|
|
|
690 |
libtmp = []
|
|
|
691 |
for target in currentbuild.split(';'):
|
|
|
692 |
if(ip_data["CURRENT_SDK_DIR"] == os.sep):
|
|
|
693 |
libCurrTmp.append(os.sep + 'epoc32' + os.sep + 'release' + os.sep + target + os.sep + 'lib')
|
|
|
694 |
else:
|
|
|
695 |
libCurrTmp.append(ip_data["CURRENT_SDK_DIR"] + os.sep + 'epoc32' + os.sep + 'release' + os.sep + target + os.sep + 'lib')
|
|
|
696 |
if getdata( ip_data, "CURRENT_IMPORTLIBRARIES"):
|
|
|
697 |
if(ip_data["CURRENT_IMPORTLIBRARIES"] == os.sep):
|
|
|
698 |
for target in libCurrTmp:
|
|
|
699 |
libtmp.append(validate(target))
|
|
|
700 |
else:
|
|
|
701 |
for i in ip_data["CURRENT_IMPORTLIBRARIES"].split(';'):
|
|
|
702 |
if not os.path.exists(i):
|
|
|
703 |
if(ip_data["CURRENT_SDK_DIR"] == os.sep):
|
|
|
704 |
libtmp.append(validate( os.sep + 'epoc32' + os.sep + 'release' + os.sep + i))
|
|
|
705 |
else:
|
|
|
706 |
libtmp.append(validate( ip_data["CURRENT_SDK_DIR"] + os.sep + 'epoc32' + os.sep + 'release' + os.sep + i ))
|
|
|
707 |
else:
|
|
|
708 |
libtmp.append(validate(i))
|
|
|
709 |
else:
|
|
|
710 |
for target in libCurrTmp:
|
|
|
711 |
libtmp.append(validate(target))
|
|
|
712 |
|
|
|
713 |
#Get baseline and current dll path and provide for analysis
|
|
|
714 |
dllCurrtmp = []
|
|
|
715 |
dlltmp = []
|
|
|
716 |
if dcurrentbuild == "":
|
|
|
717 |
dllCurrtmp.append(currDlldata)
|
|
|
718 |
else:
|
|
|
719 |
for target in currentbuild.split(';'):
|
|
|
720 |
if(ip_data["CURRENT_SDK_DIR"] == os.sep):
|
|
|
721 |
dllCurrtmp.append(os.sep + 'epoc32' + os.sep + 'release' + os.sep + target + os.sep + curbldtype)
|
|
|
722 |
else:
|
|
|
723 |
dllCurrtmp.append(ip_data["CURRENT_SDK_DIR"] + os.sep + 'epoc32' + os.sep + 'release' + os.sep + target + os.sep + curbldtype)
|
|
|
724 |
|
|
|
725 |
if getdata( ip_data, "CURRENT_IMPORTDLLS"):
|
|
|
726 |
if(ip_data["CURRENT_IMPORTDLLS"] == os.sep):
|
|
|
727 |
for target in dllCurrtmp:
|
|
|
728 |
dlltmp.append(validate(target))
|
|
|
729 |
else:
|
|
|
730 |
for i in ip_data["CURRENT_IMPORTDLLS"].split(';'):
|
|
|
731 |
if not os.path.exists(i):
|
|
|
732 |
if(ip_data["CURRENT_SDK_DIR"] == os.sep):
|
|
|
733 |
dlltmp.append(validate( os.sep + 'epoc32' + os.sep + 'release' + os.sep + i))
|
|
|
734 |
else:
|
|
|
735 |
dlltmp.append(validate( ip_data["CURRENT_SDK_DIR"] + os.sep + 'epoc32' + os.sep + 'release' + os.sep + i ))
|
|
|
736 |
else:
|
|
|
737 |
dlltmp.append(validate(i))
|
|
|
738 |
else:
|
|
|
739 |
for target in dllCurrtmp:
|
|
|
740 |
dlltmp.append(validate(target))
|
|
|
741 |
|
|
|
742 |
self.args["CURRENT_DIR"] = ["-currentlibdir", ';'.join(["%s" % quote(i) for i in libtmp])]
|
|
|
743 |
self.args["CURRENT_DLL_DIR"] = ["-currentdlldir", ';'.join(["%s" % quote(i) for i in dlltmp])]
|
|
|
744 |
|
|
|
745 |
# get the report file name
|
|
|
746 |
if not getdata( ip_data, "REPORT_FILE_LIBRARIES"):
|
|
|
747 |
if not reportid:
|
|
|
748 |
ip_data["REPORT_FILE_LIBRARIES"] = REPORT_PATH + LIBRARY_REPORT
|
|
|
749 |
else:
|
|
|
750 |
ip_data["REPORT_FILE_LIBRARIES"] = REPORT_PATH + LIBRARY_REPORT +'_'+ reportid
|
|
|
751 |
else:
|
|
|
752 |
[head, tail] = os.path.split(ip_data["REPORT_FILE_LIBRARIES"])
|
|
|
753 |
if tail != '':
|
|
|
754 |
if reportid:
|
|
|
755 |
ip_data["REPORT_FILE_LIBRARIES"] = ip_data["REPORT_FILE_LIBRARIES"] +'_'+ reportid
|
|
|
756 |
else:
|
|
|
757 |
if reportid:
|
|
|
758 |
ip_data["REPORT_FILE_LIBRARIES"] = ip_data["REPORT_FILE_LIBRARIES"] + LIBRARY_REPORT +'_'+ reportid
|
|
|
759 |
else:
|
|
|
760 |
ip_data["REPORT_FILE_LIBRARIES"] = ip_data["REPORT_FILE_LIBRARIES"] + LIBRARY_REPORT
|
|
|
761 |
|
|
|
762 |
ip_data["REPORT_FILE_LIBRARIES"] = ip_data["REPORT_FILE_LIBRARIES"] + ".xml"
|
|
|
763 |
self.args["REPORT_FILE"] = ["-reportfile", quote( ip_data["REPORT_FILE_LIBRARIES"] )]
|
|
|
764 |
|
|
|
765 |
if 'rvct' == self.args["TOOLCHAIN"][0].lower():
|
|
|
766 |
self.args["CFILT"] = [ '-cfilt', quotep( LA_SET[1] )]
|
|
|
767 |
else:
|
|
|
768 |
del self.args["CFILT"]
|
|
|
769 |
|
|
|
770 |
str = ''
|
|
|
771 |
if 's' == set_list[0]:
|
|
|
772 |
str = unquote(self.args["TEMP"][1]) + os.sep + 'la.txt'
|
|
|
773 |
file = open( str, 'w')
|
|
|
774 |
file.write( set_list[1] )
|
|
|
775 |
file.close()
|
|
|
776 |
elif 'm' == set_list[0]:
|
|
|
777 |
str = os.path.abspath( set_list[1] )
|
|
|
778 |
if not os.path.exists(str):
|
|
|
779 |
raise InputError(["confIP", "Input list file unavailable" + os.linesep, False])
|
|
|
780 |
self.args["SET"] = ["-set", quote(str)]
|
|
|
781 |
if 'a' == set_list[0]:
|
|
|
782 |
del self.args["SET"]
|
|
|
783 |
|
|
|
784 |
#'stringize' all params and invoke the tool
|
|
|
785 |
    def run(self):
        """Flatten the collected argument map into one command string and
        launch the library analyser executable (first entry of LA_SET)."""
        # Abort up front if any required tool file from LA_SET is missing.
        for i in LA_SET:
            if not os.path.exists(i):
                print os.linesep + i + " does not exist. Please reinstall."
                sys.exit(1)
        # The toolchain name must be the first token on the command line;
        # delete it from the map so the generic loop below does not repeat it.
        # NOTE: 'str' shadows the builtin -- left as-is in this
        # documentation-only pass.
        str = ' ' + self.args["TOOLCHAIN"][0]
        del self.args["TOOLCHAIN"]
        # Append every remaining argument fragment in dictionary order
        # (Python 2 iterator protocol: itervalues()/next()).
        it_tmp = self.args.itervalues()
        for i in xrange(0, len(self.args), 1):
            for val in it_tmp.next():
                str = str + ' ' + val
        invokeTool(quote(LA_SET[0]), str)
|
|
|
797 |
|
|
|
798 |
#-------------------------Report Filter-------------------------------------------
|
|
|
799 |
class ReportFilter:
    """Removes known issues from analyser reports by invoking the external
    BCFilter tool once per (report file, known-issues file) pair."""
    # Class-level defaults only; __init__ assigns fresh per-instance values.
    args = {}
    issueargs = {}
    filterfiles = False

    def __init__(self, ip_data, cmds, reportid):
        """Assemble the report, filtered-output and known-issues file lists.

        ip_data  -- configuration dictionary; FILTER_FILE_* entries are
                    updated in place with the final '.xml' names
        cmds     -- command descriptors; 'h'/'l' select the analyser reports,
                    'f' selects the standalone filter file lists
        reportid -- optional id appended to generated file names
        Raises InputError for missing report/issues files or bad server paths.
        """
        # Bug fix: issueargs/filterfiles used to be mutated on the class
        # object, leaking state between the several ReportFilter instances
        # that main() creates during one run.
        self.issueargs = {}
        self.filterfiles = False
        reportlist = []
        outputlist = []
        issuelist = []
        #'args' defines the parametrs required by HA, 'optargs' defines optional ones
        self.args = { "ISSUES_FILE":[], "REPORT_FILE":[], "OUTPUT_FILE":[] }

        for val in cmds:
            if 'h' in val:
                # header analysis report and its filtered-output counterpart
                if getdata( ip_data, "REPORT_FILE_HEADERS"):
                    reportlist.append( ip_data["REPORT_FILE_HEADERS"] )
                if getdata( ip_data, "FILTER_FILE_HEADERS"):
                    # append the report id (when given) and the .xml extension
                    if reportid:
                        ip_data["FILTER_FILE_HEADERS"] = ip_data["FILTER_FILE_HEADERS"] +'_'+ reportid + ".xml"
                    else:
                        ip_data["FILTER_FILE_HEADERS"] = ip_data["FILTER_FILE_HEADERS"] + ".xml"
                    outputlist.append( getdata(ip_data, "FILTER_FILE_HEADERS") )
            if 'l' in val:
                print("")
                # library analysis report and its filtered-output counterpart
                if getdata( ip_data, "REPORT_FILE_LIBRARIES"):
                    reportlist.append( ip_data["REPORT_FILE_LIBRARIES"] )
                if getdata( ip_data, "FILTER_FILE_LIBRARIES"):
                    if reportid:
                        ip_data["FILTER_FILE_LIBRARIES"] = ip_data["FILTER_FILE_LIBRARIES"] +'_'+ reportid + ".xml"
                    else:
                        ip_data["FILTER_FILE_LIBRARIES"] = ip_data["FILTER_FILE_LIBRARIES"] + ".xml"
                    outputlist.append( ip_data["FILTER_FILE_LIBRARIES"] )
            if 'f' in val:
                # standalone filter run over the configured file lists
                self.filterfiles = True
                rtmp = []
                otmp = []
                if getdata( ip_data, "REPORT_FILE_FILTER"):
                    rtmp = ip_data["REPORT_FILE_FILTER"].split(';')
                    for i in range(0, len(rtmp)):
                        if reportid:
                            rtmp[i] = rtmp[i] +'_'+ reportid + ".xml"
                        else:
                            rtmp[i] = rtmp[i] + ".xml"
                    reportlist.extend( rtmp )
                if getdata( ip_data, "OUTPUT_FILE_FILTER"):
                    otmp = ip_data["OUTPUT_FILE_FILTER"].split(';')
                    for i in range(0, len(otmp)):
                        if otmp[i] != "":
                            if reportid:
                                otmp[i] = otmp[i] +'_'+ reportid + ".xml"
                            else:
                                otmp[i] = otmp[i] + ".xml"
                    outputlist.extend( otmp )

        # every input report must already exist on disk
        for term in reportlist:
            if not os.path.exists( term ):
                raise InputError( ["confMP", "report file missing -- " + term, False] )
        self.args["REPORT_FILE"] = reportlist

        # create output directories up front; '' entries mean in-place filtering
        for term in outputlist:
            if '' != term:
                if not os.path.exists( os.path.dirname(term) ):
                    os.makedirs( os.path.dirname(term) )
        self.args["OUTPUT_FILE"] = outputlist

        # resolve the known-issues files: default, local paths, or server URLs
        if not getdata( ip_data, "ISSUES_FILE"):
            issuelist.append(DEFAULT_ISSUES_FILE)
        else:
            issuelist = ip_data["ISSUES_FILE"].split(';')
        for term in issuelist:
            if term.startswith( SERVER_PATH ): #server path used in issues set
                i = issuelist.index(term)
                localfile = ip_data["TEMP"] + os.sep + "tmpissues.xml"
                tmp = os.path.splitext(term)
                if '' == tmp[1]:
                    # no extension -> directory URL; query the server metadata
                    # for the latest issues file inside it
                    term = getlatestfile( term )
                try:
                    urllib.urlretrieve( term, localfile )
                except IOError:
                    raise InputError( ["confMPath", "Error in knownissues server path " + term + os.linesep, False] )
                # Bug fix: read and close before raising -- the handle used to
                # leak when the soft-404 check below fired.
                f = open(localfile)
                content = f.read()
                f.close()
                if content.find("404 Not Found") != -1:
                    raise InputError( ["confMPath", "Error in knownissues server path " + term + os.linesep, False] )
                issuelist[i] = localfile
                # remember the original URL so run() can display it
                self.issueargs[localfile] = term
            elif os.path.exists( validate(term) ):
                issuelist[issuelist.index(term)] = validate(term)
            elif not os.path.exists( validate(term) ):
                # NOTE(review): validate() itself raises for nonexistent
                # paths, so this branch appears unreachable; kept for safety.
                raise InputError( ["confMP", "issues file " + term + " missing" + os.linesep, False] )
        self.args["ISSUES_FILE"] = issuelist

    #'stringize' all params and invoke the tool
    def run(self):
        """Invoke the filter tool for every report/issues-file combination,
        printing a progress banner before each invocation."""
        report = ''
        output = ''
        issue = ''
        num = 1
        # abort if any required filter-tool file from BCFILTER_SET is missing
        for i in BCFILTER_SET:
            if not os.path.exists(i):
                print(os.linesep + i + " does not exist. Please reinstall.")
                sys.exit(1)
        total = len(self.args["REPORT_FILE"]) * len(self.args["ISSUES_FILE"])
        for i in range(0, len(self.args["REPORT_FILE"]), 1):
            for j in range(0, len(self.args["ISSUES_FILE"]), 1):
                report = self.args["REPORT_FILE"][i]
                if i < len(self.args["OUTPUT_FILE"]):
                    output = self.args["OUTPUT_FILE"][i]
                else:
                    output = ""
                issue = self.args["ISSUES_FILE"][j]
                # renamed from 'str' to stop shadowing the builtin
                cmdstr = " " + quote(report) + " " + quote(issue)
                if getdata( self.issueargs, issue):
                    # display the original server URL, not the temp download
                    issue = self.issueargs[issue]
                if self.filterfiles:
                    # repr() replaces the deprecated backtick syntax
                    dispstr = "\nprocessing (" + repr(num) + "/" + repr(total) +") files >>\nReport File: " + report + "\nKnown Issues File: " + issue + "\nOutput File: "
                else:
                    dispstr = "\nprocessing file >>\nReport File: " + report + "\nKnown Issues File: " + issue + "\nOutput File: "
                if not output:
                    dispstr = dispstr + "None(inplace filtration)"
                else:
                    dispstr = dispstr + output
                    cmdstr = cmdstr + " " + quote(output)
                print(dispstr)
                invokeTool(quote(BCFILTER_SET[0]), cmdstr)
                num = num + 1
|
|
|
924 |
|
|
|
925 |
#----------------------CONFIGURATION PARSER-------------------------------------------
|
|
|
926 |
class ConfigParser:
    """Reads the key=value configuration file into self.data and normalizes
    the report/output file names for the analysis and filter steps."""
    # Class-level defaults; __init__ rebinds self.data, so instances do not
    # share the dictionary.
    reportstr = ''
    data = {}
    #read the config values into a data structure
    def __init__(self, cmd):
        """Parse cmd.conffile; raises InputError on an unrecognized key."""
        # The full set of recognized configuration keys -- anything else in
        # the file is rejected below.
        self.data = {"BASELINE_NAME":'', "BASELINE_SDK_DIR":'', "BASELINE_SDK_S60_VERSION":'', "CURRENT_NAME":'', "CURRENT_SDK_DIR":'', "CURRENT_SDK_S60_VERSION":'', "TEMP":'',
                     "BASELINE_HEADERS":'', "CURRENT_HEADERS":'', "BASELINE_SYSTEMINCLUDEDIR":'', "CURRENT_SYSTEMINCLUDEDIR":'', "BASELINE_FORCED_HEADERS":'', "CURRENT_FORCED_HEADERS":'', "USE_PLATFORM_DATA":'', "RECURSIVE_HEADERS":'',
                     "EXCLUDE_DIR_HEADERS":'', "REPLACE_HEADERS":'', "REPORT_FILE_HEADERS":'', "FILTER_FILE_HEADERS":'', "TOOLCHAIN":'', "TOOLCHAIN_PATH":'', "BASELINE_BUILDTARGET":'',
                     "CURRENT_BUILDTARGET":'',"BASELINE_BUILDTYPE":'',"CURRENT_BUILDTYPE":'', "BASELINE_IMPORTLIBRARIES":'', "CURRENT_IMPORTLIBRARIES":'', "BASELINE_IMPORTDLLS":'', "CURRENT_IMPORTDLLS":'',
                     "REPORT_FILE_LIBRARIES":'', "FILTER_FILE_LIBRARIES":'', "REPORT_FILE_FILTER":'',"OUTPUT_FILE_FILTER":'', "ISSUES_FILE":'', "EXCLUDE_DIR":'', "USE_THREAD":''}
        file = open(cmd.conffile)
        for input in file:
            if not input.startswith("#") and input != '\n': # process non-comment lines
                pair = input.split('=')
                if len(pair) == 1:
                    # key without '=' -> treat value as empty
                    pair.append("")
                if not self.data.has_key(clean(pair[0])):
                    raise InputError(["confIP", clean(pair[0]) + " is not valid" + os.linesep, False])
                # NOTE(review): only pair[1] is kept, so a value containing
                # '=' is truncated at the second '=' -- confirm intended.
                self.data[clean(pair[0])] = clean(pair[1])
        file.close()
        # EXCLUDE_DIR is accepted as an alias for EXCLUDE_DIR_HEADERS; fold
        # it in and drop the alias key.
        if self.data.has_key("EXCLUDE_DIR"):
            if getdata( self.data,"EXCLUDE_DIR"):
                self.data["EXCLUDE_DIR_HEADERS"] = self.data["EXCLUDE_DIR"]
            del self.data["EXCLUDE_DIR"]
        self.reportstr = cmd.reportstr

    #updates the report path/file names, creates dirs if they do not exist
    def __updatereport(self,key):
        """Normalize each ';'-separated path under *key*: resolve the
        directory (creating it when missing) and strip the file extension.
        An empty list entry raises InputError."""
        tmp = []
        if getdata( self.data, key ):
            #if field is empty raise an error
            for term in self.data[key].split(';'):
                if '' != term:
                    [head, tail] = os.path.split(term)
                    try:
                        term = validate(head) + os.sep + os.path.splitext(tail)[0]
                    except InputError, e:
                        # directory missing: create it.
                        # NOTE(review): 'term' is left unmodified in this
                        # branch, so the original path (extension included)
                        # is appended -- confirm intended.
                        os.makedirs(os.path.abspath(head))
                    tmp.append(term)
                else:
                    raise InputError(["confIS", key + " syntax incorrect" + os.linesep, False])
            #assign the updated report file str back to dict
            self.data[key] = ';'.join([ "%s" % i for i in tmp])

    #updates the filter output path/file names, creates dirs if they do not exist
    def __updateoutput(self, key):
        """Like __updatereport, but empty entries are silently skipped
        (an empty output file means in-place filtering)."""
        tmp = []
        if getdata( self.data, key ):
            #if field is empty raise an error
            for term in self.data[key].split(';'):
                if '' != term:
                    [head, tail] = os.path.split(term)
                    try:
                        term = validate(head) + os.sep + os.path.splitext(tail)[0]
                    except InputError, e:
                        # directory missing: create it (see note above)
                        os.makedirs(os.path.abspath(head))
                    tmp.append(term)
            #assign the updated report file str back to dict
            self.data[key] = ';'.join(["%s" % i for i in tmp])

    #update necessary values
    def parse(self, cmds):
        """Normalize the file-name entries relevant to *cmds* and make sure
        a TEMP working directory exists; returns the data dictionary."""
        for val in cmds:
            if 'h' in val:
                self.__updatereport("REPORT_FILE_HEADERS")
                self.__updateoutput("FILTER_FILE_HEADERS")
            if 'l' in val:
                self.__updatereport("REPORT_FILE_LIBRARIES")
                self.__updateoutput("FILTER_FILE_LIBRARIES")
            if 'f' in val:
                self.__updatereport("REPORT_FILE_FILTER")
                self.__updateoutput("OUTPUT_FILE_FILTER")

        # fall back to <tooldir>/temp when no TEMP was configured
        if not getdata( self.data, "TEMP"):
            self.data["TEMP"] = TOOL_DIR + "temp"
        if not os.path.exists(self.data["TEMP"]):
            os.makedirs(self.data["TEMP"])
        return self.data
|
|
|
1004 |
|
|
|
1005 |
#-------------------------Utility Analysis-------------------------------------------
|
|
|
1006 |
class CmdLineParser:
    """Parses the command line: a config file path, then -h/-l/-f command
    options with optional per-command input files and a trailing report id."""
    #these are data interfaces that are exposed (class-level defaults only;
    #fresh per-instance values are assigned in __init__)
    cmd = []
    conffile = ''
    reportstr = ''

    def __init__(self, argv):
        """Parse *argv* (consumed destructively); raises InputError on any
        malformed input, and exits after printing for -v/-dv."""
        # Bug fix: self.cmd used to alias the shared class-level list (it was
        # only ever mutated in place), so a second CmdLineParser instance
        # inherited the first instance's commands.
        self.cmd = []
        self.conffile = ''
        self.reportstr = ''
        self.__check_help(argv)
        self.__check_carbide(argv)
        self.__check_versions(argv)
        self.__parsecmd(argv)

    #create a list of commands to be executed
    def __addcmd(self, str):
        """Validate one option body (without the leading '-') and queue it.

        'h'/'l' commands (with a 's'/'m'/'a' mode suffix) are inserted at the
        front so analyses run before filtering; 'f' is appended at the end.
        """
        if (not 0 < len(str) < 3) or (len(str) == 1 and str != 'f'):
            raise InputError(["cmdlineIP", "Parameter -" + str + " incorrect" + os.linesep, True])
        if (str[0] in ['h', 'l']) and (str[1] in ['s', 'm', 'a']):
            self.cmd.insert(0, [ str[0], str[1] ])
        elif str[0] in ['f']:
            self.cmd.append([ str[0], "" ])
        else:
            raise InputError(["cmdlineIP", "Parameter - " + str + " incorrect" + os.linesep, True])

    #check if input parameter is a 'help' command
    def __check_help(self, argv):
        """Raise the help exception if any recognized help flag is present."""
        for term in ["-?", "--help", "-h", "/h", "/?"]:
            if term in argv:
                raise InputError( ["cmdhelp", "", True] )

    #check if any tool version inputs are required
    def __check_versions(self, argv):
        """Print the tool (-v) or data (-dv) version and exit; these flags
        cannot be combined with any other argument."""
        errstr = "Cannot combine version parameter with others"+ os.linesep +"Please reissue command without '-v'"
        if "-v" in argv:
            sys.stdout.write( gettoolversion() )
            if len(argv) > 1:
                raise InputError(["cmdlineIP", errstr, False] )
            sys.exit(0)
        if "-dv" in argv:
            sys.stdout.write( getdataversion() )
            if len(argv) > 1:
                raise InputError(["cmdlineIP", errstr, False] )
            sys.exit(0)

    #check if interface has been invoked from carbide
    def __check_carbide(self, argv):
        """Set the global CARBIDE_PLUGIN flag and strip '-c' from argv."""
        global CARBIDE_PLUGIN
        if "-c" in argv:
            CARBIDE_PLUGIN = True
            argv.remove("-c")

    #check if input is proper else raise exception
    def __parsecmd(self, argv):
        """Consume argv: config file first, then option terms, then each
        command's input file, and at most one trailing report id."""
        #if no parameters present
        if not len(argv):
            raise InputError(["cmdlineMP", "None specified" + os.linesep, True])
        #check if config file exists
        if os.path.exists( os.path.abspath(argv[0]) ):
            self.conffile = os.path.abspath(argv[0])
            argv.pop(0)
        else:
            raise InputError(["cmdlineMP", "Config file not found" + os.linesep, True])
        #parse the -xx type inputs, add them to command string
        for term in argv:
            if term[0] == "-":
                self.__addcmd(term[1:])

        #append additional args to cmd string
        i = 0
        for val in self.cmd:
            param = "-" + val[0] + val[1]
            #filter and 'a'- all option do not need any additional input
            if ('f' != val[0]) and ('a' != val[1]):
                nxtterm = argv[ argv.index(param) + 1 ]
                #if next term is not an option string, append it as filename, to the command
                if nxtterm[0] != "-":
                    self.cmd[i].append(nxtterm)
                    argv.remove(nxtterm)
                else:
                    raise InputError(["cmdlineMP", "Input error -" + self.cmd[i][0] + self.cmd[i][1] + os.linesep, True])
            argv.remove(param)
            i = i + 1

        #if any additional param are defined, it is the report string
        if len(argv) == 1:
            self.reportstr = os.path.splitext(argv[0])[0]
        #if more then 1 additonal param, then raise error condition
        elif len(argv) > 1:
            raise InputError(["cmdlineIP", "Error in input" + os.linesep, True])
        if not len(self.cmd):
            raise InputError(["cmdlineMP", "No command specified" + os.linesep, True])
|
|
|
1095 |
|
|
|
1096 |
#-------------------------Utility functions-------------------------------------------
|
|
|
1097 |
#cleans the whitespace and trailing '\n', '\t'
|
|
|
1098 |
def clean(str):
    """Strip leading/trailing whitespace -- spaces, tabs and CR/LF -- from *str*."""
    junk = " \t\r\n"
    return str.strip(junk)
|
|
|
1100 |
|
|
|
1101 |
#used to wrap a string argument provided to checkbc in quotes
|
|
|
1102 |
def quote(str):
    """Return *str* wrapped in double quotes, as expected by checkbc."""
    return '"' + str + '"'
|
|
|
1104 |
|
|
|
1105 |
#validate the path provided
|
|
|
1106 |
def validate(str):
    """Resolve *str* to a normalized absolute path.

    Raises InputError when neither the path as given nor its absolute form
    exists on disk.
    """
    if os.path.exists(str):
        return os.path.normpath(os.path.abspath(str))
    resolved = os.path.abspath(str)
    if os.path.exists(resolved):
        return os.path.normpath(resolved)
    raise InputError(["confMPath", "Field "+ str +" is not valid" + os.linesep,False])
|
|
|
1113 |
|
|
|
1114 |
#used to wrap a string argument provided to checkbc in quotes
|
|
|
1115 |
def quotep(str):
    """Validate *str* as an existing path, then return it double-quoted."""
    checked = validate(str)
    return quote(checked)
|
|
|
1117 |
|
|
|
1118 |
#return an unquoted version of input string
|
|
|
1119 |
def unquote(str):
    """Return *str* with one leading and one trailing double quote removed.

    Robustness fix: the original indexed str[0] / str[len(str)-1] directly
    and raised IndexError for '' and for the single character '"'; the
    startswith/endswith checks handle those inputs safely with the same
    result for every other input.
    """
    if str.startswith('"'):
        str = str[1:]
    if str.endswith('"'):
        str = str[:-1]
    return str
|
|
|
1125 |
|
|
|
1126 |
def getdata( mydict, key):
    """Return mydict[key] when the key exists and its value is non-empty,
    otherwise ''.

    Uses the `in` operator instead of the deprecated dict.has_key()
    (removed in Python 3); behavior is unchanged.
    """
    if key in mydict and mydict[key] != '':
        return mydict[key]
    return ''
|
|
|
1131 |
|
|
|
1132 |
#return default build target from Rnd SDK and "" from Public SDK
|
|
|
1133 |
def GetBuildTarget(sdk,dlldata,temp_path,bld_type):
    """Guess the build target (from s60_build_targets) by listing
    <sdk>/epoc32/release/<target>/<bld_type>/*.dll via the platform shell
    and comparing the listing against the dll names in the *dlldata* xml
    file. Returns the first target where more than half of the xml's dlls
    are found, or "" when no target matches."""
    bldtarget = ""
    path = ""
    xmlFile = open (dlldata);
    dllcases = xmlFile.readlines()
    xmlFile.close()

    # scratch files for the directory listing and its error output
    dll_file = temp_path + os.sep + "dll.txt"
    dir_err_file = temp_path + os.sep + "dir_err.txt"
    for target in s60_build_targets:
        # sdk == os.sep means the SDK root is the filesystem root
        if sdk == os.sep:
            path = quote(os.sep+'epoc32'+os.sep+'release'+os.sep+target+os.sep+bld_type+os.sep)
        else:
            path = quote(sdk+os.sep+'epoc32'+os.sep+'release'+os.sep+target+os.sep+bld_type+os.sep)
        # NOTE(review): 'path' already includes surrounding quotes here, so
        # this existence check can never succeed and the branch is a no-op.
        if not os.path.exists(path):
            pass

        # list the dlls with the platform shell, redirecting output and
        # errors into the scratch files
        if os.name =='nt':
            cmd = "dir /b " + path + "*.dll > " + quote(dll_file) + " 2> " + quote (dir_err_file)
        else:
            cmd = "ls --format=single-column " + path + "*.dll > " + quote(dll_file) + " 2> " + quote( dir_err_file)

        os.system(cmd)

        file = open (dll_file)
        cases = file.readlines()
        file.close()
        os.remove(dll_file)
        os.remove(dir_err_file)

        matchFound = 0
        #Get no of dll present in xml file, 2 lines are for </dll_list> tag and each dll has 8 fileds.
        dllCount = 0
        for dll in dllcases:
            temp = dll.find ('<dllname>')
            if temp != -1 :
                dllCount = dllCount + 1
                # extract the text between <dllname> and </dllname>
                exe = dll.lower().split('<dllname>')[1]
                exe = exe.split('</dllname>')[0]
                for en in cases:
                    if (en.lower().find (exe.lower()) != -1) :
                        matchFound = matchFound + 1

        # majority vote: accept this target when more than half of the dlls
        # named in the xml appear in the directory listing
        if(matchFound > (dllCount /2) ):
            bldtarget = target
            break

    return bldtarget
|
|
|
1181 |
|
|
|
1182 |
def validateSDKVersion(baseversion, curversion):
    """Check both SDK version strings (case-insensitively) against the
    supported sdk_version list; raises InputError naming the offending
    side and returns True when both are valid."""
    if baseversion.lower() not in sdk_version:
        raise InputError(["confIP", "Baseline SDK version\n", False])
    if curversion.lower() not in sdk_version:
        raise InputError(["confIP", "Current SDK version\n", False])
    return True
|
|
|
1197 |
|
|
|
1198 |
def validateBulidTarget(bldtarget,baseline):
    """Validate each ';'-separated build target (case-insensitively) against
    s60_build_targets; raises InputError naming the offending configuration
    key for the first unknown target."""
    for target in bldtarget.split(';'):
        if target.lower() not in s60_build_targets:
            if baseline == True:
                raise InputError(["confIP", "BASELINE_BUILDTARGET\n", False])
            else:
                raise InputError(["confIP", "CURRENT_BUILDTARGET\n", False])
|
|
|
1210 |
|
|
|
1211 |
def validateBulidType(buildtype,baseline):
    """Validate that *buildtype* is 'urel' or 'udeb' (case-insensitive);
    raises InputError naming the offending configuration key otherwise."""
    if buildtype.lower() not in ('urel', 'udeb'):
        if baseline == True:
            raise InputError(["confIP", "BASELINE_BUILDTYPE\n", False])
        else:
            raise InputError(["confIP", "CURRENT_BUILDTYPE\n", False])
|
|
|
1223 |
|
|
|
1224 |
|
|
|
1225 |
|
|
|
1226 |
|
|
|
1227 |
|
|
|
1228 |
|
|
|
1229 |
|
|
|
1230 |
|
|
|
1231 |
#---------------------------Other funcs---------------------------------------------
|
|
|
1232 |
def getdataversion():
    """Return DATA_VERSION, the carbide-plugin compatibility data version."""
    return DATA_VERSION
|
|
|
1234 |
|
|
|
1235 |
def gettoolversion():
    """Return the core tools package version together with its release date."""
    return TOOL_VERSION + ' - ' + TOOL_DATE
|
|
|
1237 |
|
|
|
1238 |
#check the metadata file in server path and retrieve the latest file mentioned there
|
|
|
1239 |
def getlatestfile( path ):
    """Fetch <path>/knownissues_metadata from the server and return the full
    URL of the latest known-issues file named on its first line; falls back
    to returning *path* itself when the metadata cannot be fetched or looks
    like an HTML error page."""
    if not path.endswith('/'):
        path = path + '/'
    try:
        file = urllib.urlopen( path + "knownissues_metadata" )
    except IOError:
        # server or metadata file unreachable: keep the original path
        return path
    else:
        # first line of the metadata names the latest issues file
        knfile = (file.readline()).strip(' \t\n\r')
        # a DOCTYPE line means we received an HTML error page, not metadata
        if (knfile.find("DOCTYPE") != -1):
            return path
        return path + knfile
|
|
|
1251 |
|
|
|
1252 |
#all arguments have been assembled, now call the executable
|
|
|
1253 |
#print process-id and exit-code only if invoked form carbide client
|
|
|
1254 |
def invokeTool(exe, args):
    """Run *exe* with the pre-assembled *args* string from EXEC_PATH, wait
    for it to finish, then restore the working directory to TOOL_DIR.

    When invoked from the Carbide client (CARBIDE_PLUGIN is set) the child
    pid and exit code are echoed on stdout for the plugin to parse.
    """
    os.chdir(EXEC_PATH)
    # shell=True except on Windows, where the command string is handed to
    # the process directly -- presumably to avoid cmd.exe quoting issues;
    # TODO confirm.
    val= True
    sys.stderr.write(os.linesep)
    sys.stderr.flush()
    sys.stdout.flush()
    print exe+args
    if os.name == 'nt':
        val = False
    process = subprocess.Popen(exe+args, shell=val)
    if CARBIDE_PLUGIN:
        sys.stdout.write("PID:" + str(process.pid) + os.linesep)
        sys.stdout.flush()
    #wait for current child to complete before executing next one
    exitcode = process.wait()
    if CARBIDE_PLUGIN:
        sys.stdout.write(os.linesep+ os.path.splitext((os.path.split(exe)[1]))[0] +" exitCode:" + str(exitcode) + os.linesep)
        sys.stdout.flush()
    os.chdir(TOOL_DIR)
|
|
|
1273 |
|
|
|
1274 |
#main function which performs the dispatch logic
|
|
|
1275 |
def main(argv):
    """Dispatch entry point: parse the command line and config file, then
    run the requested header ('h') and library ('l') analyses, filtering
    each generated report against the known issues when '-f' was given."""
    try:
        cmd = CmdLineParser(argv) #parse command line input
    except InputError, e:
        ExHandler(e)
    os.chdir(TOOL_DIR) #change to checkbc path, to parse relative paths
    try:
        cfg = ConfigParser(cmd) #takes the commandline parsed input
        args = cfg.parse(cmd.cmd) #create key-value pair of inputs form the config file
    except InputError, e:
        ExHandler(e)

    # '-f' is always queued last by CmdLineParser.__addcmd, so inspecting
    # the final entry tells whether filtering was requested
    isFilterTrue = False
    if 'f' in cmd.cmd[len(cmd.cmd)-1]:
        isFilterTrue = True
        # first run the standalone filter pass on the configured file lists
        Fcmd = []
        Fcmd.append(cmd.cmd[len(cmd.cmd)-1])
        try:
            r = ReportFilter(args, Fcmd, cmd.reportstr)
        except InputError, e:
            ExHandler(e)
        r.run()

    for val in cmd.cmd:
        if 'h' in val: #verify headers
            try:
                h = HeaderAnalyser(args, val[1:], cmd.reportstr)
            except InputError, e:
                ExHandler(e)
            h.run()
            # filter the freshly generated header report as well
            if isFilterTrue:
                HFcmd = []
                HFcmd.append(val)
                try:
                    r = ReportFilter(args, HFcmd, cmd.reportstr)
                except InputError, e:
                    ExHandler(e)
                r.run()

        if 'l' in val: #verify libraries
            try:
                o = LibraryAnalyser(args, val[1:], cmd.reportstr)
            except InputError, e:
                ExHandler(e)
            o.run()
            # filter the freshly generated library report as well
            if isFilterTrue:
                LFcmd = []
                LFcmd.append(val)
                try:
                    r = ReportFilter(args, LFcmd, cmd.reportstr)
                except InputError, e:
                    ExHandler(e)
                r.run()
|
|
|
1331 |
|
|
|
1332 |
|
|
|
1333 |
|
|
|
1334 |
if __name__=="__main__":
    #Check for proper python version and then continue execution
    # NOTE(review): lexicographic string comparison -- fine for the 2.4-2.7
    # interpreters this tool targets, but would misorder e.g. "2.10";
    # confirm acceptable.
    if not "2.4" <= platform.python_version() < "3.0":
        python_error()
    main(sys.argv[1:])
|
|
|
1339 |
|