#
# Copyright (c) 2008, 2009 Nokia Corporation and/or its subsidiary(-ies).
# All rights reserved.
# This component and the accompanying materials are made available
# under the terms of "Eclipse Public License v1.0"
# which accompanies this distribution, and is available
# at the URL "http://www.eclipse.org/legal/epl-v10.html".
#
# Initial Contributors:
# Nokia Corporation - initial contribution.
#
# Contributors:
#
# Description: End-user Interface for Core Tools execution
#

import sys
import os
import re
#while importing, check that all required modules are available, else report an error
try:
    import subprocess
    import platform
    import urllib
    import xml.dom.minidom
except ImportError:
    # report directly here: python_error() is only defined further down in this
    # file and would not exist yet if an import fails at load time
    sys.stdout.write("ERROR: Invalid python version")
    sys.stdout.write("\nPython versions from 2.4 to 3.0 are supported")
    sys.exit(1)

#-------------------------Hardcoded values-----------------------------------------
#tool version denotes the version of the core tools package
TOOL_VERSION = "2.8.7"
TOOL_DATE = "06th August 2010"

#server to be used for downloading Core tools package and knownissues
SERVER_PATH = "http://"

#-------------------------Global values--------------------------------------------
CUR_DIR = os.getcwd()
#used to obtain the path in which checkbc and core tools are placed
tempStr = os.path.realpath(sys.argv[0])
[head,tail] = os.path.split(tempStr)
TOOL_DIR = head + os.sep
#create the \data and \bin paths which contain the necessary additional headers
DATA_PATH = TOOL_DIR + "data" + os.sep
EXEC_PATH = TOOL_DIR + "bin" + os.sep
REPORT_PATH = TOOL_DIR + "reports" + os.sep
DEFAULT_ISSUES_FILE = TOOL_DIR + "data" + os.sep + "knownissues.xml"
GLOBAL_DATA_FILE = TOOL_DIR + "global_data.xml"
tool_chain = ['gcc','gcce','rvct']

DATA_VERSION = ''
ALL_HEADER_SET = ''
sdk_version = []
s60_build_targets = []
sys_includes = {}

doc = xml.dom.minidom.parse(GLOBAL_DATA_FILE)

#data version denotes compatibility between the tool and carbide plugin
#Read dataversion from global_data.xml
DATA_VERSION = doc.getElementsByTagName("dataversion")[0].childNodes[0].data

#Read supported filetypes in header analyser from global_data.xml
for filetype in doc.getElementsByTagName("filetypes")[0].getElementsByTagName("type"):
    if ALL_HEADER_SET != '':
        ALL_HEADER_SET += ';'
    ALL_HEADER_SET += filetype.childNodes[0].data

#Read supported sdk versions from global_data.xml
for version in doc.getElementsByTagName("supportedversions")[0].getElementsByTagName("version"):
    sdk_version.append(version.childNodes[0].data)

#Read supported build targets from global_data.xml
for buildtarget in doc.getElementsByTagName("buildtargets")[0].getElementsByTagName("target"):
    s60_build_targets.append(buildtarget.childNodes[0].data)

#Read system include paths for supported sdk versions from global_data.xml
for node in doc.getElementsByTagName("sys_includes"):
    ver = node.getAttribute("version")
    includes = []
    for inc in node.getElementsByTagName("inc"):
        includes.append(inc.childNodes[0].data)
    sys_includes[ver] = includes
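
# Illustrative only: global_data.xml itself is not part of this listing, but the
# parsing code above expects a layout along these lines (tag and attribute names
# are taken from the calls above; the values and the root element name are
# made-up examples):
#   <globaldata>
#     <dataversion>1.0</dataversion>
#     <filetypes><type>*.h</type><type>*.hrh</type></filetypes>
#     <supportedversions><version>3.0</version><version>5.0</version></supportedversions>
#     <buildtargets><target>armv5</target><target>winscw</target></buildtargets>
#     <sys_includes version="5.0"><inc>epoc32/include/oem</inc></sys_includes>
#   </globaldata>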

#dictionary elements which hold the platform data(CDS) and forced header(symbian macros) information
#these are available only when the \data and \bin folders respectively are available
if os.path.exists( DATA_PATH ):
    platformdata = {
        "3.0": DATA_PATH + "s60_platform_data_30.xml",
        "3.1": DATA_PATH + "s60_platform_data_31.xml",
        "3.2": DATA_PATH + "s60_platform_data_32.xml",
        "5.0": DATA_PATH + "s60_platform_data_50.xml",
        "9.1": DATA_PATH + "s60_platform_data_91.xml",
        "9.2": DATA_PATH + "s60_platform_data_92.xml",
        "10.1": DATA_PATH + "s60_platform_data_101.xml",
        "S^1": DATA_PATH + "s60_platform_data_50.xml",
        "S^2": DATA_PATH + "s60_platform_data_91.xml",
        "S^3": DATA_PATH + "s60_platform_data_92.xml",
        "S^4": DATA_PATH + "s60_platform_data_101.xml"

    }
else:
    platformdata = {}

if os.path.exists( EXEC_PATH ):
    forcedheadersdata = {
        "3.0": EXEC_PATH + "forced_9.1.h",
        "3.1": EXEC_PATH + "forced_9.2.h",
        "3.2": EXEC_PATH + "forced_9.3.h",
        "5.0": EXEC_PATH + "forced_9.4.h",
        "5.0v2": EXEC_PATH + "forced_9.4v2.h",
        "9.1": EXEC_PATH + "forced_9.4v2.h",
        "9.2": EXEC_PATH + "forced_9.4v2.h",
        "10.1": EXEC_PATH + "forced_10.1.h",
        "S^1": EXEC_PATH + "forced_9.4v2.h",
        "S^2": EXEC_PATH + "forced_9.4v2.h",
        "S^3": EXEC_PATH + "forced_9.4v2.h",
        "S^4": EXEC_PATH + "forced_10.1.h"
    }
else:
    forcedheadersdata = {}

if os.path.exists( DATA_PATH ):
    dllXMLdata = {
        "3.0": DATA_PATH + "s60_dll_data_30.xml",
        "3.1": DATA_PATH + "s60_dll_data_31.xml",
        "3.2": DATA_PATH + "s60_dll_data_32.xml",
        "5.0": DATA_PATH + "s60_dll_data_50.xml",
        "9.1": DATA_PATH + "s60_dll_data_91.xml",
        "9.2": DATA_PATH + "s60_dll_data_92.xml",
        "10.1": DATA_PATH + "s60_dll_data_101.xml",
        "S^1": DATA_PATH + "s60_dll_data_50.xml",
        "S^2": DATA_PATH + "s60_dll_data_91.xml",
        "S^3": DATA_PATH + "s60_dll_data_92.xml",
        "S^4": DATA_PATH + "s60_dll_data_101.xml"
    }
else:
    dllXMLdata = {}

#set of binaries in the Core tools set, this is windows specific, to be added for linux support
if os.name == 'nt':
    HA_SET = [ EXEC_PATH+"ha.exe", EXEC_PATH+"ha_gccxml_cc1plus.exe", EXEC_PATH+"libxerces-c2_7_0.dll" ]
    LA_SET = [ EXEC_PATH+"la.exe", EXEC_PATH+"cfilt.exe" ]
    BCFILTER_SET = [ EXEC_PATH+"bcfilter.exe", EXEC_PATH+"libxerces-c2_7_0.dll" ]
else:
    HA_SET = [ EXEC_PATH+"ha", EXEC_PATH+"ha_gccxml_cc1plus", EXEC_PATH+"libxerces-c2_7_0.dll" ]
    LA_SET = [ EXEC_PATH+"la", EXEC_PATH+"cfilt" ]
    BCFILTER_SET = [ EXEC_PATH+"bcfilter", EXEC_PATH+"libxerces-c2_7_0.dll" ]

#Default report paths
HEADER_REPORT = "Headers_CompatibilityReport"
LIBRARY_REPORT = "Libraries_CompatibilityReport"

#true if checkbc is called from carbide plugin,
#this makes additional info available on STDOUT and STDERR
CARBIDE_PLUGIN = False
#-------------------------Error Handling--------------------------------------------
#exits with an environment error when the installed python version is unsupported
def python_error():
    sys.stdout.write("ERROR: Invalid python version")
    sys.stdout.write("\nPython versions from 2.4 to 3.0 are supported")
    sys.exit(1)

#this is the set of possible error values, stored as a dictionary, with the "value" representing the error message
ErrorCode = {
    "cmdlineIP": [ "\nERROR: Commandline input parameter invalid -- " , 1 ],
    "cmdlineIS": [ "\nERROR: Commandline parameter syntax invalid -- " , 2 ],
    "cmdlineMP": [ "\nERROR: Commandline parameter missing -- " , 3 ],
    "confIS": [ "\nERROR: Invalid syntax in config file -- " , 4 ],
    "confIP": [ "\nERROR: Invalid parameter in config file -- ", 5 ],
    "confMP": [ "\nERROR: Missing parameter in config file -- ", 6 ],
    "confMPath": [ "\nERROR: File/path in config file not found -- ", 7 ],
    "cmdhelp": [ "", 8],
    "other": [ "\nGENERAL ERROR: Please recheck the tool inputs.", 9 ]
}

#an exception class, need to update this for better error representation
#value --> holds the error string, #text --> the info text to be displayed
class InputError(Exception):
    def __init__(self, list):
        self.error = ErrorCode["other"]
        if list[0] in ErrorCode:
            self.error = ErrorCode[ list[0] ]
        self.text = list[1]
        self.use = list[2]

#the exception handler class which prints out the error message and usage info when required
class ExHandler:
    def __init__(self, e):
        sys.stdout.write( e.error[0] )
        sys.stdout.write( e.text )
        if e.use:
            usage()
        sys.exit(e.error[1])

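# Illustrative only (assumed wiring; the top-level driver is not shown in this
# listing): InputError and ExHandler are meant to be used together, e.g.
#   try:
#       cmd = CmdLineParser(sys.argv[1:])
#   except InputError, e:
#       ExHandler(e)   # prints the message, optionally usage(), then exits
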
#displays the usage characteristics for the interface when command is invoked without proper arguments
def usage():
    print ""
    print "Compatibility Analyser v" + TOOL_VERSION + " - " + TOOL_DATE
    print "Copyright (c) 2001-2009 Nokia Corporation and/or its subsidiary(-ies). All rights reserved."
    print ""
    print "Usage: CheckBC <configfile> [-ha/-hm/-hs] [-la/-lm/-ls] [-f] [reportid]"
    print ""
    print "Where:"
    print "  configfile   Filename of a configuration file"
    print "  -ha          Check all headers"
    print "  -hm FILE     Check multiple headers (FILE = file with list of headers)"
    print "  -hs FILE     Check a single file (FILE = header)"
    print "  -la          Check all libraries"
    print "  -lm FILE     Check multiple libraries (FILE = file with list of libraries)"
    print "  -ls FILE     Check a single file (FILE = library)"
    print "  -f           Filter results after analysis"
    print "  reportid     ID to be used for report files"
    print ""
    print "Examples:"
    print "  To analyse all headers and libraries and filter results:"
    print "    CheckBC myconfig -ha -la -f MYID"
    print "  To analyse a single header file:"
    print "    CheckBC myconfig -hs aknlists.h MYID"

#-------------------------Header Analysis--------------------------------------------
class HeaderAnalyser:
    # static dict to hold the arguments
    args = {}

    #initialization function for HA component
    def __init__(self, ip_data, set_list, reportid):
        #'args' defines the parameters required by HA
        self.args = {"BASELINE_NAME":[], "CURRENT_NAME":[], "BASELINE_DIR":[], "CURRENT_DIR":[], "REPORT_FILE":[],
                     "BASELINE_SYSTEMINCLUDEDIR":[], "CURRENT_SYSTEMINCLUDEDIR":[], "FORCEBASEINCLUDE":[], "FORCECURRENTINCLUDE":[],
                     "RECURSIVE":[], "REPLACE":[], "TEMP":[], "USE_THREAD":[], "USE_PLATFORM_DATA":[], "SET":[], "BUNDLESIZE":['-bundlesize', '50'] }

        base_sys_include = []
        curr_sys_include = []
        base_forced = []
        curr_forced = []

        #validate SDK versions
        validateSDKVersion(ip_data["BASELINE_SDK_S60_VERSION"],ip_data["CURRENT_SDK_S60_VERSION"])
        #validate USE_PLATFORM_DATA entry 'true' or 'false'
        if( getdata( ip_data, "USE_PLATFORM_DATA") ):
            if( not('true' == ip_data["USE_PLATFORM_DATA"].lower()) and not('false' == ip_data["USE_PLATFORM_DATA"].lower()) ):
                raise InputError(["confIP", "USE_PLATFORM_DATA\n", False])

        #validate USE_THREAD entry 'true' or 'false'
        if( getdata( ip_data, "USE_THREAD") ):
            if( not('true' == ip_data["USE_THREAD"].lower()) and not('false' == ip_data["USE_THREAD"].lower()) ):
                raise InputError(["confIP", "USE_THREAD\n", False])


        # specify the baseline and current names
        if not getdata(ip_data, "BASELINE_NAME"):
            raise InputError(["confMP", "baseline name missing\n", False])
        self.args["BASELINE_NAME"] = ["-baselineversion", quote( ip_data["BASELINE_NAME"] )]
        if not getdata( ip_data, "CURRENT_NAME"):
            raise InputError(["confMP", "current name missing\n", False])
        self.args["CURRENT_NAME"] = ["-currentversion", quote( ip_data["CURRENT_NAME"] )]

        # get the analysis directories
        tmp = []
        sdkBaseTmp = []
        if getdata( ip_data, "BASELINE_SDK_DIR"):
            if(ip_data["BASELINE_SDK_DIR"] == os.sep):
                sdkBaseTmp.append( validate(os.sep + "epoc32" + os.sep + "include"))
            else:
                sdkBaseTmp.append( validate(ip_data["BASELINE_SDK_DIR"] + os.sep + "epoc32" + os.sep + "include") )
        else:
            raise InputError(["confMP", "baseline header directory missing\n", False])

        if getdata( ip_data, "BASELINE_HEADERS"):
            if(ip_data["BASELINE_HEADERS"] == os.sep):
                tmp = sdkBaseTmp
            else:
                for i in ip_data["BASELINE_HEADERS"].split(';'):
                    if not os.path.exists(i):
                        if(ip_data["BASELINE_SDK_DIR"] == os.sep):
                            tmp.append(validate(os.sep + "epoc32" + os.sep + "include" + os.sep + i))
                        else:
                            tmp.append(validate(ip_data["BASELINE_SDK_DIR"] + os.sep + "epoc32" + os.sep + "include" + os.sep + i))
                    else:
                        tmp.append(validate(i))
        else:
            tmp = sdkBaseTmp

        self.args["BASELINE_DIR"] = ["-baselinedir", ';'.join(["%s" % quote(i) for i in tmp]) ]
        base_sys_include.extend(tmp)

        tmp = []
        sdkCurrTmp = []
        if getdata( ip_data, "CURRENT_SDK_DIR"):
            if(ip_data["CURRENT_SDK_DIR"] == os.sep):
                sdkCurrTmp.append( validate(os.sep + "epoc32" + os.sep + "include" ))
            else:
                sdkCurrTmp.append( validate(ip_data["CURRENT_SDK_DIR"] + os.sep + "epoc32" + os.sep + "include") )
        else:
            raise InputError(["confMP", "current header directory missing\n", False])

        if getdata( ip_data, "CURRENT_HEADERS"):
            if(ip_data["CURRENT_HEADERS"] == os.sep):
                tmp = sdkCurrTmp
            else:
                for i in ip_data["CURRENT_HEADERS"].split(';'):
                    if not os.path.exists(i):
                        if(ip_data["CURRENT_SDK_DIR"] == os.sep):
                            tmp.append(validate(os.sep + "epoc32" + os.sep + "include" + os.sep + i))
                        else:
                            tmp.append(validate(ip_data["CURRENT_SDK_DIR"] + os.sep + "epoc32" + os.sep + "include" + os.sep + i))
                    else:
                        tmp.append(validate(i))

        else:
            tmp = sdkCurrTmp

        self.args["CURRENT_DIR"] = ["-currentdir", ';'.join(["%s" % quote(i) for i in tmp]) ]
        curr_sys_include.extend(tmp)

        # get the report file name
        if not getdata( ip_data, "REPORT_FILE_HEADERS"):
            if not reportid:
                ip_data["REPORT_FILE_HEADERS"] = REPORT_PATH + HEADER_REPORT
            else:
                ip_data["REPORT_FILE_HEADERS"] = REPORT_PATH + HEADER_REPORT +'_'+ reportid
        else:
            [head, tail] = os.path.split(ip_data["REPORT_FILE_HEADERS"])
            if tail != '':
                if reportid:
                    ip_data["REPORT_FILE_HEADERS"] = ip_data["REPORT_FILE_HEADERS"] +'_'+ reportid
            else:
                if reportid:
                    ip_data["REPORT_FILE_HEADERS"] = ip_data["REPORT_FILE_HEADERS"] + HEADER_REPORT +'_'+ reportid
                else:
                    ip_data["REPORT_FILE_HEADERS"] = ip_data["REPORT_FILE_HEADERS"] + HEADER_REPORT

        ip_data["REPORT_FILE_HEADERS"] = ip_data["REPORT_FILE_HEADERS"] + ".xml"
        self.args["REPORT_FILE"] = ["-reportfile", quote( ip_data["REPORT_FILE_HEADERS"])]

        tmp = []
        # get the base system include directories
        if getdata( ip_data, "BASELINE_SYSTEMINCLUDEDIR"):
            for term in ip_data["BASELINE_SYSTEMINCLUDEDIR"].split(';'):
                if not os.path.exists(term):
                    if(ip_data["BASELINE_SDK_DIR"] == os.sep):
                        tmp.append(os.sep + "epoc32" + os.sep + "include" + os.sep + term)
                    else:
                        tmp.append(ip_data["BASELINE_SDK_DIR"] + os.sep + "epoc32" + os.sep + "include" + os.sep + term)
                else:
                    tmp.append(term)

        for i in sys_includes[ip_data["BASELINE_SDK_S60_VERSION"]]:
            if(ip_data["BASELINE_SDK_DIR"] == os.sep):
                tmp.append( os.sep + i )
            else:
                tmp.append( ip_data["BASELINE_SDK_DIR"] + os.sep + i )

        for i in tmp:
            try:
                term = validate(i)
            except InputError, e:
                sys.stderr.write(os.linesep + "WARNING: Baseline system include path "+ i +" not found")
            else:
                if term not in base_sys_include:
                    base_sys_include.append(term)
        self.args["BASELINE_SYSTEMINCLUDEDIR"] = ["-baseplatformheaders", quote( ';'.join(["%s" % quote(i) for i in base_sys_include]) ) ]

        tmp = []
        # get the current system include directories
        if getdata( ip_data, "CURRENT_SYSTEMINCLUDEDIR"):
            for term in ip_data["CURRENT_SYSTEMINCLUDEDIR"].split(';'):
                if not os.path.exists(term):
                    if(ip_data["CURRENT_SDK_DIR"] == os.sep):
                        tmp.append(os.sep + "epoc32" + os.sep + "include" + os.sep + term)
                    else:
                        tmp.append(ip_data["CURRENT_SDK_DIR"] + os.sep + "epoc32" + os.sep + "include" + os.sep + term)
                else:
                    tmp.append(term)

        for i in sys_includes[ip_data["CURRENT_SDK_S60_VERSION"]]:
            if(ip_data["CURRENT_SDK_DIR"] == os.sep):
                tmp.append( os.sep + i )
            else:
                tmp.append( ip_data["CURRENT_SDK_DIR"] + os.sep + i )

        for i in tmp:
            try:
                term = validate(i)
            except InputError, e:
                sys.stderr.write(os.linesep + "WARNING: Current system include path "+ i +" not found")
            else:
                if term not in curr_sys_include:
                    curr_sys_include.append(term)
        self.args["CURRENT_SYSTEMINCLUDEDIR"] = ["-currentplatformheaders", quote( ';'.join(["%s" % quote(i) for i in curr_sys_include]) ) ]

        # get the forced headers
        if not getdata( ip_data, "BASELINE_SDK_S60_VERSION"):
            raise InputError(["confMP", "Baseline SDK version undefined\n", False])
        for i in getdata( ip_data, "BASELINE_FORCED_HEADERS").split(';'):
            if i:
                base_forced.append(i)
        if forcedheadersdata.has_key(ip_data["BASELINE_SDK_S60_VERSION"]):
            base_forced.append( self.forcedheaders(ip_data["BASELINE_SDK_S60_VERSION"], self.args["BASELINE_SYSTEMINCLUDEDIR"][1]) )

        #Validate the existence of base forced headers
        for i in base_forced:
            if not os.path.exists(i):
                tmp = os.path.abspath(i)
                if not os.path.exists(tmp):
                    raise InputError(["confIP", "BASELINE_SDK_S60_VERSION < Field "+ i +" is not valid >" + os.linesep,False])

        self.args["FORCEBASEINCLUDE"] = ["-forcebaseinclude", ';'.join(["%s" % quotep(i) for i in base_forced ]) ]

        if not getdata( ip_data, "CURRENT_SDK_S60_VERSION"):
            raise InputError(["confMP", "Current SDK version undefined\n", False])
        for i in getdata( ip_data, "CURRENT_FORCED_HEADERS").split(';'):
            if i:
                curr_forced.append(i)
        if forcedheadersdata.has_key(ip_data["CURRENT_SDK_S60_VERSION"]):
            curr_forced.append( self.forcedheaders(ip_data["CURRENT_SDK_S60_VERSION"], self.args["CURRENT_SYSTEMINCLUDEDIR"][1]) )

        #Validate the existence of current forced headers
        for i in curr_forced:
            if not os.path.exists(i):
                tmp = os.path.abspath(i)
                if not os.path.exists(tmp):
                    raise InputError(["confIP", "CURRENT_SDK_S60_VERSION < Field "+ i +" is not valid >" + os.linesep,False])

        self.args["FORCECURRENTINCLUDE"] = ["-forcecurrentinclude", ';'.join(["%s" % quotep(i) for i in curr_forced ]) ]

        if getdata( ip_data, "USE_THREAD"):
            if 'true' == ip_data["USE_THREAD"].lower():
                self.args["USE_THREAD"] = [ "-usethread" ]

        # setup the replace and exclude directories
        self.args["RECURSIVE"] = [ "-recursive" ]
        if getdata( ip_data, "RECURSIVE_HEADERS"):
            if 'false' == ip_data["RECURSIVE_HEADERS"].lower():
                self.args["RECURSIVE"] = [ "" ]
            elif not 'true' == ip_data["RECURSIVE_HEADERS"].lower():
                raise InputError(["confIP", "RECURSIVE_HEADERS\n", False])

        tmp = []
        for i in getdata( ip_data, "EXCLUDE_DIR_HEADERS").split(';'):
            if i:
                tmp.append(i)
        if tmp:
            self.args["EXCLUDE_DIR_HEADERS"] = ["-excludedirs", ';'.join(["%s" % quote(i) for i in tmp]) ]

        self.args["TEMP"] = ["-temp", quotep( ip_data["TEMP"] )]

        if not getdata( ip_data, "USE_PLATFORM_DATA") or ('true' == ip_data["USE_PLATFORM_DATA"].lower()):
            if getdata( platformdata, ip_data["BASELINE_SDK_S60_VERSION"] ) and getdata( platformdata, ip_data["CURRENT_SDK_S60_VERSION"] ):
                self.args["USE_PLATFORM_DATA"] = ["-baseplatformdata", quotep( platformdata[ip_data["BASELINE_SDK_S60_VERSION"]] ),
                                                  "-currentplatformdata", quotep( platformdata[ip_data["CURRENT_SDK_S60_VERSION"]] )]
            else:
                sys.stderr.write( os.linesep + "WARNING: Platform data not available in default paths, continuing without platform data information." )

        if getdata( ip_data, "REPLACE_HEADERS"):
            #separate the sets
            tlist_1 = ip_data["REPLACE_HEADERS"].split(';')
            str = ''
            #separate the replacement pairs
            for entry in tlist_1:
                if entry != '':
                    tlist_2 = (entry.split(':'))
                    #create a single str, which is the input param
                    str = ' '.join(["%s" % i for i in tlist_2])
            self.args["REPLACE"] = ["-replace", str ]
        else:
            del self.args["REPLACE"]

        # handling the files provided with 'm' or 's' options on commandline
        str = ''
        line = ''
        tmp = []
        if 's' == set_list[0]:
            str = set_list[1]
        elif 'm' == set_list[0]:
            fname = os.path.abspath(set_list[1])
            if not os.path.exists(fname):
                raise InputError(["confIP", "Input list file unavailable" + os.linesep, False])
            filehandle = open(fname, 'r')
            for line in filehandle:
                tmp.append(clean(line))
            str = ';'.join(["%s" % i for i in tmp])
            filehandle.close()
        elif 'a' == set_list[0]:
            str = ALL_HEADER_SET
        self.args["SET"] = ["-set", quote(str)]

    #'stringize' all params and invoke the tool
    def run(self):
        str = ''
        for i in HA_SET:
            if not os.path.exists(i):
                print os.linesep + i + " does not exist. Please reinstall."
                sys.exit(1)
        it_tmp = self.args.itervalues()
        for i in xrange(0, len(self.args), 1):
            for val in it_tmp.next():
                str = str + ' ' + val
        cmd_file = unquote(self.args["TEMP"][1]) + os.sep + 'ha.txt'
        file = open(cmd_file,'w')
        file.write(str)
        file.close()
        cmd_str = ' -commandfile ' + cmd_file
        invokeTool(quote(HA_SET[0]), cmd_str)

    #verify existence of system hrh files
    def forcedheaders(self, str, dirs):
        if "5.0" == str:
            dirs = unquote(dirs).split(';')
            for path in dirs:
                checkpath = os.path.normpath( unquote(path) + os.sep + "variant" + os.sep + "symbian_os_v9.4.hrh" )
                if os.path.exists( checkpath ):
                    if forcedheadersdata.has_key(str):
                        return forcedheadersdata[str]
            if forcedheadersdata.has_key("5.0v2"):
                return forcedheadersdata["5.0v2"]
        else:
            if forcedheadersdata.has_key(str):
                return forcedheadersdata[str]
        return ''

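# Illustrative only: HeaderAnalyser.run() above drives the header analyser
# through a command file. The flags written to <TEMP>/ha.txt are the ones
# assembled in __init__ (the values below are made-up examples):
#   -baselineversion "S60_3.0" -currentversion "S60_5.0" -baselinedir "..." -currentdir "..." -reportfile "..." -bundlesize 50 -recursive ...
# and the analyser binary is then invoked as:  ha -commandfile <TEMP>/ha.txt
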
#-------------------------Library Analysis-------------------------------------------
class LibraryAnalyser:
    args = {}
    def __init__(self, ip_data, set_list, reportid):
        #'args' defines the parameters required by LA, 'optargs' defines optional ones
        self.args = { "TOOLCHAIN":[], "TOOLCHAIN_PATH":[], "BASELINE_NAME":[], "CURRENT_NAME":[], "BASELINE_DIR":[], "CURRENT_DIR":[],
                      "REPORT_FILE":[], "CFILT":[], "TEMP":[], "SET":[] }

        #validate SDK versions
        validateSDKVersion(ip_data["BASELINE_SDK_S60_VERSION"],ip_data["CURRENT_SDK_S60_VERSION"])

        # Validate the user input tool chain
        if getdata( ip_data, "TOOLCHAIN"):
            validChain = False
            for i in tool_chain:
                if ( i == ip_data["TOOLCHAIN"].lower()):
                    validChain = True
                    break
            if validChain == False:
                raise InputError(["confIP", "TOOLCHAIN\n", False])

        #Validate the build target provided
        if getdata( ip_data, "BASELINE_BUILDTARGET"):
            validateBulidTarget(ip_data["BASELINE_BUILDTARGET"],True)

        if getdata( ip_data, "CURRENT_BUILDTARGET"):
            validateBulidTarget(ip_data["CURRENT_BUILDTARGET"],False)

        if getdata( ip_data, "TOOLCHAIN"):
            self.args["TOOLCHAIN"] = [ ip_data["TOOLCHAIN"].upper() ]
        else:
            self.args["TOOLCHAIN"] = [ 'GCCE' ]

        if getdata( ip_data, "TOOLCHAIN_PATH"):
            self.args["TOOLCHAIN_PATH"] = [ '-tools', quotep( ip_data["TOOLCHAIN_PATH"] )]
        else:
            del self.args["TOOLCHAIN_PATH"]

        if not getdata( ip_data, "BASELINE_NAME"):
            raise InputError(["confMP", "baseline analysis directory missing" + os.linesep, False])
        self.args["BASELINE_NAME"] = [ '-baselineversion', quote( ip_data["BASELINE_NAME"] )]
        if not getdata( ip_data, "CURRENT_NAME"):
            raise InputError(["confMP", "current analysis directory missing" + os.linesep, False])
        self.args["CURRENT_NAME"] = [ '-currentversion', quote( ip_data["CURRENT_NAME"] )]


        #Check if baseline import library is present, import dll should also be present and vice versa.
        libLength = 0
        dllLength = 0

        if getdata( ip_data, "BASELINE_IMPORTLIBRARIES"):
            libLength = len(ip_data["BASELINE_IMPORTLIBRARIES"])

        if getdata( ip_data, "BASELINE_IMPORTDLLS"):
            dllLength = len(ip_data["BASELINE_IMPORTDLLS"])

        if( libLength > 0 and dllLength <= 0 ):
            raise InputError(["confMP", "BASELINE_IMPORTDLLS Path missing" + os.linesep, False])
        if( libLength <= 0 and dllLength > 0 ):
            raise InputError(["confMP", "BASELINE_IMPORTLIBRARIES Path missing" + os.linesep, False])

        #Check if current import library is present, import dll should also be present and vice versa.
        libLength = 0
        dllLength = 0

        if getdata( ip_data, "CURRENT_IMPORTLIBRARIES"):
            libLength = len(ip_data["CURRENT_IMPORTLIBRARIES"])

        if getdata( ip_data, "CURRENT_IMPORTDLLS"):
            dllLength = len(ip_data["CURRENT_IMPORTDLLS"])

        if( libLength > 0 and dllLength <= 0 ):
            raise InputError(["confMP", "CURRENT_IMPORTDLLS missing" + os.linesep, False])
        if( libLength <= 0 and dllLength > 0 ):
            raise InputError(["confMP", "CURRENT_IMPORTLIBRARIES missing" + os.linesep, False])


        if not getdata( ip_data, "BASELINE_SDK_DIR"):
            raise InputError(["confMP", "baseline importlib directory missing" + os.linesep, False])
        if not getdata( ip_data, "CURRENT_SDK_DIR"):
            raise InputError(["confMP", "current importlib directory missing" + os.linesep, False])

        self.args["TEMP"] = ["-temp", quotep( ip_data["TEMP"] )]

        if getdata( dllXMLdata, ip_data["BASELINE_SDK_S60_VERSION"] ):
            baseDlldata = dllXMLdata[ip_data["BASELINE_SDK_S60_VERSION"]]
        if getdata( dllXMLdata, ip_data["CURRENT_SDK_S60_VERSION"] ):
            currDlldata = dllXMLdata[ip_data["CURRENT_SDK_S60_VERSION"]]

        dbasebuild = GetBuildTarget(ip_data["BASELINE_SDK_DIR"],validate(baseDlldata),ip_data["TEMP"])
        dcurrentbuild = GetBuildTarget(ip_data["CURRENT_SDK_DIR"],validate(currDlldata),ip_data["TEMP"])

        if getdata( ip_data, "BASELINE_BUILDTARGET"):
            basebuild = ip_data["BASELINE_BUILDTARGET"]
        else:
            basebuild = dbasebuild

        if getdata( ip_data, "CURRENT_BUILDTARGET"):
            currentbuild = ip_data["CURRENT_BUILDTARGET"]
        else:
            currentbuild = dcurrentbuild

        if basebuild == "" and currentbuild == "":
            basebuild = 'armv5'
            currentbuild = 'armv5'
        else:
            if basebuild == "":
                basebuild = currentbuild
            if currentbuild == "":
                currentbuild = basebuild

        libBasetmp = []
        libtmp = []
        for target in basebuild.split(';'):
            if(ip_data["BASELINE_SDK_DIR"] == os.sep):
                libBasetmp.append(os.sep + 'epoc32' + os.sep + 'release' + os.sep + target + os.sep + 'lib')
            else:
                libBasetmp.append(ip_data["BASELINE_SDK_DIR"] + os.sep + 'epoc32' + os.sep + 'release' + os.sep + target + os.sep + 'lib')

        if getdata( ip_data, "BASELINE_IMPORTLIBRARIES"):
            if(ip_data["BASELINE_IMPORTLIBRARIES"] == os.sep):
                for path in libBasetmp:
                    libtmp.append(validate(path))
            else:
                for i in ip_data["BASELINE_IMPORTLIBRARIES"].split(';'):
                    if not os.path.exists(i):
                        if(ip_data["BASELINE_SDK_DIR"] == os.sep):
                            libtmp.append(validate( os.sep + 'epoc32' + os.sep + 'release' + os.sep + i))
                        else:
                            libtmp.append(validate( ip_data["BASELINE_SDK_DIR"] + os.sep + 'epoc32' + os.sep + 'release' + os.sep + i ))
                    else:
                        libtmp.append(validate(i))
        else:
            for path in libBasetmp:
                libtmp.append(validate(path))

        dllBasetmp = []
        dlltmp = []
        if dbasebuild == "":
            dllBasetmp.append(baseDlldata)
        else:
            for target in basebuild.split(';'):
                if(ip_data["BASELINE_SDK_DIR"] == os.sep):
                    dllBasetmp.append(os.sep + 'epoc32' + os.sep + 'release' + os.sep + target + os.sep + 'urel')
                else:
                    dllBasetmp.append(ip_data["BASELINE_SDK_DIR"] + os.sep + 'epoc32' + os.sep + 'release' + os.sep + target + os.sep + 'urel')

        if getdata( ip_data, "BASELINE_IMPORTDLLS"):
            if(ip_data["BASELINE_IMPORTDLLS"] == os.sep):
                for path in dllBasetmp:
                    dlltmp.append(validate(path))
            else:
                for i in ip_data["BASELINE_IMPORTDLLS"].split(';'):
                    if not os.path.exists(i):
                        if(ip_data["BASELINE_SDK_DIR"] == os.sep):
                            dlltmp.append(validate( os.sep + 'epoc32' + os.sep + 'release' + os.sep + i))
                        else:
                            dlltmp.append(validate( ip_data["BASELINE_SDK_DIR"] + os.sep + 'epoc32' + os.sep + 'release' + os.sep + i ))
                    else:
                        dlltmp.append(validate(i))
        else:
            for path in dllBasetmp:
                dlltmp.append(validate(path))

        self.args["BASELINE_DIR"] = ["-baselinelibdir", ';'.join(["%s" % quote(i) for i in libtmp])]
        self.args["BASELINE_DLL_DIR"] = ["-baselinedlldir", ';'.join(["%s" % quote(i) for i in dlltmp])]

        libCurrTmp = []
        libtmp = []
        for target in currentbuild.split(';'):
            if(ip_data["CURRENT_SDK_DIR"] == os.sep):
                libCurrTmp.append(os.sep + 'epoc32' + os.sep + 'release' + os.sep + target + os.sep + 'lib')
            else:
                libCurrTmp.append(ip_data["CURRENT_SDK_DIR"] + os.sep + 'epoc32' + os.sep + 'release' + os.sep + target + os.sep + 'lib')
        if getdata( ip_data, "CURRENT_IMPORTLIBRARIES"):
            if(ip_data["CURRENT_IMPORTLIBRARIES"] == os.sep):
                for target in libCurrTmp:
                    libtmp.append(validate(target))
            else:
                for i in ip_data["CURRENT_IMPORTLIBRARIES"].split(';'):
                    if not os.path.exists(i):
                        if(ip_data["CURRENT_SDK_DIR"] == os.sep):
                            libtmp.append(validate( os.sep + 'epoc32' + os.sep + 'release' + os.sep + i))
                        else:
                            libtmp.append(validate( ip_data["CURRENT_SDK_DIR"] + os.sep + 'epoc32' + os.sep + 'release' + os.sep + i ))
                    else:
                        libtmp.append(validate(i))
        else:
            for target in libCurrTmp:
                libtmp.append(validate(target))

        #Get baseline and current dll path and provide for analysis
        dllCurrtmp = []
        dlltmp = []
        if dcurrentbuild == "":
            dllCurrtmp.append(currDlldata)
        else:
            for target in currentbuild.split(';'):
                if(ip_data["CURRENT_SDK_DIR"] == os.sep):
                    dllCurrtmp.append(os.sep + 'epoc32' + os.sep + 'release' + os.sep + target + os.sep + 'urel')
                else:
                    dllCurrtmp.append(ip_data["CURRENT_SDK_DIR"] + os.sep + 'epoc32' + os.sep + 'release' + os.sep + target + os.sep + 'urel')

        if getdata( ip_data, "CURRENT_IMPORTDLLS"):
            if(ip_data["CURRENT_IMPORTDLLS"] == os.sep):
                for target in dllCurrtmp:
                    dlltmp.append(validate(target))
            else:
                for i in ip_data["CURRENT_IMPORTDLLS"].split(';'):
                    if not os.path.exists(i):
                        if(ip_data["CURRENT_SDK_DIR"] == os.sep):
                            dlltmp.append(validate( os.sep + 'epoc32' + os.sep + 'release' + os.sep + i))
                        else:
                            dlltmp.append(validate( ip_data["CURRENT_SDK_DIR"] + os.sep + 'epoc32' + os.sep + 'release' + os.sep + i ))
                    else:
                        dlltmp.append(validate(i))
        else:
            for target in dllCurrtmp:
                dlltmp.append(validate(target))

        self.args["CURRENT_DIR"] = ["-currentlibdir", ';'.join(["%s" % quote(i) for i in libtmp])]
        self.args["CURRENT_DLL_DIR"] = ["-currentdlldir", ';'.join(["%s" % quote(i) for i in dlltmp])]

        # get the report file name
        if not getdata( ip_data, "REPORT_FILE_LIBRARIES"):
            if not reportid:
                ip_data["REPORT_FILE_LIBRARIES"] = REPORT_PATH + LIBRARY_REPORT
            else:
                ip_data["REPORT_FILE_LIBRARIES"] = REPORT_PATH + LIBRARY_REPORT +'_'+ reportid
        else:
            [head, tail] = os.path.split(ip_data["REPORT_FILE_LIBRARIES"])
            if tail != '':
                if reportid:
                    ip_data["REPORT_FILE_LIBRARIES"] = ip_data["REPORT_FILE_LIBRARIES"] +'_'+ reportid
            else:
                if reportid:
                    ip_data["REPORT_FILE_LIBRARIES"] = ip_data["REPORT_FILE_LIBRARIES"] + LIBRARY_REPORT +'_'+ reportid
                else:
                    ip_data["REPORT_FILE_LIBRARIES"] = ip_data["REPORT_FILE_LIBRARIES"] + LIBRARY_REPORT

        ip_data["REPORT_FILE_LIBRARIES"] = ip_data["REPORT_FILE_LIBRARIES"] + ".xml"
        self.args["REPORT_FILE"] = ["-reportfile", quote( ip_data["REPORT_FILE_LIBRARIES"] )]

        if 'rvct' == self.args["TOOLCHAIN"][0].lower():
            self.args["CFILT"] = [ '-cfilt', quotep( LA_SET[1] )]
        else:
            del self.args["CFILT"]

        str = ''
        if 's' == set_list[0]:
            str = unquote(self.args["TEMP"][1]) + os.sep + 'la.txt'
            file = open( str, 'w')
            file.write( set_list[1] )
            file.close()
        elif 'm' == set_list[0]:
            str = os.path.abspath( set_list[1] )
            if not os.path.exists(str):
                raise InputError(["confIP", "Input list file unavailable" + os.linesep, False])
        self.args["SET"] = ["-set", quote(str)]
        if 'a' == set_list[0]:
            del self.args["SET"]

    #'stringize' all params and invoke the tool
    def run(self):
        for i in LA_SET:
            if not os.path.exists(i):
                print os.linesep + i + " does not exist. Please reinstall."
                sys.exit(1)
        str = ' ' + self.args["TOOLCHAIN"][0]
        del self.args["TOOLCHAIN"]
        it_tmp = self.args.itervalues()
        for i in xrange(0, len(self.args), 1):
            for val in it_tmp.next():
                str = str + ' ' + val
        invokeTool(quote(LA_SET[0]), str)

#-------------------------Report Filter-------------------------------------------
class ReportFilter:
    args = {}
    issueargs = {}
    filterfiles = False
    def __init__(self, ip_data, cmds, reportid):
        reportlist = []
        outputlist = []
        issuelist = []
        #'args' defines the parameters required by the filter tool, 'optargs' defines optional ones
        self.args = { "ISSUES_FILE":[], "REPORT_FILE":[], "OUTPUT_FILE":[] }

        for val in cmds:
            if 'h' in val:
                if getdata( ip_data, "REPORT_FILE_HEADERS"):
                    reportlist.append( ip_data["REPORT_FILE_HEADERS"] )
                if getdata( ip_data, "FILTER_FILE_HEADERS"):
                    if reportid:
                        ip_data["FILTER_FILE_HEADERS"] = ip_data["FILTER_FILE_HEADERS"] +'_'+ reportid + ".xml"
                    else:
                        ip_data["FILTER_FILE_HEADERS"] = ip_data["FILTER_FILE_HEADERS"] + ".xml"
                outputlist.append( getdata(ip_data, "FILTER_FILE_HEADERS") )
            if 'l' in val:
                print ""
                if getdata( ip_data, "REPORT_FILE_LIBRARIES"):
                    reportlist.append( ip_data["REPORT_FILE_LIBRARIES"] )
                if getdata( ip_data, "FILTER_FILE_LIBRARIES"):
                    if reportid:
                        ip_data["FILTER_FILE_LIBRARIES"] = ip_data["FILTER_FILE_LIBRARIES"] +'_'+ reportid + ".xml"
                    else:
                        ip_data["FILTER_FILE_LIBRARIES"] = ip_data["FILTER_FILE_LIBRARIES"] + ".xml"
                outputlist.append( ip_data["FILTER_FILE_LIBRARIES"] )
            if 'f' in val:
                self.filterfiles = True
                rtmp = []
                otmp = []
                if getdata( ip_data, "REPORT_FILE_FILTER"):
                    rtmp = ip_data["REPORT_FILE_FILTER"].split(';')
                    for i in xrange(0, len(rtmp)):
                        if reportid:
                            rtmp[i] = rtmp[i] +'_'+ reportid + ".xml"
                        else:
                            rtmp[i] = rtmp[i] + ".xml"
                    reportlist.extend( rtmp )
                if getdata( ip_data, "OUTPUT_FILE_FILTER"):
                    otmp = ip_data["OUTPUT_FILE_FILTER"].split(';')
                    for i in xrange(0, len(otmp)):
                        if otmp[i] != "":
                            if reportid:
                                otmp[i] = otmp[i] +'_'+ reportid + ".xml"
                            else:
                                otmp[i] = otmp[i] + ".xml"
                    outputlist.extend( otmp )

        for term in reportlist:
            if not os.path.exists( term ):
                raise InputError( ["confMP", "report file missing -- " + term, False] )
        self.args["REPORT_FILE"] = reportlist

        for term in outputlist:
            if '' != term:
                if not os.path.exists( os.path.dirname(term) ):
                    os.makedirs( os.path.dirname(term) )
        self.args["OUTPUT_FILE"] = outputlist

        if not getdata( ip_data, "ISSUES_FILE"):
            issuelist.append(DEFAULT_ISSUES_FILE)
        else:
            issuelist = ip_data["ISSUES_FILE"].split(';')
            for term in issuelist:
                if term.startswith( SERVER_PATH ): #server path used in issues set
                    i = issuelist.index(term)
                    localfile = ip_data["TEMP"] + os.sep + "tmpissues.xml"
                    tmp = os.path.splitext(term)
                    if '' == tmp[1]:
                        term = getlatestfile( term )
                    try:
                        urllib.urlretrieve( term, localfile )
                    except IOError:
                        raise InputError( ["confMPath", "Error in knownissues server path " + term + os.linesep, False] )
                    f = open(localfile)
                    if ((f.read()).find("404 Not Found") != -1):
                        raise InputError( ["confMPath", "Error in knownissues server path " + term + os.linesep, False] )
                    f.close()
                    issuelist[i] = localfile
                    self.issueargs[localfile] = term
                elif os.path.exists( validate(term) ):
                    issuelist[issuelist.index(term)] = validate(term)
                elif not os.path.exists( validate(term) ):
                    raise InputError( ["confMP", "issues file " + term + " missing" + os.linesep, False] )
        self.args["ISSUES_FILE"] = issuelist

    #'stringize' all params and invoke the tool
    def run(self):
        report = ''
        output = ''
        issue = ''
        num = 1
        for i in BCFILTER_SET:
            if not os.path.exists(i):
                print os.linesep + i + " does not exist. Please reinstall."
                sys.exit(1)
        total = len(self.args["REPORT_FILE"]) * len(self.args["ISSUES_FILE"])
        for i in range(0, len(self.args["REPORT_FILE"]), 1):
            for j in xrange(0, len(self.args["ISSUES_FILE"]), 1):
                report = self.args["REPORT_FILE"][i]
                if i < len(self.args["OUTPUT_FILE"]):
                    output = self.args["OUTPUT_FILE"][i]
                else:
                    output = ""
                issue = self.args["ISSUES_FILE"][j]
                str = " " + quote(report) + " " + quote(issue)
                if getdata( self.issueargs, issue):
                    issue = self.issueargs[issue]
                if self.filterfiles:
                    dispstr = "\nprocessing (" + `num` + "/" + `total` +") files >>\nReport File: " + report + "\nKnown Issues File: " + issue + "\nOutput File: "
                else:
                    dispstr = "\nprocessing file >>\nReport File: " + report + "\nKnown Issues File: " + issue + "\nOutput File: "
                if not output:
                    dispstr = dispstr + "None(inplace filtration)"
                else:
                    dispstr = dispstr + output
                    str = str + " " + quote(output)
                print dispstr
                invokeTool(quote(BCFILTER_SET[0]), str)
                num = num + 1

#----------------------CONFIGURATION PARSER-------------------------------------------
class ConfigParser:
    reportstr = ''
    data = {}
    #read the config values into a data structure
    def __init__(self, cmd):
        self.data = {"BASELINE_NAME":'', "BASELINE_SDK_DIR":'', "BASELINE_SDK_S60_VERSION":'', "CURRENT_NAME":'', "CURRENT_SDK_DIR":'', "CURRENT_SDK_S60_VERSION":'', "TEMP":'',
                     "BASELINE_HEADERS":'', "CURRENT_HEADERS":'', "BASELINE_SYSTEMINCLUDEDIR":'', "CURRENT_SYSTEMINCLUDEDIR":'', "BASELINE_FORCED_HEADERS":'', "CURRENT_FORCED_HEADERS":'', "USE_PLATFORM_DATA":'', "RECURSIVE_HEADERS":'',
                     "EXCLUDE_DIR_HEADERS":'', "REPLACE_HEADERS":'', "REPORT_FILE_HEADERS":'', "FILTER_FILE_HEADERS":'', "TOOLCHAIN":'', "TOOLCHAIN_PATH":'', "BASELINE_BUILDTARGET":'',
                     "CURRENT_BUILDTARGET":'',"BASELINE_BUILDTYPE":'',"CURRENT_BUILDTYPE":'', "BASELINE_IMPORTLIBRARIES":'', "CURRENT_IMPORTLIBRARIES":'', "BASELINE_IMPORTDLLS":'', "CURRENT_IMPORTDLLS":'',
                     "REPORT_FILE_LIBRARIES":'', "FILTER_FILE_LIBRARIES":'', "REPORT_FILE_FILTER":'',"OUTPUT_FILE_FILTER":'', "ISSUES_FILE":'', "EXCLUDE_DIR":'', "USE_THREAD":''}
        file = open(cmd.conffile)
        for input in file:
            if not input.startswith("#") and input != '\n': # process non-comment lines
                pair = input.split('=')
                if len(pair) == 1:
                    pair.append("")
                if not self.data.has_key(clean(pair[0])):
                    raise InputError(["confIP", clean(pair[0]) + " is not valid" + os.linesep, False])
                self.data[clean(pair[0])] = clean(pair[1])
        file.close()
        if self.data.has_key("EXCLUDE_DIR"):
            if getdata( self.data,"EXCLUDE_DIR"):
                self.data["EXCLUDE_DIR_HEADERS"] = self.data["EXCLUDE_DIR"]
            del self.data["EXCLUDE_DIR"]
        self.reportstr = cmd.reportstr
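
    # Illustrative only: the configuration file read above is a plain KEY=value
    # text file; '#' starts a comment line and every key must be a member of
    # self.data. A minimal example (the paths are made-up):
    #   BASELINE_NAME=S60_3.0
    #   BASELINE_SDK_DIR=C:\Symbian\SDKs\S60_3rd
    #   BASELINE_SDK_S60_VERSION=3.0
    #   CURRENT_NAME=S60_5.0
    #   CURRENT_SDK_DIR=C:\Symbian\SDKs\S60_5th
    #   CURRENT_SDK_S60_VERSION=5.0
    #   TEMP=C:\temp\checkbc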

    #updates the report path/file names, creates dirs if they do not exist
    def __updatereport(self,key):
        tmp = []
        if getdata( self.data, key ):
            #if field is empty raise an error
            for term in self.data[key].split(';'):
                if '' != term:
                    [head, tail] = os.path.split(term)
                    try:
                        term = validate(head) + os.sep + os.path.splitext(tail)[0]
                    except InputError, e:
                        os.makedirs(os.path.abspath(head))
                    tmp.append(term)
                else:
                    raise InputError(["confIS", key + " syntax incorrect" + os.linesep, False])
            #assign the updated report file str back to dict
            self.data[key] = ';'.join([ "%s" % i for i in tmp])

    #updates the filter output path/file names, creates dirs if they do not exist
    def __updateoutput(self, key):
        tmp = []
        if getdata( self.data, key ):
            #if field is empty raise an error
            for term in self.data[key].split(';'):
                if '' != term:
                    [head, tail] = os.path.split(term)
                    try:
                        term = validate(head) + os.sep + os.path.splitext(tail)[0]
                    except InputError, e:
                        os.makedirs(os.path.abspath(head))
                    tmp.append(term)
            #assign the updated report file str back to dict
            self.data[key] = ';'.join(["%s" % i for i in tmp])

    #update necessary values
    def parse(self, cmds):
        for val in cmds:
            if 'h' in val:
                self.__updatereport("REPORT_FILE_HEADERS")
                self.__updateoutput("FILTER_FILE_HEADERS")
            if 'l' in val:
                self.__updatereport("REPORT_FILE_LIBRARIES")
                self.__updateoutput("FILTER_FILE_LIBRARIES")
            if 'f' in val:
                self.__updatereport("REPORT_FILE_FILTER")
                self.__updateoutput("OUTPUT_FILE_FILTER")

        if not getdata( self.data, "TEMP"):
            self.data["TEMP"] = TOOL_DIR + "temp"
        if not os.path.exists(self.data["TEMP"]):
            os.makedirs(self.data["TEMP"])
        return self.data

1006 |
#-------------------------Utility Analysis------------------------------------------- |
|
1007 |
class CmdLineParser: |
|
1008 |
#these are data interfaces that are exposed |
|
1009 |
cmd = [] |
|
1010 |
conffile = '' |
|
1011 |
reportstr = '' |
|
1012 |
def __init__(self, argv): |
|
1013 |
self.__check_help(argv) |
|
1014 |
self.__check_carbide(argv) |
|
1015 |
self.__check_versions(argv) |
|
1016 |
self.__parsecmd(argv) |
|
1017 |
||
1018 |
#create a list of commands to be executed |
|
1019 |
def __addcmd(self, str): |
|
1020 |
if (not 0 < len(str) < 3) or (len(str) == 1 and str != 'f'): |
|
1021 |
raise InputError(["cmdlineIP", "Parameter -" + str + " incorrect" + os.linesep, True]) |
|
1022 |
if (str[0] in ['h', 'l']) and (str[1] in ['s', 'm', 'a']): |
|
1023 |
self.cmd.insert(0, [ str[0], str[1] ]) |
|
1024 |
elif str[0] in ['f']: |
|
1025 |
self.cmd.append([ str[0], "" ]) |
|
1026 |
else: |
|
1027 |
raise InputError(["cmdlineIP", "Parameter - " + str + " incorrect" + os.linesep, True]) |
|
1028 |
||
1029 |
#check if input parameter is a 'help' command |
|
1030 |
def __check_help(self, argv): |
|
1031 |
for term in ["-?", "--help", "-h", "/h", "/?"]: |
|
1032 |
if term in argv: |
|
1033 |
raise InputError( ["cmdhelp", "", True] ) |
|
1034 |
||
1035 |
#check if any tool version inputs are required |
|
1036 |
def __check_versions(self, argv): |
|
1037 |
errstr = "Cannot combine version parameter with others"+ os.linesep +"Please reissue command without '-v'" |
|
1038 |
if "-v" in argv: |
|
1039 |
sys.stdout.write( gettoolversion() ) |
|
1040 |
if len(argv) > 1: |
|
1041 |
raise InputError(["cmdlineIP", errstr, False] ) |
|
1042 |
sys.exit(0) |
|
1043 |
if "-dv" in argv: |
|
1044 |
sys.stdout.write( getdataversion() ) |
|
1045 |
if len(argv) > 1: |
|
1046 |
raise InputError(["cmdlineIP", errstr, False] ) |
|
1047 |
sys.exit(0) |
|
1048 |
||
    #check if the interface has been invoked from Carbide
    def __check_carbide(self, argv):
        global CARBIDE_PLUGIN
        if "-c" in argv:
            CARBIDE_PLUGIN = True
            argv.remove("-c")

    #check that the input is well formed, else raise an exception
    def __parsecmd(self, argv):
        #if no parameters are present
        if not len(argv):
            raise InputError(["cmdlineMP", "None specified" + os.linesep, True])
        #check if the config file exists
        if os.path.exists( os.path.abspath(argv[0]) ):
            self.conffile = os.path.abspath(argv[0])
            argv.pop(0)
        else:
            raise InputError(["cmdlineMP", "Config file not found" + os.linesep, True])
        #parse the -xx type inputs and add them to the command list
        for term in argv:
            if term[0] == "-":
                self.__addcmd(term[1:])

        #append additional arguments to the command list
        i = 0
        for val in self.cmd:
            param = "-" + val[0] + val[1]
            #the filter command and the 'a' (all) option do not need any additional input
            if ('f' != val[0]) and ('a' != val[1]):
                nxtterm = argv[ argv.index(param) + 1 ]
                #if the next term is not an option string, append it to the command as a filename
                if nxtterm[0] != "-":
                    self.cmd[i].append(nxtterm)
                    argv.remove(nxtterm)
                else:
                    raise InputError(["cmdlineMP", "Input error -" + self.cmd[i][0] + self.cmd[i][1] + os.linesep, True])
            argv.remove(param)
            i = i + 1

        #if exactly one additional parameter remains, it is the report name
        if len(argv) == 1:
            self.reportstr = os.path.splitext(argv[0])[0]
        #more than one leftover parameter is an error
        elif len(argv) > 1:
            raise InputError(["cmdlineIP", "Error in input" + os.linesep, True])
        if not len(self.cmd):
            raise InputError(["cmdlineMP", "No command specified" + os.linesep, True])

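# --- Illustrative invocation (not part of the original source; file names are examples) ---
# A typical command line consists of the config file, one or more analysis options and an
# optional report name, e.g.:
#
#   python checkbc.py bc_config.txt -ha -la -f MyReport
#
#   conffile  -> absolute path of bc_config.txt (must exist)
#   cmd       -> [['l', 'a'], ['h', 'a'], ['f', '']]   (h/l options are prepended, f is appended)
#   reportstr -> "MyReport"
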
#-------------------------Utility functions-------------------------------------------
#strip leading/trailing whitespace, '\r', '\n' and '\t'
def clean(str):
    return str.strip('\r\n\t ')

#used to wrap a string argument provided to checkbc in quotes
def quote(str):
    return "\"" + str + "\""

#validate the path provided
def validate(str):
    if not os.path.exists(str):
        tmp = os.path.abspath(str)
        if not os.path.exists(tmp):
            raise InputError(["confMPath", "Field " + str + " is not valid" + os.linesep, False])
        str = tmp
    return os.path.normpath(os.path.abspath(str))

#validate the path provided, then return it wrapped in quotes
def quotep(str):
    return quote(validate(str))

#return an unquoted version of the input string
def unquote(str):
    if str[0] == '\"':
        str = str[1:]
    if str[len(str)-1] == '\"':
        str = str[:len(str)-1]
    return str

#return the value stored under 'key', or '' if the key is missing or empty
def getdata(mydict, key):
    if mydict.has_key(key):
        if '' != mydict[key]:
            return mydict[key]
    return ''

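# --- Illustrative behaviour of the helpers above (not part of the original source) ---
#   clean("  \tepoc32\r\n")       -> "epoc32"
#   quote("C:\\my dir\\a.txt")    -> '"C:\\my dir\\a.txt"'
#   unquote('"C:\\my dir"')       -> "C:\\my dir"
#   getdata({"TEMP": ""}, "TEMP") -> ""    (missing and empty keys both yield '')
#   quotep(".")                   -> the normalised absolute path of the current directory, quoted
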
#return the default build target from an RnD SDK and "" from a Public SDK
def GetBuildTarget(sdk, dlldata, temp_path):
    bldtarget = ""
    path = ""
    xmlFile = open(dlldata)
    dllcases = xmlFile.readlines()
    xmlFile.close()

    dll_file = temp_path + os.sep + "dll.txt"
    dir_err_file = temp_path + os.sep + "dir_err.txt"

    for target in s60_build_targets:
        if sdk == os.sep:
            path = quote(os.sep+'epoc32'+os.sep+'release'+os.sep+target+os.sep+'urel'+os.sep)
        else:
            path = quote(validateTargetPath(sdk+os.sep+'epoc32'+os.sep+'release'+os.sep+target+os.sep+'urel')+os.sep)
        #skip this build target if its release directory does not exist
        if not os.path.exists(unquote(path)):
            continue

        #list the dlls present in the target's urel directory into a temporary file
        if os.name == 'nt':
            cmd = "dir /b " + path + "*.dll > " + quote(dll_file) + " 2> " + quote(dir_err_file)
        else:
            cmd = "ls --format=single-column " + path + "*.dll > " + quote(dll_file) + " 2> " + quote(dir_err_file)

        os.system(cmd)

        file = open(dll_file)
        cases = file.readlines()
        file.close()
        os.remove(dll_file)
        os.remove(dir_err_file)

        #count the dll entries in the dlldata xml file (2 lines hold the <dll_list> tags and each
        #dll entry has 8 fields) and how many of them are present in this target's urel directory
        matchFound = 0
        dllCount = 0
        for dll in dllcases:
            temp = dll.find('<dllname>')
            if temp != -1:
                dllCount = dllCount + 1
                exe = dll.lower().split('<dllname>')[1]
                exe = exe.split('</dllname>')[0]
                for en in cases:
                    if (en.lower().find(exe.lower()) != -1):
                        matchFound = matchFound + 1

        #if more than half of the known dlls are found, assume this is the build target
        if (matchFound > (dllCount / 2)):
            bldtarget = target
            break

    return bldtarget

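# --- Illustrative usage (not part of the original source; paths and file names are examples) ---
# GetBuildTarget probes <sdk>/epoc32/release/<target>/urel for every entry of s60_build_targets
# and returns the first target for which more than half of the dlls named in the dlldata xml
# are present, e.g.:
#
#   target = GetBuildTarget("C:" + os.sep + "Symbian", DATA_PATH + "dlldata.xml", TOOL_DIR + "temp")
#   # -> e.g. "armv5", or "" if no target matches
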
def validateSDKVersion(baseversion, curversion):
    baseValid = False
    curValid = False
    for i in sdk_version:
        if i.lower() == baseversion.lower():
            baseValid = True
            break
    if baseValid == False:
        raise InputError(["confIP", "Baseline SDK version\n", False])
    for i in sdk_version:
        if i.lower() == curversion.lower():
            curValid = True
    if curValid == False:
        raise InputError(["confIP", "Current SDK version\n", False])
    return True

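# --- Illustrative behaviour (not part of the original source; the version strings are assumed examples) ---
# sdk_version is populated elsewhere in this script; assuming it contains "5.0" and "5.2":
#   validateSDKVersion("5.0", "5.2")   -> True
#   validateSDKVersion("4.9", "5.2")   -> raises InputError ("Baseline SDK version")
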
def validateBulidTarget(bldtarget, baseline):
    for target in bldtarget.split(';'):
        targetFound = False
        for i in s60_build_targets:
            if i == target.lower():
                targetFound = True
                break
        if targetFound == False:
            if baseline == True:
                raise InputError(["confIP", "BASELINE_BUILDTARGET\n", False])
            else:
                raise InputError(["confIP", "CURRENT_BUILDTARGET\n", False])

#resolve a target path to an absolute, normalised form (unlike validate(), a missing path is not an error)
def validateTargetPath(path):
    if not os.path.exists(path):
        tmp = os.path.abspath(path)
        path = tmp
    return os.path.normpath(os.path.abspath(path))


#---------------------------Other funcs---------------------------------------------
def getdataversion():
    return DATA_VERSION

def gettoolversion():
    return TOOL_VERSION + ' - ' + TOOL_DATE

#check the metadata file in the server path and retrieve the latest file mentioned there
def getlatestfile(path):
    if not path.endswith('/'):
        path = path + '/'
    try:
        file = urllib.urlopen( path + "knownissues_metadata" )
    except IOError:
        return path
    else:
        knfile = (file.readline()).strip(' \t\n\r')
        if (knfile.find("DOCTYPE") != -1):
            return path
        return path + knfile

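# --- Illustrative usage (not part of the original source; the URL suffix and file name are examples) ---
# getlatestfile reads the one-line "knownissues_metadata" index next to the known-issues files
# and returns the full URL of the newest one, or the input path if the lookup fails:
#
#   url = getlatestfile(SERVER_PATH + "knownissues/")
#   # -> e.g. SERVER_PATH + "knownissues/knownissues_52.xml" if the metadata file names
#   #    "knownissues_52.xml" on its first line
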
#all arguments have been assembled, now call the executable
#print the process-id and exit-code only if invoked from the Carbide client
def invokeTool(exe, args):
    os.chdir(EXEC_PATH)
    val = True
    sys.stderr.write(os.linesep)
    sys.stderr.flush()
    sys.stdout.flush()
    print exe + args
    #on windows the command string is passed to the process directly, elsewhere it is run through the shell
    if os.name == 'nt':
        val = False
    process = subprocess.Popen(exe + args, shell=val)
    if CARBIDE_PLUGIN:
        sys.stdout.write("PID:" + str(process.pid) + os.linesep)
        sys.stdout.flush()
    #wait for the current child to complete before executing the next one
    exitcode = process.wait()
    if CARBIDE_PLUGIN:
        sys.stdout.write(os.linesep + os.path.splitext((os.path.split(exe)[1]))[0] + " exitCode:" + str(exitcode) + os.linesep)
        sys.stdout.flush()
    os.chdir(TOOL_DIR)

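# --- Illustrative usage (not part of the original source; the executable name and argument
# string are placeholders) ---
#   invokeTool(quote(EXEC_PATH + "ca.exe"), args_str)
#   # runs <tool dir>/bin/ca.exe with the assembled argument string, echoes the command line,
#   # and waits for the child to finish; with -c (Carbide mode) it also prints
#   #   PID:<child pid>   and   ca exitCode:<exit code>
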
#main function which performs the dispatch logic
def main(argv):
    try:
        cmd = CmdLineParser(argv)   #parse the command line input
    except InputError, e:
        ExHandler(e)
    os.chdir(TOOL_DIR)              #change to the checkbc path, to resolve relative paths
    try:
        cfg = ConfigParser(cmd)     #takes the command line parsed input
        args = cfg.parse(cmd.cmd)   #create key-value pairs of inputs from the config file
    except InputError, e:
        ExHandler(e)

    #a filter command, if given, is always the last entry; run the filter once before the analysers
    isFilterTrue = False
    if 'f' in cmd.cmd[len(cmd.cmd)-1]:
        isFilterTrue = True
        Fcmd = []
        Fcmd.append(cmd.cmd[len(cmd.cmd)-1])
        try:
            r = ReportFilter(args, Fcmd, cmd.reportstr)
        except InputError, e:
            ExHandler(e)
        r.run()

    for val in cmd.cmd:
        if 'h' in val:      #verify headers
            try:
                h = HeaderAnalyser(args, val[1:], cmd.reportstr)
            except InputError, e:
                ExHandler(e)
            h.run()
            if isFilterTrue:
                HFcmd = []
                HFcmd.append(val)
                try:
                    r = ReportFilter(args, HFcmd, cmd.reportstr)
                except InputError, e:
                    ExHandler(e)
                r.run()

        if 'l' in val:      #verify libraries
            try:
                o = LibraryAnalyser(args, val[1:], cmd.reportstr)
            except InputError, e:
                ExHandler(e)
            o.run()
            if isFilterTrue:
                LFcmd = []
                LFcmd.append(val)
                try:
                    r = ReportFilter(args, LFcmd, cmd.reportstr)
                except InputError, e:
                    ExHandler(e)
                r.run()


if __name__ == "__main__":
    #Check for proper python version and then continue execution
    if not "2.4" <= platform.python_version() < "3.0":
        python_error()
    main(sys.argv[1:])
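# --- Illustrative invocation (not part of the original source; file names are examples) ---
# Options that are not 'a'-scoped take one additional input file, which CmdLineParser attaches
# to the command and main() hands on to the analyser, e.g.:
#
#   python checkbc.py bc_config.txt -hs headers.txt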